[official-gcc/constexpr.git] gcc/gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "gimple.h"
31 #include "tree-iterator.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51 #include "splay-tree.h"
52 #include "vec.h"
53 #include "gimple.h"
54 #include "tree-pass.h"
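/* Per-variable data-sharing flags recorded in the splay tree of an OpenMP
   gimplification context. */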
57 enum gimplify_omp_var_data
59 GOVD_SEEN = 1,
60 GOVD_EXPLICIT = 2,
61 GOVD_SHARED = 4,
62 GOVD_PRIVATE = 8,
63 GOVD_FIRSTPRIVATE = 16,
64 GOVD_LASTPRIVATE = 32,
65 GOVD_REDUCTION = 64,
66 GOVD_LOCAL = 128,
67 GOVD_DEBUG_PRIVATE = 256,
68 GOVD_PRIVATE_OUTER_REF = 512,
69 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
70 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
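/* Kinds of OpenMP regions the gimplifier may currently be inside of. */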
74 enum omp_region_type
76 ORT_WORKSHARE = 0,
77 ORT_TASK = 1,
78 ORT_PARALLEL = 2,
79 ORT_COMBINED_PARALLEL = 3
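/* State kept for one OpenMP region while it is being gimplified. */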
82 struct gimplify_omp_ctx
84 struct gimplify_omp_ctx *outer_context;
85 splay_tree variables;
86 struct pointer_set_t *privatized_types;
87 location_t location;
88 enum omp_clause_default_kind default_kind;
89 enum omp_region_type region_type;
92 static struct gimplify_ctx *gimplify_ctxp;
93 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
96 /* Formal (expression) temporary table handling: Multiple occurrences of
97 the same scalar expression are evaluated into the same temporary. */
99 typedef struct gimple_temp_hash_elt
101 tree val; /* Key */
102 tree temp; /* Value */
103 } elt_t;
105 /* Forward declarations. */
106 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
109 form and we don't do any syntax checking. */
110 void
111 mark_addressable (tree x)
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
115 if (TREE_CODE (x) != VAR_DECL
116 && TREE_CODE (x) != PARM_DECL
117 && TREE_CODE (x) != RESULT_DECL)
118 return ;
119 TREE_ADDRESSABLE (x) = 1;
122 /* Return a hash value for a formal temporary table entry. */
124 static hashval_t
125 gimple_tree_hash (const void *p)
127 tree t = ((const elt_t *) p)->val;
128 return iterative_hash_expr (t, 0);
131 /* Compare two formal temporary table entries. */
133 static int
134 gimple_tree_eq (const void *p1, const void *p2)
136 tree t1 = ((const elt_t *) p1)->val;
137 tree t2 = ((const elt_t *) p2)->val;
138 enum tree_code code = TREE_CODE (t1);
140 if (TREE_CODE (t2) != code
141 || TREE_TYPE (t1) != TREE_TYPE (t2))
142 return 0;
144 if (!operand_equal_p (t1, t2, 0))
145 return 0;
147 /* Only allow them to compare equal if they also hash equal; otherwise
148 results are nondeterministic, and we fail bootstrap comparison. */
149 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
151 return 1;
154 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
155 *SEQ_P is NULL, a new sequence is allocated. This function is
156 similar to gimple_seq_add_stmt, but does not scan the operands.
157 During gimplification, we need to manipulate statement sequences
158 before the def/use vectors have been constructed. */
160 static void
161 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
163 gimple_stmt_iterator si;
165 if (gs == NULL)
166 return;
168 if (*seq_p == NULL)
169 *seq_p = gimple_seq_alloc ();
171 si = gsi_last (*seq_p);
173 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
176 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
177 NULL, a new sequence is allocated. This function is
178 similar to gimple_seq_add_seq, but does not scan the operands.
179 During gimplification, we need to manipulate statement sequences
180 before the def/use vectors have been constructed. */
182 static void
183 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
185 gimple_stmt_iterator si;
187 if (src == NULL)
188 return;
190 if (*dst_p == NULL)
191 *dst_p = gimple_seq_alloc ();
193 si = gsi_last (*dst_p);
194 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
197 /* Set up a context for the gimplifier. */
199 void
200 push_gimplify_context (struct gimplify_ctx *c)
202 memset (c, '\0', sizeof (*c));
203 c->prev_context = gimplify_ctxp;
204 gimplify_ctxp = c;
207 /* Tear down a context for the gimplifier. If BODY is non-null, then
208 put the temporaries into the outer BIND_EXPR. Otherwise, put them
209 in the local_decls.
211 BODY is not a sequence, but the first tuple in a sequence. */
213 void
214 pop_gimplify_context (gimple body)
216 struct gimplify_ctx *c = gimplify_ctxp;
218 gcc_assert (c && (c->bind_expr_stack == NULL
219 || VEC_empty (gimple, c->bind_expr_stack)));
220 VEC_free (gimple, heap, c->bind_expr_stack);
221 gimplify_ctxp = c->prev_context;
223 if (body)
224 declare_vars (c->temps, body, false);
225 else
226 record_vars (c->temps);
228 if (c->temp_htab)
229 htab_delete (c->temp_htab);
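/* Push the GIMPLE_BIND tuple GIMPLE_BIND onto the current context's stack
   of bind expressions. */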
232 static void
233 gimple_push_bind_expr (gimple gimple_bind)
235 if (gimplify_ctxp->bind_expr_stack == NULL)
236 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
237 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
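/* Pop the most recently pushed GIMPLE_BIND from the bind expression stack. */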
240 static void
241 gimple_pop_bind_expr (void)
243 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
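/* Return the innermost GIMPLE_BIND we are currently gimplifying into. */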
246 gimple
247 gimple_current_bind_expr (void)
249 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252 /* Return the stack of GIMPLE_BINDs created during gimplification. */
254 VEC(gimple, heap) *
255 gimple_bind_expr_stack (void)
257 return gimplify_ctxp->bind_expr_stack;
260 /* Returns true iff there is a COND_EXPR between us and the innermost
261 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
263 static bool
264 gimple_conditional_context (void)
266 return gimplify_ctxp->conditions > 0;
269 /* Note that we've entered a COND_EXPR. */
271 static void
272 gimple_push_condition (void)
274 #ifdef ENABLE_GIMPLE_CHECKING
275 if (gimplify_ctxp->conditions == 0)
276 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
277 #endif
278 ++(gimplify_ctxp->conditions);
281 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
282 now, add any conditional cleanups we've seen to the prequeue. */
284 static void
285 gimple_pop_condition (gimple_seq *pre_p)
287 int conds = --(gimplify_ctxp->conditions);
289 gcc_assert (conds >= 0);
290 if (conds == 0)
292 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
293 gimplify_ctxp->conditional_cleanups = NULL;
297 /* A stable comparison routine for use with splay trees and DECLs. */
299 static int
300 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
302 tree a = (tree) xa;
303 tree b = (tree) xb;
305 return DECL_UID (a) - DECL_UID (b);
308 /* Create a new omp construct that deals with variable remapping. */
310 static struct gimplify_omp_ctx *
311 new_omp_context (enum omp_region_type region_type)
313 struct gimplify_omp_ctx *c;
315 c = XCNEW (struct gimplify_omp_ctx);
316 c->outer_context = gimplify_omp_ctxp;
317 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
318 c->privatized_types = pointer_set_create ();
319 c->location = input_location;
320 c->region_type = region_type;
321 if (region_type != ORT_TASK)
322 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
323 else
324 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
326 return c;
329 /* Destroy an omp construct that deals with variable remapping. */
331 static void
332 delete_omp_context (struct gimplify_omp_ctx *c)
334 splay_tree_delete (c->variables);
335 pointer_set_destroy (c->privatized_types);
336 XDELETE (c);
339 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
340 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
342 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
344 static void
345 append_to_statement_list_1 (tree t, tree *list_p)
347 tree list = *list_p;
348 tree_stmt_iterator i;
350 if (!list)
352 if (t && TREE_CODE (t) == STATEMENT_LIST)
354 *list_p = t;
355 return;
357 *list_p = list = alloc_stmt_list ();
360 i = tsi_last (list);
361 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364 /* Add T to the end of the list container pointed to by LIST_P.
365 If T is an expression with no effects, it is ignored. */
367 void
368 append_to_statement_list (tree t, tree *list_p)
370 if (t && TREE_SIDE_EFFECTS (t))
371 append_to_statement_list_1 (t, list_p);
374 /* Similar, but the statement is always added, regardless of side effects. */
376 void
377 append_to_statement_list_force (tree t, tree *list_p)
379 if (t != NULL_TREE)
380 append_to_statement_list_1 (t, list_p);
383 /* Both gimplify the statement T and append it to *SEQ_P. This function
384 behaves exactly as gimplify_stmt, but you don't have to pass T as a
385 reference. */
387 void
388 gimplify_and_add (tree t, gimple_seq *seq_p)
390 gimplify_stmt (&t, seq_p);
393 /* Gimplify statement T into sequence *SEQ_P, and return the first
394 tuple in the sequence of generated tuples for this statement.
395 Return NULL if gimplifying T produced no tuples. */
397 static gimple
398 gimplify_and_return_first (tree t, gimple_seq *seq_p)
400 gimple_stmt_iterator last = gsi_last (*seq_p);
402 gimplify_and_add (t, seq_p);
404 if (!gsi_end_p (last))
406 gsi_next (&last);
407 return gsi_stmt (last);
409 else
410 return gimple_seq_first_stmt (*seq_p);
413 /* Strip off a legitimate source ending from the input string NAME of
414 length LEN. Rather than having to know the names used by all of
415 our front ends, we strip off an ending of a period followed by
416 up to five characters. (Java uses ".class".) */
418 static inline void
419 remove_suffix (char *name, int len)
421 int i;
423 for (i = 2; i < 8 && len > i; i++)
425 if (name[len - i] == '.')
427 name[len - i] = '\0';
428 break;
433 /* Create a new temporary name with PREFIX. Returns an identifier. */
435 static GTY(()) unsigned int tmp_var_id_num;
437 tree
438 create_tmp_var_name (const char *prefix)
440 char *tmp_name;
442 if (prefix)
444 char *preftmp = ASTRDUP (prefix);
446 remove_suffix (preftmp, strlen (preftmp));
447 prefix = preftmp;
450 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
451 return get_identifier (tmp_name);
455 /* Create a new temporary variable declaration of type TYPE.
456 Does NOT push it into the current binding. */
458 tree
459 create_tmp_var_raw (tree type, const char *prefix)
461 tree tmp_var;
462 tree new_type;
464 /* Make the type of the variable writable. */
465 new_type = build_type_variant (type, 0, 0);
466 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
468 tmp_var = build_decl (input_location,
469 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
470 type);
472 /* The variable was declared by the compiler. */
473 DECL_ARTIFICIAL (tmp_var) = 1;
474 /* And we don't want debug info for it. */
475 DECL_IGNORED_P (tmp_var) = 1;
477 /* Make the variable writable. */
478 TREE_READONLY (tmp_var) = 0;
480 DECL_EXTERNAL (tmp_var) = 0;
481 TREE_STATIC (tmp_var) = 0;
482 TREE_USED (tmp_var) = 1;
484 return tmp_var;
487 /* Create a new temporary variable declaration of type TYPE. DOES push the
488 variable into the current binding. Further, assume that this is called
489 only from gimplification or optimization, at which point the creation of
490 certain types is a bug. */
492 tree
493 create_tmp_var (tree type, const char *prefix)
495 tree tmp_var;
497 /* We don't allow types that are addressable (meaning we can't make copies),
498 or incomplete. We used to reject all variable-sized objects here as well,
499 but now support those for which a constant upper bound can be obtained.
500 The processing for variable sizes is performed in gimple_add_tmp_var,
501 the point at which it really matters and which may be reached via paths
502 not going through this function, e.g. after direct calls to create_tmp_var_raw. */
503 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
505 tmp_var = create_tmp_var_raw (type, prefix);
506 gimple_add_tmp_var (tmp_var);
507 return tmp_var;
510 /* Create a new temporary variable declaration of type TYPE by calling
511 create_tmp_var; if TYPE is a vector or a complex number, mark the new
512 temporary as a GIMPLE register. */
514 tree
515 create_tmp_reg (tree type, const char *prefix)
517 tree tmp;
519 tmp = create_tmp_var (type, prefix);
520 if (TREE_CODE (type) == COMPLEX_TYPE
521 || TREE_CODE (type) == VECTOR_TYPE)
522 DECL_GIMPLE_REG_P (tmp) = 1;
524 return tmp;
527 /* Create a temporary with a name derived from VAL. Subroutine of
528 lookup_tmp_var; nobody else should call this function. */
530 static inline tree
531 create_tmp_from_val (tree val)
533 return create_tmp_var (TREE_TYPE (val), get_name (val));
536 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
537 an existing expression temporary. */
539 static tree
540 lookup_tmp_var (tree val, bool is_formal)
542 tree ret;
544 /* If not optimizing, never really reuse a temporary. local-alloc
545 won't allocate any variable that is used in more than one basic
546 block, which means it will go into memory, causing much extra
547 work in reload and final and poorer code generation, outweighing
548 the extra memory allocation here. */
549 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
550 ret = create_tmp_from_val (val);
551 else
553 elt_t elt, *elt_p;
554 void **slot;
556 elt.val = val;
557 if (gimplify_ctxp->temp_htab == NULL)
558 gimplify_ctxp->temp_htab
559 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
560 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
561 if (*slot == NULL)
563 elt_p = XNEW (elt_t);
564 elt_p->val = val;
565 elt_p->temp = ret = create_tmp_from_val (val);
566 *slot = (void *) elt_p;
568 else
570 elt_p = (elt_t *) *slot;
571 ret = elt_p->temp;
575 return ret;
579 /* Return true if T is a CALL_EXPR or an expression that can be
580 assigned to a temporary. Note that this predicate should only be
581 used during gimplification. See the rationale for this in
582 gimplify_modify_expr. */
584 static bool
585 is_gimple_reg_rhs_or_call (tree t)
587 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
588 || TREE_CODE (t) == CALL_EXPR);
591 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
592 this predicate should only be used during gimplification. See the
593 rationale for this in gimplify_modify_expr. */
595 static bool
596 is_gimple_mem_rhs_or_call (tree t)
598 /* If we're dealing with a renamable type, either source or dest must be
599 a renamed variable. */
600 if (is_gimple_reg_type (TREE_TYPE (t)))
601 return is_gimple_val (t);
602 else
603 return (is_gimple_val (t) || is_gimple_lvalue (t)
604 || TREE_CODE (t) == CALL_EXPR);
607 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
609 static tree
610 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
611 bool is_formal)
613 tree t, mod;
615 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
616 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
617 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
618 fb_rvalue);
620 t = lookup_tmp_var (val, is_formal);
622 if (is_formal
623 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
624 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
625 DECL_GIMPLE_REG_P (t) = 1;
627 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
629 if (EXPR_HAS_LOCATION (val))
630 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
631 else
632 SET_EXPR_LOCATION (mod, input_location);
634 /* gimplify_modify_expr might want to reduce this further. */
635 gimplify_and_add (mod, pre_p);
636 ggc_free (mod);
638 /* If we're gimplifying into ssa, gimplify_modify_expr will have
639 given our temporary an SSA name. Find and return it. */
640 if (gimplify_ctxp->into_ssa)
642 gimple last = gimple_seq_last_stmt (*pre_p);
643 t = gimple_get_lhs (last);
646 return t;
649 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
650 in gimplify_expr. Only use this function if:
652 1) The value of the unfactored expression represented by VAL will not
653 change between the initialization and use of the temporary, and
654 2) The temporary will not be otherwise modified.
656 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
657 and #2 means it is inappropriate for && temps.
659 For other cases, use get_initialized_tmp_var instead. */
661 tree
662 get_formal_tmp_var (tree val, gimple_seq *pre_p)
664 return internal_get_tmp_var (val, pre_p, NULL, true);
667 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
668 are as in gimplify_expr. */
670 tree
671 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
673 return internal_get_tmp_var (val, pre_p, post_p, false);
676 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
677 true, generate debug info for them; otherwise don't. */
679 void
680 declare_vars (tree vars, gimple scope, bool debug_info)
682 tree last = vars;
683 if (last)
685 tree temps, block;
687 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
689 temps = nreverse (last);
691 block = gimple_bind_block (scope);
692 gcc_assert (!block || TREE_CODE (block) == BLOCK);
693 if (!block || !debug_info)
695 TREE_CHAIN (last) = gimple_bind_vars (scope);
696 gimple_bind_set_vars (scope, temps);
698 else
700 /* We need to attach the nodes both to the BIND_EXPR and to its
701 associated BLOCK for debugging purposes. The key point here
702 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
703 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
704 if (BLOCK_VARS (block))
705 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
706 else
708 gimple_bind_set_vars (scope,
709 chainon (gimple_bind_vars (scope), temps));
710 BLOCK_VARS (block) = temps;
716 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
717 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
718 no such upper bound can be obtained. */
720 static void
721 force_constant_size (tree var)
723 /* The only attempt we make is by querying the maximum size of objects
724 of the variable's type. */
726 HOST_WIDE_INT max_size;
728 gcc_assert (TREE_CODE (var) == VAR_DECL);
730 max_size = max_int_size_in_bytes (TREE_TYPE (var));
732 gcc_assert (max_size >= 0);
734 DECL_SIZE_UNIT (var)
735 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
736 DECL_SIZE (var)
737 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
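/* Add the temporary variable TMP to the locals of the current function,
   recording it in the gimplification context if one is active. */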
740 void
741 gimple_add_tmp_var (tree tmp)
743 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
749 force_constant_size (tmp);
751 DECL_CONTEXT (tmp) = current_function_decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
754 if (gimplify_ctxp)
756 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
757 gimplify_ctxp->temps = tmp;
759 /* Mark temporaries local within the nearest enclosing parallel. */
760 if (gimplify_omp_ctxp)
762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
763 while (ctx && ctx->region_type == ORT_WORKSHARE)
764 ctx = ctx->outer_context;
765 if (ctx)
766 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
769 else if (cfun)
770 record_vars (tmp);
771 else
773 gimple_seq body_seq;
775 /* This case is for nested functions. We need to expose the locals
776 they create. */
777 body_seq = gimple_body (current_function_decl);
778 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
782 /* Determines whether to assign a location to the statement GS. */
784 static bool
785 should_carry_location_p (gimple gs)
787 /* Don't emit a line note for a label. We particularly don't want to
788 emit one for the break label, since it doesn't actually correspond
789 to the beginning of the loop/switch. */
790 if (gimple_code (gs) == GIMPLE_LABEL)
791 return false;
793 return true;
797 /* Return true if a location should not be emitted for this statement
798 by annotate_one_with_location. */
800 static inline bool
801 gimple_do_not_emit_location_p (gimple g)
803 return gimple_plf (g, GF_PLF_1);
806 /* Mark statement G so a location will not be emitted by
807 annotate_one_with_location. */
809 static inline void
810 gimple_set_do_not_emit_location (gimple g)
812 /* The PLF flags are initialized to 0 when a new tuple is created,
813 so there is no need to initialize them anywhere. */
814 gimple_set_plf (g, GF_PLF_1, true);
817 /* Set the location for gimple statement GS to LOCATION. */
819 static void
820 annotate_one_with_location (gimple gs, location_t location)
822 if (!gimple_has_location (gs)
823 && !gimple_do_not_emit_location_p (gs)
824 && should_carry_location_p (gs))
825 gimple_set_location (gs, location);
829 /* Set LOCATION for all the statements after iterator GSI in sequence
830 SEQ. If GSI is pointing to the end of the sequence, start with the
831 first statement in SEQ. */
833 static void
834 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
835 location_t location)
837 if (gsi_end_p (gsi))
838 gsi = gsi_start (seq);
839 else
840 gsi_next (&gsi);
842 for (; !gsi_end_p (gsi); gsi_next (&gsi))
843 annotate_one_with_location (gsi_stmt (gsi), location);
847 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
849 void
850 annotate_all_with_location (gimple_seq stmt_p, location_t location)
852 gimple_stmt_iterator i;
854 if (gimple_seq_empty_p (stmt_p))
855 return;
857 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
859 gimple gs = gsi_stmt (i);
860 annotate_one_with_location (gs, location);
865 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
866 These nodes model computations that should only be done once. If we
867 were to unshare something like SAVE_EXPR(i++), the gimplification
868 process would create wrong code. */
870 static tree
871 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
873 enum tree_code code = TREE_CODE (*tp);
874 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
875 if (TREE_CODE_CLASS (code) == tcc_type
876 || TREE_CODE_CLASS (code) == tcc_declaration
877 || TREE_CODE_CLASS (code) == tcc_constant
878 || code == SAVE_EXPR || code == TARGET_EXPR
879 /* We can't do anything sensible with a BLOCK used as an expression,
880 but we also can't just die when we see it because of non-expression
881 uses. So just avert our eyes and cross our fingers. Silly Java. */
882 || code == BLOCK)
883 *walk_subtrees = 0;
884 else
886 gcc_assert (code != BIND_EXPR);
887 copy_tree_r (tp, walk_subtrees, data);
890 return NULL_TREE;
893 /* Callback for walk_tree to unshare most of the shared trees rooted at
894 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
895 then *TP is deep copied by calling copy_tree_r.
897 This unshares the same trees as copy_tree_r with the exception of
898 SAVE_EXPR nodes. These nodes model computations that should only be
899 done once. If we were to unshare something like SAVE_EXPR(i++), the
900 gimplification process would create wrong code. */
902 static tree
903 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
904 void *data ATTRIBUTE_UNUSED)
906 tree t = *tp;
907 enum tree_code code = TREE_CODE (t);
909 /* Skip types, decls, and constants. But we do want to look at their
910 types and the bounds of types. Mark them as visited so we properly
911 unmark their subtrees on the unmark pass. If we've already seen them,
912 don't look down further. */
913 if (TREE_CODE_CLASS (code) == tcc_type
914 || TREE_CODE_CLASS (code) == tcc_declaration
915 || TREE_CODE_CLASS (code) == tcc_constant)
917 if (TREE_VISITED (t))
918 *walk_subtrees = 0;
919 else
920 TREE_VISITED (t) = 1;
923 /* If this node has been visited already, unshare it and don't look
924 any deeper. */
925 else if (TREE_VISITED (t))
927 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
928 *walk_subtrees = 0;
931 /* Otherwise, mark the tree as visited and keep looking. */
932 else
933 TREE_VISITED (t) = 1;
935 return NULL_TREE;
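/* Callback for walk_tree that clears the TREE_VISITED flag set by
   copy_if_shared_r, stopping the walk at nodes that were never visited. */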
938 static tree
939 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
940 void *data ATTRIBUTE_UNUSED)
942 if (TREE_VISITED (*tp))
943 TREE_VISITED (*tp) = 0;
944 else
945 *walk_subtrees = 0;
947 return NULL_TREE;
950 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
951 bodies of any nested functions if we are unsharing the entire body of
952 FNDECL. */
954 static void
955 unshare_body (tree *body_p, tree fndecl)
957 struct cgraph_node *cgn = cgraph_node (fndecl);
959 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
960 if (body_p == &DECL_SAVED_TREE (fndecl))
961 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
962 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
965 /* Likewise, but mark all trees as not visited. */
967 static void
968 unvisit_body (tree *body_p, tree fndecl)
970 struct cgraph_node *cgn = cgraph_node (fndecl);
972 walk_tree (body_p, unmark_visited_r, NULL, NULL);
973 if (body_p == &DECL_SAVED_TREE (fndecl))
974 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
975 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
978 /* Unconditionally make an unshared copy of EXPR. This is used when using
979 stored expressions which span multiple functions, such as BINFO_VTABLE,
980 as the normal unsharing process can't tell that they're shared. */
982 tree
983 unshare_expr (tree expr)
985 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
986 return expr;
989 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
990 contain statements and have a value. Assign its value to a temporary
991 and give it void_type_node. Returns the temporary, or NULL_TREE if
992 WRAPPER was already void. */
994 tree
995 voidify_wrapper_expr (tree wrapper, tree temp)
997 tree type = TREE_TYPE (wrapper);
998 if (type && !VOID_TYPE_P (type))
1000 tree *p;
1002 /* Set p to point to the body of the wrapper. Loop until we find
1003 something that isn't a wrapper. */
1004 for (p = &wrapper; p && *p; )
1006 switch (TREE_CODE (*p))
1008 case BIND_EXPR:
1009 TREE_SIDE_EFFECTS (*p) = 1;
1010 TREE_TYPE (*p) = void_type_node;
1011 /* For a BIND_EXPR, the body is operand 1. */
1012 p = &BIND_EXPR_BODY (*p);
1013 break;
1015 case CLEANUP_POINT_EXPR:
1016 case TRY_FINALLY_EXPR:
1017 case TRY_CATCH_EXPR:
1018 TREE_SIDE_EFFECTS (*p) = 1;
1019 TREE_TYPE (*p) = void_type_node;
1020 p = &TREE_OPERAND (*p, 0);
1021 break;
1023 case STATEMENT_LIST:
1025 tree_stmt_iterator i = tsi_last (*p);
1026 TREE_SIDE_EFFECTS (*p) = 1;
1027 TREE_TYPE (*p) = void_type_node;
1028 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1030 break;
1032 case COMPOUND_EXPR:
1033 /* Advance to the last statement. Set all container types to void. */
1034 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1036 TREE_SIDE_EFFECTS (*p) = 1;
1037 TREE_TYPE (*p) = void_type_node;
1039 break;
1041 default:
1042 goto out;
1046 out:
1047 if (p == NULL || IS_EMPTY_STMT (*p))
1048 temp = NULL_TREE;
1049 else if (temp)
1051 /* The wrapper is on the RHS of an assignment that we're pushing
1052 down. */
1053 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1054 || TREE_CODE (temp) == MODIFY_EXPR);
1055 TREE_OPERAND (temp, 1) = *p;
1056 *p = temp;
1058 else
1060 temp = create_tmp_var (type, "retval");
1061 *p = build2 (INIT_EXPR, type, temp, *p);
1064 return temp;
1067 return NULL_TREE;
1070 /* Build calls to the builtin functions that SAVE and RESTORE the stack,
1071 as well as a temporary through which they communicate. */
1073 static void
1074 build_stack_save_restore (gimple *save, gimple *restore)
1076 tree tmp_var;
1078 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1079 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1080 gimple_call_set_lhs (*save, tmp_var);
1082 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1083 1, tmp_var);
1086 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1088 static enum gimplify_status
1089 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1091 tree bind_expr = *expr_p;
1092 bool old_save_stack = gimplify_ctxp->save_stack;
1093 tree t;
1094 gimple gimple_bind;
1095 gimple_seq body;
1097 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1099 /* Mark variables seen in this bind expr. */
1100 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1102 if (TREE_CODE (t) == VAR_DECL)
1104 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1106 /* Mark variable as local. */
1107 if (ctx && !is_global_var (t)
1108 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1109 || splay_tree_lookup (ctx->variables,
1110 (splay_tree_key) t) == NULL))
1111 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1113 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1115 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1116 cfun->has_local_explicit_reg_vars = true;
1119 /* Preliminarily mark non-addressed complex variables as eligible
1120 for promotion to gimple registers. We'll transform their uses
1121 as we find them.
1122 We exclude complex types if not optimizing because they can be
1123 subject to partial stores in GNU C by means of the __real__ and
1124 __imag__ operators and we cannot promote them to total stores
1125 (see gimplify_modify_expr_complex_part). */
1126 if (optimize
1127 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1128 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1129 && !TREE_THIS_VOLATILE (t)
1130 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1131 && !needs_to_live_in_memory (t))
1132 DECL_GIMPLE_REG_P (t) = 1;
1135 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1136 BIND_EXPR_BLOCK (bind_expr));
1137 gimple_push_bind_expr (gimple_bind);
1139 gimplify_ctxp->save_stack = false;
1141 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1142 body = NULL;
1143 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1144 gimple_bind_set_body (gimple_bind, body);
1146 if (gimplify_ctxp->save_stack)
1148 gimple stack_save, stack_restore, gs;
1149 gimple_seq cleanup, new_body;
1151 /* Save stack on entry and restore it on exit. Add a try_finally
1152 block to achieve this. Note that mudflap depends on the
1153 format of the emitted code: see mx_register_decls(). */
1154 build_stack_save_restore (&stack_save, &stack_restore);
1156 cleanup = new_body = NULL;
1157 gimplify_seq_add_stmt (&cleanup, stack_restore);
1158 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1159 GIMPLE_TRY_FINALLY);
1161 gimplify_seq_add_stmt (&new_body, stack_save);
1162 gimplify_seq_add_stmt (&new_body, gs);
1163 gimple_bind_set_body (gimple_bind, new_body);
1166 gimplify_ctxp->save_stack = old_save_stack;
1167 gimple_pop_bind_expr ();
1169 gimplify_seq_add_stmt (pre_p, gimple_bind);
1171 if (temp)
1173 *expr_p = temp;
1174 return GS_OK;
1177 *expr_p = NULL_TREE;
1178 return GS_ALL_DONE;
1181 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1182 GIMPLE value, it is assigned to a new temporary and the statement is
1183 re-written to return the temporary.
1185 PRE_P points to the sequence where side effects that must happen before
1186 STMT should be stored. */
1188 static enum gimplify_status
1189 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1191 gimple ret;
1192 tree ret_expr = TREE_OPERAND (stmt, 0);
1193 tree result_decl, result;
1195 if (ret_expr == error_mark_node)
1196 return GS_ERROR;
1198 if (!ret_expr
1199 || TREE_CODE (ret_expr) == RESULT_DECL
1200 || ret_expr == error_mark_node)
1202 gimple ret = gimple_build_return (ret_expr);
1203 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1204 gimplify_seq_add_stmt (pre_p, ret);
1205 return GS_ALL_DONE;
1208 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1209 result_decl = NULL_TREE;
1210 else
1212 result_decl = TREE_OPERAND (ret_expr, 0);
1214 /* See through a return by reference. */
1215 if (TREE_CODE (result_decl) == INDIRECT_REF)
1216 result_decl = TREE_OPERAND (result_decl, 0);
1218 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1219 || TREE_CODE (ret_expr) == INIT_EXPR)
1220 && TREE_CODE (result_decl) == RESULT_DECL);
1223 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1224 Recall that aggregate_value_p is FALSE for any aggregate type that is
1225 returned in registers. If we're returning values in registers, then
1226 we don't want to extend the lifetime of the RESULT_DECL, particularly
1227 across another call. In addition, for those aggregates for which
1228 hard_function_value generates a PARALLEL, we'll die during normal
1229 expansion of structure assignments; there's special code in expand_return
1230 to handle this case that does not exist in expand_expr. */
1231 if (!result_decl
1232 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1233 result = result_decl;
1234 else if (gimplify_ctxp->return_temp)
1235 result = gimplify_ctxp->return_temp;
1236 else
1238 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1240 /* ??? With complex control flow (usually involving abnormal edges),
1241 we can wind up warning about an uninitialized value for this. Due
1242 to how this variable is constructed and initialized, this is never
1243 true. Give up and never warn. */
1244 TREE_NO_WARNING (result) = 1;
1246 gimplify_ctxp->return_temp = result;
1249 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1250 Then gimplify the whole thing. */
1251 if (result != result_decl)
1252 TREE_OPERAND (ret_expr, 0) = result;
1254 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1256 ret = gimple_build_return (result);
1257 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1258 gimplify_seq_add_stmt (pre_p, ret);
1260 return GS_ALL_DONE;
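/* Gimplify the variable-sized declaration DECL: gimplify its size, allocate
   its storage with the alloca builtin, and make all references to it go
   through DECL_VALUE_EXPR. */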
1263 static void
1264 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1266 /* This is a variable-sized decl. Simplify its size and mark it
1267 for deferred expansion. Note that mudflap depends on the format
1268 of the emitted code: see mx_register_decls(). */
1269 tree t, addr, ptr_type;
1271 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1272 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1274 /* All occurrences of this decl in final gimplified code will be
1275 replaced by indirection. Setting DECL_VALUE_EXPR does two
1276 things: First, it lets the rest of the gimplifier know what
1277 replacement to use. Second, it lets the debug info know
1278 where to find the value. */
1279 ptr_type = build_pointer_type (TREE_TYPE (decl));
1280 addr = create_tmp_var (ptr_type, get_name (decl));
1281 DECL_IGNORED_P (addr) = 0;
1282 t = build_fold_indirect_ref (addr);
1283 SET_DECL_VALUE_EXPR (decl, t);
1284 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1286 t = built_in_decls[BUILT_IN_ALLOCA];
1287 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1288 t = fold_convert (ptr_type, t);
1289 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1291 gimplify_and_add (t, seq_p);
1293 /* Indicate that we need to restore the stack level when the
1294 enclosing BIND_EXPR is exited. */
1295 gimplify_ctxp->save_stack = true;
1299 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1300 and initialization explicit. */
1302 static enum gimplify_status
1303 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1305 tree stmt = *stmt_p;
1306 tree decl = DECL_EXPR_DECL (stmt);
1308 *stmt_p = NULL_TREE;
1310 if (TREE_TYPE (decl) == error_mark_node)
1311 return GS_ERROR;
1313 if ((TREE_CODE (decl) == TYPE_DECL
1314 || TREE_CODE (decl) == VAR_DECL)
1315 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1316 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1318 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1320 tree init = DECL_INITIAL (decl);
1322 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1323 || (!TREE_STATIC (decl)
1324 && flag_stack_check == GENERIC_STACK_CHECK
1325 && compare_tree_int (DECL_SIZE_UNIT (decl),
1326 STACK_CHECK_MAX_VAR_SIZE) > 0))
1327 gimplify_vla_decl (decl, seq_p);
1329 if (init && init != error_mark_node)
1331 if (!TREE_STATIC (decl))
1333 DECL_INITIAL (decl) = NULL_TREE;
1334 init = build2 (INIT_EXPR, void_type_node, decl, init);
1335 gimplify_and_add (init, seq_p);
1336 ggc_free (init);
1338 else
1339 /* We must still examine initializers for static variables
1340 as they may contain a label address. */
1341 walk_tree (&init, force_labels_r, NULL, NULL);
1344 /* Some front ends do not explicitly declare all anonymous
1345 artificial variables. We compensate here by declaring the
1346 variables, though it would be better if the front ends would
1347 explicitly declare them. */
1348 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1349 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1350 gimple_add_tmp_var (decl);
1353 return GS_ALL_DONE;
1356 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1357 and replacing the LOOP_EXPR with a goto, but if the loop contains an
1358 EXIT_EXPR, we need to append a label for it to jump to. */
1360 static enum gimplify_status
1361 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1363 tree saved_label = gimplify_ctxp->exit_label;
1364 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1366 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1368 gimplify_ctxp->exit_label = NULL_TREE;
1370 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1372 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1374 if (gimplify_ctxp->exit_label)
1375 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1377 gimplify_ctxp->exit_label = saved_label;
1379 *expr_p = NULL;
1380 return GS_ALL_DONE;
1383 /* Gimplifies a statement list onto a sequence. These may be created either
1384 by an enlightened front-end, or by shortcut_cond_expr. */
1386 static enum gimplify_status
1387 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1389 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1391 tree_stmt_iterator i = tsi_start (*expr_p);
1393 while (!tsi_end_p (i))
1395 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1396 tsi_delink (&i);
1399 if (temp)
1401 *expr_p = temp;
1402 return GS_OK;
1405 return GS_ALL_DONE;
1408 /* Compare two case labels. Because the front end should already have
1409 made sure that case ranges do not overlap, it is enough to only compare
1410 the CASE_LOW values of each case label. */
1412 static int
1413 compare_case_labels (const void *p1, const void *p2)
1415 const_tree const case1 = *(const_tree const*)p1;
1416 const_tree const case2 = *(const_tree const*)p2;
1418 /* The 'default' case label always goes first. */
1419 if (!CASE_LOW (case1))
1420 return -1;
1421 else if (!CASE_LOW (case2))
1422 return 1;
1423 else
1424 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1428 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1430 void
1431 sort_case_labels (VEC(tree,heap)* label_vec)
1433 size_t len = VEC_length (tree, label_vec);
1434 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1435 compare_case_labels);
1439 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1440 branch to. */
1442 static enum gimplify_status
1443 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1445 tree switch_expr = *expr_p;
1446 gimple_seq switch_body_seq = NULL;
1447 enum gimplify_status ret;
1449 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1450 fb_rvalue);
1451 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1452 return ret;
1454 if (SWITCH_BODY (switch_expr))
1456 VEC (tree,heap) *labels;
1457 VEC (tree,heap) *saved_labels;
1458 tree default_case = NULL_TREE;
1459 size_t i, len;
1460 gimple gimple_switch;
1462 /* If someone can be bothered to fill in the labels, they can
1463 be bothered to null out the body too. */
1464 gcc_assert (!SWITCH_LABELS (switch_expr));
1466 /* Save the old labels, get new ones from the body, then restore the old
1467 labels. Save all the statements from the switch body to append afterwards. */
1468 saved_labels = gimplify_ctxp->case_labels;
1469 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1471 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1472 labels = gimplify_ctxp->case_labels;
1473 gimplify_ctxp->case_labels = saved_labels;
1475 i = 0;
1476 while (i < VEC_length (tree, labels))
1478 tree elt = VEC_index (tree, labels, i);
1479 tree low = CASE_LOW (elt);
1480 bool remove_element = FALSE;
1482 if (low)
1484 /* Discard empty ranges. */
1485 tree high = CASE_HIGH (elt);
1486 if (high && tree_int_cst_lt (high, low))
1487 remove_element = TRUE;
1489 else
1491 /* The default case must be the last label in the list. */
1492 gcc_assert (!default_case);
1493 default_case = elt;
1494 remove_element = TRUE;
1497 if (remove_element)
1498 VEC_ordered_remove (tree, labels, i);
1499 else
1500 i++;
1502 len = i;
1504 if (!VEC_empty (tree, labels))
1505 sort_case_labels (labels);
1507 if (!default_case)
1509 tree type = TREE_TYPE (switch_expr);
1511 /* If the switch has no default label, add one, so that we jump
1512 around the switch body. If the labels already cover the whole
1513 range of the type, add the default label pointing to one of the
1514 existing labels. */
1515 if (type == void_type_node)
1516 type = TREE_TYPE (SWITCH_COND (switch_expr));
1517 if (len
1518 && INTEGRAL_TYPE_P (type)
1519 && TYPE_MIN_VALUE (type)
1520 && TYPE_MAX_VALUE (type)
1521 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1522 TYPE_MIN_VALUE (type)))
1524 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1525 if (!high)
1526 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1527 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1529 for (i = 1; i < len; i++)
1531 high = CASE_LOW (VEC_index (tree, labels, i));
1532 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1533 if (!low)
1534 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1535 if ((TREE_INT_CST_LOW (low) + 1
1536 != TREE_INT_CST_LOW (high))
1537 || (TREE_INT_CST_HIGH (low)
1538 + (TREE_INT_CST_LOW (high) == 0)
1539 != TREE_INT_CST_HIGH (high)))
1540 break;
1542 if (i == len)
1543 default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1544 NULL_TREE, NULL_TREE,
1545 CASE_LABEL (VEC_index (tree,
1546 labels, 0)));
1550 if (!default_case)
1552 gimple new_default;
1554 default_case
1555 = build3 (CASE_LABEL_EXPR, void_type_node,
1556 NULL_TREE, NULL_TREE,
1557 create_artificial_label (UNKNOWN_LOCATION));
1558 new_default = gimple_build_label (CASE_LABEL (default_case));
1559 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1563 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1564 default_case, labels);
1565 gimplify_seq_add_stmt (pre_p, gimple_switch);
1566 gimplify_seq_add_seq (pre_p, switch_body_seq);
1567 VEC_free(tree, heap, labels);
1569 else
1570 gcc_assert (SWITCH_LABELS (switch_expr));
1572 return GS_ALL_DONE;
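/* Gimplify a CASE_LABEL_EXPR. Add the label to the innermost context that
   is collecting case labels and emit a GIMPLE_LABEL for it. */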
1576 static enum gimplify_status
1577 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1579 struct gimplify_ctx *ctxp;
1580 gimple gimple_label;
1582 /* Invalid OpenMP programs can play Duff's Device type games with
1583 #pragma omp parallel. At least in the C front end, we don't
1584 detect such invalid branches until after gimplification. */
1585 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1586 if (ctxp->case_labels)
1587 break;
1589 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1590 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1591 gimplify_seq_add_stmt (pre_p, gimple_label);
1593 return GS_ALL_DONE;
1596 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1597 if necessary. */
1599 tree
1600 build_and_jump (tree *label_p)
1602 if (label_p == NULL)
1603 /* If there's nowhere to jump, just fall through. */
1604 return NULL_TREE;
1606 if (*label_p == NULL_TREE)
1608 tree label = create_artificial_label (UNKNOWN_LOCATION);
1609 *label_p = label;
1612 return build1 (GOTO_EXPR, void_type_node, *label_p);
1615 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1616 This also involves building a label to jump to and communicating it to
1617 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1619 static enum gimplify_status
1620 gimplify_exit_expr (tree *expr_p)
1622 tree cond = TREE_OPERAND (*expr_p, 0);
1623 tree expr;
1625 expr = build_and_jump (&gimplify_ctxp->exit_label);
1626 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1627 *expr_p = expr;
1629 return GS_OK;
1632 /* A helper function to be called via walk_tree. Mark all labels under *TP
1633 as being forced. To be called for DECL_INITIAL of static variables. */
1635 tree
1636 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1638 if (TYPE_P (*tp))
1639 *walk_subtrees = 0;
1640 if (TREE_CODE (*tp) == LABEL_DECL)
1641 FORCED_LABEL (*tp) = 1;
1643 return NULL_TREE;
1646 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1647 different from its canonical type, wrap the whole thing inside a
1648 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1649 type.
1651 The canonical type of a COMPONENT_REF is the type of the field being
1652 referenced--unless the field is a bit-field which can be read directly
1653 in a smaller mode, in which case the canonical type is the
1654 sign-appropriate type corresponding to that mode. */
1656 static void
1657 canonicalize_component_ref (tree *expr_p)
1659 tree expr = *expr_p;
1660 tree type;
1662 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1664 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1665 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1666 else
1667 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1669 /* One could argue that all the stuff below is not necessary for
1670 the non-bitfield case and declare it a FE error if type
1671 adjustment would be needed. */
1672 if (TREE_TYPE (expr) != type)
1674 #ifdef ENABLE_TYPES_CHECKING
1675 tree old_type = TREE_TYPE (expr);
1676 #endif
1677 int type_quals;
1679 /* We need to preserve qualifiers and propagate them from
1680 operand 0. */
1681 type_quals = TYPE_QUALS (type)
1682 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1683 if (TYPE_QUALS (type) != type_quals)
1684 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1686 /* Set the type of the COMPONENT_REF to the underlying type. */
1687 TREE_TYPE (expr) = type;
1689 #ifdef ENABLE_TYPES_CHECKING
1690 /* It is now a FE error if the conversion from the canonical
1691 type to the original expression type is not useless. */
1692 gcc_assert (useless_type_conversion_p (old_type, type));
1693 #endif
1697 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1698 to foo, embed that change in the ADDR_EXPR by converting
1699 T array[U];
1700 (T *)&array
1701 ==>
1702 &array[L]
1703 where L is the lower bound. For simplicity, only do this for constant
1704 lower bound.
1705 The constraint is that the type of &array[L] is trivially convertible
1706 to T *. */
1708 static void
1709 canonicalize_addr_expr (tree *expr_p)
1711 tree expr = *expr_p;
1712 tree addr_expr = TREE_OPERAND (expr, 0);
1713 tree datype, ddatype, pddatype;
1715 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1716 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1717 || TREE_CODE (addr_expr) != ADDR_EXPR)
1718 return;
1720 /* The addr_expr type should be a pointer to an array. */
1721 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1722 if (TREE_CODE (datype) != ARRAY_TYPE)
1723 return;
1725 /* The pointer to element type shall be trivially convertible to
1726 the expression pointer type. */
1727 ddatype = TREE_TYPE (datype);
1728 pddatype = build_pointer_type (ddatype);
1729 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1730 pddatype))
1731 return;
1733 /* The lower bound and element sizes must be constant. */
1734 if (!TYPE_SIZE_UNIT (ddatype)
1735 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1736 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1737 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1738 return;
1740 /* All checks succeeded. Build a new node to merge the cast. */
1741 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1742 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1743 NULL_TREE, NULL_TREE);
1744 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1746 /* We can have stripped a required restrict qualifier above. */
1747 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1748 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1751 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1752 underneath as appropriate. */
1754 static enum gimplify_status
1755 gimplify_conversion (tree *expr_p)
1757 tree tem;
1758 location_t loc = EXPR_LOCATION (*expr_p);
1759 gcc_assert (CONVERT_EXPR_P (*expr_p));
1761 /* Then strip away all but the outermost conversion. */
1762 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1764 /* And remove the outermost conversion if it's useless. */
1765 if (tree_ssa_useless_type_conversion (*expr_p))
1766 *expr_p = TREE_OPERAND (*expr_p, 0);
1768 /* Attempt to avoid NOP_EXPR by producing a reference to a subtype.
1769 For example, this folds (subclass *)&A into &A->subclass, avoiding
1770 the need for a separate statement. */
1771 if (CONVERT_EXPR_P (*expr_p)
1772 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1773 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1774 && (tem = maybe_fold_offset_to_address
1775 (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1776 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1777 *expr_p = tem;
1779 /* If we still have a conversion at the toplevel,
1780 then canonicalize some constructs. */
1781 if (CONVERT_EXPR_P (*expr_p))
1783 tree sub = TREE_OPERAND (*expr_p, 0);
1785 /* If a NOP conversion is changing the type of a COMPONENT_REF
1786 expression, then canonicalize its type now in order to expose more
1787 redundant conversions. */
1788 if (TREE_CODE (sub) == COMPONENT_REF)
1789 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1791 /* If a NOP conversion is changing a pointer to array of foo
1792 to a pointer to foo, embed that change in the ADDR_EXPR. */
1793 else if (TREE_CODE (sub) == ADDR_EXPR)
1794 canonicalize_addr_expr (expr_p);
1797 /* If we have a conversion to a non-register type force the
1798 use of a VIEW_CONVERT_EXPR instead. */
1799 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1800 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1801 TREE_OPERAND (*expr_p, 0));
1803 return GS_OK;
1806 /* Nonlocal VLAs seen in the current function. */
1807 static struct pointer_set_t *nonlocal_vlas;
1809 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1810 DECL_VALUE_EXPR, and it's worth re-examining things. */
1812 static enum gimplify_status
1813 gimplify_var_or_parm_decl (tree *expr_p)
1815 tree decl = *expr_p;
1817 /* ??? If this is a local variable, and it has not been seen in any
1818 outer BIND_EXPR, then it's probably the result of a duplicate
1819 declaration, for which we've already issued an error. It would
1820 be really nice if the front end wouldn't leak these at all.
1821 Currently the only known culprit is C++ destructors, as seen
1822 in g++.old-deja/g++.jason/binding.C. */
1823 if (TREE_CODE (decl) == VAR_DECL
1824 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1825 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1826 && decl_function_context (decl) == current_function_decl)
1828 gcc_assert (errorcount || sorrycount);
1829 return GS_ERROR;
1832 /* When within an OpenMP context, notice uses of variables. */
1833 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1834 return GS_ALL_DONE;
1836 /* If the decl is an alias for another expression, substitute it now. */
1837 if (DECL_HAS_VALUE_EXPR_P (decl))
1839 tree value_expr = DECL_VALUE_EXPR (decl);
1841 /* For referenced nonlocal VLAs add a decl for debugging purposes
1842 to the current function. */
1843 if (TREE_CODE (decl) == VAR_DECL
1844 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1845 && nonlocal_vlas != NULL
1846 && TREE_CODE (value_expr) == INDIRECT_REF
1847 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1848 && decl_function_context (decl) != current_function_decl)
1850 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1851 while (ctx && ctx->region_type == ORT_WORKSHARE)
1852 ctx = ctx->outer_context;
1853 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1855 tree copy = copy_node (decl), block;
1857 lang_hooks.dup_lang_specific_decl (copy);
1858 SET_DECL_RTL (copy, NULL_RTX);
1859 TREE_USED (copy) = 1;
1860 block = DECL_INITIAL (current_function_decl);
1861 TREE_CHAIN (copy) = BLOCK_VARS (block);
1862 BLOCK_VARS (block) = copy;
1863 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1864 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1868 *expr_p = unshare_expr (value_expr);
1869 return GS_OK;
1872 return GS_ALL_DONE;
1876 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1877 node *EXPR_P.
1879 compound_lval
1880 : min_lval '[' val ']'
1881 | min_lval '.' ID
1882 | compound_lval '[' val ']'
1883 | compound_lval '.' ID
1885 This is not part of the original SIMPLE definition, which separates
1886 array and member references, but it seems reasonable to handle them
1887 together. Also, this way we don't run into problems with union
1888 aliasing; gcc requires that for accesses through a union to alias, the
1889 union reference must be explicit, which was not always the case when we
1890 were splitting up array and member refs.
1892 PRE_P points to the sequence where side effects that must happen before
1893 *EXPR_P should be stored.
1895 POST_P points to the sequence where side effects that must happen after
1896 *EXPR_P should be stored. */
1898 static enum gimplify_status
1899 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1900 fallback_t fallback)
1902 tree *p;
1903 VEC(tree,heap) *stack;
1904 enum gimplify_status ret = GS_OK, tret;
1905 int i;
1906 location_t loc = EXPR_LOCATION (*expr_p);
1908 /* Create a stack of the subexpressions so later we can walk them in
1909 order from inner to outer. */
1910 stack = VEC_alloc (tree, heap, 10);
1912 /* We can handle anything that get_inner_reference can deal with. */
1913 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1915 restart:
1916 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1917 if (TREE_CODE (*p) == INDIRECT_REF)
1918 *p = fold_indirect_ref_loc (loc, *p);
1920 if (handled_component_p (*p))
1922 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1923 additional COMPONENT_REFs. */
1924 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1925 && gimplify_var_or_parm_decl (p) == GS_OK)
1926 goto restart;
1927 else
1928 break;
1930 VEC_safe_push (tree, heap, stack, *p);
1933 gcc_assert (VEC_length (tree, stack));
1935 /* Now STACK is a stack of pointers to all the refs we've walked through
1936 and P points to the innermost expression.
1938 Java requires that we elaborate nodes in source order. That
1939 means we must gimplify the inner expression followed by each of
1940 the indices, in order. But we can't gimplify the inner
1941 expression until we deal with any variable bounds, sizes, or
1942 positions in order to deal with PLACEHOLDER_EXPRs.
1944 So we do this in three steps. First we deal with the annotations
1945 for any variables in the components, then we gimplify the base,
1946 then we gimplify any indices, from left to right. */
1947 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1949 tree t = VEC_index (tree, stack, i);
1951 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1953 /* Gimplify the low bound and element type size and put them into
1954 the ARRAY_REF. If these values are set, they have already been
1955 gimplified. */
1956 if (TREE_OPERAND (t, 2) == NULL_TREE)
1958 tree low = unshare_expr (array_ref_low_bound (t));
1959 if (!is_gimple_min_invariant (low))
1961 TREE_OPERAND (t, 2) = low;
1962 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1963 post_p, is_gimple_reg,
1964 fb_rvalue);
1965 ret = MIN (ret, tret);
1969 if (!TREE_OPERAND (t, 3))
1971 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1972 tree elmt_size = unshare_expr (array_ref_element_size (t));
1973 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1975 /* Divide the element size by the alignment of the element
1976 type (above). */
1977 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1979 if (!is_gimple_min_invariant (elmt_size))
1981 TREE_OPERAND (t, 3) = elmt_size;
1982 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1983 post_p, is_gimple_reg,
1984 fb_rvalue);
1985 ret = MIN (ret, tret);
1989 else if (TREE_CODE (t) == COMPONENT_REF)
1991 /* Set the field offset into T and gimplify it. */
1992 if (!TREE_OPERAND (t, 2))
1994 tree offset = unshare_expr (component_ref_field_offset (t));
1995 tree field = TREE_OPERAND (t, 1);
1996 tree factor
1997 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1999 /* Divide the offset by its alignment. */
2000 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2002 if (!is_gimple_min_invariant (offset))
2004 TREE_OPERAND (t, 2) = offset;
2005 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2006 post_p, is_gimple_reg,
2007 fb_rvalue);
2008 ret = MIN (ret, tret);
2014 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2015 so as to match the min_lval predicate. Failure to do so may result
2016 in the creation of large aggregate temporaries. */
2017 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2018 fallback | fb_lvalue);
2019 ret = MIN (ret, tret);
2021 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2022 loop we also remove any useless conversions. */
2023 for (; VEC_length (tree, stack) > 0; )
2025 tree t = VEC_pop (tree, stack);
2027 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2029 /* Gimplify the dimension. */
2030 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2032 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2033 is_gimple_val, fb_rvalue);
2034 ret = MIN (ret, tret);
2037 else if (TREE_CODE (t) == BIT_FIELD_REF)
2039 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2040 is_gimple_val, fb_rvalue);
2041 ret = MIN (ret, tret);
2042 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2043 is_gimple_val, fb_rvalue);
2044 ret = MIN (ret, tret);
2047 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2049 /* The innermost expression P may have originally had
2050 TREE_SIDE_EFFECTS set which would have caused all the outer
2051 expressions in *EXPR_P leading to P to also have had
2052 TREE_SIDE_EFFECTS set. */
2053 recalculate_side_effects (t);
2056 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2057 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2059 canonicalize_component_ref (expr_p);
2060 ret = MIN (ret, GS_OK);
2063 VEC_free (tree, heap, stack);
2065 return ret;
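/* A rough sketch of the three-step walk above (temporary names are only
   illustrative):

       s.a[i + 1].x = 0;

   pushes the COMPONENT_REFs and ARRAY_REF onto STACK innermost first;
   any variable low bounds, element sizes and field offsets are
   gimplified into the refs' extra operands, then the base S, then the
   index, giving approximately

       D.1234 = i + 1;
       s.a[D.1234].x = 0;  */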
2068 /* Gimplify the self modifying expression pointed to by EXPR_P
2069 (++, --, +=, -=).
2071 PRE_P points to the list where side effects that must happen before
2072 *EXPR_P should be stored.
2074 POST_P points to the list where side effects that must happen after
2075 *EXPR_P should be stored.
2077 WANT_VALUE is nonzero iff we want to use the value of this expression
2078 in another expression. */
2080 static enum gimplify_status
2081 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2082 bool want_value)
2084 enum tree_code code;
2085 tree lhs, lvalue, rhs, t1;
2086 gimple_seq post = NULL, *orig_post_p = post_p;
2087 bool postfix;
2088 enum tree_code arith_code;
2089 enum gimplify_status ret;
2090 location_t loc = EXPR_LOCATION (*expr_p);
2092 code = TREE_CODE (*expr_p);
2094 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2095 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2097 /* Prefix or postfix? */
2098 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2099 /* Faster to treat as prefix if result is not used. */
2100 postfix = want_value;
2101 else
2102 postfix = false;
2104 /* For postfix, make sure the inner expression's post side effects
2105 are executed after side effects from this expression. */
2106 if (postfix)
2107 post_p = &post;
2109 /* Add or subtract? */
2110 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2111 arith_code = PLUS_EXPR;
2112 else
2113 arith_code = MINUS_EXPR;
2115 /* Gimplify the LHS into a GIMPLE lvalue. */
2116 lvalue = TREE_OPERAND (*expr_p, 0);
2117 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2118 if (ret == GS_ERROR)
2119 return ret;
2121 /* Extract the operands to the arithmetic operation. */
2122 lhs = lvalue;
2123 rhs = TREE_OPERAND (*expr_p, 1);
2125 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2126 that as the result value and in the postqueue operation. We also
2127 make sure to make lvalue a minimal lval, see
2128 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2129 if (postfix)
2131 if (!is_gimple_min_lval (lvalue))
2133 mark_addressable (lvalue);
2134 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2135 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2136 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2138 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2139 if (ret == GS_ERROR)
2140 return ret;
2143 /* For pointer increments, use POINTER_PLUS_EXPR. */
2144 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2146 rhs = fold_convert_loc (loc, sizetype, rhs);
2147 if (arith_code == MINUS_EXPR)
2148 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2149 arith_code = POINTER_PLUS_EXPR;
2152 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2154 if (postfix)
2156 gimplify_assign (lvalue, t1, orig_post_p);
2157 gimplify_seq_add_seq (orig_post_p, post);
2158 *expr_p = lhs;
2159 return GS_ALL_DONE;
2161 else
2163 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2164 return GS_OK;
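/* A rough sketch of the postfix case above.  When the value is wanted,

       y = x++;

   saves the old value in a temporary that becomes the result and queues
   the store on the post queue, approximately

       x.1 = x;
       ... x.1 is used as the value of the expression ...
       x = x.1 + 1;

   For pointers, the increment becomes POINTER_PLUS_EXPR with a sizetype
   offset, negated for decrements.  */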
2169 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2171 static void
2172 maybe_with_size_expr (tree *expr_p)
2174 tree expr = *expr_p;
2175 tree type = TREE_TYPE (expr);
2176 tree size;
2178 /* If we've already wrapped this or the type is error_mark_node, we can't do
2179 anything. */
2180 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2181 || type == error_mark_node)
2182 return;
2184 /* If the size isn't known or is a constant, we have nothing to do. */
2185 size = TYPE_SIZE_UNIT (type);
2186 if (!size || TREE_CODE (size) == INTEGER_CST)
2187 return;
2189 /* Otherwise, make a WITH_SIZE_EXPR. */
2190 size = unshare_expr (size);
2191 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2192 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
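/* A rough sketch, assuming the GNU C extension that allows variably
   sized structure members:

       void f (int n)
       {
         struct s { char c[n]; } a, b;
         a = b;
       }

   TYPE_SIZE_UNIT of STRUCT S is not a constant, so the RHS is wrapped as
   WITH_SIZE_EXPR <b, size-in-bytes> and later passes still know how many
   bytes the assignment has to move.  */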
2196 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2197 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2198 the CALL_EXPR. */
2200 static enum gimplify_status
2201 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2203 bool (*test) (tree);
2204 fallback_t fb;
2206 /* In general, we allow lvalues for function arguments to avoid
2207 extra overhead of copying large aggregates out of even larger
2208 aggregates into temporaries only to copy the temporaries to
2209 the argument list. Make optimizers happy by pulling out to
2210 temporaries those types that fit in registers. */
2211 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2212 test = is_gimple_val, fb = fb_rvalue;
2213 else
2214 test = is_gimple_lvalue, fb = fb_either;
2216 /* If this is a variable sized type, we must remember the size. */
2217 maybe_with_size_expr (arg_p);
2219 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2220 /* Make sure arguments have the same location as the function call
2221 itself. */
2222 protected_set_expr_location (*arg_p, call_location);
2224 /* There is a sequence point before a function call. Side effects in
2225 the argument list must occur before the actual call. So, when
2226 gimplifying arguments, force gimplify_expr to use an internal
2227 post queue which is then appended to the end of PRE_P. */
2228 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2232 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2233 WANT_VALUE is true if the result of the call is desired. */
2235 static enum gimplify_status
2236 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2238 tree fndecl, parms, p;
2239 enum gimplify_status ret;
2240 int i, nargs;
2241 gimple call;
2242 bool builtin_va_start_p = FALSE;
2243 location_t loc = EXPR_LOCATION (*expr_p);
2245 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2247 /* For reliable diagnostics during inlining, it is necessary that
2248 every call_expr be annotated with file and line. */
2249 if (! EXPR_HAS_LOCATION (*expr_p))
2250 SET_EXPR_LOCATION (*expr_p, input_location);
2252 /* This may be a call to a builtin function.
2254 Builtin function calls may be transformed into different
2255 (and more efficient) builtin function calls under certain
2256 circumstances. Unfortunately, gimplification can muck things
2257 up enough that the builtin expanders are not aware that certain
2258 transformations are still valid.
2260 So we attempt transformation/gimplification of the call before
2261 we gimplify the CALL_EXPR. At this time we do not manage to
2262 transform all calls in the same manner as the expanders do, but
2263 we do transform most of them. */
2264 fndecl = get_callee_fndecl (*expr_p);
2265 if (fndecl && DECL_BUILT_IN (fndecl))
2267 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2269 if (new_tree && new_tree != *expr_p)
2271 /* There was a transformation of this call which computes the
2272 same value, but in a more efficient way. Return and try
2273 again. */
2274 *expr_p = new_tree;
2275 return GS_OK;
2278 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2279 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2281 builtin_va_start_p = TRUE;
2282 if (call_expr_nargs (*expr_p) < 2)
2284 error ("too few arguments to function %<va_start%>");
2285 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2286 return GS_OK;
2289 if (fold_builtin_next_arg (*expr_p, true))
2291 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2292 return GS_OK;
2297 /* There is a sequence point before the call, so any side effects in
2298 the calling expression must occur before the actual call. Force
2299 gimplify_expr to use an internal post queue. */
2300 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2301 is_gimple_call_addr, fb_rvalue);
2303 nargs = call_expr_nargs (*expr_p);
2305 /* Get argument types for verification. */
2306 fndecl = get_callee_fndecl (*expr_p);
2307 parms = NULL_TREE;
2308 if (fndecl)
2309 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2310 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2311 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2313 if (fndecl && DECL_ARGUMENTS (fndecl))
2314 p = DECL_ARGUMENTS (fndecl);
2315 else if (parms)
2316 p = parms;
2317 else
2318 p = NULL_TREE;
2319 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2322 /* If the last argument is __builtin_va_arg_pack () and it is not
2323 passed as a named argument, decrease the number of CALL_EXPR
2324 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2325 if (!p
2326 && i < nargs
2327 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2329 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2330 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2332 if (last_arg_fndecl
2333 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2334 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2335 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2337 tree call = *expr_p;
2339 --nargs;
2340 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2341 CALL_EXPR_FN (call),
2342 nargs, CALL_EXPR_ARGP (call));
2344 /* Copy all CALL_EXPR flags, location and block, except
2345 the CALL_EXPR_VA_ARG_PACK flag. */
2346 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2347 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2348 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2349 = CALL_EXPR_RETURN_SLOT_OPT (call);
2350 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2351 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2352 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2353 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2355 /* Set CALL_EXPR_VA_ARG_PACK. */
2356 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2360 /* Finally, gimplify the function arguments. */
2361 if (nargs > 0)
2363 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2364 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2365 PUSH_ARGS_REVERSED ? i-- : i++)
2367 enum gimplify_status t;
2369 /* Avoid gimplifying the second argument to va_start, which needs to
2370 be the plain PARM_DECL. */
2371 if ((i != 1) || !builtin_va_start_p)
2373 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2374 EXPR_LOCATION (*expr_p));
2376 if (t == GS_ERROR)
2377 ret = GS_ERROR;
2382 /* Verify the function result. */
2383 if (want_value && fndecl
2384 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
2386 error_at (loc, "using result of function returning %<void%>");
2387 ret = GS_ERROR;
2390 /* Try this again in case gimplification exposed something. */
2391 if (ret != GS_ERROR)
2393 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2395 if (new_tree && new_tree != *expr_p)
2397 /* There was a transformation of this call which computes the
2398 same value, but in a more efficient way. Return and try
2399 again. */
2400 *expr_p = new_tree;
2401 return GS_OK;
2404 else
2406 *expr_p = error_mark_node;
2407 return GS_ERROR;
2410 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2411 CALL_EXPR. This allows us to eliminate redundant or useless
2412 calls to "const" functions. */
2413 if (TREE_CODE (*expr_p) == CALL_EXPR)
2415 int flags = call_expr_flags (*expr_p);
2416 if (flags & (ECF_CONST | ECF_PURE)
2417 /* An infinite loop is considered a side effect. */
2418 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2419 TREE_SIDE_EFFECTS (*expr_p) = 0;
2422 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2423 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2424 form and delegate the creation of a GIMPLE_CALL to
2425 gimplify_modify_expr. This is always possible because when
2426 WANT_VALUE is true, the caller wants the result of this call into
2427 a temporary, which means that we will emit an INIT_EXPR in
2428 internal_get_tmp_var which will then be handled by
2429 gimplify_modify_expr. */
2430 if (!want_value)
2432 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2433 have to do is replicate it as a GIMPLE_CALL tuple. */
2434 call = gimple_build_call_from_tree (*expr_p);
2435 gimplify_seq_add_stmt (pre_p, call);
2436 *expr_p = NULL_TREE;
2439 return ret;
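/* A rough sketch of the __builtin_va_arg_pack handling above (assumed
   usage, not taken from this file):

       extern int f (const char *, ...);
       static inline __attribute__ ((always_inline))
       int f_wrap (const char *fmt, ...)
       {
         return f (fmt, __builtin_va_arg_pack ());
       }

   The trailing va_arg_pack call is dropped from the inner CALL_EXPR's
   argument list and CALL_EXPR_VA_ARG_PACK is set instead; the packed
   arguments are substituted back when F_WRAP is inlined.  */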
2442 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2443 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2445 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2446 condition is true or false, respectively. If null, we should generate
2447 our own to skip over the evaluation of this specific expression.
2449 LOCUS is the source location of the COND_EXPR.
2451 This function is the tree equivalent of do_jump.
2453 shortcut_cond_r should only be called by shortcut_cond_expr. */
2455 static tree
2456 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2457 location_t locus)
2459 tree local_label = NULL_TREE;
2460 tree t, expr = NULL;
2462 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2463 retain the shortcut semantics. Just insert the gotos here;
2464 shortcut_cond_expr will append the real blocks later. */
2465 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2467 location_t new_locus;
2469 /* Turn if (a && b) into
2471 if (a); else goto no;
2472 if (b) goto yes; else goto no;
2473 (no:) */
2475 if (false_label_p == NULL)
2476 false_label_p = &local_label;
2478 /* Keep the original source location on the first 'if'. */
2479 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2480 append_to_statement_list (t, &expr);
2482 /* Set the source location of the && on the second 'if'. */
2483 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2484 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2485 new_locus);
2486 append_to_statement_list (t, &expr);
2488 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2490 location_t new_locus;
2492 /* Turn if (a || b) into
2494 if (a) goto yes;
2495 if (b) goto yes; else goto no;
2496 (yes:) */
2498 if (true_label_p == NULL)
2499 true_label_p = &local_label;
2501 /* Keep the original source location on the first 'if'. */
2502 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2503 append_to_statement_list (t, &expr);
2505 /* Set the source location of the || on the second 'if'. */
2506 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2507 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2508 new_locus);
2509 append_to_statement_list (t, &expr);
2511 else if (TREE_CODE (pred) == COND_EXPR)
2513 location_t new_locus;
2515 /* As long as we're messing with gotos, turn if (a ? b : c) into
2516 if (a)
2517 if (b) goto yes; else goto no;
2518 else
2519 if (c) goto yes; else goto no; */
2521 /* Keep the original source location on the first 'if'. Set the source
2522 location of the ? on the second 'if'. */
2523 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2524 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2525 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2526 false_label_p, locus),
2527 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2528 false_label_p, new_locus));
2530 else
2532 expr = build3 (COND_EXPR, void_type_node, pred,
2533 build_and_jump (true_label_p),
2534 build_and_jump (false_label_p));
2535 SET_EXPR_LOCATION (expr, locus);
2538 if (local_label)
2540 t = build1 (LABEL_EXPR, void_type_node, local_label);
2541 append_to_statement_list (t, &expr);
2544 return expr;
2547 /* Given a conditional expression EXPR with short-circuit boolean
2548 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2549 predicate apart into the equivalent sequence of conditionals. */
2551 static tree
2552 shortcut_cond_expr (tree expr)
2554 tree pred = TREE_OPERAND (expr, 0);
2555 tree then_ = TREE_OPERAND (expr, 1);
2556 tree else_ = TREE_OPERAND (expr, 2);
2557 tree true_label, false_label, end_label, t;
2558 tree *true_label_p;
2559 tree *false_label_p;
2560 bool emit_end, emit_false, jump_over_else;
2561 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2562 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2564 /* First do simple transformations. */
2565 if (!else_se)
2567 /* If there is no 'else', turn
2568 if (a && b) then c
2569 into
2570 if (a) if (b) then c. */
2571 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2573 /* Keep the original source location on the first 'if'. */
2574 location_t locus = EXPR_HAS_LOCATION (expr)
2575 ? EXPR_LOCATION (expr) : input_location;
2576 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2577 /* Set the source location of the && on the second 'if'. */
2578 if (EXPR_HAS_LOCATION (pred))
2579 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2580 then_ = shortcut_cond_expr (expr);
2581 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2582 pred = TREE_OPERAND (pred, 0);
2583 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2584 SET_EXPR_LOCATION (expr, locus);
2588 if (!then_se)
2590 /* If there is no 'then', turn
2591 if (a || b); else d
2592 into
2593 if (a); else if (b); else d. */
2594 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2596 /* Keep the original source location on the first 'if'. */
2597 location_t locus = EXPR_HAS_LOCATION (expr)
2598 ? EXPR_LOCATION (expr) : input_location;
2599 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2600 /* Set the source location of the || on the second 'if'. */
2601 if (EXPR_HAS_LOCATION (pred))
2602 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2603 else_ = shortcut_cond_expr (expr);
2604 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2605 pred = TREE_OPERAND (pred, 0);
2606 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2607 SET_EXPR_LOCATION (expr, locus);
2611 /* If we're done, great. */
2612 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2613 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2614 return expr;
2616 /* Otherwise we need to mess with gotos. Change
2617 if (a) c; else d;
2619 if (a); else goto no;
2620 c; goto end;
2621 no: d; end:
2622 and recursively gimplify the condition. */
2624 true_label = false_label = end_label = NULL_TREE;
2626 /* If our arms just jump somewhere, hijack those labels so we don't
2627 generate jumps to jumps. */
2629 if (then_
2630 && TREE_CODE (then_) == GOTO_EXPR
2631 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2633 true_label = GOTO_DESTINATION (then_);
2634 then_ = NULL;
2635 then_se = false;
2638 if (else_
2639 && TREE_CODE (else_) == GOTO_EXPR
2640 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2642 false_label = GOTO_DESTINATION (else_);
2643 else_ = NULL;
2644 else_se = false;
2647 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2648 if (true_label)
2649 true_label_p = &true_label;
2650 else
2651 true_label_p = NULL;
2653 /* The 'else' branch also needs a label if it contains interesting code. */
2654 if (false_label || else_se)
2655 false_label_p = &false_label;
2656 else
2657 false_label_p = NULL;
2659 /* If there was nothing else in our arms, just forward the label(s). */
2660 if (!then_se && !else_se)
2661 return shortcut_cond_r (pred, true_label_p, false_label_p,
2662 EXPR_HAS_LOCATION (expr)
2663 ? EXPR_LOCATION (expr) : input_location);
2665 /* If our last subexpression already has a terminal label, reuse it. */
2666 if (else_se)
2667 t = expr_last (else_);
2668 else if (then_se)
2669 t = expr_last (then_);
2670 else
2671 t = NULL;
2672 if (t && TREE_CODE (t) == LABEL_EXPR)
2673 end_label = LABEL_EXPR_LABEL (t);
2675 /* If we don't care about jumping to the 'else' branch, jump to the end
2676 if the condition is false. */
2677 if (!false_label_p)
2678 false_label_p = &end_label;
2680 /* We only want to emit these labels if we aren't hijacking them. */
2681 emit_end = (end_label == NULL_TREE);
2682 emit_false = (false_label == NULL_TREE);
2684 /* We only emit the jump over the else clause if we have to--if the
2685 then clause may fall through. Otherwise we can wind up with a
2686 useless jump and a useless label at the end of gimplified code,
2687 which will cause us to think that this conditional as a whole
2688 falls through even if it doesn't. If we then inline a function
2689 which ends with such a condition, that can cause us to issue an
2690 inappropriate warning about control reaching the end of a
2691 non-void function. */
2692 jump_over_else = block_may_fallthru (then_);
2694 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2695 EXPR_HAS_LOCATION (expr)
2696 ? EXPR_LOCATION (expr) : input_location);
2698 expr = NULL;
2699 append_to_statement_list (pred, &expr);
2701 append_to_statement_list (then_, &expr);
2702 if (else_se)
2704 if (jump_over_else)
2706 tree last = expr_last (expr);
2707 t = build_and_jump (&end_label);
2708 if (EXPR_HAS_LOCATION (last))
2709 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2710 append_to_statement_list (t, &expr);
2712 if (emit_false)
2714 t = build1 (LABEL_EXPR, void_type_node, false_label);
2715 append_to_statement_list (t, &expr);
2717 append_to_statement_list (else_, &expr);
2719 if (emit_end && end_label)
2721 t = build1 (LABEL_EXPR, void_type_node, end_label);
2722 append_to_statement_list (t, &expr);
2725 return expr;
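/* A rough sketch of the rewrite above:

       if (a && b)
         f ();
       else
         g ();

   becomes approximately

       if (a) ; else goto no;
       if (b) ; else goto no;
       f ();
       goto end;
       no: g ();
       end: ;

   with the labels created lazily and reused whenever an arm is already a
   plain GOTO_EXPR.  */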
2728 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2730 tree
2731 gimple_boolify (tree expr)
2733 tree type = TREE_TYPE (expr);
2734 location_t loc = EXPR_LOCATION (expr);
2736 if (TREE_CODE (expr) == NE_EXPR
2737 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2738 && integer_zerop (TREE_OPERAND (expr, 1)))
2740 tree call = TREE_OPERAND (expr, 0);
2741 tree fn = get_callee_fndecl (call);
2743 /* For __builtin_expect ((long) (x), y) recurse into x as well
2744 if x is truth_value_p. */
2745 if (fn
2746 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2747 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2748 && call_expr_nargs (call) == 2)
2750 tree arg = CALL_EXPR_ARG (call, 0);
2751 if (arg)
2753 if (TREE_CODE (arg) == NOP_EXPR
2754 && TREE_TYPE (arg) == TREE_TYPE (call))
2755 arg = TREE_OPERAND (arg, 0);
2756 if (truth_value_p (TREE_CODE (arg)))
2758 arg = gimple_boolify (arg);
2759 CALL_EXPR_ARG (call, 0)
2760 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2766 if (TREE_CODE (type) == BOOLEAN_TYPE)
2767 return expr;
2769 switch (TREE_CODE (expr))
2771 case TRUTH_AND_EXPR:
2772 case TRUTH_OR_EXPR:
2773 case TRUTH_XOR_EXPR:
2774 case TRUTH_ANDIF_EXPR:
2775 case TRUTH_ORIF_EXPR:
2776 /* Also boolify the arguments of truth exprs. */
2777 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2778 /* FALLTHRU */
2780 case TRUTH_NOT_EXPR:
2781 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2782 /* FALLTHRU */
2784 case EQ_EXPR: case NE_EXPR:
2785 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2786 /* These expressions always produce boolean results. */
2787 TREE_TYPE (expr) = boolean_type_node;
2788 return expr;
2790 default:
2791 /* Other expressions that get here must have boolean values, but
2792 might need to be converted to the appropriate mode. */
2793 return fold_convert_loc (loc, boolean_type_node, expr);
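/* A rough sketch of the __builtin_expect case above:

       if (__builtin_expect ((long) (x > 0), 1))
         ...

   The comparison inside the cast is boolified as well, so the truth
   value is computed in BOOLEAN_TYPE and only then widened back to the
   builtin's argument type, preserving the branch-prediction hint while
   giving the gimplifier a genuine boolean condition.  */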
2797 /* Given a conditional expression *EXPR_P without side effects, gimplify
2798 its operands. New statements are inserted into PRE_P. */
2800 static enum gimplify_status
2801 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2803 tree expr = *expr_p, cond;
2804 enum gimplify_status ret, tret;
2805 enum tree_code code;
2807 cond = gimple_boolify (COND_EXPR_COND (expr));
2809 /* We need to handle && and || specially, as their gimplification
2810 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
2811 code = TREE_CODE (cond);
2812 if (code == TRUTH_ANDIF_EXPR)
2813 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2814 else if (code == TRUTH_ORIF_EXPR)
2815 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2816 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2817 COND_EXPR_COND (*expr_p) = cond;
2819 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2820 is_gimple_val, fb_rvalue);
2821 ret = MIN (ret, tret);
2822 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2823 is_gimple_val, fb_rvalue);
2825 return MIN (ret, tret);
2828 /* Returns true if evaluating EXPR could trap.
2829 EXPR is GENERIC, while tree_could_trap_p can be called
2830 only on GIMPLE. */
2832 static bool
2833 generic_expr_could_trap_p (tree expr)
2835 unsigned i, n;
2837 if (!expr || is_gimple_val (expr))
2838 return false;
2840 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2841 return true;
2843 n = TREE_OPERAND_LENGTH (expr);
2844 for (i = 0; i < n; i++)
2845 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2846 return true;
2848 return false;
2851 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2852 into
2854 if (p)                      if (p)
2855   t1 = a;                     a;
2856 else               or       else
2857   t1 = b;                     b;
2860 The second form is used when *EXPR_P is of type void.
2862 PRE_P points to the list where side effects that must happen before
2863 *EXPR_P should be stored. */
2865 static enum gimplify_status
2866 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2868 tree expr = *expr_p;
2869 tree type = TREE_TYPE (expr);
2870 location_t loc = EXPR_LOCATION (expr);
2871 tree tmp, arm1, arm2;
2872 enum gimplify_status ret;
2873 tree label_true, label_false, label_cont;
2874 bool have_then_clause_p, have_else_clause_p;
2875 gimple gimple_cond;
2876 enum tree_code pred_code;
2877 gimple_seq seq = NULL;
2879 /* If this COND_EXPR has a value, copy the values into a temporary within
2880 the arms. */
2881 if (!VOID_TYPE_P (type))
2883 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2884 tree result;
2886 /* If either an rvalue is ok or we do not require an lvalue, create the
2887 temporary. But we cannot do that if the type is addressable. */
2888 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2889 && !TREE_ADDRESSABLE (type))
2891 if (gimplify_ctxp->allow_rhs_cond_expr
2892 /* If either branch has side effects or could trap, it can't be
2893 evaluated unconditionally. */
2894 && !TREE_SIDE_EFFECTS (then_)
2895 && !generic_expr_could_trap_p (then_)
2896 && !TREE_SIDE_EFFECTS (else_)
2897 && !generic_expr_could_trap_p (else_))
2898 return gimplify_pure_cond_expr (expr_p, pre_p);
2900 tmp = create_tmp_var (type, "iftmp");
2901 result = tmp;
2904 /* Otherwise, only create and copy references to the values. */
2905 else
2907 type = build_pointer_type (type);
2909 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2910 then_ = build_fold_addr_expr_loc (loc, then_);
2912 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2913 else_ = build_fold_addr_expr_loc (loc, else_);
2915 expr
2916 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2918 tmp = create_tmp_var (type, "iftmp");
2919 result = build_fold_indirect_ref_loc (loc, tmp);
2922 /* Build the new then clause, `tmp = then_;'. But don't build the
2923 assignment if the value is void; in C++ it can be void if it is a throw. */
2924 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2925 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2927 /* Similarly, build the new else clause, `tmp = else_;'. */
2928 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2929 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2931 TREE_TYPE (expr) = void_type_node;
2932 recalculate_side_effects (expr);
2934 /* Move the COND_EXPR to the prequeue. */
2935 gimplify_stmt (&expr, pre_p);
2937 *expr_p = result;
2938 return GS_ALL_DONE;
2941 /* Make sure the condition has BOOLEAN_TYPE. */
2942 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2944 /* Break apart && and || conditions. */
2945 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2946 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2948 expr = shortcut_cond_expr (expr);
2950 if (expr != *expr_p)
2952 *expr_p = expr;
2954 /* We can't rely on gimplify_expr to re-gimplify the expanded
2955 form properly, as cleanups might cause the target labels to be
2956 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2957 set up a conditional context. */
2958 gimple_push_condition ();
2959 gimplify_stmt (expr_p, &seq);
2960 gimple_pop_condition (pre_p);
2961 gimple_seq_add_seq (pre_p, seq);
2963 return GS_ALL_DONE;
2967 /* Now do the normal gimplification. */
2969 /* Gimplify condition. */
2970 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2971 fb_rvalue);
2972 if (ret == GS_ERROR)
2973 return GS_ERROR;
2974 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2976 gimple_push_condition ();
2978 have_then_clause_p = have_else_clause_p = false;
2979 if (TREE_OPERAND (expr, 1) != NULL
2980 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2981 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2982 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2983 == current_function_decl)
2984 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2985 have different locations, otherwise we end up with incorrect
2986 location information on the branches. */
2987 && (optimize
2988 || !EXPR_HAS_LOCATION (expr)
2989 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2990 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2992 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2993 have_then_clause_p = true;
2995 else
2996 label_true = create_artificial_label (UNKNOWN_LOCATION);
2997 if (TREE_OPERAND (expr, 2) != NULL
2998 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2999 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3000 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3001 == current_function_decl)
3002 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3003 have different locations, otherwise we end up with incorrect
3004 location information on the branches. */
3005 && (optimize
3006 || !EXPR_HAS_LOCATION (expr)
3007 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3008 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3010 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3011 have_else_clause_p = true;
3013 else
3014 label_false = create_artificial_label (UNKNOWN_LOCATION);
3016 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3017 &arm2);
3019 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3020 label_false);
3022 gimplify_seq_add_stmt (&seq, gimple_cond);
3023 label_cont = NULL_TREE;
3024 if (!have_then_clause_p)
3026 /* For if (...) {} else { code; } put label_true after
3027 the else block. */
3028 if (TREE_OPERAND (expr, 1) == NULL_TREE
3029 && !have_else_clause_p
3030 && TREE_OPERAND (expr, 2) != NULL_TREE)
3031 label_cont = label_true;
3032 else
3034 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3035 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3036 /* For if (...) { code; } else {} or
3037 if (...) { code; } else goto label; or
3038 if (...) { code; return; } else { ... }
3039 label_cont isn't needed. */
3040 if (!have_else_clause_p
3041 && TREE_OPERAND (expr, 2) != NULL_TREE
3042 && gimple_seq_may_fallthru (seq))
3044 gimple g;
3045 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3047 g = gimple_build_goto (label_cont);
3049 /* GIMPLE_COND's are very low level; they have embedded
3050 gotos. This particular embedded goto should not be marked
3051 with the location of the original COND_EXPR, as it would
3052 correspond to the COND_EXPR's condition, not the ELSE or the
3053 THEN arms. To avoid marking it with the wrong location, flag
3054 it as "no location". */
3055 gimple_set_do_not_emit_location (g);
3057 gimplify_seq_add_stmt (&seq, g);
3061 if (!have_else_clause_p)
3063 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3064 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3066 if (label_cont)
3067 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3069 gimple_pop_condition (pre_p);
3070 gimple_seq_add_seq (pre_p, seq);
3072 if (ret == GS_ERROR)
3073 ; /* Do nothing. */
3074 else if (have_then_clause_p || have_else_clause_p)
3075 ret = GS_ALL_DONE;
3076 else
3078 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3079 expr = TREE_OPERAND (expr, 0);
3080 gimplify_stmt (&expr, pre_p);
3083 *expr_p = NULL;
3084 return ret;
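/* A rough sketch of the value-producing case above (the temporary name
   is only illustrative):

       x = p ? f () : g ();

   becomes approximately

       if (p) iftmp.2 = f (); else iftmp.2 = g ();
       x = iftmp.2;

   whereas a void COND_EXPR keeps its arms as statements and is lowered
   directly to a GIMPLE_COND with explicit labels and gotos.  */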
3087 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3088 to be marked addressable.
3090 We cannot rely on such an expression being directly markable if a temporary
3091 has been created by the gimplification. In this case, we create another
3092 temporary and initialize it with a copy, which will become a store after we
3093 mark it addressable. This can happen if the front-end passed us something
3094 that it could not mark addressable yet, like a Fortran pass-by-reference
3095 parameter (int) floatvar. */
3097 static void
3098 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3100 while (handled_component_p (*expr_p))
3101 expr_p = &TREE_OPERAND (*expr_p, 0);
3102 if (is_gimple_reg (*expr_p))
3103 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3106 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3107 a call to __builtin_memcpy. */
3109 static enum gimplify_status
3110 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3111 gimple_seq *seq_p)
3113 tree t, to, to_ptr, from, from_ptr;
3114 gimple gs;
3115 location_t loc = EXPR_LOCATION (*expr_p);
3117 to = TREE_OPERAND (*expr_p, 0);
3118 from = TREE_OPERAND (*expr_p, 1);
3120 /* Mark the RHS addressable. Beware that it may not be possible to do so
3121 directly if a temporary has been created by the gimplification. */
3122 prepare_gimple_addressable (&from, seq_p);
3124 mark_addressable (from);
3125 from_ptr = build_fold_addr_expr_loc (loc, from);
3126 gimplify_arg (&from_ptr, seq_p, loc);
3128 mark_addressable (to);
3129 to_ptr = build_fold_addr_expr_loc (loc, to);
3130 gimplify_arg (&to_ptr, seq_p, loc);
3132 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3134 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3136 if (want_value)
3138 /* tmp = memcpy() */
3139 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3140 gimple_call_set_lhs (gs, t);
3141 gimplify_seq_add_stmt (seq_p, gs);
3143 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3144 return GS_ALL_DONE;
3147 gimplify_seq_add_stmt (seq_p, gs);
3148 *expr_p = NULL;
3149 return GS_ALL_DONE;
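/* A rough sketch of the lowering above, assuming an aggregate assignment
   too large to expand as individual moves:

       struct big a, b;
       a = b;

   becomes approximately

       __builtin_memcpy (&a, &b, sizeof (struct big));

   after both sides are marked addressable; if the value of the
   assignment is wanted, it is re-read through the pointer returned by
   memcpy.  */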
3152 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3153 a call to __builtin_memset. In this case we know that the RHS is
3154 a CONSTRUCTOR with an empty element list. */
3156 static enum gimplify_status
3157 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3158 gimple_seq *seq_p)
3160 tree t, from, to, to_ptr;
3161 gimple gs;
3162 location_t loc = EXPR_LOCATION (*expr_p);
3164 /* Assert our assumptions, to abort instead of producing wrong code
3165 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3166 not be immediately exposed. */
3167 from = TREE_OPERAND (*expr_p, 1);
3168 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3169 from = TREE_OPERAND (from, 0);
3171 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3172 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3174 /* Now proceed. */
3175 to = TREE_OPERAND (*expr_p, 0);
3177 to_ptr = build_fold_addr_expr_loc (loc, to);
3178 gimplify_arg (&to_ptr, seq_p, loc);
3179 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3181 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3183 if (want_value)
3185 /* tmp = memset() */
3186 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3187 gimple_call_set_lhs (gs, t);
3188 gimplify_seq_add_stmt (seq_p, gs);
3190 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3191 return GS_ALL_DONE;
3194 gimplify_seq_add_stmt (seq_p, gs);
3195 *expr_p = NULL;
3196 return GS_ALL_DONE;
3199 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3200 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3201 assignment. Returns non-null if we detect a potential overlap. */
3203 struct gimplify_init_ctor_preeval_data
3205 /* The base decl of the lhs object. May be NULL, in which case we
3206 have to assume the lhs is indirect. */
3207 tree lhs_base_decl;
3209 /* The alias set of the lhs object. */
3210 alias_set_type lhs_alias_set;
3213 static tree
3214 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3216 struct gimplify_init_ctor_preeval_data *data
3217 = (struct gimplify_init_ctor_preeval_data *) xdata;
3218 tree t = *tp;
3220 /* If we find the base object, obviously we have overlap. */
3221 if (data->lhs_base_decl == t)
3222 return t;
3224 /* If the constructor component is indirect, determine if we have a
3225 potential overlap with the lhs. The only bits of information we
3226 have to go on at this point are addressability and alias sets. */
3227 if (TREE_CODE (t) == INDIRECT_REF
3228 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3229 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3230 return t;
3232 /* If the constructor component is a call, determine if it can hide a
3233 potential overlap with the lhs through an INDIRECT_REF like above. */
3234 if (TREE_CODE (t) == CALL_EXPR)
3236 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3238 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3239 if (POINTER_TYPE_P (TREE_VALUE (type))
3240 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3241 && alias_sets_conflict_p (data->lhs_alias_set,
3242 get_alias_set
3243 (TREE_TYPE (TREE_VALUE (type)))))
3244 return t;
3247 if (IS_TYPE_OR_DECL_P (t))
3248 *walk_subtrees = 0;
3249 return NULL;
3252 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3253 force values that overlap with the lhs (as described by *DATA)
3254 into temporaries. */
3256 static void
3257 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3258 struct gimplify_init_ctor_preeval_data *data)
3260 enum gimplify_status one;
3262 /* If the value is constant, then there's nothing to pre-evaluate. */
3263 if (TREE_CONSTANT (*expr_p))
3265 /* Ensure it does not have side effects, it might contain a reference to
3266 the object we're initializing. */
3267 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3268 return;
3271 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3272 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3273 return;
3275 /* Recurse for nested constructors. */
3276 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3278 unsigned HOST_WIDE_INT ix;
3279 constructor_elt *ce;
3280 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3282 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3283 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3285 return;
3288 /* If this is a variable sized type, we must remember the size. */
3289 maybe_with_size_expr (expr_p);
3291 /* Gimplify the constructor element to something appropriate for the rhs
3292 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3293 the gimplifier will consider this a store to memory. Doing this
3294 gimplification now means that we won't have to deal with complicated
3295 language-specific trees, nor trees like SAVE_EXPR that can induce
3296 exponential search behavior. */
3297 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3298 if (one == GS_ERROR)
3300 *expr_p = NULL;
3301 return;
3304 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3305 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3306 always be true for all scalars, since is_gimple_mem_rhs insists on a
3307 temporary variable for them. */
3308 if (DECL_P (*expr_p))
3309 return;
3311 /* If this is of variable size, we have no choice but to assume it doesn't
3312 overlap since we can't make a temporary for it. */
3313 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3314 return;
3316 /* Otherwise, we must search for overlap ... */
3317 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3318 return;
3320 /* ... and if found, force the value into a temporary. */
3321 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3324 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3325 a RANGE_EXPR in a CONSTRUCTOR for an array.
3327 var = lower;
3328 loop_entry:
3329 object[var] = value;
3330 if (var == upper)
3331 goto loop_exit;
3332 var = var + 1;
3333 goto loop_entry;
3334 loop_exit:
3336 We increment var _after_ the loop exit check because we might otherwise
3337 fail if upper == TYPE_MAX_VALUE (the type of upper).
3339 Note that we never have to deal with SAVE_EXPRs here, because this has
3340 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3342 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3343 gimple_seq *, bool);
3345 static void
3346 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3347 tree value, tree array_elt_type,
3348 gimple_seq *pre_p, bool cleared)
3350 tree loop_entry_label, loop_exit_label, fall_thru_label;
3351 tree var, var_type, cref, tmp;
3353 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3354 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3355 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3357 /* Create and initialize the index variable. */
3358 var_type = TREE_TYPE (upper);
3359 var = create_tmp_var (var_type, NULL);
3360 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3362 /* Add the loop entry label. */
3363 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3365 /* Build the reference. */
3366 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3367 var, NULL_TREE, NULL_TREE);
3369 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3370 the store. Otherwise just assign value to the reference. */
3372 if (TREE_CODE (value) == CONSTRUCTOR)
3373 /* NB we might have to call ourselves recursively through
3374 gimplify_init_ctor_eval if the value is a constructor. */
3375 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3376 pre_p, cleared);
3377 else
3378 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3380 /* We exit the loop when the index var is equal to the upper bound. */
3381 gimplify_seq_add_stmt (pre_p,
3382 gimple_build_cond (EQ_EXPR, var, upper,
3383 loop_exit_label, fall_thru_label));
3385 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3387 /* Otherwise, increment the index var... */
3388 tmp = build2 (PLUS_EXPR, var_type, var,
3389 fold_convert (var_type, integer_one_node));
3390 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3392 /* ...and jump back to the loop entry. */
3393 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3395 /* Add the loop exit label. */
3396 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
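/* A rough sketch for a RANGE_EXPR produced by a GNU C designated range
   initializer:

       int a[100] = { [10 ... 19] = v };

   The range is expanded into approximately

       var = 10;
     loop_entry:
       a[var] = v;
       if (var == 19) goto loop_exit;
       var = var + 1;
       goto loop_entry;
     loop_exit:

   while the elements outside the range are handled by the usual
   clearing logic.  */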
3399 /* Return true if FDECL is accessing a field that is zero sized. */
3401 static bool
3402 zero_sized_field_decl (const_tree fdecl)
3404 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3405 && integer_zerop (DECL_SIZE (fdecl)))
3406 return true;
3407 return false;
3410 /* Return true if TYPE is zero sized. */
3412 static bool
3413 zero_sized_type (const_tree type)
3415 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3416 && integer_zerop (TYPE_SIZE (type)))
3417 return true;
3418 return false;
3421 /* A subroutine of gimplify_init_constructor. Generate individual
3422 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3423 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3424 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3425 zeroed first. */
3427 static void
3428 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3429 gimple_seq *pre_p, bool cleared)
3431 tree array_elt_type = NULL;
3432 unsigned HOST_WIDE_INT ix;
3433 tree purpose, value;
3435 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3436 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3438 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3440 tree cref;
3442 /* NULL values are created above for gimplification errors. */
3443 if (value == NULL)
3444 continue;
3446 if (cleared && initializer_zerop (value))
3447 continue;
3449 /* ??? Here's to hoping the front end fills in all of the indices,
3450 so we don't have to figure out what's missing ourselves. */
3451 gcc_assert (purpose);
3453 /* Skip zero-sized fields, unless value has side-effects. This can
3454 happen with calls to functions returning a zero-sized type, which
3455 we shouldn't discard. As a number of downstream passes don't
3456 expect sets of zero-sized fields, we rely on the gimplification of
3457 the MODIFY_EXPR we make below to drop the assignment statement. */
3458 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3459 continue;
3461 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3462 whole range. */
3463 if (TREE_CODE (purpose) == RANGE_EXPR)
3465 tree lower = TREE_OPERAND (purpose, 0);
3466 tree upper = TREE_OPERAND (purpose, 1);
3468 /* If the lower bound is equal to upper, just treat it as if
3469 upper was the index. */
3470 if (simple_cst_equal (lower, upper))
3471 purpose = upper;
3472 else
3474 gimplify_init_ctor_eval_range (object, lower, upper, value,
3475 array_elt_type, pre_p, cleared);
3476 continue;
3480 if (array_elt_type)
3482 /* Do not use bitsizetype for ARRAY_REF indices. */
3483 if (TYPE_DOMAIN (TREE_TYPE (object)))
3484 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3485 purpose);
3486 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3487 purpose, NULL_TREE, NULL_TREE);
3489 else
3491 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3492 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3493 unshare_expr (object), purpose, NULL_TREE);
3496 if (TREE_CODE (value) == CONSTRUCTOR
3497 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3498 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3499 pre_p, cleared);
3500 else
3502 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3503 gimplify_and_add (init, pre_p);
3504 ggc_free (init);
3510 /* Returns the appropriate RHS predicate for this LHS. */
3512 gimple_predicate
3513 rhs_predicate_for (tree lhs)
3515 if (is_gimple_reg (lhs))
3516 return is_gimple_reg_rhs_or_call;
3517 else
3518 return is_gimple_mem_rhs_or_call;
3521 /* Gimplify a C99 compound literal expression. This just means adding
3522 the DECL_EXPR before the current statement and using its anonymous
3523 decl instead. */
3525 static enum gimplify_status
3526 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3528 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3529 tree decl = DECL_EXPR_DECL (decl_s);
3530 /* Mark the decl as addressable if the compound literal
3531 expression is addressable now, otherwise it is marked too late
3532 after we gimplify the initialization expression. */
3533 if (TREE_ADDRESSABLE (*expr_p))
3534 TREE_ADDRESSABLE (decl) = 1;
3536 /* Preliminarily mark non-addressed complex variables as eligible
3537 for promotion to gimple registers. We'll transform their uses
3538 as we find them. */
3539 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3540 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3541 && !TREE_THIS_VOLATILE (decl)
3542 && !needs_to_live_in_memory (decl))
3543 DECL_GIMPLE_REG_P (decl) = 1;
3545 /* This decl isn't mentioned in the enclosing block, so add it to the
3546 list of temps. FIXME it seems a bit of a kludge to say that
3547 anonymous artificial vars aren't pushed, but everything else is. */
3548 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3549 gimple_add_tmp_var (decl);
3551 gimplify_and_add (decl_s, pre_p);
3552 *expr_p = decl;
3553 return GS_OK;
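/* A rough sketch of the compound literal handling above:

       int *p = (int []) { 1, 2, 3 };

   The anonymous array backing the literal has its DECL_EXPR emitted
   before the current statement and the COMPOUND_LITERAL_EXPR is replaced
   by the decl itself, so its initializer is gimplified like any other
   variable initialization.  */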
3556 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3557 return a new CONSTRUCTOR if something changed. */
3559 static tree
3560 optimize_compound_literals_in_ctor (tree orig_ctor)
3562 tree ctor = orig_ctor;
3563 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3564 unsigned int idx, num = VEC_length (constructor_elt, elts);
3566 for (idx = 0; idx < num; idx++)
3568 tree value = VEC_index (constructor_elt, elts, idx)->value;
3569 tree newval = value;
3570 if (TREE_CODE (value) == CONSTRUCTOR)
3571 newval = optimize_compound_literals_in_ctor (value);
3572 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3574 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3575 tree decl = DECL_EXPR_DECL (decl_s);
3576 tree init = DECL_INITIAL (decl);
3578 if (!TREE_ADDRESSABLE (value)
3579 && !TREE_ADDRESSABLE (decl)
3580 && init)
3581 newval = optimize_compound_literals_in_ctor (init);
3583 if (newval == value)
3584 continue;
3586 if (ctor == orig_ctor)
3588 ctor = copy_node (orig_ctor);
3589 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3590 elts = CONSTRUCTOR_ELTS (ctor);
3592 VEC_index (constructor_elt, elts, idx)->value = newval;
3594 return ctor;
3599 /* A subroutine of gimplify_modify_expr. Break out elements of a
3600 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3602 Note that we still need to clear any elements that don't have explicit
3603 initializers, so if not all elements are initialized we keep the
3604 original MODIFY_EXPR; we just remove all of the constructor elements.
3606 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3607 GS_ERROR if we would have to create a temporary when gimplifying
3608 this constructor. Otherwise, return GS_OK.
3610 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3612 static enum gimplify_status
3613 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3614 bool want_value, bool notify_temp_creation)
3616 tree object, ctor, type;
3617 enum gimplify_status ret;
3618 VEC(constructor_elt,gc) *elts;
3620 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3622 if (!notify_temp_creation)
3624 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3625 is_gimple_lvalue, fb_lvalue);
3626 if (ret == GS_ERROR)
3627 return ret;
3630 object = TREE_OPERAND (*expr_p, 0);
3631 ctor = TREE_OPERAND (*expr_p, 1) =
3632 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3633 type = TREE_TYPE (ctor);
3634 elts = CONSTRUCTOR_ELTS (ctor);
3635 ret = GS_ALL_DONE;
3637 switch (TREE_CODE (type))
3639 case RECORD_TYPE:
3640 case UNION_TYPE:
3641 case QUAL_UNION_TYPE:
3642 case ARRAY_TYPE:
3644 struct gimplify_init_ctor_preeval_data preeval_data;
3645 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3646 HOST_WIDE_INT num_nonzero_elements;
3647 bool cleared, valid_const_initializer;
3649 /* Aggregate types must lower constructors to initialization of
3650 individual elements. The exception is that a CONSTRUCTOR node
3651 with no elements indicates zero-initialization of the whole. */
3652 if (VEC_empty (constructor_elt, elts))
3654 if (notify_temp_creation)
3655 return GS_OK;
3656 break;
3659 /* Fetch information about the constructor to direct later processing.
3660 We might want to make static versions of it in various cases, and
3661 can only do so if it is known to be a valid constant initializer. */
3662 valid_const_initializer
3663 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3664 &num_ctor_elements, &cleared);
3666 /* If a const aggregate variable is being initialized, then it
3667 should never be a loss to promote the variable to be static. */
3668 if (valid_const_initializer
3669 && num_nonzero_elements > 1
3670 && TREE_READONLY (object)
3671 && TREE_CODE (object) == VAR_DECL
3672 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3674 if (notify_temp_creation)
3675 return GS_ERROR;
3676 DECL_INITIAL (object) = ctor;
3677 TREE_STATIC (object) = 1;
3678 if (!DECL_NAME (object))
3679 DECL_NAME (object) = create_tmp_var_name ("C");
3680 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3682 /* ??? C++ doesn't automatically append a .<number> to the
3683 assembler name, and even when it does, it looks at FE-private
3684 data structures to figure out what that number should be,
3685 which are not set for this variable. I suppose this is
3686 important for local statics for inline functions, which aren't
3687 "local" in the object file sense. So in order to get a unique
3688 TU-local symbol, we must invoke the lhd version now. */
3689 lhd_set_decl_assembler_name (object);
3691 *expr_p = NULL_TREE;
3692 break;
3695 /* If there are "lots" of initialized elements, even discounting
3696 those that are not address constants (and thus *must* be
3697 computed at runtime), then partition the constructor into
3698 constant and non-constant parts. Block copy the constant
3699 parts in, then generate code for the non-constant parts. */
3700 /* TODO. There's code in cp/typeck.c to do this. */
3702 num_type_elements = count_type_elements (type, true);
3704 /* If count_type_elements could not determine number of type elements
3705 for a constant-sized object, assume clearing is needed.
3706 Don't do this for variable-sized objects, as store_constructor
3707 will ignore the clearing of variable-sized objects. */
3708 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3709 cleared = true;
3710 /* If there are "lots" of zeros, then block clear the object first. */
3711 else if (num_type_elements - num_nonzero_elements
3712 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3713 && num_nonzero_elements < num_type_elements/4)
3714 cleared = true;
3715 /* ??? This bit ought not be needed. For any element not present
3716 in the initializer, we should simply set it to zero. Except
3717 we'd need to *find* the elements that are not present, and that
3718 requires trickery to avoid quadratic compile-time behavior in
3719 large cases or excessive memory use in small cases. */
3720 else if (num_ctor_elements < num_type_elements)
3721 cleared = true;
3723 /* If there are "lots" of initialized elements, and all of them
3724 are valid address constants, then the entire initializer can
3725 be dropped to memory, and then memcpy'd out. Don't do this
3726 for sparse arrays, though, as it's more efficient to follow
3727 the standard CONSTRUCTOR behavior of memset followed by
3728 individual element initialization. Also don't do this for small
3729 all-zero initializers (which aren't big enough to merit
3730 clearing), and don't try to make bitwise copies of
3731 TREE_ADDRESSABLE types. */
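/* Editorial sketch (illustrative only): a large all-constant initializer
   such as

     int a[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };

   may be dropped to a static object and block copied, conceptually

     static const int C.0[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };   <-- hypothetical name
     memcpy (&a, &C.0, sizeof (a));

   whereas a sparse or mostly-zero initializer keeps the memset plus
   per-element stores described above.  */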
3732 if (valid_const_initializer
3733 && !(cleared || num_nonzero_elements == 0)
3734 && !TREE_ADDRESSABLE (type))
3736 HOST_WIDE_INT size = int_size_in_bytes (type);
3737 unsigned int align;
3739 /* ??? We can still get unbounded array types, at least
3740 from the C++ front end. This seems wrong, but attempt
3741 to work around it for now. */
3742 if (size < 0)
3744 size = int_size_in_bytes (TREE_TYPE (object));
3745 if (size >= 0)
3746 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3749 /* Find the maximum alignment we can assume for the object. */
3750 /* ??? Make use of DECL_OFFSET_ALIGN. */
3751 if (DECL_P (object))
3752 align = DECL_ALIGN (object);
3753 else
3754 align = TYPE_ALIGN (type);
3756 if (size > 0
3757 && num_nonzero_elements > 1
3758 && !can_move_by_pieces (size, align))
3760 if (notify_temp_creation)
3761 return GS_ERROR;
3763 walk_tree (&ctor, force_labels_r, NULL, NULL);
3764 TREE_OPERAND (*expr_p, 1) = tree_output_constant_def (ctor);
3766 /* This is no longer an assignment of a CONSTRUCTOR, but
3767 we still may have processing to do on the LHS. So
3768 pretend we didn't do anything here to let that happen. */
3769 return GS_UNHANDLED;
3773 /* If the target is volatile and we have non-zero elements,
3774 initialize the target from a temporary. */
3775 if (TREE_THIS_VOLATILE (object)
3776 && !TREE_ADDRESSABLE (type)
3777 && num_nonzero_elements > 0)
3779 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3780 TREE_OPERAND (*expr_p, 0) = temp;
3781 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3782 *expr_p,
3783 build2 (MODIFY_EXPR, void_type_node,
3784 object, temp));
3785 return GS_OK;
3788 if (notify_temp_creation)
3789 return GS_OK;
3791 /* If there are nonzero elements and if needed, pre-evaluate to capture
3792 elements overlapping with the lhs into temporaries. We must do this
3793 before clearing to fetch the values before they are zeroed-out. */
3794 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3796 preeval_data.lhs_base_decl = get_base_address (object);
3797 if (!DECL_P (preeval_data.lhs_base_decl))
3798 preeval_data.lhs_base_decl = NULL;
3799 preeval_data.lhs_alias_set = get_alias_set (object);
3801 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3802 pre_p, post_p, &preeval_data);
3805 if (cleared)
3807 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3808 Note that we still have to gimplify, in order to handle the
3809 case of variable sized types. Avoid shared tree structures. */
3810 CONSTRUCTOR_ELTS (ctor) = NULL;
3811 TREE_SIDE_EFFECTS (ctor) = 0;
3812 object = unshare_expr (object);
3813 gimplify_stmt (expr_p, pre_p);
3816 /* If we have not block cleared the object, or if there are nonzero
3817 elements in the constructor, add assignments to the individual
3818 scalar fields of the object. */
3819 if (!cleared || num_nonzero_elements > 0)
3820 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3822 *expr_p = NULL_TREE;
3824 break;
3826 case COMPLEX_TYPE:
3828 tree r, i;
3830 if (notify_temp_creation)
3831 return GS_OK;
3833 /* Extract the real and imaginary parts out of the ctor. */
3834 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3835 r = VEC_index (constructor_elt, elts, 0)->value;
3836 i = VEC_index (constructor_elt, elts, 1)->value;
3837 if (r == NULL || i == NULL)
3839 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3840 if (r == NULL)
3841 r = zero;
3842 if (i == NULL)
3843 i = zero;
3846 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3847 represent creation of a complex value. */
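/* Editorial sketch (illustrative only): with constant parts the ctor
   folds to a COMPLEX_CST, e.g.

     z = {1.0, 2.0}  -->  z = COMPLEX_CST <1.0, 2.0>;

   while non-constant parts produce a COMPLEX_EXPR that is gimplified
   just below, e.g.  z = COMPLEX_EXPR <x, y>;  */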
3848 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3850 ctor = build_complex (type, r, i);
3851 TREE_OPERAND (*expr_p, 1) = ctor;
3853 else
3855 ctor = build2 (COMPLEX_EXPR, type, r, i);
3856 TREE_OPERAND (*expr_p, 1) = ctor;
3857 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3858 pre_p,
3859 post_p,
3860 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3861 fb_rvalue);
3864 break;
3866 case VECTOR_TYPE:
3868 unsigned HOST_WIDE_INT ix;
3869 constructor_elt *ce;
3871 if (notify_temp_creation)
3872 return GS_OK;
3874 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3875 if (TREE_CONSTANT (ctor))
3877 bool constant_p = true;
3878 tree value;
3880 /* Even when ctor is constant, it might contain non-*_CST
3881 elements, such as addresses or trapping values like
3882 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3883 in VECTOR_CST nodes. */
3884 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3885 if (!CONSTANT_CLASS_P (value))
3887 constant_p = false;
3888 break;
3891 if (constant_p)
3893 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3894 break;
3897 /* Don't reduce an initializer constant even if we can't
3898 make a VECTOR_CST. It won't do anything for us, and it'll
3899 prevent us from representing it as a single constant. */
3900 if (initializer_constant_valid_p (ctor, type))
3901 break;
3903 TREE_CONSTANT (ctor) = 0;
3906 /* Vector types use CONSTRUCTOR all the way through gimple
3907 compilation as a general initializer. */
3908 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3910 enum gimplify_status tret;
3911 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3912 fb_rvalue);
3913 if (tret == GS_ERROR)
3914 ret = GS_ERROR;
3916 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3917 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3919 break;
3921 default:
3922 /* So how did we get a CONSTRUCTOR for a scalar type? */
3923 gcc_unreachable ();
3926 if (ret == GS_ERROR)
3927 return GS_ERROR;
3928 else if (want_value)
3930 *expr_p = object;
3931 return GS_OK;
3933 else
3935 /* If we have gimplified both sides of the initializer but have
3936 not emitted an assignment, do so now. */
3937 if (*expr_p)
3939 tree lhs = TREE_OPERAND (*expr_p, 0);
3940 tree rhs = TREE_OPERAND (*expr_p, 1);
3941 gimple init = gimple_build_assign (lhs, rhs);
3942 gimplify_seq_add_stmt (pre_p, init);
3943 *expr_p = NULL;
3946 return GS_ALL_DONE;
3950 /* Given a pointer value OP0, return a simplified version of an
3951 indirection through OP0, or NULL_TREE if no simplification is
3952 possible. Note that the resulting type may differ from the
3953 pointed-to type, but only in ways that keep it compatible
3954 from the langhooks point of view. */
3956 tree
3957 gimple_fold_indirect_ref (tree t)
3959 tree type = TREE_TYPE (TREE_TYPE (t));
3960 tree sub = t;
3961 tree subtype;
3963 STRIP_NOPS (sub);
3964 subtype = TREE_TYPE (sub);
3965 if (!POINTER_TYPE_P (subtype))
3966 return NULL_TREE;
3968 if (TREE_CODE (sub) == ADDR_EXPR)
3970 tree op = TREE_OPERAND (sub, 0);
3971 tree optype = TREE_TYPE (op);
3972 /* *&p => p */
3973 if (useless_type_conversion_p (type, optype))
3974 return op;
3976 /* *(foo *)&fooarray => fooarray[0] */
3977 if (TREE_CODE (optype) == ARRAY_TYPE
3978 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3979 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3981 tree type_domain = TYPE_DOMAIN (optype);
3982 tree min_val = size_zero_node;
3983 if (type_domain && TYPE_MIN_VALUE (type_domain))
3984 min_val = TYPE_MIN_VALUE (type_domain);
3985 if (TREE_CODE (min_val) == INTEGER_CST)
3986 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3988 /* *(foo *)&complexfoo => __real__ complexfoo */
3989 else if (TREE_CODE (optype) == COMPLEX_TYPE
3990 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3991 return fold_build1 (REALPART_EXPR, type, op);
3992 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3993 else if (TREE_CODE (optype) == VECTOR_TYPE
3994 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3996 tree part_width = TYPE_SIZE (type);
3997 tree index = bitsize_int (0);
3998 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4002 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
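/* Editorial worked example (not from the original source): for

     v4si v;  ... = ((int *) &v)[1];

   the POINTER_PLUS_EXPR offset op01 is 4 (bytes), the part width of
   'int' is 32 bits, indexi becomes 4 * BITS_PER_UNIT = 32, and the
   fold produces

     BIT_FIELD_REF <v, 32, 32>

   i.e. the second element of the vector.  */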
4003 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4004 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4006 tree op00 = TREE_OPERAND (sub, 0);
4007 tree op01 = TREE_OPERAND (sub, 1);
4008 tree op00type;
4010 STRIP_NOPS (op00);
4011 op00type = TREE_TYPE (op00);
4012 if (TREE_CODE (op00) == ADDR_EXPR
4013 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
4014 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4016 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
4017 tree part_width = TYPE_SIZE (type);
4018 unsigned HOST_WIDE_INT part_widthi
4019 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4020 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4021 tree index = bitsize_int (indexi);
4022 if (offset / part_widthi
4023 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
4024 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
4025 part_width, index);
4029 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
4030 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4031 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4033 tree op00 = TREE_OPERAND (sub, 0);
4034 tree op01 = TREE_OPERAND (sub, 1);
4035 tree op00type;
4037 STRIP_NOPS (op00);
4038 op00type = TREE_TYPE (op00);
4039 if (TREE_CODE (op00) == ADDR_EXPR
4040 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
4041 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4043 tree size = TYPE_SIZE_UNIT (type);
4044 if (tree_int_cst_equal (size, op01))
4045 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
4049 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4050 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4051 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4052 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4054 tree type_domain;
4055 tree min_val = size_zero_node;
4056 tree osub = sub;
4057 sub = gimple_fold_indirect_ref (sub);
4058 if (! sub)
4059 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4060 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4061 if (type_domain && TYPE_MIN_VALUE (type_domain))
4062 min_val = TYPE_MIN_VALUE (type_domain);
4063 if (TREE_CODE (min_val) == INTEGER_CST)
4064 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4067 return NULL_TREE;
4070 /* Given a pointer value OP0, return a simplified version of an
4071 indirection through OP0, or NULL_TREE if no simplification is
4072 possible. This may only be applied to a rhs of an expression.
4073 Note that the resulting type may differ from the pointed-to type,
4074 but only in ways that keep it compatible from the langhooks
4075 point of view. */
4077 static tree
4078 gimple_fold_indirect_ref_rhs (tree t)
4080 return gimple_fold_indirect_ref (t);
4083 /* Subroutine of gimplify_modify_expr to do simplifications of
4084 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4085 something changes. */
4087 static enum gimplify_status
4088 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4089 gimple_seq *pre_p, gimple_seq *post_p,
4090 bool want_value)
4092 enum gimplify_status ret = GS_UNHANDLED;
4093 bool changed;
4097 changed = false;
4098 switch (TREE_CODE (*from_p))
4100 case VAR_DECL:
4101 /* If we're assigning from a read-only variable initialized with
4102 a constructor, do the direct assignment from the constructor,
4103 but only if neither source nor target is volatile, since this
4104 latter assignment might end up being done on a per-field basis. */
4105 if (DECL_INITIAL (*from_p)
4106 && TREE_READONLY (*from_p)
4107 && !TREE_THIS_VOLATILE (*from_p)
4108 && !TREE_THIS_VOLATILE (*to_p)
4109 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4111 tree old_from = *from_p;
4112 enum gimplify_status subret;
4114 /* Move the constructor into the RHS. */
4115 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4117 /* Let's see if gimplify_init_constructor will need to put
4118 it in memory. */
4119 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4120 false, true);
4121 if (subret == GS_ERROR)
4123 /* If so, revert the change. */
4124 *from_p = old_from;
4126 else
4128 ret = GS_OK;
4129 changed = true;
4132 break;
4133 case INDIRECT_REF:
4135 /* If we have code like
4137 *(const A*)(A*)&x
4139 where the type of "x" is a (possibly cv-qualified variant
4140 of "A"), treat the entire expression as identical to "x".
4141 This kind of code arises in C++ when an object is bound
4142 to a const reference, and if "x" is a TARGET_EXPR we want
4143 to take advantage of the optimization below. */
4144 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4145 if (t)
4147 *from_p = t;
4148 ret = GS_OK;
4149 changed = true;
4151 break;
4154 case TARGET_EXPR:
4156 /* If we are initializing something from a TARGET_EXPR, strip the
4157 TARGET_EXPR and initialize it directly, if possible. This can't
4158 be done if the initializer is void, since that implies that the
4159 temporary is set in some non-trivial way.
4161 ??? What about code that pulls out the temp and uses it
4162 elsewhere? I think that such code never uses the TARGET_EXPR as
4163 an initializer. If I'm wrong, we'll die because the temp won't
4164 have any RTL. In that case, I guess we'll need to replace
4165 references somehow. */
4166 tree init = TARGET_EXPR_INITIAL (*from_p);
4168 if (init
4169 && !VOID_TYPE_P (TREE_TYPE (init)))
4171 *from_p = init;
4172 ret = GS_OK;
4173 changed = true;
4176 break;
4178 case COMPOUND_EXPR:
4179 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4180 caught. */
4181 gimplify_compound_expr (from_p, pre_p, true);
4182 ret = GS_OK;
4183 changed = true;
4184 break;
4186 case CONSTRUCTOR:
4187 /* If we're initializing from a CONSTRUCTOR, break this into
4188 individual MODIFY_EXPRs. */
4189 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4190 false);
4192 case COND_EXPR:
4193 /* If we're assigning to a non-register type, push the assignment
4194 down into the branches. This is mandatory for ADDRESSABLE types,
4195 since we cannot generate temporaries for such, but it saves a
4196 copy in other cases as well. */
4197 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4199 /* This code should mirror the code in gimplify_cond_expr. */
4200 enum tree_code code = TREE_CODE (*expr_p);
4201 tree cond = *from_p;
4202 tree result = *to_p;
4204 ret = gimplify_expr (&result, pre_p, post_p,
4205 is_gimple_lvalue, fb_lvalue);
4206 if (ret != GS_ERROR)
4207 ret = GS_OK;
4209 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4210 TREE_OPERAND (cond, 1)
4211 = build2 (code, void_type_node, result,
4212 TREE_OPERAND (cond, 1));
4213 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4214 TREE_OPERAND (cond, 2)
4215 = build2 (code, void_type_node, unshare_expr (result),
4216 TREE_OPERAND (cond, 2));
4218 TREE_TYPE (cond) = void_type_node;
4219 recalculate_side_effects (cond);
4221 if (want_value)
4223 gimplify_and_add (cond, pre_p);
4224 *expr_p = unshare_expr (result);
4226 else
4227 *expr_p = cond;
4228 return ret;
4230 break;
4232 case CALL_EXPR:
4233 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4234 return slot so that we don't generate a temporary. */
4235 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4236 && aggregate_value_p (*from_p, *from_p))
4238 bool use_target;
4240 if (!(rhs_predicate_for (*to_p))(*from_p))
4241 /* If we need a temporary, *to_p isn't accurate. */
4242 use_target = false;
4243 else if (TREE_CODE (*to_p) == RESULT_DECL
4244 && DECL_NAME (*to_p) == NULL_TREE
4245 && needs_to_live_in_memory (*to_p))
4246 /* It's OK to use the return slot directly unless it's an NRV. */
4247 use_target = true;
4248 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4249 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4250 /* Don't force regs into memory. */
4251 use_target = false;
4252 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4253 /* It's OK to use the target directly if it's being
4254 initialized. */
4255 use_target = true;
4256 else if (!is_gimple_non_addressable (*to_p))
4257 /* Don't use the original target if it's already addressable;
4258 if its address escapes, and the called function uses the
4259 NRV optimization, a conforming program could see *to_p
4260 change before the called function returns; see c++/19317.
4261 When optimizing, the return_slot pass marks more functions
4262 as safe after we have escape info. */
4263 use_target = false;
4264 else
4265 use_target = true;
4267 if (use_target)
4269 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4270 mark_addressable (*to_p);
4273 break;
4275 case WITH_SIZE_EXPR:
4276 /* Likewise for calls that return an aggregate of non-constant size,
4277 since we would not be able to generate a temporary at all. */
4278 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4280 *from_p = TREE_OPERAND (*from_p, 0);
4281 ret = GS_OK;
4282 changed = true;
4284 break;
4286 /* If we're initializing from a container, push the initialization
4287 inside it. */
4288 case CLEANUP_POINT_EXPR:
4289 case BIND_EXPR:
4290 case STATEMENT_LIST:
4292 tree wrap = *from_p;
4293 tree t;
4295 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4296 fb_lvalue);
4297 if (ret != GS_ERROR)
4298 ret = GS_OK;
4300 t = voidify_wrapper_expr (wrap, *expr_p);
4301 gcc_assert (t == *expr_p);
4303 if (want_value)
4305 gimplify_and_add (wrap, pre_p);
4306 *expr_p = unshare_expr (*to_p);
4308 else
4309 *expr_p = wrap;
4310 return GS_OK;
4313 case COMPOUND_LITERAL_EXPR:
4315 tree complit = TREE_OPERAND (*expr_p, 1);
4316 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4317 tree decl = DECL_EXPR_DECL (decl_s);
4318 tree init = DECL_INITIAL (decl);
4320 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4321 into struct T x = { 0, 1, 2 } if the address of the
4322 compound literal has never been taken. */
4323 if (!TREE_ADDRESSABLE (complit)
4324 && !TREE_ADDRESSABLE (decl)
4325 && init)
4327 *expr_p = copy_node (*expr_p);
4328 TREE_OPERAND (*expr_p, 1) = init;
4329 return GS_OK;
4333 default:
4334 break;
4337 while (changed);
4339 return ret;
4343 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4344 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4345 DECL_GIMPLE_REG_P set.
4347 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4348 other, unmodified part of the complex object just before the total store.
4349 As a consequence, if the object is still uninitialized, an undefined value
4350 will be loaded into a register, which may result in a spurious exception
4351 if the register is floating-point and the value happens to be a signaling
4352 NaN for example. Then the fully-fledged complex operations lowering pass
4353 followed by a DCE pass are necessary in order to fix things up. */
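/* Editorial sketch (illustrative only): a partial store such as

     __real__ z = x;

   with DECL_GIMPLE_REG_P (z) set is rewritten as a total store

     D.1 = __imag__ z;            <-- load of the untouched part
     z = COMPLEX_EXPR <x, D.1>;

   where D.1 stands for the formal temporary created by
   get_formal_tmp_var (the name is hypothetical).  */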
4355 static enum gimplify_status
4356 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4357 bool want_value)
4359 enum tree_code code, ocode;
4360 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4362 lhs = TREE_OPERAND (*expr_p, 0);
4363 rhs = TREE_OPERAND (*expr_p, 1);
4364 code = TREE_CODE (lhs);
4365 lhs = TREE_OPERAND (lhs, 0);
4367 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4368 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4369 other = get_formal_tmp_var (other, pre_p);
4371 realpart = code == REALPART_EXPR ? rhs : other;
4372 imagpart = code == REALPART_EXPR ? other : rhs;
4374 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4375 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4376 else
4377 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4379 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4380 *expr_p = (want_value) ? rhs : NULL_TREE;
4382 return GS_ALL_DONE;
4386 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4388 modify_expr
4389 : varname '=' rhs
4390 | '*' ID '=' rhs
4392 PRE_P points to the list where side effects that must happen before
4393 *EXPR_P should be stored.
4395 POST_P points to the list where side effects that must happen after
4396 *EXPR_P should be stored.
4398 WANT_VALUE is nonzero iff we want to use the value of this expression
4399 in another expression. */
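/* Editorial sketch (illustrative only): gimplifying

     a = b + c * d;

   emits the side effects into PRE_P and leaves a simple assignment,
   conceptually

     D.1 = c * d;                 <-- hypothetical temporary
     a = b + D.1;

   with *EXPR_P set to 'a' when WANT_VALUE is nonzero and to
   NULL_TREE otherwise.  */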
4401 static enum gimplify_status
4402 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4403 bool want_value)
4405 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4406 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4407 enum gimplify_status ret = GS_UNHANDLED;
4408 gimple assign;
4409 location_t loc = EXPR_LOCATION (*expr_p);
4411 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4412 || TREE_CODE (*expr_p) == INIT_EXPR);
4414 /* Insert pointer conversions required by the middle-end that are not
4415 required by the frontend. This fixes middle-end type checking
4416 for cases such as gcc.dg/redecl-6.c. */
4417 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4419 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4420 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4421 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4424 /* See if any simplifications can be done based on what the RHS is. */
4425 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4426 want_value);
4427 if (ret != GS_UNHANDLED)
4428 return ret;
4430 /* For zero sized types only gimplify the left hand side and right hand
4431 side as statements and throw away the assignment. Do this after
4432 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4433 types properly. */
4434 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4436 gimplify_stmt (from_p, pre_p);
4437 gimplify_stmt (to_p, pre_p);
4438 *expr_p = NULL_TREE;
4439 return GS_ALL_DONE;
4442 /* If the value being copied is of variable width, compute the length
4443 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4444 before gimplifying any of the operands so that we can resolve any
4445 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4446 the size of the expression to be copied, not of the destination, so
4447 that is what we must do here. */
4448 maybe_with_size_expr (from_p);
4450 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4451 if (ret == GS_ERROR)
4452 return ret;
4454 /* As a special case, we have to temporarily allow for assignments
4455 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4456 a toplevel statement, when gimplifying the GENERIC expression
4457 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4458 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4460 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4461 prevent gimplify_expr from trying to create a new temporary for
4462 foo's LHS, we tell it that it should only gimplify until it
4463 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4464 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4465 and all we need to do here is set 'a' to be its LHS. */
4466 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4467 fb_rvalue);
4468 if (ret == GS_ERROR)
4469 return ret;
4471 /* Now see if the above changed *from_p to something we handle specially. */
4472 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4473 want_value);
4474 if (ret != GS_UNHANDLED)
4475 return ret;
4477 /* If we've got a variable-sized assignment between two lvalues (i.e. one
4478 that does not involve a call), then we can make things a bit more
4479 straightforward by converting the assignment to memcpy or memset. */
4480 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4482 tree from = TREE_OPERAND (*from_p, 0);
4483 tree size = TREE_OPERAND (*from_p, 1);
4485 if (TREE_CODE (from) == CONSTRUCTOR)
4486 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4488 if (is_gimple_addressable (from))
4490 *from_p = from;
4491 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4492 pre_p);
4496 /* Transform partial stores to non-addressable complex variables into
4497 total stores. This allows us to use real instead of virtual operands
4498 for these variables, which improves optimization. */
4499 if ((TREE_CODE (*to_p) == REALPART_EXPR
4500 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4501 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4502 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4504 /* Try to alleviate the effects of the gimplification creating artificial
4505 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4506 if (!gimplify_ctxp->into_ssa
4507 && DECL_P (*from_p)
4508 && DECL_IGNORED_P (*from_p)
4509 && DECL_P (*to_p)
4510 && !DECL_IGNORED_P (*to_p))
4512 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4513 DECL_NAME (*from_p)
4514 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4515 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4516 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4519 if (TREE_CODE (*from_p) == CALL_EXPR)
4521 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4522 instead of a GIMPLE_ASSIGN. */
4523 assign = gimple_build_call_from_tree (*from_p);
4524 if (!gimple_call_noreturn_p (assign))
4525 gimple_call_set_lhs (assign, *to_p);
4527 else
4529 assign = gimple_build_assign (*to_p, *from_p);
4530 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4533 gimplify_seq_add_stmt (pre_p, assign);
4535 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4537 /* If we've somehow already got an SSA_NAME on the LHS, then
4538 we've probably modified it twice. Not good. */
4539 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4540 *to_p = make_ssa_name (*to_p, assign);
4541 gimple_set_lhs (assign, *to_p);
4544 if (want_value)
4546 *expr_p = unshare_expr (*to_p);
4547 return GS_OK;
4549 else
4550 *expr_p = NULL;
4552 return GS_ALL_DONE;
4555 /* Gimplify a comparison between two variable-sized objects. Do this
4556 with a call to BUILT_IN_MEMCMP. */
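/* Editorial sketch (illustrative only): for two objects of a
   variable-sized type T,

     x == y

   becomes, conceptually,

     __builtin_memcmp (&x, &y, sizeof (T)) == 0

   with the size expression taken from TYPE_SIZE_UNIT and any
   PLACEHOLDER_EXPRs substituted from the first operand.  */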
4558 static enum gimplify_status
4559 gimplify_variable_sized_compare (tree *expr_p)
4561 tree op0 = TREE_OPERAND (*expr_p, 0);
4562 tree op1 = TREE_OPERAND (*expr_p, 1);
4563 tree t, arg, dest, src;
4564 location_t loc = EXPR_LOCATION (*expr_p);
4566 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4567 arg = unshare_expr (arg);
4568 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4569 src = build_fold_addr_expr_loc (loc, op1);
4570 dest = build_fold_addr_expr_loc (loc, op0);
4571 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4572 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4573 *expr_p
4574 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4576 return GS_OK;
4579 /* Gimplify a comparison between two aggregate objects of integral scalar
4580 mode as a comparison between the bitwise equivalent scalar values. */
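/* Editorial sketch (illustrative only, assuming a target where a 4-byte
   aggregate has SImode): an equality comparison of

     struct P { short x, y; } a, b;

   as produced by some front ends is rewritten roughly as

     VIEW_CONVERT_EXPR<unsigned int>(a) == VIEW_CONVERT_EXPR<unsigned int>(b)

   i.e. a single scalar comparison of the bitwise images.  */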
4582 static enum gimplify_status
4583 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4585 location_t loc = EXPR_LOCATION (*expr_p);
4586 tree op0 = TREE_OPERAND (*expr_p, 0);
4587 tree op1 = TREE_OPERAND (*expr_p, 1);
4589 tree type = TREE_TYPE (op0);
4590 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4592 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4593 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4595 *expr_p
4596 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4598 return GS_OK;
4601 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4602 points to the expression to gimplify.
4604 Expressions of the form 'a && b' are gimplified to:
4606 a && b ? true : false
4608 LOCUS is the source location to be put on the generated COND_EXPR.
4609 gimplify_cond_expr will do the rest. */
4611 static enum gimplify_status
4612 gimplify_boolean_expr (tree *expr_p, location_t locus)
4614 /* Preserve the original type of the expression. */
4615 tree type = TREE_TYPE (*expr_p);
4617 *expr_p = build3 (COND_EXPR, type, *expr_p,
4618 fold_convert_loc (locus, type, boolean_true_node),
4619 fold_convert_loc (locus, type, boolean_false_node));
4621 SET_EXPR_LOCATION (*expr_p, locus);
4623 return GS_OK;
4626 /* Gimplifies an expression sequence. This function gimplifies each
4627 expression and re-writes the original expression with the last
4628 expression of the sequence in GIMPLE form.
4630 PRE_P points to the list where the side effects for all the
4631 expressions in the sequence will be emitted.
4633 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
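/* Editorial sketch (illustrative only): for

     (a = 1, b = 2, c)

   the first two expressions are emitted as statements into PRE_P

     a = 1;
     b = 2;

   and *EXPR_P is rewritten to 'c'; when WANT_VALUE is false, 'c' is
   itself gimplified as a statement.  */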
4635 static enum gimplify_status
4636 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4638 tree t = *expr_p;
4642 tree *sub_p = &TREE_OPERAND (t, 0);
4644 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4645 gimplify_compound_expr (sub_p, pre_p, false);
4646 else
4647 gimplify_stmt (sub_p, pre_p);
4649 t = TREE_OPERAND (t, 1);
4651 while (TREE_CODE (t) == COMPOUND_EXPR);
4653 *expr_p = t;
4654 if (want_value)
4655 return GS_OK;
4656 else
4658 gimplify_stmt (expr_p, pre_p);
4659 return GS_ALL_DONE;
4664 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4665 gimplify. After gimplification, EXPR_P will point to a new temporary
4666 that holds the original value of the SAVE_EXPR node.
4668 PRE_P points to the list where side effects that must happen before
4669 *EXPR_P should be stored. */
4671 static enum gimplify_status
4672 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4674 enum gimplify_status ret = GS_ALL_DONE;
4675 tree val;
4677 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4678 val = TREE_OPERAND (*expr_p, 0);
4680 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4681 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4683 /* The operand may be a void-valued expression such as SAVE_EXPRs
4684 generated by the Java frontend for class initialization. It is
4685 being executed only for its side-effects. */
4686 if (TREE_TYPE (val) == void_type_node)
4688 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4689 is_gimple_stmt, fb_none);
4690 val = NULL;
4692 else
4693 val = get_initialized_tmp_var (val, pre_p, post_p);
4695 TREE_OPERAND (*expr_p, 0) = val;
4696 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4699 *expr_p = val;
4701 return ret;
4704 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4706 unary_expr
4707 : ...
4708 | '&' varname
4711 PRE_P points to the list where side effects that must happen before
4712 *EXPR_P should be stored.
4714 POST_P points to the list where side effects that must happen after
4715 *EXPR_P should be stored. */
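/* Editorial sketch (illustrative only) of the rewrites handled below:

     &*p                        -->  p   (plus a cast if the types differ)
     &VIEW_CONVERT_EXPR<T>(x)   -->  (T *) &x
     &other                     -->  operand gimplified to something
                                     addressable and marked TREE_ADDRESSABLE  */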
4717 static enum gimplify_status
4718 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4720 tree expr = *expr_p;
4721 tree op0 = TREE_OPERAND (expr, 0);
4722 enum gimplify_status ret;
4723 location_t loc = EXPR_LOCATION (*expr_p);
4725 switch (TREE_CODE (op0))
4727 case INDIRECT_REF:
4728 case MISALIGNED_INDIRECT_REF:
4729 do_indirect_ref:
4730 /* Check if we are dealing with an expression of the form '&*ptr'.
4731 While the front end folds away '&*ptr' into 'ptr', these
4732 expressions may be generated internally by the compiler (e.g.,
4733 builtins like __builtin_va_end). */
4734 /* Caution: the silent array decomposition semantics we allow for
4735 ADDR_EXPR means we can't always discard the pair. */
4736 /* Gimplification of the ADDR_EXPR operand may drop
4737 cv-qualification conversions, so make sure we add them if
4738 needed. */
4740 tree op00 = TREE_OPERAND (op0, 0);
4741 tree t_expr = TREE_TYPE (expr);
4742 tree t_op00 = TREE_TYPE (op00);
4744 if (!useless_type_conversion_p (t_expr, t_op00))
4745 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4746 *expr_p = op00;
4747 ret = GS_OK;
4749 break;
4751 case VIEW_CONVERT_EXPR:
4752 /* Take the address of our operand and then convert it to the type of
4753 this ADDR_EXPR.
4755 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
4756 all clear. The impact of this transformation is even less clear. */
4758 /* If the operand is a useless conversion, look through it. Doing so
4759 guarantees that the ADDR_EXPR and its operand will remain of the
4760 same type. */
4761 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4762 op0 = TREE_OPERAND (op0, 0);
4764 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4765 build_fold_addr_expr_loc (loc,
4766 TREE_OPERAND (op0, 0)));
4767 ret = GS_OK;
4768 break;
4770 default:
4771 /* We use fb_either here because the C frontend sometimes takes
4772 the address of a call that returns a struct; see
4773 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4774 the implied temporary explicit. */
4776 /* Make the operand addressable. */
4777 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4778 is_gimple_addressable, fb_either);
4779 if (ret == GS_ERROR)
4780 break;
4782 /* Then mark it. Beware that it may not be possible to do so directly
4783 if a temporary has been created by the gimplification. */
4784 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4786 op0 = TREE_OPERAND (expr, 0);
4788 /* For various reasons, the gimplification of the expression
4789 may have made a new INDIRECT_REF. */
4790 if (TREE_CODE (op0) == INDIRECT_REF)
4791 goto do_indirect_ref;
4793 mark_addressable (TREE_OPERAND (expr, 0));
4795 /* The FEs may end up building ADDR_EXPRs early on a decl with
4796 an incomplete type. Re-build ADDR_EXPRs in canonical form
4797 here. */
4798 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4799 *expr_p = build_fold_addr_expr (op0);
4801 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4802 recompute_tree_invariant_for_addr_expr (*expr_p);
4804 /* If we re-built the ADDR_EXPR add a conversion to the original type
4805 if required. */
4806 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4807 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4809 break;
4812 return ret;
4815 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4816 value; output operands should be a gimple lvalue. */
4818 static enum gimplify_status
4819 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4821 tree expr;
4822 int noutputs;
4823 const char **oconstraints;
4824 int i;
4825 tree link;
4826 const char *constraint;
4827 bool allows_mem, allows_reg, is_inout;
4828 enum gimplify_status ret, tret;
4829 gimple stmt;
4830 VEC(tree, gc) *inputs;
4831 VEC(tree, gc) *outputs;
4832 VEC(tree, gc) *clobbers;
4833 VEC(tree, gc) *labels;
4834 tree link_next;
4836 expr = *expr_p;
4837 noutputs = list_length (ASM_OUTPUTS (expr));
4838 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4840 inputs = outputs = clobbers = labels = NULL;
4842 ret = GS_ALL_DONE;
4843 link_next = NULL_TREE;
4844 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4846 bool ok;
4847 size_t constraint_len;
4849 link_next = TREE_CHAIN (link);
4851 oconstraints[i]
4852 = constraint
4853 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4854 constraint_len = strlen (constraint);
4855 if (constraint_len == 0)
4856 continue;
4858 ok = parse_output_constraint (&constraint, i, 0, 0,
4859 &allows_mem, &allows_reg, &is_inout);
4860 if (!ok)
4862 ret = GS_ERROR;
4863 is_inout = false;
4866 if (!allows_reg && allows_mem)
4867 mark_addressable (TREE_VALUE (link));
4869 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4870 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4871 fb_lvalue | fb_mayfail);
4872 if (tret == GS_ERROR)
4874 error ("invalid lvalue in asm output %d", i);
4875 ret = tret;
4878 VEC_safe_push (tree, gc, outputs, link);
4879 TREE_CHAIN (link) = NULL_TREE;
4881 if (is_inout)
4883 /* An input/output operand. To give the optimizers more
4884 flexibility, split it into separate input and output
4885 operands. */
4886 tree input;
4887 char buf[10];
4889 /* Turn the in/out constraint into an output constraint. */
4890 char *p = xstrdup (constraint);
4891 p[0] = '=';
4892 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4894 /* And add a matching input constraint. */
4895 if (allows_reg)
4897 sprintf (buf, "%d", i);
4899 /* If there are multiple alternatives in the constraint,
4900 handle each of them individually. Those that allow a register
4901 will be replaced with the operand number; the others will stay
4902 unchanged. */
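/* Editorial worked example (not from the original source): an in/out
   operand 0 with constraint "+r,m" is split into an output "=r,m"
   plus a matching input whose constraint becomes "0,m" -- the
   register alternative is replaced by the operand number, while the
   memory alternative is kept unchanged.  */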
4903 if (strchr (p, ',') != NULL)
4905 size_t len = 0, buflen = strlen (buf);
4906 char *beg, *end, *str, *dst;
4908 for (beg = p + 1;;)
4910 end = strchr (beg, ',');
4911 if (end == NULL)
4912 end = strchr (beg, '\0');
4913 if ((size_t) (end - beg) < buflen)
4914 len += buflen + 1;
4915 else
4916 len += end - beg + 1;
4917 if (*end)
4918 beg = end + 1;
4919 else
4920 break;
4923 str = (char *) alloca (len);
4924 for (beg = p + 1, dst = str;;)
4926 const char *tem;
4927 bool mem_p, reg_p, inout_p;
4929 end = strchr (beg, ',');
4930 if (end)
4931 *end = '\0';
4932 beg[-1] = '=';
4933 tem = beg - 1;
4934 parse_output_constraint (&tem, i, 0, 0,
4935 &mem_p, &reg_p, &inout_p);
4936 if (dst != str)
4937 *dst++ = ',';
4938 if (reg_p)
4940 memcpy (dst, buf, buflen);
4941 dst += buflen;
4943 else
4945 if (end)
4946 len = end - beg;
4947 else
4948 len = strlen (beg);
4949 memcpy (dst, beg, len);
4950 dst += len;
4952 if (end)
4953 beg = end + 1;
4954 else
4955 break;
4957 *dst = '\0';
4958 input = build_string (dst - str, str);
4960 else
4961 input = build_string (strlen (buf), buf);
4963 else
4964 input = build_string (constraint_len - 1, constraint + 1);
4966 free (p);
4968 input = build_tree_list (build_tree_list (NULL_TREE, input),
4969 unshare_expr (TREE_VALUE (link)));
4970 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4974 link_next = NULL_TREE;
4975 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4977 link_next = TREE_CHAIN (link);
4978 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4979 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4980 oconstraints, &allows_mem, &allows_reg);
4982 /* If we can't make copies, we can only accept memory. */
4983 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4985 if (allows_mem)
4986 allows_reg = 0;
4987 else
4989 error ("impossible constraint in %<asm%>");
4990 error ("non-memory input %d must stay in memory", i);
4991 return GS_ERROR;
4995 /* If the operand is a memory input, it should be an lvalue. */
4996 if (!allows_reg && allows_mem)
4998 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4999 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5000 mark_addressable (TREE_VALUE (link));
5001 if (tret == GS_ERROR)
5003 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5004 input_location = EXPR_LOCATION (TREE_VALUE (link));
5005 error ("memory input %d is not directly addressable", i);
5006 ret = tret;
5009 else
5011 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5012 is_gimple_asm_val, fb_rvalue);
5013 if (tret == GS_ERROR)
5014 ret = tret;
5017 TREE_CHAIN (link) = NULL_TREE;
5018 VEC_safe_push (tree, gc, inputs, link);
5021 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5022 VEC_safe_push (tree, gc, clobbers, link);
5024 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5025 VEC_safe_push (tree, gc, labels, link);
5027 /* Do not add ASMs with errors to the gimple IL stream. */
5028 if (ret != GS_ERROR)
5030 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5031 inputs, outputs, clobbers, labels);
5033 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5034 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5036 gimplify_seq_add_stmt (pre_p, stmt);
5039 return ret;
5042 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5043 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5044 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5045 return to this function.
5047 FIXME should we complexify the prequeue handling instead? Or use flags
5048 for all the cleanups and let the optimizer tighten them up? The current
5049 code seems pretty fragile; it will break on a cleanup within any
5050 non-conditional nesting. But any such nesting would be broken, anyway;
5051 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5052 and continues out of it. We can do that at the RTL level, though, so
5053 having an optimizer to tighten up try/finally regions would be a Good
5054 Thing. */
5056 static enum gimplify_status
5057 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5059 gimple_stmt_iterator iter;
5060 gimple_seq body_sequence = NULL;
5062 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5064 /* We only care about the number of conditions between the innermost
5065 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5066 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5067 int old_conds = gimplify_ctxp->conditions;
5068 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5069 gimplify_ctxp->conditions = 0;
5070 gimplify_ctxp->conditional_cleanups = NULL;
5072 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5074 gimplify_ctxp->conditions = old_conds;
5075 gimplify_ctxp->conditional_cleanups = old_cleanups;
5077 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5079 gimple wce = gsi_stmt (iter);
5081 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5083 if (gsi_one_before_end_p (iter))
5085 /* Note that gsi_insert_seq_before and gsi_remove do not
5086 scan operands, unlike some other sequence mutators. */
5087 gsi_insert_seq_before_without_update (&iter,
5088 gimple_wce_cleanup (wce),
5089 GSI_SAME_STMT);
5090 gsi_remove (&iter, true);
5091 break;
5093 else
5095 gimple gtry;
5096 gimple_seq seq;
5097 enum gimple_try_flags kind;
5099 if (gimple_wce_cleanup_eh_only (wce))
5100 kind = GIMPLE_TRY_CATCH;
5101 else
5102 kind = GIMPLE_TRY_FINALLY;
5103 seq = gsi_split_seq_after (iter);
5105 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5106 /* Do not use gsi_replace here, as it may scan operands.
5107 We want to do a simple structural modification only. */
5108 *gsi_stmt_ptr (&iter) = gtry;
5109 iter = gsi_start (seq);
5112 else
5113 gsi_next (&iter);
5116 gimplify_seq_add_seq (pre_p, body_sequence);
5117 if (temp)
5119 *expr_p = temp;
5120 return GS_OK;
5122 else
5124 *expr_p = NULL;
5125 return GS_ALL_DONE;
5129 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5130 is the cleanup action required. EH_ONLY is true if the cleanup should
5131 only be executed if an exception is thrown, not on normal exit. */
5133 static void
5134 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5136 gimple wce;
5137 gimple_seq cleanup_stmts = NULL;
5139 /* Errors can result in improperly nested cleanups. Which results in
5140 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5141 if (errorcount || sorrycount)
5142 return;
5144 if (gimple_conditional_context ())
5146 /* If we're in a conditional context, this is more complex. We only
5147 want to run the cleanup if we actually ran the initialization that
5148 necessitates it, but we want to run it after the end of the
5149 conditional context. So we wrap the try/finally around the
5150 condition and use a flag to determine whether or not to actually
5151 run the destructor. Thus
5153 test ? f(A()) : 0
5155 becomes (approximately)
5157 flag = 0;
5158 try {
5159 if (test) { A::A(temp); flag = 1; val = f(temp); }
5160 else { val = 0; }
5161 } finally {
5162 if (flag) A::~A(temp);
5166 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5167 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5168 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5170 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5171 gimplify_stmt (&cleanup, &cleanup_stmts);
5172 wce = gimple_build_wce (cleanup_stmts);
5174 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5175 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5176 gimplify_seq_add_stmt (pre_p, ftrue);
5178 /* Because of this manipulation, and the EH edges that jump
5179 threading cannot redirect, the temporary (VAR) will appear
5180 to be used uninitialized. Don't warn. */
5181 TREE_NO_WARNING (var) = 1;
5183 else
5185 gimplify_stmt (&cleanup, &cleanup_stmts);
5186 wce = gimple_build_wce (cleanup_stmts);
5187 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5188 gimplify_seq_add_stmt (pre_p, wce);
5192 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
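/* Editorial sketch (illustrative only): a TARGET_EXPR such as

     TARGET_EXPR <D.1, f ()>      <-- D.1 is a hypothetical slot name

   used outside an INIT_EXPR is expanded by declaring the temporary,
   gimplifying 'D.1 = f ()' into PRE_P, pushing any cleanup for D.1,
   and replacing *EXPR_P with D.1.  */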
5194 static enum gimplify_status
5195 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5197 tree targ = *expr_p;
5198 tree temp = TARGET_EXPR_SLOT (targ);
5199 tree init = TARGET_EXPR_INITIAL (targ);
5200 enum gimplify_status ret;
5202 if (init)
5204 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp
5205 to the temps list. Also handle variable-length TARGET_EXPRs. */
5206 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5208 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5209 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5210 gimplify_vla_decl (temp, pre_p);
5212 else
5213 gimple_add_tmp_var (temp);
5215 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5216 expression is supposed to initialize the slot. */
5217 if (VOID_TYPE_P (TREE_TYPE (init)))
5218 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5219 else
5221 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5222 init = init_expr;
5223 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5224 init = NULL;
5225 ggc_free (init_expr);
5227 if (ret == GS_ERROR)
5229 /* PR c++/28266 Make sure this is expanded only once. */
5230 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5231 return GS_ERROR;
5233 if (init)
5234 gimplify_and_add (init, pre_p);
5236 /* If needed, push the cleanup for the temp. */
5237 if (TARGET_EXPR_CLEANUP (targ))
5238 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5239 CLEANUP_EH_ONLY (targ), pre_p);
5241 /* Only expand this once. */
5242 TREE_OPERAND (targ, 3) = init;
5243 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5245 else
5246 /* We should have expanded this before. */
5247 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5249 *expr_p = temp;
5250 return GS_OK;
5253 /* Gimplification of expression trees. */
5255 /* Gimplify an expression which appears at statement context. The
5256 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5257 NULL, a new sequence is allocated.
5259 Return true if we actually added a statement to the queue. */
5261 bool
5262 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5264 gimple_seq_node last;
5266 if (!*seq_p)
5267 *seq_p = gimple_seq_alloc ();
5269 last = gimple_seq_last (*seq_p);
5270 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5271 return last != gimple_seq_last (*seq_p);
5275 /* Add FIRSTPRIVATE entries for DECL in CTX and in the surrounding OpenMP
5276 parallels. If entries already exist, force them to be some flavor of private.
5277 If there is no enclosing parallel, do nothing. */
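/* Editorial sketch (illustrative only): given

     int n = ...;
     #pragma omp parallel
       { int vla[n]; ... }

   the size expression 'n' must become firstprivate in the enclosing
   parallel so that each thread can lay out its private copy of 'vla'.  */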
5279 void
5280 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5282 splay_tree_node n;
5284 if (decl == NULL || !DECL_P (decl))
5285 return;
5289 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5290 if (n != NULL)
5292 if (n->value & GOVD_SHARED)
5293 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5294 else
5295 return;
5297 else if (ctx->region_type != ORT_WORKSHARE)
5298 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5300 ctx = ctx->outer_context;
5302 while (ctx);
5305 /* Similarly for each of the type sizes of TYPE. */
5307 static void
5308 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5310 if (type == NULL || type == error_mark_node)
5311 return;
5312 type = TYPE_MAIN_VARIANT (type);
5314 if (pointer_set_insert (ctx->privatized_types, type))
5315 return;
5317 switch (TREE_CODE (type))
5319 case INTEGER_TYPE:
5320 case ENUMERAL_TYPE:
5321 case BOOLEAN_TYPE:
5322 case REAL_TYPE:
5323 case FIXED_POINT_TYPE:
5324 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5325 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5326 break;
5328 case ARRAY_TYPE:
5329 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5330 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5331 break;
5333 case RECORD_TYPE:
5334 case UNION_TYPE:
5335 case QUAL_UNION_TYPE:
5337 tree field;
5338 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5339 if (TREE_CODE (field) == FIELD_DECL)
5341 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5342 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5345 break;
5347 case POINTER_TYPE:
5348 case REFERENCE_TYPE:
5349 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5350 break;
5352 default:
5353 break;
5356 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5357 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5358 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5361 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5363 static void
5364 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5366 splay_tree_node n;
5367 unsigned int nflags;
5368 tree t;
5370 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5371 return;
5373 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5374 there are constructors involved somewhere. */
5375 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5376 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5377 flags |= GOVD_SEEN;
5379 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5380 if (n != NULL)
5382 /* We shouldn't be re-adding the decl with the same data
5383 sharing class. */
5384 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5385 /* The only combination of data sharing classes we should see is
5386 FIRSTPRIVATE and LASTPRIVATE. */
5387 nflags = n->value | flags;
5388 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5389 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5390 n->value = nflags;
5391 return;
5394 /* When adding a variable-sized variable, we have to handle all sorts
5395 of additional bits of data: the pointer replacement variable, and
5396 the parameters of the type. */
5397 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5399 /* Add the pointer replacement variable as PRIVATE if the variable
5400 replacement is private, else FIRSTPRIVATE since we'll need the
5401 address of the original variable either for SHARED, or for the
5402 copy into or out of the context. */
5403 if (!(flags & GOVD_LOCAL))
5405 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5406 nflags |= flags & GOVD_SEEN;
5407 t = DECL_VALUE_EXPR (decl);
5408 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5409 t = TREE_OPERAND (t, 0);
5410 gcc_assert (DECL_P (t));
5411 omp_add_variable (ctx, t, nflags);
5414 /* Add all of the variable and type parameters (which should have
5415 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5416 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5417 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5418 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5420 /* The variable-sized variable itself is never SHARED, only some form
5421 of PRIVATE. The sharing would take place via the pointer variable
5422 which we remapped above. */
5423 if (flags & GOVD_SHARED)
5424 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5425 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5427 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5428 alloca statement we generate for the variable, so make sure it
5429 is available. This isn't automatically needed for the SHARED
5430 case, since we won't be allocating local storage then.
5431 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5432 in this case omp_notice_variable will be called later
5433 on when it is gimplified. */
5434 else if (! (flags & GOVD_LOCAL))
5435 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5437 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5439 gcc_assert ((flags & GOVD_LOCAL) == 0);
5440 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5442 /* Similar to the direct variable sized case above, we'll need the
5443 size of references being privatized. */
5444 if ((flags & GOVD_SHARED) == 0)
5446 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5447 if (TREE_CODE (t) != INTEGER_CST)
5448 omp_notice_variable (ctx, t, true);
5452 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5455 /* Record the fact that DECL was used within the OpenMP context CTX.
5456 IN_CODE is true when real code uses DECL, and false when we should
5457 merely emit default(none) errors. Return true if DECL is going to
5458 be remapped and thus DECL shouldn't be gimplified into its
5459 DECL_VALUE_EXPR (if any). */
5461 static bool
5462 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5464 splay_tree_node n;
5465 unsigned flags = in_code ? GOVD_SEEN : 0;
5466 bool ret = false, shared;
5468 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5469 return false;
5471 /* Threadprivate variables are predetermined. */
5472 if (is_global_var (decl))
5474 if (DECL_THREAD_LOCAL_P (decl))
5475 return false;
5477 if (DECL_HAS_VALUE_EXPR_P (decl))
5479 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5481 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5482 return false;
5486 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5487 if (n == NULL)
5489 enum omp_clause_default_kind default_kind, kind;
5490 struct gimplify_omp_ctx *octx;
5492 if (ctx->region_type == ORT_WORKSHARE)
5493 goto do_outer;
5495 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5496 remapped firstprivate instead of shared. To some extent this is
5497 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5498 default_kind = ctx->default_kind;
5499 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5500 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5501 default_kind = kind;
5503 switch (default_kind)
5505 case OMP_CLAUSE_DEFAULT_NONE:
5506 error ("%qE not specified in enclosing parallel",
5507 DECL_NAME (decl));
5508 error_at (ctx->location, "enclosing parallel");
5509 /* FALLTHRU */
5510 case OMP_CLAUSE_DEFAULT_SHARED:
5511 flags |= GOVD_SHARED;
5512 break;
5513 case OMP_CLAUSE_DEFAULT_PRIVATE:
5514 flags |= GOVD_PRIVATE;
5515 break;
5516 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5517 flags |= GOVD_FIRSTPRIVATE;
5518 break;
5519 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5520 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5521 gcc_assert (ctx->region_type == ORT_TASK);
5522 if (ctx->outer_context)
5523 omp_notice_variable (ctx->outer_context, decl, in_code);
5524 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5526 splay_tree_node n2;
5528 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5529 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5531 flags |= GOVD_FIRSTPRIVATE;
5532 break;
5534 if ((octx->region_type & ORT_PARALLEL) != 0)
5535 break;
5537 if (flags & GOVD_FIRSTPRIVATE)
5538 break;
5539 if (octx == NULL
5540 && (TREE_CODE (decl) == PARM_DECL
5541 || (!is_global_var (decl)
5542 && DECL_CONTEXT (decl) == current_function_decl)))
5544 flags |= GOVD_FIRSTPRIVATE;
5545 break;
5547 flags |= GOVD_SHARED;
5548 break;
5549 default:
5550 gcc_unreachable ();
5553 if ((flags & GOVD_PRIVATE)
5554 && lang_hooks.decls.omp_private_outer_ref (decl))
5555 flags |= GOVD_PRIVATE_OUTER_REF;
5557 omp_add_variable (ctx, decl, flags);
5559 shared = (flags & GOVD_SHARED) != 0;
5560 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5561 goto do_outer;
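/* DECL is already on record.  If it is variable-sized and this is its
   first use in real code, it has been remapped through a DECL_VALUE_EXPR
   of the form *ptr; mark the underlying pointer variable as seen too.  */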
5564 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5565 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5566 && DECL_SIZE (decl)
5567 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5569 splay_tree_node n2;
5570 tree t = DECL_VALUE_EXPR (decl);
5571 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5572 t = TREE_OPERAND (t, 0);
5573 gcc_assert (DECL_P (t));
5574 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5575 n2->value |= GOVD_SEEN;
5578 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5579 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5581 /* If nothing changed, there's nothing left to do. */
5582 if ((n->value & flags) == flags)
5583 return ret;
5584 flags |= n->value;
5585 n->value = flags;
5587 do_outer:
5588 /* If the variable is private in the current context, then we don't
5589 need to propagate anything to an outer context. */
5590 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5591 return ret;
5592 if (ctx->outer_context
5593 && omp_notice_variable (ctx->outer_context, decl, in_code))
5594 return true;
5595 return ret;
5598 /* Verify that DECL is private within CTX. If there's specific information
5599 to the contrary in the innermost scope, generate an error. */
5601 static bool
5602 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5604 splay_tree_node n;
5606 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5607 if (n != NULL)
5609 if (n->value & GOVD_SHARED)
5611 if (ctx == gimplify_omp_ctxp)
5613 error ("iteration variable %qE should be private",
5614 DECL_NAME (decl));
5615 n->value = GOVD_PRIVATE;
5616 return true;
5618 else
5619 return false;
5621 else if ((n->value & GOVD_EXPLICIT) != 0
5622 && (ctx == gimplify_omp_ctxp
5623 || (ctx->region_type == ORT_COMBINED_PARALLEL
5624 && gimplify_omp_ctxp->outer_context == ctx)))
5626 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5627 error ("iteration variable %qE should not be firstprivate",
5628 DECL_NAME (decl));
5629 else if ((n->value & GOVD_REDUCTION) != 0)
5630 error ("iteration variable %qE should not be reduction",
5631 DECL_NAME (decl));
5633 return (ctx == gimplify_omp_ctxp
5634 || (ctx->region_type == ORT_COMBINED_PARALLEL
5635 && gimplify_omp_ctxp->outer_context == ctx));
5638 if (ctx->region_type != ORT_WORKSHARE)
5639 return false;
5640 else if (ctx->outer_context)
5641 return omp_is_private (ctx->outer_context, decl);
5642 return false;
5645 /* Return true if DECL is private within a parallel region
5646 that binds to the current construct's context or in the parallel
5647 region's REDUCTION clause. */
5649 static bool
5650 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5652 splay_tree_node n;
do
5656 ctx = ctx->outer_context;
5657 if (ctx == NULL)
5658 return !(is_global_var (decl)
5659 /* References might be private, but might be shared too. */
5660 || lang_hooks.decls.omp_privatize_by_reference (decl));
5662 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5663 if (n != NULL)
5664 return (n->value & GOVD_SHARED) == 0;
5666 while (ctx->region_type == ORT_WORKSHARE);
5667 return false;
5670 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5671 omp context and noticing variable uses in any enclosing omp contexts. */
5673 static void
5674 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5675 enum omp_region_type region_type)
5677 struct gimplify_omp_ctx *ctx, *outer_ctx;
5678 struct gimplify_ctx gctx;
5679 tree c;
5681 ctx = new_omp_context (region_type);
5682 outer_ctx = ctx->outer_context;
5684 while ((c = *list_p) != NULL)
5686 bool remove = false;
5687 bool notice_outer = true;
5688 const char *check_non_private = NULL;
5689 unsigned int flags;
5690 tree decl;
5692 switch (OMP_CLAUSE_CODE (c))
5694 case OMP_CLAUSE_PRIVATE:
5695 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5696 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5698 flags |= GOVD_PRIVATE_OUTER_REF;
5699 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5701 else
5702 notice_outer = false;
5703 goto do_add;
5704 case OMP_CLAUSE_SHARED:
5705 flags = GOVD_SHARED | GOVD_EXPLICIT;
5706 goto do_add;
5707 case OMP_CLAUSE_FIRSTPRIVATE:
5708 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5709 check_non_private = "firstprivate";
5710 goto do_add;
5711 case OMP_CLAUSE_LASTPRIVATE:
5712 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5713 check_non_private = "lastprivate";
5714 goto do_add;
5715 case OMP_CLAUSE_REDUCTION:
5716 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5717 check_non_private = "reduction";
5718 goto do_add;
5720 do_add:
5721 decl = OMP_CLAUSE_DECL (c);
5722 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5724 remove = true;
5725 break;
5727 omp_add_variable (ctx, decl, flags);
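/* For a REDUCTION clause with a placeholder, gimplify the INIT and
   MERGE expressions now, within the new context, into GIMPLE sequences
   stored on the clause, and clear the original trees.  */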
5728 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5729 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5731 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5732 GOVD_LOCAL | GOVD_SEEN);
5733 gimplify_omp_ctxp = ctx;
5734 push_gimplify_context (&gctx);
5736 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5737 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5739 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5740 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5741 pop_gimplify_context
5742 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5743 push_gimplify_context (&gctx);
5744 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5745 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5746 pop_gimplify_context
5747 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5748 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5749 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5751 gimplify_omp_ctxp = outer_ctx;
5753 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5754 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5756 gimplify_omp_ctxp = ctx;
5757 push_gimplify_context (&gctx);
5758 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5760 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5761 NULL, NULL);
5762 TREE_SIDE_EFFECTS (bind) = 1;
5763 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5764 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5766 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5767 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5768 pop_gimplify_context
5769 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5770 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5772 gimplify_omp_ctxp = outer_ctx;
5774 if (notice_outer)
5775 goto do_notice;
5776 break;
5778 case OMP_CLAUSE_COPYIN:
5779 case OMP_CLAUSE_COPYPRIVATE:
5780 decl = OMP_CLAUSE_DECL (c);
5781 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5783 remove = true;
5784 break;
5786 do_notice:
5787 if (outer_ctx)
5788 omp_notice_variable (outer_ctx, decl, true);
5789 if (check_non_private
5790 && region_type == ORT_WORKSHARE
5791 && omp_check_private (ctx, decl))
5793 error ("%s variable %qE is private in outer context",
5794 check_non_private, DECL_NAME (decl));
5795 remove = true;
5797 break;
5799 case OMP_CLAUSE_IF:
5800 OMP_CLAUSE_OPERAND (c, 0)
5801 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5802 /* Fall through. */
5804 case OMP_CLAUSE_SCHEDULE:
5805 case OMP_CLAUSE_NUM_THREADS:
5806 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5807 is_gimple_val, fb_rvalue) == GS_ERROR)
5808 remove = true;
5809 break;
5811 case OMP_CLAUSE_NOWAIT:
5812 case OMP_CLAUSE_ORDERED:
5813 case OMP_CLAUSE_UNTIED:
5814 case OMP_CLAUSE_COLLAPSE:
5815 break;
5817 case OMP_CLAUSE_DEFAULT:
5818 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5819 break;
5821 default:
5822 gcc_unreachable ();
5825 if (remove)
5826 *list_p = OMP_CLAUSE_CHAIN (c);
5827 else
5828 list_p = &OMP_CLAUSE_CHAIN (c);
5831 gimplify_omp_ctxp = ctx;
5834 /* For all variables that were not actually used within the context,
5835 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
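/* Callback for the splay_tree_foreach below: build an implicit
   data-sharing clause (SHARED, PRIVATE, or FIRSTPRIVATE) for the
   variable recorded in node N and prepend it to the clause list
   pointed to by DATA.  */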
5837 static int
5838 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5840 tree *list_p = (tree *) data;
5841 tree decl = (tree) n->key;
5842 unsigned flags = n->value;
5843 enum omp_clause_code code;
5844 tree clause;
5845 bool private_debug;
5847 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5848 return 0;
5849 if ((flags & GOVD_SEEN) == 0)
5850 return 0;
5851 if (flags & GOVD_DEBUG_PRIVATE)
5853 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5854 private_debug = true;
5856 else
5857 private_debug
5858 = lang_hooks.decls.omp_private_debug_clause (decl,
5859 !!(flags & GOVD_SHARED));
5860 if (private_debug)
5861 code = OMP_CLAUSE_PRIVATE;
5862 else if (flags & GOVD_SHARED)
5864 if (is_global_var (decl))
5866 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5867 while (ctx != NULL)
5869 splay_tree_node on
5870 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5871 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5872 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5873 break;
5874 ctx = ctx->outer_context;
5876 if (ctx == NULL)
5877 return 0;
5879 code = OMP_CLAUSE_SHARED;
5881 else if (flags & GOVD_PRIVATE)
5882 code = OMP_CLAUSE_PRIVATE;
5883 else if (flags & GOVD_FIRSTPRIVATE)
5884 code = OMP_CLAUSE_FIRSTPRIVATE;
5885 else
5886 gcc_unreachable ();
5888 clause = build_omp_clause (input_location, code);
5889 OMP_CLAUSE_DECL (clause) = decl;
5890 OMP_CLAUSE_CHAIN (clause) = *list_p;
5891 if (private_debug)
5892 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5893 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5894 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5895 *list_p = clause;
5896 lang_hooks.decls.omp_finish_clause (clause);
5898 return 0;
5901 static void
5902 gimplify_adjust_omp_clauses (tree *list_p)
5904 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5905 tree c, decl;
5907 while ((c = *list_p) != NULL)
5909 splay_tree_node n;
5910 bool remove = false;
5912 switch (OMP_CLAUSE_CODE (c))
5914 case OMP_CLAUSE_PRIVATE:
5915 case OMP_CLAUSE_SHARED:
5916 case OMP_CLAUSE_FIRSTPRIVATE:
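/* Drop an explicit clause whose variable was never actually seen in
   the body; otherwise it may be downgraded to a debug-only PRIVATE
   clause.  */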
5917 decl = OMP_CLAUSE_DECL (c);
5918 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5919 remove = !(n->value & GOVD_SEEN);
5920 if (! remove)
5922 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5923 if ((n->value & GOVD_DEBUG_PRIVATE)
5924 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5926 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5927 || ((n->value & GOVD_DATA_SHARE_CLASS)
5928 == GOVD_PRIVATE));
5929 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5930 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5933 break;
5935 case OMP_CLAUSE_LASTPRIVATE:
5936 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5937 accurately reflect the presence of a FIRSTPRIVATE clause. */
5938 decl = OMP_CLAUSE_DECL (c);
5939 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5940 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5941 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5942 break;
5944 case OMP_CLAUSE_REDUCTION:
5945 case OMP_CLAUSE_COPYIN:
5946 case OMP_CLAUSE_COPYPRIVATE:
5947 case OMP_CLAUSE_IF:
5948 case OMP_CLAUSE_NUM_THREADS:
5949 case OMP_CLAUSE_SCHEDULE:
5950 case OMP_CLAUSE_NOWAIT:
5951 case OMP_CLAUSE_ORDERED:
5952 case OMP_CLAUSE_DEFAULT:
5953 case OMP_CLAUSE_UNTIED:
5954 case OMP_CLAUSE_COLLAPSE:
5955 break;
5957 default:
5958 gcc_unreachable ();
5961 if (remove)
5962 *list_p = OMP_CLAUSE_CHAIN (c);
5963 else
5964 list_p = &OMP_CLAUSE_CHAIN (c);
5967 /* Add in any implicit data sharing. */
5968 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5970 gimplify_omp_ctxp = ctx->outer_context;
5971 delete_omp_context (ctx);
5974 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5975 gimplification of the body, as well as scanning the body for used
5976 variables. We need to do this scan now, because variable-sized
5977 decls will be decomposed during gimplification. */
5979 static void
5980 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5982 tree expr = *expr_p;
5983 gimple g;
5984 gimple_seq body = NULL;
5985 struct gimplify_ctx gctx;
5987 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5988 OMP_PARALLEL_COMBINED (expr)
5989 ? ORT_COMBINED_PARALLEL
5990 : ORT_PARALLEL);
5992 push_gimplify_context (&gctx);
5994 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5995 if (gimple_code (g) == GIMPLE_BIND)
5996 pop_gimplify_context (g);
5997 else
5998 pop_gimplify_context (NULL);
6000 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6002 g = gimple_build_omp_parallel (body,
6003 OMP_PARALLEL_CLAUSES (expr),
6004 NULL_TREE, NULL_TREE);
6005 if (OMP_PARALLEL_COMBINED (expr))
6006 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6007 gimplify_seq_add_stmt (pre_p, g);
6008 *expr_p = NULL_TREE;
6011 /* Gimplify the contents of an OMP_TASK statement. This involves
6012 gimplification of the body, as well as scanning the body for used
6013 variables. We need to do this scan now, because variable-sized
6014 decls will be decomposed during gimplification. */
6016 static void
6017 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6019 tree expr = *expr_p;
6020 gimple g;
6021 gimple_seq body = NULL;
6022 struct gimplify_ctx gctx;
6024 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
6026 push_gimplify_context (&gctx);
6028 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6029 if (gimple_code (g) == GIMPLE_BIND)
6030 pop_gimplify_context (g);
6031 else
6032 pop_gimplify_context (NULL);
6034 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6036 g = gimple_build_omp_task (body,
6037 OMP_TASK_CLAUSES (expr),
6038 NULL_TREE, NULL_TREE,
6039 NULL_TREE, NULL_TREE, NULL_TREE);
6040 gimplify_seq_add_stmt (pre_p, g);
6041 *expr_p = NULL_TREE;
6044 /* Gimplify the gross structure of an OMP_FOR statement. */
6046 static enum gimplify_status
6047 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6049 tree for_stmt, decl, var, t;
6050 enum gimplify_status ret = GS_ALL_DONE;
6051 enum gimplify_status tret;
6052 gimple gfor;
6053 gimple_seq for_body, for_pre_body;
6054 int i;
6056 for_stmt = *expr_p;
6058 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6059 ORT_WORKSHARE);
6061 /* Handle OMP_FOR_INIT. */
6062 for_pre_body = NULL;
6063 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6064 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6066 for_body = gimple_seq_alloc ();
6067 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6068 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6069 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6070 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6071 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6073 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6074 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6075 decl = TREE_OPERAND (t, 0);
6076 gcc_assert (DECL_P (decl));
6077 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6078 || POINTER_TYPE_P (TREE_TYPE (decl)));
6080 /* Make sure the iteration variable is private. */
6081 if (omp_is_private (gimplify_omp_ctxp, decl))
6082 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6083 else
6084 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6086 /* If DECL is not a gimple register, create a temporary variable to act
6087 as an iteration counter. This is valid, since DECL cannot be
6088 modified in the body of the loop. */
6089 if (!is_gimple_reg (decl))
6091 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6092 TREE_OPERAND (t, 0) = var;
6094 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6096 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6098 else
6099 var = decl;
6101 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6102 is_gimple_val, fb_rvalue);
6103 ret = MIN (ret, tret);
6104 if (ret == GS_ERROR)
6105 return ret;
6107 /* Handle OMP_FOR_COND. */
6108 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6109 gcc_assert (COMPARISON_CLASS_P (t));
6110 gcc_assert (TREE_OPERAND (t, 0) == decl);
6112 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6113 is_gimple_val, fb_rvalue);
6114 ret = MIN (ret, tret);
6116 /* Handle OMP_FOR_INCR. */
6117 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
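/* Canonicalize the increment: pre/post increment and decrement become
   explicit VAR = VAR +/- 1 assignments, and an existing MODIFY_EXPR has
   its uses of DECL rewritten to use VAR.  */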
6118 switch (TREE_CODE (t))
6120 case PREINCREMENT_EXPR:
6121 case POSTINCREMENT_EXPR:
6122 t = build_int_cst (TREE_TYPE (decl), 1);
6123 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6124 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6125 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6126 break;
6128 case PREDECREMENT_EXPR:
6129 case POSTDECREMENT_EXPR:
6130 t = build_int_cst (TREE_TYPE (decl), -1);
6131 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6132 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6133 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6134 break;
6136 case MODIFY_EXPR:
6137 gcc_assert (TREE_OPERAND (t, 0) == decl);
6138 TREE_OPERAND (t, 0) = var;
6140 t = TREE_OPERAND (t, 1);
6141 switch (TREE_CODE (t))
6143 case PLUS_EXPR:
6144 if (TREE_OPERAND (t, 1) == decl)
6146 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6147 TREE_OPERAND (t, 0) = var;
6148 break;
6151 /* Fallthru. */
6152 case MINUS_EXPR:
6153 case POINTER_PLUS_EXPR:
6154 gcc_assert (TREE_OPERAND (t, 0) == decl);
6155 TREE_OPERAND (t, 0) = var;
6156 break;
6157 default:
6158 gcc_unreachable ();
6161 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6162 is_gimple_val, fb_rvalue);
6163 ret = MIN (ret, tret);
6164 break;
6166 default:
6167 gcc_unreachable ();
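/* If a LASTPRIVATE clause names DECL but the loop now iterates on the
   temporary VAR (or this is a collapsed loop), emit DECL = DECL <op> STEP
   into the clause's GIMPLE sequence so DECL receives the final value.  */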
6170 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6172 tree c;
6173 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6174 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6175 && OMP_CLAUSE_DECL (c) == decl
6176 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6178 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6179 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6180 gcc_assert (TREE_OPERAND (t, 0) == var);
6181 t = TREE_OPERAND (t, 1);
6182 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6183 || TREE_CODE (t) == MINUS_EXPR
6184 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6185 gcc_assert (TREE_OPERAND (t, 0) == var);
6186 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6187 TREE_OPERAND (t, 1));
6188 gimplify_assign (decl, t,
6189 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6194 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6196 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6198 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6199 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6200 for_pre_body);
6202 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6204 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6205 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6206 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6207 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6208 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6209 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6210 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6211 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6214 gimplify_seq_add_stmt (pre_p, gfor);
6215 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6218 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6219 In particular, OMP_SECTIONS and OMP_SINGLE. */
6221 static void
6222 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6224 tree expr = *expr_p;
6225 gimple stmt;
6226 gimple_seq body = NULL;
6228 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6229 gimplify_and_add (OMP_BODY (expr), &body);
6230 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6232 if (TREE_CODE (expr) == OMP_SECTIONS)
6233 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6234 else if (TREE_CODE (expr) == OMP_SINGLE)
6235 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6236 else
6237 gcc_unreachable ();
6239 gimplify_seq_add_stmt (pre_p, stmt);
6242 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6243 stabilized the lhs of the atomic operation as *ADDR. Return true if
6244 EXPR is this stabilized form. */
6246 static bool
6247 goa_lhs_expr_p (tree expr, tree addr)
6249 /* Also include casts to other type variants. The C front end is fond
6250 of adding these for e.g. volatile variables. This is like
6251 STRIP_TYPE_NOPS but includes the main variant lookup. */
6252 STRIP_USELESS_TYPE_CONVERSION (expr);
6254 if (TREE_CODE (expr) == INDIRECT_REF)
6256 expr = TREE_OPERAND (expr, 0);
6257 while (expr != addr
6258 && (CONVERT_EXPR_P (expr)
6259 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6260 && TREE_CODE (expr) == TREE_CODE (addr)
6261 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6263 expr = TREE_OPERAND (expr, 0);
6264 addr = TREE_OPERAND (addr, 0);
6266 if (expr == addr)
6267 return true;
6268 return (TREE_CODE (addr) == ADDR_EXPR
6269 && TREE_CODE (expr) == ADDR_EXPR
6270 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6272 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6273 return true;
6274 return false;
6277 /* Walk *EXPR_P and replace
6278 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
6279 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6280 a subexpression, 0 if it did not, or -1 if an error was encountered. */
6282 static int
6283 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6284 tree lhs_var)
6286 tree expr = *expr_p;
6287 int saw_lhs;
6289 if (goa_lhs_expr_p (expr, lhs_addr))
6291 *expr_p = lhs_var;
6292 return 1;
6294 if (is_gimple_val (expr))
6295 return 0;
6297 saw_lhs = 0;
6298 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6300 case tcc_binary:
6301 case tcc_comparison:
6302 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6303 lhs_var);
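/* FALLTHRU: binary and comparison expressions also need their first
   operand stabilized.  */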
6304 case tcc_unary:
6305 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6306 lhs_var);
6307 break;
6308 case tcc_expression:
6309 switch (TREE_CODE (expr))
6311 case TRUTH_ANDIF_EXPR:
6312 case TRUTH_ORIF_EXPR:
6313 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6314 lhs_addr, lhs_var);
6315 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6316 lhs_addr, lhs_var);
6317 break;
6318 default:
6319 break;
6321 break;
6322 default:
6323 break;
6326 if (saw_lhs == 0)
6328 enum gimplify_status gs;
6329 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6330 if (gs != GS_ALL_DONE)
6331 saw_lhs = -1;
6334 return saw_lhs;
6338 /* Gimplify an OMP_ATOMIC statement. */
6340 static enum gimplify_status
6341 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6343 tree addr = TREE_OPERAND (*expr_p, 0);
6344 tree rhs = TREE_OPERAND (*expr_p, 1);
6345 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6346 tree tmp_load;
6348 tmp_load = create_tmp_reg (type, NULL);
6349 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6350 return GS_ERROR;
6352 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6353 != GS_ALL_DONE)
6354 return GS_ERROR;
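/* Represent the atomic update as a GIMPLE_OMP_ATOMIC_LOAD of *ADDR into
   TMP_LOAD followed by a GIMPLE_OMP_ATOMIC_STORE of the gimplified RHS.  */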
6356 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6357 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6358 != GS_ALL_DONE)
6359 return GS_ERROR;
6360 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6361 *expr_p = NULL;
6363 return GS_ALL_DONE;
6367 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6368 expression produces a value to be used as an operand inside a GIMPLE
6369 statement, the value will be stored back in *EXPR_P. This value will
6370 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6371 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6372 emitted in PRE_P and POST_P.
6374 Additionally, this process may overwrite parts of the input
6375 expression during gimplification. Ideally, it should be
6376 possible to do non-destructive gimplification.
6378 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6379 the expression needs to evaluate to a value to be used as
6380 an operand in a GIMPLE statement, this value will be stored in
6381 *EXPR_P on exit. This happens when the caller specifies one
6382 of fb_lvalue or fb_rvalue fallback flags.
6384 PRE_P will contain the sequence of GIMPLE statements corresponding
6385 to the evaluation of EXPR and all the side-effects that must
6386 be executed before the main expression. On exit, the last
6387 statement of PRE_P is the core statement being gimplified. For
6388 instance, when gimplifying 'if (++a)' the last statement in
6389 PRE_P will be 'if (t.1)' where t.1 is the result of
6390 pre-incrementing 'a'.
6392 POST_P will contain the sequence of GIMPLE statements corresponding
6393 to the evaluation of all the side-effects that must be executed
6394 after the main expression. If this is NULL, the post
6395 side-effects are stored at the end of PRE_P.
6397 The reason why the output is split in two is to handle post
6398 side-effects explicitly. In some cases, an expression may have
6399 inner and outer post side-effects which need to be emitted in
6400 an order different from the one given by the recursive
6401 traversal. For instance, for the expression (*p--)++ the post
6402 side-effects of '--' must actually occur *after* the post
6403 side-effects of '++'. However, gimplification will first visit
6404 the inner expression, so if a separate POST sequence was not
6405 used, the resulting sequence would be:
6407 1 t.1 = *p
6408 2 p = p - 1
6409 3 t.2 = t.1 + 1
6410 4 *p = t.2
6412 However, the post-decrement operation in line #2 must not be
6413 evaluated until after the store to *p at line #4, so the
6414 correct sequence should be:
6416 1 t.1 = *p
6417 2 t.2 = t.1 + 1
6418 3 *p = t.2
6419 4 p = p - 1
6421 So, by specifying a separate post queue, it is possible
6422 to emit the post side-effects in the correct order.
6423 If POST_P is NULL, an internal queue will be used. Before
6424 returning to the caller, the sequence POST_P is appended to
6425 the main output sequence PRE_P.
6427 GIMPLE_TEST_F points to a function that takes a tree T and
6428 returns nonzero if T is in the GIMPLE form requested by the
6429 caller. The GIMPLE predicates are in tree-gimple.c.
6431 FALLBACK tells the function what sort of a temporary we want if
6432 gimplification cannot produce an expression that complies with
6433 GIMPLE_TEST_F.
6435 fb_none means that no temporary should be generated
6436 fb_rvalue means that an rvalue is OK to generate
6437 fb_lvalue means that an lvalue is OK to generate
6438 fb_either means that either is OK, but an lvalue is preferable.
6439 fb_mayfail means that gimplification may fail (in which case
6440 GS_ERROR will be returned)
6442 The return value is either GS_ERROR or GS_ALL_DONE, since this
6443 function iterates until EXPR is completely gimplified or an error
6444 occurs. */
6446 enum gimplify_status
6447 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6448 bool (*gimple_test_f) (tree), fallback_t fallback)
6450 tree tmp;
6451 gimple_seq internal_pre = NULL;
6452 gimple_seq internal_post = NULL;
6453 tree save_expr;
6454 bool is_statement;
6455 location_t saved_location;
6456 enum gimplify_status ret;
6457 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6459 save_expr = *expr_p;
6460 if (save_expr == NULL_TREE)
6461 return GS_ALL_DONE;
6463 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6464 is_statement = gimple_test_f == is_gimple_stmt;
6465 if (is_statement)
6466 gcc_assert (pre_p);
6468 /* Consistency checks. */
6469 if (gimple_test_f == is_gimple_reg)
6470 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6471 else if (gimple_test_f == is_gimple_val
6472 || gimple_test_f == is_gimple_call_addr
6473 || gimple_test_f == is_gimple_condexpr
6474 || gimple_test_f == is_gimple_mem_rhs
6475 || gimple_test_f == is_gimple_mem_rhs_or_call
6476 || gimple_test_f == is_gimple_reg_rhs
6477 || gimple_test_f == is_gimple_reg_rhs_or_call
6478 || gimple_test_f == is_gimple_asm_val)
6479 gcc_assert (fallback & fb_rvalue);
6480 else if (gimple_test_f == is_gimple_min_lval
6481 || gimple_test_f == is_gimple_lvalue)
6482 gcc_assert (fallback & fb_lvalue);
6483 else if (gimple_test_f == is_gimple_addressable)
6484 gcc_assert (fallback & fb_either);
6485 else if (gimple_test_f == is_gimple_stmt)
6486 gcc_assert (fallback == fb_none);
6487 else
6489 /* We should have recognized the GIMPLE_TEST_F predicate to
6490 know what kind of fallback to use in case a temporary is
6491 needed to hold the value or address of *EXPR_P. */
6492 gcc_unreachable ();
6495 /* We used to check the predicate here and return immediately if it
6496 succeeds. This is wrong; the design is for gimplification to be
6497 idempotent, and for the predicates to only test for valid forms, not
6498 whether they are fully simplified. */
6499 if (pre_p == NULL)
6500 pre_p = &internal_pre;
6502 if (post_p == NULL)
6503 post_p = &internal_post;
6505 /* Remember the last statements added to PRE_P and POST_P. Every
6506 new statement added by the gimplification helpers needs to be
6507 annotated with location information. To centralize the
6508 responsibility, we remember the last statement that had been
6509 added to both queues before gimplifying *EXPR_P. If
6510 gimplification produces new statements in PRE_P and POST_P, those
6511 statements will be annotated with the same location information
6512 as *EXPR_P. */
6513 pre_last_gsi = gsi_last (*pre_p);
6514 post_last_gsi = gsi_last (*post_p);
6516 saved_location = input_location;
6517 if (save_expr != error_mark_node
6518 && EXPR_HAS_LOCATION (*expr_p))
6519 input_location = EXPR_LOCATION (*expr_p);
6521 /* Loop over the specific gimplifiers until the toplevel node
6522 remains the same. */
do
6525 /* Strip away as many useless type conversions as possible
6526 at the toplevel. */
6527 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6529 /* Remember the expr. */
6530 save_expr = *expr_p;
6532 /* Die, die, die, my darling. */
6533 if (save_expr == error_mark_node
6534 || (TREE_TYPE (save_expr)
6535 && TREE_TYPE (save_expr) == error_mark_node))
6537 ret = GS_ERROR;
6538 break;
6541 /* Do any language-specific gimplification. */
6542 ret = ((enum gimplify_status)
6543 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6544 if (ret == GS_OK)
6546 if (*expr_p == NULL_TREE)
6547 break;
6548 if (*expr_p != save_expr)
6549 continue;
6551 else if (ret != GS_UNHANDLED)
6552 break;
6554 ret = GS_OK;
6555 switch (TREE_CODE (*expr_p))
6557 /* First deal with the special cases. */
6559 case POSTINCREMENT_EXPR:
6560 case POSTDECREMENT_EXPR:
6561 case PREINCREMENT_EXPR:
6562 case PREDECREMENT_EXPR:
6563 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6564 fallback != fb_none);
6565 break;
6567 case ARRAY_REF:
6568 case ARRAY_RANGE_REF:
6569 case REALPART_EXPR:
6570 case IMAGPART_EXPR:
6571 case COMPONENT_REF:
6572 case VIEW_CONVERT_EXPR:
6573 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6574 fallback ? fallback : fb_rvalue);
6575 break;
6577 case COND_EXPR:
6578 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6580 /* C99 code may assign to an array in a structure value of a
6581 conditional expression, and this has undefined behavior
6582 only on execution, so create a temporary if an lvalue is
6583 required. */
6584 if (fallback == fb_lvalue)
6586 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6587 mark_addressable (*expr_p);
6589 break;
6591 case CALL_EXPR:
6592 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6594 /* C99 code may assign to an array in a structure returned
6595 from a function, and this has undefined behavior only on
6596 execution, so create a temporary if an lvalue is
6597 required. */
6598 if (fallback == fb_lvalue)
6600 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6601 mark_addressable (*expr_p);
6603 break;
6605 case TREE_LIST:
6606 gcc_unreachable ();
6608 case COMPOUND_EXPR:
6609 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6610 break;
6612 case COMPOUND_LITERAL_EXPR:
6613 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6614 break;
6616 case MODIFY_EXPR:
6617 case INIT_EXPR:
6619 tree from = TREE_OPERAND (*expr_p, 1);
6620 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6621 fallback != fb_none);
6622 /* Don't let the end of loop logic change GS_OK into GS_ALL_DONE
6623 if the RHS has changed. */
6624 if (ret == GS_OK && *expr_p == save_expr
6625 && TREE_OPERAND (*expr_p, 1) != from)
6626 continue;
6628 break;
6630 case TRUTH_ANDIF_EXPR:
6631 case TRUTH_ORIF_EXPR:
6632 /* Pass the source location of the outer expression. */
6633 ret = gimplify_boolean_expr (expr_p, saved_location);
6634 break;
6636 case TRUTH_NOT_EXPR:
6637 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6639 tree type = TREE_TYPE (*expr_p);
6640 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6641 ret = GS_OK;
6642 break;
6645 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6646 is_gimple_val, fb_rvalue);
6647 recalculate_side_effects (*expr_p);
6648 break;
6650 case ADDR_EXPR:
6651 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6652 break;
6654 case VA_ARG_EXPR:
6655 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6656 break;
6658 CASE_CONVERT:
6659 if (IS_EMPTY_STMT (*expr_p))
6661 ret = GS_ALL_DONE;
6662 break;
6665 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6666 || fallback == fb_none)
6668 /* Just strip a conversion to void (or in void context) and
6669 try again. */
6670 *expr_p = TREE_OPERAND (*expr_p, 0);
6671 break;
6674 ret = gimplify_conversion (expr_p);
6675 if (ret == GS_ERROR)
6676 break;
6677 if (*expr_p != save_expr)
6678 break;
6679 /* FALLTHRU */
6681 case FIX_TRUNC_EXPR:
6682 /* unary_expr: ... | '(' cast ')' val | ... */
6683 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6684 is_gimple_val, fb_rvalue);
6685 recalculate_side_effects (*expr_p);
6686 break;
6688 case INDIRECT_REF:
6689 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6690 if (*expr_p != save_expr)
6691 break;
6692 /* else fall through. */
6693 case ALIGN_INDIRECT_REF:
6694 case MISALIGNED_INDIRECT_REF:
6695 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6696 is_gimple_reg, fb_rvalue);
6697 recalculate_side_effects (*expr_p);
6698 break;
6700 /* Constants need not be gimplified. */
6701 case INTEGER_CST:
6702 case REAL_CST:
6703 case FIXED_CST:
6704 case STRING_CST:
6705 case COMPLEX_CST:
6706 case VECTOR_CST:
6707 ret = GS_ALL_DONE;
6708 break;
6710 case CONST_DECL:
6711 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6712 CONST_DECL node. Otherwise the decl is replaceable by its
6713 value. */
6714 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6715 if (fallback & fb_lvalue)
6716 ret = GS_ALL_DONE;
6717 else
6718 *expr_p = DECL_INITIAL (*expr_p);
6719 break;
6721 case DECL_EXPR:
6722 ret = gimplify_decl_expr (expr_p, pre_p);
6723 break;
6725 case BIND_EXPR:
6726 ret = gimplify_bind_expr (expr_p, pre_p);
6727 break;
6729 case LOOP_EXPR:
6730 ret = gimplify_loop_expr (expr_p, pre_p);
6731 break;
6733 case SWITCH_EXPR:
6734 ret = gimplify_switch_expr (expr_p, pre_p);
6735 break;
6737 case EXIT_EXPR:
6738 ret = gimplify_exit_expr (expr_p);
6739 break;
6741 case GOTO_EXPR:
6742 /* If the target is not a LABEL_DECL, then it is a computed jump
6743 and the target needs to be gimplified. */
6744 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6746 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6747 NULL, is_gimple_val, fb_rvalue);
6748 if (ret == GS_ERROR)
6749 break;
6751 gimplify_seq_add_stmt (pre_p,
6752 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6753 break;
6755 case PREDICT_EXPR:
6756 gimplify_seq_add_stmt (pre_p,
6757 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6758 PREDICT_EXPR_OUTCOME (*expr_p)));
6759 ret = GS_ALL_DONE;
6760 break;
6762 case LABEL_EXPR:
6763 ret = GS_ALL_DONE;
6764 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6765 == current_function_decl);
6766 gimplify_seq_add_stmt (pre_p,
6767 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6768 break;
6770 case CASE_LABEL_EXPR:
6771 ret = gimplify_case_label_expr (expr_p, pre_p);
6772 break;
6774 case RETURN_EXPR:
6775 ret = gimplify_return_expr (*expr_p, pre_p);
6776 break;
6778 case CONSTRUCTOR:
6779 /* Don't reduce this in place; let gimplify_init_constructor work its
6780 magic. But if we're just elaborating this for side effects, just
6781 gimplify any element that has side-effects. */
6782 if (fallback == fb_none)
6784 unsigned HOST_WIDE_INT ix;
6785 constructor_elt *ce;
6786 tree temp = NULL_TREE;
6787 for (ix = 0;
6788 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6789 ix, ce);
6790 ix++)
6791 if (TREE_SIDE_EFFECTS (ce->value))
6792 append_to_statement_list (ce->value, &temp);
6794 *expr_p = temp;
6795 ret = GS_OK;
6797 /* C99 code may assign to an array in a constructed
6798 structure or union, and this has undefined behavior only
6799 on execution, so create a temporary if an lvalue is
6800 required. */
6801 else if (fallback == fb_lvalue)
6803 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6804 mark_addressable (*expr_p);
6806 else
6807 ret = GS_ALL_DONE;
6808 break;
6810 /* The following are special cases that are not handled by the
6811 original GIMPLE grammar. */
6813 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6814 eliminated. */
6815 case SAVE_EXPR:
6816 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6817 break;
6819 case BIT_FIELD_REF:
6821 enum gimplify_status r0, r1, r2;
6823 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6824 post_p, is_gimple_lvalue, fb_either);
6825 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6826 post_p, is_gimple_val, fb_rvalue);
6827 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6828 post_p, is_gimple_val, fb_rvalue);
6829 recalculate_side_effects (*expr_p);
6831 ret = MIN (r0, MIN (r1, r2));
6833 break;
6835 case TARGET_MEM_REF:
6837 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6839 if (TMR_SYMBOL (*expr_p))
6840 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6841 post_p, is_gimple_lvalue, fb_either);
6842 else if (TMR_BASE (*expr_p))
6843 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6844 post_p, is_gimple_val, fb_either);
6845 if (TMR_INDEX (*expr_p))
6846 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6847 post_p, is_gimple_val, fb_rvalue);
6848 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6849 ret = MIN (r0, r1);
6851 break;
6853 case NON_LVALUE_EXPR:
6854 /* This should have been stripped above. */
6855 gcc_unreachable ();
6857 case ASM_EXPR:
6858 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6859 break;
6861 case TRY_FINALLY_EXPR:
6862 case TRY_CATCH_EXPR:
6864 gimple_seq eval, cleanup;
6865 gimple try_;
6867 eval = cleanup = NULL;
6868 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6869 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6870 /* Don't create a bogus GIMPLE_TRY with an empty cleanup. */
6871 if (gimple_seq_empty_p (cleanup))
6873 gimple_seq_add_seq (pre_p, eval);
6874 ret = GS_ALL_DONE;
6875 break;
6877 try_ = gimple_build_try (eval, cleanup,
6878 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6879 ? GIMPLE_TRY_FINALLY
6880 : GIMPLE_TRY_CATCH);
6881 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6882 gimple_try_set_catch_is_cleanup (try_,
6883 TRY_CATCH_IS_CLEANUP (*expr_p));
6884 gimplify_seq_add_stmt (pre_p, try_);
6885 ret = GS_ALL_DONE;
6886 break;
6889 case CLEANUP_POINT_EXPR:
6890 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6891 break;
6893 case TARGET_EXPR:
6894 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6895 break;
6897 case CATCH_EXPR:
6899 gimple c;
6900 gimple_seq handler = NULL;
6901 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6902 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6903 gimplify_seq_add_stmt (pre_p, c);
6904 ret = GS_ALL_DONE;
6905 break;
6908 case EH_FILTER_EXPR:
6910 gimple ehf;
6911 gimple_seq failure = NULL;
6913 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6914 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6915 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6916 gimplify_seq_add_stmt (pre_p, ehf);
6917 ret = GS_ALL_DONE;
6918 break;
6921 case OBJ_TYPE_REF:
6923 enum gimplify_status r0, r1;
6924 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6925 post_p, is_gimple_val, fb_rvalue);
6926 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6927 post_p, is_gimple_val, fb_rvalue);
6928 TREE_SIDE_EFFECTS (*expr_p) = 0;
6929 ret = MIN (r0, r1);
6931 break;
6933 case LABEL_DECL:
6934 /* We get here when taking the address of a label. We mark
6935 the label as "forced", meaning it can never be removed and
6936 it is a potential target for any computed goto. */
6937 FORCED_LABEL (*expr_p) = 1;
6938 ret = GS_ALL_DONE;
6939 break;
6941 case STATEMENT_LIST:
6942 ret = gimplify_statement_list (expr_p, pre_p);
6943 break;
6945 case WITH_SIZE_EXPR:
6947 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6948 post_p == &internal_post ? NULL : post_p,
6949 gimple_test_f, fallback);
6950 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6951 is_gimple_val, fb_rvalue);
6953 break;
6955 case VAR_DECL:
6956 case PARM_DECL:
6957 ret = gimplify_var_or_parm_decl (expr_p);
6958 break;
6960 case RESULT_DECL:
6961 /* When within an OpenMP context, notice uses of variables. */
6962 if (gimplify_omp_ctxp)
6963 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6964 ret = GS_ALL_DONE;
6965 break;
6967 case SSA_NAME:
6968 /* Allow callbacks into the gimplifier during optimization. */
6969 ret = GS_ALL_DONE;
6970 break;
6972 case OMP_PARALLEL:
6973 gimplify_omp_parallel (expr_p, pre_p);
6974 ret = GS_ALL_DONE;
6975 break;
6977 case OMP_TASK:
6978 gimplify_omp_task (expr_p, pre_p);
6979 ret = GS_ALL_DONE;
6980 break;
6982 case OMP_FOR:
6983 ret = gimplify_omp_for (expr_p, pre_p);
6984 break;
6986 case OMP_SECTIONS:
6987 case OMP_SINGLE:
6988 gimplify_omp_workshare (expr_p, pre_p);
6989 ret = GS_ALL_DONE;
6990 break;
6992 case OMP_SECTION:
6993 case OMP_MASTER:
6994 case OMP_ORDERED:
6995 case OMP_CRITICAL:
6997 gimple_seq body = NULL;
6998 gimple g;
7000 gimplify_and_add (OMP_BODY (*expr_p), &body);
7001 switch (TREE_CODE (*expr_p))
7003 case OMP_SECTION:
7004 g = gimple_build_omp_section (body);
7005 break;
7006 case OMP_MASTER:
7007 g = gimple_build_omp_master (body);
7008 break;
7009 case OMP_ORDERED:
7010 g = gimple_build_omp_ordered (body);
7011 break;
7012 case OMP_CRITICAL:
7013 g = gimple_build_omp_critical (body,
7014 OMP_CRITICAL_NAME (*expr_p));
7015 break;
7016 default:
7017 gcc_unreachable ();
7019 gimplify_seq_add_stmt (pre_p, g);
7020 ret = GS_ALL_DONE;
7021 break;
7024 case OMP_ATOMIC:
7025 ret = gimplify_omp_atomic (expr_p, pre_p);
7026 break;
7028 case POINTER_PLUS_EXPR:
7029 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
7030 The second form is a gimple immediate, saving the need for an extra statement. */
7032 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7033 && (tmp = maybe_fold_offset_to_address
7034 (EXPR_LOCATION (*expr_p),
7035 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
7036 TREE_TYPE (*expr_p))))
7038 *expr_p = tmp;
7039 break;
7041 /* Convert (void *)&a + 4 into (void *)&a[1]. */
7042 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
7043 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7044 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
7045 0),0)))
7046 && (tmp = maybe_fold_offset_to_address
7047 (EXPR_LOCATION (*expr_p),
7048 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
7049 TREE_OPERAND (*expr_p, 1),
7050 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
7051 0)))))
7053 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
7054 break;
7056 /* FALLTHRU */
7058 default:
7059 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7061 case tcc_comparison:
7062 /* Handle comparison of non-scalar-mode aggregate objects
7063 with a call to memcmp. It would be nice to only have to do
7064 this for variable-sized objects, but then we'd have to allow
7065 the same nest of reference nodes we allow for MODIFY_EXPR and
7066 that's too complex.
7068 Compare scalar mode aggregates as scalar mode values. Using
7069 memcmp for them would be very inefficient at best, and is
7070 plain wrong if bitfields are involved. */
7072 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7074 if (!AGGREGATE_TYPE_P (type))
7075 goto expr_2;
7076 else if (TYPE_MODE (type) != BLKmode)
7077 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7078 else
7079 ret = gimplify_variable_sized_compare (expr_p);
7081 break;
7084 /* If *EXPR_P does not need to be special-cased, handle it
7085 according to its class. */
7086 case tcc_unary:
7087 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7088 post_p, is_gimple_val, fb_rvalue);
7089 break;
7091 case tcc_binary:
7092 expr_2:
7094 enum gimplify_status r0, r1;
7096 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7097 post_p, is_gimple_val, fb_rvalue);
7098 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7099 post_p, is_gimple_val, fb_rvalue);
7101 ret = MIN (r0, r1);
7102 break;
7105 case tcc_declaration:
7106 case tcc_constant:
7107 ret = GS_ALL_DONE;
7108 goto dont_recalculate;
7110 default:
7111 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7112 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7113 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7114 goto expr_2;
7117 recalculate_side_effects (*expr_p);
7119 dont_recalculate:
7120 break;
7123 /* If we replaced *expr_p, gimplify again. */
7124 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7125 ret = GS_ALL_DONE;
7127 while (ret == GS_OK);
7129 /* If we encountered an error_mark somewhere nested inside, either
7130 stub out the statement or propagate the error back out. */
7131 if (ret == GS_ERROR)
7133 if (is_statement)
7134 *expr_p = NULL;
7135 goto out;
7138 /* This was only valid as a return value from the langhook, which
7139 we handled. Make sure it doesn't escape from any other context. */
7140 gcc_assert (ret != GS_UNHANDLED);
7142 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7144 /* We aren't looking for a value, and we don't have a valid
7145 statement. If it doesn't have side-effects, throw it away. */
7146 if (!TREE_SIDE_EFFECTS (*expr_p))
7147 *expr_p = NULL;
7148 else if (!TREE_THIS_VOLATILE (*expr_p))
7150 /* This is probably a _REF that contains something nested that
7151 has side effects. Recurse through the operands to find it. */
7152 enum tree_code code = TREE_CODE (*expr_p);
7154 switch (code)
7156 case COMPONENT_REF:
7157 case REALPART_EXPR:
7158 case IMAGPART_EXPR:
7159 case VIEW_CONVERT_EXPR:
7160 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7161 gimple_test_f, fallback);
7162 break;
7164 case ARRAY_REF:
7165 case ARRAY_RANGE_REF:
7166 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7167 gimple_test_f, fallback);
7168 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7169 gimple_test_f, fallback);
7170 break;
7172 default:
7173 /* Anything else with side-effects must be converted to
7174 a valid statement before we get here. */
7175 gcc_unreachable ();
7178 *expr_p = NULL;
7180 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7181 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7183 /* Historically, the compiler has treated a bare reference
7184 to a non-BLKmode volatile lvalue as forcing a load. */
7185 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7187 /* Normally, we do not want to create a temporary for a
7188 TREE_ADDRESSABLE type because such a type should not be
7189 copied by bitwise-assignment. However, we make an
7190 exception here, as all we are doing is ensuring that
7191 we read the bytes that make up the type. We use
7192 create_tmp_var_raw because create_tmp_var will abort when
7193 given a TREE_ADDRESSABLE type. */
7194 tree tmp = create_tmp_var_raw (type, "vol");
7195 gimple_add_tmp_var (tmp);
7196 gimplify_assign (tmp, *expr_p, pre_p);
7197 *expr_p = NULL;
7199 else
7200 /* We can't do anything useful with a volatile reference to
7201 an incomplete type, so just throw it away. Likewise for
7202 a BLKmode type, since any implicit inner load should
7203 already have been turned into an explicit one by the
7204 gimplification process. */
7205 *expr_p = NULL;
7208 /* If we are gimplifying at the statement level, we're done. Tack
7209 everything together and return. */
7210 if (fallback == fb_none || is_statement)
7212 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7213 it out for GC to reclaim it. */
7214 *expr_p = NULL_TREE;
7216 if (!gimple_seq_empty_p (internal_pre)
7217 || !gimple_seq_empty_p (internal_post))
7219 gimplify_seq_add_seq (&internal_pre, internal_post);
7220 gimplify_seq_add_seq (pre_p, internal_pre);
7223 /* The result of gimplifying *EXPR_P is going to be the last few
7224 statements in *PRE_P and *POST_P. Add location information
7225 to all the statements that were added by the gimplification
7226 helpers. */
7227 if (!gimple_seq_empty_p (*pre_p))
7228 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7230 if (!gimple_seq_empty_p (*post_p))
7231 annotate_all_with_location_after (*post_p, post_last_gsi,
7232 input_location);
7234 goto out;
7237 #ifdef ENABLE_GIMPLE_CHECKING
7238 if (*expr_p)
7240 enum tree_code code = TREE_CODE (*expr_p);
7241 /* These expressions should already be in gimple IR form. */
7242 gcc_assert (code != MODIFY_EXPR
7243 && code != ASM_EXPR
7244 && code != BIND_EXPR
7245 && code != CATCH_EXPR
7246 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7247 && code != EH_FILTER_EXPR
7248 && code != GOTO_EXPR
7249 && code != LABEL_EXPR
7250 && code != LOOP_EXPR
7251 && code != SWITCH_EXPR
7252 && code != TRY_FINALLY_EXPR
7253 && code != OMP_CRITICAL
7254 && code != OMP_FOR
7255 && code != OMP_MASTER
7256 && code != OMP_ORDERED
7257 && code != OMP_PARALLEL
7258 && code != OMP_SECTIONS
7259 && code != OMP_SECTION
7260 && code != OMP_SINGLE);
7262 #endif
7264 /* Otherwise we're gimplifying a subexpression, so the resulting
7265 value is interesting. If it's a valid operand that matches
7266 GIMPLE_TEST_F, we're done. Unless we are handling some
7267 post-effects internally; if that's the case, we need to copy into
7268 a temporary before adding the post-effects to POST_P. */
7269 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7270 goto out;
7272 /* Otherwise, we need to create a new temporary for the gimplified
7273 expression. */
7275 /* We can't return an lvalue if we have an internal postqueue. The
7276 object the lvalue refers to would (probably) be modified by the
7277 postqueue; we need to copy the value out first, which means an
7278 rvalue. */
7279 if ((fallback & fb_lvalue)
7280 && gimple_seq_empty_p (internal_post)
7281 && is_gimple_addressable (*expr_p))
7283 /* An lvalue will do. Take the address of the expression, store it
7284 in a temporary, and replace the expression with an INDIRECT_REF of
7285 that temporary. */
7286 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7287 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7288 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7290 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7292 /* An rvalue will do. Assign the gimplified expression into a
7293 new temporary TMP and replace the original expression with
7294 TMP. First, make sure that the expression has a type so that
7295 it can be assigned into a temporary. */
7296 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7298 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7299 /* The postqueue might change the value of the expression between
7300 the initialization and use of the temporary, so we can't use a
7301 formal temp. FIXME do we care? */
7303 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7304 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7305 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7306 DECL_GIMPLE_REG_P (*expr_p) = 1;
7308 else
7309 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7311 else
7313 #ifdef ENABLE_GIMPLE_CHECKING
7314 if (!(fallback & fb_mayfail))
7316 fprintf (stderr, "gimplification failed:\n");
7317 print_generic_expr (stderr, *expr_p, 0);
7318 debug_tree (*expr_p);
7319 internal_error ("gimplification failed");
7321 #endif
7322 gcc_assert (fallback & fb_mayfail);
7324 /* If this is an asm statement, and the user asked for the
7325 impossible, don't die. Fail and let gimplify_asm_expr
7326 issue an error. */
7327 ret = GS_ERROR;
7328 goto out;
7331 /* Make sure the temporary matches our predicate. */
7332 gcc_assert ((*gimple_test_f) (*expr_p));
7334 if (!gimple_seq_empty_p (internal_post))
7336 annotate_all_with_location (internal_post, input_location);
7337 gimplify_seq_add_seq (pre_p, internal_post);
7340 out:
7341 input_location = saved_location;
7342 return ret;
7345 /* Look through TYPE for variable-sized objects and gimplify each such
7346 size that we find. Add to LIST_P any statements generated. */
7348 void
7349 gimplify_type_sizes (tree type, gimple_seq *list_p)
7351 tree field, t;
7353 if (type == NULL || type == error_mark_node)
7354 return;
7356 /* We first do the main variant, then copy into any other variants. */
7357 type = TYPE_MAIN_VARIANT (type);
7359 /* Avoid infinite recursion. */
7360 if (TYPE_SIZES_GIMPLIFIED (type))
7361 return;
7363 TYPE_SIZES_GIMPLIFIED (type) = 1;
7365 switch (TREE_CODE (type))
7367 case INTEGER_TYPE:
7368 case ENUMERAL_TYPE:
7369 case BOOLEAN_TYPE:
7370 case REAL_TYPE:
7371 case FIXED_POINT_TYPE:
7372 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7373 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
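/* Propagate the gimplified bounds to the other variants of the type.  */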
7375 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7377 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7378 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7380 break;
7382 case ARRAY_TYPE:
7383 /* These types may not have declarations, so handle them here. */
7384 gimplify_type_sizes (TREE_TYPE (type), list_p);
7385 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7386 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
7387 with assigned stack slots; for -O1+ -g they should be tracked
7388 by VTA. */
7389 if (TYPE_DOMAIN (type)
7390 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7392 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7393 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7394 DECL_IGNORED_P (t) = 0;
7395 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7396 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7397 DECL_IGNORED_P (t) = 0;
7399 break;
7401 case RECORD_TYPE:
7402 case UNION_TYPE:
7403 case QUAL_UNION_TYPE:
7404 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7405 if (TREE_CODE (field) == FIELD_DECL)
7407 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7408 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7409 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7410 gimplify_type_sizes (TREE_TYPE (field), list_p);
7412 break;
7414 case POINTER_TYPE:
7415 case REFERENCE_TYPE:
7416 /* We used to recurse on the pointed-to type here, which turned out to
7417 be incorrect because its definition might refer to variables not
7418 yet initialized at this point if a forward declaration is involved.
7420 It was actually useful for anonymous pointed-to types to ensure
7421 that the sizes evaluation dominates every possible later use of the
7422 values. Restricting to such types here would be safe since there
7423 is no possible forward declaration around, but would introduce an
7424 undesirable middle-end semantic to anonymity. We therefore defer to
7425 the front ends the responsibility of ensuring that the sizes are
7426 evaluated both early and late enough, e.g. by attaching artificial
7427 type declarations to the tree. */
7428 break;
7430 default:
7431 break;
7434 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7435 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7437 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7439 TYPE_SIZE (t) = TYPE_SIZE (type);
7440 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7441 TYPE_SIZES_GIMPLIFIED (t) = 1;
7445 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7446 a size or position, has had all of its SAVE_EXPRs evaluated.
7447 We add any required statements to *STMT_P. */
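/* Editor's note (illustrative addition): a size or position handled here is
   typically something like SAVE_EXPR <n * 4> for the DECL_SIZE_UNIT of a
   variable-sized object, or the DECL_FIELD_OFFSET of a field that follows a
   variable-sized one; gimplify_expr below reduces it to a GIMPLE value and
   appends the computation to *STMT_P.  */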
7449 void
7450 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7452 tree type, expr = *expr_p;
7454 /* We don't do anything if the value isn't there, is constant, or contains
7455 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7456 a VAR_DECL: if it's a VAR_DECL from another function, the gimplifier
7457 would want to replace it with a new variable, and that would cause problems
7458 if this type came from outside the function. It's OK to leave it as-is here. */
7459 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7460 || TREE_CODE (expr) == VAR_DECL
7461 || CONTAINS_PLACEHOLDER_P (expr))
7462 return;
7464 type = TREE_TYPE (expr);
7465 *expr_p = unshare_expr (expr);
7467 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7468 expr = *expr_p;
7470 /* Verify that we have an exact type match with the original expression.
7471 In particular, we do not wish to drop a "sizetype" in favour of a
7472 type of similar dimensions. We don't want to pollute the generic
7473 type-stripping code with this knowledge because it doesn't matter
7474 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7475 and friends retain their "sizetype-ness". */
7476 if (TREE_TYPE (expr) != type
7477 && TREE_CODE (type) == INTEGER_TYPE
7478 && TYPE_IS_SIZETYPE (type))
7480 tree tmp;
7481 gimple stmt;
7483 *expr_p = create_tmp_var (type, NULL);
7484 tmp = build1 (NOP_EXPR, type, expr);
7485 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7486 if (EXPR_HAS_LOCATION (expr))
7487 gimple_set_location (stmt, EXPR_LOCATION (expr));
7488 else
7489 gimple_set_location (stmt, input_location);
7494 /* Gimplify the body of statements pointed to by BODY_P and return a
7495 GIMPLE_BIND containing the sequence of GIMPLE statements
7496 corresponding to BODY_P. FNDECL is the function decl containing
7497 *BODY_P. */
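/* Editor's note (illustrative addition): gimplification flattens GENERIC
   expressions into simple three-address statements.  For example a body
   containing

       a = b + c * d;

   becomes, roughly,

       D.1235 = c * d;      (temporary name hypothetical)
       a = b + D.1235;

   all wrapped in the single outer GIMPLE_BIND that this function returns.  */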
7499 gimple
7500 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7502 location_t saved_location = input_location;
7503 gimple_seq parm_stmts, seq;
7504 gimple outer_bind;
7505 struct gimplify_ctx gctx;
7507 timevar_push (TV_TREE_GIMPLIFY);
7509 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7510 gimplification. */
7511 default_rtl_profile ();
7513 gcc_assert (gimplify_ctxp == NULL);
7514 push_gimplify_context (&gctx);
7516 /* Unshare most shared trees in the body and in that of any nested functions.
7517 It would seem we don't have to do this for nested functions because
7518 they are supposed to be output and then the outer function gimplified
7519 first, but the g++ front end doesn't always do it that way. */
7520 unshare_body (body_p, fndecl);
7521 unvisit_body (body_p, fndecl);
7523 if (cgraph_node (fndecl)->origin)
7524 nonlocal_vlas = pointer_set_create ();
7526 /* Make sure input_location isn't set to something weird. */
7527 input_location = DECL_SOURCE_LOCATION (fndecl);
7529 /* Resolve callee-copies. This has to be done before processing
7530 the body so that DECL_VALUE_EXPR gets processed correctly. */
7531 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7533 /* Gimplify the function's body. */
7534 seq = NULL;
7535 gimplify_stmt (body_p, &seq);
7536 outer_bind = gimple_seq_first_stmt (seq);
7537 if (!outer_bind)
7539 outer_bind = gimple_build_nop ();
7540 gimplify_seq_add_stmt (&seq, outer_bind);
7543 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7544 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7545 if (gimple_code (outer_bind) == GIMPLE_BIND
7546 && gimple_seq_first (seq) == gimple_seq_last (seq))
7548 else
7549 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7551 *body_p = NULL_TREE;
7553 /* If we had callee-copies statements, insert them at the beginning
7554 of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters. */
7555 if (!gimple_seq_empty_p (parm_stmts))
7557 tree parm;
7559 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7560 gimple_bind_set_body (outer_bind, parm_stmts);
7562 for (parm = DECL_ARGUMENTS (current_function_decl);
7563 parm; parm = TREE_CHAIN (parm))
7564 if (DECL_HAS_VALUE_EXPR_P (parm))
7566 DECL_HAS_VALUE_EXPR_P (parm) = 0;
7567 DECL_IGNORED_P (parm) = 0;
7571 if (nonlocal_vlas)
7573 pointer_set_destroy (nonlocal_vlas);
7574 nonlocal_vlas = NULL;
7577 pop_gimplify_context (outer_bind);
7578 gcc_assert (gimplify_ctxp == NULL);
7580 #ifdef ENABLE_TYPES_CHECKING
7581 if (!errorcount && !sorrycount)
7582 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7583 #endif
7585 timevar_pop (TV_TREE_GIMPLIFY);
7586 input_location = saved_location;
7588 return outer_bind;
7591 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7592 node for the function we want to gimplify.
7594 The resulting GIMPLE body corresponding to the body of FNDECL is
7595 installed on FNDECL with gimple_set_body. */
7597 void
7598 gimplify_function_tree (tree fndecl)
7600 tree oldfn, parm, ret;
7601 gimple_seq seq;
7602 gimple bind;
7604 gcc_assert (!gimple_body (fndecl));
7606 oldfn = current_function_decl;
7607 current_function_decl = fndecl;
7608 if (DECL_STRUCT_FUNCTION (fndecl))
7609 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7610 else
7611 push_struct_function (fndecl);
7613 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7615 /* Preliminarily mark non-addressed complex variables as eligible
7616 for promotion to gimple registers. We'll transform their uses
7617 as we find them. */
7618 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7619 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7620 && !TREE_THIS_VOLATILE (parm)
7621 && !needs_to_live_in_memory (parm))
7622 DECL_GIMPLE_REG_P (parm) = 1;
7625 ret = DECL_RESULT (fndecl);
7626 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7627 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7628 && !needs_to_live_in_memory (ret))
7629 DECL_GIMPLE_REG_P (ret) = 1;
7631 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7633 /* The tree body of the function is no longer needed; replace it
7634 with the new GIMPLE body. */
7635 seq = gimple_seq_alloc ();
7636 gimple_seq_add_stmt (&seq, bind);
7637 gimple_set_body (fndecl, seq);
7639 /* If we're instrumenting function entry/exit, then prepend the call to
7640 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7641 catch the exit hook. */
7642 /* ??? Add some way to ignore exceptions for this TFE. */
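/* Editor's note (illustrative addition): with -finstrument-functions the
   resulting GIMPLE is roughly

       __cyg_profile_func_enter (...);
       try { <original outer bind> } finally { __cyg_profile_func_exit (...); }

   built below from the BUILT_IN_PROFILE_FUNC_ENTER/EXIT decls and a
   GIMPLE_TRY_FINALLY statement.  */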
7643 if (flag_instrument_function_entry_exit
7644 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7645 && !flag_instrument_functions_exclude_p (fndecl))
7647 tree x;
7648 gimple new_bind;
7649 gimple tf;
7650 gimple_seq cleanup = NULL, body = NULL;
7652 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7653 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7654 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7656 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7657 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7658 gimplify_seq_add_stmt (&body, tf);
7659 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7660 /* Clear the block for BIND, since it is no longer directly inside
7661 the function, but within a try block. */
7662 gimple_bind_set_block (bind, NULL);
7664 /* Replace the current function body with the body
7665 wrapped in the try/finally TF. */
7666 seq = gimple_seq_alloc ();
7667 gimple_seq_add_stmt (&seq, new_bind);
7668 gimple_set_body (fndecl, seq);
7671 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7672 cfun->curr_properties = PROP_gimple_any;
7674 current_function_decl = oldfn;
7675 pop_cfun ();
7679 /* Some transformations like inlining may invalidate the GIMPLE form
7680 for operands. This function traverses all the operands in STMT and
7681 gimplifies anything that is not a valid gimple operand. Any new
7682 GIMPLE statements are inserted before *GSI_P. */
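/* Editor's note (illustrative addition): e.g. after inlining, substitution
   may leave a statement such as

       x = a + b * c;

   whose RHS is no longer a single valid GIMPLE operation; this routine
   rewrites it as

       D.1236 = b * c;      (temporary name hypothetical)
       x = a + D.1236;

   inserting the new statement before *GSI_P.  */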
7684 void
7685 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7687 size_t i, num_ops;
7688 tree orig_lhs = NULL_TREE, lhs, t;
7689 gimple_seq pre = NULL;
7690 gimple post_stmt = NULL;
7691 struct gimplify_ctx gctx;
7693 push_gimplify_context (&gctx);
7694 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7696 switch (gimple_code (stmt))
7698 case GIMPLE_COND:
7699 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7700 is_gimple_val, fb_rvalue);
7701 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7702 is_gimple_val, fb_rvalue);
7703 break;
7704 case GIMPLE_SWITCH:
7705 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7706 is_gimple_val, fb_rvalue);
7707 break;
7708 case GIMPLE_OMP_ATOMIC_LOAD:
7709 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7710 is_gimple_val, fb_rvalue);
7711 break;
7712 case GIMPLE_ASM:
7714 size_t i, noutputs = gimple_asm_noutputs (stmt);
7715 const char *constraint, **oconstraints;
7716 bool allows_mem, allows_reg, is_inout;
7718 oconstraints
7719 = (const char **) alloca ((noutputs) * sizeof (const char *));
7720 for (i = 0; i < noutputs; i++)
7722 tree op = gimple_asm_output_op (stmt, i);
7723 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7724 oconstraints[i] = constraint;
7725 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7726 &allows_reg, &is_inout);
7727 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7728 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7729 fb_lvalue | fb_mayfail);
7731 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7733 tree op = gimple_asm_input_op (stmt, i);
7734 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7735 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7736 oconstraints, &allows_mem, &allows_reg);
7737 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7738 allows_reg = 0;
7739 if (!allows_reg && allows_mem)
7740 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7741 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7742 else
7743 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7744 is_gimple_asm_val, fb_rvalue);
7747 break;
7748 default:
7749 /* NOTE: We start gimplifying operands from last to first to
7750 make sure that side-effects on the RHS of calls, assignments
7751 and ASMs are executed before the LHS. The ordering is not
7752 important for other statements. */
7753 num_ops = gimple_num_ops (stmt);
7754 orig_lhs = gimple_get_lhs (stmt);
7755 for (i = num_ops; i > 0; i--)
7757 tree op = gimple_op (stmt, i - 1);
7758 if (op == NULL_TREE)
7759 continue;
7760 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7761 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7762 else if (i == 2
7763 && is_gimple_assign (stmt)
7764 && num_ops == 2
7765 && get_gimple_rhs_class (gimple_expr_code (stmt))
7766 == GIMPLE_SINGLE_RHS)
7767 gimplify_expr (&op, &pre, NULL,
7768 rhs_predicate_for (gimple_assign_lhs (stmt)),
7769 fb_rvalue);
7770 else if (i == 2 && is_gimple_call (stmt))
7772 if (TREE_CODE (op) == FUNCTION_DECL)
7773 continue;
7774 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7776 else
7777 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7778 gimple_set_op (stmt, i - 1, op);
7781 lhs = gimple_get_lhs (stmt);
7782 /* If gimplification changed the LHS in a way that requires a simple RHS,
7783 create a temporary. */
7784 if (lhs && !is_gimple_reg (lhs))
7786 bool need_temp = false;
7788 if (is_gimple_assign (stmt)
7789 && num_ops == 2
7790 && get_gimple_rhs_class (gimple_expr_code (stmt))
7791 == GIMPLE_SINGLE_RHS)
7792 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7793 rhs_predicate_for (gimple_assign_lhs (stmt)),
7794 fb_rvalue);
7795 else if (is_gimple_reg (lhs))
7797 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7799 if (is_gimple_call (stmt))
7801 i = gimple_call_flags (stmt);
7802 if ((i & ECF_LOOPING_CONST_OR_PURE)
7803 || !(i & (ECF_CONST | ECF_PURE)))
7804 need_temp = true;
7806 if (stmt_can_throw_internal (stmt))
7807 need_temp = true;
7810 else
7812 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7813 need_temp = true;
7814 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7816 if (is_gimple_call (stmt))
7818 tree fndecl = gimple_call_fndecl (stmt);
7820 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7821 && !(fndecl && DECL_RESULT (fndecl)
7822 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7823 need_temp = true;
7825 else
7826 need_temp = true;
7829 if (need_temp)
7831 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
7833 if (TREE_CODE (orig_lhs) == SSA_NAME)
7834 orig_lhs = SSA_NAME_VAR (orig_lhs);
7836 if (gimple_in_ssa_p (cfun))
7837 temp = make_ssa_name (temp, NULL);
7838 gimple_set_lhs (stmt, temp);
7839 post_stmt = gimple_build_assign (lhs, temp);
7840 if (TREE_CODE (lhs) == SSA_NAME)
7841 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7844 break;
7847 if (gimple_referenced_vars (cfun))
7848 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7849 add_referenced_var (t);
7851 if (!gimple_seq_empty_p (pre))
7853 if (gimple_in_ssa_p (cfun))
7855 gimple_stmt_iterator i;
7857 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7858 mark_symbols_for_renaming (gsi_stmt (i));
7860 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7862 if (post_stmt)
7863 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7865 pop_gimplify_context (NULL);
7869 /* Expand EXPR into the sequence of GIMPLE statements STMTS. If SIMPLE
7870 is true, force the result to be either an SSA_NAME or an invariant;
7871 otherwise just force it to be a GIMPLE rhs expression. If VAR is not
7872 NULL, make VAR the base variable of the final destination, if suitable. */
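/* Editor's note (illustrative addition): a typical use from a pass is

       gimple_seq stmts = NULL;
       tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);

   after which VAL is a GIMPLE value and STMTS holds whatever statements
   were needed to compute it (the caller must insert them somewhere).  */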
7874 tree
7875 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7877 tree t;
7878 enum gimplify_status ret;
7879 gimple_predicate gimple_test_f;
7880 struct gimplify_ctx gctx;
7882 *stmts = NULL;
7884 if (is_gimple_val (expr))
7885 return expr;
7887 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7889 push_gimplify_context (&gctx);
7890 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7891 gimplify_ctxp->allow_rhs_cond_expr = true;
7893 if (var)
7894 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7896 if (TREE_CODE (expr) != MODIFY_EXPR
7897 && TREE_TYPE (expr) == void_type_node)
7899 gimplify_and_add (expr, stmts);
7900 expr = NULL_TREE;
7902 else
7904 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7905 gcc_assert (ret != GS_ERROR);
7908 if (gimple_referenced_vars (cfun))
7909 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7910 add_referenced_var (t);
7912 pop_gimplify_context (NULL);
7914 return expr;
7917 /* Invoke force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.
7918 If some statements are produced, emit them at GSI. If BEFORE is true,
7919 the statements are inserted before GSI, otherwise they are inserted after
7920 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT and
7921 GSI_CONTINUE_LINKING are the usual values). */
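/* Editor's note (illustrative addition): a common call pattern is

       t = force_gimple_operand_gsi (&gsi, expr, true, NULL_TREE,
                                     true, GSI_SAME_STMT);

   which gimplifies EXPR and inserts any supporting statements immediately
   before the statement at GSI, leaving GSI in place.  */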
7923 tree
7924 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7925 bool simple_p, tree var, bool before,
7926 enum gsi_iterator_update m)
7928 gimple_seq stmts;
7930 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7932 if (!gimple_seq_empty_p (stmts))
7934 if (gimple_in_ssa_p (cfun))
7936 gimple_stmt_iterator i;
7938 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7939 mark_symbols_for_renaming (gsi_stmt (i));
7942 if (before)
7943 gsi_insert_seq_before (gsi, stmts, m);
7944 else
7945 gsi_insert_seq_after (gsi, stmts, m);
7948 return expr;
7951 #include "gt-gimplify.h"