gcc/gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
55 #include "tree-pass.h"
58 enum gimplify_omp_var_data
60 GOVD_SEEN = 1,
61 GOVD_EXPLICIT = 2,
62 GOVD_SHARED = 4,
63 GOVD_PRIVATE = 8,
64 GOVD_FIRSTPRIVATE = 16,
65 GOVD_LASTPRIVATE = 32,
66 GOVD_REDUCTION = 64,
67 GOVD_LOCAL = 128,
68 GOVD_DEBUG_PRIVATE = 256,
69 GOVD_PRIVATE_OUTER_REF = 512,
70 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
71 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
75 enum omp_region_type
77 ORT_WORKSHARE = 0,
78 ORT_TASK = 1,
79 ORT_PARALLEL = 2,
80 ORT_COMBINED_PARALLEL = 3
83 struct gimplify_omp_ctx
85 struct gimplify_omp_ctx *outer_context;
86 splay_tree variables;
87 struct pointer_set_t *privatized_types;
88 location_t location;
89 enum omp_clause_default_kind default_kind;
90 enum omp_region_type region_type;
93 static struct gimplify_ctx *gimplify_ctxp;
94 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
97 /* Formal (expression) temporary table handling: Multiple occurrences of
98 the same scalar expression are evaluated into the same temporary. */
100 typedef struct gimple_temp_hash_elt
102 tree val; /* Key */
103 tree temp; /* Value */
104 } elt_t;
106 /* Forward declarations. */
107 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
109 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
110 form and we don't do any syntax checking. */
111 void
112 mark_addressable (tree x)
114 while (handled_component_p (x))
115 x = TREE_OPERAND (x, 0);
116 if (TREE_CODE (x) != VAR_DECL
117 && TREE_CODE (x) != PARM_DECL
118 && TREE_CODE (x) != RESULT_DECL)
119 return;
120 TREE_ADDRESSABLE (x) = 1;
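/* For example, before taking the address of a hypothetical local variable
   DECL (or of a component reference rooted at it), a caller would do:

     mark_addressable (decl);
     addr = build_fold_addr_expr (decl);

   so that later passes know DECL may have to live in memory.  */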
123 /* Return a hash value for a formal temporary table entry. */
125 static hashval_t
126 gimple_tree_hash (const void *p)
128 tree t = ((const elt_t *) p)->val;
129 return iterative_hash_expr (t, 0);
132 /* Compare two formal temporary table entries. */
134 static int
135 gimple_tree_eq (const void *p1, const void *p2)
137 tree t1 = ((const elt_t *) p1)->val;
138 tree t2 = ((const elt_t *) p2)->val;
139 enum tree_code code = TREE_CODE (t1);
141 if (TREE_CODE (t2) != code
142 || TREE_TYPE (t1) != TREE_TYPE (t2))
143 return 0;
145 if (!operand_equal_p (t1, t2, 0))
146 return 0;
148 /* Only allow them to compare equal if they also hash equal; otherwise
149 results are nondeterministic, and we fail the bootstrap comparison. */
150 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
152 return 1;
155 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
156 *SEQ_P is NULL, a new sequence is allocated. This function is
157 similar to gimple_seq_add_stmt, but does not scan the operands.
158 During gimplification, we need to manipulate statement sequences
159 before the def/use vectors have been constructed. */
161 static void
162 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
164 gimple_stmt_iterator si;
166 if (gs == NULL)
167 return;
169 if (*seq_p == NULL)
170 *seq_p = gimple_seq_alloc ();
172 si = gsi_last (*seq_p);
174 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
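/* A minimal usage sketch, assuming LAB is an artificial label created
   elsewhere with create_artificial_label:

     gimple_seq seq = NULL;
     gimplify_seq_add_stmt (&seq, gimple_build_label (lab));

   The sequence is allocated on first use and the statement is linked at
   its end without updating any operand caches.  */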
177 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
178 NULL, a new sequence is allocated. This function is
179 similar to gimple_seq_add_seq, but does not scan the operands.
180 During gimplification, we need to manipulate statement sequences
181 before the def/use vectors have been constructed. */
183 static void
184 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
186 gimple_stmt_iterator si;
188 if (src == NULL)
189 return;
191 if (*dst_p == NULL)
192 *dst_p = gimple_seq_alloc ();
194 si = gsi_last (*dst_p);
195 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
198 /* Set up a context for the gimplifier. */
200 void
201 push_gimplify_context (struct gimplify_ctx *c)
203 memset (c, '\0', sizeof (*c));
204 c->prev_context = gimplify_ctxp;
205 gimplify_ctxp = c;
208 /* Tear down a context for the gimplifier. If BODY is non-null, then
209 put the temporaries into the outer BIND_EXPR. Otherwise, put them
210 in the local_decls.
212 BODY is not a sequence, but the first tuple in a sequence. */
214 void
215 pop_gimplify_context (gimple body)
217 struct gimplify_ctx *c = gimplify_ctxp;
219 gcc_assert (c && (c->bind_expr_stack == NULL
220 || VEC_empty (gimple, c->bind_expr_stack)));
221 VEC_free (gimple, heap, c->bind_expr_stack);
222 gimplify_ctxp = c->prev_context;
224 if (body)
225 declare_vars (c->temps, body, false);
226 else
227 record_vars (c->temps);
229 if (c->temp_htab)
230 htab_delete (c->temp_htab);
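/* The two functions above are used as a bracketing pair.  A minimal
   sketch, assuming BODY is the outermost GIMPLE_BIND produced for the
   function being gimplified:

     struct gimplify_ctx gctx;

     push_gimplify_context (&gctx);
     ... gimplify the function body, creating temporaries ...
     pop_gimplify_context (body);

   Any temporaries created in between end up declared in BODY.  */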
233 static void
234 gimple_push_bind_expr (gimple gimple_bind)
236 if (gimplify_ctxp->bind_expr_stack == NULL)
237 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
238 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
241 static void
242 gimple_pop_bind_expr (void)
244 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
247 gimple
248 gimple_current_bind_expr (void)
250 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
253 /* Return the stack GIMPLE_BINDs created during gimplification. */
255 VEC(gimple, heap) *
256 gimple_bind_expr_stack (void)
258 return gimplify_ctxp->bind_expr_stack;
261 /* Returns true iff there is a COND_EXPR between us and the innermost
262 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
264 static bool
265 gimple_conditional_context (void)
267 return gimplify_ctxp->conditions > 0;
270 /* Note that we've entered a COND_EXPR. */
272 static void
273 gimple_push_condition (void)
275 #ifdef ENABLE_GIMPLE_CHECKING
276 if (gimplify_ctxp->conditions == 0)
277 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
278 #endif
279 ++(gimplify_ctxp->conditions);
282 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
283 now, add any conditional cleanups we've seen to the prequeue. */
285 static void
286 gimple_pop_condition (gimple_seq *pre_p)
288 int conds = --(gimplify_ctxp->conditions);
290 gcc_assert (conds >= 0);
291 if (conds == 0)
293 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
294 gimplify_ctxp->conditional_cleanups = NULL;
298 /* A stable comparison routine for use with splay trees and DECLs. */
300 static int
301 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
303 tree a = (tree) xa;
304 tree b = (tree) xb;
306 return DECL_UID (a) - DECL_UID (b);
309 /* Create a new omp construct that deals with variable remapping. */
311 static struct gimplify_omp_ctx *
312 new_omp_context (enum omp_region_type region_type)
314 struct gimplify_omp_ctx *c;
316 c = XCNEW (struct gimplify_omp_ctx);
317 c->outer_context = gimplify_omp_ctxp;
318 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
319 c->privatized_types = pointer_set_create ();
320 c->location = input_location;
321 c->region_type = region_type;
322 if (region_type != ORT_TASK)
323 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
324 else
325 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
327 return c;
330 /* Destroy an omp construct that deals with variable remapping. */
332 static void
333 delete_omp_context (struct gimplify_omp_ctx *c)
335 splay_tree_delete (c->variables);
336 pointer_set_destroy (c->privatized_types);
337 XDELETE (c);
340 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
341 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
343 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
345 static void
346 append_to_statement_list_1 (tree t, tree *list_p)
348 tree list = *list_p;
349 tree_stmt_iterator i;
351 if (!list)
353 if (t && TREE_CODE (t) == STATEMENT_LIST)
355 *list_p = t;
356 return;
358 *list_p = list = alloc_stmt_list ();
361 i = tsi_last (list);
362 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
365 /* Add T to the end of the list container pointed to by LIST_P.
366 If T is an expression with no effects, it is ignored. */
368 void
369 append_to_statement_list (tree t, tree *list_p)
371 if (t && TREE_SIDE_EFFECTS (t))
372 append_to_statement_list_1 (t, list_p);
375 /* Similar, but the statement is always added, regardless of side effects. */
377 void
378 append_to_statement_list_force (tree t, tree *list_p)
380 if (t != NULL_TREE)
381 append_to_statement_list_1 (t, list_p);
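/* For example, given a hypothetical side-effect-free expression VAL,

     append_to_statement_list (val, &list);

   is a no-op, while

     append_to_statement_list_force (val, &list);

   always links VAL into the list.  */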
384 /* Both gimplify the statement T and append it to *SEQ_P. This function
385 behaves exactly as gimplify_stmt, but you don't have to pass T as a
386 reference. */
388 void
389 gimplify_and_add (tree t, gimple_seq *seq_p)
391 gimplify_stmt (&t, seq_p);
394 /* Gimplify statement T into sequence *SEQ_P, and return the first
395 tuple in the sequence of generated tuples for this statement.
396 Return NULL if gimplifying T produced no tuples. */
398 static gimple
399 gimplify_and_return_first (tree t, gimple_seq *seq_p)
401 gimple_stmt_iterator last = gsi_last (*seq_p);
403 gimplify_and_add (t, seq_p);
405 if (!gsi_end_p (last))
407 gsi_next (&last);
408 return gsi_stmt (last);
410 else
411 return gimple_seq_first_stmt (*seq_p);
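/* A minimal sketch of the two helpers above, assuming T1 and T2 are
   hypothetical statements built by a caller:

     gimple_seq seq = NULL;
     gimplify_and_add (t1, &seq);
     g = gimplify_and_return_first (t2, &seq);

   Here G is the first tuple generated for T2, or NULL if gimplifying T2
   produced no tuples.  */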
414 /* Strip off a legitimate source ending from the input string NAME of
415 length LEN. Rather than having to know the names used by all of
416 our front ends, we strip off an ending of a period followed by
417 up to five characters. (Java uses ".class".) */
419 static inline void
420 remove_suffix (char *name, int len)
422 int i;
424 for (i = 2; i < 8 && len > i; i++)
426 if (name[len - i] == '.')
428 name[len - i] = '\0';
429 break;
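/* For example,

     char buf[] = "foo.class";
     remove_suffix (buf, strlen (buf));

   leaves "foo" in BUF; likewise "bar.c" becomes "bar".  */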
434 /* Create a new temporary name with PREFIX. Returns an identifier. */
436 static GTY(()) unsigned int tmp_var_id_num;
438 tree
439 create_tmp_var_name (const char *prefix)
441 char *tmp_name;
443 if (prefix)
445 char *preftmp = ASTRDUP (prefix);
447 remove_suffix (preftmp, strlen (preftmp));
448 prefix = preftmp;
451 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
452 return get_identifier (tmp_name);
456 /* Create a new temporary variable declaration of type TYPE.
457 Does NOT push it into the current binding. */
459 tree
460 create_tmp_var_raw (tree type, const char *prefix)
462 tree tmp_var;
463 tree new_type;
465 /* Make the type of the variable writable. */
466 new_type = build_type_variant (type, 0, 0);
467 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
469 tmp_var = build_decl (input_location,
470 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
471 type);
473 /* The variable was declared by the compiler. */
474 DECL_ARTIFICIAL (tmp_var) = 1;
475 /* And we don't want debug info for it. */
476 DECL_IGNORED_P (tmp_var) = 1;
478 /* Make the variable writable. */
479 TREE_READONLY (tmp_var) = 0;
481 DECL_EXTERNAL (tmp_var) = 0;
482 TREE_STATIC (tmp_var) = 0;
483 TREE_USED (tmp_var) = 1;
485 return tmp_var;
488 /* Create a new temporary variable declaration of type TYPE. DOES push the
489 variable into the current binding. Further, assume that this is called
490 only from gimplification or optimization, at which point the creation of
491 certain types are bugs. */
493 tree
494 create_tmp_var (tree type, const char *prefix)
496 tree tmp_var;
498 /* We don't allow types that are addressable (meaning we can't make copies),
499 or incomplete. We also used to reject all variable-sized objects here,
500 but now support those for which a constant upper bound can be obtained.
501 The processing for variable sizes is performed in gimple_add_tmp_var,
502 which is the point at which it really matters, and which may be reached
503 via paths not going through this function, e.g. after direct calls to create_tmp_var_raw. */
504 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
506 tmp_var = create_tmp_var_raw (type, prefix);
507 gimple_add_tmp_var (tmp_var);
508 return tmp_var;
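/* A minimal usage sketch:

     tree t = create_tmp_var (integer_type_node, "tmp");

   creates an artificial, writable integer temporary, pushes it into the
   current binding via gimple_add_tmp_var and returns its VAR_DECL.  */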
511 /* Create a temporary with a name derived from VAL. Subroutine of
512 lookup_tmp_var; nobody else should call this function. */
514 static inline tree
515 create_tmp_from_val (tree val)
517 return create_tmp_var (TREE_TYPE (val), get_name (val));
520 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
521 an existing expression temporary. */
523 static tree
524 lookup_tmp_var (tree val, bool is_formal)
526 tree ret;
528 /* If not optimizing, never really reuse a temporary. local-alloc
529 won't allocate any variable that is used in more than one basic
530 block, which means it will go into memory, causing much extra
531 work in reload and final and poorer code generation, outweighing
532 the extra memory allocation here. */
533 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
534 ret = create_tmp_from_val (val);
535 else
537 elt_t elt, *elt_p;
538 void **slot;
540 elt.val = val;
541 if (gimplify_ctxp->temp_htab == NULL)
542 gimplify_ctxp->temp_htab
543 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
544 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
545 if (*slot == NULL)
547 elt_p = XNEW (elt_t);
548 elt_p->val = val;
549 elt_p->temp = ret = create_tmp_from_val (val);
550 *slot = (void *) elt_p;
552 else
554 elt_p = (elt_t *) *slot;
555 ret = elt_p->temp;
559 return ret;
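/* For example, when optimizing and IS_FORMAL is true, two lookups with
   structurally equal, side-effect-free expressions (A, B and TYPE below
   are hypothetical) share a single temporary:

     t1 = lookup_tmp_var (build2 (PLUS_EXPR, type, a, b), true);
     t2 = lookup_tmp_var (build2 (PLUS_EXPR, type, a, b), true);

   Here T1 == T2, whereas with IS_FORMAL false each call would create a
   fresh temporary.  */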
563 /* Return true if T is a CALL_EXPR or an expression that can be
564 assigned to a temporary. Note that this predicate should only be
565 used during gimplification. See the rationale for this in
566 gimplify_modify_expr. */
568 static bool
569 is_gimple_reg_rhs_or_call (tree t)
571 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
572 || TREE_CODE (t) == CALL_EXPR);
575 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
576 this predicate should only be used during gimplification. See the
577 rationale for this in gimplify_modify_expr. */
579 static bool
580 is_gimple_mem_rhs_or_call (tree t)
582 /* If we're dealing with a renamable type, either source or dest must be
583 a renamed variable. */
584 if (is_gimple_reg_type (TREE_TYPE (t)))
585 return is_gimple_val (t);
586 else
587 return (is_gimple_val (t) || is_gimple_lvalue (t)
588 || TREE_CODE (t) == CALL_EXPR);
591 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
593 static tree
594 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
595 bool is_formal)
597 tree t, mod;
599 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
600 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
601 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
602 fb_rvalue);
604 t = lookup_tmp_var (val, is_formal);
606 if (is_formal
607 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
608 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
609 DECL_GIMPLE_REG_P (t) = 1;
611 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
613 if (EXPR_HAS_LOCATION (val))
614 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
615 else
616 SET_EXPR_LOCATION (mod, input_location);
618 /* gimplify_modify_expr might want to reduce this further. */
619 gimplify_and_add (mod, pre_p);
620 ggc_free (mod);
622 /* If we're gimplifying into ssa, gimplify_modify_expr will have
623 given our temporary an SSA name. Find and return it. */
624 if (gimplify_ctxp->into_ssa)
626 gimple last = gimple_seq_last_stmt (*pre_p);
627 t = gimple_get_lhs (last);
630 return t;
633 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
634 in gimplify_expr. Only use this function if:
636 1) The value of the unfactored expression represented by VAL will not
637 change between the initialization and use of the temporary, and
638 2) The temporary will not be otherwise modified.
640 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
641 and #2 means it is inappropriate for && temps.
643 For other cases, use get_initialized_tmp_var instead. */
645 tree
646 get_formal_tmp_var (tree val, gimple_seq *pre_p)
648 return internal_get_tmp_var (val, pre_p, NULL, true);
651 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
652 are as in gimplify_expr. */
654 tree
655 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
657 return internal_get_tmp_var (val, pre_p, post_p, false);
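/* A minimal sketch, assuming VAL is a hypothetical expression to be
   evaluated ahead of its use:

     gimple_seq seq = NULL;
     tree t = get_initialized_tmp_var (val, &seq, NULL);

   appends the gimplified form of "t = val" to SEQ and returns T, which
   may be an SSA name when gimplifying directly into SSA form.  */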
660 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
661 true, generate debug info for them; otherwise don't. */
663 void
664 declare_vars (tree vars, gimple scope, bool debug_info)
666 tree last = vars;
667 if (last)
669 tree temps, block;
671 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
673 temps = nreverse (last);
675 block = gimple_bind_block (scope);
676 gcc_assert (!block || TREE_CODE (block) == BLOCK);
677 if (!block || !debug_info)
679 TREE_CHAIN (last) = gimple_bind_vars (scope);
680 gimple_bind_set_vars (scope, temps);
682 else
684 /* We need to attach the nodes both to the BIND_EXPR and to its
685 associated BLOCK for debugging purposes. The key point here
686 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
687 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
688 if (BLOCK_VARS (block))
689 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
690 else
692 gimple_bind_set_vars (scope,
693 chainon (gimple_bind_vars (scope), temps));
694 BLOCK_VARS (block) = temps;
700 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
701 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
702 no such upper bound can be obtained. */
704 static void
705 force_constant_size (tree var)
707 /* The only attempt we make is by querying the maximum size of objects
708 of the variable's type. */
710 HOST_WIDE_INT max_size;
712 gcc_assert (TREE_CODE (var) == VAR_DECL);
714 max_size = max_int_size_in_bytes (TREE_TYPE (var));
716 gcc_assert (max_size >= 0);
718 DECL_SIZE_UNIT (var)
719 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
720 DECL_SIZE (var)
721 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
724 void
725 gimple_add_tmp_var (tree tmp)
727 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
729 /* Later processing assumes that the object size is constant, which might
730 not be true at this point. Force the use of a constant upper bound in
731 this case. */
732 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
733 force_constant_size (tmp);
735 DECL_CONTEXT (tmp) = current_function_decl;
736 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
738 if (gimplify_ctxp)
740 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
741 gimplify_ctxp->temps = tmp;
743 /* Mark temporaries local within the nearest enclosing parallel. */
744 if (gimplify_omp_ctxp)
746 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
747 while (ctx && ctx->region_type == ORT_WORKSHARE)
748 ctx = ctx->outer_context;
749 if (ctx)
750 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
753 else if (cfun)
754 record_vars (tmp);
755 else
757 gimple_seq body_seq;
759 /* This case is for nested functions. We need to expose the locals
760 they create. */
761 body_seq = gimple_body (current_function_decl);
762 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
766 /* Determines whether to assign a location to the statement GS. */
768 static bool
769 should_carry_location_p (gimple gs)
771 /* Don't emit a line note for a label. We particularly don't want to
772 emit one for the break label, since it doesn't actually correspond
773 to the beginning of the loop/switch. */
774 if (gimple_code (gs) == GIMPLE_LABEL)
775 return false;
777 return true;
781 /* Return true if a location should not be emitted for this statement
782 by annotate_one_with_location. */
784 static inline bool
785 gimple_do_not_emit_location_p (gimple g)
787 return gimple_plf (g, GF_PLF_1);
790 /* Mark statement G so a location will not be emitted by
791 annotate_one_with_location. */
793 static inline void
794 gimple_set_do_not_emit_location (gimple g)
796 /* The PLF flags are initialized to 0 when a new tuple is created,
797 so no need to initialize it anywhere. */
798 gimple_set_plf (g, GF_PLF_1, true);
801 /* Set the location for gimple statement GS to LOCATION. */
803 static void
804 annotate_one_with_location (gimple gs, location_t location)
806 if (!gimple_has_location (gs)
807 && !gimple_do_not_emit_location_p (gs)
808 && should_carry_location_p (gs))
809 gimple_set_location (gs, location);
813 /* Set LOCATION for all the statements after iterator GSI in sequence
814 SEQ. If GSI is pointing to the end of the sequence, start with the
815 first statement in SEQ. */
817 static void
818 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
819 location_t location)
821 if (gsi_end_p (gsi))
822 gsi = gsi_start (seq);
823 else
824 gsi_next (&gsi);
826 for (; !gsi_end_p (gsi); gsi_next (&gsi))
827 annotate_one_with_location (gsi_stmt (gsi), location);
831 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
833 void
834 annotate_all_with_location (gimple_seq stmt_p, location_t location)
836 gimple_stmt_iterator i;
838 if (gimple_seq_empty_p (stmt_p))
839 return;
841 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
843 gimple gs = gsi_stmt (i);
844 annotate_one_with_location (gs, location);
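/* For example, after gimplifying a statement from source location LOC
   into a sub-sequence SEQ, a caller can propagate the location with

     annotate_all_with_location (seq, loc);

   Statements that already carry a location, labels, and statements marked
   via gimple_set_do_not_emit_location are left untouched.  */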
849 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
850 These nodes model computations that should only be done once. If we
851 were to unshare something like SAVE_EXPR(i++), the gimplification
852 process would create wrong code. */
854 static tree
855 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
857 enum tree_code code = TREE_CODE (*tp);
858 /* Don't unshare types, decls, constants, SAVE_EXPR or TARGET_EXPR nodes. */
859 if (TREE_CODE_CLASS (code) == tcc_type
860 || TREE_CODE_CLASS (code) == tcc_declaration
861 || TREE_CODE_CLASS (code) == tcc_constant
862 || code == SAVE_EXPR || code == TARGET_EXPR
863 /* We can't do anything sensible with a BLOCK used as an expression,
864 but we also can't just die when we see it because of non-expression
865 uses. So just avert our eyes and cross our fingers. Silly Java. */
866 || code == BLOCK)
867 *walk_subtrees = 0;
868 else
870 gcc_assert (code != BIND_EXPR);
871 copy_tree_r (tp, walk_subtrees, data);
874 return NULL_TREE;
877 /* Callback for walk_tree to unshare most of the shared trees rooted at
878 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
879 then *TP is deep copied by calling copy_tree_r.
881 This unshares the same trees as copy_tree_r with the exception of
882 SAVE_EXPR nodes. These nodes model computations that should only be
883 done once. If we were to unshare something like SAVE_EXPR(i++), the
884 gimplification process would create wrong code. */
886 static tree
887 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
888 void *data ATTRIBUTE_UNUSED)
890 tree t = *tp;
891 enum tree_code code = TREE_CODE (t);
893 /* Skip types, decls, and constants. But we do want to look at their
894 types and the bounds of types. Mark them as visited so we properly
895 unmark their subtrees on the unmark pass. If we've already seen them,
896 don't look down further. */
897 if (TREE_CODE_CLASS (code) == tcc_type
898 || TREE_CODE_CLASS (code) == tcc_declaration
899 || TREE_CODE_CLASS (code) == tcc_constant)
901 if (TREE_VISITED (t))
902 *walk_subtrees = 0;
903 else
904 TREE_VISITED (t) = 1;
907 /* If this node has been visited already, unshare it and don't look
908 any deeper. */
909 else if (TREE_VISITED (t))
911 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
912 *walk_subtrees = 0;
915 /* Otherwise, mark the tree as visited and keep looking. */
916 else
917 TREE_VISITED (t) = 1;
919 return NULL_TREE;
922 static tree
923 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
924 void *data ATTRIBUTE_UNUSED)
926 if (TREE_VISITED (*tp))
927 TREE_VISITED (*tp) = 0;
928 else
929 *walk_subtrees = 0;
931 return NULL_TREE;
934 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
935 bodies of any nested functions if we are unsharing the entire body of
936 FNDECL. */
938 static void
939 unshare_body (tree *body_p, tree fndecl)
941 struct cgraph_node *cgn = cgraph_node (fndecl);
943 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
944 if (body_p == &DECL_SAVED_TREE (fndecl))
945 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
946 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
949 /* Likewise, but mark all trees as not visited. */
951 static void
952 unvisit_body (tree *body_p, tree fndecl)
954 struct cgraph_node *cgn = cgraph_node (fndecl);
956 walk_tree (body_p, unmark_visited_r, NULL, NULL);
957 if (body_p == &DECL_SAVED_TREE (fndecl))
958 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
959 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
962 /* Unconditionally make an unshared copy of EXPR. This is used when using
963 stored expressions which span multiple functions, such as BINFO_VTABLE,
964 as the normal unsharing process can't tell that they're shared. */
966 tree
967 unshare_expr (tree expr)
969 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
970 return expr;
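/* A minimal sketch: when the same tree VAL must feed two different
   statements, the second use gets an unshared copy, as in

     mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

   from internal_get_tmp_var above, so that later rewriting of one copy
   cannot corrupt the other.  */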
973 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
974 contain statements and have a value. Assign its value to a temporary
975 and give it void_type_node. Returns the temporary, or NULL_TREE if
976 WRAPPER was already void. */
978 tree
979 voidify_wrapper_expr (tree wrapper, tree temp)
981 tree type = TREE_TYPE (wrapper);
982 if (type && !VOID_TYPE_P (type))
984 tree *p;
986 /* Set p to point to the body of the wrapper. Loop until we find
987 something that isn't a wrapper. */
988 for (p = &wrapper; p && *p; )
990 switch (TREE_CODE (*p))
992 case BIND_EXPR:
993 TREE_SIDE_EFFECTS (*p) = 1;
994 TREE_TYPE (*p) = void_type_node;
995 /* For a BIND_EXPR, the body is operand 1. */
996 p = &BIND_EXPR_BODY (*p);
997 break;
999 case CLEANUP_POINT_EXPR:
1000 case TRY_FINALLY_EXPR:
1001 case TRY_CATCH_EXPR:
1002 TREE_SIDE_EFFECTS (*p) = 1;
1003 TREE_TYPE (*p) = void_type_node;
1004 p = &TREE_OPERAND (*p, 0);
1005 break;
1007 case STATEMENT_LIST:
1009 tree_stmt_iterator i = tsi_last (*p);
1010 TREE_SIDE_EFFECTS (*p) = 1;
1011 TREE_TYPE (*p) = void_type_node;
1012 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1014 break;
1016 case COMPOUND_EXPR:
1017 /* Advance to the last statement. Set all container types to void. */
1018 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1020 TREE_SIDE_EFFECTS (*p) = 1;
1021 TREE_TYPE (*p) = void_type_node;
1023 break;
1025 default:
1026 goto out;
1030 out:
1031 if (p == NULL || IS_EMPTY_STMT (*p))
1032 temp = NULL_TREE;
1033 else if (temp)
1035 /* The wrapper is on the RHS of an assignment that we're pushing
1036 down. */
1037 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1038 || TREE_CODE (temp) == MODIFY_EXPR);
1039 TREE_OPERAND (temp, 1) = *p;
1040 *p = temp;
1042 else
1044 temp = create_tmp_var (type, "retval");
1045 *p = build2 (INIT_EXPR, type, temp, *p);
1048 return temp;
1051 return NULL_TREE;
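/* For example, given a hypothetical BIND_EXPR B of integer type whose
   body ends in the expression "x + y",

     tree tmp = voidify_wrapper_expr (b, NULL_TREE);

   rewrites that trailing expression into "tmp = x + y", gives B (and any
   intermediate wrappers) void type, and returns TMP for use as the value
   of the original expression.  */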
1054 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1055 a temporary through which they communicate. */
1057 static void
1058 build_stack_save_restore (gimple *save, gimple *restore)
1060 tree tmp_var;
1062 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1063 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1064 gimple_call_set_lhs (*save, tmp_var);
1066 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1067 1, tmp_var);
1070 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1072 static enum gimplify_status
1073 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1075 tree bind_expr = *expr_p;
1076 bool old_save_stack = gimplify_ctxp->save_stack;
1077 tree t;
1078 gimple gimple_bind;
1079 gimple_seq body;
1081 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1083 /* Mark variables seen in this bind expr. */
1084 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1086 if (TREE_CODE (t) == VAR_DECL)
1088 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1090 /* Mark variable as local. */
1091 if (ctx && !is_global_var (t)
1092 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1093 || splay_tree_lookup (ctx->variables,
1094 (splay_tree_key) t) == NULL))
1095 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1097 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1099 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1100 cfun->has_local_explicit_reg_vars = true;
1103 /* Preliminarily mark non-addressed complex variables as eligible
1104 for promotion to gimple registers. We'll transform their uses
1105 as we find them.
1106 We exclude complex types if not optimizing because they can be
1107 subject to partial stores in GNU C by means of the __real__ and
1108 __imag__ operators and we cannot promote them to total stores
1109 (see gimplify_modify_expr_complex_part). */
1110 if (optimize
1111 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1112 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1113 && !TREE_THIS_VOLATILE (t)
1114 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1115 && !needs_to_live_in_memory (t))
1116 DECL_GIMPLE_REG_P (t) = 1;
1119 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1120 BIND_EXPR_BLOCK (bind_expr));
1121 gimple_push_bind_expr (gimple_bind);
1123 gimplify_ctxp->save_stack = false;
1125 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1126 body = NULL;
1127 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1128 gimple_bind_set_body (gimple_bind, body);
1130 if (gimplify_ctxp->save_stack)
1132 gimple stack_save, stack_restore, gs;
1133 gimple_seq cleanup, new_body;
1135 /* Save stack on entry and restore it on exit. Add a try_finally
1136 block to achieve this. Note that mudflap depends on the
1137 format of the emitted code: see mx_register_decls(). */
1138 build_stack_save_restore (&stack_save, &stack_restore);
1140 cleanup = new_body = NULL;
1141 gimplify_seq_add_stmt (&cleanup, stack_restore);
1142 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1143 GIMPLE_TRY_FINALLY);
1145 gimplify_seq_add_stmt (&new_body, stack_save);
1146 gimplify_seq_add_stmt (&new_body, gs);
1147 gimple_bind_set_body (gimple_bind, new_body);
1150 gimplify_ctxp->save_stack = old_save_stack;
1151 gimple_pop_bind_expr ();
1153 gimplify_seq_add_stmt (pre_p, gimple_bind);
1155 if (temp)
1157 *expr_p = temp;
1158 return GS_OK;
1161 *expr_p = NULL_TREE;
1162 return GS_ALL_DONE;
1165 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1166 GIMPLE value, it is assigned to a new temporary and the statement is
1167 re-written to return the temporary.
1169 PRE_P points to the sequence where side effects that must happen before
1170 STMT should be stored. */
1172 static enum gimplify_status
1173 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1175 gimple ret;
1176 tree ret_expr = TREE_OPERAND (stmt, 0);
1177 tree result_decl, result;
1179 if (ret_expr == error_mark_node)
1180 return GS_ERROR;
1182 if (!ret_expr
1183 || TREE_CODE (ret_expr) == RESULT_DECL
1184 || ret_expr == error_mark_node)
1186 gimple ret = gimple_build_return (ret_expr);
1187 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1188 gimplify_seq_add_stmt (pre_p, ret);
1189 return GS_ALL_DONE;
1192 if (VOID_TYPE_P (function_return_type (current_function_decl)))
1193 result_decl = NULL_TREE;
1194 else
1196 result_decl = TREE_OPERAND (ret_expr, 0);
1198 /* See through a return by reference. */
1199 if (TREE_CODE (result_decl) == INDIRECT_REF)
1200 result_decl = TREE_OPERAND (result_decl, 0);
1202 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1203 || TREE_CODE (ret_expr) == INIT_EXPR)
1204 && TREE_CODE (result_decl) == RESULT_DECL);
1207 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1208 Recall that aggregate_value_p is FALSE for any aggregate type that is
1209 returned in registers. If we're returning values in registers, then
1210 we don't want to extend the lifetime of the RESULT_DECL, particularly
1211 across another call. In addition, for those aggregates for which
1212 hard_function_value generates a PARALLEL, we'll die during normal
1213 expansion of structure assignments; there's special code in expand_return
1214 to handle this case that does not exist in expand_expr. */
1215 if (!result_decl
1216 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1217 result = result_decl;
1218 else if (gimplify_ctxp->return_temp)
1219 result = gimplify_ctxp->return_temp;
1220 else
1222 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1223 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1224 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1225 DECL_GIMPLE_REG_P (result) = 1;
1227 /* ??? With complex control flow (usually involving abnormal edges),
1228 we can wind up warning about an uninitialized value for this. Due
1229 to how this variable is constructed and initialized, this is never
1230 true. Give up and never warn. */
1231 TREE_NO_WARNING (result) = 1;
1233 gimplify_ctxp->return_temp = result;
1236 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1237 Then gimplify the whole thing. */
1238 if (result != result_decl)
1239 TREE_OPERAND (ret_expr, 0) = result;
1241 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1243 ret = gimple_build_return (result);
1244 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1245 gimplify_seq_add_stmt (pre_p, ret);
1247 return GS_ALL_DONE;
1250 static void
1251 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1253 /* This is a variable-sized decl. Simplify its size and mark it
1254 for deferred expansion. Note that mudflap depends on the format
1255 of the emitted code: see mx_register_decls(). */
1256 tree t, addr, ptr_type;
1258 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1259 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1261 /* All occurrences of this decl in final gimplified code will be
1262 replaced by indirection. Setting DECL_VALUE_EXPR does two
1263 things: First, it lets the rest of the gimplifier know what
1264 replacement to use. Second, it lets the debug info know
1265 where to find the value. */
1266 ptr_type = build_pointer_type (TREE_TYPE (decl));
1267 addr = create_tmp_var (ptr_type, get_name (decl));
1268 DECL_IGNORED_P (addr) = 0;
1269 t = build_fold_indirect_ref (addr);
1270 SET_DECL_VALUE_EXPR (decl, t);
1271 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1273 t = built_in_decls[BUILT_IN_ALLOCA];
1274 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1275 t = fold_convert (ptr_type, t);
1276 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1278 gimplify_and_add (t, seq_p);
1280 /* Indicate that we need to restore the stack level when the
1281 enclosing BIND_EXPR is exited. */
1282 gimplify_ctxp->save_stack = true;
1286 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1287 and initialization explicit. */
1289 static enum gimplify_status
1290 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1292 tree stmt = *stmt_p;
1293 tree decl = DECL_EXPR_DECL (stmt);
1295 *stmt_p = NULL_TREE;
1297 if (TREE_TYPE (decl) == error_mark_node)
1298 return GS_ERROR;
1300 if ((TREE_CODE (decl) == TYPE_DECL
1301 || TREE_CODE (decl) == VAR_DECL)
1302 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1303 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1305 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1307 tree init = DECL_INITIAL (decl);
1309 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1310 || (!TREE_STATIC (decl)
1311 && flag_stack_check == GENERIC_STACK_CHECK
1312 && compare_tree_int (DECL_SIZE_UNIT (decl),
1313 STACK_CHECK_MAX_VAR_SIZE) > 0))
1314 gimplify_vla_decl (decl, seq_p);
1316 if (init && init != error_mark_node)
1318 if (!TREE_STATIC (decl))
1320 DECL_INITIAL (decl) = NULL_TREE;
1321 init = build2 (INIT_EXPR, void_type_node, decl, init);
1322 gimplify_and_add (init, seq_p);
1323 ggc_free (init);
1325 else
1326 /* We must still examine initializers for static variables
1327 as they may contain a label address. */
1328 walk_tree (&init, force_labels_r, NULL, NULL);
1331 /* Some front ends do not explicitly declare all anonymous
1332 artificial variables. We compensate here by declaring the
1333 variables, though it would be better if the front ends would
1334 explicitly declare them. */
1335 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1336 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1337 gimple_add_tmp_var (decl);
1340 return GS_ALL_DONE;
1343 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1344 and replacing the LOOP_EXPR with goto, but if the loop contains an
1345 EXIT_EXPR, we need to append a label for it to jump to. */
1347 static enum gimplify_status
1348 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1350 tree saved_label = gimplify_ctxp->exit_label;
1351 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1353 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1355 gimplify_ctxp->exit_label = NULL_TREE;
1357 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1359 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1361 if (gimplify_ctxp->exit_label)
1362 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1364 gimplify_ctxp->exit_label = saved_label;
1366 *expr_p = NULL;
1367 return GS_ALL_DONE;
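/* For example, a LOOP_EXPR whose body contains an EXIT_EXPR is lowered,
   roughly, to

     start:
       ... gimplified body, with "if (cond) goto exit" for the EXIT_EXPR ...
       goto start;
     exit:

   The exit label is only emitted when an EXIT_EXPR was actually seen.  */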
1370 /* Gimplifies a statement list onto a sequence. These may be created either
1371 by an enlightened front-end, or by shortcut_cond_expr. */
1373 static enum gimplify_status
1374 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1376 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1378 tree_stmt_iterator i = tsi_start (*expr_p);
1380 while (!tsi_end_p (i))
1382 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1383 tsi_delink (&i);
1386 if (temp)
1388 *expr_p = temp;
1389 return GS_OK;
1392 return GS_ALL_DONE;
1395 /* Compare two case labels. Because the front end should already have
1396 made sure that case ranges do not overlap, it is enough to only compare
1397 the CASE_LOW values of each case label. */
1399 static int
1400 compare_case_labels (const void *p1, const void *p2)
1402 const_tree const case1 = *(const_tree const*)p1;
1403 const_tree const case2 = *(const_tree const*)p2;
1405 /* The 'default' case label always goes first. */
1406 if (!CASE_LOW (case1))
1407 return -1;
1408 else if (!CASE_LOW (case2))
1409 return 1;
1410 else
1411 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1415 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1417 void
1418 sort_case_labels (VEC(tree,heap)* label_vec)
1420 size_t len = VEC_length (tree, label_vec);
1421 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1422 compare_case_labels);
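/* For example, calling

     sort_case_labels (labels);

   on a vector holding cases for 7, the default label and 3 (in that
   order) yields: default, 3, 7, because compare_case_labels orders a
   NULL CASE_LOW (the default case) first.  */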
1426 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1427 branch to. */
1429 static enum gimplify_status
1430 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1432 tree switch_expr = *expr_p;
1433 gimple_seq switch_body_seq = NULL;
1434 enum gimplify_status ret;
1436 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1437 fb_rvalue);
1438 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1439 return ret;
1441 if (SWITCH_BODY (switch_expr))
1443 VEC (tree,heap) *labels;
1444 VEC (tree,heap) *saved_labels;
1445 tree default_case = NULL_TREE;
1446 size_t i, len;
1447 gimple gimple_switch;
1449 /* If someone can be bothered to fill in the labels, they can
1450 be bothered to null out the body too. */
1451 gcc_assert (!SWITCH_LABELS (switch_expr));
1453 /* Save old labels, get new ones from body, then restore the old
1454 labels. Save all the things from the switch body to append after. */
1455 saved_labels = gimplify_ctxp->case_labels;
1456 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1458 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1459 labels = gimplify_ctxp->case_labels;
1460 gimplify_ctxp->case_labels = saved_labels;
1462 i = 0;
1463 while (i < VEC_length (tree, labels))
1465 tree elt = VEC_index (tree, labels, i);
1466 tree low = CASE_LOW (elt);
1467 bool remove_element = FALSE;
1469 if (low)
1471 /* Discard empty ranges. */
1472 tree high = CASE_HIGH (elt);
1473 if (high && tree_int_cst_lt (high, low))
1474 remove_element = TRUE;
1476 else
1478 /* The default case must be the last label in the list. */
1479 gcc_assert (!default_case);
1480 default_case = elt;
1481 remove_element = TRUE;
1484 if (remove_element)
1485 VEC_ordered_remove (tree, labels, i);
1486 else
1487 i++;
1489 len = i;
1491 if (!VEC_empty (tree, labels))
1492 sort_case_labels (labels);
1494 if (!default_case)
1496 tree type = TREE_TYPE (switch_expr);
1498 /* If the switch has no default label, add one, so that we jump
1499 around the switch body. If the labels already cover the whole
1500 range of type, add the default label pointing to one of the
1501 existing labels. */
1502 if (type == void_type_node)
1503 type = TREE_TYPE (SWITCH_COND (switch_expr));
1504 if (len
1505 && INTEGRAL_TYPE_P (type)
1506 && TYPE_MIN_VALUE (type)
1507 && TYPE_MAX_VALUE (type)
1508 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1509 TYPE_MIN_VALUE (type)))
1511 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1512 if (!high)
1513 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1514 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1516 for (i = 1; i < len; i++)
1518 high = CASE_LOW (VEC_index (tree, labels, i));
1519 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1520 if (!low)
1521 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1522 if ((TREE_INT_CST_LOW (low) + 1
1523 != TREE_INT_CST_LOW (high))
1524 || (TREE_INT_CST_HIGH (low)
1525 + (TREE_INT_CST_LOW (high) == 0)
1526 != TREE_INT_CST_HIGH (high)))
1527 break;
1529 if (i == len)
1530 default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1531 NULL_TREE, NULL_TREE,
1532 CASE_LABEL (VEC_index (tree,
1533 labels, 0)));
1537 if (!default_case)
1539 gimple new_default;
1541 default_case
1542 = build3 (CASE_LABEL_EXPR, void_type_node,
1543 NULL_TREE, NULL_TREE,
1544 create_artificial_label (UNKNOWN_LOCATION));
1545 new_default = gimple_build_label (CASE_LABEL (default_case));
1546 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1550 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1551 default_case, labels);
1552 gimplify_seq_add_stmt (pre_p, gimple_switch);
1553 gimplify_seq_add_seq (pre_p, switch_body_seq);
1554 VEC_free(tree, heap, labels);
1556 else
1557 gcc_assert (SWITCH_LABELS (switch_expr));
1559 return GS_ALL_DONE;
1563 static enum gimplify_status
1564 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1566 struct gimplify_ctx *ctxp;
1567 gimple gimple_label;
1569 /* Invalid OpenMP programs can play Duff's Device type games with
1570 #pragma omp parallel. At least in the C front end, we don't
1571 detect such invalid branches until after gimplification. */
1572 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1573 if (ctxp->case_labels)
1574 break;
1576 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1577 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1578 gimplify_seq_add_stmt (pre_p, gimple_label);
1580 return GS_ALL_DONE;
1583 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1584 if necessary. */
1586 tree
1587 build_and_jump (tree *label_p)
1589 if (label_p == NULL)
1590 /* If there's nowhere to jump, just fall through. */
1591 return NULL_TREE;
1593 if (*label_p == NULL_TREE)
1595 tree label = create_artificial_label (UNKNOWN_LOCATION);
1596 *label_p = label;
1599 return build1 (GOTO_EXPR, void_type_node, *label_p);
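/* A minimal sketch, matching the use in gimplify_exit_expr below:

     tree jump = build_and_jump (&gimplify_ctxp->exit_label);

   creates the exit label on first use and returns a GOTO_EXPR to it;
   passing NULL instead returns NULL_TREE, i.e. "just fall through".  */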
1602 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1603 This also involves building a label to jump to and communicating it to
1604 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1606 static enum gimplify_status
1607 gimplify_exit_expr (tree *expr_p)
1609 tree cond = TREE_OPERAND (*expr_p, 0);
1610 tree expr;
1612 expr = build_and_jump (&gimplify_ctxp->exit_label);
1613 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1614 *expr_p = expr;
1616 return GS_OK;
1619 /* A helper function to be called via walk_tree. Mark all labels under *TP
1620 as being forced. To be called for DECL_INITIAL of static variables. */
1622 tree
1623 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1625 if (TYPE_P (*tp))
1626 *walk_subtrees = 0;
1627 if (TREE_CODE (*tp) == LABEL_DECL)
1628 FORCED_LABEL (*tp) = 1;
1630 return NULL_TREE;
1633 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1634 different from its canonical type, wrap the whole thing inside a
1635 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1636 type.
1638 The canonical type of a COMPONENT_REF is the type of the field being
1639 referenced--unless the field is a bit-field which can be read directly
1640 in a smaller mode, in which case the canonical type is the
1641 sign-appropriate type corresponding to that mode. */
1643 static void
1644 canonicalize_component_ref (tree *expr_p)
1646 tree expr = *expr_p;
1647 tree type;
1649 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1651 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1652 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1653 else
1654 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1656 /* One could argue that all the stuff below is not necessary for
1657 the non-bitfield case and declare it a FE error if type
1658 adjustment would be needed. */
1659 if (TREE_TYPE (expr) != type)
1661 #ifdef ENABLE_TYPES_CHECKING
1662 tree old_type = TREE_TYPE (expr);
1663 #endif
1664 int type_quals;
1666 /* We need to preserve qualifiers and propagate them from
1667 operand 0. */
1668 type_quals = TYPE_QUALS (type)
1669 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1670 if (TYPE_QUALS (type) != type_quals)
1671 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1673 /* Set the type of the COMPONENT_REF to the underlying type. */
1674 TREE_TYPE (expr) = type;
1676 #ifdef ENABLE_TYPES_CHECKING
1677 /* It is now a FE error, if the conversion from the canonical
1678 type to the original expression type is not useless. */
1679 gcc_assert (useless_type_conversion_p (old_type, type));
1680 #endif
1684 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1685 to foo, embed that change in the ADDR_EXPR by converting
1686 T array[U];
1687 (T *)&array
1688 ==>
1689 &array[L]
1690 where L is the lower bound. For simplicity, only do this for constant
1691 lower bound.
1692 The constraint is that the type of &array[L] is trivially convertible
1693 to T *. */
1695 static void
1696 canonicalize_addr_expr (tree *expr_p)
1698 tree expr = *expr_p;
1699 tree addr_expr = TREE_OPERAND (expr, 0);
1700 tree datype, ddatype, pddatype;
1702 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1703 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1704 || TREE_CODE (addr_expr) != ADDR_EXPR)
1705 return;
1707 /* The addr_expr type should be a pointer to an array. */
1708 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1709 if (TREE_CODE (datype) != ARRAY_TYPE)
1710 return;
1712 /* The pointer to element type shall be trivially convertible to
1713 the expression pointer type. */
1714 ddatype = TREE_TYPE (datype);
1715 pddatype = build_pointer_type (ddatype);
1716 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1717 pddatype))
1718 return;
1720 /* The lower bound and element sizes must be constant. */
1721 if (!TYPE_SIZE_UNIT (ddatype)
1722 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1723 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1724 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1725 return;
1727 /* All checks succeeded. Build a new node to merge the cast. */
1728 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1729 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1730 NULL_TREE, NULL_TREE);
1731 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1733 /* We can have stripped a required restrict qualifier above. */
1734 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1735 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
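/* For example, for a hypothetical declaration "int a[10]", the
   conversion "(int *) &a" is rewritten here into "&a[0]", whose type is
   already int *, so the enclosing NOP_EXPR becomes useless.  */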
1738 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1739 underneath as appropriate. */
1741 static enum gimplify_status
1742 gimplify_conversion (tree *expr_p)
1744 tree tem;
1745 location_t loc = EXPR_LOCATION (*expr_p);
1746 gcc_assert (CONVERT_EXPR_P (*expr_p));
1748 /* Then strip away all but the outermost conversion. */
1749 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1751 /* And remove the outermost conversion if it's useless. */
1752 if (tree_ssa_useless_type_conversion (*expr_p))
1753 *expr_p = TREE_OPERAND (*expr_p, 0);
1755 /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1756 For example, this folds (subclass *)&A into &A->subclass, avoiding
1757 the need for a separate statement. */
1758 if (CONVERT_EXPR_P (*expr_p)
1759 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1760 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1761 && (tem = maybe_fold_offset_to_address
1762 (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1763 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1764 *expr_p = tem;
1766 /* If we still have a conversion at the toplevel,
1767 then canonicalize some constructs. */
1768 if (CONVERT_EXPR_P (*expr_p))
1770 tree sub = TREE_OPERAND (*expr_p, 0);
1772 /* If a NOP conversion is changing the type of a COMPONENT_REF
1773 expression, then canonicalize its type now in order to expose more
1774 redundant conversions. */
1775 if (TREE_CODE (sub) == COMPONENT_REF)
1776 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1778 /* If a NOP conversion is changing a pointer to array of foo
1779 to a pointer to foo, embed that change in the ADDR_EXPR. */
1780 else if (TREE_CODE (sub) == ADDR_EXPR)
1781 canonicalize_addr_expr (expr_p);
1784 /* If we have a conversion to a non-register type force the
1785 use of a VIEW_CONVERT_EXPR instead. */
1786 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1787 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1788 TREE_OPERAND (*expr_p, 0));
1790 return GS_OK;
1793 /* Nonlocal VLAs seen in the current function. */
1794 static struct pointer_set_t *nonlocal_vlas;
1796 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1797 DECL_VALUE_EXPR, and it's worth re-examining things. */
1799 static enum gimplify_status
1800 gimplify_var_or_parm_decl (tree *expr_p)
1802 tree decl = *expr_p;
1804 /* ??? If this is a local variable, and it has not been seen in any
1805 outer BIND_EXPR, then it's probably the result of a duplicate
1806 declaration, for which we've already issued an error. It would
1807 be really nice if the front end wouldn't leak these at all.
1808 Currently the only known culprit is C++ destructors, as seen
1809 in g++.old-deja/g++.jason/binding.C. */
1810 if (TREE_CODE (decl) == VAR_DECL
1811 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1812 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1813 && decl_function_context (decl) == current_function_decl)
1815 gcc_assert (errorcount || sorrycount);
1816 return GS_ERROR;
1819 /* When within an OpenMP context, notice uses of variables. */
1820 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1821 return GS_ALL_DONE;
1823 /* If the decl is an alias for another expression, substitute it now. */
1824 if (DECL_HAS_VALUE_EXPR_P (decl))
1826 tree value_expr = DECL_VALUE_EXPR (decl);
1828 /* For referenced nonlocal VLAs add a decl for debugging purposes
1829 to the current function. */
1830 if (TREE_CODE (decl) == VAR_DECL
1831 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1832 && nonlocal_vlas != NULL
1833 && TREE_CODE (value_expr) == INDIRECT_REF
1834 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1835 && decl_function_context (decl) != current_function_decl)
1837 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1838 while (ctx && ctx->region_type == ORT_WORKSHARE)
1839 ctx = ctx->outer_context;
1840 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1842 tree copy = copy_node (decl), block;
1844 lang_hooks.dup_lang_specific_decl (copy);
1845 SET_DECL_RTL (copy, NULL_RTX);
1846 TREE_USED (copy) = 1;
1847 block = DECL_INITIAL (current_function_decl);
1848 TREE_CHAIN (copy) = BLOCK_VARS (block);
1849 BLOCK_VARS (block) = copy;
1850 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1851 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1855 *expr_p = unshare_expr (value_expr);
1856 return GS_OK;
1859 return GS_ALL_DONE;
1863 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1864 node *EXPR_P.
1866 compound_lval
1867 : min_lval '[' val ']'
1868 | min_lval '.' ID
1869 | compound_lval '[' val ']'
1870 | compound_lval '.' ID
1872 This is not part of the original SIMPLE definition, which separates
1873 array and member references, but it seems reasonable to handle them
1874 together. Also, this way we don't run into problems with union
1875 aliasing; gcc requires that for accesses through a union to alias, the
1876 union reference must be explicit, which was not always the case when we
1877 were splitting up array and member refs.
1879 PRE_P points to the sequence where side effects that must happen before
1880 *EXPR_P should be stored.
1882 POST_P points to the sequence where side effects that must happen after
1883 *EXPR_P should be stored. */
1885 static enum gimplify_status
1886 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1887 fallback_t fallback)
1889 tree *p;
1890 VEC(tree,heap) *stack;
1891 enum gimplify_status ret = GS_OK, tret;
1892 int i;
1893 location_t loc = EXPR_LOCATION (*expr_p);
1895 /* Create a stack of the subexpressions so later we can walk them in
1896 order from inner to outer. */
1897 stack = VEC_alloc (tree, heap, 10);
1899 /* We can handle anything that get_inner_reference can deal with. */
1900 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1902 restart:
1903 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1904 if (TREE_CODE (*p) == INDIRECT_REF)
1905 *p = fold_indirect_ref_loc (loc, *p);
1907 if (handled_component_p (*p))
1909 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1910 additional COMPONENT_REFs. */
1911 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1912 && gimplify_var_or_parm_decl (p) == GS_OK)
1913 goto restart;
1914 else
1915 break;
1917 VEC_safe_push (tree, heap, stack, *p);
1920 gcc_assert (VEC_length (tree, stack));
1922 /* Now STACK is a stack of pointers to all the refs we've walked through
1923 and P points to the innermost expression.
1925 Java requires that we elaborate nodes in source order. That
1926 means we must gimplify the inner expression followed by each of
1927 the indices, in order. But we can't gimplify the inner
1928 expression until we deal with any variable bounds, sizes, or
1929 positions in order to deal with PLACEHOLDER_EXPRs.
1931 So we do this in three steps. First we deal with the annotations
1932 for any variables in the components, then we gimplify the base,
1933 then we gimplify any indices, from left to right. */
1934 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1936 tree t = VEC_index (tree, stack, i);
1938 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1940 /* Gimplify the low bound and element type size and put them into
1941 the ARRAY_REF. If these values are set, they have already been
1942 gimplified. */
1943 if (TREE_OPERAND (t, 2) == NULL_TREE)
1945 tree low = unshare_expr (array_ref_low_bound (t));
1946 if (!is_gimple_min_invariant (low))
1948 TREE_OPERAND (t, 2) = low;
1949 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1950 post_p, is_gimple_reg,
1951 fb_rvalue);
1952 ret = MIN (ret, tret);
1956 if (!TREE_OPERAND (t, 3))
1958 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1959 tree elmt_size = unshare_expr (array_ref_element_size (t));
1960 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1962 /* Divide the element size by the alignment of the element
1963 type (above). */
1964 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1966 if (!is_gimple_min_invariant (elmt_size))
1968 TREE_OPERAND (t, 3) = elmt_size;
1969 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1970 post_p, is_gimple_reg,
1971 fb_rvalue);
1972 ret = MIN (ret, tret);
1976 else if (TREE_CODE (t) == COMPONENT_REF)
1978 /* Set the field offset into T and gimplify it. */
1979 if (!TREE_OPERAND (t, 2))
1981 tree offset = unshare_expr (component_ref_field_offset (t));
1982 tree field = TREE_OPERAND (t, 1);
1983 tree factor
1984 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1986 /* Divide the offset by its alignment. */
1987 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
1989 if (!is_gimple_min_invariant (offset))
1991 TREE_OPERAND (t, 2) = offset;
1992 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1993 post_p, is_gimple_reg,
1994 fb_rvalue);
1995 ret = MIN (ret, tret);
2001 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2002 so as to match the min_lval predicate. Failure to do so may result
2003 in the creation of large aggregate temporaries. */
2004 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2005 fallback | fb_lvalue);
2006 ret = MIN (ret, tret);
2008 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2009 loop we also remove any useless conversions. */
2010 for (; VEC_length (tree, stack) > 0; )
2012 tree t = VEC_pop (tree, stack);
2014 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2016 /* Gimplify the dimension. */
2017 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2019 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2020 is_gimple_val, fb_rvalue);
2021 ret = MIN (ret, tret);
2024 else if (TREE_CODE (t) == BIT_FIELD_REF)
2026 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2027 is_gimple_val, fb_rvalue);
2028 ret = MIN (ret, tret);
2029 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2030 is_gimple_val, fb_rvalue);
2031 ret = MIN (ret, tret);
2034 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2036 /* The innermost expression P may have originally had
2037 TREE_SIDE_EFFECTS set which would have caused all the outer
2038 expressions in *EXPR_P leading to P to also have had
2039 TREE_SIDE_EFFECTS set. */
2040 recalculate_side_effects (t);
2043 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2044 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2046 canonicalize_component_ref (expr_p);
2047 ret = MIN (ret, GS_OK);
2050 VEC_free (tree, heap, stack);
2052 return ret;
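/* A rough illustrative sketch (temporary names are hypothetical, not taken
   from a real dump): for a GENERIC reference such as

     x = a[i + 1].f;

   the walk above pushes the COMPONENT_REF and ARRAY_REF on the stack,
   gimplifies the base and any variable bounds or sizes, and finally the
   index, yielding approximately

     D.1 = i + 1;
     x = a[D.1].f;

   with the non-trivial index pulled out into a GIMPLE temporary.  */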
2055 /* Gimplify the self modifying expression pointed to by EXPR_P
2056 (++, --, +=, -=).
2058 PRE_P points to the list where side effects that must happen before
2059 *EXPR_P should be stored.
2061 POST_P points to the list where side effects that must happen after
2062 *EXPR_P should be stored.
2064 WANT_VALUE is nonzero iff we want to use the value of this expression
2065 in another expression. */
2067 static enum gimplify_status
2068 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2069 bool want_value)
2071 enum tree_code code;
2072 tree lhs, lvalue, rhs, t1;
2073 gimple_seq post = NULL, *orig_post_p = post_p;
2074 bool postfix;
2075 enum tree_code arith_code;
2076 enum gimplify_status ret;
2077 location_t loc = EXPR_LOCATION (*expr_p);
2079 code = TREE_CODE (*expr_p);
2081 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2082 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2084 /* Prefix or postfix? */
2085 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2086 /* Faster to treat as prefix if result is not used. */
2087 postfix = want_value;
2088 else
2089 postfix = false;
2091 /* For postfix, make sure the inner expression's post side effects
2092 are executed after side effects from this expression. */
2093 if (postfix)
2094 post_p = &post;
2096 /* Add or subtract? */
2097 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2098 arith_code = PLUS_EXPR;
2099 else
2100 arith_code = MINUS_EXPR;
2102 /* Gimplify the LHS into a GIMPLE lvalue. */
2103 lvalue = TREE_OPERAND (*expr_p, 0);
2104 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2105 if (ret == GS_ERROR)
2106 return ret;
2108 /* Extract the operands to the arithmetic operation. */
2109 lhs = lvalue;
2110 rhs = TREE_OPERAND (*expr_p, 1);
2112 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2113 that as the result value and in the postqueue operation. We also
2114 make sure to make lvalue a minimal lval, see
2115 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2116 if (postfix)
2118 if (!is_gimple_min_lval (lvalue))
2120 mark_addressable (lvalue);
2121 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2122 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2123 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2125 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2126 if (ret == GS_ERROR)
2127 return ret;
2130 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2131 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2133 rhs = fold_convert_loc (loc, sizetype, rhs);
2134 if (arith_code == MINUS_EXPR)
2135 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2136 arith_code = POINTER_PLUS_EXPR;
2139 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2141 if (postfix)
2143 gimplify_assign (lvalue, t1, orig_post_p);
2144 gimplify_seq_add_seq (orig_post_p, post);
2145 *expr_p = lhs;
2146 return GS_ALL_DONE;
2148 else
2150 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2151 return GS_OK;
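/* A rough sketch of the effect (temporary names are hypothetical): for the
   postfix form  b = a++;  the operand is evaluated to an rvalue in the pre
   queue and the update is queued as a post side effect, roughly

     a.1 = a;        (pre queue)
     b = a.1;        (the value of the expression)
     a = a.1 + 1;    (post queue, emitted after the use)

   while the prefix form  b = ++a;  simply becomes  a = a + 1; b = a;.  */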
2156 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2158 static void
2159 maybe_with_size_expr (tree *expr_p)
2161 tree expr = *expr_p;
2162 tree type = TREE_TYPE (expr);
2163 tree size;
2165 /* If we've already wrapped this or the type is error_mark_node, we can't do
2166 anything. */
2167 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2168 || type == error_mark_node)
2169 return;
2171 /* If the size isn't known or is a constant, we have nothing to do. */
2172 size = TYPE_SIZE_UNIT (type);
2173 if (!size || TREE_CODE (size) == INTEGER_CST)
2174 return;
2176 /* Otherwise, make a WITH_SIZE_EXPR. */
2177 size = unshare_expr (size);
2178 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2179 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
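/* For example (a sketch, not a verbatim dump): if EXPR has a variably
   modified type such as  char[n] , the expression is wrapped as

     WITH_SIZE_EXPR <expr, n>

   where the size operand is an unshared copy of TYPE_SIZE_UNIT with any
   PLACEHOLDER_EXPRs substituted, so that later consumers (argument
   passing, the memcpy/memset paths below) still know how many bytes the
   object occupies.  */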
2183 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2184 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2185 the CALL_EXPR. */
2187 static enum gimplify_status
2188 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2190 bool (*test) (tree);
2191 fallback_t fb;
2193 /* In general, we allow lvalues for function arguments to avoid
2194 extra overhead of copying large aggregates out of even larger
2195 aggregates into temporaries only to copy the temporaries to
2196 the argument list. Make optimizers happy by pulling out to
2197 temporaries those types that fit in registers. */
2198 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2199 test = is_gimple_val, fb = fb_rvalue;
2200 else
2201 test = is_gimple_lvalue, fb = fb_either;
2203 /* If this is a variable sized type, we must remember the size. */
2204 maybe_with_size_expr (arg_p);
2206 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2207 /* Make sure arguments have the same location as the function call
2208 itself. */
2209 protected_set_expr_location (*arg_p, call_location);
2211 /* There is a sequence point before a function call. Side effects in
2212 the argument list must occur before the actual call. So, when
2213 gimplifying arguments, force gimplify_expr to use an internal
2214 post queue which is then appended to the end of PRE_P. */
2215 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
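/* Illustration (temporary name is hypothetical): for a call  f (i + 1, s)
   where s is a large aggregate, the scalar argument is forced into an
   rvalue temporary while the aggregate may be passed as an lvalue,
   roughly

     D.1 = i + 1;
     f (D.1, s);

   which avoids copying s into a temporary only to copy it again into the
   argument list.  */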
2219 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2220 WANT_VALUE is true if the result of the call is desired. */
2222 static enum gimplify_status
2223 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2225 tree fndecl, parms, p;
2226 enum gimplify_status ret;
2227 int i, nargs;
2228 gimple call;
2229 bool builtin_va_start_p = FALSE;
2230 location_t loc = EXPR_LOCATION (*expr_p);
2232 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2234 /* For reliable diagnostics during inlining, it is necessary that
2235 every call_expr be annotated with file and line. */
2236 if (! EXPR_HAS_LOCATION (*expr_p))
2237 SET_EXPR_LOCATION (*expr_p, input_location);
2239 /* This may be a call to a builtin function.
2241 Builtin function calls may be transformed into different
2242 (and more efficient) builtin function calls under certain
2243 circumstances. Unfortunately, gimplification can muck things
2244 up enough that the builtin expanders are not aware that certain
2245 transformations are still valid.
2247 So we attempt transformation/gimplification of the call before
2248 we gimplify the CALL_EXPR. At this time we do not manage to
2249 transform all calls in the same manner as the expanders do, but
2250 we do transform most of them. */
2251 fndecl = get_callee_fndecl (*expr_p);
2252 if (fndecl && DECL_BUILT_IN (fndecl))
2254 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2256 if (new_tree && new_tree != *expr_p)
2258 /* There was a transformation of this call which computes the
2259 same value, but in a more efficient way. Return and try
2260 again. */
2261 *expr_p = new_tree;
2262 return GS_OK;
2265 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2266 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2268 builtin_va_start_p = TRUE;
2269 if (call_expr_nargs (*expr_p) < 2)
2271 error ("too few arguments to function %<va_start%>");
2272 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2273 return GS_OK;
2276 if (fold_builtin_next_arg (*expr_p, true))
2278 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2279 return GS_OK;
2284 /* There is a sequence point before the call, so any side effects in
2285 the calling expression must occur before the actual call. Force
2286 gimplify_expr to use an internal post queue. */
2287 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2288 is_gimple_call_addr, fb_rvalue);
2290 nargs = call_expr_nargs (*expr_p);
2292 /* Get argument types for verification. */
2293 fndecl = get_callee_fndecl (*expr_p);
2294 parms = NULL_TREE;
2295 if (fndecl)
2296 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2297 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2298 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2300 if (fndecl && DECL_ARGUMENTS (fndecl))
2301 p = DECL_ARGUMENTS (fndecl);
2302 else if (parms)
2303 p = parms;
2304 else
2305 p = NULL_TREE;
2306 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2309 /* If the last argument is __builtin_va_arg_pack () and it is not
2310 passed as a named argument, decrease the number of CALL_EXPR
2311 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2312 if (!p
2313 && i < nargs
2314 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2316 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2317 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2319 if (last_arg_fndecl
2320 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2321 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2322 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2324 tree call = *expr_p;
2326 --nargs;
2327 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2328 CALL_EXPR_FN (call),
2329 nargs, CALL_EXPR_ARGP (call));
2331 /* Copy all CALL_EXPR flags, location and block, except
2332 CALL_EXPR_VA_ARG_PACK flag. */
2333 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2334 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2335 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2336 = CALL_EXPR_RETURN_SLOT_OPT (call);
2337 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2338 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2339 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2340 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2342 /* Set CALL_EXPR_VA_ARG_PACK. */
2343 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2347 /* Finally, gimplify the function arguments. */
2348 if (nargs > 0)
2350 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2351 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2352 PUSH_ARGS_REVERSED ? i-- : i++)
2354 enum gimplify_status t;
2356 /* Avoid gimplifying the second argument to va_start, which needs to
2357 be the plain PARM_DECL. */
2358 if ((i != 1) || !builtin_va_start_p)
2360 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2361 EXPR_LOCATION (*expr_p));
2363 if (t == GS_ERROR)
2364 ret = GS_ERROR;
2369 /* Verify the function result. */
2370 if (want_value && fndecl
2371 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
2373 error_at (loc, "using result of function returning %<void%>");
2374 ret = GS_ERROR;
2377 /* Try this again in case gimplification exposed something. */
2378 if (ret != GS_ERROR)
2380 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2382 if (new_tree && new_tree != *expr_p)
2384 /* There was a transformation of this call which computes the
2385 same value, but in a more efficient way. Return and try
2386 again. */
2387 *expr_p = new_tree;
2388 return GS_OK;
2391 else
2393 *expr_p = error_mark_node;
2394 return GS_ERROR;
2397 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2398 call expression itself. This allows us to eliminate redundant or useless
2399 calls to "const" functions. */
2400 if (TREE_CODE (*expr_p) == CALL_EXPR)
2402 int flags = call_expr_flags (*expr_p);
2403 if (flags & (ECF_CONST | ECF_PURE)
2404 /* An infinite loop is considered a side effect. */
2405 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2406 TREE_SIDE_EFFECTS (*expr_p) = 0;
2409 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2410 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2411 form and delegate the creation of a GIMPLE_CALL to
2412 gimplify_modify_expr. This is always possible because when
2413 WANT_VALUE is true, the caller wants the result of this call into
2414 a temporary, which means that we will emit an INIT_EXPR in
2415 internal_get_tmp_var which will then be handled by
2416 gimplify_modify_expr. */
2417 if (!want_value)
2419 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2420 have to do is replicate it as a GIMPLE_CALL tuple. */
2421 call = gimple_build_call_from_tree (*expr_p);
2422 gimplify_seq_add_stmt (pre_p, call);
2423 *expr_p = NULL_TREE;
2426 return ret;
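/* Illustration (a sketch; pure_fn is a hypothetical function declared
   pure): the early fold_call_expr attempt above lets simple builtin calls
   be simplified before gimplification, e.g.

     n = __builtin_strlen ("abc");   becomes   n = 3;

   and a call such as  (void) pure_fn (x);  may have TREE_SIDE_EFFECTS
   cleared here, allowing later passes to delete it as dead code.  */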
2429 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2430 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2432 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2433 condition is true or false, respectively. If null, we should generate
2434 our own to skip over the evaluation of this specific expression.
2436 LOCUS is the source location of the COND_EXPR.
2438 This function is the tree equivalent of do_jump.
2440 shortcut_cond_r should only be called by shortcut_cond_expr. */
2442 static tree
2443 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2444 location_t locus)
2446 tree local_label = NULL_TREE;
2447 tree t, expr = NULL;
2449 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2450 retain the shortcut semantics. Just insert the gotos here;
2451 shortcut_cond_expr will append the real blocks later. */
2452 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2454 location_t new_locus;
2456 /* Turn if (a && b) into
2458 if (a); else goto no;
2459 if (b) goto yes; else goto no;
2460 (no:) */
2462 if (false_label_p == NULL)
2463 false_label_p = &local_label;
2465 /* Keep the original source location on the first 'if'. */
2466 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2467 append_to_statement_list (t, &expr);
2469 /* Set the source location of the && on the second 'if'. */
2470 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2471 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2472 new_locus);
2473 append_to_statement_list (t, &expr);
2475 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2477 location_t new_locus;
2479 /* Turn if (a || b) into
2481 if (a) goto yes;
2482 if (b) goto yes; else goto no;
2483 (yes:) */
2485 if (true_label_p == NULL)
2486 true_label_p = &local_label;
2488 /* Keep the original source location on the first 'if'. */
2489 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2490 append_to_statement_list (t, &expr);
2492 /* Set the source location of the || on the second 'if'. */
2493 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2494 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2495 new_locus);
2496 append_to_statement_list (t, &expr);
2498 else if (TREE_CODE (pred) == COND_EXPR)
2500 location_t new_locus;
2502 /* As long as we're messing with gotos, turn if (a ? b : c) into
2503 if (a)
2504 if (b) goto yes; else goto no;
2505 else
2506 if (c) goto yes; else goto no; */
2508 /* Keep the original source location on the first 'if'. Set the source
2509 location of the ? on the second 'if'. */
2510 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2511 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2512 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2513 false_label_p, locus),
2514 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2515 false_label_p, new_locus));
2517 else
2519 expr = build3 (COND_EXPR, void_type_node, pred,
2520 build_and_jump (true_label_p),
2521 build_and_jump (false_label_p));
2522 SET_EXPR_LOCATION (expr, locus);
2525 if (local_label)
2527 t = build1 (LABEL_EXPR, void_type_node, local_label);
2528 append_to_statement_list (t, &expr);
2531 return expr;
2534 /* Given a conditional expression EXPR with short-circuit boolean
2535 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2536 predicate apart into the equivalent sequence of conditionals. */
2538 static tree
2539 shortcut_cond_expr (tree expr)
2541 tree pred = TREE_OPERAND (expr, 0);
2542 tree then_ = TREE_OPERAND (expr, 1);
2543 tree else_ = TREE_OPERAND (expr, 2);
2544 tree true_label, false_label, end_label, t;
2545 tree *true_label_p;
2546 tree *false_label_p;
2547 bool emit_end, emit_false, jump_over_else;
2548 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2549 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2551 /* First do simple transformations. */
2552 if (!else_se)
2554 /* If there is no 'else', turn
2555 if (a && b) then c
2556 into
2557 if (a) if (b) then c. */
2558 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2560 /* Keep the original source location on the first 'if'. */
2561 location_t locus = EXPR_HAS_LOCATION (expr)
2562 ? EXPR_LOCATION (expr) : input_location;
2563 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2564 /* Set the source location of the && on the second 'if'. */
2565 if (EXPR_HAS_LOCATION (pred))
2566 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2567 then_ = shortcut_cond_expr (expr);
2568 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2569 pred = TREE_OPERAND (pred, 0);
2570 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2571 SET_EXPR_LOCATION (expr, locus);
2575 if (!then_se)
2577 /* If there is no 'then', turn
2578 if (a || b); else d
2579 into
2580 if (a); else if (b); else d. */
2581 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2583 /* Keep the original source location on the first 'if'. */
2584 location_t locus = EXPR_HAS_LOCATION (expr)
2585 ? EXPR_LOCATION (expr) : input_location;
2586 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2587 /* Set the source location of the || on the second 'if'. */
2588 if (EXPR_HAS_LOCATION (pred))
2589 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2590 else_ = shortcut_cond_expr (expr);
2591 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2592 pred = TREE_OPERAND (pred, 0);
2593 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2594 SET_EXPR_LOCATION (expr, locus);
2598 /* If we're done, great. */
2599 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2600 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2601 return expr;
2603 /* Otherwise we need to mess with gotos. Change
2604 if (a) c; else d;
2605 to
2606 if (a); else goto no;
2607 c; goto end;
2608 no: d; end:
2609 and recursively gimplify the condition. */
2611 true_label = false_label = end_label = NULL_TREE;
2613 /* If our arms just jump somewhere, hijack those labels so we don't
2614 generate jumps to jumps. */
2616 if (then_
2617 && TREE_CODE (then_) == GOTO_EXPR
2618 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2620 true_label = GOTO_DESTINATION (then_);
2621 then_ = NULL;
2622 then_se = false;
2625 if (else_
2626 && TREE_CODE (else_) == GOTO_EXPR
2627 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2629 false_label = GOTO_DESTINATION (else_);
2630 else_ = NULL;
2631 else_se = false;
2634 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2635 if (true_label)
2636 true_label_p = &true_label;
2637 else
2638 true_label_p = NULL;
2640 /* The 'else' branch also needs a label if it contains interesting code. */
2641 if (false_label || else_se)
2642 false_label_p = &false_label;
2643 else
2644 false_label_p = NULL;
2646 /* If there was nothing else in our arms, just forward the label(s). */
2647 if (!then_se && !else_se)
2648 return shortcut_cond_r (pred, true_label_p, false_label_p,
2649 EXPR_HAS_LOCATION (expr)
2650 ? EXPR_LOCATION (expr) : input_location);
2652 /* If our last subexpression already has a terminal label, reuse it. */
2653 if (else_se)
2654 t = expr_last (else_);
2655 else if (then_se)
2656 t = expr_last (then_);
2657 else
2658 t = NULL;
2659 if (t && TREE_CODE (t) == LABEL_EXPR)
2660 end_label = LABEL_EXPR_LABEL (t);
2662 /* If we don't care about jumping to the 'else' branch, jump to the end
2663 if the condition is false. */
2664 if (!false_label_p)
2665 false_label_p = &end_label;
2667 /* We only want to emit these labels if we aren't hijacking them. */
2668 emit_end = (end_label == NULL_TREE);
2669 emit_false = (false_label == NULL_TREE);
2671 /* We only emit the jump over the else clause if we have to--if the
2672 then clause may fall through. Otherwise we can wind up with a
2673 useless jump and a useless label at the end of gimplified code,
2674 which will cause us to think that this conditional as a whole
2675 falls through even if it doesn't. If we then inline a function
2676 which ends with such a condition, that can cause us to issue an
2677 inappropriate warning about control reaching the end of a
2678 non-void function. */
2679 jump_over_else = block_may_fallthru (then_);
2681 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2682 EXPR_HAS_LOCATION (expr)
2683 ? EXPR_LOCATION (expr) : input_location);
2685 expr = NULL;
2686 append_to_statement_list (pred, &expr);
2688 append_to_statement_list (then_, &expr);
2689 if (else_se)
2691 if (jump_over_else)
2693 tree last = expr_last (expr);
2694 t = build_and_jump (&end_label);
2695 if (EXPR_HAS_LOCATION (last))
2696 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2697 append_to_statement_list (t, &expr);
2699 if (emit_false)
2701 t = build1 (LABEL_EXPR, void_type_node, false_label);
2702 append_to_statement_list (t, &expr);
2704 append_to_statement_list (else_, &expr);
2706 if (emit_end && end_label)
2708 t = build1 (LABEL_EXPR, void_type_node, end_label);
2709 append_to_statement_list (t, &expr);
2712 return expr;
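/* A worked example (label names are hypothetical): for

     if (a && b) f (); else g ();

   the two routines above produce approximately

     if (a) ; else goto no;
     if (b) ; else goto no;
     f ();
     goto end;
     no: g ();
     end:

   which preserves the short-circuit evaluation of the predicate.  */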
2715 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2717 tree
2718 gimple_boolify (tree expr)
2720 tree type = TREE_TYPE (expr);
2721 location_t loc = EXPR_LOCATION (expr);
2723 if (TREE_CODE (type) == BOOLEAN_TYPE)
2724 return expr;
2726 switch (TREE_CODE (expr))
2728 case TRUTH_AND_EXPR:
2729 case TRUTH_OR_EXPR:
2730 case TRUTH_XOR_EXPR:
2731 case TRUTH_ANDIF_EXPR:
2732 case TRUTH_ORIF_EXPR:
2733 /* Also boolify the arguments of truth exprs. */
2734 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2735 /* FALLTHRU */
2737 case TRUTH_NOT_EXPR:
2738 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2739 /* FALLTHRU */
2741 case EQ_EXPR: case NE_EXPR:
2742 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2743 /* These expressions always produce boolean results. */
2744 TREE_TYPE (expr) = boolean_type_node;
2745 return expr;
2747 default:
2748 /* Other expressions that get here must have boolean values, but
2749 might need to be converted to the appropriate mode. */
2750 return fold_convert_loc (loc, boolean_type_node, expr);
2754 /* Given a conditional expression *EXPR_P without side effects, gimplify
2755 its operands. New statements are inserted into PRE_P. */
2757 static enum gimplify_status
2758 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2760 tree expr = *expr_p, cond;
2761 enum gimplify_status ret, tret;
2762 enum tree_code code;
2764 cond = gimple_boolify (COND_EXPR_COND (expr));
2766 /* We need to handle && and || specially, as their gimplification
2767 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
2768 code = TREE_CODE (cond);
2769 if (code == TRUTH_ANDIF_EXPR)
2770 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2771 else if (code == TRUTH_ORIF_EXPR)
2772 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2773 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2774 COND_EXPR_COND (*expr_p) = cond;
2776 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2777 is_gimple_val, fb_rvalue);
2778 ret = MIN (ret, tret);
2779 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2780 is_gimple_val, fb_rvalue);
2782 return MIN (ret, tret);
2785 /* Returns true if evaluating EXPR could trap.
2786 EXPR is GENERIC, while tree_could_trap_p can be called
2787 only on GIMPLE. */
2789 static bool
2790 generic_expr_could_trap_p (tree expr)
2792 unsigned i, n;
2794 if (!expr || is_gimple_val (expr))
2795 return false;
2797 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2798 return true;
2800 n = TREE_OPERAND_LENGTH (expr);
2801 for (i = 0; i < n; i++)
2802 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2803 return true;
2805 return false;
2808 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2809 into
2811 if (p) if (p)
2812 t1 = a; a;
2813 else or else
2814 t1 = b; b;
2815 t1;
2817 The second form is used when *EXPR_P is of type void.
2819 PRE_P points to the list where side effects that must happen before
2820 *EXPR_P should be stored. */
2822 static enum gimplify_status
2823 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2825 tree expr = *expr_p;
2826 tree tmp, type, arm1, arm2;
2827 enum gimplify_status ret;
2828 tree label_true, label_false, label_cont;
2829 bool have_then_clause_p, have_else_clause_p;
2830 gimple gimple_cond;
2831 enum tree_code pred_code;
2832 gimple_seq seq = NULL;
2833 location_t loc = EXPR_LOCATION (*expr_p);
2835 type = TREE_TYPE (expr);
2837 /* If this COND_EXPR has a value, copy the values into a temporary within
2838 the arms. */
2839 if (! VOID_TYPE_P (type))
2841 tree result;
2843 /* If an rvalue is ok or we do not require an lvalue, avoid creating
2844 an addressable temporary. */
2845 if (((fallback & fb_rvalue)
2846 || !(fallback & fb_lvalue))
2847 && !TREE_ADDRESSABLE (type))
2849 if (gimplify_ctxp->allow_rhs_cond_expr
2850 /* If either branch has side effects or could trap, it can't be
2851 evaluated unconditionally. */
2852 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2853 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2854 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2855 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2856 return gimplify_pure_cond_expr (expr_p, pre_p);
2858 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2859 ret = GS_ALL_DONE;
2861 else
2863 tree type = build_pointer_type (TREE_TYPE (expr));
2865 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2866 TREE_OPERAND (expr, 1) =
2867 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));
2869 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2870 TREE_OPERAND (expr, 2) =
2871 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));
2873 tmp = create_tmp_var (type, "iftmp");
2875 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2876 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2878 result = build_fold_indirect_ref_loc (loc, tmp);
2881 /* Build the then clause, 't1 = a;'. But don't build an assignment
2882 if this branch is void; in C++ it can be, if it's a throw. */
2883 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2884 TREE_OPERAND (expr, 1)
2885 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2887 /* Build the else clause, 't1 = b;'. */
2888 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2889 TREE_OPERAND (expr, 2)
2890 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2892 TREE_TYPE (expr) = void_type_node;
2893 recalculate_side_effects (expr);
2895 /* Move the COND_EXPR to the prequeue. */
2896 gimplify_stmt (&expr, pre_p);
2898 *expr_p = result;
2899 return GS_ALL_DONE;
2902 /* Make sure the condition has BOOLEAN_TYPE. */
2903 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2905 /* Break apart && and || conditions. */
2906 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2907 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2909 expr = shortcut_cond_expr (expr);
2911 if (expr != *expr_p)
2913 *expr_p = expr;
2915 /* We can't rely on gimplify_expr to re-gimplify the expanded
2916 form properly, as cleanups might cause the target labels to be
2917 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2918 set up a conditional context. */
2919 gimple_push_condition ();
2920 gimplify_stmt (expr_p, &seq);
2921 gimple_pop_condition (pre_p);
2922 gimple_seq_add_seq (pre_p, seq);
2924 return GS_ALL_DONE;
2928 /* Now do the normal gimplification. */
2930 /* Gimplify condition. */
2931 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2932 fb_rvalue);
2933 if (ret == GS_ERROR)
2934 return GS_ERROR;
2935 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2937 gimple_push_condition ();
2939 have_then_clause_p = have_else_clause_p = false;
2940 if (TREE_OPERAND (expr, 1) != NULL
2941 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2942 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2943 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2944 == current_function_decl)
2945 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2946 have different locations, otherwise we end up with incorrect
2947 location information on the branches. */
2948 && (optimize
2949 || !EXPR_HAS_LOCATION (expr)
2950 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2951 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2953 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2954 have_then_clause_p = true;
2956 else
2957 label_true = create_artificial_label (UNKNOWN_LOCATION);
2958 if (TREE_OPERAND (expr, 2) != NULL
2959 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2960 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2961 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2962 == current_function_decl)
2963 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2964 have different locations, otherwise we end up with incorrect
2965 location information on the branches. */
2966 && (optimize
2967 || !EXPR_HAS_LOCATION (expr)
2968 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
2969 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
2971 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
2972 have_else_clause_p = true;
2974 else
2975 label_false = create_artificial_label (UNKNOWN_LOCATION);
2977 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
2978 &arm2);
2980 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
2981 label_false);
2983 gimplify_seq_add_stmt (&seq, gimple_cond);
2984 label_cont = NULL_TREE;
2985 if (!have_then_clause_p)
2987 /* For if (...) {} else { code; } put label_true after
2988 the else block. */
2989 if (TREE_OPERAND (expr, 1) == NULL_TREE
2990 && !have_else_clause_p
2991 && TREE_OPERAND (expr, 2) != NULL_TREE)
2992 label_cont = label_true;
2993 else
2995 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
2996 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
2997 /* For if (...) { code; } else {} or
2998 if (...) { code; } else goto label; or
2999 if (...) { code; return; } else { ... }
3000 label_cont isn't needed. */
3001 if (!have_else_clause_p
3002 && TREE_OPERAND (expr, 2) != NULL_TREE
3003 && gimple_seq_may_fallthru (seq))
3005 gimple g;
3006 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3008 g = gimple_build_goto (label_cont);
3010 /* GIMPLE_COND's are very low level; they have embedded
3011 gotos. This particular embedded goto should not be marked
3012 with the location of the original COND_EXPR, as it would
3013 correspond to the COND_EXPR's condition, not the ELSE or the
3014 THEN arms. To avoid marking it with the wrong location, flag
3015 it as "no location". */
3016 gimple_set_do_not_emit_location (g);
3018 gimplify_seq_add_stmt (&seq, g);
3022 if (!have_else_clause_p)
3024 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3025 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3027 if (label_cont)
3028 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3030 gimple_pop_condition (pre_p);
3031 gimple_seq_add_seq (pre_p, seq);
3033 if (ret == GS_ERROR)
3034 ; /* Do nothing. */
3035 else if (have_then_clause_p || have_else_clause_p)
3036 ret = GS_ALL_DONE;
3037 else
3039 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3040 expr = TREE_OPERAND (expr, 0);
3041 gimplify_stmt (&expr, pre_p);
3044 *expr_p = NULL;
3045 return ret;
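/* Illustration (label names are hypothetical): at statement level

     if (p) f (); else g ();

   is lowered to the explicit-label form used by GIMPLE_COND, roughly

     if (p != 0) goto <D.1>; else goto <D.2>;
     <D.1>: f (); goto <D.3>;
     <D.2>: g ();
     <D.3>:

   with the labels created artificially unless the arms were already plain
   gotos that could be reused directly.  */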
3048 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3049 to be marked addressable.
3051 We cannot rely on such an expression being directly markable if a temporary
3052 has been created by the gimplification. In this case, we create another
3053 temporary and initialize it with a copy, which will become a store after we
3054 mark it addressable. This can happen if the front-end passed us something
3055 that it could not mark addressable yet, like a Fortran pass-by-reference
3056 parameter (int) floatvar. */
3058 static void
3059 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3061 while (handled_component_p (*expr_p))
3062 expr_p = &TREE_OPERAND (*expr_p, 0);
3063 if (is_gimple_reg (*expr_p))
3064 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3067 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3068 a call to __builtin_memcpy. */
3070 static enum gimplify_status
3071 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3072 gimple_seq *seq_p)
3074 tree t, to, to_ptr, from, from_ptr;
3075 gimple gs;
3076 location_t loc = EXPR_LOCATION (*expr_p);
3078 to = TREE_OPERAND (*expr_p, 0);
3079 from = TREE_OPERAND (*expr_p, 1);
3081 /* Mark the RHS addressable. Beware that it may not be possible to do so
3082 directly if a temporary has been created by the gimplification. */
3083 prepare_gimple_addressable (&from, seq_p);
3085 mark_addressable (from);
3086 from_ptr = build_fold_addr_expr_loc (loc, from);
3087 gimplify_arg (&from_ptr, seq_p, loc);
3089 mark_addressable (to);
3090 to_ptr = build_fold_addr_expr_loc (loc, to);
3091 gimplify_arg (&to_ptr, seq_p, loc);
3093 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3095 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3097 if (want_value)
3099 /* tmp = memcpy() */
3100 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3101 gimple_call_set_lhs (gs, t);
3102 gimplify_seq_add_stmt (seq_p, gs);
3104 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3105 return GS_ALL_DONE;
3108 gimplify_seq_add_stmt (seq_p, gs);
3109 *expr_p = NULL;
3110 return GS_ALL_DONE;
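/* Sketch of the result: an aggregate assignment  to = from  that reaches
   this routine (typically because its size is not a compile-time
   constant) is emitted as roughly

     __builtin_memcpy (&to, &from, size);

   and when WANT_VALUE is true the pointer returned by memcpy is captured
   in a temporary and dereferenced to stand for the value of the original
   assignment.  */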
3113 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3114 a call to __builtin_memset. In this case we know that the RHS is
3115 a CONSTRUCTOR with an empty element list. */
3117 static enum gimplify_status
3118 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3119 gimple_seq *seq_p)
3121 tree t, from, to, to_ptr;
3122 gimple gs;
3123 location_t loc = EXPR_LOCATION (*expr_p);
3125 /* Assert our assumptions, to abort instead of producing wrong code
3126 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3127 not be immediately exposed. */
3128 from = TREE_OPERAND (*expr_p, 1);
3129 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3130 from = TREE_OPERAND (from, 0);
3132 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3133 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3135 /* Now proceed. */
3136 to = TREE_OPERAND (*expr_p, 0);
3138 to_ptr = build_fold_addr_expr_loc (loc, to);
3139 gimplify_arg (&to_ptr, seq_p, loc);
3140 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3142 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3144 if (want_value)
3146 /* tmp = memset() */
3147 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3148 gimple_call_set_lhs (gs, t);
3149 gimplify_seq_add_stmt (seq_p, gs);
3151 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3152 return GS_ALL_DONE;
3155 gimplify_seq_add_stmt (seq_p, gs);
3156 *expr_p = NULL;
3157 return GS_ALL_DONE;
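/* Sketch of the result: a clearing assignment whose RHS is an empty
   CONSTRUCTOR, e.g.  to = (T) {} , becomes roughly

     __builtin_memset (&to, 0, size);

   mirroring the memcpy case above.  */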
3160 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3161 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3162 assignment. Returns non-null if we detect a potential overlap. */
3164 struct gimplify_init_ctor_preeval_data
3166 /* The base decl of the lhs object. May be NULL, in which case we
3167 have to assume the lhs is indirect. */
3168 tree lhs_base_decl;
3170 /* The alias set of the lhs object. */
3171 alias_set_type lhs_alias_set;
3174 static tree
3175 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3177 struct gimplify_init_ctor_preeval_data *data
3178 = (struct gimplify_init_ctor_preeval_data *) xdata;
3179 tree t = *tp;
3181 /* If we find the base object, obviously we have overlap. */
3182 if (data->lhs_base_decl == t)
3183 return t;
3185 /* If the constructor component is indirect, determine if we have a
3186 potential overlap with the lhs. The only bits of information we
3187 have to go on at this point are addressability and alias sets. */
3188 if (TREE_CODE (t) == INDIRECT_REF
3189 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3190 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3191 return t;
3193 /* If the constructor component is a call, determine if it can hide a
3194 potential overlap with the lhs through an INDIRECT_REF like above. */
3195 if (TREE_CODE (t) == CALL_EXPR)
3197 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3199 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3200 if (POINTER_TYPE_P (TREE_VALUE (type))
3201 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3202 && alias_sets_conflict_p (data->lhs_alias_set,
3203 get_alias_set
3204 (TREE_TYPE (TREE_VALUE (type)))))
3205 return t;
3208 if (IS_TYPE_OR_DECL_P (t))
3209 *walk_subtrees = 0;
3210 return NULL;
3213 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3214 force values that overlap with the lhs (as described by *DATA)
3215 into temporaries. */
3217 static void
3218 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3219 struct gimplify_init_ctor_preeval_data *data)
3221 enum gimplify_status one;
3223 /* If the value is constant, then there's nothing to pre-evaluate. */
3224 if (TREE_CONSTANT (*expr_p))
3226 /* Ensure it does not have side effects, it might contain a reference to
3227 the object we're initializing. */
3228 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3229 return;
3232 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3233 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3234 return;
3236 /* Recurse for nested constructors. */
3237 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3239 unsigned HOST_WIDE_INT ix;
3240 constructor_elt *ce;
3241 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3243 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3244 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3246 return;
3249 /* If this is a variable sized type, we must remember the size. */
3250 maybe_with_size_expr (expr_p);
3252 /* Gimplify the constructor element to something appropriate for the rhs
3253 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3254 the gimplifier will consider this a store to memory. Doing this
3255 gimplification now means that we won't have to deal with complicated
3256 language-specific trees, nor trees like SAVE_EXPR that can induce
3257 exponential search behavior. */
3258 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3259 if (one == GS_ERROR)
3261 *expr_p = NULL;
3262 return;
3265 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3266 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3267 always be true for all scalars, since is_gimple_mem_rhs insists on a
3268 temporary variable for them. */
3269 if (DECL_P (*expr_p))
3270 return;
3272 /* If this is of variable size, we have no choice but to assume it doesn't
3273 overlap since we can't make a temporary for it. */
3274 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3275 return;
3277 /* Otherwise, we must search for overlap ... */
3278 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3279 return;
3281 /* ... and if found, force the value into a temporary. */
3282 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
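/* Illustration (a sketch; the temporary name is hypothetical): for

     a = (struct S) { .x = a.y, .y = 0 };

   the read of a.y overlaps the object being initialized, so it is forced
   into a temporary before any element stores are emitted, roughly

     D.1 = a.y;
     a.x = D.1;
     a.y = 0;  */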
3285 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3286 a RANGE_EXPR in a CONSTRUCTOR for an array.
3288 var = lower;
3289 loop_entry:
3290 object[var] = value;
3291 if (var == upper)
3292 goto loop_exit;
3293 var = var + 1;
3294 goto loop_entry;
3295 loop_exit:
3297 We increment var _after_ the loop exit check because we might otherwise
3298 fail if upper == TYPE_MAX_VALUE (type for upper).
3300 Note that we never have to deal with SAVE_EXPRs here, because this has
3301 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3303 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3304 gimple_seq *, bool);
3306 static void
3307 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3308 tree value, tree array_elt_type,
3309 gimple_seq *pre_p, bool cleared)
3311 tree loop_entry_label, loop_exit_label, fall_thru_label;
3312 tree var, var_type, cref, tmp;
3314 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3315 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3316 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3318 /* Create and initialize the index variable. */
3319 var_type = TREE_TYPE (upper);
3320 var = create_tmp_var (var_type, NULL);
3321 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3323 /* Add the loop entry label. */
3324 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3326 /* Build the reference. */
3327 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3328 var, NULL_TREE, NULL_TREE);
3330 /* If the value is a CONSTRUCTOR, just call gimplify_init_ctor_eval to do
3331 the store. Otherwise just assign value to the reference. */
3333 if (TREE_CODE (value) == CONSTRUCTOR)
3334 /* NB we might have to call ourself recursively through
3335 gimplify_init_ctor_eval if the value is a constructor. */
3336 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3337 pre_p, cleared);
3338 else
3339 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3341 /* We exit the loop when the index var is equal to the upper bound. */
3342 gimplify_seq_add_stmt (pre_p,
3343 gimple_build_cond (EQ_EXPR, var, upper,
3344 loop_exit_label, fall_thru_label));
3346 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3348 /* Otherwise, increment the index var... */
3349 tmp = build2 (PLUS_EXPR, var_type, var,
3350 fold_convert (var_type, integer_one_node));
3351 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3353 /* ...and jump back to the loop entry. */
3354 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3356 /* Add the loop exit label. */
3357 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3360 /* Return true if FDECL is accessing a field that is zero sized. */
3362 static bool
3363 zero_sized_field_decl (const_tree fdecl)
3365 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3366 && integer_zerop (DECL_SIZE (fdecl)))
3367 return true;
3368 return false;
3371 /* Return true if TYPE is zero sized. */
3373 static bool
3374 zero_sized_type (const_tree type)
3376 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3377 && integer_zerop (TYPE_SIZE (type)))
3378 return true;
3379 return false;
3382 /* A subroutine of gimplify_init_constructor. Generate individual
3383 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3384 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3385 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3386 zeroed first. */
3388 static void
3389 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3390 gimple_seq *pre_p, bool cleared)
3392 tree array_elt_type = NULL;
3393 unsigned HOST_WIDE_INT ix;
3394 tree purpose, value;
3396 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3397 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3399 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3401 tree cref;
3403 /* NULL values are created above for gimplification errors. */
3404 if (value == NULL)
3405 continue;
3407 if (cleared && initializer_zerop (value))
3408 continue;
3410 /* ??? Here's to hoping the front end fills in all of the indices,
3411 so we don't have to figure out what's missing ourselves. */
3412 gcc_assert (purpose);
3414 /* Skip zero-sized fields, unless value has side-effects. This can
3415 happen with calls to functions returning a zero-sized type, which
3416 we shouldn't discard. As a number of downstream passes don't
3417 expect sets of zero-sized fields, we rely on the gimplification of
3418 the MODIFY_EXPR we make below to drop the assignment statement. */
3419 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3420 continue;
3422 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3423 whole range. */
3424 if (TREE_CODE (purpose) == RANGE_EXPR)
3426 tree lower = TREE_OPERAND (purpose, 0);
3427 tree upper = TREE_OPERAND (purpose, 1);
3429 /* If the lower bound is equal to upper, just treat it as if
3430 upper was the index. */
3431 if (simple_cst_equal (lower, upper))
3432 purpose = upper;
3433 else
3435 gimplify_init_ctor_eval_range (object, lower, upper, value,
3436 array_elt_type, pre_p, cleared);
3437 continue;
3441 if (array_elt_type)
3443 /* Do not use bitsizetype for ARRAY_REF indices. */
3444 if (TYPE_DOMAIN (TREE_TYPE (object)))
3445 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3446 purpose);
3447 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3448 purpose, NULL_TREE, NULL_TREE);
3450 else
3452 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3453 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3454 unshare_expr (object), purpose, NULL_TREE);
3457 if (TREE_CODE (value) == CONSTRUCTOR
3458 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3459 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3460 pre_p, cleared);
3461 else
3463 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3464 gimplify_and_add (init, pre_p);
3465 ggc_free (init);
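/* Illustration (a sketch): with CLEARED true, a mostly-zero initializer
   such as

     int a[32] = { [3] = x, [17] = 5 };

   is emitted as a block clear by the caller followed by stores for the
   interesting elements only, roughly

     a = {};
     a[3] = x;
     a[17] = 5;

   since zero-valued elements are skipped once the object is cleared.  */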
3471 /* Returns the appropriate RHS predicate for this LHS. */
3473 gimple_predicate
3474 rhs_predicate_for (tree lhs)
3476 if (is_gimple_reg (lhs))
3477 return is_gimple_reg_rhs_or_call;
3478 else
3479 return is_gimple_mem_rhs_or_call;
3482 /* Gimplify a C99 compound literal expression. This just means adding
3483 the DECL_EXPR before the current statement and using its anonymous
3484 decl instead. */
3486 static enum gimplify_status
3487 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3489 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3490 tree decl = DECL_EXPR_DECL (decl_s);
3491 /* Mark the decl as addressable if the compound literal
3492 expression is addressable now, otherwise it is marked too late
3493 after we gimplify the initialization expression. */
3494 if (TREE_ADDRESSABLE (*expr_p))
3495 TREE_ADDRESSABLE (decl) = 1;
3497 /* Preliminarily mark non-addressed complex variables as eligible
3498 for promotion to gimple registers. We'll transform their uses
3499 as we find them. */
3500 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3501 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3502 && !TREE_THIS_VOLATILE (decl)
3503 && !needs_to_live_in_memory (decl))
3504 DECL_GIMPLE_REG_P (decl) = 1;
3506 /* This decl isn't mentioned in the enclosing block, so add it to the
3507 list of temps. FIXME it seems a bit of a kludge to say that
3508 anonymous artificial vars aren't pushed, but everything else is. */
3509 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3510 gimple_add_tmp_var (decl);
3512 gimplify_and_add (decl_s, pre_p);
3513 *expr_p = decl;
3514 return GS_OK;
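/* Illustration (decl name is hypothetical): a C99 compound literal such as

     p = &(int) { 42 };

   is rewritten so that the anonymous object gets an ordinary declaration
   gimplified into PRE_P, roughly

     int D.1;
     D.1 = 42;
     p = &D.1;

   after which the literal itself is replaced by the decl.  */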
3517 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3518 return a new CONSTRUCTOR if something changed. */
3520 static tree
3521 optimize_compound_literals_in_ctor (tree orig_ctor)
3523 tree ctor = orig_ctor;
3524 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3525 unsigned int idx, num = VEC_length (constructor_elt, elts);
3527 for (idx = 0; idx < num; idx++)
3529 tree value = VEC_index (constructor_elt, elts, idx)->value;
3530 tree newval = value;
3531 if (TREE_CODE (value) == CONSTRUCTOR)
3532 newval = optimize_compound_literals_in_ctor (value);
3533 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3535 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3536 tree decl = DECL_EXPR_DECL (decl_s);
3537 tree init = DECL_INITIAL (decl);
3539 if (!TREE_ADDRESSABLE (value)
3540 && !TREE_ADDRESSABLE (decl)
3541 && init)
3542 newval = optimize_compound_literals_in_ctor (init);
3544 if (newval == value)
3545 continue;
3547 if (ctor == orig_ctor)
3549 ctor = copy_node (orig_ctor);
3550 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3551 elts = CONSTRUCTOR_ELTS (ctor);
3553 VEC_index (constructor_elt, elts, idx)->value = newval;
3555 return ctor;
3560 /* A subroutine of gimplify_modify_expr. Break out elements of a
3561 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3563 Note that we still need to clear any elements that don't have explicit
3564 initializers, so if not all elements are initialized we keep the
3565 original MODIFY_EXPR; we just remove all of the constructor elements.
3567 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3568 GS_ERROR if we would have to create a temporary when gimplifying
3569 this constructor. Otherwise, return GS_OK.
3571 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3573 static enum gimplify_status
3574 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3575 bool want_value, bool notify_temp_creation)
3577 tree object, ctor, type;
3578 enum gimplify_status ret;
3579 VEC(constructor_elt,gc) *elts;
3581 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3583 if (!notify_temp_creation)
3585 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3586 is_gimple_lvalue, fb_lvalue);
3587 if (ret == GS_ERROR)
3588 return ret;
3591 object = TREE_OPERAND (*expr_p, 0);
3592 ctor = TREE_OPERAND (*expr_p, 1) =
3593 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3594 type = TREE_TYPE (ctor);
3595 elts = CONSTRUCTOR_ELTS (ctor);
3596 ret = GS_ALL_DONE;
3598 switch (TREE_CODE (type))
3600 case RECORD_TYPE:
3601 case UNION_TYPE:
3602 case QUAL_UNION_TYPE:
3603 case ARRAY_TYPE:
3605 struct gimplify_init_ctor_preeval_data preeval_data;
3606 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3607 HOST_WIDE_INT num_nonzero_elements;
3608 bool cleared, valid_const_initializer;
3610 /* Aggregate types must lower constructors to initialization of
3611 individual elements. The exception is that a CONSTRUCTOR node
3612 with no elements indicates zero-initialization of the whole. */
3613 if (VEC_empty (constructor_elt, elts))
3615 if (notify_temp_creation)
3616 return GS_OK;
3617 break;
3620 /* Fetch information about the constructor to direct later processing.
3621 We might want to make static versions of it in various cases, and
3622 can only do so if it is known to be a valid constant initializer. */
3623 valid_const_initializer
3624 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3625 &num_ctor_elements, &cleared);
3627 /* If a const aggregate variable is being initialized, then it
3628 should never be a loss to promote the variable to be static. */
3629 if (valid_const_initializer
3630 && num_nonzero_elements > 1
3631 && TREE_READONLY (object)
3632 && TREE_CODE (object) == VAR_DECL
3633 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3635 if (notify_temp_creation)
3636 return GS_ERROR;
3637 DECL_INITIAL (object) = ctor;
3638 TREE_STATIC (object) = 1;
3639 if (!DECL_NAME (object))
3640 DECL_NAME (object) = create_tmp_var_name ("C");
3641 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3643 /* ??? C++ doesn't automatically append a .<number> to the
3644 assembler name, and even when it does, it looks at FE-private
3645 data structures to figure out what that number should be,
3646 which are not set for this variable. I suppose this is
3647 important for local statics for inline functions, which aren't
3648 "local" in the object file sense. So in order to get a unique
3649 TU-local symbol, we must invoke the lhd version now. */
3650 lhd_set_decl_assembler_name (object);
3652 *expr_p = NULL_TREE;
3653 break;
3656 /* If there are "lots" of initialized elements, even discounting
3657 those that are not address constants (and thus *must* be
3658 computed at runtime), then partition the constructor into
3659 constant and non-constant parts. Block copy the constant
3660 parts in, then generate code for the non-constant parts. */
3661 /* TODO. There's code in cp/typeck.c to do this. */
3663 num_type_elements = count_type_elements (type, true);
3665 /* If count_type_elements could not determine the number of type elements
3666 for a constant-sized object, assume clearing is needed.
3667 Don't do this for variable-sized objects, as store_constructor
3668 will ignore the clearing of variable-sized objects. */
3669 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3670 cleared = true;
3671 /* If there are "lots" of zeros, then block clear the object first. */
3672 else if (num_type_elements - num_nonzero_elements
3673 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3674 && num_nonzero_elements < num_type_elements/4)
3675 cleared = true;
3676         /* ??? This bit ought not be needed. For any element not present
3677            in the initializer, we should simply set it to zero. Except
3678 we'd need to *find* the elements that are not present, and that
3679 requires trickery to avoid quadratic compile-time behavior in
3680 large cases or excessive memory use in small cases. */
3681 else if (num_ctor_elements < num_type_elements)
3682 cleared = true;
3684 /* If there are "lots" of initialized elements, and all of them
3685 are valid address constants, then the entire initializer can
3686 be dropped to memory, and then memcpy'd out. Don't do this
3687 for sparse arrays, though, as it's more efficient to follow
3688 the standard CONSTRUCTOR behavior of memset followed by
3689 individual element initialization. Also don't do this for small
3690 all-zero initializers (which aren't big enough to merit
3691 clearing), and don't try to make bitwise copies of
3692 TREE_ADDRESSABLE types. */
3693 if (valid_const_initializer
3694 && !(cleared || num_nonzero_elements == 0)
3695 && !TREE_ADDRESSABLE (type))
3697 HOST_WIDE_INT size = int_size_in_bytes (type);
3698 unsigned int align;
3700 /* ??? We can still get unbounded array types, at least
3701 from the C++ front end. This seems wrong, but attempt
3702 to work around it for now. */
3703 if (size < 0)
3705 size = int_size_in_bytes (TREE_TYPE (object));
3706 if (size >= 0)
3707 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3710 /* Find the maximum alignment we can assume for the object. */
3711 /* ??? Make use of DECL_OFFSET_ALIGN. */
3712 if (DECL_P (object))
3713 align = DECL_ALIGN (object);
3714 else
3715 align = TYPE_ALIGN (type);
3717 if (size > 0
3718 && num_nonzero_elements > 1
3719 && !can_move_by_pieces (size, align))
3721 tree new_tree;
3723 if (notify_temp_creation)
3724 return GS_ERROR;
3726 new_tree = create_tmp_var_raw (type, "C");
3728 gimple_add_tmp_var (new_tree);
3729 TREE_STATIC (new_tree) = 1;
3730 TREE_READONLY (new_tree) = 1;
3731 DECL_INITIAL (new_tree) = ctor;
3732 if (align > DECL_ALIGN (new_tree))
3734 DECL_ALIGN (new_tree) = align;
3735 DECL_USER_ALIGN (new_tree) = 1;
3737 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3739 TREE_OPERAND (*expr_p, 1) = new_tree;
3741 /* This is no longer an assignment of a CONSTRUCTOR, but
3742 we still may have processing to do on the LHS. So
3743 pretend we didn't do anything here to let that happen. */
3744 return GS_UNHANDLED;
3748         /* If the target is volatile and we have nonzero elements,
3749 initialize the target from a temporary. */
3750 if (TREE_THIS_VOLATILE (object)
3751 && !TREE_ADDRESSABLE (type)
3752 && num_nonzero_elements > 0)
3754 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3755 TREE_OPERAND (*expr_p, 0) = temp;
3756 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3757 *expr_p,
3758 build2 (MODIFY_EXPR, void_type_node,
3759 object, temp));
3760 return GS_OK;
3763 if (notify_temp_creation)
3764 return GS_OK;
3766 /* If there are nonzero elements, pre-evaluate to capture elements
3767 overlapping with the lhs into temporaries. We must do this before
3768 clearing to fetch the values before they are zeroed-out. */
3769 if (num_nonzero_elements > 0)
3771 preeval_data.lhs_base_decl = get_base_address (object);
3772 if (!DECL_P (preeval_data.lhs_base_decl))
3773 preeval_data.lhs_base_decl = NULL;
3774 preeval_data.lhs_alias_set = get_alias_set (object);
3776 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3777 pre_p, post_p, &preeval_data);
3780 if (cleared)
3782 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3783 Note that we still have to gimplify, in order to handle the
3784 case of variable sized types. Avoid shared tree structures. */
3785 CONSTRUCTOR_ELTS (ctor) = NULL;
3786 TREE_SIDE_EFFECTS (ctor) = 0;
3787 object = unshare_expr (object);
3788 gimplify_stmt (expr_p, pre_p);
3791 /* If we have not block cleared the object, or if there are nonzero
3792 elements in the constructor, add assignments to the individual
3793 scalar fields of the object. */
3794 if (!cleared || num_nonzero_elements > 0)
3795 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3797 *expr_p = NULL_TREE;
3799 break;
3801 case COMPLEX_TYPE:
3803 tree r, i;
3805 if (notify_temp_creation)
3806 return GS_OK;
3808 /* Extract the real and imaginary parts out of the ctor. */
3809 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3810 r = VEC_index (constructor_elt, elts, 0)->value;
3811 i = VEC_index (constructor_elt, elts, 1)->value;
3812 if (r == NULL || i == NULL)
3814 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3815 if (r == NULL)
3816 r = zero;
3817 if (i == NULL)
3818 i = zero;
3821 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3822 represent creation of a complex value. */
3823 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3825 ctor = build_complex (type, r, i);
3826 TREE_OPERAND (*expr_p, 1) = ctor;
3828 else
3830 ctor = build2 (COMPLEX_EXPR, type, r, i);
3831 TREE_OPERAND (*expr_p, 1) = ctor;
3832 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3833 pre_p,
3834 post_p,
3835 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3836 fb_rvalue);
3839 break;
3841 case VECTOR_TYPE:
3843 unsigned HOST_WIDE_INT ix;
3844 constructor_elt *ce;
3846 if (notify_temp_creation)
3847 return GS_OK;
3849 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3850 if (TREE_CONSTANT (ctor))
3852 bool constant_p = true;
3853 tree value;
3855 /* Even when ctor is constant, it might contain non-*_CST
3856 elements, such as addresses or trapping values like
3857 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3858 in VECTOR_CST nodes. */
3859 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3860 if (!CONSTANT_CLASS_P (value))
3862 constant_p = false;
3863 break;
3866 if (constant_p)
3868 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3869 break;
3872 /* Don't reduce an initializer constant even if we can't
3873 make a VECTOR_CST. It won't do anything for us, and it'll
3874 prevent us from representing it as a single constant. */
3875 if (initializer_constant_valid_p (ctor, type))
3876 break;
3878 TREE_CONSTANT (ctor) = 0;
3881 /* Vector types use CONSTRUCTOR all the way through gimple
3882 compilation as a general initializer. */
3883 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3885 enum gimplify_status tret;
3886 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3887 fb_rvalue);
3888 if (tret == GS_ERROR)
3889 ret = GS_ERROR;
3891 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3892 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3894 break;
3896 default:
3897 /* So how did we get a CONSTRUCTOR for a scalar type? */
3898 gcc_unreachable ();
3901 if (ret == GS_ERROR)
3902 return GS_ERROR;
3903 else if (want_value)
3905 *expr_p = object;
3906 return GS_OK;
3908 else
3910 /* If we have gimplified both sides of the initializer but have
3911 not emitted an assignment, do so now. */
3912 if (*expr_p)
3914 tree lhs = TREE_OPERAND (*expr_p, 0);
3915 tree rhs = TREE_OPERAND (*expr_p, 1);
3916 gimple init = gimple_build_assign (lhs, rhs);
3917 gimplify_seq_add_stmt (pre_p, init);
3918 *expr_p = NULL;
3921 return GS_ALL_DONE;
3925 /* Given a pointer value OP0, return a simplified version of an
3926 indirection through OP0, or NULL_TREE if no simplification is
3927 possible. Note that the resulting type may be different from
3928 the type pointed to in the sense that it is still compatible
3929 from the langhooks point of view. */
3931 tree
3932 gimple_fold_indirect_ref (tree t)
3934 tree type = TREE_TYPE (TREE_TYPE (t));
3935 tree sub = t;
3936 tree subtype;
3938 STRIP_NOPS (sub);
3939 subtype = TREE_TYPE (sub);
3940 if (!POINTER_TYPE_P (subtype))
3941 return NULL_TREE;
3943 if (TREE_CODE (sub) == ADDR_EXPR)
3945 tree op = TREE_OPERAND (sub, 0);
3946 tree optype = TREE_TYPE (op);
3947 /* *&p => p */
3948 if (useless_type_conversion_p (type, optype))
3949 return op;
3951 /* *(foo *)&fooarray => fooarray[0] */
3952 if (TREE_CODE (optype) == ARRAY_TYPE
3953 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3954 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3956 tree type_domain = TYPE_DOMAIN (optype);
3957 tree min_val = size_zero_node;
3958 if (type_domain && TYPE_MIN_VALUE (type_domain))
3959 min_val = TYPE_MIN_VALUE (type_domain);
3960 if (TREE_CODE (min_val) == INTEGER_CST)
3961 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3963 /* *(foo *)&complexfoo => __real__ complexfoo */
3964 else if (TREE_CODE (optype) == COMPLEX_TYPE
3965 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3966 return fold_build1 (REALPART_EXPR, type, op);
3967 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3968 else if (TREE_CODE (optype) == VECTOR_TYPE
3969 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3971 tree part_width = TYPE_SIZE (type);
3972 tree index = bitsize_int (0);
3973 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3977 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
3978 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3979 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3981 tree op00 = TREE_OPERAND (sub, 0);
3982 tree op01 = TREE_OPERAND (sub, 1);
3983 tree op00type;
3985 STRIP_NOPS (op00);
3986 op00type = TREE_TYPE (op00);
3987 if (TREE_CODE (op00) == ADDR_EXPR
3988 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
3989 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
3991 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
3992 tree part_width = TYPE_SIZE (type);
3993 unsigned HOST_WIDE_INT part_widthi
3994 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
3995 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3996 tree index = bitsize_int (indexi);
3997 if (offset / part_widthi
3998 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
3999 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
4000 part_width, index);
4004 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
4005 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4006 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4008 tree op00 = TREE_OPERAND (sub, 0);
4009 tree op01 = TREE_OPERAND (sub, 1);
4010 tree op00type;
4012 STRIP_NOPS (op00);
4013 op00type = TREE_TYPE (op00);
4014 if (TREE_CODE (op00) == ADDR_EXPR
4015 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
4016 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4018 tree size = TYPE_SIZE_UNIT (type);
4019 if (tree_int_cst_equal (size, op01))
4020 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
4024 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4025 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4026 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4027 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4029 tree type_domain;
4030 tree min_val = size_zero_node;
4031 tree osub = sub;
4032 sub = gimple_fold_indirect_ref (sub);
4033 if (! sub)
4034 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4035 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4036 if (type_domain && TYPE_MIN_VALUE (type_domain))
4037 min_val = TYPE_MIN_VALUE (type_domain);
4038 if (TREE_CODE (min_val) == INTEGER_CST)
4039 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4042 return NULL_TREE;
4045 /* Given a pointer value OP0, return a simplified version of an
4046 indirection through OP0, or NULL_TREE if no simplification is
4047    possible. This may only be applied to the rhs of an expression.
4048 Note that the resulting type may be different from the type pointed
4049 to in the sense that it is still compatible from the langhooks
4050 point of view. */
4052 static tree
4053 gimple_fold_indirect_ref_rhs (tree t)
4055 return gimple_fold_indirect_ref (t);
4058 /* Subroutine of gimplify_modify_expr to do simplifications of
4059 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4060 something changes. */
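/* Illustrative examples (editorial note, not taken from the sources): an
   assignment whose RHS is a TARGET_EXPR, say "x = TARGET_EXPR <D.1234, f ()>"
   where D.1234 stands for the slot, is rewritten below to initialize X
   directly from TARGET_EXPR_INITIAL, roughly "x = f ()"; an assignment from
   a CONSTRUCTOR is instead split into per-element assignments by
   gimplify_init_constructor.  */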
4062 static enum gimplify_status
4063 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4064 gimple_seq *pre_p, gimple_seq *post_p,
4065 bool want_value)
4067 enum gimplify_status ret = GS_OK;
4069 while (ret != GS_UNHANDLED)
4070 switch (TREE_CODE (*from_p))
4072 case VAR_DECL:
4073 /* If we're assigning from a read-only variable initialized with
4074 a constructor, do the direct assignment from the constructor,
4075          but only if neither source nor target is volatile, since this
4076 latter assignment might end up being done on a per-field basis. */
4077 if (DECL_INITIAL (*from_p)
4078 && TREE_READONLY (*from_p)
4079 && !TREE_THIS_VOLATILE (*from_p)
4080 && !TREE_THIS_VOLATILE (*to_p)
4081 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4083 tree old_from = *from_p;
4085 /* Move the constructor into the RHS. */
4086 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4088 /* Let's see if gimplify_init_constructor will need to put
4089 it in memory. If so, revert the change. */
4090 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
4091 if (ret == GS_ERROR)
4093 *from_p = old_from;
4094 /* Fall through. */
4096 else
4098 ret = GS_OK;
4099 break;
4102 ret = GS_UNHANDLED;
4103 break;
4104 case INDIRECT_REF:
4106 /* If we have code like
4108 *(const A*)(A*)&x
4110            where the type of "x" is a (possibly cv-qualified) variant
4111            of "A", treat the entire expression as identical to "x".
4112 This kind of code arises in C++ when an object is bound
4113 to a const reference, and if "x" is a TARGET_EXPR we want
4114 to take advantage of the optimization below. */
4115 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4116 if (t)
4118 *from_p = t;
4119 ret = GS_OK;
4121 else
4122 ret = GS_UNHANDLED;
4123 break;
4126 case TARGET_EXPR:
4128 /* If we are initializing something from a TARGET_EXPR, strip the
4129 TARGET_EXPR and initialize it directly, if possible. This can't
4130 be done if the initializer is void, since that implies that the
4131 temporary is set in some non-trivial way.
4133 ??? What about code that pulls out the temp and uses it
4134 elsewhere? I think that such code never uses the TARGET_EXPR as
4135 an initializer. If I'm wrong, we'll die because the temp won't
4136 have any RTL. In that case, I guess we'll need to replace
4137 references somehow. */
4138 tree init = TARGET_EXPR_INITIAL (*from_p);
4140 if (init
4141 && !VOID_TYPE_P (TREE_TYPE (init)))
4143 *from_p = init;
4144 ret = GS_OK;
4146 else
4147 ret = GS_UNHANDLED;
4149 break;
4151 case COMPOUND_EXPR:
4152 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4153 caught. */
4154 gimplify_compound_expr (from_p, pre_p, true);
4155 ret = GS_OK;
4156 break;
4158 case CONSTRUCTOR:
4159 /* If we're initializing from a CONSTRUCTOR, break this into
4160 individual MODIFY_EXPRs. */
4161 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4162 false);
4164 case COND_EXPR:
4165 /* If we're assigning to a non-register type, push the assignment
4166 down into the branches. This is mandatory for ADDRESSABLE types,
4167 since we cannot generate temporaries for such, but it saves a
4168 copy in other cases as well. */
4169 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4171 /* This code should mirror the code in gimplify_cond_expr. */
4172 enum tree_code code = TREE_CODE (*expr_p);
4173 tree cond = *from_p;
4174 tree result = *to_p;
4176 ret = gimplify_expr (&result, pre_p, post_p,
4177 is_gimple_lvalue, fb_lvalue);
4178 if (ret != GS_ERROR)
4179 ret = GS_OK;
4181 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4182 TREE_OPERAND (cond, 1)
4183 = build2 (code, void_type_node, result,
4184 TREE_OPERAND (cond, 1));
4185 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4186 TREE_OPERAND (cond, 2)
4187 = build2 (code, void_type_node, unshare_expr (result),
4188 TREE_OPERAND (cond, 2));
4190 TREE_TYPE (cond) = void_type_node;
4191 recalculate_side_effects (cond);
4193 if (want_value)
4195 gimplify_and_add (cond, pre_p);
4196 *expr_p = unshare_expr (result);
4198 else
4199 *expr_p = cond;
4200 return ret;
4202 else
4203 ret = GS_UNHANDLED;
4204 break;
4206 case CALL_EXPR:
4207 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4208 return slot so that we don't generate a temporary. */
4209 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4210 && aggregate_value_p (*from_p, *from_p))
4212 bool use_target;
4214 if (!(rhs_predicate_for (*to_p))(*from_p))
4215 /* If we need a temporary, *to_p isn't accurate. */
4216 use_target = false;
4217 else if (TREE_CODE (*to_p) == RESULT_DECL
4218 && DECL_NAME (*to_p) == NULL_TREE
4219 && needs_to_live_in_memory (*to_p))
4220 /* It's OK to use the return slot directly unless it's an NRV. */
4221 use_target = true;
4222 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4223 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4224 /* Don't force regs into memory. */
4225 use_target = false;
4226 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4227 /* It's OK to use the target directly if it's being
4228 initialized. */
4229 use_target = true;
4230 else if (!is_gimple_non_addressable (*to_p))
4231 /* Don't use the original target if it's already addressable;
4232 if its address escapes, and the called function uses the
4233 NRV optimization, a conforming program could see *to_p
4234 change before the called function returns; see c++/19317.
4235 When optimizing, the return_slot pass marks more functions
4236 as safe after we have escape info. */
4237 use_target = false;
4238 else
4239 use_target = true;
4241 if (use_target)
4243 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4244 mark_addressable (*to_p);
4248 ret = GS_UNHANDLED;
4249 break;
4251 /* If we're initializing from a container, push the initialization
4252 inside it. */
4253 case CLEANUP_POINT_EXPR:
4254 case BIND_EXPR:
4255 case STATEMENT_LIST:
4257 tree wrap = *from_p;
4258 tree t;
4260 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4261 fb_lvalue);
4262 if (ret != GS_ERROR)
4263 ret = GS_OK;
4265 t = voidify_wrapper_expr (wrap, *expr_p);
4266 gcc_assert (t == *expr_p);
4268 if (want_value)
4270 gimplify_and_add (wrap, pre_p);
4271 *expr_p = unshare_expr (*to_p);
4273 else
4274 *expr_p = wrap;
4275 return GS_OK;
4278 case COMPOUND_LITERAL_EXPR:
4280 tree complit = TREE_OPERAND (*expr_p, 1);
4281 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4282 tree decl = DECL_EXPR_DECL (decl_s);
4283 tree init = DECL_INITIAL (decl);
4285 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4286 into struct T x = { 0, 1, 2 } if the address of the
4287 compound literal has never been taken. */
4288 if (!TREE_ADDRESSABLE (complit)
4289 && !TREE_ADDRESSABLE (decl)
4290 && init)
4292 *expr_p = copy_node (*expr_p);
4293 TREE_OPERAND (*expr_p, 1) = init;
4294 return GS_OK;
4298 default:
4299 ret = GS_UNHANDLED;
4300 break;
4303 return ret;
4307 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4308 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4309 DECL_GIMPLE_REG_P set.
4311 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4312 other, unmodified part of the complex object just before the total store.
4313 As a consequence, if the object is still uninitialized, an undefined value
4314 will be loaded into a register, which may result in a spurious exception
4315 if the register is floating-point and the value happens to be a signaling
4316 NaN for example. Then the fully-fledged complex operations lowering pass
4317 followed by a DCE pass are necessary in order to fix things up. */
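/* For example (illustrative only): gimplifying "__real__ z = r" loads
   "__imag__ z" into a formal temporary, here called tmp, and emits the
   total store "z = COMPLEX_EXPR <r, tmp>" in place of the partial one.  */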
4319 static enum gimplify_status
4320 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4321 bool want_value)
4323 enum tree_code code, ocode;
4324 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4326 lhs = TREE_OPERAND (*expr_p, 0);
4327 rhs = TREE_OPERAND (*expr_p, 1);
4328 code = TREE_CODE (lhs);
4329 lhs = TREE_OPERAND (lhs, 0);
4331 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4332 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4333 other = get_formal_tmp_var (other, pre_p);
4335 realpart = code == REALPART_EXPR ? rhs : other;
4336 imagpart = code == REALPART_EXPR ? other : rhs;
4338 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4339 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4340 else
4341 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4343 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4344 *expr_p = (want_value) ? rhs : NULL_TREE;
4346 return GS_ALL_DONE;
4350 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4352 modify_expr
4353 : varname '=' rhs
4354 | '*' ID '=' rhs
4356 PRE_P points to the list where side effects that must happen before
4357 *EXPR_P should be stored.
4359 POST_P points to the list where side effects that must happen after
4360 *EXPR_P should be stored.
4362 WANT_VALUE is nonzero iff we want to use the value of this expression
4363 in another expression. */
4365 static enum gimplify_status
4366 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4367 bool want_value)
4369 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4370 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4371 enum gimplify_status ret = GS_UNHANDLED;
4372 gimple assign;
4373 location_t loc = EXPR_LOCATION (*expr_p);
4375 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4376 || TREE_CODE (*expr_p) == INIT_EXPR);
4378 /* Insert pointer conversions required by the middle-end that are not
4379      required by the frontend. This fixes middle-end type checking for,
4380      for example, gcc.dg/redecl-6.c. */
4381 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4383 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4384 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4385 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4388 /* See if any simplifications can be done based on what the RHS is. */
4389 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4390 want_value);
4391 if (ret != GS_UNHANDLED)
4392 return ret;
4394   /* For zero-sized types, only gimplify the left-hand side and right-hand
4395 side as statements and throw away the assignment. Do this after
4396 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4397 types properly. */
4398 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4400 gimplify_stmt (from_p, pre_p);
4401 gimplify_stmt (to_p, pre_p);
4402 *expr_p = NULL_TREE;
4403 return GS_ALL_DONE;
4406 /* If the value being copied is of variable width, compute the length
4407 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4408 before gimplifying any of the operands so that we can resolve any
4409 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4410 the size of the expression to be copied, not of the destination, so
4411 that is what we must do here. */
4412 maybe_with_size_expr (from_p);
4414 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4415 if (ret == GS_ERROR)
4416 return ret;
4418 /* As a special case, we have to temporarily allow for assignments
4419 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4420 a toplevel statement, when gimplifying the GENERIC expression
4421 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4422 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4424 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4425 prevent gimplify_expr from trying to create a new temporary for
4426 foo's LHS, we tell it that it should only gimplify until it
4427 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4428 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4429 and all we need to do here is set 'a' to be its LHS. */
4430 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4431 fb_rvalue);
4432 if (ret == GS_ERROR)
4433 return ret;
4435 /* Now see if the above changed *from_p to something we handle specially. */
4436 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4437 want_value);
4438 if (ret != GS_UNHANDLED)
4439 return ret;
4441   /* If we've got a variable-sized assignment between two lvalues (i.e. one that
4442      does not involve a call), then we can make things a bit more straightforward
4443 by converting the assignment to memcpy or memset. */
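  /* Illustrative sketch: for a variable-length type, "a = b" is turned into
     a block copy built by gimplify_modify_expr_to_memcpy (roughly
     "memcpy (&a, &b, size)"), while an assignment from a CONSTRUCTOR goes
     through gimplify_modify_expr_to_memset instead.  */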
4444 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4446 tree from = TREE_OPERAND (*from_p, 0);
4447 tree size = TREE_OPERAND (*from_p, 1);
4449 if (TREE_CODE (from) == CONSTRUCTOR)
4450 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4452 if (is_gimple_addressable (from))
4454 *from_p = from;
4455 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4456 pre_p);
4460 /* Transform partial stores to non-addressable complex variables into
4461 total stores. This allows us to use real instead of virtual operands
4462 for these variables, which improves optimization. */
4463 if ((TREE_CODE (*to_p) == REALPART_EXPR
4464 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4465 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4466 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4468 /* Try to alleviate the effects of the gimplification creating artificial
4469 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4470 if (!gimplify_ctxp->into_ssa
4471 && DECL_P (*from_p)
4472 && DECL_IGNORED_P (*from_p)
4473 && DECL_P (*to_p)
4474 && !DECL_IGNORED_P (*to_p))
4476 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4477 DECL_NAME (*from_p)
4478 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4479 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4480 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4483 if (TREE_CODE (*from_p) == CALL_EXPR)
4485 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4486 instead of a GIMPLE_ASSIGN. */
4487 assign = gimple_build_call_from_tree (*from_p);
4488 if (!gimple_call_noreturn_p (assign))
4489 gimple_call_set_lhs (assign, *to_p);
4491 else
4493 assign = gimple_build_assign (*to_p, *from_p);
4494 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4497 gimplify_seq_add_stmt (pre_p, assign);
4499 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4501 /* If we've somehow already got an SSA_NAME on the LHS, then
4502 we've probably modified it twice. Not good. */
4503 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4504 *to_p = make_ssa_name (*to_p, assign);
4505 gimple_set_lhs (assign, *to_p);
4508 if (want_value)
4510 *expr_p = unshare_expr (*to_p);
4511 return GS_OK;
4513 else
4514 *expr_p = NULL;
4516 return GS_ALL_DONE;
4519 /* Gimplify a comparison between two variable-sized objects. Do this
4520 with a call to BUILT_IN_MEMCMP. */
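/* Illustrative sketch: for variable-sized operands, "a == b" becomes roughly
   "memcmp (&a, &b, size) == 0", where SIZE is the (placeholder-substituted)
   TYPE_SIZE_UNIT of the first operand and the original comparison code is
   kept against integer zero.  */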
4522 static enum gimplify_status
4523 gimplify_variable_sized_compare (tree *expr_p)
4525 tree op0 = TREE_OPERAND (*expr_p, 0);
4526 tree op1 = TREE_OPERAND (*expr_p, 1);
4527 tree t, arg, dest, src;
4528 location_t loc = EXPR_LOCATION (*expr_p);
4530 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4531 arg = unshare_expr (arg);
4532 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4533 src = build_fold_addr_expr_loc (loc, op1);
4534 dest = build_fold_addr_expr_loc (loc, op0);
4535 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4536 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4537 *expr_p
4538 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4540 return GS_OK;
4543 /* Gimplify a comparison between two aggregate objects of integral scalar
4544 mode as a comparison between the bitwise equivalent scalar values. */
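/* For example (illustrative, assuming a 4-byte aggregate whose mode is
   SImode): "x == y" becomes roughly
   "VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y)".  */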
4546 static enum gimplify_status
4547 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4549 location_t loc = EXPR_LOCATION (*expr_p);
4550 tree op0 = TREE_OPERAND (*expr_p, 0);
4551 tree op1 = TREE_OPERAND (*expr_p, 1);
4553 tree type = TREE_TYPE (op0);
4554 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4556 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4557 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4559 *expr_p
4560 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4562 return GS_OK;
4565 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4566 points to the expression to gimplify.
4568 Expressions of the form 'a && b' are gimplified to:
4570 a && b ? true : false
4572 LOCUS is the source location to be put on the generated COND_EXPR.
4573 gimplify_cond_expr will do the rest. */
4575 static enum gimplify_status
4576 gimplify_boolean_expr (tree *expr_p, location_t locus)
4578 /* Preserve the original type of the expression. */
4579 tree type = TREE_TYPE (*expr_p);
4581 *expr_p = build3 (COND_EXPR, type, *expr_p,
4582 fold_convert_loc (locus, type, boolean_true_node),
4583 fold_convert_loc (locus, type, boolean_false_node));
4585 SET_EXPR_LOCATION (*expr_p, locus);
4587 return GS_OK;
4590 /* Gimplifies an expression sequence. This function gimplifies each
4591 expression and re-writes the original expression with the last
4592 expression of the sequence in GIMPLE form.
4594 PRE_P points to the list where the side effects for all the
4595 expressions in the sequence will be emitted.
4597 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
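/* Illustrative example: gimplifying "(f (), g (), x)" emits the calls to f
   and g into PRE_P as statements and rewrites *EXPR_P to "x".  */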
4599 static enum gimplify_status
4600 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4602 tree t = *expr_p;
4606 tree *sub_p = &TREE_OPERAND (t, 0);
4608 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4609 gimplify_compound_expr (sub_p, pre_p, false);
4610 else
4611 gimplify_stmt (sub_p, pre_p);
4613 t = TREE_OPERAND (t, 1);
4615 while (TREE_CODE (t) == COMPOUND_EXPR);
4617 *expr_p = t;
4618 if (want_value)
4619 return GS_OK;
4620 else
4622 gimplify_stmt (expr_p, pre_p);
4623 return GS_ALL_DONE;
4628 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4629 gimplify. After gimplification, EXPR_P will point to a new temporary
4630 that holds the original value of the SAVE_EXPR node.
4632 PRE_P points to the list where side effects that must happen before
4633 *EXPR_P should be stored. */
4635 static enum gimplify_status
4636 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4638 enum gimplify_status ret = GS_ALL_DONE;
4639 tree val;
4641 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4642 val = TREE_OPERAND (*expr_p, 0);
4644 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4645 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4647       /* The operand may be a void-valued expression, such as a SAVE_EXPR
4648          generated by the Java frontend for class initialization. It is
4649          being executed only for its side effects. */
4650 if (TREE_TYPE (val) == void_type_node)
4652 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4653 is_gimple_stmt, fb_none);
4654 val = NULL;
4656 else
4657 val = get_initialized_tmp_var (val, pre_p, post_p);
4659 TREE_OPERAND (*expr_p, 0) = val;
4660 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4663 *expr_p = val;
4665 return ret;
4668 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4670 unary_expr
4671 : ...
4672 | '&' varname
4675 PRE_P points to the list where side effects that must happen before
4676 *EXPR_P should be stored.
4678 POST_P points to the list where side effects that must happen after
4679 *EXPR_P should be stored. */
4681 static enum gimplify_status
4682 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4684 tree expr = *expr_p;
4685 tree op0 = TREE_OPERAND (expr, 0);
4686 enum gimplify_status ret;
4687 location_t loc = EXPR_LOCATION (*expr_p);
4689 switch (TREE_CODE (op0))
4691 case INDIRECT_REF:
4692 case MISALIGNED_INDIRECT_REF:
4693 do_indirect_ref:
4694 /* Check if we are dealing with an expression of the form '&*ptr'.
4695 While the front end folds away '&*ptr' into 'ptr', these
4696 expressions may be generated internally by the compiler (e.g.,
4697 builtins like __builtin_va_end). */
4698 /* Caution: the silent array decomposition semantics we allow for
4699          ADDR_EXPR mean we can't always discard the pair. */
4700 /* Gimplification of the ADDR_EXPR operand may drop
4701 cv-qualification conversions, so make sure we add them if
4702 needed. */
4704 tree op00 = TREE_OPERAND (op0, 0);
4705 tree t_expr = TREE_TYPE (expr);
4706 tree t_op00 = TREE_TYPE (op00);
4708 if (!useless_type_conversion_p (t_expr, t_op00))
4709 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4710 *expr_p = op00;
4711 ret = GS_OK;
4713 break;
4715 case VIEW_CONVERT_EXPR:
4716 /* Take the address of our operand and then convert it to the type of
4717 this ADDR_EXPR.
4719          ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
4720 all clear. The impact of this transformation is even less clear. */
4722 /* If the operand is a useless conversion, look through it. Doing so
4723 guarantees that the ADDR_EXPR and its operand will remain of the
4724 same type. */
4725 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4726 op0 = TREE_OPERAND (op0, 0);
4728 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4729 build_fold_addr_expr_loc (loc,
4730 TREE_OPERAND (op0, 0)));
4731 ret = GS_OK;
4732 break;
4734 default:
4735 /* We use fb_either here because the C frontend sometimes takes
4736 the address of a call that returns a struct; see
4737 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4738 the implied temporary explicit. */
4740 /* Make the operand addressable. */
4741 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4742 is_gimple_addressable, fb_either);
4743 if (ret == GS_ERROR)
4744 break;
4746 /* Then mark it. Beware that it may not be possible to do so directly
4747 if a temporary has been created by the gimplification. */
4748 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4750 op0 = TREE_OPERAND (expr, 0);
4752 /* For various reasons, the gimplification of the expression
4753 may have made a new INDIRECT_REF. */
4754 if (TREE_CODE (op0) == INDIRECT_REF)
4755 goto do_indirect_ref;
4757 mark_addressable (TREE_OPERAND (expr, 0));
4759 /* The FEs may end up building ADDR_EXPRs early on a decl with
4760 an incomplete type. Re-build ADDR_EXPRs in canonical form
4761 here. */
4762 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4763 *expr_p = build_fold_addr_expr (op0);
4765 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4766 recompute_tree_invariant_for_addr_expr (*expr_p);
4768 /* If we re-built the ADDR_EXPR add a conversion to the original type
4769 if required. */
4770 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4771 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4773 break;
4776 return ret;
4779 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
4780    values; output operands should be gimple lvalues. */
4782 static enum gimplify_status
4783 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4785 tree expr;
4786 int noutputs;
4787 const char **oconstraints;
4788 int i;
4789 tree link;
4790 const char *constraint;
4791 bool allows_mem, allows_reg, is_inout;
4792 enum gimplify_status ret, tret;
4793 gimple stmt;
4794 VEC(tree, gc) *inputs;
4795 VEC(tree, gc) *outputs;
4796 VEC(tree, gc) *clobbers;
4797 VEC(tree, gc) *labels;
4798 tree link_next;
4800 expr = *expr_p;
4801 noutputs = list_length (ASM_OUTPUTS (expr));
4802 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4804 inputs = outputs = clobbers = labels = NULL;
4806 ret = GS_ALL_DONE;
4807 link_next = NULL_TREE;
4808 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4810 bool ok;
4811 size_t constraint_len;
4813 link_next = TREE_CHAIN (link);
4815 oconstraints[i]
4816 = constraint
4817 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4818 constraint_len = strlen (constraint);
4819 if (constraint_len == 0)
4820 continue;
4822 ok = parse_output_constraint (&constraint, i, 0, 0,
4823 &allows_mem, &allows_reg, &is_inout);
4824 if (!ok)
4826 ret = GS_ERROR;
4827 is_inout = false;
4830 if (!allows_reg && allows_mem)
4831 mark_addressable (TREE_VALUE (link));
4833 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4834 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4835 fb_lvalue | fb_mayfail);
4836 if (tret == GS_ERROR)
4838 error ("invalid lvalue in asm output %d", i);
4839 ret = tret;
4842 VEC_safe_push (tree, gc, outputs, link);
4843 TREE_CHAIN (link) = NULL_TREE;
4845 if (is_inout)
4847 /* An input/output operand. To give the optimizers more
4848 flexibility, split it into separate input and output
4849 operands. */
4850 tree input;
4851 char buf[10];
4853 /* Turn the in/out constraint into an output constraint. */
4854 char *p = xstrdup (constraint);
4855 p[0] = '=';
4856 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4858 /* And add a matching input constraint. */
4859 if (allows_reg)
4861 sprintf (buf, "%d", i);
4863 /* If there are multiple alternatives in the constraint,
4864              handle each of them individually. Those that allow a register
4865              will be replaced with the operand number; the others will stay
4866              unchanged. */
4867 if (strchr (p, ',') != NULL)
4869 size_t len = 0, buflen = strlen (buf);
4870 char *beg, *end, *str, *dst;
4872 for (beg = p + 1;;)
4874 end = strchr (beg, ',');
4875 if (end == NULL)
4876 end = strchr (beg, '\0');
4877 if ((size_t) (end - beg) < buflen)
4878 len += buflen + 1;
4879 else
4880 len += end - beg + 1;
4881 if (*end)
4882 beg = end + 1;
4883 else
4884 break;
4887 str = (char *) alloca (len);
4888 for (beg = p + 1, dst = str;;)
4890 const char *tem;
4891 bool mem_p, reg_p, inout_p;
4893 end = strchr (beg, ',');
4894 if (end)
4895 *end = '\0';
4896 beg[-1] = '=';
4897 tem = beg - 1;
4898 parse_output_constraint (&tem, i, 0, 0,
4899 &mem_p, &reg_p, &inout_p);
4900 if (dst != str)
4901 *dst++ = ',';
4902 if (reg_p)
4904 memcpy (dst, buf, buflen);
4905 dst += buflen;
4907 else
4909 if (end)
4910 len = end - beg;
4911 else
4912 len = strlen (beg);
4913 memcpy (dst, beg, len);
4914 dst += len;
4916 if (end)
4917 beg = end + 1;
4918 else
4919 break;
4921 *dst = '\0';
4922 input = build_string (dst - str, str);
4924 else
4925 input = build_string (strlen (buf), buf);
4927 else
4928 input = build_string (constraint_len - 1, constraint + 1);
4930 free (p);
4932 input = build_tree_list (build_tree_list (NULL_TREE, input),
4933 unshare_expr (TREE_VALUE (link)));
4934 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4938 link_next = NULL_TREE;
4939 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4941 link_next = TREE_CHAIN (link);
4942 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4943 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4944 oconstraints, &allows_mem, &allows_reg);
4946 /* If we can't make copies, we can only accept memory. */
4947 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4949 if (allows_mem)
4950 allows_reg = 0;
4951 else
4953 error ("impossible constraint in %<asm%>");
4954 error ("non-memory input %d must stay in memory", i);
4955 return GS_ERROR;
4959 /* If the operand is a memory input, it should be an lvalue. */
4960 if (!allows_reg && allows_mem)
4962 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4963 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4964 mark_addressable (TREE_VALUE (link));
4965 if (tret == GS_ERROR)
4967 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4968 input_location = EXPR_LOCATION (TREE_VALUE (link));
4969 error ("memory input %d is not directly addressable", i);
4970 ret = tret;
4973 else
4975 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4976 is_gimple_asm_val, fb_rvalue);
4977 if (tret == GS_ERROR)
4978 ret = tret;
4981 TREE_CHAIN (link) = NULL_TREE;
4982 VEC_safe_push (tree, gc, inputs, link);
4985 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
4986 VEC_safe_push (tree, gc, clobbers, link);
4988 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
4989 VEC_safe_push (tree, gc, labels, link);
4991 /* Do not add ASMs with errors to the gimple IL stream. */
4992 if (ret != GS_ERROR)
4994 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
4995 inputs, outputs, clobbers, labels);
4997 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4998 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5000 gimplify_seq_add_stmt (pre_p, stmt);
5003 return ret;
5006 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5007 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5008 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5009 return to this function.
5011 FIXME should we complexify the prequeue handling instead? Or use flags
5012 for all the cleanups and let the optimizer tighten them up? The current
5013 code seems pretty fragile; it will break on a cleanup within any
5014 non-conditional nesting. But any such nesting would be broken, anyway;
5015 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5016 and continues out of it. We can do that at the RTL level, though, so
5017 having an optimizer to tighten up try/finally regions would be a Good
5018 Thing. */
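/* Illustrative sketch: each GIMPLE_WITH_CLEANUP_EXPR found in the gimplified
   body is replaced below by a GIMPLE_TRY (GIMPLE_TRY_FINALLY, or
   GIMPLE_TRY_CATCH for EH-only cleanups) wrapping the statements that follow
   it, with the recorded cleanup as the cleanup sequence.  */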
5020 static enum gimplify_status
5021 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5023 gimple_stmt_iterator iter;
5024 gimple_seq body_sequence = NULL;
5026 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5028 /* We only care about the number of conditions between the innermost
5029 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5030 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5031 int old_conds = gimplify_ctxp->conditions;
5032 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5033 gimplify_ctxp->conditions = 0;
5034 gimplify_ctxp->conditional_cleanups = NULL;
5036 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5038 gimplify_ctxp->conditions = old_conds;
5039 gimplify_ctxp->conditional_cleanups = old_cleanups;
5041 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5043 gimple wce = gsi_stmt (iter);
5045 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5047 if (gsi_one_before_end_p (iter))
5049 /* Note that gsi_insert_seq_before and gsi_remove do not
5050 scan operands, unlike some other sequence mutators. */
5051 gsi_insert_seq_before_without_update (&iter,
5052 gimple_wce_cleanup (wce),
5053 GSI_SAME_STMT);
5054 gsi_remove (&iter, true);
5055 break;
5057 else
5059 gimple gtry;
5060 gimple_seq seq;
5061 enum gimple_try_flags kind;
5063 if (gimple_wce_cleanup_eh_only (wce))
5064 kind = GIMPLE_TRY_CATCH;
5065 else
5066 kind = GIMPLE_TRY_FINALLY;
5067 seq = gsi_split_seq_after (iter);
5069 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5070 /* Do not use gsi_replace here, as it may scan operands.
5071 We want to do a simple structural modification only. */
5072 *gsi_stmt_ptr (&iter) = gtry;
5073 iter = gsi_start (seq);
5076 else
5077 gsi_next (&iter);
5080 gimplify_seq_add_seq (pre_p, body_sequence);
5081 if (temp)
5083 *expr_p = temp;
5084 return GS_OK;
5086 else
5088 *expr_p = NULL;
5089 return GS_ALL_DONE;
5093 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5094 is the cleanup action required. EH_ONLY is true if the cleanup should
5095 only be executed if an exception is thrown, not on normal exit. */
5097 static void
5098 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5100 gimple wce;
5101 gimple_seq cleanup_stmts = NULL;
5103   /* Errors can result in improperly nested cleanups, which results in
5104 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5105 if (errorcount || sorrycount)
5106 return;
5108 if (gimple_conditional_context ())
5110 /* If we're in a conditional context, this is more complex. We only
5111 want to run the cleanup if we actually ran the initialization that
5112 necessitates it, but we want to run it after the end of the
5113 conditional context. So we wrap the try/finally around the
5114 condition and use a flag to determine whether or not to actually
5115 run the destructor. Thus
5117 test ? f(A()) : 0
5119 becomes (approximately)
5121 flag = 0;
5122 try {
5123 if (test) { A::A(temp); flag = 1; val = f(temp); }
5124 else { val = 0; }
5125 } finally {
5126 if (flag) A::~A(temp);
5130 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5131 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5132 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5134 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5135 gimplify_stmt (&cleanup, &cleanup_stmts);
5136 wce = gimple_build_wce (cleanup_stmts);
5138 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5139 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5140 gimplify_seq_add_stmt (pre_p, ftrue);
5142 /* Because of this manipulation, and the EH edges that jump
5143 threading cannot redirect, the temporary (VAR) will appear
5144 to be used uninitialized. Don't warn. */
5145 TREE_NO_WARNING (var) = 1;
5147 else
5149 gimplify_stmt (&cleanup, &cleanup_stmts);
5150 wce = gimple_build_wce (cleanup_stmts);
5151 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5152 gimplify_seq_add_stmt (pre_p, wce);
5156 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
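/* Illustrative sketch: for "TARGET_EXPR <D.1234, init ()>", where D.1234
   stands for the slot, the slot is registered as a temporary, the
   initialization "D.1234 = init ()" is gimplified into PRE_P, any
   TARGET_EXPR_CLEANUP is pushed via gimple_push_cleanup, and *EXPR_P is
   replaced by the slot itself.  */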
5158 static enum gimplify_status
5159 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5161 tree targ = *expr_p;
5162 tree temp = TARGET_EXPR_SLOT (targ);
5163 tree init = TARGET_EXPR_INITIAL (targ);
5164 enum gimplify_status ret;
5166 if (init)
5168       /* A TARGET_EXPR temp isn't part of the enclosing block, so add it
5169          to the temps list. Also handle variable-length TARGET_EXPRs. */
5170 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5172 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5173 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5174 gimplify_vla_decl (temp, pre_p);
5176 else
5177 gimple_add_tmp_var (temp);
5179 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5180 expression is supposed to initialize the slot. */
5181 if (VOID_TYPE_P (TREE_TYPE (init)))
5182 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5183 else
5185 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5186 init = init_expr;
5187 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5188 init = NULL;
5189 ggc_free (init_expr);
5191 if (ret == GS_ERROR)
5193 /* PR c++/28266 Make sure this is expanded only once. */
5194 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5195 return GS_ERROR;
5197 if (init)
5198 gimplify_and_add (init, pre_p);
5200 /* If needed, push the cleanup for the temp. */
5201 if (TARGET_EXPR_CLEANUP (targ))
5202 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5203 CLEANUP_EH_ONLY (targ), pre_p);
5205 /* Only expand this once. */
5206 TREE_OPERAND (targ, 3) = init;
5207 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5209 else
5210 /* We should have expanded this before. */
5211 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5213 *expr_p = temp;
5214 return GS_OK;
5217 /* Gimplification of expression trees. */
5219 /* Gimplify an expression which appears in statement context. The
5220 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5221 NULL, a new sequence is allocated.
5223 Return true if we actually added a statement to the queue. */
5225 bool
5226 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5228 gimple_seq_node last;
5230 if (!*seq_p)
5231 *seq_p = gimple_seq_alloc ();
5233 last = gimple_seq_last (*seq_p);
5234 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5235 return last != gimple_seq_last (*seq_p);
5239 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
5240    parallels. If entries already exist, force them to be some flavor of private.
5241 If there is no enclosing parallel, do nothing. */
5243 void
5244 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5246 splay_tree_node n;
5248 if (decl == NULL || !DECL_P (decl))
5249 return;
5253 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5254 if (n != NULL)
5256 if (n->value & GOVD_SHARED)
5257 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5258 else
5259 return;
5261 else if (ctx->region_type != ORT_WORKSHARE)
5262 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5264 ctx = ctx->outer_context;
5266 while (ctx);
5269 /* Similarly for each of the type sizes of TYPE. */
5271 static void
5272 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5274 if (type == NULL || type == error_mark_node)
5275 return;
5276 type = TYPE_MAIN_VARIANT (type);
5278 if (pointer_set_insert (ctx->privatized_types, type))
5279 return;
5281 switch (TREE_CODE (type))
5283 case INTEGER_TYPE:
5284 case ENUMERAL_TYPE:
5285 case BOOLEAN_TYPE:
5286 case REAL_TYPE:
5287 case FIXED_POINT_TYPE:
5288 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5289 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5290 break;
5292 case ARRAY_TYPE:
5293 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5294 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5295 break;
5297 case RECORD_TYPE:
5298 case UNION_TYPE:
5299 case QUAL_UNION_TYPE:
5301 tree field;
5302 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5303 if (TREE_CODE (field) == FIELD_DECL)
5305 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5306 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5309 break;
5311 case POINTER_TYPE:
5312 case REFERENCE_TYPE:
5313 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5314 break;
5316 default:
5317 break;
5320 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5321 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5322 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5325 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5327 static void
5328 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5330 splay_tree_node n;
5331 unsigned int nflags;
5332 tree t;
5334 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5335 return;
5337 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5338 there are constructors involved somewhere. */
5339 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5340 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5341 flags |= GOVD_SEEN;
5343 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5344 if (n != NULL)
5346 /* We shouldn't be re-adding the decl with the same data
5347 sharing class. */
5348 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5349 /* The only combination of data sharing classes we should see is
5350 FIRSTPRIVATE and LASTPRIVATE. */
5351 nflags = n->value | flags;
5352 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5353 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5354 n->value = nflags;
5355 return;
5358 /* When adding a variable-sized variable, we have to handle all sorts
5359 of additional bits of data: the pointer replacement variable, and
5360 the parameters of the type. */
5361 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5363 /* Add the pointer replacement variable as PRIVATE if the variable
5364 replacement is private, else FIRSTPRIVATE since we'll need the
5365 address of the original variable either for SHARED, or for the
5366 copy into or out of the context. */
5367 if (!(flags & GOVD_LOCAL))
5369 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5370 nflags |= flags & GOVD_SEEN;
5371 t = DECL_VALUE_EXPR (decl);
5372 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5373 t = TREE_OPERAND (t, 0);
5374 gcc_assert (DECL_P (t));
5375 omp_add_variable (ctx, t, nflags);
5378 /* Add all of the variable and type parameters (which should have
5379 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5380 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5381 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5382 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5384 /* The variable-sized variable itself is never SHARED, only some form
5385 of PRIVATE. The sharing would take place via the pointer variable
5386 which we remapped above. */
5387 if (flags & GOVD_SHARED)
5388 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5389 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5391 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5392 alloca statement we generate for the variable, so make sure it
5393 is available. This isn't automatically needed for the SHARED
5394 case, since we won't be allocating local storage then.
5395          For local variables TYPE_SIZE_UNIT might not be gimplified yet;
5396          in that case omp_notice_variable will be called later
5397 on when it is gimplified. */
5398 else if (! (flags & GOVD_LOCAL))
5399 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5401 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5403 gcc_assert ((flags & GOVD_LOCAL) == 0);
5404 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5406       /* As in the direct variable-sized case above, we'll need the
5407 size of references being privatized. */
5408 if ((flags & GOVD_SHARED) == 0)
5410 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5411 if (TREE_CODE (t) != INTEGER_CST)
5412 omp_notice_variable (ctx, t, true);
5416 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5419 /* Record the fact that DECL was used within the OpenMP context CTX.
5420 IN_CODE is true when real code uses DECL, and false when we should
5421 merely emit default(none) errors. Return true if DECL is going to
5422 be remapped and thus DECL shouldn't be gimplified into its
5423 DECL_VALUE_EXPR (if any). */
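/* Illustrative example: inside "#pragma omp parallel default(none)", a use
   of a variable that carries no explicit data-sharing clause reaches the
   OMP_CLAUSE_DEFAULT_NONE case below and triggers the "not specified in
   enclosing parallel" error.  */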
5425 static bool
5426 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5428 splay_tree_node n;
5429 unsigned flags = in_code ? GOVD_SEEN : 0;
5430 bool ret = false, shared;
5432 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5433 return false;
5435 /* Threadprivate variables are predetermined. */
5436 if (is_global_var (decl))
5438 if (DECL_THREAD_LOCAL_P (decl))
5439 return false;
5441 if (DECL_HAS_VALUE_EXPR_P (decl))
5443 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5445 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5446 return false;
5450 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5451 if (n == NULL)
5453 enum omp_clause_default_kind default_kind, kind;
5454 struct gimplify_omp_ctx *octx;
5456 if (ctx->region_type == ORT_WORKSHARE)
5457 goto do_outer;
5459 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5460 remapped firstprivate instead of shared. To some extent this is
5461 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5462 default_kind = ctx->default_kind;
5463 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5464 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5465 default_kind = kind;
5467 switch (default_kind)
5469 case OMP_CLAUSE_DEFAULT_NONE:
5470 error ("%qE not specified in enclosing parallel",
5471 DECL_NAME (decl));
5472 error_at (ctx->location, "enclosing parallel");
5473 /* FALLTHRU */
5474 case OMP_CLAUSE_DEFAULT_SHARED:
5475 flags |= GOVD_SHARED;
5476 break;
5477 case OMP_CLAUSE_DEFAULT_PRIVATE:
5478 flags |= GOVD_PRIVATE;
5479 break;
5480 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5481 flags |= GOVD_FIRSTPRIVATE;
5482 break;
5483 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5484 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5485 gcc_assert (ctx->region_type == ORT_TASK);
5486 if (ctx->outer_context)
5487 omp_notice_variable (ctx->outer_context, decl, in_code);
5488 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5490 splay_tree_node n2;
5492 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5493 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5495 flags |= GOVD_FIRSTPRIVATE;
5496 break;
5498 if ((octx->region_type & ORT_PARALLEL) != 0)
5499 break;
5501 if (flags & GOVD_FIRSTPRIVATE)
5502 break;
5503 if (octx == NULL
5504 && (TREE_CODE (decl) == PARM_DECL
5505 || (!is_global_var (decl)
5506 && DECL_CONTEXT (decl) == current_function_decl)))
5508 flags |= GOVD_FIRSTPRIVATE;
5509 break;
5511 flags |= GOVD_SHARED;
5512 break;
5513 default:
5514 gcc_unreachable ();
5517 if ((flags & GOVD_PRIVATE)
5518 && lang_hooks.decls.omp_private_outer_ref (decl))
5519 flags |= GOVD_PRIVATE_OUTER_REF;
5521 omp_add_variable (ctx, decl, flags);
5523 shared = (flags & GOVD_SHARED) != 0;
5524 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5525 goto do_outer;
5528 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5529 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5530 && DECL_SIZE (decl)
5531 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5533 splay_tree_node n2;
5534 tree t = DECL_VALUE_EXPR (decl);
5535 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5536 t = TREE_OPERAND (t, 0);
5537 gcc_assert (DECL_P (t));
5538 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5539 n2->value |= GOVD_SEEN;
5542 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5543 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5545 /* If nothing changed, there's nothing left to do. */
5546 if ((n->value & flags) == flags)
5547 return ret;
5548 flags |= n->value;
5549 n->value = flags;
5551 do_outer:
5552 /* If the variable is private in the current context, then we don't
5553 need to propagate anything to an outer context. */
5554 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5555 return ret;
5556 if (ctx->outer_context
5557 && omp_notice_variable (ctx->outer_context, decl, in_code))
5558 return true;
5559 return ret;
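/* Illustrative example (hypothetical user code, assuming -fopenmp): the
   implicit data-sharing decisions above are what make

     void f (void)
     {
       int a = 1, b = 2;
     #pragma omp parallel default(none) shared(a)
       a += b;          <-- error: 'b' not specified in enclosing parallel
     #pragma omp task
       a += b;          <-- 'a' and 'b' default to firstprivate here
     }

   behave as it does: with default(none) an unlisted variable is diagnosed
   via the OMP_CLAUSE_DEFAULT_NONE case, while in a task region an unlisted
   function-local variable defaults to GOVD_FIRSTPRIVATE rather than
   GOVD_SHARED.  */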
5562 /* Verify that DECL is private within CTX. If there's specific information
5563 to the contrary in the innermost scope, generate an error. */
5565 static bool
5566 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5568 splay_tree_node n;
5570 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5571 if (n != NULL)
5573 if (n->value & GOVD_SHARED)
5575 if (ctx == gimplify_omp_ctxp)
5577 error ("iteration variable %qE should be private",
5578 DECL_NAME (decl));
5579 n->value = GOVD_PRIVATE;
5580 return true;
5582 else
5583 return false;
5585 else if ((n->value & GOVD_EXPLICIT) != 0
5586 && (ctx == gimplify_omp_ctxp
5587 || (ctx->region_type == ORT_COMBINED_PARALLEL
5588 && gimplify_omp_ctxp->outer_context == ctx)))
5590 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5591 error ("iteration variable %qE should not be firstprivate",
5592 DECL_NAME (decl));
5593 else if ((n->value & GOVD_REDUCTION) != 0)
5594 error ("iteration variable %qE should not be reduction",
5595 DECL_NAME (decl));
5597 return (ctx == gimplify_omp_ctxp
5598 || (ctx->region_type == ORT_COMBINED_PARALLEL
5599 && gimplify_omp_ctxp->outer_context == ctx));
5602 if (ctx->region_type != ORT_WORKSHARE)
5603 return false;
5604 else if (ctx->outer_context)
5605 return omp_is_private (ctx->outer_context, decl);
5606 return false;
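/* Illustrative example (hypothetical user code): omp_is_private is what
   rejects unsuitable explicit clauses on a loop iterator, e.g.

     #pragma omp parallel for firstprivate (i)
     for (i = 0; i < n; i++)     <-- error: iteration variable 'i' should
       a[i] = i;                     not be firstprivate

   while reporting, for well-formed loops, whether the iterator has already
   been privatized so gimplify_omp_for below knows whether it still needs
   an implicit GOVD_PRIVATE mapping.  */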
5609 /* Return true if DECL is private within a parallel region
5610 that binds to the current construct's context, or appears in that
5611 parallel region's REDUCTION clause. */
5613 static bool
5614 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5616 splay_tree_node n;
5618 do
5619 {
5620 ctx = ctx->outer_context;
5621 if (ctx == NULL)
5622 return !(is_global_var (decl)
5623 /* References might be private, but might be shared too. */
5624 || lang_hooks.decls.omp_privatize_by_reference (decl));
5626 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5627 if (n != NULL)
5628 return (n->value & GOVD_SHARED) == 0;
5630 while (ctx->region_type == ORT_WORKSHARE);
5631 return false;
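/* Illustrative example (hypothetical user code): omp_check_private answers
   the question asked from gimplify_scan_omp_clauses below, e.g. for

     void f (int n, int *a)
     {
       int i, s = 0;
     #pragma omp parallel private (s)
       {
     #pragma omp for firstprivate (s)
         for (i = 0; i < n; i++)     <-- error: firstprivate variable 's'
           a[i] = s;                     is private in outer context
       }
     }

   where the enclosing parallel made 's' private, so firstprivate,
   lastprivate or reduction on the inner worksharing construct cannot see
   the original variable.  */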
5634 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5635 omp context and, where needed, into the enclosing ones. */
5637 static void
5638 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5639 enum omp_region_type region_type)
5641 struct gimplify_omp_ctx *ctx, *outer_ctx;
5642 struct gimplify_ctx gctx;
5643 tree c;
5645 ctx = new_omp_context (region_type);
5646 outer_ctx = ctx->outer_context;
5648 while ((c = *list_p) != NULL)
5650 bool remove = false;
5651 bool notice_outer = true;
5652 const char *check_non_private = NULL;
5653 unsigned int flags;
5654 tree decl;
5656 switch (OMP_CLAUSE_CODE (c))
5658 case OMP_CLAUSE_PRIVATE:
5659 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5660 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5662 flags |= GOVD_PRIVATE_OUTER_REF;
5663 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5665 else
5666 notice_outer = false;
5667 goto do_add;
5668 case OMP_CLAUSE_SHARED:
5669 flags = GOVD_SHARED | GOVD_EXPLICIT;
5670 goto do_add;
5671 case OMP_CLAUSE_FIRSTPRIVATE:
5672 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5673 check_non_private = "firstprivate";
5674 goto do_add;
5675 case OMP_CLAUSE_LASTPRIVATE:
5676 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5677 check_non_private = "lastprivate";
5678 goto do_add;
5679 case OMP_CLAUSE_REDUCTION:
5680 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5681 check_non_private = "reduction";
5682 goto do_add;
5684 do_add:
5685 decl = OMP_CLAUSE_DECL (c);
5686 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5688 remove = true;
5689 break;
5691 omp_add_variable (ctx, decl, flags);
5692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5693 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5695 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5696 GOVD_LOCAL | GOVD_SEEN);
5697 gimplify_omp_ctxp = ctx;
5698 push_gimplify_context (&gctx);
5700 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5701 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5703 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5704 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5705 pop_gimplify_context
5706 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5707 push_gimplify_context (&gctx);
5708 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5709 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5710 pop_gimplify_context
5711 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5712 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5713 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5715 gimplify_omp_ctxp = outer_ctx;
5717 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5718 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5720 gimplify_omp_ctxp = ctx;
5721 push_gimplify_context (&gctx);
5722 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5724 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5725 NULL, NULL);
5726 TREE_SIDE_EFFECTS (bind) = 1;
5727 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5728 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5730 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5731 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5732 pop_gimplify_context
5733 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5734 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5736 gimplify_omp_ctxp = outer_ctx;
5738 if (notice_outer)
5739 goto do_notice;
5740 break;
5742 case OMP_CLAUSE_COPYIN:
5743 case OMP_CLAUSE_COPYPRIVATE:
5744 decl = OMP_CLAUSE_DECL (c);
5745 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5747 remove = true;
5748 break;
5750 do_notice:
5751 if (outer_ctx)
5752 omp_notice_variable (outer_ctx, decl, true);
5753 if (check_non_private
5754 && region_type == ORT_WORKSHARE
5755 && omp_check_private (ctx, decl))
5757 error ("%s variable %qE is private in outer context",
5758 check_non_private, DECL_NAME (decl));
5759 remove = true;
5761 break;
5763 case OMP_CLAUSE_IF:
5764 OMP_CLAUSE_OPERAND (c, 0)
5765 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5766 /* Fall through. */
5768 case OMP_CLAUSE_SCHEDULE:
5769 case OMP_CLAUSE_NUM_THREADS:
5770 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5771 is_gimple_val, fb_rvalue) == GS_ERROR)
5772 remove = true;
5773 break;
5775 case OMP_CLAUSE_NOWAIT:
5776 case OMP_CLAUSE_ORDERED:
5777 case OMP_CLAUSE_UNTIED:
5778 case OMP_CLAUSE_COLLAPSE:
5779 break;
5781 case OMP_CLAUSE_DEFAULT:
5782 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5783 break;
5785 default:
5786 gcc_unreachable ();
5789 if (remove)
5790 *list_p = OMP_CLAUSE_CHAIN (c);
5791 else
5792 list_p = &OMP_CLAUSE_CHAIN (c);
5795 gimplify_omp_ctxp = ctx;
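/* Illustrative sketch of the clause walk above: for a directive such as

     #pragma omp for firstprivate (a) lastprivate (b) reduction (+:s)

   the new context's splay tree is seeded with, roughly,

     a -> GOVD_FIRSTPRIVATE | GOVD_EXPLICIT
     b -> GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT
     s -> GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT

   (GOVD_SEEN is ORed in later as the body actually uses each variable),
   each decl is also noticed in the enclosing context, and a reduction
   that carries a placeholder additionally has its INIT and MERGE trees
   gimplified into the clause's GIMPLE sequences right here.  */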
5798 /* For all variables that were not actually used within the context,
5799 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5801 static int
5802 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5804 tree *list_p = (tree *) data;
5805 tree decl = (tree) n->key;
5806 unsigned flags = n->value;
5807 enum omp_clause_code code;
5808 tree clause;
5809 bool private_debug;
5811 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5812 return 0;
5813 if ((flags & GOVD_SEEN) == 0)
5814 return 0;
5815 if (flags & GOVD_DEBUG_PRIVATE)
5817 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5818 private_debug = true;
5820 else
5821 private_debug
5822 = lang_hooks.decls.omp_private_debug_clause (decl,
5823 !!(flags & GOVD_SHARED));
5824 if (private_debug)
5825 code = OMP_CLAUSE_PRIVATE;
5826 else if (flags & GOVD_SHARED)
5828 if (is_global_var (decl))
5830 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5831 while (ctx != NULL)
5833 splay_tree_node on
5834 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5835 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5836 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5837 break;
5838 ctx = ctx->outer_context;
5840 if (ctx == NULL)
5841 return 0;
5843 code = OMP_CLAUSE_SHARED;
5845 else if (flags & GOVD_PRIVATE)
5846 code = OMP_CLAUSE_PRIVATE;
5847 else if (flags & GOVD_FIRSTPRIVATE)
5848 code = OMP_CLAUSE_FIRSTPRIVATE;
5849 else
5850 gcc_unreachable ();
5852 clause = build_omp_clause (input_location, code);
5853 OMP_CLAUSE_DECL (clause) = decl;
5854 OMP_CLAUSE_CHAIN (clause) = *list_p;
5855 if (private_debug)
5856 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5857 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5858 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5859 *list_p = clause;
5860 lang_hooks.decls.omp_finish_clause (clause);
5862 return 0;
5865 static void
5866 gimplify_adjust_omp_clauses (tree *list_p)
5868 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5869 tree c, decl;
5871 while ((c = *list_p) != NULL)
5873 splay_tree_node n;
5874 bool remove = false;
5876 switch (OMP_CLAUSE_CODE (c))
5878 case OMP_CLAUSE_PRIVATE:
5879 case OMP_CLAUSE_SHARED:
5880 case OMP_CLAUSE_FIRSTPRIVATE:
5881 decl = OMP_CLAUSE_DECL (c);
5882 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5883 remove = !(n->value & GOVD_SEEN);
5884 if (! remove)
5886 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5887 if ((n->value & GOVD_DEBUG_PRIVATE)
5888 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5890 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5891 || ((n->value & GOVD_DATA_SHARE_CLASS)
5892 == GOVD_PRIVATE));
5893 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5894 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5897 break;
5899 case OMP_CLAUSE_LASTPRIVATE:
5900 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5901 accurately reflect the presence of a FIRSTPRIVATE clause. */
5902 decl = OMP_CLAUSE_DECL (c);
5903 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5904 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5905 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5906 break;
5908 case OMP_CLAUSE_REDUCTION:
5909 case OMP_CLAUSE_COPYIN:
5910 case OMP_CLAUSE_COPYPRIVATE:
5911 case OMP_CLAUSE_IF:
5912 case OMP_CLAUSE_NUM_THREADS:
5913 case OMP_CLAUSE_SCHEDULE:
5914 case OMP_CLAUSE_NOWAIT:
5915 case OMP_CLAUSE_ORDERED:
5916 case OMP_CLAUSE_DEFAULT:
5917 case OMP_CLAUSE_UNTIED:
5918 case OMP_CLAUSE_COLLAPSE:
5919 break;
5921 default:
5922 gcc_unreachable ();
5925 if (remove)
5926 *list_p = OMP_CLAUSE_CHAIN (c);
5927 else
5928 list_p = &OMP_CLAUSE_CHAIN (c);
5931 /* Add in any implicit data sharing. */
5932 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5934 gimplify_omp_ctxp = ctx->outer_context;
5935 delete_omp_context (ctx);
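/* Illustrative sketch (hypothetical user code): given

     void f (void)
     {
       int unused = 0, used = 0;
     #pragma omp parallel private (unused)
       used++;
     }

   the adjustment above deletes the explicit private (unused) clause,
   because 'unused' was never marked GOVD_SEEN by the body, while the
   splay_tree_foreach call materializes an implicit shared (used) clause
   for the variable that was seen but never listed.  */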
5938 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5939 gimplification of the body, as well as scanning the body for used
5940 variables. We need to do this scan now, because variable-sized
5941 decls will be decomposed during gimplification. */
5943 static void
5944 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5946 tree expr = *expr_p;
5947 gimple g;
5948 gimple_seq body = NULL;
5949 struct gimplify_ctx gctx;
5951 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5952 OMP_PARALLEL_COMBINED (expr)
5953 ? ORT_COMBINED_PARALLEL
5954 : ORT_PARALLEL);
5956 push_gimplify_context (&gctx);
5958 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5959 if (gimple_code (g) == GIMPLE_BIND)
5960 pop_gimplify_context (g);
5961 else
5962 pop_gimplify_context (NULL);
5964 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5966 g = gimple_build_omp_parallel (body,
5967 OMP_PARALLEL_CLAUSES (expr),
5968 NULL_TREE, NULL_TREE);
5969 if (OMP_PARALLEL_COMBINED (expr))
5970 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5971 gimplify_seq_add_stmt (pre_p, g);
5972 *expr_p = NULL_TREE;
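/* Illustrative sketch of the transformation above, shown schematically:

     OMP_PARALLEL <clauses, body>          (GENERIC, in *EXPR_P)

   becomes

     gimple_omp_parallel <adjusted clauses>
       gimple_bind
         ... gimplified body ...           (appended to *PRE_P)

   with *EXPR_P cleared, since the construct now lives entirely in the
   statement sequence.  The child-function and data-argument slots are
   left NULL_TREE here and are filled in by the later OpenMP lowering and
   expansion passes.  */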
5975 /* Gimplify the contents of an OMP_TASK statement. This involves
5976 gimplification of the body, as well as scanning the body for used
5977 variables. We need to do this scan now, because variable-sized
5978 decls will be decomposed during gimplification. */
5980 static void
5981 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5983 tree expr = *expr_p;
5984 gimple g;
5985 gimple_seq body = NULL;
5986 struct gimplify_ctx gctx;
5988 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5990 push_gimplify_context (&gctx);
5992 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5993 if (gimple_code (g) == GIMPLE_BIND)
5994 pop_gimplify_context (g);
5995 else
5996 pop_gimplify_context (NULL);
5998 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6000 g = gimple_build_omp_task (body,
6001 OMP_TASK_CLAUSES (expr),
6002 NULL_TREE, NULL_TREE,
6003 NULL_TREE, NULL_TREE, NULL_TREE);
6004 gimplify_seq_add_stmt (pre_p, g);
6005 *expr_p = NULL_TREE;
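/* Illustrative note: the task case mirrors gimplify_omp_parallel above;
   a minimal trigger is

     #pragma omp task
       do_work (i);

   and the five trailing NULL_TREE operands (child function, data and
   copy-function arguments, argument size and alignment) are placeholders
   that the later OpenMP lowering passes are expected to fill in.  */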
6008 /* Gimplify the gross structure of an OMP_FOR statement. */
6010 static enum gimplify_status
6011 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6013 tree for_stmt, decl, var, t;
6014 enum gimplify_status ret = GS_ALL_DONE;
6015 enum gimplify_status tret;
6016 gimple gfor;
6017 gimple_seq for_body, for_pre_body;
6018 int i;
6020 for_stmt = *expr_p;
6022 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6023 ORT_WORKSHARE);
6025 /* Handle OMP_FOR_INIT. */
6026 for_pre_body = NULL;
6027 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6028 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6030 for_body = gimple_seq_alloc ();
6031 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6032 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6033 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6034 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6035 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6037 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6038 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6039 decl = TREE_OPERAND (t, 0);
6040 gcc_assert (DECL_P (decl));
6041 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6042 || POINTER_TYPE_P (TREE_TYPE (decl)));
6044 /* Make sure the iteration variable is private. */
6045 if (omp_is_private (gimplify_omp_ctxp, decl))
6046 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6047 else
6048 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6050 /* If DECL is not a gimple register, create a temporary variable to act
6051 as an iteration counter. This is valid, since DECL cannot be
6052 modified in the body of the loop. */
6053 if (!is_gimple_reg (decl))
6055 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6056 TREE_OPERAND (t, 0) = var;
6058 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6060 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6062 else
6063 var = decl;
6065 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6066 is_gimple_val, fb_rvalue);
6067 ret = MIN (ret, tret);
6068 if (ret == GS_ERROR)
6069 return ret;
6071 /* Handle OMP_FOR_COND. */
6072 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6073 gcc_assert (COMPARISON_CLASS_P (t));
6074 gcc_assert (TREE_OPERAND (t, 0) == decl);
6076 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6077 is_gimple_val, fb_rvalue);
6078 ret = MIN (ret, tret);
6080 /* Handle OMP_FOR_INCR. */
6081 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6082 switch (TREE_CODE (t))
6084 case PREINCREMENT_EXPR:
6085 case POSTINCREMENT_EXPR:
6086 t = build_int_cst (TREE_TYPE (decl), 1);
6087 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6088 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6089 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6090 break;
6092 case PREDECREMENT_EXPR:
6093 case POSTDECREMENT_EXPR:
6094 t = build_int_cst (TREE_TYPE (decl), -1);
6095 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6096 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6097 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6098 break;
6100 case MODIFY_EXPR:
6101 gcc_assert (TREE_OPERAND (t, 0) == decl);
6102 TREE_OPERAND (t, 0) = var;
6104 t = TREE_OPERAND (t, 1);
6105 switch (TREE_CODE (t))
6107 case PLUS_EXPR:
6108 if (TREE_OPERAND (t, 1) == decl)
6110 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6111 TREE_OPERAND (t, 0) = var;
6112 break;
6115 /* Fallthru. */
6116 case MINUS_EXPR:
6117 case POINTER_PLUS_EXPR:
6118 gcc_assert (TREE_OPERAND (t, 0) == decl);
6119 TREE_OPERAND (t, 0) = var;
6120 break;
6121 default:
6122 gcc_unreachable ();
6125 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6126 is_gimple_val, fb_rvalue);
6127 ret = MIN (ret, tret);
6128 break;
6130 default:
6131 gcc_unreachable ();
6134 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6136 tree c;
6137 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6138 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6139 && OMP_CLAUSE_DECL (c) == decl
6140 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6142 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6143 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6144 gcc_assert (TREE_OPERAND (t, 0) == var);
6145 t = TREE_OPERAND (t, 1);
6146 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6147 || TREE_CODE (t) == MINUS_EXPR
6148 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6149 gcc_assert (TREE_OPERAND (t, 0) == var);
6150 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6151 TREE_OPERAND (t, 1));
6152 gimplify_assign (decl, t,
6153 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6158 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6160 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6162 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6163 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6164 for_pre_body);
6166 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6168 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6169 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6170 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6171 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6172 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6173 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6174 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6175 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6178 gimplify_seq_add_stmt (pre_p, gfor);
6179 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
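/* Illustrative sketch (hypothetical user code): for

     #pragma omp for lastprivate (i)
     for (i = 0; i < n; i++)
       a[i] = i;

   the loop above normalizes the i++ increment into the MODIFY_EXPR form
   i = i + 1 before packing index, bounds, condition and increment into
   the GIMPLE_OMP_FOR tuple.  If 'i' is not a gimple register (say, its
   address is taken elsewhere) a temporary iterator is substituted and,
   for lastprivate or collapsed loops, the write-back to the original
   variable is queued on OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ.  */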
6182 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6183 In particular, OMP_SECTIONS and OMP_SINGLE. */
6185 static void
6186 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6188 tree expr = *expr_p;
6189 gimple stmt;
6190 gimple_seq body = NULL;
6192 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6193 gimplify_and_add (OMP_BODY (expr), &body);
6194 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6196 if (TREE_CODE (expr) == OMP_SECTIONS)
6197 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6198 else if (TREE_CODE (expr) == OMP_SINGLE)
6199 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6200 else
6201 gcc_unreachable ();
6203 gimplify_seq_add_stmt (pre_p, stmt);
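/* Illustrative note: this shared path means that, e.g.,

     #pragma omp sections
     {
     #pragma omp section
       a ();
     #pragma omp section
       b ();
     }

   and '#pragma omp single' differ only in the tuple built at the end;
   clause scanning, body gimplification and clause adjustment are the same
   as for the constructs handled above.  */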
6206 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6207 stabilized the lhs of the atomic operation as *ADDR. Return true if
6208 EXPR is this stabilized form. */
6210 static bool
6211 goa_lhs_expr_p (tree expr, tree addr)
6213 /* Also include casts to other type variants. The C front end is fond
6214 of adding these for e.g. volatile variables. This is like
6215 STRIP_TYPE_NOPS but includes the main variant lookup. */
6216 STRIP_USELESS_TYPE_CONVERSION (expr);
6218 if (TREE_CODE (expr) == INDIRECT_REF)
6220 expr = TREE_OPERAND (expr, 0);
6221 while (expr != addr
6222 && (CONVERT_EXPR_P (expr)
6223 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6224 && TREE_CODE (expr) == TREE_CODE (addr)
6225 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6227 expr = TREE_OPERAND (expr, 0);
6228 addr = TREE_OPERAND (addr, 0);
6230 if (expr == addr)
6231 return true;
6232 return (TREE_CODE (addr) == ADDR_EXPR
6233 && TREE_CODE (expr) == ADDR_EXPR
6234 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6236 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6237 return true;
6238 return false;
6241 /* Walk *EXPR_P and replace
6242 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
6243 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6244 a subexpression, 0 if it did not, or -1 if an error was encountered. */
6246 static int
6247 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6248 tree lhs_var)
6250 tree expr = *expr_p;
6251 int saw_lhs;
6253 if (goa_lhs_expr_p (expr, lhs_addr))
6255 *expr_p = lhs_var;
6256 return 1;
6258 if (is_gimple_val (expr))
6259 return 0;
6261 saw_lhs = 0;
6262 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6264 case tcc_binary:
6265 case tcc_comparison:
6266 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6267 lhs_var);
6268 case tcc_unary:
6269 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6270 lhs_var);
6271 break;
6272 case tcc_expression:
6273 switch (TREE_CODE (expr))
6275 case TRUTH_ANDIF_EXPR:
6276 case TRUTH_ORIF_EXPR:
6277 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6278 lhs_addr, lhs_var);
6279 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6280 lhs_addr, lhs_var);
6281 break;
6282 default:
6283 break;
6285 break;
6286 default:
6287 break;
6290 if (saw_lhs == 0)
6292 enum gimplify_status gs;
6293 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6294 if (gs != GS_ALL_DONE)
6295 saw_lhs = -1;
6298 return saw_lhs;
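/* Illustrative sketch: for '#pragma omp atomic  x += a[i] * 2;' the front
   end hands gimplify_omp_atomic roughly '*&x + a[i] * 2' as the new value.
   The walk above replaces the '*&x' occurrence with LHS_VAR -- the
   temporary that will hold the atomically loaded value -- and, because
   'a[i] * 2' does not involve the lhs, forces it into a gimple value
   computed in *PRE_P before the atomic sequence.  */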
6302 /* Gimplify an OMP_ATOMIC statement. */
6304 static enum gimplify_status
6305 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6307 tree addr = TREE_OPERAND (*expr_p, 0);
6308 tree rhs = TREE_OPERAND (*expr_p, 1);
6309 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6310 tree tmp_load;
6312 tmp_load = create_tmp_var (type, NULL);
6313 if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE)
6314 DECL_GIMPLE_REG_P (tmp_load) = 1;
6315 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6316 return GS_ERROR;
6318 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6319 != GS_ALL_DONE)
6320 return GS_ERROR;
6322 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6323 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6324 != GS_ALL_DONE)
6325 return GS_ERROR;
6326 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6327 *expr_p = NULL;
6329 return GS_ALL_DONE;
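/* Illustrative sketch: for '#pragma omp atomic  x += 1;' the statements
   emitted above are, schematically,

     tmp_load = GIMPLE_OMP_ATOMIC_LOAD <&x>
     D.1234 = tmp_load + 1
     GIMPLE_OMP_ATOMIC_STORE <D.1234>

   (D.1234 standing for a compiler temporary), which the later OpenMP
   expansion turns into a builtin atomic operation or a compare-and-swap
   loop, depending on the target and the type.  */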
6333 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6334 expression produces a value to be used as an operand inside a GIMPLE
6335 statement, the value will be stored back in *EXPR_P. This value will
6336 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6337 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6338 emitted in PRE_P and POST_P.
6340 Additionally, this process may overwrite parts of the input
6341 expression during gimplification. Ideally, it should be
6342 possible to do non-destructive gimplification.
6344 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6345 the expression needs to evaluate to a value to be used as
6346 an operand in a GIMPLE statement, this value will be stored in
6347 *EXPR_P on exit. This happens when the caller specifies one
6348 of fb_lvalue or fb_rvalue fallback flags.
6350 PRE_P will contain the sequence of GIMPLE statements corresponding
6351 to the evaluation of EXPR and all the side-effects that must
6352 be executed before the main expression. On exit, the last
6353 statement of PRE_P is the core statement being gimplified. For
6354 instance, when gimplifying 'if (++a)' the last statement in
6355 PRE_P will be 'if (t.1)' where t.1 is the result of
6356 pre-incrementing 'a'.
6358 POST_P will contain the sequence of GIMPLE statements corresponding
6359 to the evaluation of all the side-effects that must be executed
6360 after the main expression. If this is NULL, the post
6361 side-effects are stored at the end of PRE_P.
6363 The reason why the output is split in two is to handle post
6364 side-effects explicitly. In some cases, an expression may have
6365 inner and outer post side-effects which need to be emitted in
6366 an order different from the one given by the recursive
6367 traversal. For instance, for the expression (*p--)++ the post
6368 side-effects of '--' must actually occur *after* the post
6369 side-effects of '++'. However, gimplification will first visit
6370 the inner expression, so if a separate POST sequence was not
6371 used, the resulting sequence would be:
6373 1 t.1 = *p
6374 2 p = p - 1
6375 3 t.2 = t.1 + 1
6376 4 *p = t.2
6378 However, the post-decrement operation in line #2 must not be
6379 evaluated until after the store to *p at line #4, so the
6380 correct sequence should be:
6382 1 t.1 = *p
6383 2 t.2 = t.1 + 1
6384 3 *p = t.2
6385 4 p = p - 1
6387 So, by specifying a separate post queue, it is possible
6388 to emit the post side-effects in the correct order.
6389 If POST_P is NULL, an internal queue will be used. Before
6390 returning to the caller, the sequence POST_P is appended to
6391 the main output sequence PRE_P.
6393 GIMPLE_TEST_F points to a function that takes a tree T and
6394 returns nonzero if T is in the GIMPLE form requested by the
6395 caller. The GIMPLE predicates are in tree-gimple.c.
6397 FALLBACK tells the function what sort of a temporary we want if
6398 gimplification cannot produce an expression that complies with
6399 GIMPLE_TEST_F.
6401 fb_none means that no temporary should be generated
6402 fb_rvalue means that an rvalue is OK to generate
6403 fb_lvalue means that an lvalue is OK to generate
6404 fb_either means that either is OK, but an lvalue is preferable.
6405 fb_mayfail means that gimplification may fail (in which case
6406 GS_ERROR will be returned)
6408 The return value is either GS_ERROR or GS_ALL_DONE, since this
6409 function iterates until EXPR is completely gimplified or an error
6410 occurs. */
6412 enum gimplify_status
6413 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6414 bool (*gimple_test_f) (tree), fallback_t fallback)
6416 tree tmp;
6417 gimple_seq internal_pre = NULL;
6418 gimple_seq internal_post = NULL;
6419 tree save_expr;
6420 bool is_statement;
6421 location_t saved_location;
6422 enum gimplify_status ret;
6423 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6425 save_expr = *expr_p;
6426 if (save_expr == NULL_TREE)
6427 return GS_ALL_DONE;
6429 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6430 is_statement = gimple_test_f == is_gimple_stmt;
6431 if (is_statement)
6432 gcc_assert (pre_p);
6434 /* Consistency checks. */
6435 if (gimple_test_f == is_gimple_reg)
6436 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6437 else if (gimple_test_f == is_gimple_val
6438 || gimple_test_f == is_gimple_call_addr
6439 || gimple_test_f == is_gimple_condexpr
6440 || gimple_test_f == is_gimple_mem_rhs
6441 || gimple_test_f == is_gimple_mem_rhs_or_call
6442 || gimple_test_f == is_gimple_reg_rhs
6443 || gimple_test_f == is_gimple_reg_rhs_or_call
6444 || gimple_test_f == is_gimple_asm_val)
6445 gcc_assert (fallback & fb_rvalue);
6446 else if (gimple_test_f == is_gimple_min_lval
6447 || gimple_test_f == is_gimple_lvalue)
6448 gcc_assert (fallback & fb_lvalue);
6449 else if (gimple_test_f == is_gimple_addressable)
6450 gcc_assert (fallback & fb_either);
6451 else if (gimple_test_f == is_gimple_stmt)
6452 gcc_assert (fallback == fb_none);
6453 else
6455 /* We should have recognized the GIMPLE_TEST_F predicate to
6456 know what kind of fallback to use in case a temporary is
6457 needed to hold the value or address of *EXPR_P. */
6458 gcc_unreachable ();
6461 /* We used to check the predicate here and return immediately if it
6462 succeeds. This is wrong; the design is for gimplification to be
6463 idempotent, and for the predicates to only test for valid forms, not
6464 whether they are fully simplified. */
6465 if (pre_p == NULL)
6466 pre_p = &internal_pre;
6468 if (post_p == NULL)
6469 post_p = &internal_post;
6471 /* Remember the last statements added to PRE_P and POST_P. Every
6472 new statement added by the gimplification helpers needs to be
6473 annotated with location information. To centralize the
6474 responsibility, we remember the last statement that had been
6475 added to both queues before gimplifying *EXPR_P. If
6476 gimplification produces new statements in PRE_P and POST_P, those
6477 statements will be annotated with the same location information
6478 as *EXPR_P. */
6479 pre_last_gsi = gsi_last (*pre_p);
6480 post_last_gsi = gsi_last (*post_p);
6482 saved_location = input_location;
6483 if (save_expr != error_mark_node
6484 && EXPR_HAS_LOCATION (*expr_p))
6485 input_location = EXPR_LOCATION (*expr_p);
6487 /* Loop over the specific gimplifiers until the toplevel node
6488 remains the same. */
6491 /* Strip away as many useless type conversions as possible
6492 at the toplevel. */
6493 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6495 /* Remember the expr. */
6496 save_expr = *expr_p;
6498 /* Die, die, die, my darling. */
6499 if (save_expr == error_mark_node
6500 || (TREE_TYPE (save_expr)
6501 && TREE_TYPE (save_expr) == error_mark_node))
6503 ret = GS_ERROR;
6504 break;
6507 /* Do any language-specific gimplification. */
6508 ret = ((enum gimplify_status)
6509 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6510 if (ret == GS_OK)
6512 if (*expr_p == NULL_TREE)
6513 break;
6514 if (*expr_p != save_expr)
6515 continue;
6517 else if (ret != GS_UNHANDLED)
6518 break;
6520 ret = GS_OK;
6521 switch (TREE_CODE (*expr_p))
6523 /* First deal with the special cases. */
6525 case POSTINCREMENT_EXPR:
6526 case POSTDECREMENT_EXPR:
6527 case PREINCREMENT_EXPR:
6528 case PREDECREMENT_EXPR:
6529 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6530 fallback != fb_none);
6531 break;
6533 case ARRAY_REF:
6534 case ARRAY_RANGE_REF:
6535 case REALPART_EXPR:
6536 case IMAGPART_EXPR:
6537 case COMPONENT_REF:
6538 case VIEW_CONVERT_EXPR:
6539 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6540 fallback ? fallback : fb_rvalue);
6541 break;
6543 case COND_EXPR:
6544 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6546 /* C99 code may assign to an array in a structure value of a
6547 conditional expression, and this has undefined behavior
6548 only on execution, so create a temporary if an lvalue is
6549 required. */
6550 if (fallback == fb_lvalue)
6552 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6553 mark_addressable (*expr_p);
6555 break;
6557 case CALL_EXPR:
6558 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6560 /* C99 code may assign to an array in a structure returned
6561 from a function, and this has undefined behavior only on
6562 execution, so create a temporary if an lvalue is
6563 required. */
6564 if (fallback == fb_lvalue)
6566 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6567 mark_addressable (*expr_p);
6569 break;
6571 case TREE_LIST:
6572 gcc_unreachable ();
6574 case COMPOUND_EXPR:
6575 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6576 break;
6578 case COMPOUND_LITERAL_EXPR:
6579 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6580 break;
6582 case MODIFY_EXPR:
6583 case INIT_EXPR:
6584 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6585 fallback != fb_none);
6586 break;
6588 case TRUTH_ANDIF_EXPR:
6589 case TRUTH_ORIF_EXPR:
6590 /* Pass the source location of the outer expression. */
6591 ret = gimplify_boolean_expr (expr_p, saved_location);
6592 break;
6594 case TRUTH_NOT_EXPR:
6595 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6597 tree type = TREE_TYPE (*expr_p);
6598 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6599 ret = GS_OK;
6600 break;
6603 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6604 is_gimple_val, fb_rvalue);
6605 recalculate_side_effects (*expr_p);
6606 break;
6608 case ADDR_EXPR:
6609 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6610 break;
6612 case VA_ARG_EXPR:
6613 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6614 break;
6616 CASE_CONVERT:
6617 if (IS_EMPTY_STMT (*expr_p))
6619 ret = GS_ALL_DONE;
6620 break;
6623 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6624 || fallback == fb_none)
6626 /* Just strip a conversion to void (or in void context) and
6627 try again. */
6628 *expr_p = TREE_OPERAND (*expr_p, 0);
6629 break;
6632 ret = gimplify_conversion (expr_p);
6633 if (ret == GS_ERROR)
6634 break;
6635 if (*expr_p != save_expr)
6636 break;
6637 /* FALLTHRU */
6639 case FIX_TRUNC_EXPR:
6640 /* unary_expr: ... | '(' cast ')' val | ... */
6641 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6642 is_gimple_val, fb_rvalue);
6643 recalculate_side_effects (*expr_p);
6644 break;
6646 case INDIRECT_REF:
6647 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6648 if (*expr_p != save_expr)
6649 break;
6650 /* else fall through. */
6651 case ALIGN_INDIRECT_REF:
6652 case MISALIGNED_INDIRECT_REF:
6653 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6654 is_gimple_reg, fb_rvalue);
6655 recalculate_side_effects (*expr_p);
6656 break;
6658 /* Constants need not be gimplified. */
6659 case INTEGER_CST:
6660 case REAL_CST:
6661 case FIXED_CST:
6662 case STRING_CST:
6663 case COMPLEX_CST:
6664 case VECTOR_CST:
6665 ret = GS_ALL_DONE;
6666 break;
6668 case CONST_DECL:
6669 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6670 CONST_DECL node. Otherwise the decl is replaceable by its
6671 value. */
6672 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6673 if (fallback & fb_lvalue)
6674 ret = GS_ALL_DONE;
6675 else
6676 *expr_p = DECL_INITIAL (*expr_p);
6677 break;
6679 case DECL_EXPR:
6680 ret = gimplify_decl_expr (expr_p, pre_p);
6681 break;
6683 case BIND_EXPR:
6684 ret = gimplify_bind_expr (expr_p, pre_p);
6685 break;
6687 case LOOP_EXPR:
6688 ret = gimplify_loop_expr (expr_p, pre_p);
6689 break;
6691 case SWITCH_EXPR:
6692 ret = gimplify_switch_expr (expr_p, pre_p);
6693 break;
6695 case EXIT_EXPR:
6696 ret = gimplify_exit_expr (expr_p);
6697 break;
6699 case GOTO_EXPR:
6700 /* If the target is not a LABEL_DECL, then it is a computed jump
6701 and the target needs to be gimplified. */
6702 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6704 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6705 NULL, is_gimple_val, fb_rvalue);
6706 if (ret == GS_ERROR)
6707 break;
6709 gimplify_seq_add_stmt (pre_p,
6710 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6711 break;
6713 case PREDICT_EXPR:
6714 gimplify_seq_add_stmt (pre_p,
6715 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6716 PREDICT_EXPR_OUTCOME (*expr_p)));
6717 ret = GS_ALL_DONE;
6718 break;
6720 case LABEL_EXPR:
6721 ret = GS_ALL_DONE;
6722 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6723 == current_function_decl);
6724 gimplify_seq_add_stmt (pre_p,
6725 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6726 break;
6728 case CASE_LABEL_EXPR:
6729 ret = gimplify_case_label_expr (expr_p, pre_p);
6730 break;
6732 case RETURN_EXPR:
6733 ret = gimplify_return_expr (*expr_p, pre_p);
6734 break;
6736 case CONSTRUCTOR:
6737 /* Don't reduce this in place; let gimplify_init_constructor work its
6738 magic. But if we're just elaborating this for side effects, just
6739 gimplify any element that has side-effects. */
6740 if (fallback == fb_none)
6742 unsigned HOST_WIDE_INT ix;
6743 constructor_elt *ce;
6744 tree temp = NULL_TREE;
6745 for (ix = 0;
6746 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6747 ix, ce);
6748 ix++)
6749 if (TREE_SIDE_EFFECTS (ce->value))
6750 append_to_statement_list (ce->value, &temp);
6752 *expr_p = temp;
6753 ret = GS_OK;
6755 /* C99 code may assign to an array in a constructed
6756 structure or union, and this has undefined behavior only
6757 on execution, so create a temporary if an lvalue is
6758 required. */
6759 else if (fallback == fb_lvalue)
6761 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6762 mark_addressable (*expr_p);
6764 else
6765 ret = GS_ALL_DONE;
6766 break;
6768 /* The following are special cases that are not handled by the
6769 original GIMPLE grammar. */
6771 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6772 eliminated. */
6773 case SAVE_EXPR:
6774 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6775 break;
6777 case BIT_FIELD_REF:
6779 enum gimplify_status r0, r1, r2;
6781 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6782 post_p, is_gimple_lvalue, fb_either);
6783 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6784 post_p, is_gimple_val, fb_rvalue);
6785 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6786 post_p, is_gimple_val, fb_rvalue);
6787 recalculate_side_effects (*expr_p);
6789 ret = MIN (r0, MIN (r1, r2));
6791 break;
6793 case TARGET_MEM_REF:
6795 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6797 if (TMR_SYMBOL (*expr_p))
6798 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6799 post_p, is_gimple_lvalue, fb_either);
6800 else if (TMR_BASE (*expr_p))
6801 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6802 post_p, is_gimple_val, fb_either);
6803 if (TMR_INDEX (*expr_p))
6804 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6805 post_p, is_gimple_val, fb_rvalue);
6806 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6807 ret = MIN (r0, r1);
6809 break;
6811 case NON_LVALUE_EXPR:
6812 /* This should have been stripped above. */
6813 gcc_unreachable ();
6815 case ASM_EXPR:
6816 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6817 break;
6819 case TRY_FINALLY_EXPR:
6820 case TRY_CATCH_EXPR:
6822 gimple_seq eval, cleanup;
6823 gimple try_;
6825 eval = cleanup = NULL;
6826 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6827 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6828 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6829 if (gimple_seq_empty_p (cleanup))
6831 gimple_seq_add_seq (pre_p, eval);
6832 ret = GS_ALL_DONE;
6833 break;
6835 try_ = gimple_build_try (eval, cleanup,
6836 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6837 ? GIMPLE_TRY_FINALLY
6838 : GIMPLE_TRY_CATCH);
6839 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6840 gimple_try_set_catch_is_cleanup (try_,
6841 TRY_CATCH_IS_CLEANUP (*expr_p));
6842 gimplify_seq_add_stmt (pre_p, try_);
6843 ret = GS_ALL_DONE;
6844 break;
6847 case CLEANUP_POINT_EXPR:
6848 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6849 break;
6851 case TARGET_EXPR:
6852 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6853 break;
6855 case CATCH_EXPR:
6857 gimple c;
6858 gimple_seq handler = NULL;
6859 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6860 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6861 gimplify_seq_add_stmt (pre_p, c);
6862 ret = GS_ALL_DONE;
6863 break;
6866 case EH_FILTER_EXPR:
6868 gimple ehf;
6869 gimple_seq failure = NULL;
6871 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6872 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6873 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6874 gimplify_seq_add_stmt (pre_p, ehf);
6875 ret = GS_ALL_DONE;
6876 break;
6879 case OBJ_TYPE_REF:
6881 enum gimplify_status r0, r1;
6882 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6883 post_p, is_gimple_val, fb_rvalue);
6884 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6885 post_p, is_gimple_val, fb_rvalue);
6886 TREE_SIDE_EFFECTS (*expr_p) = 0;
6887 ret = MIN (r0, r1);
6889 break;
6891 case LABEL_DECL:
6892 /* We get here when taking the address of a label. We mark
6893 the label as "forced"; meaning it can never be removed and
6894 it is a potential target for any computed goto. */
6895 FORCED_LABEL (*expr_p) = 1;
6896 ret = GS_ALL_DONE;
6897 break;
6899 case STATEMENT_LIST:
6900 ret = gimplify_statement_list (expr_p, pre_p);
6901 break;
6903 case WITH_SIZE_EXPR:
6905 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6906 post_p == &internal_post ? NULL : post_p,
6907 gimple_test_f, fallback);
6908 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6909 is_gimple_val, fb_rvalue);
6911 break;
6913 case VAR_DECL:
6914 case PARM_DECL:
6915 ret = gimplify_var_or_parm_decl (expr_p);
6916 break;
6918 case RESULT_DECL:
6919 /* When within an OpenMP context, notice uses of variables. */
6920 if (gimplify_omp_ctxp)
6921 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6922 ret = GS_ALL_DONE;
6923 break;
6925 case SSA_NAME:
6926 /* Allow callbacks into the gimplifier during optimization. */
6927 ret = GS_ALL_DONE;
6928 break;
6930 case OMP_PARALLEL:
6931 gimplify_omp_parallel (expr_p, pre_p);
6932 ret = GS_ALL_DONE;
6933 break;
6935 case OMP_TASK:
6936 gimplify_omp_task (expr_p, pre_p);
6937 ret = GS_ALL_DONE;
6938 break;
6940 case OMP_FOR:
6941 ret = gimplify_omp_for (expr_p, pre_p);
6942 break;
6944 case OMP_SECTIONS:
6945 case OMP_SINGLE:
6946 gimplify_omp_workshare (expr_p, pre_p);
6947 ret = GS_ALL_DONE;
6948 break;
6950 case OMP_SECTION:
6951 case OMP_MASTER:
6952 case OMP_ORDERED:
6953 case OMP_CRITICAL:
6955 gimple_seq body = NULL;
6956 gimple g;
6958 gimplify_and_add (OMP_BODY (*expr_p), &body);
6959 switch (TREE_CODE (*expr_p))
6961 case OMP_SECTION:
6962 g = gimple_build_omp_section (body);
6963 break;
6964 case OMP_MASTER:
6965 g = gimple_build_omp_master (body);
6966 break;
6967 case OMP_ORDERED:
6968 g = gimple_build_omp_ordered (body);
6969 break;
6970 case OMP_CRITICAL:
6971 g = gimple_build_omp_critical (body,
6972 OMP_CRITICAL_NAME (*expr_p));
6973 break;
6974 default:
6975 gcc_unreachable ();
6977 gimplify_seq_add_stmt (pre_p, g);
6978 ret = GS_ALL_DONE;
6979 break;
6982 case OMP_ATOMIC:
6983 ret = gimplify_omp_atomic (expr_p, pre_p);
6984 break;
6986 case POINTER_PLUS_EXPR:
6987 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6988 The second form is a gimple immediate, saving the need for an extra
6989 statement. */
6990 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6991 && (tmp = maybe_fold_offset_to_address
6992 (EXPR_LOCATION (*expr_p),
6993 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6994 TREE_TYPE (*expr_p))))
6996 *expr_p = tmp;
6997 break;
6999 /* Convert (void *)&a + 4 into (void *)&a[1]. */
7000 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
7001 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7002 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
7003 0),0)))
7004 && (tmp = maybe_fold_offset_to_address
7005 (EXPR_LOCATION (*expr_p),
7006 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
7007 TREE_OPERAND (*expr_p, 1),
7008 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
7009 0)))))
7011 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
7012 break;
7014 /* FALLTHRU */
7016 default:
7017 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7019 case tcc_comparison:
7020 /* Handle comparison of objects of non scalar mode aggregates
7021 with a call to memcmp. It would be nice to only have to do
7022 this for variable-sized objects, but then we'd have to allow
7023 the same nest of reference nodes we allow for MODIFY_EXPR and
7024 that's too complex.
7026 Compare scalar mode aggregates as scalar mode values. Using
7027 memcmp for them would be very inefficient at best, and is
7028 plain wrong if bitfields are involved. */
7030 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7032 if (!AGGREGATE_TYPE_P (type))
7033 goto expr_2;
7034 else if (TYPE_MODE (type) != BLKmode)
7035 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7036 else
7037 ret = gimplify_variable_sized_compare (expr_p);
7039 break;
7042 /* If *EXPR_P does not need to be special-cased, handle it
7043 according to its class. */
7044 case tcc_unary:
7045 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7046 post_p, is_gimple_val, fb_rvalue);
7047 break;
7049 case tcc_binary:
7050 expr_2:
7052 enum gimplify_status r0, r1;
7054 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7055 post_p, is_gimple_val, fb_rvalue);
7056 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7057 post_p, is_gimple_val, fb_rvalue);
7059 ret = MIN (r0, r1);
7060 break;
7063 case tcc_declaration:
7064 case tcc_constant:
7065 ret = GS_ALL_DONE;
7066 goto dont_recalculate;
7068 default:
7069 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7070 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7071 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7072 goto expr_2;
7075 recalculate_side_effects (*expr_p);
7077 dont_recalculate:
7078 break;
7081 /* If we replaced *expr_p, gimplify again. */
7082 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7083 ret = GS_ALL_DONE;
7085 while (ret == GS_OK);
7087 /* If we encountered an error_mark somewhere nested inside, either
7088 stub out the statement or propagate the error back out. */
7089 if (ret == GS_ERROR)
7091 if (is_statement)
7092 *expr_p = NULL;
7093 goto out;
7096 /* This was only valid as a return value from the langhook, which
7097 we handled. Make sure it doesn't escape from any other context. */
7098 gcc_assert (ret != GS_UNHANDLED);
7100 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7102 /* We aren't looking for a value, and we don't have a valid
7103 statement. If it doesn't have side-effects, throw it away. */
7104 if (!TREE_SIDE_EFFECTS (*expr_p))
7105 *expr_p = NULL;
7106 else if (!TREE_THIS_VOLATILE (*expr_p))
7108 /* This is probably a _REF that contains something nested that
7109 has side effects. Recurse through the operands to find it. */
7110 enum tree_code code = TREE_CODE (*expr_p);
7112 switch (code)
7114 case COMPONENT_REF:
7115 case REALPART_EXPR:
7116 case IMAGPART_EXPR:
7117 case VIEW_CONVERT_EXPR:
7118 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7119 gimple_test_f, fallback);
7120 break;
7122 case ARRAY_REF:
7123 case ARRAY_RANGE_REF:
7124 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7125 gimple_test_f, fallback);
7126 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7127 gimple_test_f, fallback);
7128 break;
7130 default:
7131 /* Anything else with side-effects must be converted to
7132 a valid statement before we get here. */
7133 gcc_unreachable ();
7136 *expr_p = NULL;
7138 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7139 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7141 /* Historically, the compiler has treated a bare reference
7142 to a non-BLKmode volatile lvalue as forcing a load. */
7143 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7145 /* Normally, we do not want to create a temporary for a
7146 TREE_ADDRESSABLE type because such a type should not be
7147 copied by bitwise-assignment. However, we make an
7148 exception here, as all we are doing here is ensuring that
7149 we read the bytes that make up the type. We use
7150 create_tmp_var_raw because create_tmp_var will abort when
7151 given a TREE_ADDRESSABLE type. */
7152 tree tmp = create_tmp_var_raw (type, "vol");
7153 gimple_add_tmp_var (tmp);
7154 gimplify_assign (tmp, *expr_p, pre_p);
7155 *expr_p = NULL;
7157 else
7158 /* We can't do anything useful with a volatile reference to
7159 an incomplete type, so just throw it away. Likewise for
7160 a BLKmode type, since any implicit inner load should
7161 already have been turned into an explicit one by the
7162 gimplification process. */
7163 *expr_p = NULL;
7166 /* If we are gimplifying at the statement level, we're done. Tack
7167 everything together and return. */
7168 if (fallback == fb_none || is_statement)
7170 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7171 it out for GC to reclaim it. */
7172 *expr_p = NULL_TREE;
7174 if (!gimple_seq_empty_p (internal_pre)
7175 || !gimple_seq_empty_p (internal_post))
7177 gimplify_seq_add_seq (&internal_pre, internal_post);
7178 gimplify_seq_add_seq (pre_p, internal_pre);
7181 /* The result of gimplifying *EXPR_P is going to be the last few
7182 statements in *PRE_P and *POST_P. Add location information
7183 to all the statements that were added by the gimplification
7184 helpers. */
7185 if (!gimple_seq_empty_p (*pre_p))
7186 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7188 if (!gimple_seq_empty_p (*post_p))
7189 annotate_all_with_location_after (*post_p, post_last_gsi,
7190 input_location);
7192 goto out;
7195 #ifdef ENABLE_GIMPLE_CHECKING
7196 if (*expr_p)
7198 enum tree_code code = TREE_CODE (*expr_p);
7199 /* These expressions should already be in gimple IR form. */
7200 gcc_assert (code != MODIFY_EXPR
7201 && code != ASM_EXPR
7202 && code != BIND_EXPR
7203 && code != CATCH_EXPR
7204 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7205 && code != EH_FILTER_EXPR
7206 && code != GOTO_EXPR
7207 && code != LABEL_EXPR
7208 && code != LOOP_EXPR
7209 && code != SWITCH_EXPR
7210 && code != TRY_FINALLY_EXPR
7211 && code != OMP_CRITICAL
7212 && code != OMP_FOR
7213 && code != OMP_MASTER
7214 && code != OMP_ORDERED
7215 && code != OMP_PARALLEL
7216 && code != OMP_SECTIONS
7217 && code != OMP_SECTION
7218 && code != OMP_SINGLE);
7220 #endif
7222 /* Otherwise we're gimplifying a subexpression, so the resulting
7223 value is interesting. If it's a valid operand that matches
7224 GIMPLE_TEST_F, we're done. Unless we are handling some
7225 post-effects internally; if that's the case, we need to copy into
7226 a temporary before adding the post-effects to POST_P. */
7227 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7228 goto out;
7230 /* Otherwise, we need to create a new temporary for the gimplified
7231 expression. */
7233 /* We can't return an lvalue if we have an internal postqueue. The
7234 object the lvalue refers to would (probably) be modified by the
7235 postqueue; we need to copy the value out first, which means an
7236 rvalue. */
7237 if ((fallback & fb_lvalue)
7238 && gimple_seq_empty_p (internal_post)
7239 && is_gimple_addressable (*expr_p))
7241 /* An lvalue will do. Take the address of the expression, store it
7242 in a temporary, and replace the expression with an INDIRECT_REF of
7243 that temporary. */
7244 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7245 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7246 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7248 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7250 /* An rvalue will do. Assign the gimplified expression into a
7251 new temporary TMP and replace the original expression with
7252 TMP. First, make sure that the expression has a type so that
7253 it can be assigned into a temporary. */
7254 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7256 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7257 /* The postqueue might change the value of the expression between
7258 the initialization and use of the temporary, so we can't use a
7259 formal temp. FIXME do we care? */
7261 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7262 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7263 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7264 DECL_GIMPLE_REG_P (*expr_p) = 1;
7266 else
7267 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7269 else
7271 #ifdef ENABLE_GIMPLE_CHECKING
7272 if (!(fallback & fb_mayfail))
7274 fprintf (stderr, "gimplification failed:\n");
7275 print_generic_expr (stderr, *expr_p, 0);
7276 debug_tree (*expr_p);
7277 internal_error ("gimplification failed");
7279 #endif
7280 gcc_assert (fallback & fb_mayfail);
7282 /* If this is an asm statement, and the user asked for the
7283 impossible, don't die. Fail and let gimplify_asm_expr
7284 issue an error. */
7285 ret = GS_ERROR;
7286 goto out;
7289 /* Make sure the temporary matches our predicate. */
7290 gcc_assert ((*gimple_test_f) (*expr_p));
7292 if (!gimple_seq_empty_p (internal_post))
7294 annotate_all_with_location (internal_post, input_location);
7295 gimplify_seq_add_seq (pre_p, internal_post);
7298 out:
7299 input_location = saved_location;
7300 return ret;
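/* Illustrative sketch: a typical internal use of gimplify_expr, of the
   kind seen throughout this file, is

     enum gimplify_status gs
       = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
                        is_gimple_val, fb_rvalue);

   i.e. force one operand into a GIMPLE value, emit whatever helper
   statements that takes into *PRE_P (and post side-effects into *POST_P),
   and fall back to an rvalue temporary when the operand is not already in
   the requested form.  */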
7303 /* Look through TYPE for variable-sized objects and gimplify each such
7304 size that we find. Add to LIST_P any statements generated. */
7306 void
7307 gimplify_type_sizes (tree type, gimple_seq *list_p)
7309 tree field, t;
7311 if (type == NULL || type == error_mark_node)
7312 return;
7314 /* We first do the main variant, then copy into any other variants. */
7315 type = TYPE_MAIN_VARIANT (type);
7317 /* Avoid infinite recursion. */
7318 if (TYPE_SIZES_GIMPLIFIED (type))
7319 return;
7321 TYPE_SIZES_GIMPLIFIED (type) = 1;
7323 switch (TREE_CODE (type))
7325 case INTEGER_TYPE:
7326 case ENUMERAL_TYPE:
7327 case BOOLEAN_TYPE:
7328 case REAL_TYPE:
7329 case FIXED_POINT_TYPE:
7330 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7331 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7333 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7335 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7336 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7338 break;
7340 case ARRAY_TYPE:
7341 /* These types may not have declarations, so handle them here. */
7342 gimplify_type_sizes (TREE_TYPE (type), list_p);
7343 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7344 /* When not optimizing, ensure VLA bounds aren't removed. */
7345 if (!optimize
7346 && TYPE_DOMAIN (type)
7347 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7349 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7350 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7351 DECL_IGNORED_P (t) = 0;
7352 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7353 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7354 DECL_IGNORED_P (t) = 0;
7356 break;
7358 case RECORD_TYPE:
7359 case UNION_TYPE:
7360 case QUAL_UNION_TYPE:
7361 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7362 if (TREE_CODE (field) == FIELD_DECL)
7364 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7365 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7366 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7367 gimplify_type_sizes (TREE_TYPE (field), list_p);
7369 break;
7371 case POINTER_TYPE:
7372 case REFERENCE_TYPE:
7373 /* We used to recurse on the pointed-to type here, which turned out to
7374 be incorrect because its definition might refer to variables not
7375 yet initialized at this point if a forward declaration is involved.
7377 It was actually useful for anonymous pointed-to types to ensure
7378 that the sizes evaluation dominates every possible later use of the
7379 values. Restricting to such types here would be safe since there
7380 is no possible forward declaration around, but would introduce an
7381 undesirable middle-end semantic to anonymity. We then defer to
7382 front-ends the responsibility of ensuring that the sizes are
7383 evaluated both early and late enough, e.g. by attaching artificial
7384 type declarations to the tree. */
7385 break;
7387 default:
7388 break;
7391 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7392 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7394 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7396 TYPE_SIZE (t) = TYPE_SIZE (type);
7397 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7398 TYPE_SIZES_GIMPLIFIED (t) = 1;
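/* Illustrative sketch (hypothetical user code): for a variable-length
   array type such as the one in

     void f (int n)
     {
       double a[n][n];
       ...
     }

   the TYPE_SIZE, TYPE_SIZE_UNIT and domain bounds built by the front end
   are expressions in 'n'; the walk above funnels each of them through
   gimplify_one_sizepos so they become GIMPLE values evaluated where the
   type is declared, and keeps the artificial bound VAR_DECLs visible for
   debugging when not optimizing.  */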
7402 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7403 a size or position, has had all of its SAVE_EXPRs evaluated.
7404 We add any required statements to *STMT_P. */
7406 void
7407 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7409 tree type, expr = *expr_p;
7411 /* We don't do anything if the value isn't there, is constant, or contains
7412 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7413 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7414 will want to replace it with a new variable, but that will cause problems
7415 if this type is from outside the function. It's OK to have that here. */
7416 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7417 || TREE_CODE (expr) == VAR_DECL
7418 || CONTAINS_PLACEHOLDER_P (expr))
7419 return;
7421 type = TREE_TYPE (expr);
7422 *expr_p = unshare_expr (expr);
7424 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7425 expr = *expr_p;
7427 /* Verify that we have an exact type match with the original expression.
7428 In particular, we do not wish to drop a "sizetype" in favour of a
7429 type of similar dimensions. We don't want to pollute the generic
7430 type-stripping code with this knowledge because it doesn't matter
7431 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7432 and friends retain their "sizetype-ness". */
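/* Illustrative example (hypothetical temporaries): if *EXPR_P was a
   sizetype expression such as n * 4 and gimplification left it as a
   temporary D.2000 whose type is merely an unsigned type of the same
   width, the code below rebuilds a sizetype value:

       sz.1 = (sizetype) D.2000;

   and uses SZ.1 instead, so TYPE_SIZE_UNIT keeps its sizetype.  */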
7433 if (TREE_TYPE (expr) != type
7434 && TREE_CODE (type) == INTEGER_TYPE
7435 && TYPE_IS_SIZETYPE (type))
7437 tree tmp;
7438 gimple stmt;
7440 *expr_p = create_tmp_var (type, NULL);
7441 tmp = build1 (NOP_EXPR, type, expr);
7442 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7443 if (EXPR_HAS_LOCATION (expr))
7444 gimple_set_location (stmt, EXPR_LOCATION (expr));
7445 else
7446 gimple_set_location (stmt, input_location);
7451 /* Gimplify the body of statements pointed to by BODY_P and return a
7452 GIMPLE_BIND containing the sequence of GIMPLE statements
7453 corresponding to BODY_P. FNDECL is the function decl containing
7454 *BODY_P. If DO_PARMS is true, statements produced by gimplify_parameters for callee-copied parameters are prepended to the body. */
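/* The returned GIMPLE_BIND has, roughly, this shape (an illustrative
   sketch only):

       GIMPLE_BIND <local decls>
         <statements from gimplify_parameters, when DO_PARMS is true>
         <the gimplified statements of *BODY_P>  */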
7456 gimple
7457 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7459 location_t saved_location = input_location;
7460 gimple_seq parm_stmts, seq;
7461 gimple outer_bind;
7462 struct gimplify_ctx gctx;
7464 timevar_push (TV_TREE_GIMPLIFY);
7466 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7467 gimplification. */
7468 default_rtl_profile ();
7470 gcc_assert (gimplify_ctxp == NULL);
7471 push_gimplify_context (&gctx);
7473 /* Unshare most shared trees in the body and in that of any nested functions.
7474 It would seem we don't have to do this for nested functions because
7475 they are supposed to be output and then the outer function gimplified
7476 first, but the g++ front end doesn't always do it that way. */
7477 unshare_body (body_p, fndecl);
7478 unvisit_body (body_p, fndecl);
7480 if (cgraph_node (fndecl)->origin)
7481 nonlocal_vlas = pointer_set_create ();
7483 /* Make sure input_location isn't set to something weird. */
7484 input_location = DECL_SOURCE_LOCATION (fndecl);
7486 /* Resolve callee-copies. This has to be done before processing
7487 the body so that DECL_VALUE_EXPR gets processed correctly. */
7488 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7490 /* Gimplify the function's body. */
7491 seq = NULL;
7492 gimplify_stmt (body_p, &seq);
7493 outer_bind = gimple_seq_first_stmt (seq);
7494 if (!outer_bind)
7496 outer_bind = gimple_build_nop ();
7497 gimplify_seq_add_stmt (&seq, outer_bind);
7500 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7501 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7502 if (gimple_code (outer_bind) == GIMPLE_BIND
7503 && gimple_seq_first (seq) == gimple_seq_last (seq))
7505 else
7506 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7508 *body_p = NULL_TREE;
7510 /* If we had callee-copies statements, insert them at the beginning
7511 of the function. */
7512 if (!gimple_seq_empty_p (parm_stmts))
7514 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7515 gimple_bind_set_body (outer_bind, parm_stmts);
7518 if (nonlocal_vlas)
7520 pointer_set_destroy (nonlocal_vlas);
7521 nonlocal_vlas = NULL;
7524 pop_gimplify_context (outer_bind);
7525 gcc_assert (gimplify_ctxp == NULL);
7527 #ifdef ENABLE_TYPES_CHECKING
7528 if (!errorcount && !sorrycount)
7529 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7530 #endif
7532 timevar_pop (TV_TREE_GIMPLIFY);
7533 input_location = saved_location;
7535 return outer_bind;
7538 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7539 node for the function we want to gimplify.
7541 The resulting sequence of GIMPLE statements is attached to FNDECL
7542 with gimple_set_body; nothing is returned. */
7544 void
7545 gimplify_function_tree (tree fndecl)
7547 tree oldfn, parm, ret;
7548 gimple_seq seq;
7549 gimple bind;
7551 gcc_assert (!gimple_body (fndecl));
7553 oldfn = current_function_decl;
7554 current_function_decl = fndecl;
7555 if (DECL_STRUCT_FUNCTION (fndecl))
7556 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7557 else
7558 push_struct_function (fndecl);
7560 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7562 /* Preliminarily mark non-addressed complex variables as eligible
7563 for promotion to gimple registers. We'll transform their uses
7564 as we find them. */
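/* For instance (illustrative), a "_Complex double" parameter whose
   address is never taken gets DECL_GIMPLE_REG_P set here, so later
   lowering may rewrite its uses into separate real and imaginary
   components that live in registers.  */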
7565 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7566 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7567 && !TREE_THIS_VOLATILE (parm)
7568 && !needs_to_live_in_memory (parm))
7569 DECL_GIMPLE_REG_P (parm) = 1;
7572 ret = DECL_RESULT (fndecl);
7573 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7574 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7575 && !needs_to_live_in_memory (ret))
7576 DECL_GIMPLE_REG_P (ret) = 1;
7578 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7580 /* The tree body of the function is no longer needed, replace it
7581 with the new GIMPLE body. */
7582 seq = gimple_seq_alloc ();
7583 gimple_seq_add_stmt (&seq, bind);
7584 gimple_set_body (fndecl, seq);
7586 /* If we're instrumenting function entry/exit, then prepend the call to
7587 the entry hook and wrap the whole function in a GIMPLE_TRY_FINALLY to
7588 catch the exit hook. */
7589 /* ??? Add some way to ignore exceptions for this TFE. */
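/* The instrumented body then has roughly this shape (an illustrative
   sketch; the hooks are the BUILT_IN_PROFILE_FUNC_ENTER and
   BUILT_IN_PROFILE_FUNC_EXIT builtins):

       GIMPLE_BIND
         call to the entry hook
         try
           <original outer GIMPLE_BIND of the function>
         finally
           call to the exit hook  */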
7590 if (flag_instrument_function_entry_exit
7591 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7592 && !flag_instrument_functions_exclude_p (fndecl))
7594 tree x;
7595 gimple new_bind;
7596 gimple tf;
7597 gimple_seq cleanup = NULL, body = NULL;
7599 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7600 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7601 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7603 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7604 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7605 gimplify_seq_add_stmt (&body, tf);
7606 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7607 /* Clear the block for BIND, since it is no longer directly inside
7608 the function, but within a try block. */
7609 gimple_bind_set_block (bind, NULL);
7611 /* Replace the current function body with the body
7612 wrapped in the try/finally TF. */
7613 seq = gimple_seq_alloc ();
7614 gimple_seq_add_stmt (&seq, new_bind);
7615 gimple_set_body (fndecl, seq);
7618 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7619 cfun->curr_properties = PROP_gimple_any;
7621 current_function_decl = oldfn;
7622 pop_cfun ();
7626 /* Some transformations like inlining may invalidate the GIMPLE form
7627 for operands. This function traverses all the operands in STMT and
7628 gimplifies anything that is not a valid gimple operand. Any new
7629 GIMPLE statements are inserted before *GSI_P. */
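/* A minimal usage sketch (hypothetical caller, not part of this file):
   a pass that replaces an operand with an arbitrary tree re-gimplifies
   the statement so any non-GIMPLE pieces are split out in front of it:

       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
       gimple_assign_set_rhs1 (stmt, new_rhs);
       gimple_regimplify_operands (stmt, &gsi);
       update_stmt (stmt);  */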
7631 void
7632 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7634 size_t i, num_ops;
7635 tree orig_lhs = NULL_TREE, lhs, t;
7636 gimple_seq pre = NULL;
7637 gimple post_stmt = NULL;
7638 struct gimplify_ctx gctx;
7640 push_gimplify_context (&gctx);
7641 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7643 switch (gimple_code (stmt))
7645 case GIMPLE_COND:
7646 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7647 is_gimple_val, fb_rvalue);
7648 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7649 is_gimple_val, fb_rvalue);
7650 break;
7651 case GIMPLE_SWITCH:
7652 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7653 is_gimple_val, fb_rvalue);
7654 break;
7655 case GIMPLE_OMP_ATOMIC_LOAD:
7656 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7657 is_gimple_val, fb_rvalue);
7658 break;
7659 case GIMPLE_ASM:
7661 size_t i, noutputs = gimple_asm_noutputs (stmt);
7662 const char *constraint, **oconstraints;
7663 bool allows_mem, allows_reg, is_inout;
7665 oconstraints
7666 = (const char **) alloca ((noutputs) * sizeof (const char *));
7667 for (i = 0; i < noutputs; i++)
7669 tree op = gimple_asm_output_op (stmt, i);
7670 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7671 oconstraints[i] = constraint;
7672 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7673 &allows_reg, &is_inout);
7674 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7675 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7676 fb_lvalue | fb_mayfail);
7678 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7680 tree op = gimple_asm_input_op (stmt, i);
7681 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7682 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7683 oconstraints, &allows_mem, &allows_reg);
7684 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7685 allows_reg = 0;
7686 if (!allows_reg && allows_mem)
7687 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7688 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7689 else
7690 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7691 is_gimple_asm_val, fb_rvalue);
7694 break;
7695 default:
7696 /* NOTE: We start gimplifying operands from last to first to
7697 make sure that side-effects on the RHS of calls, assignments
7698 and ASMs are executed before the LHS. The ordering is not
7699 important for other statements. */
7700 num_ops = gimple_num_ops (stmt);
7701 orig_lhs = gimple_get_lhs (stmt);
7702 for (i = num_ops; i > 0; i--)
7704 tree op = gimple_op (stmt, i - 1);
7705 if (op == NULL_TREE)
7706 continue;
7707 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7708 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7709 else if (i == 2
7710 && is_gimple_assign (stmt)
7711 && num_ops == 2
7712 && get_gimple_rhs_class (gimple_expr_code (stmt))
7713 == GIMPLE_SINGLE_RHS)
7714 gimplify_expr (&op, &pre, NULL,
7715 rhs_predicate_for (gimple_assign_lhs (stmt)),
7716 fb_rvalue);
7717 else if (i == 2 && is_gimple_call (stmt))
7719 if (TREE_CODE (op) == FUNCTION_DECL)
7720 continue;
7721 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7723 else
7724 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7725 gimple_set_op (stmt, i - 1, op);
7728 lhs = gimple_get_lhs (stmt);
7729 /* If the LHS is no longer a GIMPLE register, the result may need to go
7730 through a temporary that is then copied to the real LHS. */
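/* For example (illustrative, hypothetical names): if the statement is

       x = foo ();

   where X is an addressable int and therefore not a GIMPLE register,
   it is rewritten as

       D.3000 = foo ();
       x = D.3000;

   with the copy emitted after the original statement as POST_STMT.  */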
7731 if (lhs && !is_gimple_reg (lhs))
7733 bool need_temp = false;
7735 if (is_gimple_assign (stmt)
7736 && num_ops == 2
7737 && get_gimple_rhs_class (gimple_expr_code (stmt))
7738 == GIMPLE_SINGLE_RHS)
7739 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7740 rhs_predicate_for (gimple_assign_lhs (stmt)),
7741 fb_rvalue);
7742 else if (is_gimple_reg (lhs))
7744 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7746 if (is_gimple_call (stmt))
7748 i = gimple_call_flags (stmt);
7749 if ((i & ECF_LOOPING_CONST_OR_PURE)
7750 || !(i & (ECF_CONST | ECF_PURE)))
7751 need_temp = true;
7753 if (stmt_can_throw_internal (stmt))
7754 need_temp = true;
7757 else
7759 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7760 need_temp = true;
7761 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7763 if (is_gimple_call (stmt))
7765 tree fndecl = gimple_call_fndecl (stmt);
7767 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7768 && !(fndecl && DECL_RESULT (fndecl)
7769 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7770 need_temp = true;
7772 else
7773 need_temp = true;
7776 if (need_temp)
7778 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7780 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7781 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7782 DECL_GIMPLE_REG_P (temp) = 1;
7783 if (TREE_CODE (orig_lhs) == SSA_NAME)
7784 orig_lhs = SSA_NAME_VAR (orig_lhs);
7786 if (gimple_in_ssa_p (cfun))
7787 temp = make_ssa_name (temp, NULL);
7788 gimple_set_lhs (stmt, temp);
7789 post_stmt = gimple_build_assign (lhs, temp);
7790 if (TREE_CODE (lhs) == SSA_NAME)
7791 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7794 break;
7797 if (gimple_referenced_vars (cfun))
7798 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7799 add_referenced_var (t);
7801 if (!gimple_seq_empty_p (pre))
7803 if (gimple_in_ssa_p (cfun))
7805 gimple_stmt_iterator i;
7807 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7808 mark_symbols_for_renaming (gsi_stmt (i));
7810 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7812 if (post_stmt)
7813 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7815 pop_gimplify_context (NULL);
7819 /* Expand EXPR into the sequence of GIMPLE statements STMTS. If SIMPLE
7820 is true, force the result to be either an SSA_NAME or an invariant;
7821 otherwise just force it to be a GIMPLE rhs expression. If VAR is not
7822 NULL, make the base variable of the final destination be VAR if suitable. */
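/* Typical use from a pass (an illustrative sketch; EXPR and GSI are
   whatever the caller has at hand):

       gimple_seq stmts = NULL;
       tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);
       if (!gimple_seq_empty_p (stmts))
         gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   VAL is then valid wherever a gimple value is required.  */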
7824 tree
7825 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7827 tree t;
7828 enum gimplify_status ret;
7829 gimple_predicate gimple_test_f;
7830 struct gimplify_ctx gctx;
7832 *stmts = NULL;
7834 if (is_gimple_val (expr))
7835 return expr;
7837 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7839 push_gimplify_context (&gctx);
7840 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7841 gimplify_ctxp->allow_rhs_cond_expr = true;
7843 if (var)
7844 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7846 if (TREE_CODE (expr) != MODIFY_EXPR
7847 && TREE_TYPE (expr) == void_type_node)
7849 gimplify_and_add (expr, stmts);
7850 expr = NULL_TREE;
7852 else
7854 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7855 gcc_assert (ret != GS_ERROR);
7858 if (gimple_referenced_vars (cfun))
7859 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7860 add_referenced_var (t);
7862 pop_gimplify_context (NULL);
7864 return expr;
7867 /* Invoke force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.
7868 If some statements are produced, emit them at GSI. If BEFORE is true,
7869 the statements are inserted before GSI, otherwise they are inserted after
7870 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT and
7871 GSI_CONTINUE_LINKING are the usual values). */
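/* The pattern shown for force_gimple_operand collapses to a single
   call here (illustrative):

       tree val = force_gimple_operand_gsi (&gsi, expr, true, NULL_TREE,
                                            true, GSI_SAME_STMT);  */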
7873 tree
7874 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7875 bool simple_p, tree var, bool before,
7876 enum gsi_iterator_update m)
7878 gimple_seq stmts;
7880 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7882 if (!gimple_seq_empty_p (stmts))
7884 if (gimple_in_ssa_p (cfun))
7886 gimple_stmt_iterator i;
7888 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7889 mark_symbols_for_renaming (gsi_stmt (i));
7892 if (before)
7893 gsi_insert_seq_before (gsi, stmts, m);
7894 else
7895 gsi_insert_seq_after (gsi, stmts, m);
7898 return expr;
7901 #include "gt-gimplify.h"