1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
55 #include "tree-pass.h"
58 enum gimplify_omp_var_data
60 GOVD_SEEN = 1,
61 GOVD_EXPLICIT = 2,
62 GOVD_SHARED = 4,
63 GOVD_PRIVATE = 8,
64 GOVD_FIRSTPRIVATE = 16,
65 GOVD_LASTPRIVATE = 32,
66 GOVD_REDUCTION = 64,
67 GOVD_LOCAL = 128,
68 GOVD_DEBUG_PRIVATE = 256,
69 GOVD_PRIVATE_OUTER_REF = 512,
70 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
71 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
75 enum omp_region_type
77 ORT_WORKSHARE = 0,
78 ORT_TASK = 1,
79 ORT_PARALLEL = 2,
80 ORT_COMBINED_PARALLEL = 3
83 struct gimplify_omp_ctx
85 struct gimplify_omp_ctx *outer_context;
86 splay_tree variables;
87 struct pointer_set_t *privatized_types;
88 location_t location;
89 enum omp_clause_default_kind default_kind;
90 enum omp_region_type region_type;
93 static struct gimplify_ctx *gimplify_ctxp;
94 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
97 /* Formal (expression) temporary table handling: Multiple occurrences of
98 the same scalar expression are evaluated into the same temporary. */
100 typedef struct gimple_temp_hash_elt
102 tree val; /* Key */
103 tree temp; /* Value */
104 } elt_t;
106 /* Forward declarations. */
107 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
109 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
110 form and we don't do any syntax checking. */
111 void
112 mark_addressable (tree x)
114 while (handled_component_p (x))
115 x = TREE_OPERAND (x, 0);
116 if (TREE_CODE (x) != VAR_DECL
117 && TREE_CODE (x) != PARM_DECL
118 && TREE_CODE (x) != RESULT_DECL)
119 return;
120 TREE_ADDRESSABLE (x) = 1;
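/* Illustrative example (an assumption, not part of the original sources):
   when the address of s.field[i] is taken, the loop above walks the
   COMPONENT_REF and ARRAY_REF chain down to the base decl S, and it is S
   itself that gets TREE_ADDRESSABLE set.  */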
123 /* Return a hash value for a formal temporary table entry. */
125 static hashval_t
126 gimple_tree_hash (const void *p)
128 tree t = ((const elt_t *) p)->val;
129 return iterative_hash_expr (t, 0);
132 /* Compare two formal temporary table entries. */
134 static int
135 gimple_tree_eq (const void *p1, const void *p2)
137 tree t1 = ((const elt_t *) p1)->val;
138 tree t2 = ((const elt_t *) p2)->val;
139 enum tree_code code = TREE_CODE (t1);
141 if (TREE_CODE (t2) != code
142 || TREE_TYPE (t1) != TREE_TYPE (t2))
143 return 0;
145 if (!operand_equal_p (t1, t2, 0))
146 return 0;
148 /* Only allow them to compare equal if they also hash equal; otherwise
149 results are nondeterministic, and we fail the bootstrap comparison. */
150 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
152 return 1;
155 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
156 *SEQ_P is NULL, a new sequence is allocated. This function is
157 similar to gimple_seq_add_stmt, but does not scan the operands.
158 During gimplification, we need to manipulate statement sequences
159 before the def/use vectors have been constructed. */
161 static void
162 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
164 gimple_stmt_iterator si;
166 if (gs == NULL)
167 return;
169 if (*seq_p == NULL)
170 *seq_p = gimple_seq_alloc ();
172 si = gsi_last (*seq_p);
174 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
177 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
178 NULL, a new sequence is allocated. This function is
179 similar to gimple_seq_add_seq, but does not scan the operands.
180 During gimplification, we need to manipulate statement sequences
181 before the def/use vectors have been constructed. */
183 static void
184 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
186 gimple_stmt_iterator si;
188 if (src == NULL)
189 return;
191 if (*dst_p == NULL)
192 *dst_p = gimple_seq_alloc ();
194 si = gsi_last (*dst_p);
195 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
198 /* Set up a context for the gimplifier. */
200 void
201 push_gimplify_context (struct gimplify_ctx *c)
203 memset (c, '\0', sizeof (*c));
204 c->prev_context = gimplify_ctxp;
205 gimplify_ctxp = c;
208 /* Tear down a context for the gimplifier. If BODY is non-null, then
209 put the temporaries into the outer BIND_EXPR. Otherwise, put them
210 in the local_decls.
212 BODY is not a sequence, but the first tuple in a sequence. */
214 void
215 pop_gimplify_context (gimple body)
217 struct gimplify_ctx *c = gimplify_ctxp;
219 gcc_assert (c && (c->bind_expr_stack == NULL
220 || VEC_empty (gimple, c->bind_expr_stack)));
221 VEC_free (gimple, heap, c->bind_expr_stack);
222 gimplify_ctxp = c->prev_context;
224 if (body)
225 declare_vars (c->temps, body, false);
226 else
227 record_vars (c->temps);
229 if (c->temp_htab)
230 htab_delete (c->temp_htab);
233 static void
234 gimple_push_bind_expr (gimple gimple_bind)
236 if (gimplify_ctxp->bind_expr_stack == NULL)
237 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
238 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
241 static void
242 gimple_pop_bind_expr (void)
244 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
247 gimple
248 gimple_current_bind_expr (void)
250 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
253 /* Return the stack of GIMPLE_BINDs created during gimplification. */
255 VEC(gimple, heap) *
256 gimple_bind_expr_stack (void)
258 return gimplify_ctxp->bind_expr_stack;
261 /* Returns true iff there is a COND_EXPR between us and the innermost
262 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
264 static bool
265 gimple_conditional_context (void)
267 return gimplify_ctxp->conditions > 0;
270 /* Note that we've entered a COND_EXPR. */
272 static void
273 gimple_push_condition (void)
275 #ifdef ENABLE_GIMPLE_CHECKING
276 if (gimplify_ctxp->conditions == 0)
277 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
278 #endif
279 ++(gimplify_ctxp->conditions);
282 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
283 now, add any conditional cleanups we've seen to the prequeue. */
285 static void
286 gimple_pop_condition (gimple_seq *pre_p)
288 int conds = --(gimplify_ctxp->conditions);
290 gcc_assert (conds >= 0);
291 if (conds == 0)
293 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
294 gimplify_ctxp->conditional_cleanups = NULL;
298 /* A stable comparison routine for use with splay trees and DECLs. */
300 static int
301 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
303 tree a = (tree) xa;
304 tree b = (tree) xb;
306 return DECL_UID (a) - DECL_UID (b);
309 /* Create a new omp construct that deals with variable remapping. */
311 static struct gimplify_omp_ctx *
312 new_omp_context (enum omp_region_type region_type)
314 struct gimplify_omp_ctx *c;
316 c = XCNEW (struct gimplify_omp_ctx);
317 c->outer_context = gimplify_omp_ctxp;
318 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
319 c->privatized_types = pointer_set_create ();
320 c->location = input_location;
321 c->region_type = region_type;
322 if (region_type != ORT_TASK)
323 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
324 else
325 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
327 return c;
330 /* Destroy an omp construct that deals with variable remapping. */
332 static void
333 delete_omp_context (struct gimplify_omp_ctx *c)
335 splay_tree_delete (c->variables);
336 pointer_set_destroy (c->privatized_types);
337 XDELETE (c);
340 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
341 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
343 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
345 static void
346 append_to_statement_list_1 (tree t, tree *list_p)
348 tree list = *list_p;
349 tree_stmt_iterator i;
351 if (!list)
353 if (t && TREE_CODE (t) == STATEMENT_LIST)
355 *list_p = t;
356 return;
358 *list_p = list = alloc_stmt_list ();
361 i = tsi_last (list);
362 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
365 /* Add T to the end of the list container pointed to by LIST_P.
366 If T is an expression with no effects, it is ignored. */
368 void
369 append_to_statement_list (tree t, tree *list_p)
371 if (t && TREE_SIDE_EFFECTS (t))
372 append_to_statement_list_1 (t, list_p);
375 /* Similar, but the statement is always added, regardless of side effects. */
377 void
378 append_to_statement_list_force (tree t, tree *list_p)
380 if (t != NULL_TREE)
381 append_to_statement_list_1 (t, list_p);
384 /* Both gimplify the statement T and append it to *SEQ_P. This function
385 behaves exactly as gimplify_stmt, but you don't have to pass T as a
386 reference. */
388 void
389 gimplify_and_add (tree t, gimple_seq *seq_p)
391 gimplify_stmt (&t, seq_p);
394 /* Gimplify statement T into sequence *SEQ_P, and return the first
395 tuple in the sequence of generated tuples for this statement.
396 Return NULL if gimplifying T produced no tuples. */
398 static gimple
399 gimplify_and_return_first (tree t, gimple_seq *seq_p)
401 gimple_stmt_iterator last = gsi_last (*seq_p);
403 gimplify_and_add (t, seq_p);
405 if (!gsi_end_p (last))
407 gsi_next (&last);
408 return gsi_stmt (last);
410 else
411 return gimple_seq_first_stmt (*seq_p);
414 /* Strip off a legitimate source ending from the input string NAME of
415 length LEN. Rather than having to know the names used by all of
416 our front ends, we strip off an ending of a period followed by
417 up to five characters. (Java uses ".class".) */
419 static inline void
420 remove_suffix (char *name, int len)
422 int i;
424 for (i = 2; i < 8 && len > i; i++)
426 if (name[len - i] == '.')
428 name[len - i] = '\0';
429 break;
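/* Illustrative example (an assumption, not from the original sources): for
   NAME "obj.class" with LEN 9, the loop above finds the '.' at i == 6 and
   truncates the buffer to "obj"; a C++ style "foo.cc" is likewise cut back
   to "foo".  */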
434 /* Create a new temporary name with PREFIX. Returns an identifier. */
436 static GTY(()) unsigned int tmp_var_id_num;
438 tree
439 create_tmp_var_name (const char *prefix)
441 char *tmp_name;
443 if (prefix)
445 char *preftmp = ASTRDUP (prefix);
447 remove_suffix (preftmp, strlen (preftmp));
448 prefix = preftmp;
451 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
452 return get_identifier (tmp_name);
456 /* Create a new temporary variable declaration of type TYPE.
457 Does NOT push it into the current binding. */
459 tree
460 create_tmp_var_raw (tree type, const char *prefix)
462 tree tmp_var;
463 tree new_type;
465 /* Make the type of the variable writable. */
466 new_type = build_type_variant (type, 0, 0);
467 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
469 tmp_var = build_decl (input_location,
470 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
471 type);
473 /* The variable was declared by the compiler. */
474 DECL_ARTIFICIAL (tmp_var) = 1;
475 /* And we don't want debug info for it. */
476 DECL_IGNORED_P (tmp_var) = 1;
478 /* Make the variable writable. */
479 TREE_READONLY (tmp_var) = 0;
481 DECL_EXTERNAL (tmp_var) = 0;
482 TREE_STATIC (tmp_var) = 0;
483 TREE_USED (tmp_var) = 1;
485 return tmp_var;
488 /* Create a new temporary variable declaration of type TYPE. DOES push the
489 variable into the current binding. Further, assume that this is called
490 only from gimplification or optimization, at which point the creation of
491 certain types is a bug. */
493 tree
494 create_tmp_var (tree type, const char *prefix)
496 tree tmp_var;
498 /* We don't allow types that are addressable (meaning we can't make copies),
499 or incomplete. We used to reject all variable-size objects here as well,
500 but now support those for which a constant upper bound can be obtained.
501 The processing for variable sizes is performed in gimple_add_tmp_var, which
502 is the point at which it really matters, and which may be reached via paths
503 not going through this function, e.g. after direct calls to create_tmp_var_raw. */
504 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
506 tmp_var = create_tmp_var_raw (type, prefix);
507 gimple_add_tmp_var (tmp_var);
508 return tmp_var;
511 /* Create a temporary with a name derived from VAL. Subroutine of
512 lookup_tmp_var; nobody else should call this function. */
514 static inline tree
515 create_tmp_from_val (tree val)
517 return create_tmp_var (TREE_TYPE (val), get_name (val));
520 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
521 an existing expression temporary. */
523 static tree
524 lookup_tmp_var (tree val, bool is_formal)
526 tree ret;
528 /* If not optimizing, never really reuse a temporary. local-alloc
529 won't allocate any variable that is used in more than one basic
530 block, which means it will go into memory, causing much extra
531 work in reload and final and poorer code generation, outweighing
532 the extra memory allocation here. */
533 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
534 ret = create_tmp_from_val (val);
535 else
537 elt_t elt, *elt_p;
538 void **slot;
540 elt.val = val;
541 if (gimplify_ctxp->temp_htab == NULL)
542 gimplify_ctxp->temp_htab
543 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
544 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
545 if (*slot == NULL)
547 elt_p = XNEW (elt_t);
548 elt_p->val = val;
549 elt_p->temp = ret = create_tmp_from_val (val);
550 *slot = (void *) elt_p;
552 else
554 elt_p = (elt_t *) *slot;
555 ret = elt_p->temp;
559 return ret;
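/* Illustrative sketch (an assumption): when optimizing, two formal-temporary
   requests for the same side-effect-free expression, say 'a + b', hash to
   the same slot in temp_htab above and therefore hand back the same
   temporary decl, so all occurrences of the expression are evaluated into a
   single temporary as described at the top of this file.  */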
563 /* Return true if T is a CALL_EXPR or an expression that can be
564 assigned to a temporary. Note that this predicate should only be
565 used during gimplification. See the rationale for this in
566 gimplify_modify_expr. */
568 static bool
569 is_gimple_reg_rhs_or_call (tree t)
571 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
572 || TREE_CODE (t) == CALL_EXPR);
575 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
576 this predicate should only be used during gimplification. See the
577 rationale for this in gimplify_modify_expr. */
579 static bool
580 is_gimple_mem_rhs_or_call (tree t)
582 /* If we're dealing with a renamable type, either source or dest must be
583 a renamed variable. */
584 if (is_gimple_reg_type (TREE_TYPE (t)))
585 return is_gimple_val (t);
586 else
587 return (is_gimple_val (t) || is_gimple_lvalue (t)
588 || TREE_CODE (t) == CALL_EXPR);
591 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
593 static tree
594 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
595 bool is_formal)
597 tree t, mod;
599 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
600 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
601 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
602 fb_rvalue);
604 t = lookup_tmp_var (val, is_formal);
606 if (is_formal
607 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
608 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
609 DECL_GIMPLE_REG_P (t) = 1;
611 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
613 if (EXPR_HAS_LOCATION (val))
614 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
615 else
616 SET_EXPR_LOCATION (mod, input_location);
618 /* gimplify_modify_expr might want to reduce this further. */
619 gimplify_and_add (mod, pre_p);
620 ggc_free (mod);
622 /* If we're gimplifying into ssa, gimplify_modify_expr will have
623 given our temporary an SSA name. Find and return it. */
624 if (gimplify_ctxp->into_ssa)
626 gimple last = gimple_seq_last_stmt (*pre_p);
627 t = gimple_get_lhs (last);
630 return t;
633 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
634 in gimplify_expr. Only use this function if:
636 1) The value of the unfactored expression represented by VAL will not
637 change between the initialization and use of the temporary, and
638 2) The temporary will not be otherwise modified.
640 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
641 and #2 means it is inappropriate for && temps.
643 For other cases, use get_initialized_tmp_var instead. */
645 tree
646 get_formal_tmp_var (tree val, gimple_seq *pre_p)
648 return internal_get_tmp_var (val, pre_p, NULL, true);
651 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
652 are as in gimplify_expr. */
654 tree
655 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
657 return internal_get_tmp_var (val, pre_p, post_p, false);
660 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
661 true, generate debug info for them; otherwise don't. */
663 void
664 declare_vars (tree vars, gimple scope, bool debug_info)
666 tree last = vars;
667 if (last)
669 tree temps, block;
671 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
673 temps = nreverse (last);
675 block = gimple_bind_block (scope);
676 gcc_assert (!block || TREE_CODE (block) == BLOCK);
677 if (!block || !debug_info)
679 TREE_CHAIN (last) = gimple_bind_vars (scope);
680 gimple_bind_set_vars (scope, temps);
682 else
684 /* We need to attach the nodes both to the BIND_EXPR and to its
685 associated BLOCK for debugging purposes. The key point here
686 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
687 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
688 if (BLOCK_VARS (block))
689 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
690 else
692 gimple_bind_set_vars (scope,
693 chainon (gimple_bind_vars (scope), temps));
694 BLOCK_VARS (block) = temps;
700 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
701 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
702 no such upper bound can be obtained. */
704 static void
705 force_constant_size (tree var)
707 /* The only attempt we make is by querying the maximum size of objects
708 of the variable's type. */
710 HOST_WIDE_INT max_size;
712 gcc_assert (TREE_CODE (var) == VAR_DECL);
714 max_size = max_int_size_in_bytes (TREE_TYPE (var));
716 gcc_assert (max_size >= 0);
718 DECL_SIZE_UNIT (var)
719 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
720 DECL_SIZE (var)
721 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
724 void
725 gimple_add_tmp_var (tree tmp)
727 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
729 /* Later processing assumes that the object size is constant, which might
730 not be true at this point. Force the use of a constant upper bound in
731 this case. */
732 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
733 force_constant_size (tmp);
735 DECL_CONTEXT (tmp) = current_function_decl;
736 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
738 if (gimplify_ctxp)
740 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
741 gimplify_ctxp->temps = tmp;
743 /* Mark temporaries local within the nearest enclosing parallel. */
744 if (gimplify_omp_ctxp)
746 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
747 while (ctx && ctx->region_type == ORT_WORKSHARE)
748 ctx = ctx->outer_context;
749 if (ctx)
750 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
753 else if (cfun)
754 record_vars (tmp);
755 else
757 gimple_seq body_seq;
759 /* This case is for nested functions. We need to expose the locals
760 they create. */
761 body_seq = gimple_body (current_function_decl);
762 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
766 /* Determines whether to assign a location to the statement GS. */
768 static bool
769 should_carry_location_p (gimple gs)
771 /* Don't emit a line note for a label. We particularly don't want to
772 emit one for the break label, since it doesn't actually correspond
773 to the beginning of the loop/switch. */
774 if (gimple_code (gs) == GIMPLE_LABEL)
775 return false;
777 return true;
780 /* Same, but for a tree. */
782 static bool
783 tree_should_carry_location_p (const_tree stmt)
785 /* Don't emit a line note for a label. We particularly don't want to
786 emit one for the break label, since it doesn't actually correspond
787 to the beginning of the loop/switch. */
788 if (TREE_CODE (stmt) == LABEL_EXPR)
789 return false;
791 /* Do not annotate empty statements, since it confuses gcov. */
792 if (!TREE_SIDE_EFFECTS (stmt))
793 return false;
795 return true;
798 /* Return true if a location should not be emitted for this statement
799 by annotate_one_with_location. */
801 static inline bool
802 gimple_do_not_emit_location_p (gimple g)
804 return gimple_plf (g, GF_PLF_1);
807 /* Mark statement G so a location will not be emitted by
808 annotate_one_with_location. */
810 static inline void
811 gimple_set_do_not_emit_location (gimple g)
813 /* The PLF flags are initialized to 0 when a new tuple is created,
814 so no need to initialize it anywhere. */
815 gimple_set_plf (g, GF_PLF_1, true);
818 /* Set the location for gimple statement GS to LOCATION. */
820 static void
821 annotate_one_with_location (gimple gs, location_t location)
823 if (!gimple_has_location (gs)
824 && !gimple_do_not_emit_location_p (gs)
825 && should_carry_location_p (gs))
826 gimple_set_location (gs, location);
829 /* Same, but for tree T. */
831 static void
832 tree_annotate_one_with_location (tree t, location_t location)
834 if (CAN_HAVE_LOCATION_P (t)
835 && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
836 SET_EXPR_LOCATION (t, location);
840 /* Set LOCATION for all the statements after iterator GSI in sequence
841 SEQ. If GSI is pointing to the end of the sequence, start with the
842 first statement in SEQ. */
844 static void
845 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
846 location_t location)
848 if (gsi_end_p (gsi))
849 gsi = gsi_start (seq);
850 else
851 gsi_next (&gsi);
853 for (; !gsi_end_p (gsi); gsi_next (&gsi))
854 annotate_one_with_location (gsi_stmt (gsi), location);
858 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
860 void
861 annotate_all_with_location (gimple_seq stmt_p, location_t location)
863 gimple_stmt_iterator i;
865 if (gimple_seq_empty_p (stmt_p))
866 return;
868 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
870 gimple gs = gsi_stmt (i);
871 annotate_one_with_location (gs, location);
875 /* Same, but for statement or statement list in *STMT_P. */
877 void
878 tree_annotate_all_with_location (tree *stmt_p, location_t location)
880 tree_stmt_iterator i;
882 if (!*stmt_p)
883 return;
885 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
887 tree t = tsi_stmt (i);
889 /* Assuming we've already been gimplified, we shouldn't
890 see nested chaining constructs anymore. */
891 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
892 && TREE_CODE (t) != COMPOUND_EXPR);
894 tree_annotate_one_with_location (t, location);
899 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
900 These nodes model computations that should only be done once. If we
901 were to unshare something like SAVE_EXPR(i++), the gimplification
902 process would create wrong code. */
904 static tree
905 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
907 enum tree_code code = TREE_CODE (*tp);
908 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
909 if (TREE_CODE_CLASS (code) == tcc_type
910 || TREE_CODE_CLASS (code) == tcc_declaration
911 || TREE_CODE_CLASS (code) == tcc_constant
912 || code == SAVE_EXPR || code == TARGET_EXPR
913 /* We can't do anything sensible with a BLOCK used as an expression,
914 but we also can't just die when we see it because of non-expression
915 uses. So just avert our eyes and cross our fingers. Silly Java. */
916 || code == BLOCK)
917 *walk_subtrees = 0;
918 else
920 gcc_assert (code != BIND_EXPR);
921 copy_tree_r (tp, walk_subtrees, data);
924 return NULL_TREE;
927 /* Callback for walk_tree to unshare most of the shared trees rooted at
928 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
929 then *TP is deep copied by calling copy_tree_r.
931 This unshares the same trees as copy_tree_r with the exception of
932 SAVE_EXPR nodes. These nodes model computations that should only be
933 done once. If we were to unshare something like SAVE_EXPR(i++), the
934 gimplification process would create wrong code. */
936 static tree
937 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
938 void *data ATTRIBUTE_UNUSED)
940 tree t = *tp;
941 enum tree_code code = TREE_CODE (t);
943 /* Skip types, decls, and constants. But we do want to look at their
944 types and the bounds of types. Mark them as visited so we properly
945 unmark their subtrees on the unmark pass. If we've already seen them,
946 don't look down further. */
947 if (TREE_CODE_CLASS (code) == tcc_type
948 || TREE_CODE_CLASS (code) == tcc_declaration
949 || TREE_CODE_CLASS (code) == tcc_constant)
951 if (TREE_VISITED (t))
952 *walk_subtrees = 0;
953 else
954 TREE_VISITED (t) = 1;
957 /* If this node has been visited already, unshare it and don't look
958 any deeper. */
959 else if (TREE_VISITED (t))
961 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
962 *walk_subtrees = 0;
965 /* Otherwise, mark the tree as visited and keep looking. */
966 else
967 TREE_VISITED (t) = 1;
969 return NULL_TREE;
972 static tree
973 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
974 void *data ATTRIBUTE_UNUSED)
976 if (TREE_VISITED (*tp))
977 TREE_VISITED (*tp) = 0;
978 else
979 *walk_subtrees = 0;
981 return NULL_TREE;
984 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
985 bodies of any nested functions if we are unsharing the entire body of
986 FNDECL. */
988 static void
989 unshare_body (tree *body_p, tree fndecl)
991 struct cgraph_node *cgn = cgraph_node (fndecl);
993 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
994 if (body_p == &DECL_SAVED_TREE (fndecl))
995 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
996 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
999 /* Likewise, but mark all trees as not visited. */
1001 static void
1002 unvisit_body (tree *body_p, tree fndecl)
1004 struct cgraph_node *cgn = cgraph_node (fndecl);
1006 walk_tree (body_p, unmark_visited_r, NULL, NULL);
1007 if (body_p == &DECL_SAVED_TREE (fndecl))
1008 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1009 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1012 /* Unconditionally make an unshared copy of EXPR. This is used when using
1013 stored expressions which span multiple functions, such as BINFO_VTABLE,
1014 as the normal unsharing process can't tell that they're shared. */
1016 tree
1017 unshare_expr (tree expr)
1019 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1020 return expr;
1023 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1024 contain statements and have a value. Assign its value to a temporary
1025 and give it void_type_node. Returns the temporary, or NULL_TREE if
1026 WRAPPER was already void. */
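/* Illustrative example (an assumption): for a statement-expression used as
   an rvalue, e.g. x = ({ f (); g (); }), the BIND_EXPR and STATEMENT_LIST
   wrappers are given void type and their final value is assigned either to
   the caller-supplied TEMP or to a fresh "retval" temporary, which is what
   this function returns.  */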
1028 tree
1029 voidify_wrapper_expr (tree wrapper, tree temp)
1031 tree type = TREE_TYPE (wrapper);
1032 if (type && !VOID_TYPE_P (type))
1034 tree *p;
1036 /* Set p to point to the body of the wrapper. Loop until we find
1037 something that isn't a wrapper. */
1038 for (p = &wrapper; p && *p; )
1040 switch (TREE_CODE (*p))
1042 case BIND_EXPR:
1043 TREE_SIDE_EFFECTS (*p) = 1;
1044 TREE_TYPE (*p) = void_type_node;
1045 /* For a BIND_EXPR, the body is operand 1. */
1046 p = &BIND_EXPR_BODY (*p);
1047 break;
1049 case CLEANUP_POINT_EXPR:
1050 case TRY_FINALLY_EXPR:
1051 case TRY_CATCH_EXPR:
1052 TREE_SIDE_EFFECTS (*p) = 1;
1053 TREE_TYPE (*p) = void_type_node;
1054 p = &TREE_OPERAND (*p, 0);
1055 break;
1057 case STATEMENT_LIST:
1059 tree_stmt_iterator i = tsi_last (*p);
1060 TREE_SIDE_EFFECTS (*p) = 1;
1061 TREE_TYPE (*p) = void_type_node;
1062 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1064 break;
1066 case COMPOUND_EXPR:
1067 /* Advance to the last statement. Set all container types to void. */
1068 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1070 TREE_SIDE_EFFECTS (*p) = 1;
1071 TREE_TYPE (*p) = void_type_node;
1073 break;
1075 default:
1076 goto out;
1080 out:
1081 if (p == NULL || IS_EMPTY_STMT (*p))
1082 temp = NULL_TREE;
1083 else if (temp)
1085 /* The wrapper is on the RHS of an assignment that we're pushing
1086 down. */
1087 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1088 || TREE_CODE (temp) == MODIFY_EXPR);
1089 TREE_OPERAND (temp, 1) = *p;
1090 *p = temp;
1092 else
1094 temp = create_tmp_var (type, "retval");
1095 *p = build2 (INIT_EXPR, type, temp, *p);
1098 return temp;
1101 return NULL_TREE;
1104 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1105 a temporary through which they communicate. */
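/* Illustrative sketch of the emitted pair (the temporary's printed name is
   an assumption):
     saved_stack.N = __builtin_stack_save ();
     ... code using dynamic stack allocation ...
     __builtin_stack_restore (saved_stack.N);  */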
1107 static void
1108 build_stack_save_restore (gimple *save, gimple *restore)
1110 tree tmp_var;
1112 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1113 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1114 gimple_call_set_lhs (*save, tmp_var);
1116 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1117 1, tmp_var);
1120 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1122 static enum gimplify_status
1123 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1125 tree bind_expr = *expr_p;
1126 bool old_save_stack = gimplify_ctxp->save_stack;
1127 tree t;
1128 gimple gimple_bind;
1129 gimple_seq body;
1131 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1133 /* Mark variables seen in this bind expr. */
1134 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1136 if (TREE_CODE (t) == VAR_DECL)
1138 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1140 /* Mark variable as local. */
1141 if (ctx && !is_global_var (t)
1142 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1143 || splay_tree_lookup (ctx->variables,
1144 (splay_tree_key) t) == NULL))
1145 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1147 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1149 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1150 cfun->has_local_explicit_reg_vars = true;
1153 /* Preliminarily mark non-addressed complex variables as eligible
1154 for promotion to gimple registers. We'll transform their uses
1155 as we find them.
1156 We exclude complex types if not optimizing because they can be
1157 subject to partial stores in GNU C by means of the __real__ and
1158 __imag__ operators and we cannot promote them to total stores
1159 (see gimplify_modify_expr_complex_part). */
1160 if (optimize
1161 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1162 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1163 && !TREE_THIS_VOLATILE (t)
1164 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1165 && !needs_to_live_in_memory (t))
1166 DECL_GIMPLE_REG_P (t) = 1;
1169 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1170 BIND_EXPR_BLOCK (bind_expr));
1171 gimple_push_bind_expr (gimple_bind);
1173 gimplify_ctxp->save_stack = false;
1175 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1176 body = NULL;
1177 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1178 gimple_bind_set_body (gimple_bind, body);
1180 if (gimplify_ctxp->save_stack)
1182 gimple stack_save, stack_restore, gs;
1183 gimple_seq cleanup, new_body;
1185 /* Save stack on entry and restore it on exit. Add a try_finally
1186 block to achieve this. Note that mudflap depends on the
1187 format of the emitted code: see mx_register_decls(). */
1188 build_stack_save_restore (&stack_save, &stack_restore);
1190 cleanup = new_body = NULL;
1191 gimplify_seq_add_stmt (&cleanup, stack_restore);
1192 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1193 GIMPLE_TRY_FINALLY);
1195 gimplify_seq_add_stmt (&new_body, stack_save);
1196 gimplify_seq_add_stmt (&new_body, gs);
1197 gimple_bind_set_body (gimple_bind, new_body);
1200 gimplify_ctxp->save_stack = old_save_stack;
1201 gimple_pop_bind_expr ();
1203 gimplify_seq_add_stmt (pre_p, gimple_bind);
1205 if (temp)
1207 *expr_p = temp;
1208 return GS_OK;
1211 *expr_p = NULL_TREE;
1212 return GS_ALL_DONE;
1215 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1216 GIMPLE value, it is assigned to a new temporary and the statement is
1217 re-written to return the temporary.
1219 PRE_P points to the sequence where side effects that must happen before
1220 STMT should be stored. */
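/* Illustrative example (an assumption): a GENERIC 'return a + b;' whose
   value is not a GIMPLE value is rewritten roughly as
     D.N = a + b;
     return D.N;
   where D.N is the shared return temporary created below (or the bare
   RESULT_DECL when aggregate_value_p holds for the return type).  */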
1222 static enum gimplify_status
1223 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1225 gimple ret;
1226 tree ret_expr = TREE_OPERAND (stmt, 0);
1227 tree result_decl, result;
1229 if (ret_expr == error_mark_node)
1230 return GS_ERROR;
1232 if (!ret_expr
1233 || TREE_CODE (ret_expr) == RESULT_DECL
1234 || ret_expr == error_mark_node)
1236 gimple ret = gimple_build_return (ret_expr);
1237 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1238 gimplify_seq_add_stmt (pre_p, ret);
1239 return GS_ALL_DONE;
1242 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1243 result_decl = NULL_TREE;
1244 else
1246 result_decl = TREE_OPERAND (ret_expr, 0);
1248 /* See through a return by reference. */
1249 if (TREE_CODE (result_decl) == INDIRECT_REF)
1250 result_decl = TREE_OPERAND (result_decl, 0);
1252 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1253 || TREE_CODE (ret_expr) == INIT_EXPR)
1254 && TREE_CODE (result_decl) == RESULT_DECL);
1257 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1258 Recall that aggregate_value_p is FALSE for any aggregate type that is
1259 returned in registers. If we're returning values in registers, then
1260 we don't want to extend the lifetime of the RESULT_DECL, particularly
1261 across another call. In addition, for those aggregates for which
1262 hard_function_value generates a PARALLEL, we'll die during normal
1263 expansion of structure assignments; there's special code in expand_return
1264 to handle this case that does not exist in expand_expr. */
1265 if (!result_decl
1266 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1267 result = result_decl;
1268 else if (gimplify_ctxp->return_temp)
1269 result = gimplify_ctxp->return_temp;
1270 else
1272 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1273 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1274 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1275 DECL_GIMPLE_REG_P (result) = 1;
1277 /* ??? With complex control flow (usually involving abnormal edges),
1278 we can wind up warning about an uninitialized value for this. Due
1279 to how this variable is constructed and initialized, this is never
1280 true. Give up and never warn. */
1281 TREE_NO_WARNING (result) = 1;
1283 gimplify_ctxp->return_temp = result;
1286 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1287 Then gimplify the whole thing. */
1288 if (result != result_decl)
1289 TREE_OPERAND (ret_expr, 0) = result;
1291 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1293 ret = gimple_build_return (result);
1294 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1295 gimplify_seq_add_stmt (pre_p, ret);
1297 return GS_ALL_DONE;
1300 static void
1301 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1303 /* This is a variable-sized decl. Simplify its size and mark it
1304 for deferred expansion. Note that mudflap depends on the format
1305 of the emitted code: see mx_register_decls(). */
1306 tree t, addr, ptr_type;
1308 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1309 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1311 /* All occurrences of this decl in final gimplified code will be
1312 replaced by indirection. Setting DECL_VALUE_EXPR does two
1313 things: First, it lets the rest of the gimplifier know what
1314 replacement to use. Second, it lets the debug info know
1315 where to find the value. */
1316 ptr_type = build_pointer_type (TREE_TYPE (decl));
1317 addr = create_tmp_var (ptr_type, get_name (decl));
1318 DECL_IGNORED_P (addr) = 0;
1319 t = build_fold_indirect_ref (addr);
1320 SET_DECL_VALUE_EXPR (decl, t);
1321 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1323 t = built_in_decls[BUILT_IN_ALLOCA];
1324 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1325 t = fold_convert (ptr_type, t);
1326 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1328 gimplify_and_add (t, seq_p);
1330 /* Indicate that we need to restore the stack level when the
1331 enclosing BIND_EXPR is exited. */
1332 gimplify_ctxp->save_stack = true;
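/* Illustrative sketch (names and sizes are assumptions): for 'char a[n];'
   the code above emits roughly
     a.ptr = (char *) __builtin_alloca (SIZE);
   sets DECL_VALUE_EXPR (a) to *a.ptr so later uses of A become
   indirections, and the save_stack flag makes the enclosing bind restore
   the stack level on exit.  */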
1336 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1337 and initialization explicit. */
1339 static enum gimplify_status
1340 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1342 tree stmt = *stmt_p;
1343 tree decl = DECL_EXPR_DECL (stmt);
1345 *stmt_p = NULL_TREE;
1347 if (TREE_TYPE (decl) == error_mark_node)
1348 return GS_ERROR;
1350 if ((TREE_CODE (decl) == TYPE_DECL
1351 || TREE_CODE (decl) == VAR_DECL)
1352 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1353 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1355 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1357 tree init = DECL_INITIAL (decl);
1359 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1360 || (!TREE_STATIC (decl)
1361 && flag_stack_check == GENERIC_STACK_CHECK
1362 && compare_tree_int (DECL_SIZE_UNIT (decl),
1363 STACK_CHECK_MAX_VAR_SIZE) > 0))
1364 gimplify_vla_decl (decl, seq_p);
1366 if (init && init != error_mark_node)
1368 if (!TREE_STATIC (decl))
1370 DECL_INITIAL (decl) = NULL_TREE;
1371 init = build2 (INIT_EXPR, void_type_node, decl, init);
1372 gimplify_and_add (init, seq_p);
1373 ggc_free (init);
1375 else
1376 /* We must still examine initializers for static variables
1377 as they may contain a label address. */
1378 walk_tree (&init, force_labels_r, NULL, NULL);
1381 /* Some front ends do not explicitly declare all anonymous
1382 artificial variables. We compensate here by declaring the
1383 variables, though it would be better if the front ends would
1384 explicitly declare them. */
1385 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1386 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1387 gimple_add_tmp_var (decl);
1390 return GS_ALL_DONE;
1393 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1394 and replacing the LOOP_EXPR with goto, but if the loop contains an
1395 EXIT_EXPR, we need to append a label for it to jump to. */
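/* Illustrative sketch (an assumption) of the lowered form:
     start_label:
       ... gimplified LOOP_EXPR_BODY ...
       goto start_label;
     exit_label:   (emitted only if the body contained an EXIT_EXPR)  */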
1397 static enum gimplify_status
1398 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1400 tree saved_label = gimplify_ctxp->exit_label;
1401 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1403 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1405 gimplify_ctxp->exit_label = NULL_TREE;
1407 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1409 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1411 if (gimplify_ctxp->exit_label)
1412 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1414 gimplify_ctxp->exit_label = saved_label;
1416 *expr_p = NULL;
1417 return GS_ALL_DONE;
1420 /* Gimplifies a statement list onto a sequence. These may be created either
1421 by an enlightened front-end, or by shortcut_cond_expr. */
1423 static enum gimplify_status
1424 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1426 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1428 tree_stmt_iterator i = tsi_start (*expr_p);
1430 while (!tsi_end_p (i))
1432 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1433 tsi_delink (&i);
1436 if (temp)
1438 *expr_p = temp;
1439 return GS_OK;
1442 return GS_ALL_DONE;
1445 /* Compare two case labels. Because the front end should already have
1446 made sure that case ranges do not overlap, it is enough to only compare
1447 the CASE_LOW values of each case label. */
1449 static int
1450 compare_case_labels (const void *p1, const void *p2)
1452 const_tree const case1 = *(const_tree const*)p1;
1453 const_tree const case2 = *(const_tree const*)p2;
1455 /* The 'default' case label always goes first. */
1456 if (!CASE_LOW (case1))
1457 return -1;
1458 else if (!CASE_LOW (case2))
1459 return 1;
1460 else
1461 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1465 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1467 void
1468 sort_case_labels (VEC(tree,heap)* label_vec)
1470 size_t len = VEC_length (tree, label_vec);
1471 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1472 compare_case_labels);
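/* Illustrative example (an assumption): a label vector {case 7, default,
   case 2} sorts to {default, case 2, case 7}, since the NULL CASE_LOW of
   the default label always compares first and the remaining labels order
   by their CASE_LOW values.  */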
1476 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1477 branch to. */
1479 static enum gimplify_status
1480 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1482 tree switch_expr = *expr_p;
1483 gimple_seq switch_body_seq = NULL;
1484 enum gimplify_status ret;
1486 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1487 fb_rvalue);
1488 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1489 return ret;
1491 if (SWITCH_BODY (switch_expr))
1493 VEC (tree,heap) *labels;
1494 VEC (tree,heap) *saved_labels;
1495 tree default_case = NULL_TREE;
1496 size_t i, len;
1497 gimple gimple_switch;
1499 /* If someone can be bothered to fill in the labels, they can
1500 be bothered to null out the body too. */
1501 gcc_assert (!SWITCH_LABELS (switch_expr));
1503 /* Save the old labels, get new ones from the body, then restore the old
1504 labels. Save everything from the switch body to append afterwards. */
1505 saved_labels = gimplify_ctxp->case_labels;
1506 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1508 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1509 labels = gimplify_ctxp->case_labels;
1510 gimplify_ctxp->case_labels = saved_labels;
1512 i = 0;
1513 while (i < VEC_length (tree, labels))
1515 tree elt = VEC_index (tree, labels, i);
1516 tree low = CASE_LOW (elt);
1517 bool remove_element = FALSE;
1519 if (low)
1521 /* Discard empty ranges. */
1522 tree high = CASE_HIGH (elt);
1523 if (high && tree_int_cst_lt (high, low))
1524 remove_element = TRUE;
1526 else
1528 /* The default case must be the last label in the list. */
1529 gcc_assert (!default_case);
1530 default_case = elt;
1531 remove_element = TRUE;
1534 if (remove_element)
1535 VEC_ordered_remove (tree, labels, i);
1536 else
1537 i++;
1539 len = i;
1541 if (!VEC_empty (tree, labels))
1542 sort_case_labels (labels);
1544 if (!default_case)
1546 tree type = TREE_TYPE (switch_expr);
1548 /* If the switch has no default label, add one, so that we jump
1549 around the switch body. If the labels already cover the whole
1550 range of type, add the default label pointing to one of the
1551 existing labels. */
1552 if (type == void_type_node)
1553 type = TREE_TYPE (SWITCH_COND (switch_expr));
1554 if (len
1555 && INTEGRAL_TYPE_P (type)
1556 && TYPE_MIN_VALUE (type)
1557 && TYPE_MAX_VALUE (type)
1558 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1559 TYPE_MIN_VALUE (type)))
1561 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1562 if (!high)
1563 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1564 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1566 for (i = 1; i < len; i++)
1568 high = CASE_LOW (VEC_index (tree, labels, i));
1569 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1570 if (!low)
1571 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1572 if ((TREE_INT_CST_LOW (low) + 1
1573 != TREE_INT_CST_LOW (high))
1574 || (TREE_INT_CST_HIGH (low)
1575 + (TREE_INT_CST_LOW (high) == 0)
1576 != TREE_INT_CST_HIGH (high)))
1577 break;
1579 if (i == len)
1580 default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1581 NULL_TREE, NULL_TREE,
1582 CASE_LABEL (VEC_index (tree,
1583 labels, 0)));
1587 if (!default_case)
1589 gimple new_default;
1591 default_case
1592 = build3 (CASE_LABEL_EXPR, void_type_node,
1593 NULL_TREE, NULL_TREE,
1594 create_artificial_label (UNKNOWN_LOCATION));
1595 new_default = gimple_build_label (CASE_LABEL (default_case));
1596 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1600 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1601 default_case, labels);
1602 gimplify_seq_add_stmt (pre_p, gimple_switch);
1603 gimplify_seq_add_seq (pre_p, switch_body_seq);
1604 VEC_free(tree, heap, labels);
1606 else
1607 gcc_assert (SWITCH_LABELS (switch_expr));
1609 return GS_ALL_DONE;
1613 static enum gimplify_status
1614 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1616 struct gimplify_ctx *ctxp;
1617 gimple gimple_label;
1619 /* Invalid OpenMP programs can play Duff's Device type games with
1620 #pragma omp parallel. At least in the C front end, we don't
1621 detect such invalid branches until after gimplification. */
1622 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1623 if (ctxp->case_labels)
1624 break;
1626 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1627 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1628 gimplify_seq_add_stmt (pre_p, gimple_label);
1630 return GS_ALL_DONE;
1633 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1634 if necessary. */
1636 tree
1637 build_and_jump (tree *label_p)
1639 if (label_p == NULL)
1640 /* If there's nowhere to jump, just fall through. */
1641 return NULL_TREE;
1643 if (*label_p == NULL_TREE)
1645 tree label = create_artificial_label (UNKNOWN_LOCATION);
1646 *label_p = label;
1649 return build1 (GOTO_EXPR, void_type_node, *label_p);
1652 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1653 This also involves building a label to jump to and communicating it to
1654 gimplify_loop_expr through gimplify_ctxp->exit_label. */
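/* Illustrative example (an assumption): EXIT_EXPR <cond> becomes
     if (cond) goto exit_label;
   with exit_label created on demand here and emitted after the loop body
   by gimplify_loop_expr.  */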
1656 static enum gimplify_status
1657 gimplify_exit_expr (tree *expr_p)
1659 tree cond = TREE_OPERAND (*expr_p, 0);
1660 tree expr;
1662 expr = build_and_jump (&gimplify_ctxp->exit_label);
1663 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1664 *expr_p = expr;
1666 return GS_OK;
1669 /* A helper function to be called via walk_tree. Mark all labels under *TP
1670 as being forced. To be called for DECL_INITIAL of static variables. */
1672 tree
1673 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1675 if (TYPE_P (*tp))
1676 *walk_subtrees = 0;
1677 if (TREE_CODE (*tp) == LABEL_DECL)
1678 FORCED_LABEL (*tp) = 1;
1680 return NULL_TREE;
1683 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1684 different from its canonical type, wrap the whole thing inside a
1685 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1686 type.
1688 The canonical type of a COMPONENT_REF is the type of the field being
1689 referenced--unless the field is a bit-field which can be read directly
1690 in a smaller mode, in which case the canonical type is the
1691 sign-appropriate type corresponding to that mode. */
1693 static void
1694 canonicalize_component_ref (tree *expr_p)
1696 tree expr = *expr_p;
1697 tree type;
1699 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1701 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1702 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1703 else
1704 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1706 /* One could argue that none of the handling below is necessary for
1707 the non-bit-field case, and that needing a type adjustment there
1708 should instead be declared a front-end error. */
1709 if (TREE_TYPE (expr) != type)
1711 #ifdef ENABLE_TYPES_CHECKING
1712 tree old_type = TREE_TYPE (expr);
1713 #endif
1714 int type_quals;
1716 /* We need to preserve qualifiers and propagate them from
1717 operand 0. */
1718 type_quals = TYPE_QUALS (type)
1719 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1720 if (TYPE_QUALS (type) != type_quals)
1721 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1723 /* Set the type of the COMPONENT_REF to the underlying type. */
1724 TREE_TYPE (expr) = type;
1726 #ifdef ENABLE_TYPES_CHECKING
1727 /* It is now a FE error, if the conversion from the canonical
1728 type to the original expression type is not useless. */
1729 gcc_assert (useless_type_conversion_p (old_type, type));
1730 #endif
1734 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1735 to foo, embed that change in the ADDR_EXPR by converting
1736 T array[U];
1737 (T *)&array
1738 ==>
1739 &array[L]
1740 where L is the lower bound. For simplicity, only do this for constant
1741 lower bound.
1742 The constraint is that the type of &array[L] is trivially convertible
1743 to T *. */
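/* Illustrative example (an assumption): given 'int arr[10];', the
   conversion (int *) &arr is rewritten below as &arr[0], which already has
   the desired pointer type, so no separate conversion statement is
   needed.  */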
1745 static void
1746 canonicalize_addr_expr (tree *expr_p)
1748 tree expr = *expr_p;
1749 tree addr_expr = TREE_OPERAND (expr, 0);
1750 tree datype, ddatype, pddatype;
1752 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1753 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1754 || TREE_CODE (addr_expr) != ADDR_EXPR)
1755 return;
1757 /* The addr_expr type should be a pointer to an array. */
1758 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1759 if (TREE_CODE (datype) != ARRAY_TYPE)
1760 return;
1762 /* The pointer to element type shall be trivially convertible to
1763 the expression pointer type. */
1764 ddatype = TREE_TYPE (datype);
1765 pddatype = build_pointer_type (ddatype);
1766 if (!useless_type_conversion_p (pddatype, ddatype))
1767 return;
1769 /* The lower bound and element sizes must be constant. */
1770 if (!TYPE_SIZE_UNIT (ddatype)
1771 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1772 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1773 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1774 return;
1776 /* All checks succeeded. Build a new node to merge the cast. */
1777 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1778 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1779 NULL_TREE, NULL_TREE);
1780 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1783 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1784 underneath as appropriate. */
1786 static enum gimplify_status
1787 gimplify_conversion (tree *expr_p)
1789 tree tem;
1790 location_t loc = EXPR_LOCATION (*expr_p);
1791 gcc_assert (CONVERT_EXPR_P (*expr_p));
1793 /* Then strip away all but the outermost conversion. */
1794 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1796 /* And remove the outermost conversion if it's useless. */
1797 if (tree_ssa_useless_type_conversion (*expr_p))
1798 *expr_p = TREE_OPERAND (*expr_p, 0);
1800 /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1801 For example this folds (subclass *)&A into &A->subclass, avoiding
1802 the need for a separate statement. */
1803 if (CONVERT_EXPR_P (*expr_p)
1804 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1805 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1806 && (tem = maybe_fold_offset_to_address
1807 (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1808 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1809 *expr_p = tem;
1811 /* If we still have a conversion at the toplevel,
1812 then canonicalize some constructs. */
1813 if (CONVERT_EXPR_P (*expr_p))
1815 tree sub = TREE_OPERAND (*expr_p, 0);
1817 /* If a NOP conversion is changing the type of a COMPONENT_REF
1818 expression, then canonicalize its type now in order to expose more
1819 redundant conversions. */
1820 if (TREE_CODE (sub) == COMPONENT_REF)
1821 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1823 /* If a NOP conversion is changing a pointer to array of foo
1824 to a pointer to foo, embed that change in the ADDR_EXPR. */
1825 else if (TREE_CODE (sub) == ADDR_EXPR)
1826 canonicalize_addr_expr (expr_p);
1829 /* If we have a conversion to a non-register type force the
1830 use of a VIEW_CONVERT_EXPR instead. */
1831 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1832 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1833 TREE_OPERAND (*expr_p, 0));
1835 return GS_OK;
1838 /* Nonlocal VLAs seen in the current function. */
1839 static struct pointer_set_t *nonlocal_vlas;
1841 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1842 DECL_VALUE_EXPR, and it's worth re-examining things. */
1844 static enum gimplify_status
1845 gimplify_var_or_parm_decl (tree *expr_p)
1847 tree decl = *expr_p;
1849 /* ??? If this is a local variable, and it has not been seen in any
1850 outer BIND_EXPR, then it's probably the result of a duplicate
1851 declaration, for which we've already issued an error. It would
1852 be really nice if the front end wouldn't leak these at all.
1853 Currently the only known culprit is C++ destructors, as seen
1854 in g++.old-deja/g++.jason/binding.C. */
1855 if (TREE_CODE (decl) == VAR_DECL
1856 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1857 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1858 && decl_function_context (decl) == current_function_decl)
1860 gcc_assert (errorcount || sorrycount);
1861 return GS_ERROR;
1864 /* When within an OpenMP context, notice uses of variables. */
1865 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1866 return GS_ALL_DONE;
1868 /* If the decl is an alias for another expression, substitute it now. */
1869 if (DECL_HAS_VALUE_EXPR_P (decl))
1871 tree value_expr = DECL_VALUE_EXPR (decl);
1873 /* For referenced nonlocal VLAs add a decl for debugging purposes
1874 to the current function. */
1875 if (TREE_CODE (decl) == VAR_DECL
1876 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1877 && nonlocal_vlas != NULL
1878 && TREE_CODE (value_expr) == INDIRECT_REF
1879 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1880 && decl_function_context (decl) != current_function_decl)
1882 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1883 while (ctx && ctx->region_type == ORT_WORKSHARE)
1884 ctx = ctx->outer_context;
1885 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1887 tree copy = copy_node (decl), block;
1889 lang_hooks.dup_lang_specific_decl (copy);
1890 SET_DECL_RTL (copy, NULL_RTX);
1891 TREE_USED (copy) = 1;
1892 block = DECL_INITIAL (current_function_decl);
1893 TREE_CHAIN (copy) = BLOCK_VARS (block);
1894 BLOCK_VARS (block) = copy;
1895 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1896 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1900 *expr_p = unshare_expr (value_expr);
1901 return GS_OK;
1904 return GS_ALL_DONE;
1908 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1909 node *EXPR_P.
1911 compound_lval
1912 : min_lval '[' val ']'
1913 | min_lval '.' ID
1914 | compound_lval '[' val ']'
1915 | compound_lval '.' ID
1917 This is not part of the original SIMPLE definition, which separates
1918 array and member references, but it seems reasonable to handle them
1919 together. Also, this way we don't run into problems with union
1920 aliasing; gcc requires that for accesses through a union to alias, the
1921 union reference must be explicit, which was not always the case when we
1922 were splitting up array and member refs.
1924 PRE_P points to the sequence where side effects that must happen before
1925 *EXPR_P should be stored.
1927 POST_P points to the sequence where side effects that must happen after
1928 *EXPR_P should be stored. */
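/* Illustrative example (an assumption): for a.b[i].c the loop below pushes
   the .c COMPONENT_REF, the [i] ARRAY_REF and the .b COMPONENT_REF onto
   STACK, leaving P pointing at the decl A; variable bounds and offsets are
   then gimplified from the innermost reference outwards before the base
   and the indices themselves are processed.  */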
1930 static enum gimplify_status
1931 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1932 fallback_t fallback)
1934 tree *p;
1935 VEC(tree,heap) *stack;
1936 enum gimplify_status ret = GS_OK, tret;
1937 int i;
1938 location_t loc = EXPR_LOCATION (*expr_p);
1940 /* Create a stack of the subexpressions so later we can walk them in
1941 order from inner to outer. */
1942 stack = VEC_alloc (tree, heap, 10);
1944 /* We can handle anything that get_inner_reference can deal with. */
1945 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1947 restart:
1948 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1949 if (TREE_CODE (*p) == INDIRECT_REF)
1950 *p = fold_indirect_ref_loc (loc, *p);
1952 if (handled_component_p (*p))
1954 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1955 additional COMPONENT_REFs. */
1956 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1957 && gimplify_var_or_parm_decl (p) == GS_OK)
1958 goto restart;
1959 else
1960 break;
1962 VEC_safe_push (tree, heap, stack, *p);
1965 gcc_assert (VEC_length (tree, stack));
1967 /* Now STACK is a stack of pointers to all the refs we've walked through
1968 and P points to the innermost expression.
1970 Java requires that we elaborate nodes in source order. That
1971 means we must gimplify the inner expression followed by each of
1972 the indices, in order. But we can't gimplify the inner
1973 expression until we deal with any variable bounds, sizes, or
1974 positions in order to deal with PLACEHOLDER_EXPRs.
1976 So we do this in three steps. First we deal with the annotations
1977 for any variables in the components, then we gimplify the base,
1978 then we gimplify any indices, from left to right. */
1979 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1981 tree t = VEC_index (tree, stack, i);
1983 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1985 /* Gimplify the low bound and element type size and put them into
1986 the ARRAY_REF. If these values are set, they have already been
1987 gimplified. */
1988 if (TREE_OPERAND (t, 2) == NULL_TREE)
1990 tree low = unshare_expr (array_ref_low_bound (t));
1991 if (!is_gimple_min_invariant (low))
1993 TREE_OPERAND (t, 2) = low;
1994 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1995 post_p, is_gimple_reg,
1996 fb_rvalue);
1997 ret = MIN (ret, tret);
2001 if (!TREE_OPERAND (t, 3))
2003 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2004 tree elmt_size = unshare_expr (array_ref_element_size (t));
2005 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2007 /* Divide the element size by the alignment of the element
2008 type (above). */
2009 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2011 if (!is_gimple_min_invariant (elmt_size))
2013 TREE_OPERAND (t, 3) = elmt_size;
2014 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2015 post_p, is_gimple_reg,
2016 fb_rvalue);
2017 ret = MIN (ret, tret);
2021 else if (TREE_CODE (t) == COMPONENT_REF)
2023 /* Set the field offset into T and gimplify it. */
2024 if (!TREE_OPERAND (t, 2))
2026 tree offset = unshare_expr (component_ref_field_offset (t));
2027 tree field = TREE_OPERAND (t, 1);
2028 tree factor
2029 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2031 /* Divide the offset by its alignment. */
2032 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2034 if (!is_gimple_min_invariant (offset))
2036 TREE_OPERAND (t, 2) = offset;
2037 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2038 post_p, is_gimple_reg,
2039 fb_rvalue);
2040 ret = MIN (ret, tret);
2046 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2047 so as to match the min_lval predicate. Failure to do so may result
2048 in the creation of large aggregate temporaries. */
2049 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2050 fallback | fb_lvalue);
2051 ret = MIN (ret, tret);
2053 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2054 loop we also remove any useless conversions. */
2055 for (; VEC_length (tree, stack) > 0; )
2057 tree t = VEC_pop (tree, stack);
2059 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2061 /* Gimplify the dimension. */
2062 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2064 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2065 is_gimple_val, fb_rvalue);
2066 ret = MIN (ret, tret);
2069 else if (TREE_CODE (t) == BIT_FIELD_REF)
2071 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2072 is_gimple_val, fb_rvalue);
2073 ret = MIN (ret, tret);
2074 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2075 is_gimple_val, fb_rvalue);
2076 ret = MIN (ret, tret);
2079 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2081 /* The innermost expression P may have originally had
2082 TREE_SIDE_EFFECTS set which would have caused all the outer
2083 expressions in *EXPR_P leading to P to also have had
2084 TREE_SIDE_EFFECTS set. */
2085 recalculate_side_effects (t);
2088 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2089 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2091 canonicalize_component_ref (expr_p);
2092 ret = MIN (ret, GS_OK);
2095 VEC_free (tree, heap, stack);
2097 return ret;
2100 /* Gimplify the self modifying expression pointed to by EXPR_P
2101 (++, --, +=, -=).
2103 PRE_P points to the list where side effects that must happen before
2104 *EXPR_P should be stored.
2106 POST_P points to the list where side effects that must happen after
2107 *EXPR_P should be stored.
2109 WANT_VALUE is nonzero iff we want to use the value of this expression
2110 in another expression. */
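/* Illustrative sketch, not part of the original source; D.1 is an
   invented temporary name.  For a postfix use whose value is needed,
   e.g.

     y = p->count++;

   the LHS is first read into an rvalue that serves as the result, and
   the store is queued on the post queue, giving roughly

     D.1 = p->count;
     p->count = D.1 + 1;
     y = D.1;

   The prefix form is instead rewritten directly into
   "p->count = p->count + 1" (with POINTER_PLUS_EXPR if the operand
   had pointer type).  */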
2112 static enum gimplify_status
2113 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2114 bool want_value)
2116 enum tree_code code;
2117 tree lhs, lvalue, rhs, t1;
2118 gimple_seq post = NULL, *orig_post_p = post_p;
2119 bool postfix;
2120 enum tree_code arith_code;
2121 enum gimplify_status ret;
2122 location_t loc = EXPR_LOCATION (*expr_p);
2124 code = TREE_CODE (*expr_p);
2126 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2127 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2129 /* Prefix or postfix? */
2130 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2131 /* Faster to treat as prefix if result is not used. */
2132 postfix = want_value;
2133 else
2134 postfix = false;
2136 /* For postfix, make sure the inner expression's post side effects
2137 are executed after side effects from this expression. */
2138 if (postfix)
2139 post_p = &post;
2141 /* Add or subtract? */
2142 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2143 arith_code = PLUS_EXPR;
2144 else
2145 arith_code = MINUS_EXPR;
2147 /* Gimplify the LHS into a GIMPLE lvalue. */
2148 lvalue = TREE_OPERAND (*expr_p, 0);
2149 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2150 if (ret == GS_ERROR)
2151 return ret;
2153 /* Extract the operands to the arithmetic operation. */
2154 lhs = lvalue;
2155 rhs = TREE_OPERAND (*expr_p, 1);
2157 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2158 that as the result value and in the postqueue operation. We also
2159 make sure to make lvalue a minimal lval, see
2160 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2161 if (postfix)
2163 if (!is_gimple_min_lval (lvalue))
2165 mark_addressable (lvalue);
2166 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2167 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2168 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2170 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2171 if (ret == GS_ERROR)
2172 return ret;
2175 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2176 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2178 rhs = fold_convert_loc (loc, sizetype, rhs);
2179 if (arith_code == MINUS_EXPR)
2180 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2181 arith_code = POINTER_PLUS_EXPR;
2184 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2186 if (postfix)
2188 gimplify_assign (lvalue, t1, orig_post_p);
2189 gimplify_seq_add_seq (orig_post_p, post);
2190 *expr_p = lhs;
2191 return GS_ALL_DONE;
2193 else
2195 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2196 return GS_OK;
2201 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2203 static void
2204 maybe_with_size_expr (tree *expr_p)
2206 tree expr = *expr_p;
2207 tree type = TREE_TYPE (expr);
2208 tree size;
2210 /* If we've already wrapped this or the type is error_mark_node, we can't do
2211 anything. */
2212 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2213 || type == error_mark_node)
2214 return;
2216 /* If the size isn't known or is a constant, we have nothing to do. */
2217 size = TYPE_SIZE_UNIT (type);
2218 if (!size || TREE_CODE (size) == INTEGER_CST)
2219 return;
2221 /* Otherwise, make a WITH_SIZE_EXPR. */
2222 size = unshare_expr (size);
2223 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2224 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2228 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2229 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2230 the CALL_EXPR. */
2232 static enum gimplify_status
2233 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2235 bool (*test) (tree);
2236 fallback_t fb;
2238 /* In general, we allow lvalues for function arguments to avoid
2239 extra overhead of copying large aggregates out of even larger
2240 aggregates into temporaries only to copy the temporaries to
2241 the argument list. Make optimizers happy by pulling out to
2242 temporaries those types that fit in registers. */
2243 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2244 test = is_gimple_val, fb = fb_rvalue;
2245 else
2246 test = is_gimple_lvalue, fb = fb_either;
2248 /* If this is a variable sized type, we must remember the size. */
2249 maybe_with_size_expr (arg_p);
2251 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2252 /* Make sure arguments have the same location as the function call
2253 itself. */
2254 protected_set_expr_location (*arg_p, call_location);
2256 /* There is a sequence point before a function call. Side effects in
2257 the argument list must occur before the actual call. So, when
2258 gimplifying arguments, force gimplify_expr to use an internal
2259 post queue which is then appended to the end of PRE_P. */
2260 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
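/* Illustrative sketch, not part of the original source; D.1 is an
   invented temporary name.  Because the internal post queue is
   appended to PRE_P, a call such as

     f (i++);

   is gimplified so that the increment takes effect before the call:

     D.1 = i;
     i = i + 1;
     f (D.1);  */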
2264 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2265 WANT_VALUE is true if the result of the call is desired. */
2267 static enum gimplify_status
2268 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2270 tree fndecl, parms, p;
2271 enum gimplify_status ret;
2272 int i, nargs;
2273 gimple call;
2274 bool builtin_va_start_p = FALSE;
2275 location_t loc = EXPR_LOCATION (*expr_p);
2277 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2279 /* For reliable diagnostics during inlining, it is necessary that
2280 every call_expr be annotated with file and line. */
2281 if (! EXPR_HAS_LOCATION (*expr_p))
2282 SET_EXPR_LOCATION (*expr_p, input_location);
2284 /* This may be a call to a builtin function.
2286 Builtin function calls may be transformed into different
2287 (and more efficient) builtin function calls under certain
2288 circumstances. Unfortunately, gimplification can muck things
2289 up enough that the builtin expanders are not aware that certain
2290 transformations are still valid.
2292 So we attempt transformation/gimplification of the call before
2293 we gimplify the CALL_EXPR. At this time we do not manage to
2294 transform all calls in the same manner as the expanders do, but
2295 we do transform most of them. */
2296 fndecl = get_callee_fndecl (*expr_p);
2297 if (fndecl && DECL_BUILT_IN (fndecl))
2299 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2301 if (new_tree && new_tree != *expr_p)
2303 /* There was a transformation of this call which computes the
2304 same value, but in a more efficient way. Return and try
2305 again. */
2306 *expr_p = new_tree;
2307 return GS_OK;
2310 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2311 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2313 builtin_va_start_p = TRUE;
2314 if (call_expr_nargs (*expr_p) < 2)
2316 error ("too few arguments to function %<va_start%>");
2317 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2318 return GS_OK;
2321 if (fold_builtin_next_arg (*expr_p, true))
2323 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2324 return GS_OK;
2329 /* There is a sequence point before the call, so any side effects in
2330 the calling expression must occur before the actual call. Force
2331 gimplify_expr to use an internal post queue. */
2332 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2333 is_gimple_call_addr, fb_rvalue);
2335 nargs = call_expr_nargs (*expr_p);
2337 /* Get argument types for verification. */
2338 fndecl = get_callee_fndecl (*expr_p);
2339 parms = NULL_TREE;
2340 if (fndecl)
2341 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2342 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2343 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2345 if (fndecl && DECL_ARGUMENTS (fndecl))
2346 p = DECL_ARGUMENTS (fndecl);
2347 else if (parms)
2348 p = parms;
2349 else
2350 p = NULL_TREE;
2351 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2354 /* If the last argument is __builtin_va_arg_pack () and it is not
2355 passed as a named argument, decrease the number of CALL_EXPR
2356 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2357 if (!p
2358 && i < nargs
2359 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2361 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2362 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2364 if (last_arg_fndecl
2365 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2366 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2367 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2369 tree call = *expr_p;
2371 --nargs;
2372 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2373 CALL_EXPR_FN (call),
2374 nargs, CALL_EXPR_ARGP (call));
2376 /* Copy all CALL_EXPR flags, location and block, except
2377 CALL_EXPR_VA_ARG_PACK flag. */
2378 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2379 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2380 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2381 = CALL_EXPR_RETURN_SLOT_OPT (call);
2382 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2383 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2384 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2385 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2387 /* Set CALL_EXPR_VA_ARG_PACK. */
2388 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2392 /* Finally, gimplify the function arguments. */
2393 if (nargs > 0)
2395 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2396 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2397 PUSH_ARGS_REVERSED ? i-- : i++)
2399 enum gimplify_status t;
2401 /* Avoid gimplifying the second argument to va_start, which needs to
2402 be the plain PARM_DECL. */
2403 if ((i != 1) || !builtin_va_start_p)
2405 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2406 EXPR_LOCATION (*expr_p));
2408 if (t == GS_ERROR)
2409 ret = GS_ERROR;
2414 /* Verify the function result. */
2415 if (want_value && fndecl
2416 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
2418 error_at (loc, "using result of function returning %<void%>");
2419 ret = GS_ERROR;
2422 /* Try this again in case gimplification exposed something. */
2423 if (ret != GS_ERROR)
2425 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2427 if (new_tree && new_tree != *expr_p)
2429 /* There was a transformation of this call which computes the
2430 same value, but in a more efficient way. Return and try
2431 again. */
2432 *expr_p = new_tree;
2433 return GS_OK;
2436 else
2438 *expr_p = error_mark_node;
2439 return GS_ERROR;
2442 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2443 decl. This allows us to eliminate redundant or useless
2444 calls to "const" functions. */
2445 if (TREE_CODE (*expr_p) == CALL_EXPR)
2447 int flags = call_expr_flags (*expr_p);
2448 if (flags & (ECF_CONST | ECF_PURE)
2449 /* An infinite loop is considered a side effect. */
2450 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2451 TREE_SIDE_EFFECTS (*expr_p) = 0;
2454 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2455 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2456 form and delegate the creation of a GIMPLE_CALL to
2457 gimplify_modify_expr. This is always possible because when
2458 WANT_VALUE is true, the caller wants the result of this call into
2459 a temporary, which means that we will emit an INIT_EXPR in
2460 internal_get_tmp_var which will then be handled by
2461 gimplify_modify_expr. */
2462 if (!want_value)
2464 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2465 have to do is replicate it as a GIMPLE_CALL tuple. */
2466 call = gimple_build_call_from_tree (*expr_p);
2467 gimplify_seq_add_stmt (pre_p, call);
2468 *expr_p = NULL_TREE;
2471 return ret;
2474 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2475 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2477 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2478 condition is true or false, respectively. If null, we should generate
2479 our own to skip over the evaluation of this specific expression.
2481 LOCUS is the source location of the COND_EXPR.
2483 This function is the tree equivalent of do_jump.
2485 shortcut_cond_r should only be called by shortcut_cond_expr. */
2487 static tree
2488 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2489 location_t locus)
2491 tree local_label = NULL_TREE;
2492 tree t, expr = NULL;
2494 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2495 retain the shortcut semantics. Just insert the gotos here;
2496 shortcut_cond_expr will append the real blocks later. */
2497 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2499 location_t new_locus;
2501 /* Turn if (a && b) into
2503 if (a); else goto no;
2504 if (b) goto yes; else goto no;
2505 (no:) */
2507 if (false_label_p == NULL)
2508 false_label_p = &local_label;
2510 /* Keep the original source location on the first 'if'. */
2511 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2512 append_to_statement_list (t, &expr);
2514 /* Set the source location of the && on the second 'if'. */
2515 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2516 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2517 new_locus);
2518 append_to_statement_list (t, &expr);
2520 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2522 location_t new_locus;
2524 /* Turn if (a || b) into
2526 if (a) goto yes;
2527 if (b) goto yes; else goto no;
2528 (yes:) */
2530 if (true_label_p == NULL)
2531 true_label_p = &local_label;
2533 /* Keep the original source location on the first 'if'. */
2534 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2535 append_to_statement_list (t, &expr);
2537 /* Set the source location of the || on the second 'if'. */
2538 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2539 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2540 new_locus);
2541 append_to_statement_list (t, &expr);
2543 else if (TREE_CODE (pred) == COND_EXPR)
2545 location_t new_locus;
2547 /* As long as we're messing with gotos, turn if (a ? b : c) into
2548 if (a)
2549 if (b) goto yes; else goto no;
2550 else
2551 if (c) goto yes; else goto no; */
2553 /* Keep the original source location on the first 'if'. Set the source
2554 location of the ? on the second 'if'. */
2555 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2556 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2557 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2558 false_label_p, locus),
2559 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2560 false_label_p, new_locus));
2562 else
2564 expr = build3 (COND_EXPR, void_type_node, pred,
2565 build_and_jump (true_label_p),
2566 build_and_jump (false_label_p));
2567 SET_EXPR_LOCATION (expr, locus);
2570 if (local_label)
2572 t = build1 (LABEL_EXPR, void_type_node, local_label);
2573 append_to_statement_list (t, &expr);
2576 return expr;
2579 /* Given a conditional expression EXPR with short-circuit boolean
2580 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2581 predicate apart into the equivalent sequence of conditionals. */
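/* Illustrative sketch, not part of the original source; the label
   names are invented.  A conditional such as

     if (a && b) x (); else y ();

   is broken apart into roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     x ();
     goto end;
   no:
     y ();
   end:
   */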
2583 static tree
2584 shortcut_cond_expr (tree expr)
2586 tree pred = TREE_OPERAND (expr, 0);
2587 tree then_ = TREE_OPERAND (expr, 1);
2588 tree else_ = TREE_OPERAND (expr, 2);
2589 tree true_label, false_label, end_label, t;
2590 tree *true_label_p;
2591 tree *false_label_p;
2592 bool emit_end, emit_false, jump_over_else;
2593 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2594 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2596 /* First do simple transformations. */
2597 if (!else_se)
2599 /* If there is no 'else', turn
2600 if (a && b) then c
2601 into
2602 if (a) if (b) then c. */
2603 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2605 /* Keep the original source location on the first 'if'. */
2606 location_t locus = EXPR_HAS_LOCATION (expr)
2607 ? EXPR_LOCATION (expr) : input_location;
2608 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2609 /* Set the source location of the && on the second 'if'. */
2610 if (EXPR_HAS_LOCATION (pred))
2611 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2612 then_ = shortcut_cond_expr (expr);
2613 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2614 pred = TREE_OPERAND (pred, 0);
2615 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2616 SET_EXPR_LOCATION (expr, locus);
2620 if (!then_se)
2622 /* If there is no 'then', turn
2623 if (a || b); else d
2624 into
2625 if (a); else if (b); else d. */
2626 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2628 /* Keep the original source location on the first 'if'. */
2629 location_t locus = EXPR_HAS_LOCATION (expr)
2630 ? EXPR_LOCATION (expr) : input_location;
2631 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2632 /* Set the source location of the || on the second 'if'. */
2633 if (EXPR_HAS_LOCATION (pred))
2634 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2635 else_ = shortcut_cond_expr (expr);
2636 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2637 pred = TREE_OPERAND (pred, 0);
2638 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2639 SET_EXPR_LOCATION (expr, locus);
2643 /* If we're done, great. */
2644 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2645 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2646 return expr;
2648 /* Otherwise we need to mess with gotos. Change
2649 if (a) c; else d;
2651 if (a); else goto no;
2652 c; goto end;
2653 no: d; end:
2654 and recursively gimplify the condition. */
2656 true_label = false_label = end_label = NULL_TREE;
2658 /* If our arms just jump somewhere, hijack those labels so we don't
2659 generate jumps to jumps. */
2661 if (then_
2662 && TREE_CODE (then_) == GOTO_EXPR
2663 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2665 true_label = GOTO_DESTINATION (then_);
2666 then_ = NULL;
2667 then_se = false;
2670 if (else_
2671 && TREE_CODE (else_) == GOTO_EXPR
2672 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2674 false_label = GOTO_DESTINATION (else_);
2675 else_ = NULL;
2676 else_se = false;
2679 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2680 if (true_label)
2681 true_label_p = &true_label;
2682 else
2683 true_label_p = NULL;
2685 /* The 'else' branch also needs a label if it contains interesting code. */
2686 if (false_label || else_se)
2687 false_label_p = &false_label;
2688 else
2689 false_label_p = NULL;
2691 /* If there was nothing else in our arms, just forward the label(s). */
2692 if (!then_se && !else_se)
2693 return shortcut_cond_r (pred, true_label_p, false_label_p,
2694 EXPR_HAS_LOCATION (expr)
2695 ? EXPR_LOCATION (expr) : input_location);
2697 /* If our last subexpression already has a terminal label, reuse it. */
2698 if (else_se)
2699 t = expr_last (else_);
2700 else if (then_se)
2701 t = expr_last (then_);
2702 else
2703 t = NULL;
2704 if (t && TREE_CODE (t) == LABEL_EXPR)
2705 end_label = LABEL_EXPR_LABEL (t);
2707 /* If we don't care about jumping to the 'else' branch, jump to the end
2708 if the condition is false. */
2709 if (!false_label_p)
2710 false_label_p = &end_label;
2712 /* We only want to emit these labels if we aren't hijacking them. */
2713 emit_end = (end_label == NULL_TREE);
2714 emit_false = (false_label == NULL_TREE);
2716 /* We only emit the jump over the else clause if we have to--if the
2717 then clause may fall through. Otherwise we can wind up with a
2718 useless jump and a useless label at the end of gimplified code,
2719 which will cause us to think that this conditional as a whole
2720 falls through even if it doesn't. If we then inline a function
2721 which ends with such a condition, that can cause us to issue an
2722 inappropriate warning about control reaching the end of a
2723 non-void function. */
2724 jump_over_else = block_may_fallthru (then_);
2726 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2727 EXPR_HAS_LOCATION (expr)
2728 ? EXPR_LOCATION (expr) : input_location);
2730 expr = NULL;
2731 append_to_statement_list (pred, &expr);
2733 append_to_statement_list (then_, &expr);
2734 if (else_se)
2736 if (jump_over_else)
2738 tree last = expr_last (expr);
2739 t = build_and_jump (&end_label);
2740 if (EXPR_HAS_LOCATION (last))
2741 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2742 append_to_statement_list (t, &expr);
2744 if (emit_false)
2746 t = build1 (LABEL_EXPR, void_type_node, false_label);
2747 append_to_statement_list (t, &expr);
2749 append_to_statement_list (else_, &expr);
2751 if (emit_end && end_label)
2753 t = build1 (LABEL_EXPR, void_type_node, end_label);
2754 append_to_statement_list (t, &expr);
2757 return expr;
2760 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2762 tree
2763 gimple_boolify (tree expr)
2765 tree type = TREE_TYPE (expr);
2766 location_t loc = EXPR_LOCATION (expr);
2768 if (TREE_CODE (type) == BOOLEAN_TYPE)
2769 return expr;
2771 switch (TREE_CODE (expr))
2773 case TRUTH_AND_EXPR:
2774 case TRUTH_OR_EXPR:
2775 case TRUTH_XOR_EXPR:
2776 case TRUTH_ANDIF_EXPR:
2777 case TRUTH_ORIF_EXPR:
2778 /* Also boolify the arguments of truth exprs. */
2779 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2780 /* FALLTHRU */
2782 case TRUTH_NOT_EXPR:
2783 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2784 /* FALLTHRU */
2786 case EQ_EXPR: case NE_EXPR:
2787 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2788 /* These expressions always produce boolean results. */
2789 TREE_TYPE (expr) = boolean_type_node;
2790 return expr;
2792 default:
2793 /* Other expressions that get here must have boolean values, but
2794 might need to be converted to the appropriate mode. */
2795 return fold_convert_loc (loc, boolean_type_node, expr);
2799 /* Given a conditional expression *EXPR_P without side effects, gimplify
2800 its operands. New statements are inserted to PRE_P. */
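/* Illustrative sketch, not part of the original source; D.1 is an
   invented temporary name.  When both arms are simple and free of
   side effects, e.g.

     x = flag ? a + 1 : b;

   the arms are merely reduced to GIMPLE values and the COND_EXPR is
   kept as an rvalue rather than being lowered to branches, roughly

     D.1 = a + 1;
     x = flag ? D.1 : b;  */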
2802 static enum gimplify_status
2803 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2805 tree expr = *expr_p, cond;
2806 enum gimplify_status ret, tret;
2807 enum tree_code code;
2809 cond = gimple_boolify (COND_EXPR_COND (expr));
2811 /* We need to handle && and || specially, as their gimplification
2812 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
2813 code = TREE_CODE (cond);
2814 if (code == TRUTH_ANDIF_EXPR)
2815 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2816 else if (code == TRUTH_ORIF_EXPR)
2817 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2818 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2819 COND_EXPR_COND (*expr_p) = cond;
2821 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2822 is_gimple_val, fb_rvalue);
2823 ret = MIN (ret, tret);
2824 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2825 is_gimple_val, fb_rvalue);
2827 return MIN (ret, tret);
2830 /* Returns true if evaluating EXPR could trap.
2831 EXPR is GENERIC, while tree_could_trap_p can be called
2832 only on GIMPLE. */
2834 static bool
2835 generic_expr_could_trap_p (tree expr)
2837 unsigned i, n;
2839 if (!expr || is_gimple_val (expr))
2840 return false;
2842 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2843 return true;
2845 n = TREE_OPERAND_LENGTH (expr);
2846 for (i = 0; i < n; i++)
2847 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2848 return true;
2850 return false;
2853 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2854 into
2856 if (p)              if (p)
2857   t1 = a;             a;
2858 else          or    else
2859   t1 = b;             b;
2862 The second form is used when *EXPR_P is of type void.
2864 PRE_P points to the list where side effects that must happen before
2865 *EXPR_P should be stored. */
2867 static enum gimplify_status
2868 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2870 tree expr = *expr_p;
2871 tree tmp, type, arm1, arm2;
2872 enum gimplify_status ret;
2873 tree label_true, label_false, label_cont;
2874 bool have_then_clause_p, have_else_clause_p;
2875 gimple gimple_cond;
2876 enum tree_code pred_code;
2877 gimple_seq seq = NULL;
2878 location_t loc = EXPR_LOCATION (*expr_p);
2880 type = TREE_TYPE (expr);
2882 /* If this COND_EXPR has a value, copy the values into a temporary within
2883 the arms. */
2884 if (! VOID_TYPE_P (type))
2886 tree result;
2888 /* If an rvalue is ok or we do not require an lvalue, avoid creating
2889 an addressable temporary. */
2890 if (((fallback & fb_rvalue)
2891 || !(fallback & fb_lvalue))
2892 && !TREE_ADDRESSABLE (type))
2894 if (gimplify_ctxp->allow_rhs_cond_expr
2895 /* If either branch has side effects or could trap, it can't be
2896 evaluated unconditionally. */
2897 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2898 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2899 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2900 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2901 return gimplify_pure_cond_expr (expr_p, pre_p);
2903 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2904 ret = GS_ALL_DONE;
2906 else
2908 tree type = build_pointer_type (TREE_TYPE (expr));
2910 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2911 TREE_OPERAND (expr, 1) =
2912 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));
2914 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2915 TREE_OPERAND (expr, 2) =
2916 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));
2918 tmp = create_tmp_var (type, "iftmp");
2920 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2921 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2923 result = build_fold_indirect_ref_loc (loc, tmp);
2926 /* Build the then clause, 't1 = a;'. But don't build an assignment
2927 if this branch is void; in C++ it can be, if it's a throw. */
2928 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2929 TREE_OPERAND (expr, 1)
2930 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2932 /* Build the else clause, 't1 = b;'. */
2933 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2934 TREE_OPERAND (expr, 2)
2935 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2937 TREE_TYPE (expr) = void_type_node;
2938 recalculate_side_effects (expr);
2940 /* Move the COND_EXPR to the prequeue. */
2941 gimplify_stmt (&expr, pre_p);
2943 *expr_p = result;
2944 return GS_ALL_DONE;
2947 /* Make sure the condition has BOOLEAN_TYPE. */
2948 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2950 /* Break apart && and || conditions. */
2951 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2952 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2954 expr = shortcut_cond_expr (expr);
2956 if (expr != *expr_p)
2958 *expr_p = expr;
2960 /* We can't rely on gimplify_expr to re-gimplify the expanded
2961 form properly, as cleanups might cause the target labels to be
2962 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2963 set up a conditional context. */
2964 gimple_push_condition ();
2965 gimplify_stmt (expr_p, &seq);
2966 gimple_pop_condition (pre_p);
2967 gimple_seq_add_seq (pre_p, seq);
2969 return GS_ALL_DONE;
2973 /* Now do the normal gimplification. */
2975 /* Gimplify condition. */
2976 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2977 fb_rvalue);
2978 if (ret == GS_ERROR)
2979 return GS_ERROR;
2980 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2982 gimple_push_condition ();
2984 have_then_clause_p = have_else_clause_p = false;
2985 if (TREE_OPERAND (expr, 1) != NULL
2986 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2987 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2988 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2989 == current_function_decl)
2990 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2991 have different locations, otherwise we end up with incorrect
2992 location information on the branches. */
2993 && (optimize
2994 || !EXPR_HAS_LOCATION (expr)
2995 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2996 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2998 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2999 have_then_clause_p = true;
3001 else
3002 label_true = create_artificial_label (UNKNOWN_LOCATION);
3003 if (TREE_OPERAND (expr, 2) != NULL
3004 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3005 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3006 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3007 == current_function_decl)
3008 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3009 have different locations, otherwise we end up with incorrect
3010 location information on the branches. */
3011 && (optimize
3012 || !EXPR_HAS_LOCATION (expr)
3013 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3014 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3016 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3017 have_else_clause_p = true;
3019 else
3020 label_false = create_artificial_label (UNKNOWN_LOCATION);
3022 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3023 &arm2);
3025 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3026 label_false);
3028 gimplify_seq_add_stmt (&seq, gimple_cond);
3029 label_cont = NULL_TREE;
3030 if (!have_then_clause_p)
3032 /* For if (...) {} else { code; } put label_true after
3033 the else block. */
3034 if (TREE_OPERAND (expr, 1) == NULL_TREE
3035 && !have_else_clause_p
3036 && TREE_OPERAND (expr, 2) != NULL_TREE)
3037 label_cont = label_true;
3038 else
3040 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3041 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3042 /* For if (...) { code; } else {} or
3043 if (...) { code; } else goto label; or
3044 if (...) { code; return; } else { ... }
3045 label_cont isn't needed. */
3046 if (!have_else_clause_p
3047 && TREE_OPERAND (expr, 2) != NULL_TREE
3048 && gimple_seq_may_fallthru (seq))
3050 gimple g;
3051 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3053 g = gimple_build_goto (label_cont);
3055 /* GIMPLE_COND's are very low level; they have embedded
3056 gotos. This particular embedded goto should not be marked
3057 with the location of the original COND_EXPR, as it would
3058 correspond to the COND_EXPR's condition, not the ELSE or the
3059 THEN arms. To avoid marking it with the wrong location, flag
3060 it as "no location". */
3061 gimple_set_do_not_emit_location (g);
3063 gimplify_seq_add_stmt (&seq, g);
3067 if (!have_else_clause_p)
3069 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3070 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3072 if (label_cont)
3073 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3075 gimple_pop_condition (pre_p);
3076 gimple_seq_add_seq (pre_p, seq);
3078 if (ret == GS_ERROR)
3079 ; /* Do nothing. */
3080 else if (have_then_clause_p || have_else_clause_p)
3081 ret = GS_ALL_DONE;
3082 else
3084 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3085 expr = TREE_OPERAND (expr, 0);
3086 gimplify_stmt (&expr, pre_p);
3089 *expr_p = NULL;
3090 return ret;
3093 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3094 a call to __builtin_memcpy. */
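/* Illustrative sketch, not part of the original source.  An assignment
   of a large aggregate, say

     *dst = *src;

   may be emitted as a call

     __builtin_memcpy (&*dst, &*src, SIZE);

   where SIZE stands for the size value passed in by the caller; both
   operands are marked addressable first.  */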
3096 static enum gimplify_status
3097 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3098 gimple_seq *seq_p)
3100 tree t, to, to_ptr, from, from_ptr;
3101 gimple gs;
3102 location_t loc = EXPR_LOCATION (*expr_p);
3104 to = TREE_OPERAND (*expr_p, 0);
3105 from = TREE_OPERAND (*expr_p, 1);
3107 mark_addressable (from);
3108 from_ptr = build_fold_addr_expr_loc (loc, from);
3109 gimplify_arg (&from_ptr, seq_p, loc);
3111 mark_addressable (to);
3112 to_ptr = build_fold_addr_expr_loc (loc, to);
3113 gimplify_arg (&to_ptr, seq_p, loc);
3115 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3117 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3119 if (want_value)
3121 /* tmp = memcpy() */
3122 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3123 gimple_call_set_lhs (gs, t);
3124 gimplify_seq_add_stmt (seq_p, gs);
3126 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3127 return GS_ALL_DONE;
3130 gimplify_seq_add_stmt (seq_p, gs);
3131 *expr_p = NULL;
3132 return GS_ALL_DONE;
3135 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3136 a call to __builtin_memset. In this case we know that the RHS is
3137 a CONSTRUCTOR with an empty element list. */
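/* Illustrative sketch, not part of the original source.  Clearing an
   aggregate with an empty CONSTRUCTOR, written loosely as

     s = { };

   may be emitted as

     __builtin_memset (&s, 0, SIZE);

   where SIZE stands for the size value passed in by the caller.  */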
3139 static enum gimplify_status
3140 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3141 gimple_seq *seq_p)
3143 tree t, from, to, to_ptr;
3144 gimple gs;
3145 location_t loc = EXPR_LOCATION (*expr_p);
3147 /* Assert our assumptions, to abort instead of producing wrong code
3148 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3149 not be immediately exposed. */
3150 from = TREE_OPERAND (*expr_p, 1);
3151 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3152 from = TREE_OPERAND (from, 0);
3154 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3155 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3157 /* Now proceed. */
3158 to = TREE_OPERAND (*expr_p, 0);
3160 to_ptr = build_fold_addr_expr_loc (loc, to);
3161 gimplify_arg (&to_ptr, seq_p, loc);
3162 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3164 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3166 if (want_value)
3168 /* tmp = memset() */
3169 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3170 gimple_call_set_lhs (gs, t);
3171 gimplify_seq_add_stmt (seq_p, gs);
3173 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3174 return GS_ALL_DONE;
3177 gimplify_seq_add_stmt (seq_p, gs);
3178 *expr_p = NULL;
3179 return GS_ALL_DONE;
3182 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3183 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3184 assignment. Returns non-null if we detect a potential overlap. */
3186 struct gimplify_init_ctor_preeval_data
3188 /* The base decl of the lhs object. May be NULL, in which case we
3189 have to assume the lhs is indirect. */
3190 tree lhs_base_decl;
3192 /* The alias set of the lhs object. */
3193 alias_set_type lhs_alias_set;
3196 static tree
3197 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3199 struct gimplify_init_ctor_preeval_data *data
3200 = (struct gimplify_init_ctor_preeval_data *) xdata;
3201 tree t = *tp;
3203 /* If we find the base object, obviously we have overlap. */
3204 if (data->lhs_base_decl == t)
3205 return t;
3207 /* If the constructor component is indirect, determine if we have a
3208 potential overlap with the lhs. The only bits of information we
3209 have to go on at this point are addressability and alias sets. */
3210 if (TREE_CODE (t) == INDIRECT_REF
3211 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3212 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3213 return t;
3215 /* If the constructor component is a call, determine if it can hide a
3216 potential overlap with the lhs through an INDIRECT_REF like above. */
3217 if (TREE_CODE (t) == CALL_EXPR)
3219 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3221 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3222 if (POINTER_TYPE_P (TREE_VALUE (type))
3223 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3224 && alias_sets_conflict_p (data->lhs_alias_set,
3225 get_alias_set
3226 (TREE_TYPE (TREE_VALUE (type)))))
3227 return t;
3230 if (IS_TYPE_OR_DECL_P (t))
3231 *walk_subtrees = 0;
3232 return NULL;
3235 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3236 force values that overlap with the lhs (as described by *DATA)
3237 into temporaries. */
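/* Illustrative sketch, not part of the original source; D.1 is an
   invented temporary name.  Given an initializer that reads the object
   being initialized, written loosely as

     a = { .x = a.y, .y = 0 };

   the overlapping value a.y is pre-evaluated into a temporary before
   any element of "a" is stored:

     D.1 = a.y;
     a.x = D.1;
     a.y = 0;  */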
3239 static void
3240 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3241 struct gimplify_init_ctor_preeval_data *data)
3243 enum gimplify_status one;
3245 /* If the value is constant, then there's nothing to pre-evaluate. */
3246 if (TREE_CONSTANT (*expr_p))
3248 /* Ensure it does not have side effects, it might contain a reference to
3249 the object we're initializing. */
3250 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3251 return;
3254 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3255 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3256 return;
3258 /* Recurse for nested constructors. */
3259 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3261 unsigned HOST_WIDE_INT ix;
3262 constructor_elt *ce;
3263 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3265 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3266 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3268 return;
3271 /* If this is a variable sized type, we must remember the size. */
3272 maybe_with_size_expr (expr_p);
3274 /* Gimplify the constructor element to something appropriate for the rhs
3275 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3276 the gimplifier will consider this a store to memory. Doing this
3277 gimplification now means that we won't have to deal with complicated
3278 language-specific trees, nor trees like SAVE_EXPR that can induce
3279 exponential search behavior. */
3280 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3281 if (one == GS_ERROR)
3283 *expr_p = NULL;
3284 return;
3287 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3288 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3289 always be true for all scalars, since is_gimple_mem_rhs insists on a
3290 temporary variable for them. */
3291 if (DECL_P (*expr_p))
3292 return;
3294 /* If this is of variable size, we have no choice but to assume it doesn't
3295 overlap since we can't make a temporary for it. */
3296 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3297 return;
3299 /* Otherwise, we must search for overlap ... */
3300 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3301 return;
3303 /* ... and if found, force the value into a temporary. */
3304 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3307 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3308 a RANGE_EXPR in a CONSTRUCTOR for an array.
3310 var = lower;
3311 loop_entry:
3312 object[var] = value;
3313 if (var == upper)
3314 goto loop_exit;
3315 var = var + 1;
3316 goto loop_entry;
3317 loop_exit:
3319 We increment var _after_ the loop exit check because we might otherwise
3320 fail if upper == TYPE_MAX_VALUE (type for upper).
3322 Note that we never have to deal with SAVE_EXPRs here, because this has
3323 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3325 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3326 gimple_seq *, bool);
3328 static void
3329 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3330 tree value, tree array_elt_type,
3331 gimple_seq *pre_p, bool cleared)
3333 tree loop_entry_label, loop_exit_label, fall_thru_label;
3334 tree var, var_type, cref, tmp;
3336 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3337 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3338 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3340 /* Create and initialize the index variable. */
3341 var_type = TREE_TYPE (upper);
3342 var = create_tmp_var (var_type, NULL);
3343 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3345 /* Add the loop entry label. */
3346 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3348 /* Build the reference. */
3349 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3350 var, NULL_TREE, NULL_TREE);
3352 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3353 the store. Otherwise just assign value to the reference. */
3355 if (TREE_CODE (value) == CONSTRUCTOR)
3356 /* NB we might have to call ourselves recursively through
3357 gimplify_init_ctor_eval if the value is a constructor. */
3358 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3359 pre_p, cleared);
3360 else
3361 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3363 /* We exit the loop when the index var is equal to the upper bound. */
3364 gimplify_seq_add_stmt (pre_p,
3365 gimple_build_cond (EQ_EXPR, var, upper,
3366 loop_exit_label, fall_thru_label));
3368 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3370 /* Otherwise, increment the index var... */
3371 tmp = build2 (PLUS_EXPR, var_type, var,
3372 fold_convert (var_type, integer_one_node));
3373 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3375 /* ...and jump back to the loop entry. */
3376 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3378 /* Add the loop exit label. */
3379 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3382 /* Return true if FDECL is accessing a field that is zero sized. */
3384 static bool
3385 zero_sized_field_decl (const_tree fdecl)
3387 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3388 && integer_zerop (DECL_SIZE (fdecl)))
3389 return true;
3390 return false;
3393 /* Return true if TYPE is zero sized. */
3395 static bool
3396 zero_sized_type (const_tree type)
3398 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3399 && integer_zerop (TYPE_SIZE (type)))
3400 return true;
3401 return false;
3404 /* A subroutine of gimplify_init_constructor. Generate individual
3405 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3406 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3407 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3408 zeroed first. */
3410 static void
3411 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3412 gimple_seq *pre_p, bool cleared)
3414 tree array_elt_type = NULL;
3415 unsigned HOST_WIDE_INT ix;
3416 tree purpose, value;
3418 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3419 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3421 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3423 tree cref;
3425 /* NULL values are created above for gimplification errors. */
3426 if (value == NULL)
3427 continue;
3429 if (cleared && initializer_zerop (value))
3430 continue;
3432 /* ??? Here's to hoping the front end fills in all of the indices,
3433 so we don't have to figure out what's missing ourselves. */
3434 gcc_assert (purpose);
3436 /* Skip zero-sized fields, unless value has side-effects. This can
3437 happen with calls to functions returning a zero-sized type, which
3438 we shouldn't discard. As a number of downstream passes don't
3439 expect sets of zero-sized fields, we rely on the gimplification of
3440 the MODIFY_EXPR we make below to drop the assignment statement. */
3441 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3442 continue;
3444 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3445 whole range. */
3446 if (TREE_CODE (purpose) == RANGE_EXPR)
3448 tree lower = TREE_OPERAND (purpose, 0);
3449 tree upper = TREE_OPERAND (purpose, 1);
3451 /* If the lower bound is equal to upper, just treat it as if
3452 upper was the index. */
3453 if (simple_cst_equal (lower, upper))
3454 purpose = upper;
3455 else
3457 gimplify_init_ctor_eval_range (object, lower, upper, value,
3458 array_elt_type, pre_p, cleared);
3459 continue;
3463 if (array_elt_type)
3465 /* Do not use bitsizetype for ARRAY_REF indices. */
3466 if (TYPE_DOMAIN (TREE_TYPE (object)))
3467 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3468 purpose);
3469 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3470 purpose, NULL_TREE, NULL_TREE);
3472 else
3474 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3475 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3476 unshare_expr (object), purpose, NULL_TREE);
3479 if (TREE_CODE (value) == CONSTRUCTOR
3480 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3481 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3482 pre_p, cleared);
3483 else
3485 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3486 gimplify_and_add (init, pre_p);
3487 ggc_free (init);
3493 /* Returns the appropriate RHS predicate for this LHS. */
3495 gimple_predicate
3496 rhs_predicate_for (tree lhs)
3498 if (is_gimple_reg (lhs))
3499 return is_gimple_reg_rhs_or_call;
3500 else
3501 return is_gimple_mem_rhs_or_call;
3504 /* Gimplify a C99 compound literal expression. This just means adding
3505 the DECL_EXPR before the current statement and using its anonymous
3506 decl instead. */
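/* Illustrative sketch, not part of the original source; D.1 is an
   invented name for the literal's anonymous variable.  For

     f ((struct S){ .a = 1 });

   the DECL_EXPR emitted before the statement initializes D.1, and the
   literal itself is replaced by the decl:

     D.1.a = 1;
     f (D.1);  */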
3508 static enum gimplify_status
3509 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3511 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3512 tree decl = DECL_EXPR_DECL (decl_s);
3513 /* Mark the decl as addressable if the compound literal
3514 expression is addressable now, otherwise it is marked too late
3515 after we gimplify the initialization expression. */
3516 if (TREE_ADDRESSABLE (*expr_p))
3517 TREE_ADDRESSABLE (decl) = 1;
3519 /* Preliminarily mark non-addressed complex variables as eligible
3520 for promotion to gimple registers. We'll transform their uses
3521 as we find them. */
3522 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3523 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3524 && !TREE_THIS_VOLATILE (decl)
3525 && !needs_to_live_in_memory (decl))
3526 DECL_GIMPLE_REG_P (decl) = 1;
3528 /* This decl isn't mentioned in the enclosing block, so add it to the
3529 list of temps. FIXME it seems a bit of a kludge to say that
3530 anonymous artificial vars aren't pushed, but everything else is. */
3531 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3532 gimple_add_tmp_var (decl);
3534 gimplify_and_add (decl_s, pre_p);
3535 *expr_p = decl;
3536 return GS_OK;
3539 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3540 return a new CONSTRUCTOR if something changed. */
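/* Illustrative sketch, not part of the original source.  A constructor
   element that is itself a non-addressable compound literal, written
   loosely as

     { .inner = (struct inner){ 1, 2 } }

   is replaced by the literal's DECL_INITIAL, giving the flat form

     { .inner = { 1, 2 } }  */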
3542 static tree
3543 optimize_compound_literals_in_ctor (tree orig_ctor)
3545 tree ctor = orig_ctor;
3546 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3547 unsigned int idx, num = VEC_length (constructor_elt, elts);
3549 for (idx = 0; idx < num; idx++)
3551 tree value = VEC_index (constructor_elt, elts, idx)->value;
3552 tree newval = value;
3553 if (TREE_CODE (value) == CONSTRUCTOR)
3554 newval = optimize_compound_literals_in_ctor (value);
3555 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3557 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3558 tree decl = DECL_EXPR_DECL (decl_s);
3559 tree init = DECL_INITIAL (decl);
3561 if (!TREE_ADDRESSABLE (value)
3562 && !TREE_ADDRESSABLE (decl)
3563 && init)
3564 newval = optimize_compound_literals_in_ctor (init);
3566 if (newval == value)
3567 continue;
3569 if (ctor == orig_ctor)
3571 ctor = copy_node (orig_ctor);
3572 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3573 elts = CONSTRUCTOR_ELTS (ctor);
3575 VEC_index (constructor_elt, elts, idx)->value = newval;
3577 return ctor;
3582 /* A subroutine of gimplify_modify_expr. Break out elements of a
3583 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3585 Note that we still need to clear any elements that don't have explicit
3586 initializers, so if not all elements are initialized we keep the
3587 original MODIFY_EXPR, we just remove all of the constructor elements.
3589 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3590 GS_ERROR if we would have to create a temporary when gimplifying
3591 this constructor. Otherwise, return GS_OK.
3593 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3595 static enum gimplify_status
3596 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3597 bool want_value, bool notify_temp_creation)
3599 tree object, ctor, type;
3600 enum gimplify_status ret;
3601 VEC(constructor_elt,gc) *elts;
3603 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3605 if (!notify_temp_creation)
3607 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3608 is_gimple_lvalue, fb_lvalue);
3609 if (ret == GS_ERROR)
3610 return ret;
3613 object = TREE_OPERAND (*expr_p, 0);
3614 ctor = TREE_OPERAND (*expr_p, 1) =
3615 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3616 type = TREE_TYPE (ctor);
3617 elts = CONSTRUCTOR_ELTS (ctor);
3618 ret = GS_ALL_DONE;
3620 switch (TREE_CODE (type))
3622 case RECORD_TYPE:
3623 case UNION_TYPE:
3624 case QUAL_UNION_TYPE:
3625 case ARRAY_TYPE:
3627 struct gimplify_init_ctor_preeval_data preeval_data;
3628 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3629 HOST_WIDE_INT num_nonzero_elements;
3630 bool cleared, valid_const_initializer;
3632 /* Aggregate types must lower constructors to initialization of
3633 individual elements. The exception is that a CONSTRUCTOR node
3634 with no elements indicates zero-initialization of the whole. */
3635 if (VEC_empty (constructor_elt, elts))
3637 if (notify_temp_creation)
3638 return GS_OK;
3639 break;
3642 /* Fetch information about the constructor to direct later processing.
3643 We might want to make static versions of it in various cases, and
3644 can only do so if it is known to be a valid constant initializer. */
3645 valid_const_initializer
3646 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3647 &num_ctor_elements, &cleared);
3649 /* If a const aggregate variable is being initialized, then it
3650 should never be a loss to promote the variable to be static. */
3651 if (valid_const_initializer
3652 && num_nonzero_elements > 1
3653 && TREE_READONLY (object)
3654 && TREE_CODE (object) == VAR_DECL
3655 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3657 if (notify_temp_creation)
3658 return GS_ERROR;
3659 DECL_INITIAL (object) = ctor;
3660 TREE_STATIC (object) = 1;
3661 if (!DECL_NAME (object))
3662 DECL_NAME (object) = create_tmp_var_name ("C");
3663 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3665 /* ??? C++ doesn't automatically append a .<number> to the
3666 assembler name, and even when it does, it looks at FE-private
3667 data structures to figure out what that number should be,
3668 which are not set for this variable. I suppose this is
3669 important for local statics for inline functions, which aren't
3670 "local" in the object file sense. So in order to get a unique
3671 TU-local symbol, we must invoke the lhd version now. */
3672 lhd_set_decl_assembler_name (object);
3674 *expr_p = NULL_TREE;
3675 break;
3678 /* If there are "lots" of initialized elements, even discounting
3679 those that are not address constants (and thus *must* be
3680 computed at runtime), then partition the constructor into
3681 constant and non-constant parts. Block copy the constant
3682 parts in, then generate code for the non-constant parts. */
3683 /* TODO. There's code in cp/typeck.c to do this. */
3685 num_type_elements = count_type_elements (type, true);
3687 /* If count_type_elements could not determine the number of type elements
3688 for a constant-sized object, assume clearing is needed.
3689 Don't do this for variable-sized objects, as store_constructor
3690 will ignore the clearing of variable-sized objects. */
3691 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3692 cleared = true;
3693 /* If there are "lots" of zeros, then block clear the object first. */
3694 else if (num_type_elements - num_nonzero_elements
3695 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3696 && num_nonzero_elements < num_type_elements/4)
3697 cleared = true;
3698 /* ??? This bit ought not be needed. For any element not present
3699 in the initializer, we should simply set it to zero. Except
3700 we'd need to *find* the elements that are not present, and that
3701 requires trickery to avoid quadratic compile-time behavior in
3702 large cases or excessive memory use in small cases. */
3703 else if (num_ctor_elements < num_type_elements)
3704 cleared = true;
3706 /* If there are "lots" of initialized elements, and all of them
3707 are valid address constants, then the entire initializer can
3708 be dropped to memory, and then memcpy'd out. Don't do this
3709 for sparse arrays, though, as it's more efficient to follow
3710 the standard CONSTRUCTOR behavior of memset followed by
3711 individual element initialization. Also don't do this for small
3712 all-zero initializers (which aren't big enough to merit
3713 clearing), and don't try to make bitwise copies of
3714 TREE_ADDRESSABLE types. */
3715 if (valid_const_initializer
3716 && !(cleared || num_nonzero_elements == 0)
3717 && !TREE_ADDRESSABLE (type))
3719 HOST_WIDE_INT size = int_size_in_bytes (type);
3720 unsigned int align;
3722 /* ??? We can still get unbounded array types, at least
3723 from the C++ front end. This seems wrong, but attempt
3724 to work around it for now. */
3725 if (size < 0)
3727 size = int_size_in_bytes (TREE_TYPE (object));
3728 if (size >= 0)
3729 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3732 /* Find the maximum alignment we can assume for the object. */
3733 /* ??? Make use of DECL_OFFSET_ALIGN. */
3734 if (DECL_P (object))
3735 align = DECL_ALIGN (object);
3736 else
3737 align = TYPE_ALIGN (type);
3739 if (size > 0
3740 && num_nonzero_elements > 1
3741 && !can_move_by_pieces (size, align))
3743 tree new_tree;
3745 if (notify_temp_creation)
3746 return GS_ERROR;
3748 new_tree = create_tmp_var_raw (type, "C");
3750 gimple_add_tmp_var (new_tree);
3751 TREE_STATIC (new_tree) = 1;
3752 TREE_READONLY (new_tree) = 1;
3753 DECL_INITIAL (new_tree) = ctor;
3754 if (align > DECL_ALIGN (new_tree))
3756 DECL_ALIGN (new_tree) = align;
3757 DECL_USER_ALIGN (new_tree) = 1;
3759 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3761 TREE_OPERAND (*expr_p, 1) = new_tree;
3763 /* This is no longer an assignment of a CONSTRUCTOR, but
3764 we still may have processing to do on the LHS. So
3765 pretend we didn't do anything here to let that happen. */
3766 return GS_UNHANDLED;
3770 if (notify_temp_creation)
3771 return GS_OK;
3773 /* If there are nonzero elements, pre-evaluate to capture elements
3774 overlapping with the lhs into temporaries. We must do this before
3775 clearing to fetch the values before they are zeroed-out. */
3776 if (num_nonzero_elements > 0)
3778 preeval_data.lhs_base_decl = get_base_address (object);
3779 if (!DECL_P (preeval_data.lhs_base_decl))
3780 preeval_data.lhs_base_decl = NULL;
3781 preeval_data.lhs_alias_set = get_alias_set (object);
3783 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3784 pre_p, post_p, &preeval_data);
3787 if (cleared)
3789 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3790 Note that we still have to gimplify, in order to handle the
3791 case of variable sized types. Avoid shared tree structures. */
3792 CONSTRUCTOR_ELTS (ctor) = NULL;
3793 TREE_SIDE_EFFECTS (ctor) = 0;
3794 object = unshare_expr (object);
3795 gimplify_stmt (expr_p, pre_p);
3798 /* If we have not block cleared the object, or if there are nonzero
3799 elements in the constructor, add assignments to the individual
3800 scalar fields of the object. */
3801 if (!cleared || num_nonzero_elements > 0)
3802 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3804 *expr_p = NULL_TREE;
3806 break;
3808 case COMPLEX_TYPE:
3810 tree r, i;
3812 if (notify_temp_creation)
3813 return GS_OK;
3815 /* Extract the real and imaginary parts out of the ctor. */
3816 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3817 r = VEC_index (constructor_elt, elts, 0)->value;
3818 i = VEC_index (constructor_elt, elts, 1)->value;
3819 if (r == NULL || i == NULL)
3821 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3822 if (r == NULL)
3823 r = zero;
3824 if (i == NULL)
3825 i = zero;
3828 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3829 represent creation of a complex value. */
3830 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3832 ctor = build_complex (type, r, i);
3833 TREE_OPERAND (*expr_p, 1) = ctor;
3835 else
3837 ctor = build2 (COMPLEX_EXPR, type, r, i);
3838 TREE_OPERAND (*expr_p, 1) = ctor;
3839 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3840 pre_p,
3841 post_p,
3842 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3843 fb_rvalue);
3846 break;
3848 case VECTOR_TYPE:
3850 unsigned HOST_WIDE_INT ix;
3851 constructor_elt *ce;
3853 if (notify_temp_creation)
3854 return GS_OK;
3856 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3857 if (TREE_CONSTANT (ctor))
3859 bool constant_p = true;
3860 tree value;
3862 /* Even when ctor is constant, it might contain non-*_CST
3863 elements, such as addresses or trapping values like
3864 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3865 in VECTOR_CST nodes. */
3866 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3867 if (!CONSTANT_CLASS_P (value))
3869 constant_p = false;
3870 break;
3873 if (constant_p)
3875 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3876 break;
3879 /* Don't reduce an initializer constant even if we can't
3880 make a VECTOR_CST. It won't do anything for us, and it'll
3881 prevent us from representing it as a single constant. */
3882 if (initializer_constant_valid_p (ctor, type))
3883 break;
3885 TREE_CONSTANT (ctor) = 0;
3888 /* Vector types use CONSTRUCTOR all the way through gimple
3889 compilation as a general initializer. */
3890 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3892 enum gimplify_status tret;
3893 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3894 fb_rvalue);
3895 if (tret == GS_ERROR)
3896 ret = GS_ERROR;
3898 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3899 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3901 break;
3903 default:
3904 /* So how did we get a CONSTRUCTOR for a scalar type? */
3905 gcc_unreachable ();
3908 if (ret == GS_ERROR)
3909 return GS_ERROR;
3910 else if (want_value)
3912 *expr_p = object;
3913 return GS_OK;
3915 else
3917 /* If we have gimplified both sides of the initializer but have
3918 not emitted an assignment, do so now. */
3919 if (*expr_p)
3921 tree lhs = TREE_OPERAND (*expr_p, 0);
3922 tree rhs = TREE_OPERAND (*expr_p, 1);
3923 gimple init = gimple_build_assign (lhs, rhs);
3924 gimplify_seq_add_stmt (pre_p, init);
3925 *expr_p = NULL;
3928 return GS_ALL_DONE;
3932 /* Given a pointer value OP0, return a simplified version of an
3933 indirection through OP0, or NULL_TREE if no simplification is
3934 possible. Note that the resulting type may be different from
3935 the type pointed to in the sense that it is still compatible
3936 from the langhooks point of view. */
3938 tree
3939 gimple_fold_indirect_ref (tree t)
3941 tree type = TREE_TYPE (TREE_TYPE (t));
3942 tree sub = t;
3943 tree subtype;
3945 STRIP_USELESS_TYPE_CONVERSION (sub);
3946 subtype = TREE_TYPE (sub);
3947 if (!POINTER_TYPE_P (subtype))
3948 return NULL_TREE;
3950 if (TREE_CODE (sub) == ADDR_EXPR)
3952 tree op = TREE_OPERAND (sub, 0);
3953 tree optype = TREE_TYPE (op);
3954 /* *&p => p */
3955 if (useless_type_conversion_p (type, optype))
3956 return op;
3958 /* *(foo *)&fooarray => fooarray[0] */
3959 if (TREE_CODE (optype) == ARRAY_TYPE
3960 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3962 tree type_domain = TYPE_DOMAIN (optype);
3963 tree min_val = size_zero_node;
3964 if (type_domain && TYPE_MIN_VALUE (type_domain))
3965 min_val = TYPE_MIN_VALUE (type_domain);
3966 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3970 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3971 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3972 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3974 tree type_domain;
3975 tree min_val = size_zero_node;
3976 tree osub = sub;
3977 sub = gimple_fold_indirect_ref (sub);
3978 if (! sub)
3979 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3980 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3981 if (type_domain && TYPE_MIN_VALUE (type_domain))
3982 min_val = TYPE_MIN_VALUE (type_domain);
3983 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3986 return NULL_TREE;
3989 /* Given a pointer value OP0, return a simplified version of an
3990 indirection through OP0, or NULL_TREE if no simplification is
3991 possible. This may only be applied to a rhs of an expression.
3992 Note that the resulting type may be different from the type pointed
3993 to in the sense that it is still compatible from the langhooks
3994 point of view. */
3996 static tree
3997 gimple_fold_indirect_ref_rhs (tree t)
3999 return gimple_fold_indirect_ref (t);
4002 /* Subroutine of gimplify_modify_expr to do simplifications of
4003 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4004 something changes. */
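/* An illustrative sketch of the VAR_DECL case below (the names here are
   made up, not taken from any testcase):

     static const struct S init = { 1, 2, 3 };
     ...
     *p = init;

   The CONSTRUCTOR behind 'init' is copied into the RHS so the store can
   be expanded field by field; the change is reverted if
   gimplify_init_constructor reports that the constructor would need to
   be put in memory anyway.  */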
4006 static enum gimplify_status
4007 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4008 gimple_seq *pre_p, gimple_seq *post_p,
4009 bool want_value)
4011 enum gimplify_status ret = GS_OK;
4013 while (ret != GS_UNHANDLED)
4014 switch (TREE_CODE (*from_p))
4016 case VAR_DECL:
4017 /* If we're assigning from a read-only variable initialized with
4018 a constructor, do the direct assignment from the constructor,
4019 but only if neither source nor target is volatile, since this
4020 latter assignment might end up being done on a per-field basis. */
4021 if (DECL_INITIAL (*from_p)
4022 && TREE_READONLY (*from_p)
4023 && !TREE_THIS_VOLATILE (*from_p)
4024 && !TREE_THIS_VOLATILE (*to_p)
4025 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4027 tree old_from = *from_p;
4029 /* Move the constructor into the RHS. */
4030 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4032 /* Let's see if gimplify_init_constructor will need to put
4033 it in memory. If so, revert the change. */
4034 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
4035 if (ret == GS_ERROR)
4037 *from_p = old_from;
4038 /* Fall through. */
4040 else
4042 ret = GS_OK;
4043 break;
4046 ret = GS_UNHANDLED;
4047 break;
4048 case INDIRECT_REF:
4050 /* If we have code like
4052 *(const A*)(A*)&x
4054 where the type of "x" is a (possibly cv-qualified variant
4055 of "A"), treat the entire expression as identical to "x".
4056 This kind of code arises in C++ when an object is bound
4057 to a const reference, and if "x" is a TARGET_EXPR we want
4058 to take advantage of the optimization below. */
4059 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4060 if (t)
4062 *from_p = t;
4063 ret = GS_OK;
4065 else
4066 ret = GS_UNHANDLED;
4067 break;
4070 case TARGET_EXPR:
4072 /* If we are initializing something from a TARGET_EXPR, strip the
4073 TARGET_EXPR and initialize it directly, if possible. This can't
4074 be done if the initializer is void, since that implies that the
4075 temporary is set in some non-trivial way.
4077 ??? What about code that pulls out the temp and uses it
4078 elsewhere? I think that such code never uses the TARGET_EXPR as
4079 an initializer. If I'm wrong, we'll die because the temp won't
4080 have any RTL. In that case, I guess we'll need to replace
4081 references somehow. */
4082 tree init = TARGET_EXPR_INITIAL (*from_p);
4084 if (init
4085 && !VOID_TYPE_P (TREE_TYPE (init)))
4087 *from_p = init;
4088 ret = GS_OK;
4090 else
4091 ret = GS_UNHANDLED;
4093 break;
4095 case COMPOUND_EXPR:
4096 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4097 caught. */
4098 gimplify_compound_expr (from_p, pre_p, true);
4099 ret = GS_OK;
4100 break;
4102 case CONSTRUCTOR:
4103 /* If we're initializing from a CONSTRUCTOR, break this into
4104 individual MODIFY_EXPRs. */
4105 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4106 false);
4108 case COND_EXPR:
4109 /* If we're assigning to a non-register type, push the assignment
4110 down into the branches. This is mandatory for ADDRESSABLE types,
4111 since we cannot generate temporaries for such, but it saves a
4112 copy in other cases as well. */
4113 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4115 /* This code should mirror the code in gimplify_cond_expr. */
4116 enum tree_code code = TREE_CODE (*expr_p);
4117 tree cond = *from_p;
4118 tree result = *to_p;
4120 ret = gimplify_expr (&result, pre_p, post_p,
4121 is_gimple_lvalue, fb_lvalue);
4122 if (ret != GS_ERROR)
4123 ret = GS_OK;
4125 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4126 TREE_OPERAND (cond, 1)
4127 = build2 (code, void_type_node, result,
4128 TREE_OPERAND (cond, 1));
4129 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4130 TREE_OPERAND (cond, 2)
4131 = build2 (code, void_type_node, unshare_expr (result),
4132 TREE_OPERAND (cond, 2));
4134 TREE_TYPE (cond) = void_type_node;
4135 recalculate_side_effects (cond);
4137 if (want_value)
4139 gimplify_and_add (cond, pre_p);
4140 *expr_p = unshare_expr (result);
4142 else
4143 *expr_p = cond;
4144 return ret;
4146 else
4147 ret = GS_UNHANDLED;
4148 break;
4150 case CALL_EXPR:
4151 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4152 return slot so that we don't generate a temporary. */
4153 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4154 && aggregate_value_p (*from_p, *from_p))
4156 bool use_target;
4158 if (!(rhs_predicate_for (*to_p))(*from_p))
4159 /* If we need a temporary, *to_p isn't accurate. */
4160 use_target = false;
4161 else if (TREE_CODE (*to_p) == RESULT_DECL
4162 && DECL_NAME (*to_p) == NULL_TREE
4163 && needs_to_live_in_memory (*to_p))
4164 /* It's OK to use the return slot directly unless it's an NRV. */
4165 use_target = true;
4166 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4167 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4168 /* Don't force regs into memory. */
4169 use_target = false;
4170 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4171 /* It's OK to use the target directly if it's being
4172 initialized. */
4173 use_target = true;
4174 else if (!is_gimple_non_addressable (*to_p))
4175 /* Don't use the original target if it's already addressable;
4176 if its address escapes, and the called function uses the
4177 NRV optimization, a conforming program could see *to_p
4178 change before the called function returns; see c++/19317.
4179 When optimizing, the return_slot pass marks more functions
4180 as safe after we have escape info. */
4181 use_target = false;
4182 else
4183 use_target = true;
4185 if (use_target)
4187 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4188 mark_addressable (*to_p);
4192 ret = GS_UNHANDLED;
4193 break;
4195 /* If we're initializing from a container, push the initialization
4196 inside it. */
4197 case CLEANUP_POINT_EXPR:
4198 case BIND_EXPR:
4199 case STATEMENT_LIST:
4201 tree wrap = *from_p;
4202 tree t;
4204 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4205 fb_lvalue);
4206 if (ret != GS_ERROR)
4207 ret = GS_OK;
4209 t = voidify_wrapper_expr (wrap, *expr_p);
4210 gcc_assert (t == *expr_p);
4212 if (want_value)
4214 gimplify_and_add (wrap, pre_p);
4215 *expr_p = unshare_expr (*to_p);
4217 else
4218 *expr_p = wrap;
4219 return GS_OK;
4222 case COMPOUND_LITERAL_EXPR:
4224 tree complit = TREE_OPERAND (*expr_p, 1);
4225 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4226 tree decl = DECL_EXPR_DECL (decl_s);
4227 tree init = DECL_INITIAL (decl);
4229 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4230 into struct T x = { 0, 1, 2 } if the address of the
4231 compound literal has never been taken. */
4232 if (!TREE_ADDRESSABLE (complit)
4233 && !TREE_ADDRESSABLE (decl)
4234 && init)
4236 *expr_p = copy_node (*expr_p);
4237 TREE_OPERAND (*expr_p, 1) = init;
4238 return GS_OK;
4242 default:
4243 ret = GS_UNHANDLED;
4244 break;
4247 return ret;
4251 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4252 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4253 DECL_GIMPLE_REG_P set.
4255 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4256 other, unmodified part of the complex object just before the total store.
4257 As a consequence, if the object is still uninitialized, an undefined value
4258 will be loaded into a register, which may result in a spurious exception
4259 if the register is floating-point and the value happens to be a signaling
4260 NaN for example. Then the fully-fledged complex operations lowering pass
4261 followed by a DCE pass are necessary in order to fix things up. */
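/* A sketch of the promotion (D.123 is a made-up temporary name):

     __real__ z = a;

   becomes, roughly,

     D.123 = __imag__ z;
     z = COMPLEX_EXPR <a, D.123>;

   so that 'z' is always written as a whole object.  */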
4263 static enum gimplify_status
4264 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4265 bool want_value)
4267 enum tree_code code, ocode;
4268 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4270 lhs = TREE_OPERAND (*expr_p, 0);
4271 rhs = TREE_OPERAND (*expr_p, 1);
4272 code = TREE_CODE (lhs);
4273 lhs = TREE_OPERAND (lhs, 0);
4275 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4276 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4277 other = get_formal_tmp_var (other, pre_p);
4279 realpart = code == REALPART_EXPR ? rhs : other;
4280 imagpart = code == REALPART_EXPR ? other : rhs;
4282 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4283 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4284 else
4285 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4287 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4288 *expr_p = (want_value) ? rhs : NULL_TREE;
4290 return GS_ALL_DONE;
4294 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4296 modify_expr
4297 : varname '=' rhs
4298 | '*' ID '=' rhs
4300 PRE_P points to the list where side effects that must happen before
4301 *EXPR_P should be stored.
4303 POST_P points to the list where side effects that must happen after
4304 *EXPR_P should be stored.
4306 WANT_VALUE is nonzero iff we want to use the value of this expression
4307 in another expression. */
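/* Two illustrative cases handled below (names and sizes invented): an
   assignment between two variable-sized lvalues, say 'x = y' where the
   type's size N is not a constant, is wrapped in a WITH_SIZE_EXPR and
   then emitted roughly as

     __builtin_memcpy (&x, &y, N);

   while an RHS that is a CALL_EXPR, e.g. 'a = foo ()', is emitted as a
   GIMPLE_CALL whose LHS is 'a' rather than as a GIMPLE_ASSIGN (see the
   comments in the body for details).  */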
4309 static enum gimplify_status
4310 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4311 bool want_value)
4313 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4314 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4315 enum gimplify_status ret = GS_UNHANDLED;
4316 gimple assign;
4317 location_t loc = EXPR_LOCATION (*expr_p);
4319 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4320 || TREE_CODE (*expr_p) == INIT_EXPR);
4322 /* Insert pointer conversions required by the middle-end that are not
4323 required by the frontend. This fixes middle-end type checking for
4324 cases such as gcc.dg/redecl-6.c. */
4325 if (POINTER_TYPE_P (TREE_TYPE (*to_p))
4326 && lang_hooks.types_compatible_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4328 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4329 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4330 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4333 /* See if any simplifications can be done based on what the RHS is. */
4334 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4335 want_value);
4336 if (ret != GS_UNHANDLED)
4337 return ret;
4339 /* For zero-sized types, only gimplify the left hand side and right hand
4340 side as statements and throw away the assignment. Do this after
4341 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4342 types properly. */
4343 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4345 gimplify_stmt (from_p, pre_p);
4346 gimplify_stmt (to_p, pre_p);
4347 *expr_p = NULL_TREE;
4348 return GS_ALL_DONE;
4351 /* If the value being copied is of variable width, compute the length
4352 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4353 before gimplifying any of the operands so that we can resolve any
4354 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4355 the size of the expression to be copied, not of the destination, so
4356 that is what we must do here. */
4357 maybe_with_size_expr (from_p);
4359 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4360 if (ret == GS_ERROR)
4361 return ret;
4363 /* As a special case, we have to temporarily allow for assignments
4364 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4365 a toplevel statement, when gimplifying the GENERIC expression
4366 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4367 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4369 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4370 prevent gimplify_expr from trying to create a new temporary for
4371 foo's LHS, we tell it that it should only gimplify until it
4372 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4373 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4374 and all we need to do here is set 'a' to be its LHS. */
4375 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4376 fb_rvalue);
4377 if (ret == GS_ERROR)
4378 return ret;
4380 /* Now see if the above changed *from_p to something we handle specially. */
4381 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4382 want_value);
4383 if (ret != GS_UNHANDLED)
4384 return ret;
4386 /* If we've got a variable sized assignment between two lvalues (i.e. does
4387 not involve a call), then we can make things a bit more straightforward
4388 by converting the assignment to memcpy or memset. */
4389 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4391 tree from = TREE_OPERAND (*from_p, 0);
4392 tree size = TREE_OPERAND (*from_p, 1);
4394 if (TREE_CODE (from) == CONSTRUCTOR)
4395 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4397 if (is_gimple_addressable (from))
4399 *from_p = from;
4400 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4401 pre_p);
4405 /* Transform partial stores to non-addressable complex variables into
4406 total stores. This allows us to use real instead of virtual operands
4407 for these variables, which improves optimization. */
4408 if ((TREE_CODE (*to_p) == REALPART_EXPR
4409 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4410 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4411 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4413 /* Try to alleviate the effects of the gimplification creating artificial
4414 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4415 if (!gimplify_ctxp->into_ssa
4416 && DECL_P (*from_p)
4417 && DECL_IGNORED_P (*from_p)
4418 && DECL_P (*to_p)
4419 && !DECL_IGNORED_P (*to_p))
4421 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4422 DECL_NAME (*from_p)
4423 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4424 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4425 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4428 if (TREE_CODE (*from_p) == CALL_EXPR)
4430 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4431 instead of a GIMPLE_ASSIGN. */
4432 assign = gimple_build_call_from_tree (*from_p);
4433 gimple_call_set_lhs (assign, *to_p);
4435 else
4437 assign = gimple_build_assign (*to_p, *from_p);
4438 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4441 gimplify_seq_add_stmt (pre_p, assign);
4443 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4445 /* If we've somehow already got an SSA_NAME on the LHS, then
4446 we've probably modified it twice. Not good. */
4447 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4448 *to_p = make_ssa_name (*to_p, assign);
4449 gimple_set_lhs (assign, *to_p);
4452 if (want_value)
4454 *expr_p = unshare_expr (*to_p);
4455 return GS_OK;
4457 else
4458 *expr_p = NULL;
4460 return GS_ALL_DONE;
4463 /* Gimplify a comparison between two variable-sized objects. Do this
4464 with a call to BUILT_IN_MEMCMP. */
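/* A sketch (operands and size are illustrative): an equality test
   between two variable-sized objects, say X == Y whose TYPE_SIZE_UNIT
   is SZ, is rewritten as

     __builtin_memcmp (&X, &Y, SZ) == 0

   with any PLACEHOLDER_EXPRs in SZ substituted from X first; the
   original comparison code (==, !=) is preserved.  */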
4466 static enum gimplify_status
4467 gimplify_variable_sized_compare (tree *expr_p)
4469 tree op0 = TREE_OPERAND (*expr_p, 0);
4470 tree op1 = TREE_OPERAND (*expr_p, 1);
4471 tree t, arg, dest, src;
4472 location_t loc = EXPR_LOCATION (*expr_p);
4474 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4475 arg = unshare_expr (arg);
4476 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4477 src = build_fold_addr_expr_loc (loc, op1);
4478 dest = build_fold_addr_expr_loc (loc, op0);
4479 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4480 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4481 *expr_p
4482 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4484 return GS_OK;
4487 /* Gimplify a comparison between two aggregate objects of integral scalar
4488 mode as a comparison between the bitwise equivalent scalar values. */
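/* A sketch: when two aggregates have an integral scalar mode (e.g. a
   struct that fits in one machine word), the comparison is done on
   their bit patterns, roughly

     VIEW_CONVERT_EXPR<word_type>(X) == VIEW_CONVERT_EXPR<word_type>(Y)

   where word_type stands for the unsigned scalar type returned by
   lang_hooks.types.type_for_mode for the aggregate's mode.  */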
4490 static enum gimplify_status
4491 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4493 location_t loc = EXPR_LOCATION (*expr_p);
4494 tree op0 = TREE_OPERAND (*expr_p, 0);
4495 tree op1 = TREE_OPERAND (*expr_p, 1);
4497 tree type = TREE_TYPE (op0);
4498 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4500 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4501 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4503 *expr_p
4504 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4506 return GS_OK;
4509 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4510 points to the expression to gimplify.
4512 Expressions of the form 'a && b' are gimplified to:
4514 a && b ? true : false
4516 LOCUS is the source location to be put on the generated COND_EXPR.
4517 gimplify_cond_expr will do the rest. */
4519 static enum gimplify_status
4520 gimplify_boolean_expr (tree *expr_p, location_t locus)
4522 /* Preserve the original type of the expression. */
4523 tree type = TREE_TYPE (*expr_p);
4525 *expr_p = build3 (COND_EXPR, type, *expr_p,
4526 fold_convert_loc (locus, type, boolean_true_node),
4527 fold_convert_loc (locus, type, boolean_false_node));
4529 SET_EXPR_LOCATION (*expr_p, locus);
4531 return GS_OK;
4534 /* Gimplifies an expression sequence. This function gimplifies each
4535 expression and re-writes the original expression with the last
4536 expression of the sequence in GIMPLE form.
4538 PRE_P points to the list where the side effects for all the
4539 expressions in the sequence will be emitted.
4541 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
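/* For illustration: given the GENERIC sequence (a, b, c), 'a' and 'b'
   are emitted as statements into PRE_P and *EXPR_P is rewritten to 'c';
   if WANT_VALUE is false, 'c' is emitted as a statement as well.  */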
4543 static enum gimplify_status
4544 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4546 tree t = *expr_p;
4550 tree *sub_p = &TREE_OPERAND (t, 0);
4552 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4553 gimplify_compound_expr (sub_p, pre_p, false);
4554 else
4555 gimplify_stmt (sub_p, pre_p);
4557 t = TREE_OPERAND (t, 1);
4559 while (TREE_CODE (t) == COMPOUND_EXPR);
4561 *expr_p = t;
4562 if (want_value)
4563 return GS_OK;
4564 else
4566 gimplify_stmt (expr_p, pre_p);
4567 return GS_ALL_DONE;
4572 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4573 gimplify. After gimplification, EXPR_P will point to a new temporary
4574 that holds the original value of the SAVE_EXPR node.
4576 PRE_P points to the list where side effects that must happen before
4577 *EXPR_P should be stored. */
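/* For illustration (D.456 is a made-up temporary): the first time
   SAVE_EXPR <i + 1> is gimplified, its operand is evaluated once,
   roughly

     D.456 = i + 1;

   the node is marked resolved, and later occurrences simply yield
   D.456.  A void-valued operand is gimplified only for its side
   effects and produces no value.  */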
4579 static enum gimplify_status
4580 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4582 enum gimplify_status ret = GS_ALL_DONE;
4583 tree val;
4585 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4586 val = TREE_OPERAND (*expr_p, 0);
4588 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4589 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4591 /* The operand may be a void-valued expression such as SAVE_EXPRs
4592 generated by the Java frontend for class initialization. It is
4593 being executed only for its side-effects. */
4594 if (TREE_TYPE (val) == void_type_node)
4596 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4597 is_gimple_stmt, fb_none);
4598 val = NULL;
4600 else
4601 val = get_initialized_tmp_var (val, pre_p, post_p);
4603 TREE_OPERAND (*expr_p, 0) = val;
4604 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4607 *expr_p = val;
4609 return ret;
4612 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4614 unary_expr
4615 : ...
4616 | '&' varname
4619 PRE_P points to the list where side effects that must happen before
4620 *EXPR_P should be stored.
4622 POST_P points to the list where side effects that must happen after
4623 *EXPR_P should be stored. */
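/* Two sketches of the rewrites below ('ptr', 'x' and 'T' are
   illustrative names): taking the address of an indirection, '&*ptr',
   collapses back to 'ptr' (with a conversion added if the pointer
   types differ), and '&VIEW_CONVERT_EXPR<T>(x)' becomes the address of
   'x' converted to the type of the original ADDR_EXPR.  */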
4625 static enum gimplify_status
4626 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4628 tree expr = *expr_p;
4629 tree op0 = TREE_OPERAND (expr, 0);
4630 enum gimplify_status ret;
4631 location_t loc = EXPR_LOCATION (*expr_p);
4633 switch (TREE_CODE (op0))
4635 case INDIRECT_REF:
4636 case MISALIGNED_INDIRECT_REF:
4637 do_indirect_ref:
4638 /* Check if we are dealing with an expression of the form '&*ptr'.
4639 While the front end folds away '&*ptr' into 'ptr', these
4640 expressions may be generated internally by the compiler (e.g.,
4641 builtins like __builtin_va_end). */
4642 /* Caution: the silent array decomposition semantics we allow for
4643 ADDR_EXPR means we can't always discard the pair. */
4644 /* Gimplification of the ADDR_EXPR operand may drop
4645 cv-qualification conversions, so make sure we add them if
4646 needed. */
4648 tree op00 = TREE_OPERAND (op0, 0);
4649 tree t_expr = TREE_TYPE (expr);
4650 tree t_op00 = TREE_TYPE (op00);
4652 if (!useless_type_conversion_p (t_expr, t_op00))
4653 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4654 *expr_p = op00;
4655 ret = GS_OK;
4657 break;
4659 case VIEW_CONVERT_EXPR:
4660 /* Take the address of our operand and then convert it to the type of
4661 this ADDR_EXPR.
4663 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4664 all clear. The impact of this transformation is even less clear. */
4666 /* If the operand is a useless conversion, look through it. Doing so
4667 guarantees that the ADDR_EXPR and its operand will remain of the
4668 same type. */
4669 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4670 op0 = TREE_OPERAND (op0, 0);
4672 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4673 build_fold_addr_expr_loc (loc,
4674 TREE_OPERAND (op0, 0)));
4675 ret = GS_OK;
4676 break;
4678 default:
4679 /* We use fb_either here because the C frontend sometimes takes
4680 the address of a call that returns a struct; see
4681 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4682 the implied temporary explicit. */
4684 /* Mark the RHS addressable. */
4685 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4686 is_gimple_addressable, fb_either);
4687 if (ret == GS_ERROR)
4688 break;
4690 /* We cannot rely on making the RHS addressable if it is
4691 a temporary created by gimplification. In this case create a
4692 new temporary that is initialized by a copy (which will
4693 become a store after we mark it addressable).
4694 This mostly happens if the frontend passed us something that
4695 it could not mark addressable yet, like a Fortran
4696 pass-by-reference parameter (int) floatvar. */
4697 if (is_gimple_reg (TREE_OPERAND (expr, 0)))
4698 TREE_OPERAND (expr, 0)
4699 = get_initialized_tmp_var (TREE_OPERAND (expr, 0), pre_p, post_p);
4701 op0 = TREE_OPERAND (expr, 0);
4703 /* For various reasons, the gimplification of the expression
4704 may have made a new INDIRECT_REF. */
4705 if (TREE_CODE (op0) == INDIRECT_REF)
4706 goto do_indirect_ref;
4708 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4709 recompute_tree_invariant_for_addr_expr (expr);
4711 mark_addressable (TREE_OPERAND (expr, 0));
4712 break;
4715 return ret;
4718 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4719 value; output operands should be a gimple lvalue. */
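/* One rewrite worth illustrating (operand number 0 is just an example
   position): an in/out operand such as

     asm ("..." : "+r" (x));

   is split into a plain output plus a matching input, roughly

     asm ("..." : "=r" (x) : "0" (x));

   which gives the optimizers independent input and output operands.  */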
4721 static enum gimplify_status
4722 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4724 tree expr;
4725 int noutputs;
4726 const char **oconstraints;
4727 int i;
4728 tree link;
4729 const char *constraint;
4730 bool allows_mem, allows_reg, is_inout;
4731 enum gimplify_status ret, tret;
4732 gimple stmt;
4733 VEC(tree, gc) *inputs;
4734 VEC(tree, gc) *outputs;
4735 VEC(tree, gc) *clobbers;
4736 tree link_next;
4738 expr = *expr_p;
4739 noutputs = list_length (ASM_OUTPUTS (expr));
4740 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4742 inputs = outputs = clobbers = NULL;
4744 ret = GS_ALL_DONE;
4745 link_next = NULL_TREE;
4746 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4748 bool ok;
4749 size_t constraint_len;
4751 link_next = TREE_CHAIN (link);
4753 oconstraints[i]
4754 = constraint
4755 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4756 constraint_len = strlen (constraint);
4757 if (constraint_len == 0)
4758 continue;
4760 ok = parse_output_constraint (&constraint, i, 0, 0,
4761 &allows_mem, &allows_reg, &is_inout);
4762 if (!ok)
4764 ret = GS_ERROR;
4765 is_inout = false;
4768 if (!allows_reg && allows_mem)
4769 mark_addressable (TREE_VALUE (link));
4771 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4772 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4773 fb_lvalue | fb_mayfail);
4774 if (tret == GS_ERROR)
4776 error ("invalid lvalue in asm output %d", i);
4777 ret = tret;
4780 VEC_safe_push (tree, gc, outputs, link);
4781 TREE_CHAIN (link) = NULL_TREE;
4783 if (is_inout)
4785 /* An input/output operand. To give the optimizers more
4786 flexibility, split it into separate input and output
4787 operands. */
4788 tree input;
4789 char buf[10];
4791 /* Turn the in/out constraint into an output constraint. */
4792 char *p = xstrdup (constraint);
4793 p[0] = '=';
4794 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4796 /* And add a matching input constraint. */
4797 if (allows_reg)
4799 sprintf (buf, "%d", i);
4801 /* If there are multiple alternatives in the constraint,
4802 handle each of them individually. Those that allow a register
4803 will be replaced with the operand number; the others will stay
4804 unchanged. */
4805 if (strchr (p, ',') != NULL)
4807 size_t len = 0, buflen = strlen (buf);
4808 char *beg, *end, *str, *dst;
4810 for (beg = p + 1;;)
4812 end = strchr (beg, ',');
4813 if (end == NULL)
4814 end = strchr (beg, '\0');
4815 if ((size_t) (end - beg) < buflen)
4816 len += buflen + 1;
4817 else
4818 len += end - beg + 1;
4819 if (*end)
4820 beg = end + 1;
4821 else
4822 break;
4825 str = (char *) alloca (len);
4826 for (beg = p + 1, dst = str;;)
4828 const char *tem;
4829 bool mem_p, reg_p, inout_p;
4831 end = strchr (beg, ',');
4832 if (end)
4833 *end = '\0';
4834 beg[-1] = '=';
4835 tem = beg - 1;
4836 parse_output_constraint (&tem, i, 0, 0,
4837 &mem_p, &reg_p, &inout_p);
4838 if (dst != str)
4839 *dst++ = ',';
4840 if (reg_p)
4842 memcpy (dst, buf, buflen);
4843 dst += buflen;
4845 else
4847 if (end)
4848 len = end - beg;
4849 else
4850 len = strlen (beg);
4851 memcpy (dst, beg, len);
4852 dst += len;
4854 if (end)
4855 beg = end + 1;
4856 else
4857 break;
4859 *dst = '\0';
4860 input = build_string (dst - str, str);
4862 else
4863 input = build_string (strlen (buf), buf);
4865 else
4866 input = build_string (constraint_len - 1, constraint + 1);
4868 free (p);
4870 input = build_tree_list (build_tree_list (NULL_TREE, input),
4871 unshare_expr (TREE_VALUE (link)));
4872 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4876 link_next = NULL_TREE;
4877 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4879 link_next = TREE_CHAIN (link);
4880 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4881 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4882 oconstraints, &allows_mem, &allows_reg);
4884 /* If we can't make copies, we can only accept memory. */
4885 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4887 if (allows_mem)
4888 allows_reg = 0;
4889 else
4891 error ("impossible constraint in %<asm%>");
4892 error ("non-memory input %d must stay in memory", i);
4893 return GS_ERROR;
4897 /* If the operand is a memory input, it should be an lvalue. */
4898 if (!allows_reg && allows_mem)
4900 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4901 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4902 mark_addressable (TREE_VALUE (link));
4903 if (tret == GS_ERROR)
4905 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4906 input_location = EXPR_LOCATION (TREE_VALUE (link));
4907 error ("memory input %d is not directly addressable", i);
4908 ret = tret;
4911 else
4913 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4914 is_gimple_asm_val, fb_rvalue);
4915 if (tret == GS_ERROR)
4916 ret = tret;
4919 TREE_CHAIN (link) = NULL_TREE;
4920 VEC_safe_push (tree, gc, inputs, link);
4923 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
4924 VEC_safe_push (tree, gc, clobbers, link);
4926 /* Do not add ASMs with errors to the gimple IL stream. */
4927 if (ret != GS_ERROR)
4929 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
4930 inputs, outputs, clobbers);
4932 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4933 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
4935 gimplify_seq_add_stmt (pre_p, stmt);
4938 return ret;
4941 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4942 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4943 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4944 return to this function.
4946 FIXME should we complexify the prequeue handling instead? Or use flags
4947 for all the cleanups and let the optimizer tighten them up? The current
4948 code seems pretty fragile; it will break on a cleanup within any
4949 non-conditional nesting. But any such nesting would be broken, anyway;
4950 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4951 and continues out of it. We can do that at the RTL level, though, so
4952 having an optimizer to tighten up try/finally regions would be a Good
4953 Thing. */
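/* A sketch of the rewriting: a cleanup marker left in the middle of the
   gimplified body,

     stmt1;
     GIMPLE_WITH_CLEANUP_EXPR <cleanup>;
     stmt2;
     stmt3;

   is turned into

     stmt1;
     try { stmt2; stmt3; } finally { cleanup; }

   (or a try/catch when the cleanup is flagged as EH-only).  */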
4955 static enum gimplify_status
4956 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
4958 gimple_stmt_iterator iter;
4959 gimple_seq body_sequence = NULL;
4961 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4963 /* We only care about the number of conditions between the innermost
4964 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4965 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4966 int old_conds = gimplify_ctxp->conditions;
4967 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
4968 gimplify_ctxp->conditions = 0;
4969 gimplify_ctxp->conditional_cleanups = NULL;
4971 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
4973 gimplify_ctxp->conditions = old_conds;
4974 gimplify_ctxp->conditional_cleanups = old_cleanups;
4976 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
4978 gimple wce = gsi_stmt (iter);
4980 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
4982 if (gsi_one_before_end_p (iter))
4984 /* Note that gsi_insert_seq_before and gsi_remove do not
4985 scan operands, unlike some other sequence mutators. */
4986 gsi_insert_seq_before_without_update (&iter,
4987 gimple_wce_cleanup (wce),
4988 GSI_SAME_STMT);
4989 gsi_remove (&iter, true);
4990 break;
4992 else
4994 gimple gtry;
4995 gimple_seq seq;
4996 enum gimple_try_flags kind;
4998 if (gimple_wce_cleanup_eh_only (wce))
4999 kind = GIMPLE_TRY_CATCH;
5000 else
5001 kind = GIMPLE_TRY_FINALLY;
5002 seq = gsi_split_seq_after (iter);
5004 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5005 /* Do not use gsi_replace here, as it may scan operands.
5006 We want to do a simple structural modification only. */
5007 *gsi_stmt_ptr (&iter) = gtry;
5008 iter = gsi_start (seq);
5011 else
5012 gsi_next (&iter);
5015 gimplify_seq_add_seq (pre_p, body_sequence);
5016 if (temp)
5018 *expr_p = temp;
5019 return GS_OK;
5021 else
5023 *expr_p = NULL;
5024 return GS_ALL_DONE;
5028 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5029 is the cleanup action required. EH_ONLY is true if the cleanup should
5030 only be executed if an exception is thrown, not on normal exit. */
5032 static void
5033 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5035 gimple wce;
5036 gimple_seq cleanup_stmts = NULL;
5038 /* Errors can result in improperly nested cleanups, which results in
5039 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5040 if (errorcount || sorrycount)
5041 return;
5043 if (gimple_conditional_context ())
5045 /* If we're in a conditional context, this is more complex. We only
5046 want to run the cleanup if we actually ran the initialization that
5047 necessitates it, but we want to run it after the end of the
5048 conditional context. So we wrap the try/finally around the
5049 condition and use a flag to determine whether or not to actually
5050 run the destructor. Thus
5052 test ? f(A()) : 0
5054 becomes (approximately)
5056 flag = 0;
5057 try {
5058 if (test) { A::A(temp); flag = 1; val = f(temp); }
5059 else { val = 0; }
5060 } finally {
5061 if (flag) A::~A(temp);
5065 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5066 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5067 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5069 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5070 gimplify_stmt (&cleanup, &cleanup_stmts);
5071 wce = gimple_build_wce (cleanup_stmts);
5073 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5074 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5075 gimplify_seq_add_stmt (pre_p, ftrue);
5077 /* Because of this manipulation, and the EH edges that jump
5078 threading cannot redirect, the temporary (VAR) will appear
5079 to be used uninitialized. Don't warn. */
5080 TREE_NO_WARNING (var) = 1;
5082 else
5084 gimplify_stmt (&cleanup, &cleanup_stmts);
5085 wce = gimple_build_wce (cleanup_stmts);
5086 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5087 gimplify_seq_add_stmt (pre_p, wce);
5091 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
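/* For illustration (D.789 stands for TARGET_EXPR_SLOT): a TARGET_EXPR
   used as a full expression,

     TARGET_EXPR <D.789, init, cleanup>

   has D.789 registered as a temporary, 'init' gimplified roughly as
   'D.789 = init' (or only for its side effects when it is void), the
   cleanup pushed for D.789 if present, and the whole expression
   replaced by D.789.  */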
5093 static enum gimplify_status
5094 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5096 tree targ = *expr_p;
5097 tree temp = TARGET_EXPR_SLOT (targ);
5098 tree init = TARGET_EXPR_INITIAL (targ);
5099 enum gimplify_status ret;
5101 if (init)
5103 /* A TARGET_EXPR temp isn't part of the enclosing block, so add it
5104 to the temps list. Also handle variable-length TARGET_EXPRs. */
5105 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5107 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5108 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5109 gimplify_vla_decl (temp, pre_p);
5111 else
5112 gimple_add_tmp_var (temp);
5114 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5115 expression is supposed to initialize the slot. */
5116 if (VOID_TYPE_P (TREE_TYPE (init)))
5117 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5118 else
5120 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5121 init = init_expr;
5122 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5123 init = NULL;
5124 ggc_free (init_expr);
5126 if (ret == GS_ERROR)
5128 /* PR c++/28266 Make sure this is expanded only once. */
5129 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5130 return GS_ERROR;
5132 if (init)
5133 gimplify_and_add (init, pre_p);
5135 /* If needed, push the cleanup for the temp. */
5136 if (TARGET_EXPR_CLEANUP (targ))
5137 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5138 CLEANUP_EH_ONLY (targ), pre_p);
5140 /* Only expand this once. */
5141 TREE_OPERAND (targ, 3) = init;
5142 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5144 else
5145 /* We should have expanded this before. */
5146 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5148 *expr_p = temp;
5149 return GS_OK;
5152 /* Gimplification of expression trees. */
5154 /* Gimplify an expression which appears at statement context. The
5155 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5156 NULL, a new sequence is allocated.
5158 Return true if we actually added a statement to the queue. */
5160 bool
5161 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5163 gimple_seq_node last;
5165 if (!*seq_p)
5166 *seq_p = gimple_seq_alloc ();
5168 last = gimple_seq_last (*seq_p);
5169 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5170 return last != gimple_seq_last (*seq_p);
5174 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
5175 parallel contexts. If entries already exist, force them to be some
5176 flavor of private. If there is no enclosing parallel, do nothing. */
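/* Informally: the DECLs passed here are typically the gimplified size
   temporaries of variable-length types (say a temporary holding 'n * 4'
   for a VLA).  Making such a temporary firstprivate in each enclosing
   parallel gives every thread its own initialized copy of the size
   rather than a shared one.  The example is illustrative only.  */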
5178 void
5179 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5181 splay_tree_node n;
5183 if (decl == NULL || !DECL_P (decl))
5184 return;
5188 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5189 if (n != NULL)
5191 if (n->value & GOVD_SHARED)
5192 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5193 else
5194 return;
5196 else if (ctx->region_type != ORT_WORKSHARE)
5197 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5199 ctx = ctx->outer_context;
5201 while (ctx);
5204 /* Similarly for each of the type sizes of TYPE. */
5206 static void
5207 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5209 if (type == NULL || type == error_mark_node)
5210 return;
5211 type = TYPE_MAIN_VARIANT (type);
5213 if (pointer_set_insert (ctx->privatized_types, type))
5214 return;
5216 switch (TREE_CODE (type))
5218 case INTEGER_TYPE:
5219 case ENUMERAL_TYPE:
5220 case BOOLEAN_TYPE:
5221 case REAL_TYPE:
5222 case FIXED_POINT_TYPE:
5223 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5224 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5225 break;
5227 case ARRAY_TYPE:
5228 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5229 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5230 break;
5232 case RECORD_TYPE:
5233 case UNION_TYPE:
5234 case QUAL_UNION_TYPE:
5236 tree field;
5237 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5238 if (TREE_CODE (field) == FIELD_DECL)
5240 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5241 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5244 break;
5246 case POINTER_TYPE:
5247 case REFERENCE_TYPE:
5248 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5249 break;
5251 default:
5252 break;
5255 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5256 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5257 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5260 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5262 static void
5263 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5265 splay_tree_node n;
5266 unsigned int nflags;
5267 tree t;
5269 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5270 return;
5272 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5273 there are constructors involved somewhere. */
5274 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5275 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5276 flags |= GOVD_SEEN;
5278 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5279 if (n != NULL)
5281 /* We shouldn't be re-adding the decl with the same data
5282 sharing class. */
5283 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5284 /* The only combination of data sharing classes we should see is
5285 FIRSTPRIVATE and LASTPRIVATE. */
5286 nflags = n->value | flags;
5287 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5288 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5289 n->value = nflags;
5290 return;
5293 /* When adding a variable-sized variable, we have to handle all sorts
5294 of additional bits of data: the pointer replacement variable, and
5295 the parameters of the type. */
5296 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5298 /* Add the pointer replacement variable as PRIVATE if the variable
5299 replacement is private, else FIRSTPRIVATE since we'll need the
5300 address of the original variable either for SHARED, or for the
5301 copy into or out of the context. */
5302 if (!(flags & GOVD_LOCAL))
5304 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5305 nflags |= flags & GOVD_SEEN;
5306 t = DECL_VALUE_EXPR (decl);
5307 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5308 t = TREE_OPERAND (t, 0);
5309 gcc_assert (DECL_P (t));
5310 omp_add_variable (ctx, t, nflags);
5313 /* Add all of the variable and type parameters (which should have
5314 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5315 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5316 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5317 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5319 /* The variable-sized variable itself is never SHARED, only some form
5320 of PRIVATE. The sharing would take place via the pointer variable
5321 which we remapped above. */
5322 if (flags & GOVD_SHARED)
5323 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5324 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5326 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5327 alloca statement we generate for the variable, so make sure it
5328 is available. This isn't automatically needed for the SHARED
5329 case, since we won't be allocating local storage then.
5330 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
5331 in this case omp_notice_variable will be called later
5332 on when it is gimplified. */
5333 else if (! (flags & GOVD_LOCAL))
5334 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5336 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5338 gcc_assert ((flags & GOVD_LOCAL) == 0);
5339 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5341 /* Similar to the direct variable sized case above, we'll need the
5342 size of references being privatized. */
5343 if ((flags & GOVD_SHARED) == 0)
5345 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5346 if (TREE_CODE (t) != INTEGER_CST)
5347 omp_notice_variable (ctx, t, true);
5351 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5354 /* Record the fact that DECL was used within the OpenMP context CTX.
5355 IN_CODE is true when real code uses DECL, and false when we should
5356 merely emit default(none) errors. Return true if DECL is going to
5357 be remapped and thus DECL shouldn't be gimplified into its
5358 DECL_VALUE_EXPR (if any). */
5360 static bool
5361 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5363 splay_tree_node n;
5364 unsigned flags = in_code ? GOVD_SEEN : 0;
5365 bool ret = false, shared;
5367 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5368 return false;
5370 /* Threadprivate variables are predetermined. */
5371 if (is_global_var (decl))
5373 if (DECL_THREAD_LOCAL_P (decl))
5374 return false;
5376 if (DECL_HAS_VALUE_EXPR_P (decl))
5378 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5380 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5381 return false;
5385 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5386 if (n == NULL)
5388 enum omp_clause_default_kind default_kind, kind;
5389 struct gimplify_omp_ctx *octx;
5391 if (ctx->region_type == ORT_WORKSHARE)
5392 goto do_outer;
5394 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5395 remapped firstprivate instead of shared. To some extent this is
5396 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5397 default_kind = ctx->default_kind;
5398 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5399 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5400 default_kind = kind;
5402 switch (default_kind)
5404 case OMP_CLAUSE_DEFAULT_NONE:
5405 error ("%qE not specified in enclosing parallel",
5406 DECL_NAME (decl));
5407 error_at (ctx->location, "enclosing parallel");
5408 /* FALLTHRU */
5409 case OMP_CLAUSE_DEFAULT_SHARED:
5410 flags |= GOVD_SHARED;
5411 break;
5412 case OMP_CLAUSE_DEFAULT_PRIVATE:
5413 flags |= GOVD_PRIVATE;
5414 break;
5415 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5416 flags |= GOVD_FIRSTPRIVATE;
5417 break;
5418 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5419 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5420 gcc_assert (ctx->region_type == ORT_TASK);
5421 if (ctx->outer_context)
5422 omp_notice_variable (ctx->outer_context, decl, in_code);
5423 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5425 splay_tree_node n2;
5427 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5428 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5430 flags |= GOVD_FIRSTPRIVATE;
5431 break;
5433 if ((octx->region_type & ORT_PARALLEL) != 0)
5434 break;
5436 if (flags & GOVD_FIRSTPRIVATE)
5437 break;
5438 if (octx == NULL
5439 && (TREE_CODE (decl) == PARM_DECL
5440 || (!is_global_var (decl)
5441 && DECL_CONTEXT (decl) == current_function_decl)))
5443 flags |= GOVD_FIRSTPRIVATE;
5444 break;
5446 flags |= GOVD_SHARED;
5447 break;
5448 default:
5449 gcc_unreachable ();
5452 if ((flags & GOVD_PRIVATE)
5453 && lang_hooks.decls.omp_private_outer_ref (decl))
5454 flags |= GOVD_PRIVATE_OUTER_REF;
5456 omp_add_variable (ctx, decl, flags);
5458 shared = (flags & GOVD_SHARED) != 0;
5459 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5460 goto do_outer;
5463 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5464 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5465 && DECL_SIZE (decl)
5466 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5468 splay_tree_node n2;
5469 tree t = DECL_VALUE_EXPR (decl);
5470 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5471 t = TREE_OPERAND (t, 0);
5472 gcc_assert (DECL_P (t));
5473 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5474 n2->value |= GOVD_SEEN;
5477 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5478 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5480 /* If nothing changed, there's nothing left to do. */
5481 if ((n->value & flags) == flags)
5482 return ret;
5483 flags |= n->value;
5484 n->value = flags;
5486 do_outer:
5487 /* If the variable is private in the current context, then we don't
5488 need to propagate anything to an outer context. */
5489 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5490 return ret;
5491 if (ctx->outer_context
5492 && omp_notice_variable (ctx->outer_context, decl, in_code))
5493 return true;
5494 return ret;
5497 /* Verify that DECL is private within CTX. If there's specific information
5498 to the contrary in the innermost scope, generate an error. */
5500 static bool
5501 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5503 splay_tree_node n;
5505 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5506 if (n != NULL)
5508 if (n->value & GOVD_SHARED)
5510 if (ctx == gimplify_omp_ctxp)
5512 error ("iteration variable %qE should be private",
5513 DECL_NAME (decl));
5514 n->value = GOVD_PRIVATE;
5515 return true;
5517 else
5518 return false;
5520 else if ((n->value & GOVD_EXPLICIT) != 0
5521 && (ctx == gimplify_omp_ctxp
5522 || (ctx->region_type == ORT_COMBINED_PARALLEL
5523 && gimplify_omp_ctxp->outer_context == ctx)))
5525 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5526 error ("iteration variable %qE should not be firstprivate",
5527 DECL_NAME (decl));
5528 else if ((n->value & GOVD_REDUCTION) != 0)
5529 error ("iteration variable %qE should not be reduction",
5530 DECL_NAME (decl));
5532 return (ctx == gimplify_omp_ctxp
5533 || (ctx->region_type == ORT_COMBINED_PARALLEL
5534 && gimplify_omp_ctxp->outer_context == ctx));
5537 if (ctx->region_type != ORT_WORKSHARE)
5538 return false;
5539 else if (ctx->outer_context)
5540 return omp_is_private (ctx->outer_context, decl);
5541 return false;
5544 /* Return true if DECL is private within a parallel region
5545 that binds to the current construct's context, or appears in that
5546 region's REDUCTION clause. */
5548 static bool
5549 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5551 splay_tree_node n;
5555 ctx = ctx->outer_context;
5556 if (ctx == NULL)
5557 return !(is_global_var (decl)
5558 /* References might be private, but might be shared too. */
5559 || lang_hooks.decls.omp_privatize_by_reference (decl));
5561 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5562 if (n != NULL)
5563 return (n->value & GOVD_SHARED) == 0;
5565 while (ctx->region_type == ORT_WORKSHARE);
5566 return false;
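/* In effect, the loop above walks outward from CTX through enclosing
   workshare contexts and stops at the first context that records a mapping
   for DECL (or at the first parallel/task context).  If no enclosing
   context knows about DECL at all, a function-local variable that is not
   privatized by reference is treated as private.  */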
5569 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5570 omp context and into previous omp contexts. */
5572 static void
5573 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5574 enum omp_region_type region_type)
5576 struct gimplify_omp_ctx *ctx, *outer_ctx;
5577 struct gimplify_ctx gctx;
5578 tree c;
5580 ctx = new_omp_context (region_type);
5581 outer_ctx = ctx->outer_context;
5583 while ((c = *list_p) != NULL)
5585 bool remove = false;
5586 bool notice_outer = true;
5587 const char *check_non_private = NULL;
5588 unsigned int flags;
5589 tree decl;
5591 switch (OMP_CLAUSE_CODE (c))
5593 case OMP_CLAUSE_PRIVATE:
5594 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5595 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5597 flags |= GOVD_PRIVATE_OUTER_REF;
5598 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5600 else
5601 notice_outer = false;
5602 goto do_add;
5603 case OMP_CLAUSE_SHARED:
5604 flags = GOVD_SHARED | GOVD_EXPLICIT;
5605 goto do_add;
5606 case OMP_CLAUSE_FIRSTPRIVATE:
5607 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5608 check_non_private = "firstprivate";
5609 goto do_add;
5610 case OMP_CLAUSE_LASTPRIVATE:
5611 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5612 check_non_private = "lastprivate";
5613 goto do_add;
5614 case OMP_CLAUSE_REDUCTION:
5615 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5616 check_non_private = "reduction";
5617 goto do_add;
5619 do_add:
5620 decl = OMP_CLAUSE_DECL (c);
5621 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5623 remove = true;
5624 break;
5626 omp_add_variable (ctx, decl, flags);
5627 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5628 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5630 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5631 GOVD_LOCAL | GOVD_SEEN);
5632 gimplify_omp_ctxp = ctx;
5633 push_gimplify_context (&gctx);
5635 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5636 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5638 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5639 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5640 pop_gimplify_context
5641 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5642 push_gimplify_context (&gctx);
5643 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5644 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5645 pop_gimplify_context
5646 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5647 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5648 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5650 gimplify_omp_ctxp = outer_ctx;
5652 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5653 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5655 gimplify_omp_ctxp = ctx;
5656 push_gimplify_context (&gctx);
5657 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5659 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5660 NULL, NULL);
5661 TREE_SIDE_EFFECTS (bind) = 1;
5662 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5663 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5665 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5666 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5667 pop_gimplify_context
5668 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5669 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5671 gimplify_omp_ctxp = outer_ctx;
5673 if (notice_outer)
5674 goto do_notice;
5675 break;
5677 case OMP_CLAUSE_COPYIN:
5678 case OMP_CLAUSE_COPYPRIVATE:
5679 decl = OMP_CLAUSE_DECL (c);
5680 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5682 remove = true;
5683 break;
5685 do_notice:
5686 if (outer_ctx)
5687 omp_notice_variable (outer_ctx, decl, true);
5688 if (check_non_private
5689 && region_type == ORT_WORKSHARE
5690 && omp_check_private (ctx, decl))
5692 error ("%s variable %qE is private in outer context",
5693 check_non_private, DECL_NAME (decl));
5694 remove = true;
5696 break;
5698 case OMP_CLAUSE_IF:
5699 OMP_CLAUSE_OPERAND (c, 0)
5700 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5701 /* Fall through. */
5703 case OMP_CLAUSE_SCHEDULE:
5704 case OMP_CLAUSE_NUM_THREADS:
5705 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5706 is_gimple_val, fb_rvalue) == GS_ERROR)
5707 remove = true;
5708 break;
5710 case OMP_CLAUSE_NOWAIT:
5711 case OMP_CLAUSE_ORDERED:
5712 case OMP_CLAUSE_UNTIED:
5713 case OMP_CLAUSE_COLLAPSE:
5714 break;
5716 case OMP_CLAUSE_DEFAULT:
5717 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5718 break;
5720 default:
5721 gcc_unreachable ();
5724 if (remove)
5725 *list_p = OMP_CLAUSE_CHAIN (c);
5726 else
5727 list_p = &OMP_CLAUSE_CHAIN (c);
5730 gimplify_omp_ctxp = ctx;
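/* As an illustration, scanning the clauses of

     #pragma omp parallel firstprivate (a) reduction (+:s)

   records A with GOVD_FIRSTPRIVATE | GOVD_EXPLICIT and S with
   GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT in the new context.
   Variables referenced only in the body acquire their mappings later,
   when omp_notice_variable sees them during gimplification of the body.  */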
5733 /* For all variables that were not actually used within the context,
5734 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5736 static int
5737 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5739 tree *list_p = (tree *) data;
5740 tree decl = (tree) n->key;
5741 unsigned flags = n->value;
5742 enum omp_clause_code code;
5743 tree clause;
5744 bool private_debug;
5746 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5747 return 0;
5748 if ((flags & GOVD_SEEN) == 0)
5749 return 0;
5750 if (flags & GOVD_DEBUG_PRIVATE)
5752 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5753 private_debug = true;
5755 else
5756 private_debug
5757 = lang_hooks.decls.omp_private_debug_clause (decl,
5758 !!(flags & GOVD_SHARED));
5759 if (private_debug)
5760 code = OMP_CLAUSE_PRIVATE;
5761 else if (flags & GOVD_SHARED)
5763 if (is_global_var (decl))
5765 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5766 while (ctx != NULL)
5768 splay_tree_node on
5769 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5770 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5771 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5772 break;
5773 ctx = ctx->outer_context;
5775 if (ctx == NULL)
5776 return 0;
5778 code = OMP_CLAUSE_SHARED;
5780 else if (flags & GOVD_PRIVATE)
5781 code = OMP_CLAUSE_PRIVATE;
5782 else if (flags & GOVD_FIRSTPRIVATE)
5783 code = OMP_CLAUSE_FIRSTPRIVATE;
5784 else
5785 gcc_unreachable ();
5787 clause = build_omp_clause (input_location, code);
5788 OMP_CLAUSE_DECL (clause) = decl;
5789 OMP_CLAUSE_CHAIN (clause) = *list_p;
5790 if (private_debug)
5791 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5792 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5793 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5794 *list_p = clause;
5795 lang_hooks.decls.omp_finish_clause (clause);
5797 return 0;
5800 static void
5801 gimplify_adjust_omp_clauses (tree *list_p)
5803 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5804 tree c, decl;
5806 while ((c = *list_p) != NULL)
5808 splay_tree_node n;
5809 bool remove = false;
5811 switch (OMP_CLAUSE_CODE (c))
5813 case OMP_CLAUSE_PRIVATE:
5814 case OMP_CLAUSE_SHARED:
5815 case OMP_CLAUSE_FIRSTPRIVATE:
5816 decl = OMP_CLAUSE_DECL (c);
5817 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5818 remove = !(n->value & GOVD_SEEN);
5819 if (! remove)
5821 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5822 if ((n->value & GOVD_DEBUG_PRIVATE)
5823 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5825 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5826 || ((n->value & GOVD_DATA_SHARE_CLASS)
5827 == GOVD_PRIVATE));
5828 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5829 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5832 break;
5834 case OMP_CLAUSE_LASTPRIVATE:
5835 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5836 accurately reflect the presence of a FIRSTPRIVATE clause. */
5837 decl = OMP_CLAUSE_DECL (c);
5838 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5839 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5840 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5841 break;
5843 case OMP_CLAUSE_REDUCTION:
5844 case OMP_CLAUSE_COPYIN:
5845 case OMP_CLAUSE_COPYPRIVATE:
5846 case OMP_CLAUSE_IF:
5847 case OMP_CLAUSE_NUM_THREADS:
5848 case OMP_CLAUSE_SCHEDULE:
5849 case OMP_CLAUSE_NOWAIT:
5850 case OMP_CLAUSE_ORDERED:
5851 case OMP_CLAUSE_DEFAULT:
5852 case OMP_CLAUSE_UNTIED:
5853 case OMP_CLAUSE_COLLAPSE:
5854 break;
5856 default:
5857 gcc_unreachable ();
5860 if (remove)
5861 *list_p = OMP_CLAUSE_CHAIN (c);
5862 else
5863 list_p = &OMP_CLAUSE_CHAIN (c);
5866 /* Add in any implicit data sharing. */
5867 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5869 gimplify_omp_ctxp = ctx->outer_context;
5870 delete_omp_context (ctx);
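/* For example, a local variable of the enclosing function that is read
   inside a parallel body but never named in a clause typically ends up
   with GOVD_SHARED | GOVD_SEEN, so the walk above synthesizes an implicit
   'shared' clause for it (globals that are not privatized anywhere get no
   clause at all), while an explicit 'private (x)' whose X is never used
   in the body is removed because its entry lacks GOVD_SEEN.  */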
5873 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5874 gimplification of the body, as well as scanning the body for used
5875 variables. We need to do this scan now, because variable-sized
5876 decls will be decomposed during gimplification. */
5878 static void
5879 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5881 tree expr = *expr_p;
5882 gimple g;
5883 gimple_seq body = NULL;
5884 struct gimplify_ctx gctx;
5886 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5887 OMP_PARALLEL_COMBINED (expr)
5888 ? ORT_COMBINED_PARALLEL
5889 : ORT_PARALLEL);
5891 push_gimplify_context (&gctx);
5893 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5894 if (gimple_code (g) == GIMPLE_BIND)
5895 pop_gimplify_context (g);
5896 else
5897 pop_gimplify_context (NULL);
5899 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5901 g = gimple_build_omp_parallel (body,
5902 OMP_PARALLEL_CLAUSES (expr),
5903 NULL_TREE, NULL_TREE);
5904 if (OMP_PARALLEL_COMBINED (expr))
5905 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5906 gimplify_seq_add_stmt (pre_p, g);
5907 *expr_p = NULL_TREE;
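/* A combined construct such as '#pragma omp parallel for' arrives here
   with OMP_PARALLEL_COMBINED set, so its clauses are scanned with
   ORT_COMBINED_PARALLEL, the nested OMP_FOR is gimplified as part of BODY,
   and the resulting GIMPLE_OMP_PARALLEL carries the
   GF_OMP_PARALLEL_COMBINED subcode.  */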
5910 /* Gimplify the contents of an OMP_TASK statement. This involves
5911 gimplification of the body, as well as scanning the body for used
5912 variables. We need to do this scan now, because variable-sized
5913 decls will be decomposed during gimplification. */
5915 static void
5916 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5918 tree expr = *expr_p;
5919 gimple g;
5920 gimple_seq body = NULL;
5921 struct gimplify_ctx gctx;
5923 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5925 push_gimplify_context (&gctx);
5927 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5928 if (gimple_code (g) == GIMPLE_BIND)
5929 pop_gimplify_context (g);
5930 else
5931 pop_gimplify_context (NULL);
5933 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
5935 g = gimple_build_omp_task (body,
5936 OMP_TASK_CLAUSES (expr),
5937 NULL_TREE, NULL_TREE,
5938 NULL_TREE, NULL_TREE, NULL_TREE);
5939 gimplify_seq_add_stmt (pre_p, g);
5940 *expr_p = NULL_TREE;
5943 /* Gimplify the gross structure of an OMP_FOR statement. */
5945 static enum gimplify_status
5946 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
5948 tree for_stmt, decl, var, t;
5949 enum gimplify_status ret = GS_ALL_DONE;
5950 enum gimplify_status tret;
5951 gimple gfor;
5952 gimple_seq for_body, for_pre_body;
5953 int i;
5955 for_stmt = *expr_p;
5957 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
5958 ORT_WORKSHARE);
5960 /* Handle OMP_FOR_INIT. */
5961 for_pre_body = NULL;
5962 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
5963 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
5965 for_body = gimple_seq_alloc ();
5966 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5967 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
5968 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5969 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
5970 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
5972 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
5973 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
5974 decl = TREE_OPERAND (t, 0);
5975 gcc_assert (DECL_P (decl));
5976 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
5977 || POINTER_TYPE_P (TREE_TYPE (decl)));
5979 /* Make sure the iteration variable is private. */
5980 if (omp_is_private (gimplify_omp_ctxp, decl))
5981 omp_notice_variable (gimplify_omp_ctxp, decl, true);
5982 else
5983 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
5985 /* If DECL is not a gimple register, create a temporary variable to act
5986 as an iteration counter. This is valid, since DECL cannot be
5987 modified in the body of the loop. */
5988 if (!is_gimple_reg (decl))
5990 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
5991 TREE_OPERAND (t, 0) = var;
5993 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
5995 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
5997 else
5998 var = decl;
6000 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6001 is_gimple_val, fb_rvalue);
6002 ret = MIN (ret, tret);
6003 if (ret == GS_ERROR)
6004 return ret;
6006 /* Handle OMP_FOR_COND. */
6007 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6008 gcc_assert (COMPARISON_CLASS_P (t));
6009 gcc_assert (TREE_OPERAND (t, 0) == decl);
6011 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6012 is_gimple_val, fb_rvalue);
6013 ret = MIN (ret, tret);
6015 /* Handle OMP_FOR_INCR. */
6016 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6017 switch (TREE_CODE (t))
6019 case PREINCREMENT_EXPR:
6020 case POSTINCREMENT_EXPR:
6021 t = build_int_cst (TREE_TYPE (decl), 1);
6022 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6023 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6024 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6025 break;
6027 case PREDECREMENT_EXPR:
6028 case POSTDECREMENT_EXPR:
6029 t = build_int_cst (TREE_TYPE (decl), -1);
6030 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6031 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6032 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6033 break;
6035 case MODIFY_EXPR:
6036 gcc_assert (TREE_OPERAND (t, 0) == decl);
6037 TREE_OPERAND (t, 0) = var;
6039 t = TREE_OPERAND (t, 1);
6040 switch (TREE_CODE (t))
6042 case PLUS_EXPR:
6043 if (TREE_OPERAND (t, 1) == decl)
6045 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6046 TREE_OPERAND (t, 0) = var;
6047 break;
6050 /* Fallthru. */
6051 case MINUS_EXPR:
6052 case POINTER_PLUS_EXPR:
6053 gcc_assert (TREE_OPERAND (t, 0) == decl);
6054 TREE_OPERAND (t, 0) = var;
6055 break;
6056 default:
6057 gcc_unreachable ();
6060 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6061 is_gimple_val, fb_rvalue);
6062 ret = MIN (ret, tret);
6063 break;
6065 default:
6066 gcc_unreachable ();
6069 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6071 tree c;
6072 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6073 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6074 && OMP_CLAUSE_DECL (c) == decl
6075 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6077 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6078 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6079 gcc_assert (TREE_OPERAND (t, 0) == var);
6080 t = TREE_OPERAND (t, 1);
6081 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6082 || TREE_CODE (t) == MINUS_EXPR
6083 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6084 gcc_assert (TREE_OPERAND (t, 0) == var);
6085 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6086 TREE_OPERAND (t, 1));
6087 gimplify_assign (decl, t,
6088 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6093 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6095 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6097 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6098 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6099 for_pre_body);
6101 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6103 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6104 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6105 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6106 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6107 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6108 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6109 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6110 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6113 gimplify_seq_add_stmt (pre_p, gfor);
6114 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
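/* To illustrate the canonicalization above: an increment written as 'i++'
   is rewritten into 'i = i + 1' (a MODIFY_EXPR), and when the iteration
   variable is not a gimple register -- say it is addressable -- a fresh
   temporary becomes the actual loop index, with 'i = temporary' emitted at
   the top of the loop body so the original variable is still usable
   there.  */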
6117 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6118 In particular, OMP_SECTIONS and OMP_SINGLE. */
6120 static void
6121 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6123 tree expr = *expr_p;
6124 gimple stmt;
6125 gimple_seq body = NULL;
6127 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6128 gimplify_and_add (OMP_BODY (expr), &body);
6129 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6131 if (TREE_CODE (expr) == OMP_SECTIONS)
6132 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6133 else if (TREE_CODE (expr) == OMP_SINGLE)
6134 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6135 else
6136 gcc_unreachable ();
6138 gimplify_seq_add_stmt (pre_p, stmt);
6141 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6142 stabilized the lhs of the atomic operation as *ADDR. Return true if
6143 EXPR is this stabilized form. */
6145 static bool
6146 goa_lhs_expr_p (tree expr, tree addr)
6148 /* Also include casts to other type variants. The C front end is fond
6149 of adding these for e.g. volatile variables. This is like
6150 STRIP_TYPE_NOPS but includes the main variant lookup. */
6151 while ((CONVERT_EXPR_P (expr)
6152 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6153 && TREE_OPERAND (expr, 0) != error_mark_node
6154 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
6155 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
6156 expr = TREE_OPERAND (expr, 0);
6158 if (TREE_CODE (expr) == INDIRECT_REF)
6160 expr = TREE_OPERAND (expr, 0);
6161 while (expr != addr
6162 && (CONVERT_EXPR_P (expr)
6163 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6164 && TREE_CODE (expr) == TREE_CODE (addr)
6165 && TYPE_MAIN_VARIANT (TREE_TYPE (expr))
6166 == TYPE_MAIN_VARIANT (TREE_TYPE (addr)))
6168 expr = TREE_OPERAND (expr, 0);
6169 addr = TREE_OPERAND (addr, 0);
6171 if (expr == addr)
6172 return true;
6173 return (TREE_CODE (addr) == ADDR_EXPR
6174 && TREE_CODE (expr) == ADDR_EXPR
6175 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6177 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6178 return true;
6179 return false;
6182 /* Walk *EXPR_P and replace
6183 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
6184 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6185 a subexpression, 0 if it did not, or -1 if an error was encountered. */
6187 static int
6188 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6189 tree lhs_var)
6191 tree expr = *expr_p;
6192 int saw_lhs;
6194 if (goa_lhs_expr_p (expr, lhs_addr))
6196 *expr_p = lhs_var;
6197 return 1;
6199 if (is_gimple_val (expr))
6200 return 0;
6202 saw_lhs = 0;
6203 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6205 case tcc_binary:
6206 case tcc_comparison:
6207 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6208 lhs_var);
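/* Fall through to also stabilize operand 0.  */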
6209 case tcc_unary:
6210 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6211 lhs_var);
6212 break;
6213 case tcc_expression:
6214 switch (TREE_CODE (expr))
6216 case TRUTH_ANDIF_EXPR:
6217 case TRUTH_ORIF_EXPR:
6218 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6219 lhs_addr, lhs_var);
6220 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6221 lhs_addr, lhs_var);
6222 break;
6223 default:
6224 break;
6226 break;
6227 default:
6228 break;
6231 if (saw_lhs == 0)
6233 enum gimplify_status gs;
6234 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6235 if (gs != GS_ALL_DONE)
6236 saw_lhs = -1;
6239 return saw_lhs;
6243 /* Gimplify an OMP_ATOMIC statement. */
6245 static enum gimplify_status
6246 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6248 tree addr = TREE_OPERAND (*expr_p, 0);
6249 tree rhs = TREE_OPERAND (*expr_p, 1);
6250 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6251 tree tmp_load;
6253 tmp_load = create_tmp_var (type, NULL);
6254 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6255 return GS_ERROR;
6257 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6258 != GS_ALL_DONE)
6259 return GS_ERROR;
6261 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6262 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6263 != GS_ALL_DONE)
6264 return GS_ERROR;
6265 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6266 *expr_p = NULL;
6268 return GS_ALL_DONE;
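/* Sketch of the result: for '#pragma omp atomic' applied to 'x = x + 1'
   the front end passes ADDR == &x and an RHS in which the occurrence of
   the lhs is written as *ADDR.  After stabilization the code above emits,
   roughly, a GIMPLE_OMP_ATOMIC_LOAD of *&x into TMP_LOAD, a statement
   computing TMP_LOAD + 1 into a temporary, and a GIMPLE_OMP_ATOMIC_STORE
   of that temporary.  */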
6272 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6273 expression produces a value to be used as an operand inside a GIMPLE
6274 statement, the value will be stored back in *EXPR_P. This value will
6275 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6276 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6277 emitted in PRE_P and POST_P.
6279 Additionally, this process may overwrite parts of the input
6280 expression during gimplification. Ideally, it should be
6281 possible to do non-destructive gimplification.
6283 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6284 the expression needs to evaluate to a value to be used as
6285 an operand in a GIMPLE statement, this value will be stored in
6286 *EXPR_P on exit. This happens when the caller specifies one
6287 of fb_lvalue or fb_rvalue fallback flags.
6289 PRE_P will contain the sequence of GIMPLE statements corresponding
6290 to the evaluation of EXPR and all the side-effects that must
6291 be executed before the main expression. On exit, the last
6292 statement of PRE_P is the core statement being gimplified. For
6293 instance, when gimplifying 'if (++a)' the last statement in
6294 PRE_P will be 'if (t.1)' where t.1 is the result of
6295 pre-incrementing 'a'.
6297 POST_P will contain the sequence of GIMPLE statements corresponding
6298 to the evaluation of all the side-effects that must be executed
6299 after the main expression. If this is NULL, the post
6300 side-effects are stored at the end of PRE_P.
6302 The reason why the output is split in two is to handle post
6303 side-effects explicitly. In some cases, an expression may have
6304 inner and outer post side-effects which need to be emitted in
6305 an order different from the one given by the recursive
6306 traversal. For instance, for the expression (*p--)++ the post
6307 side-effects of '--' must actually occur *after* the post
6308 side-effects of '++'. However, gimplification will first visit
6309 the inner expression, so if a separate POST sequence was not
6310 used, the resulting sequence would be:
6312 1 t.1 = *p
6313 2 p = p - 1
6314 3 t.2 = t.1 + 1
6315 4 *p = t.2
6317 However, the post-decrement operation in line #2 must not be
6318 evaluated until after the store to *p at line #4, so the
6319 correct sequence should be:
6321 1 t.1 = *p
6322 2 t.2 = t.1 + 1
6323 3 *p = t.2
6324 4 p = p - 1
6326 So, by specifying a separate post queue, it is possible
6327 to emit the post side-effects in the correct order.
6328 If POST_P is NULL, an internal queue will be used. Before
6329 returning to the caller, the sequence POST_P is appended to
6330 the main output sequence PRE_P.
6332 GIMPLE_TEST_F points to a function that takes a tree T and
6333 returns nonzero if T is in the GIMPLE form requested by the
6334 caller. The GIMPLE predicates are in tree-gimple.c.
6336 FALLBACK tells the function what sort of a temporary we want if
6337 gimplification cannot produce an expression that complies with
6338 GIMPLE_TEST_F.
6340 fb_none means that no temporary should be generated
6341 fb_rvalue means that an rvalue is OK to generate
6342 fb_lvalue means that an lvalue is OK to generate
6343 fb_either means that either is OK, but an lvalue is preferable.
6344 fb_mayfail means that gimplification may fail (in which case
6345 GS_ERROR will be returned)
6347 The return value is either GS_ERROR or GS_ALL_DONE, since this
6348 function iterates until EXPR is completely gimplified or an error
6349 occurs. */
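/* A typical internal use, as seen throughout this file, forces a single
   operand into a gimple value with an rvalue fallback:

     gimplify_expr (&TREE_OPERAND (expr, 1), pre_p, post_p,
                    is_gimple_val, fb_rvalue);

   Any statements needed to compute the operand land in PRE_P (or POST_P),
   and the operand itself is replaced by something satisfying
   is_gimple_val: a constant, a declaration or a temporary.  */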
6351 enum gimplify_status
6352 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6353 bool (*gimple_test_f) (tree), fallback_t fallback)
6355 tree tmp;
6356 gimple_seq internal_pre = NULL;
6357 gimple_seq internal_post = NULL;
6358 tree save_expr;
6359 bool is_statement;
6360 location_t saved_location;
6361 enum gimplify_status ret;
6362 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6364 save_expr = *expr_p;
6365 if (save_expr == NULL_TREE)
6366 return GS_ALL_DONE;
6368 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6369 is_statement = gimple_test_f == is_gimple_stmt;
6370 if (is_statement)
6371 gcc_assert (pre_p);
6373 /* Consistency checks. */
6374 if (gimple_test_f == is_gimple_reg)
6375 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6376 else if (gimple_test_f == is_gimple_val
6377 || gimple_test_f == is_gimple_call_addr
6378 || gimple_test_f == is_gimple_condexpr
6379 || gimple_test_f == is_gimple_mem_rhs
6380 || gimple_test_f == is_gimple_mem_rhs_or_call
6381 || gimple_test_f == is_gimple_reg_rhs
6382 || gimple_test_f == is_gimple_reg_rhs_or_call
6383 || gimple_test_f == is_gimple_asm_val)
6384 gcc_assert (fallback & fb_rvalue);
6385 else if (gimple_test_f == is_gimple_min_lval
6386 || gimple_test_f == is_gimple_lvalue)
6387 gcc_assert (fallback & fb_lvalue);
6388 else if (gimple_test_f == is_gimple_addressable)
6389 gcc_assert (fallback & fb_either);
6390 else if (gimple_test_f == is_gimple_stmt)
6391 gcc_assert (fallback == fb_none);
6392 else
6394 /* We should have recognized the GIMPLE_TEST_F predicate to
6395 know what kind of fallback to use in case a temporary is
6396 needed to hold the value or address of *EXPR_P. */
6397 gcc_unreachable ();
6400 /* We used to check the predicate here and return immediately if it
6401 succeeds. This is wrong; the design is for gimplification to be
6402 idempotent, and for the predicates to only test for valid forms, not
6403 whether they are fully simplified. */
6404 if (pre_p == NULL)
6405 pre_p = &internal_pre;
6407 if (post_p == NULL)
6408 post_p = &internal_post;
6410 /* Remember the last statements added to PRE_P and POST_P. Every
6411 new statement added by the gimplification helpers needs to be
6412 annotated with location information. To centralize the
6413 responsibility, we remember the last statement that had been
6414 added to both queues before gimplifying *EXPR_P. If
6415 gimplification produces new statements in PRE_P and POST_P, those
6416 statements will be annotated with the same location information
6417 as *EXPR_P. */
6418 pre_last_gsi = gsi_last (*pre_p);
6419 post_last_gsi = gsi_last (*post_p);
6421 saved_location = input_location;
6422 if (save_expr != error_mark_node
6423 && EXPR_HAS_LOCATION (*expr_p))
6424 input_location = EXPR_LOCATION (*expr_p);
6426 /* Loop over the specific gimplifiers until the toplevel node
6427 remains the same. */
6430 /* Strip away as many useless type conversions as possible
6431 at the toplevel. */
6432 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6434 /* Remember the expr. */
6435 save_expr = *expr_p;
6437 /* Die, die, die, my darling. */
6438 if (save_expr == error_mark_node
6439 || (TREE_TYPE (save_expr)
6440 && TREE_TYPE (save_expr) == error_mark_node))
6442 ret = GS_ERROR;
6443 break;
6446 /* Do any language-specific gimplification. */
6447 ret = ((enum gimplify_status)
6448 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6449 if (ret == GS_OK)
6451 if (*expr_p == NULL_TREE)
6452 break;
6453 if (*expr_p != save_expr)
6454 continue;
6456 else if (ret != GS_UNHANDLED)
6457 break;
6459 ret = GS_OK;
6460 switch (TREE_CODE (*expr_p))
6462 /* First deal with the special cases. */
6464 case POSTINCREMENT_EXPR:
6465 case POSTDECREMENT_EXPR:
6466 case PREINCREMENT_EXPR:
6467 case PREDECREMENT_EXPR:
6468 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6469 fallback != fb_none);
6470 break;
6472 case ARRAY_REF:
6473 case ARRAY_RANGE_REF:
6474 case REALPART_EXPR:
6475 case IMAGPART_EXPR:
6476 case COMPONENT_REF:
6477 case VIEW_CONVERT_EXPR:
6478 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6479 fallback ? fallback : fb_rvalue);
6480 break;
6482 case COND_EXPR:
6483 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6485 /* C99 code may assign to an array in a structure value of a
6486 conditional expression, and this has undefined behavior
6487 only on execution, so create a temporary if an lvalue is
6488 required. */
6489 if (fallback == fb_lvalue)
6491 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6492 mark_addressable (*expr_p);
6494 break;
6496 case CALL_EXPR:
6497 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6499 /* C99 code may assign to an array in a structure returned
6500 from a function, and this has undefined behavior only on
6501 execution, so create a temporary if an lvalue is
6502 required. */
6503 if (fallback == fb_lvalue)
6505 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6506 mark_addressable (*expr_p);
6508 break;
6510 case TREE_LIST:
6511 gcc_unreachable ();
6513 case COMPOUND_EXPR:
6514 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6515 break;
6517 case COMPOUND_LITERAL_EXPR:
6518 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6519 break;
6521 case MODIFY_EXPR:
6522 case INIT_EXPR:
6523 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6524 fallback != fb_none);
6525 break;
6527 case TRUTH_ANDIF_EXPR:
6528 case TRUTH_ORIF_EXPR:
6529 /* Pass the source location of the outer expression. */
6530 ret = gimplify_boolean_expr (expr_p, saved_location);
6531 break;
6533 case TRUTH_NOT_EXPR:
6534 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6536 tree type = TREE_TYPE (*expr_p);
6537 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6538 ret = GS_OK;
6539 break;
6542 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6543 is_gimple_val, fb_rvalue);
6544 recalculate_side_effects (*expr_p);
6545 break;
6547 case ADDR_EXPR:
6548 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6549 break;
6551 case VA_ARG_EXPR:
6552 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6553 break;
6555 CASE_CONVERT:
6556 if (IS_EMPTY_STMT (*expr_p))
6558 ret = GS_ALL_DONE;
6559 break;
6562 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6563 || fallback == fb_none)
6565 /* Just strip a conversion to void (or in void context) and
6566 try again. */
6567 *expr_p = TREE_OPERAND (*expr_p, 0);
6568 break;
6571 ret = gimplify_conversion (expr_p);
6572 if (ret == GS_ERROR)
6573 break;
6574 if (*expr_p != save_expr)
6575 break;
6576 /* FALLTHRU */
6578 case FIX_TRUNC_EXPR:
6579 /* unary_expr: ... | '(' cast ')' val | ... */
6580 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6581 is_gimple_val, fb_rvalue);
6582 recalculate_side_effects (*expr_p);
6583 break;
6585 case INDIRECT_REF:
6586 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6587 if (*expr_p != save_expr)
6588 break;
6589 /* else fall through. */
6590 case ALIGN_INDIRECT_REF:
6591 case MISALIGNED_INDIRECT_REF:
6592 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6593 is_gimple_reg, fb_rvalue);
6594 recalculate_side_effects (*expr_p);
6595 break;
6597 /* Constants need not be gimplified. */
6598 case INTEGER_CST:
6599 case REAL_CST:
6600 case FIXED_CST:
6601 case STRING_CST:
6602 case COMPLEX_CST:
6603 case VECTOR_CST:
6604 ret = GS_ALL_DONE;
6605 break;
6607 case CONST_DECL:
6608 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6609 CONST_DECL node. Otherwise the decl is replaceable by its
6610 value. */
6611 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6612 if (fallback & fb_lvalue)
6613 ret = GS_ALL_DONE;
6614 else
6615 *expr_p = DECL_INITIAL (*expr_p);
6616 break;
6618 case DECL_EXPR:
6619 ret = gimplify_decl_expr (expr_p, pre_p);
6620 break;
6622 case EXC_PTR_EXPR:
6623 /* FIXME make this a decl. */
6624 ret = GS_ALL_DONE;
6625 break;
6627 case BIND_EXPR:
6628 ret = gimplify_bind_expr (expr_p, pre_p);
6629 break;
6631 case LOOP_EXPR:
6632 ret = gimplify_loop_expr (expr_p, pre_p);
6633 break;
6635 case SWITCH_EXPR:
6636 ret = gimplify_switch_expr (expr_p, pre_p);
6637 break;
6639 case EXIT_EXPR:
6640 ret = gimplify_exit_expr (expr_p);
6641 break;
6643 case GOTO_EXPR:
6644 /* If the target is not a LABEL_DECL, then it is a computed jump
6645 and the target needs to be gimplified. */
6646 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6648 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6649 NULL, is_gimple_val, fb_rvalue);
6650 if (ret == GS_ERROR)
6651 break;
6653 gimplify_seq_add_stmt (pre_p,
6654 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6655 break;
6657 case PREDICT_EXPR:
6658 gimplify_seq_add_stmt (pre_p,
6659 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6660 PREDICT_EXPR_OUTCOME (*expr_p)));
6661 ret = GS_ALL_DONE;
6662 break;
6664 case LABEL_EXPR:
6665 ret = GS_ALL_DONE;
6666 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6667 == current_function_decl);
6668 gimplify_seq_add_stmt (pre_p,
6669 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6670 break;
6672 case CASE_LABEL_EXPR:
6673 ret = gimplify_case_label_expr (expr_p, pre_p);
6674 break;
6676 case RETURN_EXPR:
6677 ret = gimplify_return_expr (*expr_p, pre_p);
6678 break;
6680 case CONSTRUCTOR:
6681 /* Don't reduce this in place; let gimplify_init_constructor work its
6682 magic. But if we're just elaborating this for side effects, just
6683 gimplify any element that has side-effects. */
6684 if (fallback == fb_none)
6686 unsigned HOST_WIDE_INT ix;
6687 constructor_elt *ce;
6688 tree temp = NULL_TREE;
6689 for (ix = 0;
6690 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6691 ix, ce);
6692 ix++)
6693 if (TREE_SIDE_EFFECTS (ce->value))
6694 append_to_statement_list (ce->value, &temp);
6696 *expr_p = temp;
6697 ret = GS_OK;
6699 /* C99 code may assign to an array in a constructed
6700 structure or union, and this has undefined behavior only
6701 on execution, so create a temporary if an lvalue is
6702 required. */
6703 else if (fallback == fb_lvalue)
6705 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6706 mark_addressable (*expr_p);
6708 else
6709 ret = GS_ALL_DONE;
6710 break;
6712 /* The following are special cases that are not handled by the
6713 original GIMPLE grammar. */
6715 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6716 eliminated. */
6717 case SAVE_EXPR:
6718 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6719 break;
6721 case BIT_FIELD_REF:
6723 enum gimplify_status r0, r1, r2;
6725 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6726 post_p, is_gimple_lvalue, fb_either);
6727 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6728 post_p, is_gimple_val, fb_rvalue);
6729 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6730 post_p, is_gimple_val, fb_rvalue);
6731 recalculate_side_effects (*expr_p);
6733 ret = MIN (r0, MIN (r1, r2));
6735 break;
6737 case TARGET_MEM_REF:
6739 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6741 if (TMR_SYMBOL (*expr_p))
6742 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6743 post_p, is_gimple_lvalue, fb_either);
6744 else if (TMR_BASE (*expr_p))
6745 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6746 post_p, is_gimple_val, fb_either);
6747 if (TMR_INDEX (*expr_p))
6748 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6749 post_p, is_gimple_val, fb_rvalue);
6750 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6751 ret = MIN (r0, r1);
6753 break;
6755 case NON_LVALUE_EXPR:
6756 /* This should have been stripped above. */
6757 gcc_unreachable ();
6759 case ASM_EXPR:
6760 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6761 break;
6763 case TRY_FINALLY_EXPR:
6764 case TRY_CATCH_EXPR:
6766 gimple_seq eval, cleanup;
6767 gimple try_;
6769 eval = cleanup = NULL;
6770 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6771 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6772 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6773 if (gimple_seq_empty_p (cleanup))
6775 gimple_seq_add_seq (pre_p, eval);
6776 ret = GS_ALL_DONE;
6777 break;
6779 try_ = gimple_build_try (eval, cleanup,
6780 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6781 ? GIMPLE_TRY_FINALLY
6782 : GIMPLE_TRY_CATCH);
6783 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6784 gimple_try_set_catch_is_cleanup (try_,
6785 TRY_CATCH_IS_CLEANUP (*expr_p));
6786 gimplify_seq_add_stmt (pre_p, try_);
6787 ret = GS_ALL_DONE;
6788 break;
6791 case CLEANUP_POINT_EXPR:
6792 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6793 break;
6795 case TARGET_EXPR:
6796 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6797 break;
6799 case CATCH_EXPR:
6801 gimple c;
6802 gimple_seq handler = NULL;
6803 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6804 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6805 gimplify_seq_add_stmt (pre_p, c);
6806 ret = GS_ALL_DONE;
6807 break;
6810 case EH_FILTER_EXPR:
6812 gimple ehf;
6813 gimple_seq failure = NULL;
6815 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6816 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6817 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6818 gimple_eh_filter_set_must_not_throw
6819 (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
6820 gimplify_seq_add_stmt (pre_p, ehf);
6821 ret = GS_ALL_DONE;
6822 break;
6825 case OBJ_TYPE_REF:
6827 enum gimplify_status r0, r1;
6828 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6829 post_p, is_gimple_val, fb_rvalue);
6830 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6831 post_p, is_gimple_val, fb_rvalue);
6832 TREE_SIDE_EFFECTS (*expr_p) = 0;
6833 ret = MIN (r0, r1);
6835 break;
6837 case LABEL_DECL:
6838 /* We get here when taking the address of a label. We mark
6839 the label as "forced", meaning it can never be removed and
6840 it is a potential target for any computed goto. */
6841 FORCED_LABEL (*expr_p) = 1;
6842 ret = GS_ALL_DONE;
6843 break;
6845 case STATEMENT_LIST:
6846 ret = gimplify_statement_list (expr_p, pre_p);
6847 break;
6849 case WITH_SIZE_EXPR:
6851 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6852 post_p == &internal_post ? NULL : post_p,
6853 gimple_test_f, fallback);
6854 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6855 is_gimple_val, fb_rvalue);
6857 break;
6859 case VAR_DECL:
6860 case PARM_DECL:
6861 ret = gimplify_var_or_parm_decl (expr_p);
6862 break;
6864 case RESULT_DECL:
6865 /* When within an OpenMP context, notice uses of variables. */
6866 if (gimplify_omp_ctxp)
6867 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6868 ret = GS_ALL_DONE;
6869 break;
6871 case SSA_NAME:
6872 /* Allow callbacks into the gimplifier during optimization. */
6873 ret = GS_ALL_DONE;
6874 break;
6876 case OMP_PARALLEL:
6877 gimplify_omp_parallel (expr_p, pre_p);
6878 ret = GS_ALL_DONE;
6879 break;
6881 case OMP_TASK:
6882 gimplify_omp_task (expr_p, pre_p);
6883 ret = GS_ALL_DONE;
6884 break;
6886 case OMP_FOR:
6887 ret = gimplify_omp_for (expr_p, pre_p);
6888 break;
6890 case OMP_SECTIONS:
6891 case OMP_SINGLE:
6892 gimplify_omp_workshare (expr_p, pre_p);
6893 ret = GS_ALL_DONE;
6894 break;
6896 case OMP_SECTION:
6897 case OMP_MASTER:
6898 case OMP_ORDERED:
6899 case OMP_CRITICAL:
6901 gimple_seq body = NULL;
6902 gimple g;
6904 gimplify_and_add (OMP_BODY (*expr_p), &body);
6905 switch (TREE_CODE (*expr_p))
6907 case OMP_SECTION:
6908 g = gimple_build_omp_section (body);
6909 break;
6910 case OMP_MASTER:
6911 g = gimple_build_omp_master (body);
6912 break;
6913 case OMP_ORDERED:
6914 g = gimple_build_omp_ordered (body);
6915 break;
6916 case OMP_CRITICAL:
6917 g = gimple_build_omp_critical (body,
6918 OMP_CRITICAL_NAME (*expr_p));
6919 break;
6920 default:
6921 gcc_unreachable ();
6923 gimplify_seq_add_stmt (pre_p, g);
6924 ret = GS_ALL_DONE;
6925 break;
6928 case OMP_ATOMIC:
6929 ret = gimplify_omp_atomic (expr_p, pre_p);
6930 break;
6932 case POINTER_PLUS_EXPR:
6933 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6934 The second form is a gimple immediate, saving the need for an extra statement. */
6936 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6937 && (tmp = maybe_fold_offset_to_address
6938 (EXPR_LOCATION (*expr_p),
6939 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6940 TREE_TYPE (*expr_p))))
6942 *expr_p = tmp;
6943 break;
6945 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6946 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6947 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6948 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6949 0),0)))
6950 && (tmp = maybe_fold_offset_to_address
6951 (EXPR_LOCATION (*expr_p),
6952 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6953 TREE_OPERAND (*expr_p, 1),
6954 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6955 0)))))
6957 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6958 break;
6960 /* FALLTHRU */
6962 default:
6963 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6965 case tcc_comparison:
6966 /* Handle comparison of objects of non scalar mode aggregates
6967 with a call to memcmp. It would be nice to only have to do
6968 this for variable-sized objects, but then we'd have to allow
6969 the same nest of reference nodes we allow for MODIFY_EXPR and
6970 that's too complex.
6972 Compare scalar mode aggregates as scalar mode values. Using
6973 memcmp for them would be very inefficient at best, and is
6974 plain wrong if bitfields are involved. */
6976 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6978 if (!AGGREGATE_TYPE_P (type))
6979 goto expr_2;
6980 else if (TYPE_MODE (type) != BLKmode)
6981 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6982 else
6983 ret = gimplify_variable_sized_compare (expr_p);
6985 break;
6988 /* If *EXPR_P does not need to be special-cased, handle it
6989 according to its class. */
6990 case tcc_unary:
6991 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6992 post_p, is_gimple_val, fb_rvalue);
6993 break;
6995 case tcc_binary:
6996 expr_2:
6998 enum gimplify_status r0, r1;
7000 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7001 post_p, is_gimple_val, fb_rvalue);
7002 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7003 post_p, is_gimple_val, fb_rvalue);
7005 ret = MIN (r0, r1);
7006 break;
7009 case tcc_declaration:
7010 case tcc_constant:
7011 ret = GS_ALL_DONE;
7012 goto dont_recalculate;
7014 default:
7015 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7016 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7017 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7018 goto expr_2;
7021 recalculate_side_effects (*expr_p);
7023 dont_recalculate:
7024 break;
7027 /* If we replaced *expr_p, gimplify again. */
7028 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7029 ret = GS_ALL_DONE;
7031 while (ret == GS_OK);
7033 /* If we encountered an error_mark somewhere nested inside, either
7034 stub out the statement or propagate the error back out. */
7035 if (ret == GS_ERROR)
7037 if (is_statement)
7038 *expr_p = NULL;
7039 goto out;
7042 /* This was only valid as a return value from the langhook, which
7043 we handled. Make sure it doesn't escape from any other context. */
7044 gcc_assert (ret != GS_UNHANDLED);
7046 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7048 /* We aren't looking for a value, and we don't have a valid
7049 statement. If it doesn't have side-effects, throw it away. */
7050 if (!TREE_SIDE_EFFECTS (*expr_p))
7051 *expr_p = NULL;
7052 else if (!TREE_THIS_VOLATILE (*expr_p))
7054 /* This is probably a _REF that contains something nested that
7055 has side effects. Recurse through the operands to find it. */
7056 enum tree_code code = TREE_CODE (*expr_p);
7058 switch (code)
7060 case COMPONENT_REF:
7061 case REALPART_EXPR:
7062 case IMAGPART_EXPR:
7063 case VIEW_CONVERT_EXPR:
7064 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7065 gimple_test_f, fallback);
7066 break;
7068 case ARRAY_REF:
7069 case ARRAY_RANGE_REF:
7070 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7071 gimple_test_f, fallback);
7072 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7073 gimple_test_f, fallback);
7074 break;
7076 default:
7077 /* Anything else with side-effects must be converted to
7078 a valid statement before we get here. */
7079 gcc_unreachable ();
7082 *expr_p = NULL;
7084 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7085 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7087 /* Historically, the compiler has treated a bare reference
7088 to a non-BLKmode volatile lvalue as forcing a load. */
7089 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7091 /* Normally, we do not want to create a temporary for a
7092 TREE_ADDRESSABLE type because such a type should not be
7093 copied by bitwise-assignment. However, we make an
7094 exception here, as all we are doing here is ensuring that
7095 we read the bytes that make up the type. We use
7096 create_tmp_var_raw because create_tmp_var will abort when
7097 given a TREE_ADDRESSABLE type. */
7098 tree tmp = create_tmp_var_raw (type, "vol");
7099 gimple_add_tmp_var (tmp);
7100 gimplify_assign (tmp, *expr_p, pre_p);
7101 *expr_p = NULL;
7103 else
7104 /* We can't do anything useful with a volatile reference to
7105 an incomplete type, so just throw it away. Likewise for
7106 a BLKmode type, since any implicit inner load should
7107 already have been turned into an explicit one by the
7108 gimplification process. */
7109 *expr_p = NULL;
7112 /* If we are gimplifying at the statement level, we're done. Tack
7113 everything together and return. */
7114 if (fallback == fb_none || is_statement)
7116 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7117 it out for GC to reclaim it. */
7118 *expr_p = NULL_TREE;
7120 if (!gimple_seq_empty_p (internal_pre)
7121 || !gimple_seq_empty_p (internal_post))
7123 gimplify_seq_add_seq (&internal_pre, internal_post);
7124 gimplify_seq_add_seq (pre_p, internal_pre);
7127 /* The result of gimplifying *EXPR_P is going to be the last few
7128 statements in *PRE_P and *POST_P. Add location information
7129 to all the statements that were added by the gimplification
7130 helpers. */
7131 if (!gimple_seq_empty_p (*pre_p))
7132 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7134 if (!gimple_seq_empty_p (*post_p))
7135 annotate_all_with_location_after (*post_p, post_last_gsi,
7136 input_location);
7138 goto out;
7141 #ifdef ENABLE_GIMPLE_CHECKING
7142 if (*expr_p)
7144 enum tree_code code = TREE_CODE (*expr_p);
7145 /* These expressions should already be in gimple IR form. */
7146 gcc_assert (code != MODIFY_EXPR
7147 && code != ASM_EXPR
7148 && code != BIND_EXPR
7149 && code != CATCH_EXPR
7150 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7151 && code != EH_FILTER_EXPR
7152 && code != GOTO_EXPR
7153 && code != LABEL_EXPR
7154 && code != LOOP_EXPR
7155 && code != RESX_EXPR
7156 && code != SWITCH_EXPR
7157 && code != TRY_FINALLY_EXPR
7158 && code != OMP_CRITICAL
7159 && code != OMP_FOR
7160 && code != OMP_MASTER
7161 && code != OMP_ORDERED
7162 && code != OMP_PARALLEL
7163 && code != OMP_SECTIONS
7164 && code != OMP_SECTION
7165 && code != OMP_SINGLE);
7167 #endif
7169 /* Otherwise we're gimplifying a subexpression, so the resulting
7170 value is interesting. If it's a valid operand that matches
7171 GIMPLE_TEST_F, we're done. Unless we are handling some
7172 post-effects internally; if that's the case, we need to copy into
7173 a temporary before adding the post-effects to POST_P. */
7174 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7175 goto out;
7177 /* Otherwise, we need to create a new temporary for the gimplified
7178 expression. */
7180 /* We can't return an lvalue if we have an internal postqueue. The
7181 object the lvalue refers to would (probably) be modified by the
7182 postqueue; we need to copy the value out first, which means an
7183 rvalue. */
7184 if ((fallback & fb_lvalue)
7185 && gimple_seq_empty_p (internal_post)
7186 && is_gimple_addressable (*expr_p))
7188 /* An lvalue will do. Take the address of the expression, store it
7189 in a temporary, and replace the expression with an INDIRECT_REF of
7190 that temporary. */
7191 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7192 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7193 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7195 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7197 /* An rvalue will do. Assign the gimplified expression into a
7198 new temporary TMP and replace the original expression with
7199 TMP. First, make sure that the expression has a type so that
7200 it can be assigned into a temporary. */
7201 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7203 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7204 /* The postqueue might change the value of the expression between
7205 the initialization and use of the temporary, so we can't use a
7206 formal temp. FIXME do we care? */
7208 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7209 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7210 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7211 DECL_GIMPLE_REG_P (*expr_p) = 1;
7213 else
7214 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7216 else
7218 #ifdef ENABLE_GIMPLE_CHECKING
7219 if (!(fallback & fb_mayfail))
7221 fprintf (stderr, "gimplification failed:\n");
7222 print_generic_expr (stderr, *expr_p, 0);
7223 debug_tree (*expr_p);
7224 internal_error ("gimplification failed");
7226 #endif
7227 gcc_assert (fallback & fb_mayfail);
7229 /* If this is an asm statement, and the user asked for the
7230 impossible, don't die. Fail and let gimplify_asm_expr
7231 issue an error. */
7232 ret = GS_ERROR;
7233 goto out;
7236 /* Make sure the temporary matches our predicate. */
7237 gcc_assert ((*gimple_test_f) (*expr_p));
7239 if (!gimple_seq_empty_p (internal_post))
7241 annotate_all_with_location (internal_post, input_location);
7242 gimplify_seq_add_seq (pre_p, internal_post);
7245 out:
7246 input_location = saved_location;
7247 return ret;
7250 /* Look through TYPE for variable-sized objects and gimplify each such
7251 size that we find. Add to LIST_P any statements generated. */
7253 void
7254 gimplify_type_sizes (tree type, gimple_seq *list_p)
7256 tree field, t;
7258 if (type == NULL || type == error_mark_node)
7259 return;
7261 /* We first do the main variant, then copy into any other variants. */
7262 type = TYPE_MAIN_VARIANT (type);
7264 /* Avoid infinite recursion. */
7265 if (TYPE_SIZES_GIMPLIFIED (type))
7266 return;
7268 TYPE_SIZES_GIMPLIFIED (type) = 1;
7270 switch (TREE_CODE (type))
7272 case INTEGER_TYPE:
7273 case ENUMERAL_TYPE:
7274 case BOOLEAN_TYPE:
7275 case REAL_TYPE:
7276 case FIXED_POINT_TYPE:
7277 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7278 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7280 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7282 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7283 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7285 break;
7287 case ARRAY_TYPE:
7288 /* These types may not have declarations, so handle them here. */
7289 gimplify_type_sizes (TREE_TYPE (type), list_p);
7290 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7291 /* When not optimizing, ensure VLA bounds aren't removed. */
7292 if (!optimize
7293 && TYPE_DOMAIN (type)
7294 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7296 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7297 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7298 DECL_IGNORED_P (t) = 0;
7299 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7300 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7301 DECL_IGNORED_P (t) = 0;
7303 break;
7305 case RECORD_TYPE:
7306 case UNION_TYPE:
7307 case QUAL_UNION_TYPE:
7308 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7309 if (TREE_CODE (field) == FIELD_DECL)
7311 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7312 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7313 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7314 gimplify_type_sizes (TREE_TYPE (field), list_p);
7316 break;
7318 case POINTER_TYPE:
7319 case REFERENCE_TYPE:
7320 /* We used to recurse on the pointed-to type here, which turned out to
7321 be incorrect because its definition might refer to variables not
7322 yet initialized at this point if a forward declaration is involved.
7324 It was actually useful for anonymous pointed-to types to ensure
7325 that the sizes evaluation dominates every possible later use of the
7326 values. Restricting to such types here would be safe since there
7327 is no possible forward declaration around, but would introduce an
7328 undesirable middle-end semantic to anonymity. We then defer to
7329 front-ends the responsibility of ensuring that the sizes are
7330 evaluated both early and late enough, e.g. by attaching artificial
7331 type declarations to the tree. */
7332 break;
7334 default:
7335 break;
7338 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7339 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7341 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7343 TYPE_SIZE (t) = TYPE_SIZE (type);
7344 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7345 TYPE_SIZES_GIMPLIFIED (t) = 1;
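/* For a variable-length array such as 'int a[n + 1]' this walks into the
   ARRAY_TYPE's domain and gimplifies its bounds as well as the type's
   TYPE_SIZE and TYPE_SIZE_UNIT, so size expressions involving N are
   typically evaluated into temporaries appended to LIST_P; the gimplified
   sizes are then copied to every other variant of the type.  */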
7349 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7350 a size or position, has had all of its SAVE_EXPRs evaluated.
7351 We add any required statements to *STMT_P. */
7353 void
7354 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7356 tree type, expr = *expr_p;
7358 /* We don't do anything if the value isn't there, is constant, or contains
7359 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7360 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7361 will want to replace it with a new variable, but that will cause problems
7362 if this type is from outside the function. It's OK to have that here. */
7363 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7364 || TREE_CODE (expr) == VAR_DECL
7365 || CONTAINS_PLACEHOLDER_P (expr))
7366 return;
7368 type = TREE_TYPE (expr);
7369 *expr_p = unshare_expr (expr);
7371 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7372 expr = *expr_p;
7374 /* Verify that we have an exact type match with the original expression.
7375 In particular, we do not wish to drop a "sizetype" in favour of a
7376 type of similar dimensions. We don't want to pollute the generic
7377 type-stripping code with this knowledge because it doesn't matter
7378 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7379 and friends retain their "sizetype-ness". */
7380 if (TREE_TYPE (expr) != type
7381 && TREE_CODE (type) == INTEGER_TYPE
7382 && TYPE_IS_SIZETYPE (type))
7384 tree tmp;
7385 gimple stmt;
7387 *expr_p = create_tmp_var (type, NULL);
7388 tmp = build1 (NOP_EXPR, type, expr);
7389 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7390 if (EXPR_HAS_LOCATION (expr))
7391 gimple_set_location (stmt, EXPR_LOCATION (expr));
7392 else
7393 gimple_set_location (stmt, input_location);
7398 /* Gimplify the body of statements pointed to by BODY_P and return a
7399 GIMPLE_BIND containing the sequence of GIMPLE statements
7400 corresponding to BODY_P. FNDECL is the function decl containing
7401 *BODY_P. */
7403 gimple
7404 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7406 location_t saved_location = input_location;
7407 gimple_seq parm_stmts, seq;
7408 gimple outer_bind;
7409 struct gimplify_ctx gctx;
7411 timevar_push (TV_TREE_GIMPLIFY);
7413 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7414 gimplification. */
7415 default_rtl_profile ();
7417 gcc_assert (gimplify_ctxp == NULL);
7418 push_gimplify_context (&gctx);
7420 /* Unshare most shared trees in the body and in that of any nested functions.
7421 It would seem we don't have to do this for nested functions because
7422 they are supposed to be output and then the outer function gimplified
7423 first, but the g++ front end doesn't always do it that way. */
7424 unshare_body (body_p, fndecl);
7425 unvisit_body (body_p, fndecl);
7427 if (cgraph_node (fndecl)->origin)
7428 nonlocal_vlas = pointer_set_create ();
7430 /* Make sure input_location isn't set to something weird. */
7431 input_location = DECL_SOURCE_LOCATION (fndecl);
7433 /* Resolve callee-copies. This has to be done before processing
7434 the body so that DECL_VALUE_EXPR gets processed correctly. */
7435 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7437 /* Gimplify the function's body. */
7438 seq = NULL;
7439 gimplify_stmt (body_p, &seq);
7440 outer_bind = gimple_seq_first_stmt (seq);
7441 if (!outer_bind)
7443 outer_bind = gimple_build_nop ();
7444 gimplify_seq_add_stmt (&seq, outer_bind);
7447 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7448 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7449 if (gimple_code (outer_bind) == GIMPLE_BIND
7450 && gimple_seq_first (seq) == gimple_seq_last (seq))
7452 else
7453 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7455 *body_p = NULL_TREE;
7457 /* If we had callee-copy statements, insert them at the beginning
7458 of the function. */
7459 if (!gimple_seq_empty_p (parm_stmts))
7461 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7462 gimple_bind_set_body (outer_bind, parm_stmts);
7465 if (nonlocal_vlas)
7467 pointer_set_destroy (nonlocal_vlas);
7468 nonlocal_vlas = NULL;
7471 pop_gimplify_context (outer_bind);
7472 gcc_assert (gimplify_ctxp == NULL);
7474 #ifdef ENABLE_TYPES_CHECKING
7475 if (!errorcount && !sorrycount)
7476 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7477 #endif
7479 timevar_pop (TV_TREE_GIMPLIFY);
7480 input_location = saved_location;
7482 return outer_bind;
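/* [Editorial usage sketch, not part of gimplify.c.]  Callers of gimplify_body
   can rely on the invariant established above: the result is always a single
   GIMPLE_BIND.  The wrapper below is hypothetical and assumes the caller has
   already set up current_function_decl and cfun, as gimplify_function_tree
   does.  */

static gimple ATTRIBUTE_UNUSED
example_gimplify_body_of (tree fndecl)
{
  gimple bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
  /* The whole lowered body is wrapped in exactly one GIMPLE_BIND.  */
  gcc_assert (gimple_code (bind) == GIMPLE_BIND);
  return bind;
}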
7485 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7486 node for the function we want to gimplify.
7488 The resulting sequence of GIMPLE statements is stored as the body of
7489 FNDECL (via gimple_set_body); DECL_SAVED_TREE is cleared afterwards. */
7491 void
7492 gimplify_function_tree (tree fndecl)
7494 tree oldfn, parm, ret;
7495 gimple_seq seq;
7496 gimple bind;
7498 gcc_assert (!gimple_body (fndecl));
7500 oldfn = current_function_decl;
7501 current_function_decl = fndecl;
7502 if (DECL_STRUCT_FUNCTION (fndecl))
7503 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7504 else
7505 push_struct_function (fndecl);
7507 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7509 /* Preliminarily mark non-addressed complex variables as eligible
7510 for promotion to gimple registers. We'll transform their uses
7511 as we find them. */
7512 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7513 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7514 && !TREE_THIS_VOLATILE (parm)
7515 && !needs_to_live_in_memory (parm))
7516 DECL_GIMPLE_REG_P (parm) = 1;
7519 ret = DECL_RESULT (fndecl);
7520 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7521 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7522 && !needs_to_live_in_memory (ret))
7523 DECL_GIMPLE_REG_P (ret) = 1;
7525 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7527 /* The tree body of the function is no longer needed; replace it
7528 with the new GIMPLE body. */
7529 seq = gimple_seq_alloc ();
7530 gimple_seq_add_stmt (&seq, bind);
7531 gimple_set_body (fndecl, seq);
7533 /* If we're instrumenting function entry/exit, then prepend the call to
7534 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7535 catch the exit hook. */
7536 /* ??? Add some way to ignore exceptions for this TFE. */
7537 if (flag_instrument_function_entry_exit
7538 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7539 && !flag_instrument_functions_exclude_p (fndecl))
7541 tree x;
7542 gimple new_bind;
7543 gimple tf;
7544 gimple_seq cleanup = NULL, body = NULL;
7546 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7547 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7548 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7550 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7551 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7552 gimplify_seq_add_stmt (&body, tf);
7553 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7554 /* Clear the block for BIND, since it is no longer directly inside
7555 the function, but within a try block. */
7556 gimple_bind_set_block (bind, NULL);
7558 /* Replace the current function body with the body
7559 wrapped in the try/finally TF. */
7560 seq = gimple_seq_alloc ();
7561 gimple_seq_add_stmt (&seq, new_bind);
7562 gimple_set_body (fndecl, seq);
7565 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7566 cfun->curr_properties = PROP_gimple_any;
7568 current_function_decl = oldfn;
7569 pop_cfun ();
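/* [Editorial usage sketch, not part of gimplify.c.]  After
   gimplify_function_tree the lowered body lives in the function decl rather
   than in DECL_SAVED_TREE; a hypothetical caller would retrieve it with
   gimple_body, e.g.:  */

static gimple_seq ATTRIBUTE_UNUSED
example_gimplify_and_fetch_body (tree fndecl)
{
  gimplify_function_tree (fndecl);
  /* DECL_SAVED_TREE (fndecl) is now NULL_TREE; the GIMPLE body is here.  */
  return gimple_body (fndecl);
}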
7573 /* Some transformations like inlining may invalidate the GIMPLE form
7574 for operands. This function traverses all the operands in STMT and
7575 gimplifies anything that is not a valid gimple operand. Any new
7576 GIMPLE statements are inserted before *GSI_P. */
7578 void
7579 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7581 size_t i, num_ops;
7582 tree orig_lhs = NULL_TREE, lhs, t;
7583 gimple_seq pre = NULL;
7584 gimple post_stmt = NULL;
7585 struct gimplify_ctx gctx;
7587 push_gimplify_context (&gctx);
7588 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7590 switch (gimple_code (stmt))
7592 case GIMPLE_COND:
7593 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7594 is_gimple_val, fb_rvalue);
7595 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7596 is_gimple_val, fb_rvalue);
7597 break;
7598 case GIMPLE_SWITCH:
7599 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7600 is_gimple_val, fb_rvalue);
7601 break;
7602 case GIMPLE_OMP_ATOMIC_LOAD:
7603 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7604 is_gimple_val, fb_rvalue);
7605 break;
7606 case GIMPLE_ASM:
7608 size_t i, noutputs = gimple_asm_noutputs (stmt);
7609 const char *constraint, **oconstraints;
7610 bool allows_mem, allows_reg, is_inout;
7612 oconstraints
7613 = (const char **) alloca ((noutputs) * sizeof (const char *));
7614 for (i = 0; i < noutputs; i++)
7616 tree op = gimple_asm_output_op (stmt, i);
7617 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7618 oconstraints[i] = constraint;
7619 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7620 &allows_reg, &is_inout);
7621 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7622 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7623 fb_lvalue | fb_mayfail);
7625 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7627 tree op = gimple_asm_input_op (stmt, i);
7628 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7629 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7630 oconstraints, &allows_mem, &allows_reg);
7631 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7632 allows_reg = 0;
7633 if (!allows_reg && allows_mem)
7634 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7635 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7636 else
7637 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7638 is_gimple_asm_val, fb_rvalue);
7641 break;
7642 default:
7643 /* NOTE: We start gimplifying operands from last to first to
7644 make sure that side-effects on the RHS of calls, assignments
7645 and ASMs are executed before the LHS. The ordering is not
7646 important for other statements. */
7647 num_ops = gimple_num_ops (stmt);
7648 orig_lhs = gimple_get_lhs (stmt);
7649 for (i = num_ops; i > 0; i--)
7651 tree op = gimple_op (stmt, i - 1);
7652 if (op == NULL_TREE)
7653 continue;
7654 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7655 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7656 else if (i == 2
7657 && is_gimple_assign (stmt)
7658 && num_ops == 2
7659 && get_gimple_rhs_class (gimple_expr_code (stmt))
7660 == GIMPLE_SINGLE_RHS)
7661 gimplify_expr (&op, &pre, NULL,
7662 rhs_predicate_for (gimple_assign_lhs (stmt)),
7663 fb_rvalue);
7664 else if (i == 2 && is_gimple_call (stmt))
7666 if (TREE_CODE (op) == FUNCTION_DECL)
7667 continue;
7668 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7670 else
7671 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7672 gimple_set_op (stmt, i - 1, op);
7675 lhs = gimple_get_lhs (stmt);
7676 /* If the LHS changed in a way that requires a simple RHS,
7677 create a temporary. */
7678 if (lhs && !is_gimple_reg (lhs))
7680 bool need_temp = false;
7682 if (is_gimple_assign (stmt)
7683 && num_ops == 2
7684 && get_gimple_rhs_class (gimple_expr_code (stmt))
7685 == GIMPLE_SINGLE_RHS)
7686 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7687 rhs_predicate_for (gimple_assign_lhs (stmt)),
7688 fb_rvalue);
7689 else if (is_gimple_reg (lhs))
7691 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7693 if (is_gimple_call (stmt))
7695 i = gimple_call_flags (stmt);
7696 if ((i & ECF_LOOPING_CONST_OR_PURE)
7697 || !(i & (ECF_CONST | ECF_PURE)))
7698 need_temp = true;
7700 if (stmt_can_throw_internal (stmt))
7701 need_temp = true;
7704 else
7706 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7707 need_temp = true;
7708 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7710 if (is_gimple_call (stmt))
7712 tree fndecl = gimple_call_fndecl (stmt);
7714 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7715 && !(fndecl && DECL_RESULT (fndecl)
7716 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7717 need_temp = true;
7719 else
7720 need_temp = true;
7723 if (need_temp)
7725 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7727 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7728 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7729 DECL_GIMPLE_REG_P (temp) = 1;
7730 if (TREE_CODE (orig_lhs) == SSA_NAME)
7731 orig_lhs = SSA_NAME_VAR (orig_lhs);
7733 if (gimple_in_ssa_p (cfun))
7734 temp = make_ssa_name (temp, NULL);
7735 gimple_set_lhs (stmt, temp);
7736 post_stmt = gimple_build_assign (lhs, temp);
7737 if (TREE_CODE (lhs) == SSA_NAME)
7738 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7741 break;
7744 if (gimple_referenced_vars (cfun))
7745 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7746 add_referenced_var (t);
7748 if (!gimple_seq_empty_p (pre))
7750 if (gimple_in_ssa_p (cfun))
7752 gimple_stmt_iterator i;
7754 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7755 mark_symbols_for_renaming (gsi_stmt (i));
7757 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7759 if (post_stmt)
7760 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7762 pop_gimplify_context (NULL);
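/* [Editorial usage sketch, not part of gimplify.c.]  A pass that has
   substituted operands into an existing statement (as the inliner does)
   could restore valid GIMPLE form as below; the wrapper is hypothetical and
   assumes STMT is already linked into a basic block.  */

static void ATTRIBUTE_UNUSED
example_regimplify_stmt (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  /* Statements needed to re-simplify operands are inserted before STMT.  */
  gimple_regimplify_operands (stmt, &gsi);
}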
7766 /* Expand EXPR into the sequence of GIMPLE statements *STMTS. If SIMPLE is true,
7767 force the result to be either an SSA_NAME or an invariant; otherwise
7768 just force it to be a GIMPLE rhs expression. If VAR is not NULL, make the
7769 base variable of the final destination be VAR if suitable. */
7771 tree
7772 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7774 tree t;
7775 enum gimplify_status ret;
7776 gimple_predicate gimple_test_f;
7777 struct gimplify_ctx gctx;
7779 *stmts = NULL;
7781 if (is_gimple_val (expr))
7782 return expr;
7784 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7786 push_gimplify_context (&gctx);
7787 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7788 gimplify_ctxp->allow_rhs_cond_expr = true;
7790 if (var)
7791 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7793 if (TREE_CODE (expr) != MODIFY_EXPR
7794 && TREE_TYPE (expr) == void_type_node)
7796 gimplify_and_add (expr, stmts);
7797 expr = NULL_TREE;
7799 else
7801 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7802 gcc_assert (ret != GS_ERROR);
7805 if (gimple_referenced_vars (cfun))
7806 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7807 add_referenced_var (t);
7809 pop_gimplify_context (NULL);
7811 return expr;
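/* [Editorial usage sketch, not part of gimplify.c.]  Typical use of
   force_gimple_operand when the caller wants to collect the generated
   statements itself; the wrapper name and SEQ_P are hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_force_to_val (tree expr, gimple_seq *seq_p)
{
  gimple_seq stmts = NULL;
  /* VAL is an SSA name or an invariant; STMTS computes it.  */
  tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (seq_p, stmts);
  return val;
}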
7814 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7815 some statements are produced, emits them at GSI. If BEFORE is true,
7816 the statements are inserted before GSI, otherwise they are inserted after
7817 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7818 GSI_CONTINUE_LINKING are the usual values). */
7820 tree
7821 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7822 bool simple_p, tree var, bool before,
7823 enum gsi_iterator_update m)
7825 gimple_seq stmts;
7827 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7829 if (!gimple_seq_empty_p (stmts))
7831 if (gimple_in_ssa_p (cfun))
7833 gimple_stmt_iterator i;
7835 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7836 mark_symbols_for_renaming (gsi_stmt (i));
7839 if (before)
7840 gsi_insert_seq_before (gsi, stmts, m);
7841 else
7842 gsi_insert_seq_after (gsi, stmts, m);
7845 return expr;
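/* [Editorial usage sketch, not part of gimplify.c.]  Building a new
   expression in GENERIC and materializing it as a GIMPLE value just before
   the statement at GSI; the wrapper and its parameters are hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_force_sum_before (gimple_stmt_iterator *gsi, tree a, tree b)
{
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
  /* Emit the statements computing SUM before *GSI and keep GSI in place.  */
  return force_gimple_operand_gsi (gsi, sum, true, NULL_TREE, true,
                                   GSI_SAME_STMT);
}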
7848 #include "gt-gimplify.h"