gcc/gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "tree-iterator.h"
31 #include "tree-inline.h"
32 #include "tree-pretty-print.h"
33 #include "langhooks.h"
34 #include "tree-flow.h"
35 #include "cgraph.h"
36 #include "timevar.h"
37 #include "hashtab.h"
38 #include "flags.h"
39 #include "function.h"
40 #include "output.h"
41 #include "ggc.h"
42 #include "diagnostic-core.h"
43 #include "target.h"
44 #include "pointer-set.h"
45 #include "splay-tree.h"
46 #include "vec.h"
47 #include "gimple.h"
48 #include "tree-pass.h"
50 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name. */
51 #include "expr.h" /* FIXME: for can_move_by_pieces
52 and STACK_CHECK_MAX_VAR_SIZE. */
54 enum gimplify_omp_var_data
56 GOVD_SEEN = 1,
57 GOVD_EXPLICIT = 2,
58 GOVD_SHARED = 4,
59 GOVD_PRIVATE = 8,
60 GOVD_FIRSTPRIVATE = 16,
61 GOVD_LASTPRIVATE = 32,
62 GOVD_REDUCTION = 64,
63 GOVD_LOCAL = 128,
64 GOVD_DEBUG_PRIVATE = 256,
65 GOVD_PRIVATE_OUTER_REF = 512,
66 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
67 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
71 enum omp_region_type
73 ORT_WORKSHARE = 0,
74 ORT_PARALLEL = 2,
75 ORT_COMBINED_PARALLEL = 3,
76 ORT_TASK = 4,
77 ORT_UNTIED_TASK = 5
80 struct gimplify_omp_ctx
82 struct gimplify_omp_ctx *outer_context;
83 splay_tree variables;
84 struct pointer_set_t *privatized_types;
85 location_t location;
86 enum omp_clause_default_kind default_kind;
87 enum omp_region_type region_type;
90 static struct gimplify_ctx *gimplify_ctxp;
91 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
94 /* Formal (expression) temporary table handling: multiple occurrences of
95 the same scalar expression are evaluated into the same temporary. */
97 typedef struct gimple_temp_hash_elt
99 tree val; /* Key */
100 tree temp; /* Value */
101 } elt_t;
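/* Illustrative sketch, not part of the original sources: when the same
   scalar expression has to be reduced to a GIMPLE value more than once
   during gimplification, the table above lets the reductions share one
   temporary, conceptually

       ... a[i] ... a[i] ...   becomes   D.1234 = a[i];
                                         ... D.1234 ... D.1234 ...

   rather than creating a distinct temporary per occurrence.  The name
   D.1234 is invented for the example.  */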
103 /* Forward declaration. */
104 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
106 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
107 form and we don't do any syntax checking. */
109 void
110 mark_addressable (tree x)
112 while (handled_component_p (x))
113 x = TREE_OPERAND (x, 0);
114 if (TREE_CODE (x) == MEM_REF
115 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
116 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
117 if (TREE_CODE (x) != VAR_DECL
118 && TREE_CODE (x) != PARM_DECL
119 && TREE_CODE (x) != RESULT_DECL)
120 return;
121 TREE_ADDRESSABLE (x) = 1;
123 /* Also mark the artificial SSA_NAME that points to the partition of X. */
124 if (TREE_CODE (x) == VAR_DECL
125 && !DECL_EXTERNAL (x)
126 && !TREE_STATIC (x)
127 && cfun->gimple_df != NULL
128 && cfun->gimple_df->decls_to_pointers != NULL)
130 void *namep
131 = pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
132 if (namep)
133 TREE_ADDRESSABLE (*(tree *)namep) = 1;
137 /* Return a hash value for a formal temporary table entry. */
139 static hashval_t
140 gimple_tree_hash (const void *p)
142 tree t = ((const elt_t *) p)->val;
143 return iterative_hash_expr (t, 0);
146 /* Compare two formal temporary table entries. */
148 static int
149 gimple_tree_eq (const void *p1, const void *p2)
151 tree t1 = ((const elt_t *) p1)->val;
152 tree t2 = ((const elt_t *) p2)->val;
153 enum tree_code code = TREE_CODE (t1);
155 if (TREE_CODE (t2) != code
156 || TREE_TYPE (t1) != TREE_TYPE (t2))
157 return 0;
159 if (!operand_equal_p (t1, t2, 0))
160 return 0;
162 #ifdef ENABLE_CHECKING
163 /* Only allow them to compare equal if they also hash equal; otherwise
164 results are nondeterministic, and we fail bootstrap comparison. */
165 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
166 #endif
168 return 1;
171 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
172 *SEQ_P is NULL, a new sequence is allocated. This function is
173 similar to gimple_seq_add_stmt, but does not scan the operands.
174 During gimplification, we need to manipulate statement sequences
175 before the def/use vectors have been constructed. */
177 void
178 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
180 gimple_stmt_iterator si;
182 if (gs == NULL)
183 return;
185 if (*seq_p == NULL)
186 *seq_p = gimple_seq_alloc ();
188 si = gsi_last (*seq_p);
190 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
193 /* Shorter alias name for the above function for use in gimplify.c
194 only. */
196 static inline void
197 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
199 gimple_seq_add_stmt_without_update (seq_p, gs);
202 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
203 NULL, a new sequence is allocated. This function is
204 similar to gimple_seq_add_seq, but does not scan the operands.
205 During gimplification, we need to manipulate statement sequences
206 before the def/use vectors have been constructed. */
208 static void
209 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
211 gimple_stmt_iterator si;
213 if (src == NULL)
214 return;
216 if (*dst_p == NULL)
217 *dst_p = gimple_seq_alloc ();
219 si = gsi_last (*dst_p);
220 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
223 /* Set up a context for the gimplifier. */
225 void
226 push_gimplify_context (struct gimplify_ctx *c)
228 memset (c, '\0', sizeof (*c));
229 c->prev_context = gimplify_ctxp;
230 gimplify_ctxp = c;
233 /* Tear down a context for the gimplifier. If BODY is non-null, then
234 put the temporaries into the outer BIND_EXPR. Otherwise, put them
235 in the local_decls.
237 BODY is not a sequence, but the first tuple in a sequence. */
239 void
240 pop_gimplify_context (gimple body)
242 struct gimplify_ctx *c = gimplify_ctxp;
244 gcc_assert (c && (c->bind_expr_stack == NULL
245 || VEC_empty (gimple, c->bind_expr_stack)));
246 VEC_free (gimple, heap, c->bind_expr_stack);
247 gimplify_ctxp = c->prev_context;
249 if (body)
250 declare_vars (c->temps, body, false);
251 else
252 record_vars (c->temps);
254 if (c->temp_htab)
255 htab_delete (c->temp_htab);
258 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
260 static void
261 gimple_push_bind_expr (gimple gimple_bind)
263 if (gimplify_ctxp->bind_expr_stack == NULL)
264 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
265 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
268 /* Pop the innermost (most recently pushed) binding off the stack of bindings. */
270 static void
271 gimple_pop_bind_expr (void)
273 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
276 /* Return the innermost (most recently pushed) binding on the stack of bindings. */
278 gimple
279 gimple_current_bind_expr (void)
281 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
284 /* Return the stack of bindings created during gimplification. */
286 VEC(gimple, heap) *
287 gimple_bind_expr_stack (void)
289 return gimplify_ctxp->bind_expr_stack;
292 /* Return true iff there is a COND_EXPR between us and the innermost
293 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
295 static bool
296 gimple_conditional_context (void)
298 return gimplify_ctxp->conditions > 0;
301 /* Note that we've entered a COND_EXPR. */
303 static void
304 gimple_push_condition (void)
306 #ifdef ENABLE_GIMPLE_CHECKING
307 if (gimplify_ctxp->conditions == 0)
308 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
309 #endif
310 ++(gimplify_ctxp->conditions);
313 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
314 now, add any conditional cleanups we've seen to the prequeue. */
316 static void
317 gimple_pop_condition (gimple_seq *pre_p)
319 int conds = --(gimplify_ctxp->conditions);
321 gcc_assert (conds >= 0);
322 if (conds == 0)
324 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
325 gimplify_ctxp->conditional_cleanups = NULL;
329 /* A stable comparison routine for use with splay trees and DECLs. */
331 static int
332 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
334 tree a = (tree) xa;
335 tree b = (tree) xb;
337 return DECL_UID (a) - DECL_UID (b);
340 /* Create a new omp construct that deals with variable remapping. */
342 static struct gimplify_omp_ctx *
343 new_omp_context (enum omp_region_type region_type)
345 struct gimplify_omp_ctx *c;
347 c = XCNEW (struct gimplify_omp_ctx);
348 c->outer_context = gimplify_omp_ctxp;
349 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
350 c->privatized_types = pointer_set_create ();
351 c->location = input_location;
352 c->region_type = region_type;
353 if ((region_type & ORT_TASK) == 0)
354 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
355 else
356 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
358 return c;
361 /* Destroy an omp construct that deals with variable remapping. */
363 static void
364 delete_omp_context (struct gimplify_omp_ctx *c)
366 splay_tree_delete (c->variables);
367 pointer_set_destroy (c->privatized_types);
368 XDELETE (c);
371 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
372 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
374 /* Both gimplify the statement T and append it to *SEQ_P. This function
375 behaves exactly as gimplify_stmt, but you don't have to pass T as a
376 reference. */
378 void
379 gimplify_and_add (tree t, gimple_seq *seq_p)
381 gimplify_stmt (&t, seq_p);
384 /* Gimplify statement T into sequence *SEQ_P, and return the first
385 tuple in the sequence of generated tuples for this statement.
386 Return NULL if gimplifying T produced no tuples. */
388 static gimple
389 gimplify_and_return_first (tree t, gimple_seq *seq_p)
391 gimple_stmt_iterator last = gsi_last (*seq_p);
393 gimplify_and_add (t, seq_p);
395 if (!gsi_end_p (last))
397 gsi_next (&last);
398 return gsi_stmt (last);
400 else
401 return gimple_seq_first_stmt (*seq_p);
404 /* Strip off a legitimate source ending from the input string NAME of
405 length LEN. Rather than having to know the names used by all of
406 our front ends, we strip off an ending of a period followed by
407 up to five characters. (Java uses ".class".) */
409 static inline void
410 remove_suffix (char *name, int len)
412 int i;
414 for (i = 2; i < 8 && len > i; i++)
416 if (name[len - i] == '.')
418 name[len - i] = '\0';
419 break;
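/* Examples (for illustration only): remove_suffix applied to "foo.c"
   yields "foo", and applied to "Shape.class" yields "Shape"; a name
   without such a short dot-suffix is left unchanged.  */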
424 /* Create a new temporary name with PREFIX. Return an identifier. */
426 static GTY(()) unsigned int tmp_var_id_num;
428 tree
429 create_tmp_var_name (const char *prefix)
431 char *tmp_name;
433 if (prefix)
435 char *preftmp = ASTRDUP (prefix);
437 remove_suffix (preftmp, strlen (preftmp));
438 clean_symbol_name (preftmp);
440 prefix = preftmp;
443 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
444 return get_identifier (tmp_name);
447 /* Create a new temporary variable declaration of type TYPE.
448 Do NOT push it into the current binding. */
450 tree
451 create_tmp_var_raw (tree type, const char *prefix)
453 tree tmp_var;
455 tmp_var = build_decl (input_location,
456 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
457 type);
459 /* The variable was declared by the compiler. */
460 DECL_ARTIFICIAL (tmp_var) = 1;
461 /* And we don't want debug info for it. */
462 DECL_IGNORED_P (tmp_var) = 1;
464 /* Make the variable writable. */
465 TREE_READONLY (tmp_var) = 0;
467 DECL_EXTERNAL (tmp_var) = 0;
468 TREE_STATIC (tmp_var) = 0;
469 TREE_USED (tmp_var) = 1;
471 return tmp_var;
474 /* Create a new temporary variable declaration of type TYPE. DO push the
475 variable into the current binding. Further, assume that this is called
476 only from gimplification or optimization, at which point the creation of
477 certain types are bugs. */
479 tree
480 create_tmp_var (tree type, const char *prefix)
482 tree tmp_var;
484 /* We don't allow types that are addressable (meaning we can't make copies),
485 or incomplete. We also used to reject all variable-size objects here,
486 but now support those for which a constant upper bound can be obtained.
487 The processing for variable sizes is performed in gimple_add_tmp_var,
488 the point at which it really matters and which may be reached via paths
489 not going through this function, e.g. after direct calls to create_tmp_var_raw. */
490 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
492 tmp_var = create_tmp_var_raw (type, prefix);
493 gimple_add_tmp_var (tmp_var);
494 return tmp_var;
497 /* Create a new temporary variable declaration of type TYPE by calling
498 create_tmp_var and if TYPE is a vector or a complex number, mark the new
499 temporary as gimple register. */
501 tree
502 create_tmp_reg (tree type, const char *prefix)
504 tree tmp;
506 tmp = create_tmp_var (type, prefix);
507 if (TREE_CODE (type) == COMPLEX_TYPE
508 || TREE_CODE (type) == VECTOR_TYPE)
509 DECL_GIMPLE_REG_P (tmp) = 1;
511 return tmp;
514 /* Create a temporary with a name derived from VAL. Subroutine of
515 lookup_tmp_var; nobody else should call this function. */
517 static inline tree
518 create_tmp_from_val (tree val)
520 /* Drop all qualifiers and address-space information from the value type. */
521 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
525 an existing expression temporary. */
527 static tree
528 lookup_tmp_var (tree val, bool is_formal)
530 tree ret;
532 /* If not optimizing, never really reuse a temporary. local-alloc
533 won't allocate any variable that is used in more than one basic
534 block, which means it will go into memory, causing much extra
535 work in reload and final and poorer code generation, outweighing
536 the extra memory allocation here. */
537 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
538 ret = create_tmp_from_val (val);
539 else
541 elt_t elt, *elt_p;
542 void **slot;
544 elt.val = val;
545 if (gimplify_ctxp->temp_htab == NULL)
546 gimplify_ctxp->temp_htab
547 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
548 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
549 if (*slot == NULL)
551 elt_p = XNEW (elt_t);
552 elt_p->val = val;
553 elt_p->temp = ret = create_tmp_from_val (val);
554 *slot = (void *) elt_p;
556 else
558 elt_p = (elt_t *) *slot;
559 ret = elt_p->temp;
563 return ret;
566 /* Return true if T is a CALL_EXPR or an expression that can be
567 assigned to a temporary. Note that this predicate should only be
568 used during gimplification. See the rationale for this in
569 gimplify_modify_expr. */
571 static bool
572 is_gimple_reg_rhs_or_call (tree t)
574 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
575 || TREE_CODE (t) == CALL_EXPR);
578 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
579 this predicate should only be used during gimplification. See the
580 rationale for this in gimplify_modify_expr. */
582 static bool
583 is_gimple_mem_rhs_or_call (tree t)
585 /* If we're dealing with a renamable type, either source or dest must be
586 a renamed variable. */
587 if (is_gimple_reg_type (TREE_TYPE (t)))
588 return is_gimple_val (t);
589 else
590 return (is_gimple_val (t) || is_gimple_lvalue (t)
591 || TREE_CODE (t) == CALL_EXPR);
594 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
596 static tree
597 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
598 bool is_formal)
600 tree t, mod;
602 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
603 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
604 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
605 fb_rvalue);
607 t = lookup_tmp_var (val, is_formal);
609 if (is_formal
610 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
611 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
612 DECL_GIMPLE_REG_P (t) = 1;
614 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
616 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
618 /* gimplify_modify_expr might want to reduce this further. */
619 gimplify_and_add (mod, pre_p);
620 ggc_free (mod);
622 /* If we're gimplifying into ssa, gimplify_modify_expr will have
623 given our temporary an SSA name. Find and return it. */
624 if (gimplify_ctxp->into_ssa)
626 gimple last = gimple_seq_last_stmt (*pre_p);
627 t = gimple_get_lhs (last);
630 return t;
633 /* Return a formal temporary variable initialized with VAL. PRE_P is as
634 in gimplify_expr. Only use this function if:
636 1) The value of the unfactored expression represented by VAL will not
637 change between the initialization and use of the temporary, and
638 2) The temporary will not be otherwise modified.
640 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
641 and #2 means it is inappropriate for && temps.
643 For other cases, use get_initialized_tmp_var instead. */
645 tree
646 get_formal_tmp_var (tree val, gimple_seq *pre_p)
648 return internal_get_tmp_var (val, pre_p, NULL, true);
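/* Hypothetical usage, for illustration only: a caller that needs the value
   of VAL several times, and knows that VAL cannot change between the
   initialization and the uses and that the temporary is never written to,
   could do

       tree t = get_formal_tmp_var (val, pre_p);

   and reuse T freely.  If either guarantee might not hold, the safe choice
   is get_initialized_tmp_var instead, per the comment above.  */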
651 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
652 are as in gimplify_expr. */
654 tree
655 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
657 return internal_get_tmp_var (val, pre_p, post_p, false);
660 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
661 generate debug info for them; otherwise don't. */
663 void
664 declare_vars (tree vars, gimple scope, bool debug_info)
666 tree last = vars;
667 if (last)
669 tree temps, block;
671 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
673 temps = nreverse (last);
675 block = gimple_bind_block (scope);
676 gcc_assert (!block || TREE_CODE (block) == BLOCK);
677 if (!block || !debug_info)
679 DECL_CHAIN (last) = gimple_bind_vars (scope);
680 gimple_bind_set_vars (scope, temps);
682 else
684 /* We need to attach the nodes both to the BIND_EXPR and to its
685 associated BLOCK for debugging purposes. The key point here
686 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
687 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
688 if (BLOCK_VARS (block))
689 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
690 else
692 gimple_bind_set_vars (scope,
693 chainon (gimple_bind_vars (scope), temps));
694 BLOCK_VARS (block) = temps;
700 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
701 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
702 no such upper bound can be obtained. */
704 static void
705 force_constant_size (tree var)
707 /* The only attempt we make is by querying the maximum size of objects
708 of the variable's type. */
710 HOST_WIDE_INT max_size;
712 gcc_assert (TREE_CODE (var) == VAR_DECL);
714 max_size = max_int_size_in_bytes (TREE_TYPE (var));
716 gcc_assert (max_size >= 0);
718 DECL_SIZE_UNIT (var)
719 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
720 DECL_SIZE (var)
721 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
724 /* Push the temporary variable TMP into the current binding. */
726 void
727 gimple_add_tmp_var (tree tmp)
729 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
731 /* Later processing assumes that the object size is constant, which might
732 not be true at this point. Force the use of a constant upper bound in
733 this case. */
734 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
735 force_constant_size (tmp);
737 DECL_CONTEXT (tmp) = current_function_decl;
738 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
740 if (gimplify_ctxp)
742 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
743 gimplify_ctxp->temps = tmp;
745 /* Mark temporaries local within the nearest enclosing parallel. */
746 if (gimplify_omp_ctxp)
748 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
749 while (ctx && ctx->region_type == ORT_WORKSHARE)
750 ctx = ctx->outer_context;
751 if (ctx)
752 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
755 else if (cfun)
756 record_vars (tmp);
757 else
759 gimple_seq body_seq;
761 /* This case is for nested functions. We need to expose the locals
762 they create. */
763 body_seq = gimple_body (current_function_decl);
764 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
768 /* Determine whether to assign a location to the statement GS. */
770 static bool
771 should_carry_location_p (gimple gs)
773 /* Don't emit a line note for a label. We particularly don't want to
774 emit one for the break label, since it doesn't actually correspond
775 to the beginning of the loop/switch. */
776 if (gimple_code (gs) == GIMPLE_LABEL)
777 return false;
779 return true;
782 /* Return true if a location should not be emitted for this statement
783 by annotate_one_with_location. */
785 static inline bool
786 gimple_do_not_emit_location_p (gimple g)
788 return gimple_plf (g, GF_PLF_1);
791 /* Mark statement G so a location will not be emitted by
792 annotate_one_with_location. */
794 static inline void
795 gimple_set_do_not_emit_location (gimple g)
797 /* The PLF flags are initialized to 0 when a new tuple is created,
798 so no need to initialize it anywhere. */
799 gimple_set_plf (g, GF_PLF_1, true);
802 /* Set the location for gimple statement GS to LOCATION. */
804 static void
805 annotate_one_with_location (gimple gs, location_t location)
807 if (!gimple_has_location (gs)
808 && !gimple_do_not_emit_location_p (gs)
809 && should_carry_location_p (gs))
810 gimple_set_location (gs, location);
813 /* Set LOCATION for all the statements after iterator GSI in sequence
814 SEQ. If GSI is pointing to the end of the sequence, start with the
815 first statement in SEQ. */
817 static void
818 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
819 location_t location)
821 if (gsi_end_p (gsi))
822 gsi = gsi_start (seq);
823 else
824 gsi_next (&gsi);
826 for (; !gsi_end_p (gsi); gsi_next (&gsi))
827 annotate_one_with_location (gsi_stmt (gsi), location);
830 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
832 void
833 annotate_all_with_location (gimple_seq stmt_p, location_t location)
835 gimple_stmt_iterator i;
837 if (gimple_seq_empty_p (stmt_p))
838 return;
840 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
842 gimple gs = gsi_stmt (i);
843 annotate_one_with_location (gs, location);
847 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
848 nodes that are referenced more than once in GENERIC functions. This is
849 necessary because gimplification (translation into GIMPLE) is performed
850 by modifying tree nodes in-place, so gimplification of a shared node in a
851 first context could generate an invalid GIMPLE form in a second context.
853 This is achieved with a simple mark/copy/unmark algorithm that walks the
854 GENERIC representation top-down, marks nodes with TREE_VISITED the first
855 time it encounters them, duplicates them if they already have TREE_VISITED
856 set, and finally removes the TREE_VISITED marks it has set.
858 The algorithm works only at the function level, i.e. it generates a GENERIC
859 representation of a function with no nodes shared within the function when
860 passed a GENERIC function (except for nodes that are allowed to be shared).
862 At the global level, it is also necessary to unshare tree nodes that are
863 referenced in more than one function, for the same aforementioned reason.
864 This requires some cooperation from the front-end. There are 2 strategies:
866 1. Manual unsharing. The front-end needs to call unshare_expr on every
867 expression that might end up being shared across functions.
869 2. Deep unsharing. This is an extension of regular unsharing. Instead
870 of calling unshare_expr on expressions that might be shared across
871 functions, the front-end pre-marks them with TREE_VISITED. This will
872 ensure that they are unshared on the first reference within functions
873 when the regular unsharing algorithm runs. The counterpart is that
874 this algorithm must look deeper than for manual unsharing, which is
875 specified by LANG_HOOKS_DEEP_UNSHARING.
877 If there are only a few specific cases of node sharing across functions, it is
878 probably easier for a front-end to unshare the expressions manually. On the
879 contrary, if the expressions generated at the global level are as widespread
880 as expressions generated within functions, deep unsharing is very likely the
881 way to go. */
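/* Illustrative example, not part of the original file: suppose a front end
   reuses a single tree node T in two GENERIC statements of one function,

       a = T;
       b = T;    <-- second reference to the very same node

   The walk over the body marks T with TREE_VISITED at the first reference;
   when it reaches the second reference the flag is already set, so that
   occurrence is replaced with a fresh copy of T.  A final pass clears the
   TREE_VISITED marks again, leaving a body in which no node is referenced
   twice (apart from nodes that are allowed to be shared).  */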
883 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
884 These nodes model computations that must be done once. If we were to
885 unshare something like SAVE_EXPR(i++), the gimplification process would
886 create wrong code. However, if DATA is non-null, it must hold a pointer
887 set that is used to unshare the subtrees of these nodes. */
889 static tree
890 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
892 tree t = *tp;
893 enum tree_code code = TREE_CODE (t);
895 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
896 copy their subtrees if we can make sure to do it only once. */
897 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
899 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
901 else
902 *walk_subtrees = 0;
905 /* Stop at types, decls, constants like copy_tree_r. */
906 else if (TREE_CODE_CLASS (code) == tcc_type
907 || TREE_CODE_CLASS (code) == tcc_declaration
908 || TREE_CODE_CLASS (code) == tcc_constant
909 /* We can't do anything sensible with a BLOCK used as an
910 expression, but we also can't just die when we see it
911 because of non-expression uses. So we avert our eyes
912 and cross our fingers. Silly Java. */
913 || code == BLOCK)
914 *walk_subtrees = 0;
916 /* Cope with the statement expression extension. */
917 else if (code == STATEMENT_LIST)
920 /* Leave the bulk of the work to copy_tree_r itself. */
921 else
922 copy_tree_r (tp, walk_subtrees, NULL);
924 return NULL_TREE;
927 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
928 If *TP has been visited already, then *TP is deeply copied by calling
929 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
931 static tree
932 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
934 tree t = *tp;
935 enum tree_code code = TREE_CODE (t);
937 /* Skip types, decls, and constants. But we do want to look at their
938 types and the bounds of types. Mark them as visited so we properly
939 unmark their subtrees on the unmark pass. If we've already seen them,
940 don't look down further. */
941 if (TREE_CODE_CLASS (code) == tcc_type
942 || TREE_CODE_CLASS (code) == tcc_declaration
943 || TREE_CODE_CLASS (code) == tcc_constant)
945 if (TREE_VISITED (t))
946 *walk_subtrees = 0;
947 else
948 TREE_VISITED (t) = 1;
951 /* If this node has been visited already, unshare it and don't look
952 any deeper. */
953 else if (TREE_VISITED (t))
955 walk_tree (tp, mostly_copy_tree_r, data, NULL);
956 *walk_subtrees = 0;
959 /* Otherwise, mark the node as visited and keep looking. */
960 else
961 TREE_VISITED (t) = 1;
963 return NULL_TREE;
966 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
967 copy_if_shared_r callback unmodified. */
969 static inline void
970 copy_if_shared (tree *tp, void *data)
972 walk_tree (tp, copy_if_shared_r, data, NULL);
975 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
976 any nested functions. */
978 static void
979 unshare_body (tree fndecl)
981 struct cgraph_node *cgn = cgraph_get_node (fndecl);
982 /* If the language requires deep unsharing, we need a pointer set to make
983 sure we don't repeatedly unshare subtrees of unshareable nodes. */
984 struct pointer_set_t *visited
985 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
987 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
988 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
989 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
991 if (visited)
992 pointer_set_destroy (visited);
994 if (cgn)
995 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
996 unshare_body (cgn->decl);
999 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
1000 Subtrees are walked until the first unvisited node is encountered. */
1002 static tree
1003 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1005 tree t = *tp;
1007 /* If this node has been visited, unmark it and keep looking. */
1008 if (TREE_VISITED (t))
1009 TREE_VISITED (t) = 0;
1011 /* Otherwise, don't look any deeper. */
1012 else
1013 *walk_subtrees = 0;
1015 return NULL_TREE;
1018 /* Unmark the visited trees rooted at *TP. */
1020 static inline void
1021 unmark_visited (tree *tp)
1023 walk_tree (tp, unmark_visited_r, NULL, NULL);
1026 /* Likewise, but mark all trees as not visited. */
1028 static void
1029 unvisit_body (tree fndecl)
1031 struct cgraph_node *cgn = cgraph_get_node (fndecl);
1033 unmark_visited (&DECL_SAVED_TREE (fndecl));
1034 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1035 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1037 if (cgn)
1038 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1039 unvisit_body (cgn->decl);
1042 /* Unconditionally make an unshared copy of EXPR. This is used when using
1043 stored expressions which span multiple functions, such as BINFO_VTABLE,
1044 as the normal unsharing process can't tell that they're shared. */
1046 tree
1047 unshare_expr (tree expr)
1049 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1050 return expr;
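/* Hypothetical use, for illustration only: before gimplifying an expression
   that is also stored and reused elsewhere (such as the BINFO_VTABLE case
   mentioned above), a caller would first do

       expr = unshare_expr (expr);

   so that in-place gimplification cannot corrupt the stored copy.  */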
1053 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1054 contain statements and have a value. Assign its value to a temporary
1055 and give it void_type_node. Return the temporary, or NULL_TREE if
1056 WRAPPER was already void. */
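/* Invented example for illustration: called with TEMP == NULL_TREE on a
   value-producing wrapper such as

       BIND_EXPR { ...statements...; last_value }          (of type T)

   the function conceptually rewrites it to

       BIND_EXPR { ...statements...; retval = last_value } (of type void)

   and returns the new "retval" temporary; a wrapper that is already void
   yields NULL_TREE instead.  */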
1058 tree
1059 voidify_wrapper_expr (tree wrapper, tree temp)
1061 tree type = TREE_TYPE (wrapper);
1062 if (type && !VOID_TYPE_P (type))
1064 tree *p;
1066 /* Set p to point to the body of the wrapper. Loop until we find
1067 something that isn't a wrapper. */
1068 for (p = &wrapper; p && *p; )
1070 switch (TREE_CODE (*p))
1072 case BIND_EXPR:
1073 TREE_SIDE_EFFECTS (*p) = 1;
1074 TREE_TYPE (*p) = void_type_node;
1075 /* For a BIND_EXPR, the body is operand 1. */
1076 p = &BIND_EXPR_BODY (*p);
1077 break;
1079 case CLEANUP_POINT_EXPR:
1080 case TRY_FINALLY_EXPR:
1081 case TRY_CATCH_EXPR:
1082 TREE_SIDE_EFFECTS (*p) = 1;
1083 TREE_TYPE (*p) = void_type_node;
1084 p = &TREE_OPERAND (*p, 0);
1085 break;
1087 case STATEMENT_LIST:
1089 tree_stmt_iterator i = tsi_last (*p);
1090 TREE_SIDE_EFFECTS (*p) = 1;
1091 TREE_TYPE (*p) = void_type_node;
1092 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1094 break;
1096 case COMPOUND_EXPR:
1097 /* Advance to the last statement. Set all container types to
1098 void. */
1099 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1101 TREE_SIDE_EFFECTS (*p) = 1;
1102 TREE_TYPE (*p) = void_type_node;
1104 break;
1106 case TRANSACTION_EXPR:
1107 TREE_SIDE_EFFECTS (*p) = 1;
1108 TREE_TYPE (*p) = void_type_node;
1109 p = &TRANSACTION_EXPR_BODY (*p);
1110 break;
1112 default:
1113 /* Assume that any tree upon which voidify_wrapper_expr is
1114 directly called is a wrapper, and that its body is op0. */
1115 if (p == &wrapper)
1117 TREE_SIDE_EFFECTS (*p) = 1;
1118 TREE_TYPE (*p) = void_type_node;
1119 p = &TREE_OPERAND (*p, 0);
1120 break;
1122 goto out;
1126 out:
1127 if (p == NULL || IS_EMPTY_STMT (*p))
1128 temp = NULL_TREE;
1129 else if (temp)
1131 /* The wrapper is on the RHS of an assignment that we're pushing
1132 down. */
1133 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1134 || TREE_CODE (temp) == MODIFY_EXPR);
1135 TREE_OPERAND (temp, 1) = *p;
1136 *p = temp;
1138 else
1140 temp = create_tmp_var (type, "retval");
1141 *p = build2 (INIT_EXPR, type, temp, *p);
1144 return temp;
1147 return NULL_TREE;
1150 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1151 a temporary through which they communicate. */
1153 static void
1154 build_stack_save_restore (gimple *save, gimple *restore)
1156 tree tmp_var;
1158 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1159 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1160 gimple_call_set_lhs (*save, tmp_var);
1162 *restore
1163 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1164 1, tmp_var);
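/* For illustration only: the two statements built above correspond to GIMPLE
   of roughly this shape, with saved_stack.N standing for the communicating
   temporary created here:

       saved_stack.N = __builtin_stack_save ();
       ...
       __builtin_stack_restore (saved_stack.N);  */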
1167 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1169 static enum gimplify_status
1170 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1172 tree bind_expr = *expr_p;
1173 bool old_save_stack = gimplify_ctxp->save_stack;
1174 tree t;
1175 gimple gimple_bind;
1176 gimple_seq body, cleanup;
1177 gimple stack_save;
1179 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1181 /* Mark variables seen in this bind expr. */
1182 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1184 if (TREE_CODE (t) == VAR_DECL)
1186 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1188 /* Mark variable as local. */
1189 if (ctx && !DECL_EXTERNAL (t)
1190 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1191 || splay_tree_lookup (ctx->variables,
1192 (splay_tree_key) t) == NULL))
1193 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1195 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1197 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1198 cfun->has_local_explicit_reg_vars = true;
1201 /* Preliminarily mark non-addressed complex variables as eligible
1202 for promotion to gimple registers. We'll transform their uses
1203 as we find them. */
1204 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1205 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1206 && !TREE_THIS_VOLATILE (t)
1207 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1208 && !needs_to_live_in_memory (t))
1209 DECL_GIMPLE_REG_P (t) = 1;
1212 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1213 BIND_EXPR_BLOCK (bind_expr));
1214 gimple_push_bind_expr (gimple_bind);
1216 gimplify_ctxp->save_stack = false;
1218 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1219 body = NULL;
1220 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1221 gimple_bind_set_body (gimple_bind, body);
1223 cleanup = NULL;
1224 stack_save = NULL;
1225 if (gimplify_ctxp->save_stack)
1227 gimple stack_restore;
1229 /* Save stack on entry and restore it on exit. Add a try_finally
1230 block to achieve this. Note that mudflap depends on the
1231 format of the emitted code: see mx_register_decls(). */
1232 build_stack_save_restore (&stack_save, &stack_restore);
1234 gimplify_seq_add_stmt (&cleanup, stack_restore);
1237 /* Add clobbers for all variables that go out of scope. */
1238 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1240 if (TREE_CODE (t) == VAR_DECL
1241 && !is_global_var (t)
1242 && DECL_CONTEXT (t) == current_function_decl
1243 && !DECL_HARD_REGISTER (t)
1244 && !TREE_THIS_VOLATILE (t)
1245 && !DECL_HAS_VALUE_EXPR_P (t)
1246 /* Only care for variables that have to be in memory. Others
1247 will be rewritten into SSA names, hence moved to the top-level. */
1248 && !is_gimple_reg (t))
1250 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1251 TREE_THIS_VOLATILE (clobber) = 1;
1252 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1256 if (cleanup)
1258 gimple gs;
1259 gimple_seq new_body;
1261 new_body = NULL;
1262 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1263 GIMPLE_TRY_FINALLY);
1265 if (stack_save)
1266 gimplify_seq_add_stmt (&new_body, stack_save);
1267 gimplify_seq_add_stmt (&new_body, gs);
1268 gimple_bind_set_body (gimple_bind, new_body);
1271 gimplify_ctxp->save_stack = old_save_stack;
1272 gimple_pop_bind_expr ();
1274 gimplify_seq_add_stmt (pre_p, gimple_bind);
1276 if (temp)
1278 *expr_p = temp;
1279 return GS_OK;
1282 *expr_p = NULL_TREE;
1283 return GS_ALL_DONE;
1286 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1287 GIMPLE value, it is assigned to a new temporary and the statement is
1288 re-written to return the temporary.
1290 PRE_P points to the sequence where side effects that must happen before
1291 STMT should be stored. */
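/* Invented example for illustration: a GENERIC return whose operand is not
   a GIMPLE value, say

       return a + b;

   is rewritten along the lines of

       D.2345 = a + b;
       return D.2345;

   where D.2345 stands for the RESULT_DECL or the shared return temporary,
   depending on how the value is returned.  */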
1293 static enum gimplify_status
1294 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1296 gimple ret;
1297 tree ret_expr = TREE_OPERAND (stmt, 0);
1298 tree result_decl, result;
1300 if (ret_expr == error_mark_node)
1301 return GS_ERROR;
1303 if (!ret_expr
1304 || TREE_CODE (ret_expr) == RESULT_DECL
1305 || ret_expr == error_mark_node)
1307 gimple ret = gimple_build_return (ret_expr);
1308 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1309 gimplify_seq_add_stmt (pre_p, ret);
1310 return GS_ALL_DONE;
1313 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1314 result_decl = NULL_TREE;
1315 else
1317 result_decl = TREE_OPERAND (ret_expr, 0);
1319 /* See through a return by reference. */
1320 if (TREE_CODE (result_decl) == INDIRECT_REF)
1321 result_decl = TREE_OPERAND (result_decl, 0);
1323 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1324 || TREE_CODE (ret_expr) == INIT_EXPR)
1325 && TREE_CODE (result_decl) == RESULT_DECL);
1328 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1329 Recall that aggregate_value_p is FALSE for any aggregate type that is
1330 returned in registers. If we're returning values in registers, then
1331 we don't want to extend the lifetime of the RESULT_DECL, particularly
1332 across another call. In addition, for those aggregates for which
1333 hard_function_value generates a PARALLEL, we'll die during normal
1334 expansion of structure assignments; there's special code in expand_return
1335 to handle this case that does not exist in expand_expr. */
1336 if (!result_decl)
1337 result = NULL_TREE;
1338 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1340 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1342 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1343 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1344 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1345 should be effectively allocated by the caller, i.e. all calls to
1346 this function must be subject to the Return Slot Optimization. */
1347 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1348 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1350 result = result_decl;
1352 else if (gimplify_ctxp->return_temp)
1353 result = gimplify_ctxp->return_temp;
1354 else
1356 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1358 /* ??? With complex control flow (usually involving abnormal edges),
1359 we can wind up warning about an uninitialized value for this. Due
1360 to how this variable is constructed and initialized, this is never
1361 true. Give up and never warn. */
1362 TREE_NO_WARNING (result) = 1;
1364 gimplify_ctxp->return_temp = result;
1367 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1368 Then gimplify the whole thing. */
1369 if (result != result_decl)
1370 TREE_OPERAND (ret_expr, 0) = result;
1372 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1374 ret = gimple_build_return (result);
1375 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1376 gimplify_seq_add_stmt (pre_p, ret);
1378 return GS_ALL_DONE;
1381 /* Gimplify a variable-length array DECL. */
1383 static void
1384 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1386 /* This is a variable-sized decl. Simplify its size and mark it
1387 for deferred expansion. Note that mudflap depends on the format
1388 of the emitted code: see mx_register_decls(). */
1389 tree t, addr, ptr_type;
1391 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1392 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1394 /* All occurrences of this decl in final gimplified code will be
1395 replaced by indirection. Setting DECL_VALUE_EXPR does two
1396 things: First, it lets the rest of the gimplifier know what
1397 replacement to use. Second, it lets the debug info know
1398 where to find the value. */
1399 ptr_type = build_pointer_type (TREE_TYPE (decl));
1400 addr = create_tmp_var (ptr_type, get_name (decl));
1401 DECL_IGNORED_P (addr) = 0;
1402 t = build_fold_indirect_ref (addr);
1403 TREE_THIS_NOTRAP (t) = 1;
1404 SET_DECL_VALUE_EXPR (decl, t);
1405 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1407 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1408 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1409 size_int (DECL_ALIGN (decl)));
1410 /* The call has been built for a variable-sized object. */
1411 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1412 t = fold_convert (ptr_type, t);
1413 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1415 gimplify_and_add (t, seq_p);
1417 /* Indicate that we need to restore the stack level when the
1418 enclosing BIND_EXPR is exited. */
1419 gimplify_ctxp->save_stack = true;
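/* Illustration, not taken from a real dump: for a declaration such as

       char buf[n];

   the code above emits, in essence,

       buf.N = __builtin_alloca_with_align (<size of buf>, <alignment of buf>);

   records "*buf.N" as the DECL_VALUE_EXPR of BUF so that later uses of BUF
   become indirections through the pointer temporary, and requests that the
   stack level be restored when the enclosing GIMPLE_BIND is exited.  */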
1422 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1423 and initialization explicit. */
1425 static enum gimplify_status
1426 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1428 tree stmt = *stmt_p;
1429 tree decl = DECL_EXPR_DECL (stmt);
1431 *stmt_p = NULL_TREE;
1433 if (TREE_TYPE (decl) == error_mark_node)
1434 return GS_ERROR;
1436 if ((TREE_CODE (decl) == TYPE_DECL
1437 || TREE_CODE (decl) == VAR_DECL)
1438 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1439 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1441 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1442 in case its size expressions contain problematic nodes like CALL_EXPR. */
1443 if (TREE_CODE (decl) == TYPE_DECL
1444 && DECL_ORIGINAL_TYPE (decl)
1445 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1446 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1448 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1450 tree init = DECL_INITIAL (decl);
1452 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1453 || (!TREE_STATIC (decl)
1454 && flag_stack_check == GENERIC_STACK_CHECK
1455 && compare_tree_int (DECL_SIZE_UNIT (decl),
1456 STACK_CHECK_MAX_VAR_SIZE) > 0))
1457 gimplify_vla_decl (decl, seq_p);
1459 /* Some front ends do not explicitly declare all anonymous
1460 artificial variables. We compensate here by declaring the
1461 variables, though it would be better if the front ends would
1462 explicitly declare them. */
1463 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1464 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1465 gimple_add_tmp_var (decl);
1467 if (init && init != error_mark_node)
1469 if (!TREE_STATIC (decl))
1471 DECL_INITIAL (decl) = NULL_TREE;
1472 init = build2 (INIT_EXPR, void_type_node, decl, init);
1473 gimplify_and_add (init, seq_p);
1474 ggc_free (init);
1476 else
1477 /* We must still examine initializers for static variables
1478 as they may contain a label address. */
1479 walk_tree (&init, force_labels_r, NULL, NULL);
1483 return GS_ALL_DONE;
1486 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1487 and replacing the LOOP_EXPR with goto, but if the loop contains an
1488 EXIT_EXPR, we need to append a label for it to jump to. */
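/* Invented sketch of the result: a LOOP_EXPR whose body may reach an
   EXIT_EXPR becomes, roughly,

       start_label:
         ... gimplified body, in which the EXIT_EXPR has turned into
             "if (cond) goto exit_label;" ...
         goto start_label;
       exit_label:

   with the exit label emitted only if the body actually created one via
   gimplify_ctxp->exit_label.  */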
1490 static enum gimplify_status
1491 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1493 tree saved_label = gimplify_ctxp->exit_label;
1494 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1496 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1498 gimplify_ctxp->exit_label = NULL_TREE;
1500 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1502 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1504 if (gimplify_ctxp->exit_label)
1505 gimplify_seq_add_stmt (pre_p,
1506 gimple_build_label (gimplify_ctxp->exit_label));
1508 gimplify_ctxp->exit_label = saved_label;
1510 *expr_p = NULL;
1511 return GS_ALL_DONE;
1514 /* Gimplify a statement list onto a sequence. These may be created either
1515 by an enlightened front-end, or by shortcut_cond_expr. */
1517 static enum gimplify_status
1518 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1520 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1522 tree_stmt_iterator i = tsi_start (*expr_p);
1524 while (!tsi_end_p (i))
1526 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1527 tsi_delink (&i);
1530 if (temp)
1532 *expr_p = temp;
1533 return GS_OK;
1536 return GS_ALL_DONE;
1539 /* Compare two case labels. Because the front end should already have
1540 made sure that case ranges do not overlap, it is enough to only compare
1541 the CASE_LOW values of each case label. */
1543 static int
1544 compare_case_labels (const void *p1, const void *p2)
1546 const_tree const case1 = *(const_tree const*)p1;
1547 const_tree const case2 = *(const_tree const*)p2;
1549 /* The 'default' case label always goes first. */
1550 if (!CASE_LOW (case1))
1551 return -1;
1552 else if (!CASE_LOW (case2))
1553 return 1;
1554 else
1555 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1558 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1560 void
1561 sort_case_labels (VEC(tree,heap)* label_vec)
1563 VEC_qsort (tree, label_vec, compare_case_labels);
1566 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1567 branch to. */
1569 static enum gimplify_status
1570 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1572 tree switch_expr = *expr_p;
1573 gimple_seq switch_body_seq = NULL;
1574 enum gimplify_status ret;
1576 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1577 fb_rvalue);
1578 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1579 return ret;
1581 if (SWITCH_BODY (switch_expr))
1583 VEC (tree,heap) *labels;
1584 VEC (tree,heap) *saved_labels;
1585 tree default_case = NULL_TREE;
1586 size_t i, len;
1587 gimple gimple_switch;
1589 /* If someone can be bothered to fill in the labels, they can
1590 be bothered to null out the body too. */
1591 gcc_assert (!SWITCH_LABELS (switch_expr));
1593 /* save old labels, get new ones from body, then restore the old
1594 labels. Save all the things from the switch body to append after. */
1595 saved_labels = gimplify_ctxp->case_labels;
1596 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1598 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1599 labels = gimplify_ctxp->case_labels;
1600 gimplify_ctxp->case_labels = saved_labels;
1602 i = 0;
1603 while (i < VEC_length (tree, labels))
1605 tree elt = VEC_index (tree, labels, i);
1606 tree low = CASE_LOW (elt);
1607 bool remove_element = FALSE;
1609 if (low)
1611 /* Discard empty ranges. */
1612 tree high = CASE_HIGH (elt);
1613 if (high && tree_int_cst_lt (high, low))
1614 remove_element = TRUE;
1616 else
1618 /* The default case must be the last label in the list. */
1619 gcc_assert (!default_case);
1620 default_case = elt;
1621 remove_element = TRUE;
1624 if (remove_element)
1625 VEC_ordered_remove (tree, labels, i);
1626 else
1627 i++;
1629 len = i;
1631 if (!VEC_empty (tree, labels))
1632 sort_case_labels (labels);
1634 if (!default_case)
1636 tree type = TREE_TYPE (switch_expr);
1638 /* If the switch has no default label, add one, so that we jump
1639 around the switch body. If the labels already cover the whole
1640 range of type, add the default label pointing to one of the
1641 existing labels. */
1642 if (type == void_type_node)
1643 type = TREE_TYPE (SWITCH_COND (switch_expr));
1644 if (len
1645 && INTEGRAL_TYPE_P (type)
1646 && TYPE_MIN_VALUE (type)
1647 && TYPE_MAX_VALUE (type)
1648 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1649 TYPE_MIN_VALUE (type)))
1651 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1652 if (!high)
1653 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1654 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1656 for (i = 1; i < len; i++)
1658 high = CASE_LOW (VEC_index (tree, labels, i));
1659 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1660 if (!low)
1661 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1662 if ((TREE_INT_CST_LOW (low) + 1
1663 != TREE_INT_CST_LOW (high))
1664 || (TREE_INT_CST_HIGH (low)
1665 + (TREE_INT_CST_LOW (high) == 0)
1666 != TREE_INT_CST_HIGH (high)))
1667 break;
1669 if (i == len)
1671 tree label = CASE_LABEL (VEC_index (tree, labels, 0));
1672 default_case = build_case_label (NULL_TREE, NULL_TREE,
1673 label);
1678 if (!default_case)
1680 gimple new_default;
1682 default_case
1683 = build_case_label (NULL_TREE, NULL_TREE,
1684 create_artificial_label (UNKNOWN_LOCATION));
1685 new_default = gimple_build_label (CASE_LABEL (default_case));
1686 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1690 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1691 default_case, labels);
1692 gimplify_seq_add_stmt (pre_p, gimple_switch);
1693 gimplify_seq_add_seq (pre_p, switch_body_seq);
1694 VEC_free(tree, heap, labels);
1696 else
1697 gcc_assert (SWITCH_LABELS (switch_expr));
1699 return GS_ALL_DONE;
1702 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1704 static enum gimplify_status
1705 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1707 struct gimplify_ctx *ctxp;
1708 gimple gimple_label;
1710 /* Invalid OpenMP programs can play Duff's Device type games with
1711 #pragma omp parallel. At least in the C front end, we don't
1712 detect such invalid branches until after gimplification. */
1713 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1714 if (ctxp->case_labels)
1715 break;
1717 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1718 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1719 gimplify_seq_add_stmt (pre_p, gimple_label);
1721 return GS_ALL_DONE;
1724 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1725 if necessary. */
1727 tree
1728 build_and_jump (tree *label_p)
1730 if (label_p == NULL)
1731 /* If there's nowhere to jump, just fall through. */
1732 return NULL_TREE;
1734 if (*label_p == NULL_TREE)
1736 tree label = create_artificial_label (UNKNOWN_LOCATION);
1737 *label_p = label;
1740 return build1 (GOTO_EXPR, void_type_node, *label_p);
1743 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1744 This also involves building a label to jump to and communicating it to
1745 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1747 static enum gimplify_status
1748 gimplify_exit_expr (tree *expr_p)
1750 tree cond = TREE_OPERAND (*expr_p, 0);
1751 tree expr;
1753 expr = build_and_jump (&gimplify_ctxp->exit_label);
1754 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1755 *expr_p = expr;
1757 return GS_OK;
1760 /* A helper function to be called via walk_tree. Mark all labels under *TP
1761 as being forced. To be called for DECL_INITIAL of static variables. */
1763 tree
1764 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1766 if (TYPE_P (*tp))
1767 *walk_subtrees = 0;
1768 if (TREE_CODE (*tp) == LABEL_DECL)
1769 FORCED_LABEL (*tp) = 1;
1771 return NULL_TREE;
1774 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1775 different from its canonical type, wrap the whole thing inside a
1776 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1777 type.
1779 The canonical type of a COMPONENT_REF is the type of the field being
1780 referenced--unless the field is a bit-field which can be read directly
1781 in a smaller mode, in which case the canonical type is the
1782 sign-appropriate type corresponding to that mode. */
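/* Illustrative example, not from the original sources: for a conversion
   applied to a bit-field read, such as

       struct S { int f : 3; } s;
       ... (long) s.f ...

   the canonical type of the COMPONENT_REF s.f may be a narrower
   sign-appropriate type rather than the declared "int"; the code below
   retypes the COMPONENT_REF to that canonical type and leaves the enclosing
   conversion to produce the value in the original type.  */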
1784 static void
1785 canonicalize_component_ref (tree *expr_p)
1787 tree expr = *expr_p;
1788 tree type;
1790 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1792 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1793 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1794 else
1795 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1797 /* One could argue that all the stuff below is not necessary for
1798 the non-bitfield case and declare it a FE error if type
1799 adjustment would be needed. */
1800 if (TREE_TYPE (expr) != type)
1802 #ifdef ENABLE_TYPES_CHECKING
1803 tree old_type = TREE_TYPE (expr);
1804 #endif
1805 int type_quals;
1807 /* We need to preserve qualifiers and propagate them from
1808 operand 0. */
1809 type_quals = TYPE_QUALS (type)
1810 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1811 if (TYPE_QUALS (type) != type_quals)
1812 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1814 /* Set the type of the COMPONENT_REF to the underlying type. */
1815 TREE_TYPE (expr) = type;
1817 #ifdef ENABLE_TYPES_CHECKING
1818 /* It is now a FE error, if the conversion from the canonical
1819 type to the original expression type is not useless. */
1820 gcc_assert (useless_type_conversion_p (old_type, type));
1821 #endif
1825 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1826 to foo, embed that change in the ADDR_EXPR by converting
1827 T array[U];
1828 (T *)&array
1829 ==>
1830 &array[L]
1831 where L is the lower bound. For simplicity, only do this for constant
1832 lower bound.
1833 The constraint is that the type of &array[L] is trivially convertible
1834 to T *. */
1836 static void
1837 canonicalize_addr_expr (tree *expr_p)
1839 tree expr = *expr_p;
1840 tree addr_expr = TREE_OPERAND (expr, 0);
1841 tree datype, ddatype, pddatype;
1843 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1844 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1845 || TREE_CODE (addr_expr) != ADDR_EXPR)
1846 return;
1848 /* The addr_expr type should be a pointer to an array. */
1849 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1850 if (TREE_CODE (datype) != ARRAY_TYPE)
1851 return;
1853 /* The pointer to element type shall be trivially convertible to
1854 the expression pointer type. */
1855 ddatype = TREE_TYPE (datype);
1856 pddatype = build_pointer_type (ddatype);
1857 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1858 pddatype))
1859 return;
1861 /* The lower bound and element sizes must be constant. */
1862 if (!TYPE_SIZE_UNIT (ddatype)
1863 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1864 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1865 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1866 return;
1868 /* All checks succeeded. Build a new node to merge the cast. */
1869 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1870 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1871 NULL_TREE, NULL_TREE);
1872 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1874 /* We can have stripped a required restrict qualifier above. */
1875 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1876 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1879 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1880 underneath as appropriate. */
1882 static enum gimplify_status
1883 gimplify_conversion (tree *expr_p)
1885 location_t loc = EXPR_LOCATION (*expr_p);
1886 gcc_assert (CONVERT_EXPR_P (*expr_p));
1888 /* Then strip away all but the outermost conversion. */
1889 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1891 /* And remove the outermost conversion if it's useless. */
1892 if (tree_ssa_useless_type_conversion (*expr_p))
1893 *expr_p = TREE_OPERAND (*expr_p, 0);
1895 /* If we still have a conversion at the toplevel,
1896 then canonicalize some constructs. */
1897 if (CONVERT_EXPR_P (*expr_p))
1899 tree sub = TREE_OPERAND (*expr_p, 0);
1901 /* If a NOP conversion is changing the type of a COMPONENT_REF
1902 expression, then canonicalize its type now in order to expose more
1903 redundant conversions. */
1904 if (TREE_CODE (sub) == COMPONENT_REF)
1905 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1907 /* If a NOP conversion is changing a pointer to array of foo
1908 to a pointer to foo, embed that change in the ADDR_EXPR. */
1909 else if (TREE_CODE (sub) == ADDR_EXPR)
1910 canonicalize_addr_expr (expr_p);
1913 /* If we have a conversion to a non-register type force the
1914 use of a VIEW_CONVERT_EXPR instead. */
1915 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1916 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1917 TREE_OPERAND (*expr_p, 0));
1919 return GS_OK;
1922 /* Nonlocal VLAs seen in the current function. */
1923 static struct pointer_set_t *nonlocal_vlas;
1925 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1926 static tree nonlocal_vla_vars;
1928 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1929 DECL_VALUE_EXPR, and it's worth re-examining things. */
1931 static enum gimplify_status
1932 gimplify_var_or_parm_decl (tree *expr_p)
1934 tree decl = *expr_p;
1936 /* ??? If this is a local variable, and it has not been seen in any
1937 outer BIND_EXPR, then it's probably the result of a duplicate
1938 declaration, for which we've already issued an error. It would
1939 be really nice if the front end wouldn't leak these at all.
1940 Currently the only known culprit is C++ destructors, as seen
1941 in g++.old-deja/g++.jason/binding.C. */
1942 if (TREE_CODE (decl) == VAR_DECL
1943 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1944 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1945 && decl_function_context (decl) == current_function_decl)
1947 gcc_assert (seen_error ());
1948 return GS_ERROR;
1951 /* When within an OpenMP context, notice uses of variables. */
1952 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1953 return GS_ALL_DONE;
1955 /* If the decl is an alias for another expression, substitute it now. */
1956 if (DECL_HAS_VALUE_EXPR_P (decl))
1958 tree value_expr = DECL_VALUE_EXPR (decl);
1960 /* For referenced nonlocal VLAs add a decl for debugging purposes
1961 to the current function. */
1962 if (TREE_CODE (decl) == VAR_DECL
1963 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1964 && nonlocal_vlas != NULL
1965 && TREE_CODE (value_expr) == INDIRECT_REF
1966 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1967 && decl_function_context (decl) != current_function_decl)
1969 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1970 while (ctx && ctx->region_type == ORT_WORKSHARE)
1971 ctx = ctx->outer_context;
1972 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1974 tree copy = copy_node (decl);
1976 lang_hooks.dup_lang_specific_decl (copy);
1977 SET_DECL_RTL (copy, 0);
1978 TREE_USED (copy) = 1;
1979 DECL_CHAIN (copy) = nonlocal_vla_vars;
1980 nonlocal_vla_vars = copy;
1981 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1982 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1986 *expr_p = unshare_expr (value_expr);
1987 return GS_OK;
1990 return GS_ALL_DONE;
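/* An illustrative sketch of the DECL_VALUE_EXPR substitution, assuming a
   C front end that lowers a VLA 'int a[n]' to a pointer dereference: the
   VAR_DECL 'a' carries a value expression of the form *a.p (the name a.p
   is hypothetical), so a use of 'a' gimplifies into an unshared copy of
   *a.p instead; for a referenced nonlocal VLA a debug-only copy of the
   decl is also chained onto nonlocal_vla_vars as above.  */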
1993 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1994 node *EXPR_P.
1996 compound_lval
1997 : min_lval '[' val ']'
1998 | min_lval '.' ID
1999 | compound_lval '[' val ']'
2000 | compound_lval '.' ID
2002 This is not part of the original SIMPLE definition, which separates
2003 array and member references, but it seems reasonable to handle them
2004 together. Also, this way we don't run into problems with union
2005 aliasing; gcc requires that for accesses through a union to alias, the
2006 union reference must be explicit, which was not always the case when we
2007 were splitting up array and member refs.
2009 PRE_P points to the sequence where side effects that must happen before
2010 *EXPR_P should be stored.
2012 POST_P points to the sequence where side effects that must happen after
2013 *EXPR_P should be stored. */
2015 static enum gimplify_status
2016 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2017 fallback_t fallback)
2019 tree *p;
2020 VEC(tree,heap) *stack;
2021 enum gimplify_status ret = GS_ALL_DONE, tret;
2022 int i;
2023 location_t loc = EXPR_LOCATION (*expr_p);
2024 tree expr = *expr_p;
2026 /* Create a stack of the subexpressions so later we can walk them in
2027 order from inner to outer. */
2028 stack = VEC_alloc (tree, heap, 10);
2030 /* We can handle anything that get_inner_reference can deal with. */
2031 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2033 restart:
2034 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2035 if (TREE_CODE (*p) == INDIRECT_REF)
2036 *p = fold_indirect_ref_loc (loc, *p);
2038 if (handled_component_p (*p))
2040 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2041 additional COMPONENT_REFs. */
2042 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2043 && gimplify_var_or_parm_decl (p) == GS_OK)
2044 goto restart;
2045 else
2046 break;
2048 VEC_safe_push (tree, heap, stack, *p);
2051 gcc_assert (VEC_length (tree, stack));
2053 /* Now STACK is a stack of pointers to all the refs we've walked through
2054 and P points to the innermost expression.
2056 Java requires that we elaborate nodes in source order. That
2057 means we must gimplify the inner expression followed by each of
2058 the indices, in order. But we can't gimplify the inner
2059 expression until we deal with any variable bounds, sizes, or
2060 positions in order to deal with PLACEHOLDER_EXPRs.
2062 So we do this in three steps. First we deal with the annotations
2063 for any variables in the components, then we gimplify the base,
2064 then we gimplify any indices, from left to right. */
2065 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
2067 tree t = VEC_index (tree, stack, i);
2069 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2071 /* Gimplify the low bound and element type size and put them into
2072 the ARRAY_REF. If these values are set, they have already been
2073 gimplified. */
2074 if (TREE_OPERAND (t, 2) == NULL_TREE)
2076 tree low = unshare_expr (array_ref_low_bound (t));
2077 if (!is_gimple_min_invariant (low))
2079 TREE_OPERAND (t, 2) = low;
2080 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2081 post_p, is_gimple_reg,
2082 fb_rvalue);
2083 ret = MIN (ret, tret);
2086 else
2088 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2089 is_gimple_reg, fb_rvalue);
2090 ret = MIN (ret, tret);
2093 if (TREE_OPERAND (t, 3) == NULL_TREE)
2095 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2096 tree elmt_size = unshare_expr (array_ref_element_size (t));
2097 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2099 /* Divide the element size by the alignment of the element
2100 type (above). */
2101 elmt_size
2102 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2104 if (!is_gimple_min_invariant (elmt_size))
2106 TREE_OPERAND (t, 3) = elmt_size;
2107 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2108 post_p, is_gimple_reg,
2109 fb_rvalue);
2110 ret = MIN (ret, tret);
2113 else
2115 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2116 is_gimple_reg, fb_rvalue);
2117 ret = MIN (ret, tret);
2120 else if (TREE_CODE (t) == COMPONENT_REF)
2122 /* Set the field offset into T and gimplify it. */
2123 if (TREE_OPERAND (t, 2) == NULL_TREE)
2125 tree offset = unshare_expr (component_ref_field_offset (t));
2126 tree field = TREE_OPERAND (t, 1);
2127 tree factor
2128 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2130 /* Divide the offset by its alignment. */
2131 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2133 if (!is_gimple_min_invariant (offset))
2135 TREE_OPERAND (t, 2) = offset;
2136 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2137 post_p, is_gimple_reg,
2138 fb_rvalue);
2139 ret = MIN (ret, tret);
2142 else
2144 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2145 is_gimple_reg, fb_rvalue);
2146 ret = MIN (ret, tret);
2151 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2152 so as to match the min_lval predicate. Failure to do so may result
2153 in the creation of large aggregate temporaries. */
2154 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2155 fallback | fb_lvalue);
2156 ret = MIN (ret, tret);
2158 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2159 loop we also remove any useless conversions. */
2160 for (; VEC_length (tree, stack) > 0; )
2162 tree t = VEC_pop (tree, stack);
2164 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2166 /* Gimplify the dimension. */
2167 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2169 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2170 is_gimple_val, fb_rvalue);
2171 ret = MIN (ret, tret);
2174 else if (TREE_CODE (t) == BIT_FIELD_REF)
2176 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2177 is_gimple_val, fb_rvalue);
2178 ret = MIN (ret, tret);
2179 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2180 is_gimple_val, fb_rvalue);
2181 ret = MIN (ret, tret);
2184 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2186 /* The innermost expression P may have originally had
2187 TREE_SIDE_EFFECTS set which would have caused all the outer
2188 expressions in *EXPR_P leading to P to also have had
2189 TREE_SIDE_EFFECTS set. */
2190 recalculate_side_effects (t);
2193 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2194 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2196 canonicalize_component_ref (expr_p);
2199 VEC_free (tree, heap, stack);
2201 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2203 return ret;
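/* An illustrative sketch of the three steps above, on the hypothetical
   reference s.arr[i + 1].f with a constant-size element type:

     step 1: nothing to do, bounds and sizes are constant;
     step 2: gimplify the base 's' as a minimal lvalue;
     step 3: gimplify the index, roughly producing

       t = i + 1;
       ... s.arr[t].f ...

   where 't' is a temporary; variable bounds or offsets would instead be
   gimplified into operands 2/3 during step 1.  */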
2206 /* Gimplify the self modifying expression pointed to by EXPR_P
2207 (++, --, +=, -=).
2209 PRE_P points to the list where side effects that must happen before
2210 *EXPR_P should be stored.
2212 POST_P points to the list where side effects that must happen after
2213 *EXPR_P should be stored.
2215 WANT_VALUE is nonzero iff we want to use the value of this expression
2216 in another expression. */
2218 static enum gimplify_status
2219 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2220 bool want_value)
2222 enum tree_code code;
2223 tree lhs, lvalue, rhs, t1;
2224 gimple_seq post = NULL, *orig_post_p = post_p;
2225 bool postfix;
2226 enum tree_code arith_code;
2227 enum gimplify_status ret;
2228 location_t loc = EXPR_LOCATION (*expr_p);
2230 code = TREE_CODE (*expr_p);
2232 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2233 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2235 /* Prefix or postfix? */
2236 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2237 /* Faster to treat as prefix if result is not used. */
2238 postfix = want_value;
2239 else
2240 postfix = false;
2242 /* For postfix, make sure the inner expression's post side effects
2243 are executed after side effects from this expression. */
2244 if (postfix)
2245 post_p = &post;
2247 /* Add or subtract? */
2248 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2249 arith_code = PLUS_EXPR;
2250 else
2251 arith_code = MINUS_EXPR;
2253 /* Gimplify the LHS into a GIMPLE lvalue. */
2254 lvalue = TREE_OPERAND (*expr_p, 0);
2255 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2256 if (ret == GS_ERROR)
2257 return ret;
2259 /* Extract the operands to the arithmetic operation. */
2260 lhs = lvalue;
2261 rhs = TREE_OPERAND (*expr_p, 1);
2263 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2264 that as the result value and in the postqueue operation. We also
2265 make sure to make lvalue a minimal lval, see
2266 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2267 if (postfix)
2269 if (!is_gimple_min_lval (lvalue))
2271 mark_addressable (lvalue);
2272 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2273 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2274 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2276 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2277 if (ret == GS_ERROR)
2278 return ret;
2281 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2282 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2284 rhs = convert_to_ptrofftype_loc (loc, rhs);
2285 if (arith_code == MINUS_EXPR)
2286 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2287 arith_code = POINTER_PLUS_EXPR;
2290 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2292 if (postfix)
2294 gimplify_assign (lvalue, t1, orig_post_p);
2295 gimplify_seq_add_seq (orig_post_p, post);
2296 *expr_p = lhs;
2297 return GS_ALL_DONE;
2299 else
2301 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2302 return GS_OK;
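/* An illustrative sketch: a postfix increment whose value is used, say

     y = x++;

   is handled above roughly as

     t = x;       (the LHS evaluated to an rvalue first)
     y = t;       (the saved value is the result)
     x = t + 1;   (queued on the post queue, emitted after the use)

   whereas a bare 'x++;' is treated as the cheaper prefix form.  */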
2306 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2308 static void
2309 maybe_with_size_expr (tree *expr_p)
2311 tree expr = *expr_p;
2312 tree type = TREE_TYPE (expr);
2313 tree size;
2315 /* If we've already wrapped this or the type is error_mark_node, we can't do
2316 anything. */
2317 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2318 || type == error_mark_node)
2319 return;
2321 /* If the size isn't known or is a constant, we have nothing to do. */
2322 size = TYPE_SIZE_UNIT (type);
2323 if (!size || TREE_CODE (size) == INTEGER_CST)
2324 return;
2326 /* Otherwise, make a WITH_SIZE_EXPR. */
2327 size = unshare_expr (size);
2328 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2329 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
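/* An illustrative sketch: for an object whose type has a non-constant
   TYPE_SIZE_UNIT (a variable-sized aggregate), the expression is wrapped
   as

     WITH_SIZE_EXPR <expr, size-in-bytes>

   where the size operand is the unshared TYPE_SIZE_UNIT with any
   PLACEHOLDER_EXPRs substituted from 'expr', so later code (argument
   passing, block copies) still knows how many bytes are involved.  */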
2332 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2333 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2334 the CALL_EXPR. */
2336 static enum gimplify_status
2337 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2339 bool (*test) (tree);
2340 fallback_t fb;
2342 /* In general, we allow lvalues for function arguments to avoid
2343 extra overhead of copying large aggregates out of even larger
2344 aggregates into temporaries only to copy the temporaries to
2345 the argument list. Make optimizers happy by pulling out to
2346 temporaries those types that fit in registers. */
2347 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2348 test = is_gimple_val, fb = fb_rvalue;
2349 else
2351 test = is_gimple_lvalue, fb = fb_either;
2352 /* Also strip a TARGET_EXPR that would force an extra copy. */
2353 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2355 tree init = TARGET_EXPR_INITIAL (*arg_p);
2356 if (init
2357 && !VOID_TYPE_P (TREE_TYPE (init)))
2358 *arg_p = init;
2362 /* If this is a variable sized type, we must remember the size. */
2363 maybe_with_size_expr (arg_p);
2365 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2366 /* Make sure arguments have the same location as the function call
2367 itself. */
2368 protected_set_expr_location (*arg_p, call_location);
2370 /* There is a sequence point before a function call. Side effects in
2371 the argument list must occur before the actual call. So, when
2372 gimplifying arguments, force gimplify_expr to use an internal
2373 post queue which is then appended to the end of PRE_P. */
2374 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2377 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2378 WANT_VALUE is true if the result of the call is desired. */
2380 static enum gimplify_status
2381 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2383 tree fndecl, parms, p, fnptrtype;
2384 enum gimplify_status ret;
2385 int i, nargs;
2386 gimple call;
2387 bool builtin_va_start_p = FALSE;
2388 location_t loc = EXPR_LOCATION (*expr_p);
2390 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2392 /* For reliable diagnostics during inlining, it is necessary that
2393 every call_expr be annotated with file and line. */
2394 if (! EXPR_HAS_LOCATION (*expr_p))
2395 SET_EXPR_LOCATION (*expr_p, input_location);
2397 /* This may be a call to a builtin function.
2399 Builtin function calls may be transformed into different
2400 (and more efficient) builtin function calls under certain
2401 circumstances. Unfortunately, gimplification can muck things
2402 up enough that the builtin expanders are not aware that certain
2403 transformations are still valid.
2405 So we attempt transformation/gimplification of the call before
2406 we gimplify the CALL_EXPR. At this time we do not manage to
2407 transform all calls in the same manner as the expanders do, but
2408 we do transform most of them. */
2409 fndecl = get_callee_fndecl (*expr_p);
2410 if (fndecl && DECL_BUILT_IN (fndecl))
2412 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2414 if (new_tree && new_tree != *expr_p)
2416 /* There was a transformation of this call which computes the
2417 same value, but in a more efficient way. Return and try
2418 again. */
2419 *expr_p = new_tree;
2420 return GS_OK;
2423 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2424 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2426 builtin_va_start_p = TRUE;
2427 if (call_expr_nargs (*expr_p) < 2)
2429 error ("too few arguments to function %<va_start%>");
2430 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2431 return GS_OK;
2434 if (fold_builtin_next_arg (*expr_p, true))
2436 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2437 return GS_OK;
2442 /* Remember the original function pointer type. */
2443 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2445 /* There is a sequence point before the call, so any side effects in
2446 the calling expression must occur before the actual call. Force
2447 gimplify_expr to use an internal post queue. */
2448 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2449 is_gimple_call_addr, fb_rvalue);
2451 nargs = call_expr_nargs (*expr_p);
2453 /* Get argument types for verification. */
2454 fndecl = get_callee_fndecl (*expr_p);
2455 parms = NULL_TREE;
2456 if (fndecl)
2457 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2458 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2459 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2461 if (fndecl && DECL_ARGUMENTS (fndecl))
2462 p = DECL_ARGUMENTS (fndecl);
2463 else if (parms)
2464 p = parms;
2465 else
2466 p = NULL_TREE;
2467 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2468 ;
2470 /* If the last argument is __builtin_va_arg_pack () and it is not
2471 passed as a named argument, decrease the number of CALL_EXPR
2472 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2473 if (!p
2474 && i < nargs
2475 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2477 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2478 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2480 if (last_arg_fndecl
2481 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2482 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2483 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2485 tree call = *expr_p;
2487 --nargs;
2488 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2489 CALL_EXPR_FN (call),
2490 nargs, CALL_EXPR_ARGP (call));
2492 /* Copy all CALL_EXPR flags, location and block, except
2493 CALL_EXPR_VA_ARG_PACK flag. */
2494 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2495 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2496 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2497 = CALL_EXPR_RETURN_SLOT_OPT (call);
2498 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2499 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2500 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2502 /* Set CALL_EXPR_VA_ARG_PACK. */
2503 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2507 /* Finally, gimplify the function arguments. */
2508 if (nargs > 0)
2510 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2511 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2512 PUSH_ARGS_REVERSED ? i-- : i++)
2514 enum gimplify_status t;
2516 /* Avoid gimplifying the second argument to va_start, which needs to
2517 be the plain PARM_DECL. */
2518 if ((i != 1) || !builtin_va_start_p)
2520 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2521 EXPR_LOCATION (*expr_p));
2523 if (t == GS_ERROR)
2524 ret = GS_ERROR;
2529 /* Verify the function result. */
2530 if (want_value && fndecl
2531 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2533 error_at (loc, "using result of function returning %<void%>");
2534 ret = GS_ERROR;
2537 /* Try this again in case gimplification exposed something. */
2538 if (ret != GS_ERROR)
2540 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2542 if (new_tree && new_tree != *expr_p)
2544 /* There was a transformation of this call which computes the
2545 same value, but in a more efficient way. Return and try
2546 again. */
2547 *expr_p = new_tree;
2548 return GS_OK;
2551 else
2553 *expr_p = error_mark_node;
2554 return GS_ERROR;
2557 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2558 CALL_EXPR node. This allows us to eliminate redundant or useless
2559 calls to "const" functions. */
2560 if (TREE_CODE (*expr_p) == CALL_EXPR)
2562 int flags = call_expr_flags (*expr_p);
2563 if (flags & (ECF_CONST | ECF_PURE)
2564 /* An infinite loop is considered a side effect. */
2565 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2566 TREE_SIDE_EFFECTS (*expr_p) = 0;
2569 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2570 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2571 form and delegate the creation of a GIMPLE_CALL to
2572 gimplify_modify_expr. This is always possible because when
2573 WANT_VALUE is true, the caller wants the result of this call into
2574 a temporary, which means that we will emit an INIT_EXPR in
2575 internal_get_tmp_var which will then be handled by
2576 gimplify_modify_expr. */
2577 if (!want_value)
2579 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2580 have to do is replicate it as a GIMPLE_CALL tuple. */
2581 gimple_stmt_iterator gsi;
2582 call = gimple_build_call_from_tree (*expr_p);
2583 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2584 gimplify_seq_add_stmt (pre_p, call);
2585 gsi = gsi_last (*pre_p);
2586 fold_stmt (&gsi);
2587 *expr_p = NULL_TREE;
2589 else
2590 /* Remember the original function type. */
2591 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2592 CALL_EXPR_FN (*expr_p));
2594 return ret;
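/* An illustrative sketch of call gimplification, with hypothetical names:

     foo (bar (x));

   roughly becomes

     t = bar (x);   (argument side effects forced into the pre queue)
     foo (t);       (emitted directly as a GIMPLE_CALL, since the value
                     is not wanted)

   while 'y = foo (...)' leaves the gimplified CALL_EXPR in place so that
   gimplify_modify_expr can build the GIMPLE_CALL with 'y' as its lhs.  */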
2597 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2598 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2600 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2601 condition is true or false, respectively. If null, we should generate
2602 our own to skip over the evaluation of this specific expression.
2604 LOCUS is the source location of the COND_EXPR.
2606 This function is the tree equivalent of do_jump.
2608 shortcut_cond_r should only be called by shortcut_cond_expr. */
2610 static tree
2611 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2612 location_t locus)
2614 tree local_label = NULL_TREE;
2615 tree t, expr = NULL;
2617 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2618 retain the shortcut semantics. Just insert the gotos here;
2619 shortcut_cond_expr will append the real blocks later. */
2620 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2622 location_t new_locus;
2624 /* Turn if (a && b) into
2626 if (a); else goto no;
2627 if (b) goto yes; else goto no;
2628 (no:) */
2630 if (false_label_p == NULL)
2631 false_label_p = &local_label;
2633 /* Keep the original source location on the first 'if'. */
2634 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2635 append_to_statement_list (t, &expr);
2637 /* Set the source location of the && on the second 'if'. */
2638 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2639 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2640 new_locus);
2641 append_to_statement_list (t, &expr);
2643 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2645 location_t new_locus;
2647 /* Turn if (a || b) into
2649 if (a) goto yes;
2650 if (b) goto yes; else goto no;
2651 (yes:) */
2653 if (true_label_p == NULL)
2654 true_label_p = &local_label;
2656 /* Keep the original source location on the first 'if'. */
2657 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2658 append_to_statement_list (t, &expr);
2660 /* Set the source location of the || on the second 'if'. */
2661 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2662 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2663 new_locus);
2664 append_to_statement_list (t, &expr);
2666 else if (TREE_CODE (pred) == COND_EXPR
2667 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2668 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2670 location_t new_locus;
2672 /* As long as we're messing with gotos, turn if (a ? b : c) into
2673 if (a)
2674 if (b) goto yes; else goto no;
2675 else
2676 if (c) goto yes; else goto no;
2678 Don't do this if one of the arms has void type, which can happen
2679 in C++ when the arm is throw. */
2681 /* Keep the original source location on the first 'if'. Set the source
2682 location of the ? on the second 'if'. */
2683 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2684 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2685 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2686 false_label_p, locus),
2687 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2688 false_label_p, new_locus));
2690 else
2692 expr = build3 (COND_EXPR, void_type_node, pred,
2693 build_and_jump (true_label_p),
2694 build_and_jump (false_label_p));
2695 SET_EXPR_LOCATION (expr, locus);
2698 if (local_label)
2700 t = build1 (LABEL_EXPR, void_type_node, local_label);
2701 append_to_statement_list (t, &expr);
2704 return expr;
2707 /* Given a conditional expression EXPR with short-circuit boolean
2708 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2709 predicate apart into the equivalent sequence of conditionals. */
2711 static tree
2712 shortcut_cond_expr (tree expr)
2714 tree pred = TREE_OPERAND (expr, 0);
2715 tree then_ = TREE_OPERAND (expr, 1);
2716 tree else_ = TREE_OPERAND (expr, 2);
2717 tree true_label, false_label, end_label, t;
2718 tree *true_label_p;
2719 tree *false_label_p;
2720 bool emit_end, emit_false, jump_over_else;
2721 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2722 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2724 /* First do simple transformations. */
2725 if (!else_se)
2727 /* If there is no 'else', turn
2728 if (a && b) then c
2729 into
2730 if (a) if (b) then c. */
2731 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2733 /* Keep the original source location on the first 'if'. */
2734 location_t locus = EXPR_LOC_OR_HERE (expr);
2735 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2736 /* Set the source location of the && on the second 'if'. */
2737 if (EXPR_HAS_LOCATION (pred))
2738 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2739 then_ = shortcut_cond_expr (expr);
2740 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2741 pred = TREE_OPERAND (pred, 0);
2742 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2743 SET_EXPR_LOCATION (expr, locus);
2747 if (!then_se)
2749 /* If there is no 'then', turn
2750 if (a || b); else d
2751 into
2752 if (a); else if (b); else d. */
2753 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2755 /* Keep the original source location on the first 'if'. */
2756 location_t locus = EXPR_LOC_OR_HERE (expr);
2757 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2758 /* Set the source location of the || on the second 'if'. */
2759 if (EXPR_HAS_LOCATION (pred))
2760 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2761 else_ = shortcut_cond_expr (expr);
2762 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2763 pred = TREE_OPERAND (pred, 0);
2764 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2765 SET_EXPR_LOCATION (expr, locus);
2769 /* If we're done, great. */
2770 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2771 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2772 return expr;
2774 /* Otherwise we need to mess with gotos. Change
2775 if (a) c; else d;
2777 if (a); else goto no;
2778 c; goto end;
2779 no: d; end:
2780 and recursively gimplify the condition. */
2782 true_label = false_label = end_label = NULL_TREE;
2784 /* If our arms just jump somewhere, hijack those labels so we don't
2785 generate jumps to jumps. */
2787 if (then_
2788 && TREE_CODE (then_) == GOTO_EXPR
2789 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2791 true_label = GOTO_DESTINATION (then_);
2792 then_ = NULL;
2793 then_se = false;
2796 if (else_
2797 && TREE_CODE (else_) == GOTO_EXPR
2798 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2800 false_label = GOTO_DESTINATION (else_);
2801 else_ = NULL;
2802 else_se = false;
2805 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2806 if (true_label)
2807 true_label_p = &true_label;
2808 else
2809 true_label_p = NULL;
2811 /* The 'else' branch also needs a label if it contains interesting code. */
2812 if (false_label || else_se)
2813 false_label_p = &false_label;
2814 else
2815 false_label_p = NULL;
2817 /* If there was nothing else in our arms, just forward the label(s). */
2818 if (!then_se && !else_se)
2819 return shortcut_cond_r (pred, true_label_p, false_label_p,
2820 EXPR_LOC_OR_HERE (expr));
2822 /* If our last subexpression already has a terminal label, reuse it. */
2823 if (else_se)
2824 t = expr_last (else_);
2825 else if (then_se)
2826 t = expr_last (then_);
2827 else
2828 t = NULL;
2829 if (t && TREE_CODE (t) == LABEL_EXPR)
2830 end_label = LABEL_EXPR_LABEL (t);
2832 /* If we don't care about jumping to the 'else' branch, jump to the end
2833 if the condition is false. */
2834 if (!false_label_p)
2835 false_label_p = &end_label;
2837 /* We only want to emit these labels if we aren't hijacking them. */
2838 emit_end = (end_label == NULL_TREE);
2839 emit_false = (false_label == NULL_TREE);
2841 /* We only emit the jump over the else clause if we have to--if the
2842 then clause may fall through. Otherwise we can wind up with a
2843 useless jump and a useless label at the end of gimplified code,
2844 which will cause us to think that this conditional as a whole
2845 falls through even if it doesn't. If we then inline a function
2846 which ends with such a condition, that can cause us to issue an
2847 inappropriate warning about control reaching the end of a
2848 non-void function. */
2849 jump_over_else = block_may_fallthru (then_);
2851 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2852 EXPR_LOC_OR_HERE (expr));
2854 expr = NULL;
2855 append_to_statement_list (pred, &expr);
2857 append_to_statement_list (then_, &expr);
2858 if (else_se)
2860 if (jump_over_else)
2862 tree last = expr_last (expr);
2863 t = build_and_jump (&end_label);
2864 if (EXPR_HAS_LOCATION (last))
2865 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2866 append_to_statement_list (t, &expr);
2868 if (emit_false)
2870 t = build1 (LABEL_EXPR, void_type_node, false_label);
2871 append_to_statement_list (t, &expr);
2873 append_to_statement_list (else_, &expr);
2875 if (emit_end && end_label)
2877 t = build1 (LABEL_EXPR, void_type_node, end_label);
2878 append_to_statement_list (t, &expr);
2881 return expr;
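/* An illustrative sketch of the expansion above (label names are
   hypothetical; the real ones are artificial labels):

     if (a || b) c (); else d ();

   becomes roughly

     if (a) goto yes;
     if (b) goto yes; else goto no;
     yes: c (); goto end;
     no:  d ();
     end: ;  */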
2884 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2886 tree
2887 gimple_boolify (tree expr)
2889 tree type = TREE_TYPE (expr);
2890 location_t loc = EXPR_LOCATION (expr);
2892 if (TREE_CODE (expr) == NE_EXPR
2893 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2894 && integer_zerop (TREE_OPERAND (expr, 1)))
2896 tree call = TREE_OPERAND (expr, 0);
2897 tree fn = get_callee_fndecl (call);
2899 /* For __builtin_expect ((long) (x), y) recurse into x as well
2900 if x is truth_value_p. */
2901 if (fn
2902 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2903 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2904 && call_expr_nargs (call) == 2)
2906 tree arg = CALL_EXPR_ARG (call, 0);
2907 if (arg)
2909 if (TREE_CODE (arg) == NOP_EXPR
2910 && TREE_TYPE (arg) == TREE_TYPE (call))
2911 arg = TREE_OPERAND (arg, 0);
2912 if (truth_value_p (TREE_CODE (arg)))
2914 arg = gimple_boolify (arg);
2915 CALL_EXPR_ARG (call, 0)
2916 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2922 switch (TREE_CODE (expr))
2924 case TRUTH_AND_EXPR:
2925 case TRUTH_OR_EXPR:
2926 case TRUTH_XOR_EXPR:
2927 case TRUTH_ANDIF_EXPR:
2928 case TRUTH_ORIF_EXPR:
2929 /* Also boolify the arguments of truth exprs. */
2930 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2931 /* FALLTHRU */
2933 case TRUTH_NOT_EXPR:
2934 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2936 /* These expressions always produce boolean results. */
2937 if (TREE_CODE (type) != BOOLEAN_TYPE)
2938 TREE_TYPE (expr) = boolean_type_node;
2939 return expr;
2941 default:
2942 if (COMPARISON_CLASS_P (expr))
2944 /* These expressions always produce boolean results. */
2945 if (TREE_CODE (type) != BOOLEAN_TYPE)
2946 TREE_TYPE (expr) = boolean_type_node;
2947 return expr;
2949 /* Other expressions that get here must have boolean values, but
2950 might need to be converted to the appropriate mode. */
2951 if (TREE_CODE (type) == BOOLEAN_TYPE)
2952 return expr;
2953 return fold_convert_loc (loc, boolean_type_node, expr);
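/* An illustrative sketch: for an integer operand used as a truth value,
   e.g. the 'a' in 'a && b', the default case above just folds a
   conversion, roughly

     (boolean) a

   while comparisons and TRUTH_* operators merely have their result type
   switched to boolean_type_node.  */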
2957 /* Given a conditional expression *EXPR_P without side effects, gimplify
2958 its operands. New statements are inserted to PRE_P. */
2960 static enum gimplify_status
2961 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2963 tree expr = *expr_p, cond;
2964 enum gimplify_status ret, tret;
2965 enum tree_code code;
2967 cond = gimple_boolify (COND_EXPR_COND (expr));
2969 /* We need to handle && and || specially, as their gimplification
2970 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2971 code = TREE_CODE (cond);
2972 if (code == TRUTH_ANDIF_EXPR)
2973 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2974 else if (code == TRUTH_ORIF_EXPR)
2975 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2976 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2977 COND_EXPR_COND (*expr_p) = cond;
2979 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2980 is_gimple_val, fb_rvalue);
2981 ret = MIN (ret, tret);
2982 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2983 is_gimple_val, fb_rvalue);
2985 return MIN (ret, tret);
2988 /* Return true if evaluating EXPR could trap.
2989 EXPR is GENERIC, while tree_could_trap_p can be called
2990 only on GIMPLE. */
2992 static bool
2993 generic_expr_could_trap_p (tree expr)
2995 unsigned i, n;
2997 if (!expr || is_gimple_val (expr))
2998 return false;
3000 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3001 return true;
3003 n = TREE_OPERAND_LENGTH (expr);
3004 for (i = 0; i < n; i++)
3005 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3006 return true;
3008 return false;
3011 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3012 into
3014 if (p)              if (p)
3015   t1 = a;              a;
3016 else          or     else
3017   t1 = b;              b;
3018 t1;
3020 The second form is used when *EXPR_P is of type void.
3022 PRE_P points to the list where side effects that must happen before
3023 *EXPR_P should be stored. */
3025 static enum gimplify_status
3026 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3028 tree expr = *expr_p;
3029 tree type = TREE_TYPE (expr);
3030 location_t loc = EXPR_LOCATION (expr);
3031 tree tmp, arm1, arm2;
3032 enum gimplify_status ret;
3033 tree label_true, label_false, label_cont;
3034 bool have_then_clause_p, have_else_clause_p;
3035 gimple gimple_cond;
3036 enum tree_code pred_code;
3037 gimple_seq seq = NULL;
3039 /* If this COND_EXPR has a value, copy the values into a temporary within
3040 the arms. */
3041 if (!VOID_TYPE_P (type))
3043 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3044 tree result;
3046 /* If either an rvalue is ok or we do not require an lvalue, create the
3047 temporary. But we cannot do that if the type is addressable. */
3048 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3049 && !TREE_ADDRESSABLE (type))
3051 if (gimplify_ctxp->allow_rhs_cond_expr
3052 /* If either branch has side effects or could trap, it can't be
3053 evaluated unconditionally. */
3054 && !TREE_SIDE_EFFECTS (then_)
3055 && !generic_expr_could_trap_p (then_)
3056 && !TREE_SIDE_EFFECTS (else_)
3057 && !generic_expr_could_trap_p (else_))
3058 return gimplify_pure_cond_expr (expr_p, pre_p);
3060 tmp = create_tmp_var (type, "iftmp");
3061 result = tmp;
3064 /* Otherwise, only create and copy references to the values. */
3065 else
3067 type = build_pointer_type (type);
3069 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3070 then_ = build_fold_addr_expr_loc (loc, then_);
3072 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3073 else_ = build_fold_addr_expr_loc (loc, else_);
3075 expr
3076 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3078 tmp = create_tmp_var (type, "iftmp");
3079 result = build_simple_mem_ref_loc (loc, tmp);
3082 /* Build the new then clause, `tmp = then_;'. But don't build the
3083 assignment if the value is void; in C++ it can be if it's a throw. */
3084 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3085 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3087 /* Similarly, build the new else clause, `tmp = else_;'. */
3088 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3089 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3091 TREE_TYPE (expr) = void_type_node;
3092 recalculate_side_effects (expr);
3094 /* Move the COND_EXPR to the prequeue. */
3095 gimplify_stmt (&expr, pre_p);
3097 *expr_p = result;
3098 return GS_ALL_DONE;
3101 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3102 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3103 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3104 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3106 /* Make sure the condition has BOOLEAN_TYPE. */
3107 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3109 /* Break apart && and || conditions. */
3110 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3111 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3113 expr = shortcut_cond_expr (expr);
3115 if (expr != *expr_p)
3117 *expr_p = expr;
3119 /* We can't rely on gimplify_expr to re-gimplify the expanded
3120 form properly, as cleanups might cause the target labels to be
3121 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3122 set up a conditional context. */
3123 gimple_push_condition ();
3124 gimplify_stmt (expr_p, &seq);
3125 gimple_pop_condition (pre_p);
3126 gimple_seq_add_seq (pre_p, seq);
3128 return GS_ALL_DONE;
3132 /* Now do the normal gimplification. */
3134 /* Gimplify condition. */
3135 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3136 fb_rvalue);
3137 if (ret == GS_ERROR)
3138 return GS_ERROR;
3139 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3141 gimple_push_condition ();
3143 have_then_clause_p = have_else_clause_p = false;
3144 if (TREE_OPERAND (expr, 1) != NULL
3145 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3146 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3147 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3148 == current_function_decl)
3149 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3150 have different locations, otherwise we end up with incorrect
3151 location information on the branches. */
3152 && (optimize
3153 || !EXPR_HAS_LOCATION (expr)
3154 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3155 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3157 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3158 have_then_clause_p = true;
3160 else
3161 label_true = create_artificial_label (UNKNOWN_LOCATION);
3162 if (TREE_OPERAND (expr, 2) != NULL
3163 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3164 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3165 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3166 == current_function_decl)
3167 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3168 have different locations, otherwise we end up with incorrect
3169 location information on the branches. */
3170 && (optimize
3171 || !EXPR_HAS_LOCATION (expr)
3172 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3173 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3175 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3176 have_else_clause_p = true;
3178 else
3179 label_false = create_artificial_label (UNKNOWN_LOCATION);
3181 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3182 &arm2);
3184 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3185 label_false);
3187 gimplify_seq_add_stmt (&seq, gimple_cond);
3188 label_cont = NULL_TREE;
3189 if (!have_then_clause_p)
3191 /* For if (...) {} else { code; } put label_true after
3192 the else block. */
3193 if (TREE_OPERAND (expr, 1) == NULL_TREE
3194 && !have_else_clause_p
3195 && TREE_OPERAND (expr, 2) != NULL_TREE)
3196 label_cont = label_true;
3197 else
3199 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3200 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3201 /* For if (...) { code; } else {} or
3202 if (...) { code; } else goto label; or
3203 if (...) { code; return; } else { ... }
3204 label_cont isn't needed. */
3205 if (!have_else_clause_p
3206 && TREE_OPERAND (expr, 2) != NULL_TREE
3207 && gimple_seq_may_fallthru (seq))
3209 gimple g;
3210 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3212 g = gimple_build_goto (label_cont);
3214 /* GIMPLE_COND's are very low level; they have embedded
3215 gotos. This particular embedded goto should not be marked
3216 with the location of the original COND_EXPR, as it would
3217 correspond to the COND_EXPR's condition, not the ELSE or the
3218 THEN arms. To avoid marking it with the wrong location, flag
3219 it as "no location". */
3220 gimple_set_do_not_emit_location (g);
3222 gimplify_seq_add_stmt (&seq, g);
3226 if (!have_else_clause_p)
3228 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3229 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3231 if (label_cont)
3232 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3234 gimple_pop_condition (pre_p);
3235 gimple_seq_add_seq (pre_p, seq);
3237 if (ret == GS_ERROR)
3238 ; /* Do nothing. */
3239 else if (have_then_clause_p || have_else_clause_p)
3240 ret = GS_ALL_DONE;
3241 else
3243 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3244 expr = TREE_OPERAND (expr, 0);
3245 gimplify_stmt (&expr, pre_p);
3248 *expr_p = NULL;
3249 return ret;
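/* An illustrative sketch of the value-producing form handled above:

     x = p ? a : b;

   is rewritten roughly as

     if (p) iftmp = a; else iftmp = b;
     x = iftmp;

   where 'iftmp' is the temporary created with create_tmp_var; a COND_EXPR
   of void type keeps its arms in place and produces no temporary.  */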
3252 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3253 to be marked addressable.
3255 We cannot rely on such an expression being directly markable if a temporary
3256 has been created by the gimplification. In this case, we create another
3257 temporary and initialize it with a copy, which will become a store after we
3258 mark it addressable. This can happen if the front-end passed us something
3259 that it could not mark addressable yet, like a Fortran pass-by-reference
3260 parameter (int) floatvar. */
3262 static void
3263 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3265 while (handled_component_p (*expr_p))
3266 expr_p = &TREE_OPERAND (*expr_p, 0);
3267 if (is_gimple_reg (*expr_p))
3268 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3271 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3272 a call to __builtin_memcpy. */
3274 static enum gimplify_status
3275 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3276 gimple_seq *seq_p)
3278 tree t, to, to_ptr, from, from_ptr;
3279 gimple gs;
3280 location_t loc = EXPR_LOCATION (*expr_p);
3282 to = TREE_OPERAND (*expr_p, 0);
3283 from = TREE_OPERAND (*expr_p, 1);
3285 /* Mark the RHS addressable. Beware that it may not be possible to do so
3286 directly if a temporary has been created by the gimplification. */
3287 prepare_gimple_addressable (&from, seq_p);
3289 mark_addressable (from);
3290 from_ptr = build_fold_addr_expr_loc (loc, from);
3291 gimplify_arg (&from_ptr, seq_p, loc);
3293 mark_addressable (to);
3294 to_ptr = build_fold_addr_expr_loc (loc, to);
3295 gimplify_arg (&to_ptr, seq_p, loc);
3297 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3299 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3301 if (want_value)
3303 /* tmp = memcpy() */
3304 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3305 gimple_call_set_lhs (gs, t);
3306 gimplify_seq_add_stmt (seq_p, gs);
3308 *expr_p = build_simple_mem_ref (t);
3309 return GS_ALL_DONE;
3312 gimplify_seq_add_stmt (seq_p, gs);
3313 *expr_p = NULL;
3314 return GS_ALL_DONE;
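/* An illustrative sketch, assuming the copied object has non-constant
   size so the RHS was wrapped in a WITH_SIZE_EXPR: an aggregate
   assignment 'a = b' of such objects is emitted roughly as

     __builtin_memcpy (&a, &b, size);

   where 'size' is the SIZE argument supplied by the caller.  */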
3317 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3318 a call to __builtin_memset. In this case we know that the RHS is
3319 a CONSTRUCTOR with an empty element list. */
3321 static enum gimplify_status
3322 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3323 gimple_seq *seq_p)
3325 tree t, from, to, to_ptr;
3326 gimple gs;
3327 location_t loc = EXPR_LOCATION (*expr_p);
3329 /* Assert our assumptions, to abort instead of producing wrong code
3330 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3331 not be immediately exposed. */
3332 from = TREE_OPERAND (*expr_p, 1);
3333 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3334 from = TREE_OPERAND (from, 0);
3336 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3337 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3339 /* Now proceed. */
3340 to = TREE_OPERAND (*expr_p, 0);
3342 to_ptr = build_fold_addr_expr_loc (loc, to);
3343 gimplify_arg (&to_ptr, seq_p, loc);
3344 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3346 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3348 if (want_value)
3350 /* tmp = memset() */
3351 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3352 gimple_call_set_lhs (gs, t);
3353 gimplify_seq_add_stmt (seq_p, gs);
3355 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3356 return GS_ALL_DONE;
3359 gimplify_seq_add_stmt (seq_p, gs);
3360 *expr_p = NULL;
3361 return GS_ALL_DONE;
3364 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3365 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3366 assignment. Return non-null if we detect a potential overlap. */
3368 struct gimplify_init_ctor_preeval_data
3370 /* The base decl of the lhs object. May be NULL, in which case we
3371 have to assume the lhs is indirect. */
3372 tree lhs_base_decl;
3374 /* The alias set of the lhs object. */
3375 alias_set_type lhs_alias_set;
3378 static tree
3379 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3381 struct gimplify_init_ctor_preeval_data *data
3382 = (struct gimplify_init_ctor_preeval_data *) xdata;
3383 tree t = *tp;
3385 /* If we find the base object, obviously we have overlap. */
3386 if (data->lhs_base_decl == t)
3387 return t;
3389 /* If the constructor component is indirect, determine if we have a
3390 potential overlap with the lhs. The only bits of information we
3391 have to go on at this point are addressability and alias sets. */
3392 if ((INDIRECT_REF_P (t)
3393 || TREE_CODE (t) == MEM_REF)
3394 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3395 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3396 return t;
3398 /* If the constructor component is a call, determine if it can hide a
3399 potential overlap with the lhs through an INDIRECT_REF like above.
3400 ??? Ugh - this is completely broken. In fact this whole analysis
3401 doesn't look conservative. */
3402 if (TREE_CODE (t) == CALL_EXPR)
3404 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3406 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3407 if (POINTER_TYPE_P (TREE_VALUE (type))
3408 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3409 && alias_sets_conflict_p (data->lhs_alias_set,
3410 get_alias_set
3411 (TREE_TYPE (TREE_VALUE (type)))))
3412 return t;
3415 if (IS_TYPE_OR_DECL_P (t))
3416 *walk_subtrees = 0;
3417 return NULL;
3420 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3421 force values that overlap with the lhs (as described by *DATA)
3422 into temporaries. */
3424 static void
3425 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3426 struct gimplify_init_ctor_preeval_data *data)
3428 enum gimplify_status one;
3430 /* If the value is constant, then there's nothing to pre-evaluate. */
3431 if (TREE_CONSTANT (*expr_p))
3433 /* Ensure it does not have side effects, it might contain a reference to
3434 the object we're initializing. */
3435 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3436 return;
3439 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3440 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3441 return;
3443 /* Recurse for nested constructors. */
3444 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3446 unsigned HOST_WIDE_INT ix;
3447 constructor_elt *ce;
3448 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3450 FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
3451 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3453 return;
3456 /* If this is a variable sized type, we must remember the size. */
3457 maybe_with_size_expr (expr_p);
3459 /* Gimplify the constructor element to something appropriate for the rhs
3460 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3461 the gimplifier will consider this a store to memory. Doing this
3462 gimplification now means that we won't have to deal with complicated
3463 language-specific trees, nor trees like SAVE_EXPR that can induce
3464 exponential search behavior. */
3465 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3466 if (one == GS_ERROR)
3468 *expr_p = NULL;
3469 return;
3472 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3473 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3474 always be true for all scalars, since is_gimple_mem_rhs insists on a
3475 temporary variable for them. */
3476 if (DECL_P (*expr_p))
3477 return;
3479 /* If this is of variable size, we have no choice but to assume it doesn't
3480 overlap since we can't make a temporary for it. */
3481 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3482 return;
3484 /* Otherwise, we must search for overlap ... */
3485 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3486 return;
3488 /* ... and if found, force the value into a temporary. */
3489 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3492 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3493 a RANGE_EXPR in a CONSTRUCTOR for an array.
3495 var = lower;
3496 loop_entry:
3497 object[var] = value;
3498 if (var == upper)
3499 goto loop_exit;
3500 var = var + 1;
3501 goto loop_entry;
3502 loop_exit:
3504 We increment var _after_ the loop exit check because we might otherwise
3505 fail if upper == TYPE_MAX_VALUE (type for upper).
3507 Note that we never have to deal with SAVE_EXPRs here, because this has
3508 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3510 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3511 gimple_seq *, bool);
3513 static void
3514 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3515 tree value, tree array_elt_type,
3516 gimple_seq *pre_p, bool cleared)
3518 tree loop_entry_label, loop_exit_label, fall_thru_label;
3519 tree var, var_type, cref, tmp;
3521 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3522 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3523 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3525 /* Create and initialize the index variable. */
3526 var_type = TREE_TYPE (upper);
3527 var = create_tmp_var (var_type, NULL);
3528 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3530 /* Add the loop entry label. */
3531 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3533 /* Build the reference. */
3534 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3535 var, NULL_TREE, NULL_TREE);
3537 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3538 the store. Otherwise just assign value to the reference. */
3540 if (TREE_CODE (value) == CONSTRUCTOR)
3541 /* NB we might have to call ourselves recursively through
3542 gimplify_init_ctor_eval if the value is a constructor. */
3543 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3544 pre_p, cleared);
3545 else
3546 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3548 /* We exit the loop when the index var is equal to the upper bound. */
3549 gimplify_seq_add_stmt (pre_p,
3550 gimple_build_cond (EQ_EXPR, var, upper,
3551 loop_exit_label, fall_thru_label));
3553 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3555 /* Otherwise, increment the index var... */
3556 tmp = build2 (PLUS_EXPR, var_type, var,
3557 fold_convert (var_type, integer_one_node));
3558 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3560 /* ...and jump back to the loop entry. */
3561 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3563 /* Add the loop exit label. */
3564 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
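/* An illustrative sketch for a GNU C designated-range initializer such as

     int a[100] = { [10 ... 19] = 1 };

   the stores for the range are emitted as the loop built above, roughly

     var = 10;
     entry: a[var] = 1;
     if (var == 19) goto exit;
     var = var + 1;
     goto entry;
     exit: ;  */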
3567 /* Return true if FDECL is accessing a field that is zero sized. */
3569 static bool
3570 zero_sized_field_decl (const_tree fdecl)
3572 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3573 && integer_zerop (DECL_SIZE (fdecl)))
3574 return true;
3575 return false;
3578 /* Return true if TYPE is zero sized. */
3580 static bool
3581 zero_sized_type (const_tree type)
3583 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3584 && integer_zerop (TYPE_SIZE (type)))
3585 return true;
3586 return false;
3589 /* A subroutine of gimplify_init_constructor. Generate individual
3590 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3591 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3592 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3593 zeroed first. */
3595 static void
3596 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3597 gimple_seq *pre_p, bool cleared)
3599 tree array_elt_type = NULL;
3600 unsigned HOST_WIDE_INT ix;
3601 tree purpose, value;
3603 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3604 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3606 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3608 tree cref;
3610 /* NULL values are created above for gimplification errors. */
3611 if (value == NULL)
3612 continue;
3614 if (cleared && initializer_zerop (value))
3615 continue;
3617 /* ??? Here's to hoping the front end fills in all of the indices,
3618 so we don't have to figure out what's missing ourselves. */
3619 gcc_assert (purpose);
3621 /* Skip zero-sized fields, unless value has side-effects. This can
3622 happen with calls to functions returning a zero-sized type, which
3623 we shouldn't discard. As a number of downstream passes don't
3624 expect sets of zero-sized fields, we rely on the gimplification of
3625 the MODIFY_EXPR we make below to drop the assignment statement. */
3626 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3627 continue;
3629 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3630 whole range. */
3631 if (TREE_CODE (purpose) == RANGE_EXPR)
3633 tree lower = TREE_OPERAND (purpose, 0);
3634 tree upper = TREE_OPERAND (purpose, 1);
3636 /* If the lower bound is equal to upper, just treat it as if
3637 upper was the index. */
3638 if (simple_cst_equal (lower, upper))
3639 purpose = upper;
3640 else
3642 gimplify_init_ctor_eval_range (object, lower, upper, value,
3643 array_elt_type, pre_p, cleared);
3644 continue;
3648 if (array_elt_type)
3650 /* Do not use bitsizetype for ARRAY_REF indices. */
3651 if (TYPE_DOMAIN (TREE_TYPE (object)))
3652 purpose
3653 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3654 purpose);
3655 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3656 purpose, NULL_TREE, NULL_TREE);
3658 else
3660 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3661 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3662 unshare_expr (object), purpose, NULL_TREE);
3665 if (TREE_CODE (value) == CONSTRUCTOR
3666 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3667 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3668 pre_p, cleared);
3669 else
3671 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3672 gimplify_and_add (init, pre_p);
3673 ggc_free (init);
3678 /* Return the appropriate RHS predicate for this LHS. */
3680 gimple_predicate
3681 rhs_predicate_for (tree lhs)
3683 if (is_gimple_reg (lhs))
3684 return is_gimple_reg_rhs_or_call;
3685 else
3686 return is_gimple_mem_rhs_or_call;
3689 /* Gimplify a C99 compound literal expression. This just means adding
3690 the DECL_EXPR before the current statement and using its anonymous
3691 decl instead. */
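/* For instance (the decl name D.1234 below is purely illustrative),
   when a C99 compound literal is used as an ordinary expression, e.g.

     foo (((struct T) { 1, 2 }).a);

   the DECL_EXPR of the anonymous literal variable is emitted into
   PRE_P and the expression itself is rewritten to use that variable,
   roughly foo (D.1234.a).  */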
3693 static enum gimplify_status
3694 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3696 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3697 tree decl = DECL_EXPR_DECL (decl_s);
3698 /* Mark the decl as addressable if the compound literal
3699 expression is addressable now, otherwise it is marked too late
3700 after we gimplify the initialization expression. */
3701 if (TREE_ADDRESSABLE (*expr_p))
3702 TREE_ADDRESSABLE (decl) = 1;
3704 /* Preliminarily mark non-addressed complex variables as eligible
3705 for promotion to gimple registers. We'll transform their uses
3706 as we find them. */
3707 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3708 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3709 && !TREE_THIS_VOLATILE (decl)
3710 && !needs_to_live_in_memory (decl))
3711 DECL_GIMPLE_REG_P (decl) = 1;
3713 /* This decl isn't mentioned in the enclosing block, so add it to the
3714 list of temps. FIXME it seems a bit of a kludge to say that
3715 anonymous artificial vars aren't pushed, but everything else is. */
3716 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3717 gimple_add_tmp_var (decl);
3719 gimplify_and_add (decl_s, pre_p);
3720 *expr_p = decl;
3721 return GS_OK;
3724 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3725 return a new CONSTRUCTOR if something changed. */
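/* A hypothetical example of the optimization (field names are
   illustrative only): an initializer such as

     struct P q = { .inner = (struct Q) { 1, 2 }, .x = 3 };

   can be flattened into

     struct P q = { .inner = { 1, 2 }, .x = 3 };

   provided neither the compound literal nor its anonymous decl has had
   its address taken.  */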
3727 static tree
3728 optimize_compound_literals_in_ctor (tree orig_ctor)
3730 tree ctor = orig_ctor;
3731 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3732 unsigned int idx, num = VEC_length (constructor_elt, elts);
3734 for (idx = 0; idx < num; idx++)
3736 tree value = VEC_index (constructor_elt, elts, idx)->value;
3737 tree newval = value;
3738 if (TREE_CODE (value) == CONSTRUCTOR)
3739 newval = optimize_compound_literals_in_ctor (value);
3740 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3742 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3743 tree decl = DECL_EXPR_DECL (decl_s);
3744 tree init = DECL_INITIAL (decl);
3746 if (!TREE_ADDRESSABLE (value)
3747 && !TREE_ADDRESSABLE (decl)
3748 && init
3749 && TREE_CODE (init) == CONSTRUCTOR)
3750 newval = optimize_compound_literals_in_ctor (init);
3752 if (newval == value)
3753 continue;
3755 if (ctor == orig_ctor)
3757 ctor = copy_node (orig_ctor);
3758 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3759 elts = CONSTRUCTOR_ELTS (ctor);
3761 VEC_index (constructor_elt, elts, idx)->value = newval;
3763 return ctor;
3766 /* A subroutine of gimplify_modify_expr. Break out elements of a
3767 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3769 Note that we still need to clear any elements that don't have explicit
3770 initializers, so if not all elements are initialized we keep the
3771 original MODIFY_EXPR; we just remove all of the constructor elements.
3773 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3774 GS_ERROR if we would have to create a temporary when gimplifying
3775 this constructor. Otherwise, return GS_OK.
3777 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3779 static enum gimplify_status
3780 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3781 bool want_value, bool notify_temp_creation)
3783 tree object, ctor, type;
3784 enum gimplify_status ret;
3785 VEC(constructor_elt,gc) *elts;
3787 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3789 if (!notify_temp_creation)
3791 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3792 is_gimple_lvalue, fb_lvalue);
3793 if (ret == GS_ERROR)
3794 return ret;
3797 object = TREE_OPERAND (*expr_p, 0);
3798 ctor = TREE_OPERAND (*expr_p, 1) =
3799 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3800 type = TREE_TYPE (ctor);
3801 elts = CONSTRUCTOR_ELTS (ctor);
3802 ret = GS_ALL_DONE;
3804 switch (TREE_CODE (type))
3806 case RECORD_TYPE:
3807 case UNION_TYPE:
3808 case QUAL_UNION_TYPE:
3809 case ARRAY_TYPE:
3811 struct gimplify_init_ctor_preeval_data preeval_data;
3812 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3813 bool cleared, complete_p, valid_const_initializer;
3815 /* Aggregate types must lower constructors to initialization of
3816 individual elements. The exception is that a CONSTRUCTOR node
3817 with no elements indicates zero-initialization of the whole. */
3818 if (VEC_empty (constructor_elt, elts))
3820 if (notify_temp_creation)
3821 return GS_OK;
3822 break;
3825 /* Fetch information about the constructor to direct later processing.
3826 We might want to make static versions of it in various cases, and
3827 can only do so if it is known to be a valid constant initializer. */
3828 valid_const_initializer
3829 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3830 &num_ctor_elements, &complete_p);
3832 /* If a const aggregate variable is being initialized, then it
3833 should never be a loss to promote the variable to be static. */
3834 if (valid_const_initializer
3835 && num_nonzero_elements > 1
3836 && TREE_READONLY (object)
3837 && TREE_CODE (object) == VAR_DECL
3838 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3840 if (notify_temp_creation)
3841 return GS_ERROR;
3842 DECL_INITIAL (object) = ctor;
3843 TREE_STATIC (object) = 1;
3844 if (!DECL_NAME (object))
3845 DECL_NAME (object) = create_tmp_var_name ("C");
3846 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3848 /* ??? C++ doesn't automatically append a .<number> to the
3849 assembler name, and even when it does, it looks at FE private
3850 data structures to figure out what that number should be,
3851 which are not set for this variable. I suppose this is
3852 important for local statics for inline functions, which aren't
3853 "local" in the object file sense. So in order to get a unique
3854 TU-local symbol, we must invoke the lhd version now. */
3855 lhd_set_decl_assembler_name (object);
3857 *expr_p = NULL_TREE;
3858 break;
3861 /* If there are "lots" of initialized elements, even discounting
3862 those that are not address constants (and thus *must* be
3863 computed at runtime), then partition the constructor into
3864 constant and non-constant parts. Block copy the constant
3865 parts in, then generate code for the non-constant parts. */
3866 /* TODO. There's code in cp/typeck.c to do this. */
3868 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3869 /* store_constructor will ignore the clearing of variable-sized
3870 objects. Initializers for such objects must explicitly set
3871 every field that needs to be set. */
3872 cleared = false;
3873 else if (!complete_p)
3874 /* If the constructor isn't complete, clear the whole object
3875 beforehand.
3877 ??? This ought not to be needed. For any element not present
3878 in the initializer, we should simply set it to zero. Except
3879 we'd need to *find* the elements that are not present, and that
3880 requires trickery to avoid quadratic compile-time behavior in
3881 large cases or excessive memory use in small cases. */
3882 cleared = true;
3883 else if (num_ctor_elements - num_nonzero_elements
3884 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3885 && num_nonzero_elements < num_ctor_elements / 4)
3886 /* If there are "lots" of zeros, it's more efficient to clear
3887 the memory and then set the nonzero elements. */
3888 cleared = true;
3889 else
3890 cleared = false;
3892 /* If there are "lots" of initialized elements, and all of them
3893 are valid address constants, then the entire initializer can
3894 be dropped to memory, and then memcpy'd out. Don't do this
3895 for sparse arrays, though, as it's more efficient to follow
3896 the standard CONSTRUCTOR behavior of memset followed by
3897 individual element initialization. Also don't do this for small
3898 all-zero initializers (which aren't big enough to merit
3899 clearing), and don't try to make bitwise copies of
3900 TREE_ADDRESSABLE types. */
3901 if (valid_const_initializer
3902 && !(cleared || num_nonzero_elements == 0)
3903 && !TREE_ADDRESSABLE (type))
3905 HOST_WIDE_INT size = int_size_in_bytes (type);
3906 unsigned int align;
3908 /* ??? We can still get unbounded array types, at least
3909 from the C++ front end. This seems wrong, but attempt
3910 to work around it for now. */
3911 if (size < 0)
3913 size = int_size_in_bytes (TREE_TYPE (object));
3914 if (size >= 0)
3915 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3918 /* Find the maximum alignment we can assume for the object. */
3919 /* ??? Make use of DECL_OFFSET_ALIGN. */
3920 if (DECL_P (object))
3921 align = DECL_ALIGN (object);
3922 else
3923 align = TYPE_ALIGN (type);
3925 if (size > 0
3926 && num_nonzero_elements > 1
3927 && !can_move_by_pieces (size, align))
3929 if (notify_temp_creation)
3930 return GS_ERROR;
3932 walk_tree (&ctor, force_labels_r, NULL, NULL);
3933 ctor = tree_output_constant_def (ctor);
3934 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3935 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3936 TREE_OPERAND (*expr_p, 1) = ctor;
3938 /* This is no longer an assignment of a CONSTRUCTOR, but
3939 we still may have processing to do on the LHS. So
3940 pretend we didn't do anything here to let that happen. */
3941 return GS_UNHANDLED;
3945 /* If the target is volatile, there are nonzero elements, and there is
3946 more than one field to assign, initialize the target from a temporary. */
3947 if (TREE_THIS_VOLATILE (object)
3948 && !TREE_ADDRESSABLE (type)
3949 && num_nonzero_elements > 0
3950 && VEC_length (constructor_elt, elts) > 1)
3952 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3953 TREE_OPERAND (*expr_p, 0) = temp;
3954 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3955 *expr_p,
3956 build2 (MODIFY_EXPR, void_type_node,
3957 object, temp));
3958 return GS_OK;
3961 if (notify_temp_creation)
3962 return GS_OK;
3964 /* If there are nonzero elements and if needed, pre-evaluate to capture
3965 elements overlapping with the lhs into temporaries. We must do this
3966 before clearing to fetch the values before they are zeroed-out. */
3967 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3969 preeval_data.lhs_base_decl = get_base_address (object);
3970 if (!DECL_P (preeval_data.lhs_base_decl))
3971 preeval_data.lhs_base_decl = NULL;
3972 preeval_data.lhs_alias_set = get_alias_set (object);
3974 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3975 pre_p, post_p, &preeval_data);
3978 if (cleared)
3980 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3981 Note that we still have to gimplify, in order to handle the
3982 case of variable sized types. Avoid shared tree structures. */
3983 CONSTRUCTOR_ELTS (ctor) = NULL;
3984 TREE_SIDE_EFFECTS (ctor) = 0;
3985 object = unshare_expr (object);
3986 gimplify_stmt (expr_p, pre_p);
3989 /* If we have not block cleared the object, or if there are nonzero
3990 elements in the constructor, add assignments to the individual
3991 scalar fields of the object. */
3992 if (!cleared || num_nonzero_elements > 0)
3993 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3995 *expr_p = NULL_TREE;
3997 break;
3999 case COMPLEX_TYPE:
4001 tree r, i;
4003 if (notify_temp_creation)
4004 return GS_OK;
4006 /* Extract the real and imaginary parts out of the ctor. */
4007 gcc_assert (VEC_length (constructor_elt, elts) == 2);
4008 r = VEC_index (constructor_elt, elts, 0)->value;
4009 i = VEC_index (constructor_elt, elts, 1)->value;
4010 if (r == NULL || i == NULL)
4012 tree zero = build_zero_cst (TREE_TYPE (type));
4013 if (r == NULL)
4014 r = zero;
4015 if (i == NULL)
4016 i = zero;
4019 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4020 represent creation of a complex value. */
4021 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4023 ctor = build_complex (type, r, i);
4024 TREE_OPERAND (*expr_p, 1) = ctor;
4026 else
4028 ctor = build2 (COMPLEX_EXPR, type, r, i);
4029 TREE_OPERAND (*expr_p, 1) = ctor;
4030 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4031 pre_p,
4032 post_p,
4033 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4034 fb_rvalue);
4037 break;
4039 case VECTOR_TYPE:
4041 unsigned HOST_WIDE_INT ix;
4042 constructor_elt *ce;
4044 if (notify_temp_creation)
4045 return GS_OK;
4047 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4048 if (TREE_CONSTANT (ctor))
4050 bool constant_p = true;
4051 tree value;
4053 /* Even when ctor is constant, it might contain non-*_CST
4054 elements, such as addresses or trapping values like
4055 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4056 in VECTOR_CST nodes. */
4057 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4058 if (!CONSTANT_CLASS_P (value))
4060 constant_p = false;
4061 break;
4064 if (constant_p)
4066 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4067 break;
4070 /* Don't reduce an initializer constant even if we can't
4071 make a VECTOR_CST. It won't do anything for us, and it'll
4072 prevent us from representing it as a single constant. */
4073 if (initializer_constant_valid_p (ctor, type))
4074 break;
4076 TREE_CONSTANT (ctor) = 0;
4079 /* Vector types use CONSTRUCTOR all the way through gimple
4080 compilation as a general initializer. */
4081 FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
4083 enum gimplify_status tret;
4084 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4085 fb_rvalue);
4086 if (tret == GS_ERROR)
4087 ret = GS_ERROR;
4089 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4090 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4092 break;
4094 default:
4095 /* So how did we get a CONSTRUCTOR for a scalar type? */
4096 gcc_unreachable ();
4099 if (ret == GS_ERROR)
4100 return GS_ERROR;
4101 else if (want_value)
4103 *expr_p = object;
4104 return GS_OK;
4106 else
4108 /* If we have gimplified both sides of the initializer but have
4109 not emitted an assignment, do so now. */
4110 if (*expr_p)
4112 tree lhs = TREE_OPERAND (*expr_p, 0);
4113 tree rhs = TREE_OPERAND (*expr_p, 1);
4114 gimple init = gimple_build_assign (lhs, rhs);
4115 gimplify_seq_add_stmt (pre_p, init);
4116 *expr_p = NULL;
4119 return GS_ALL_DONE;
4123 /* Given a pointer value OP0, return a simplified version of an
4124 indirection through OP0, or NULL_TREE if no simplification is
4125 possible. Note that the resulting type may be different from
4126 the type pointed to in the sense that it is still compatible
4127 from the langhooks point of view. */
4129 tree
4130 gimple_fold_indirect_ref (tree t)
4132 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4133 tree sub = t;
4134 tree subtype;
4136 STRIP_NOPS (sub);
4137 subtype = TREE_TYPE (sub);
4138 if (!POINTER_TYPE_P (subtype))
4139 return NULL_TREE;
4141 if (TREE_CODE (sub) == ADDR_EXPR)
4143 tree op = TREE_OPERAND (sub, 0);
4144 tree optype = TREE_TYPE (op);
4145 /* *&p => p */
4146 if (useless_type_conversion_p (type, optype))
4147 return op;
4149 /* *(foo *)&fooarray => fooarray[0] */
4150 if (TREE_CODE (optype) == ARRAY_TYPE
4151 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4152 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4154 tree type_domain = TYPE_DOMAIN (optype);
4155 tree min_val = size_zero_node;
4156 if (type_domain && TYPE_MIN_VALUE (type_domain))
4157 min_val = TYPE_MIN_VALUE (type_domain);
4158 if (TREE_CODE (min_val) == INTEGER_CST)
4159 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4161 /* *(foo *)&complexfoo => __real__ complexfoo */
4162 else if (TREE_CODE (optype) == COMPLEX_TYPE
4163 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4164 return fold_build1 (REALPART_EXPR, type, op);
4165 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4166 else if (TREE_CODE (optype) == VECTOR_TYPE
4167 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4169 tree part_width = TYPE_SIZE (type);
4170 tree index = bitsize_int (0);
4171 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4175 /* *(p + CST) -> ... */
4176 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4177 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4179 tree addr = TREE_OPERAND (sub, 0);
4180 tree off = TREE_OPERAND (sub, 1);
4181 tree addrtype;
4183 STRIP_NOPS (addr);
4184 addrtype = TREE_TYPE (addr);
4186 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4187 if (TREE_CODE (addr) == ADDR_EXPR
4188 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4189 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4190 && host_integerp (off, 1))
4192 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
4193 tree part_width = TYPE_SIZE (type);
4194 unsigned HOST_WIDE_INT part_widthi
4195 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4196 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4197 tree index = bitsize_int (indexi);
4198 if (offset / part_widthi
4199 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4200 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4201 part_width, index);
4204 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4205 if (TREE_CODE (addr) == ADDR_EXPR
4206 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4207 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4209 tree size = TYPE_SIZE_UNIT (type);
4210 if (tree_int_cst_equal (size, off))
4211 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4214 /* *(p + CST) -> MEM_REF <p, CST>. */
4215 if (TREE_CODE (addr) != ADDR_EXPR
4216 || DECL_P (TREE_OPERAND (addr, 0)))
4217 return fold_build2 (MEM_REF, type,
4218 addr,
4219 build_int_cst_wide (ptype,
4220 TREE_INT_CST_LOW (off),
4221 TREE_INT_CST_HIGH (off)));
4224 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4225 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4226 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4227 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4229 tree type_domain;
4230 tree min_val = size_zero_node;
4231 tree osub = sub;
4232 sub = gimple_fold_indirect_ref (sub);
4233 if (! sub)
4234 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4235 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4236 if (type_domain && TYPE_MIN_VALUE (type_domain))
4237 min_val = TYPE_MIN_VALUE (type_domain);
4238 if (TREE_CODE (min_val) == INTEGER_CST)
4239 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4242 return NULL_TREE;
4245 /* Given a pointer value OP0, return a simplified version of an
4246 indirection through OP0, or NULL_TREE if no simplification is
4247 possible. This may only be applied to a rhs of an expression.
4248 Note that the resulting type may be different from the type pointed
4249 to in the sense that it is still compatible from the langhooks
4250 point of view. */
4252 static tree
4253 gimple_fold_indirect_ref_rhs (tree t)
4255 return gimple_fold_indirect_ref (t);
4258 /* Subroutine of gimplify_modify_expr to do simplifications of
4259 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4260 something changes. */
4262 static enum gimplify_status
4263 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4264 gimple_seq *pre_p, gimple_seq *post_p,
4265 bool want_value)
4267 enum gimplify_status ret = GS_UNHANDLED;
4268 bool changed;
4272 changed = false;
4273 switch (TREE_CODE (*from_p))
4275 case VAR_DECL:
4276 /* If we're assigning from a read-only variable initialized with
4277 a constructor, do the direct assignment from the constructor,
4278 but only if neither source nor target are volatile since this
4279 latter assignment might end up being done on a per-field basis. */
4280 if (DECL_INITIAL (*from_p)
4281 && TREE_READONLY (*from_p)
4282 && !TREE_THIS_VOLATILE (*from_p)
4283 && !TREE_THIS_VOLATILE (*to_p)
4284 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4286 tree old_from = *from_p;
4287 enum gimplify_status subret;
4289 /* Move the constructor into the RHS. */
4290 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4292 /* Let's see if gimplify_init_constructor will need to put
4293 it in memory. */
4294 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4295 false, true);
4296 if (subret == GS_ERROR)
4298 /* If so, revert the change. */
4299 *from_p = old_from;
4301 else
4303 ret = GS_OK;
4304 changed = true;
4307 break;
4308 case INDIRECT_REF:
4310 /* If we have code like
4312 *(const A*)(A*)&x
4314 where the type of "x" is a (possibly cv-qualified variant
4315 of "A"), treat the entire expression as identical to "x".
4316 This kind of code arises in C++ when an object is bound
4317 to a const reference, and if "x" is a TARGET_EXPR we want
4318 to take advantage of the optimization below. */
4319 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4320 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4321 if (t)
4323 if (TREE_THIS_VOLATILE (t) != volatile_p)
4325 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4326 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4327 build_fold_addr_expr (t));
4328 if (REFERENCE_CLASS_P (t))
4329 TREE_THIS_VOLATILE (t) = volatile_p;
4331 *from_p = t;
4332 ret = GS_OK;
4333 changed = true;
4335 break;
4338 case TARGET_EXPR:
4340 /* If we are initializing something from a TARGET_EXPR, strip the
4341 TARGET_EXPR and initialize it directly, if possible. This can't
4342 be done if the initializer is void, since that implies that the
4343 temporary is set in some non-trivial way.
4345 ??? What about code that pulls out the temp and uses it
4346 elsewhere? I think that such code never uses the TARGET_EXPR as
4347 an initializer. If I'm wrong, we'll die because the temp won't
4348 have any RTL. In that case, I guess we'll need to replace
4349 references somehow. */
4350 tree init = TARGET_EXPR_INITIAL (*from_p);
4352 if (init
4353 && !VOID_TYPE_P (TREE_TYPE (init)))
4355 *from_p = init;
4356 ret = GS_OK;
4357 changed = true;
4360 break;
4362 case COMPOUND_EXPR:
4363 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4364 caught. */
4365 gimplify_compound_expr (from_p, pre_p, true);
4366 ret = GS_OK;
4367 changed = true;
4368 break;
4370 case CONSTRUCTOR:
4371 /* If we already made some changes, let the front end have a
4372 crack at this before we break it down. */
4373 if (ret != GS_UNHANDLED)
4374 break;
4375 /* If we're initializing from a CONSTRUCTOR, break this into
4376 individual MODIFY_EXPRs. */
4377 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4378 false);
4380 case COND_EXPR:
4381 /* If we're assigning to a non-register type, push the assignment
4382 down into the branches. This is mandatory for ADDRESSABLE types,
4383 since we cannot generate temporaries for such, but it saves a
4384 copy in other cases as well. */
4385 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4387 /* This code should mirror the code in gimplify_cond_expr. */
4388 enum tree_code code = TREE_CODE (*expr_p);
4389 tree cond = *from_p;
4390 tree result = *to_p;
4392 ret = gimplify_expr (&result, pre_p, post_p,
4393 is_gimple_lvalue, fb_lvalue);
4394 if (ret != GS_ERROR)
4395 ret = GS_OK;
4397 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4398 TREE_OPERAND (cond, 1)
4399 = build2 (code, void_type_node, result,
4400 TREE_OPERAND (cond, 1));
4401 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4402 TREE_OPERAND (cond, 2)
4403 = build2 (code, void_type_node, unshare_expr (result),
4404 TREE_OPERAND (cond, 2));
4406 TREE_TYPE (cond) = void_type_node;
4407 recalculate_side_effects (cond);
4409 if (want_value)
4411 gimplify_and_add (cond, pre_p);
4412 *expr_p = unshare_expr (result);
4414 else
4415 *expr_p = cond;
4416 return ret;
4418 break;
4420 case CALL_EXPR:
4421 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4422 return slot so that we don't generate a temporary. */
4423 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4424 && aggregate_value_p (*from_p, *from_p))
4426 bool use_target;
4428 if (!(rhs_predicate_for (*to_p))(*from_p))
4429 /* If we need a temporary, *to_p isn't accurate. */
4430 use_target = false;
4431 /* It's OK to use the return slot directly unless it's an NRV. */
4432 else if (TREE_CODE (*to_p) == RESULT_DECL
4433 && DECL_NAME (*to_p) == NULL_TREE
4434 && needs_to_live_in_memory (*to_p))
4435 use_target = true;
4436 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4437 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4438 /* Don't force regs into memory. */
4439 use_target = false;
4440 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4441 /* It's OK to use the target directly if it's being
4442 initialized. */
4443 use_target = true;
4444 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4445 /* Always use the target and thus RSO for variable-sized types.
4446 GIMPLE cannot deal with a variable-sized assignment
4447 embedded in a call statement. */
4448 use_target = true;
4449 else if (TREE_CODE (*to_p) != SSA_NAME
4450 && (!is_gimple_variable (*to_p)
4451 || needs_to_live_in_memory (*to_p)))
4452 /* Don't use the original target if it's already addressable;
4453 if its address escapes, and the called function uses the
4454 NRV optimization, a conforming program could see *to_p
4455 change before the called function returns; see c++/19317.
4456 When optimizing, the return_slot pass marks more functions
4457 as safe after we have escape info. */
4458 use_target = false;
4459 else
4460 use_target = true;
4462 if (use_target)
4464 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4465 mark_addressable (*to_p);
4468 break;
4470 case WITH_SIZE_EXPR:
4471 /* Likewise for calls that return an aggregate of non-constant size,
4472 since we would not be able to generate a temporary at all. */
4473 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4475 *from_p = TREE_OPERAND (*from_p, 0);
4476 /* We don't change ret in this case because the
4477 WITH_SIZE_EXPR might have been added in
4478 gimplify_modify_expr, so returning GS_OK would lead to an
4479 infinite loop. */
4480 changed = true;
4482 break;
4484 /* If we're initializing from a container, push the initialization
4485 inside it. */
4486 case CLEANUP_POINT_EXPR:
4487 case BIND_EXPR:
4488 case STATEMENT_LIST:
4490 tree wrap = *from_p;
4491 tree t;
4493 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4494 fb_lvalue);
4495 if (ret != GS_ERROR)
4496 ret = GS_OK;
4498 t = voidify_wrapper_expr (wrap, *expr_p);
4499 gcc_assert (t == *expr_p);
4501 if (want_value)
4503 gimplify_and_add (wrap, pre_p);
4504 *expr_p = unshare_expr (*to_p);
4506 else
4507 *expr_p = wrap;
4508 return GS_OK;
4511 case COMPOUND_LITERAL_EXPR:
4513 tree complit = TREE_OPERAND (*expr_p, 1);
4514 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4515 tree decl = DECL_EXPR_DECL (decl_s);
4516 tree init = DECL_INITIAL (decl);
4518 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4519 into struct T x = { 0, 1, 2 } if the address of the
4520 compound literal has never been taken. */
4521 if (!TREE_ADDRESSABLE (complit)
4522 && !TREE_ADDRESSABLE (decl)
4523 && init)
4525 *expr_p = copy_node (*expr_p);
4526 TREE_OPERAND (*expr_p, 1) = init;
4527 return GS_OK;
4531 default:
4532 break;
4535 while (changed);
4537 return ret;
4540 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4541 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4542 DECL_GIMPLE_REG_P set.
4544 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4545 other, unmodified part of the complex object just before the total store.
4546 As a consequence, if the object is still uninitialized, an undefined value
4547 will be loaded into a register, which may result in a spurious exception
4548 if the register is floating-point and the value happens to be a signaling
4549 NaN for example. Then the fully-fledged complex operations lowering pass
4550 followed by a DCE pass are necessary in order to fix things up. */
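/* Schematically (variable names are illustrative), a partial store

     __real__ c = x;

   is rewritten as the total store

     c = COMPLEX_EXPR <x, __imag__ c>;

   where the unmodified imaginary part is first loaded into a formal
   temporary, as described in the note above.  */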
4552 static enum gimplify_status
4553 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4554 bool want_value)
4556 enum tree_code code, ocode;
4557 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4559 lhs = TREE_OPERAND (*expr_p, 0);
4560 rhs = TREE_OPERAND (*expr_p, 1);
4561 code = TREE_CODE (lhs);
4562 lhs = TREE_OPERAND (lhs, 0);
4564 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4565 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4566 TREE_NO_WARNING (other) = 1;
4567 other = get_formal_tmp_var (other, pre_p);
4569 realpart = code == REALPART_EXPR ? rhs : other;
4570 imagpart = code == REALPART_EXPR ? other : rhs;
4572 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4573 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4574 else
4575 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4577 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4578 *expr_p = (want_value) ? rhs : NULL_TREE;
4580 return GS_ALL_DONE;
4583 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4585 modify_expr
4586 : varname '=' rhs
4587 | '*' ID '=' rhs
4589 PRE_P points to the list where side effects that must happen before
4590 *EXPR_P should be stored.
4592 POST_P points to the list where side effects that must happen after
4593 *EXPR_P should be stored.
4595 WANT_VALUE is nonzero iff we want to use the value of this expression
4596 in another expression. */
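/* As one illustrative case (names are made up), gimplifying

     a = foo (b);

   does not produce a GIMPLE_ASSIGN with a call on its right-hand side;
   instead the call itself becomes the statement and 'a' is installed as
   its LHS, i.e. a GIMPLE_CALL of the form a = foo (b).  See the comment
   before the CALL_EXPR handling further below.  */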
4598 static enum gimplify_status
4599 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4600 bool want_value)
4602 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4603 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4604 enum gimplify_status ret = GS_UNHANDLED;
4605 gimple assign;
4606 location_t loc = EXPR_LOCATION (*expr_p);
4608 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4609 || TREE_CODE (*expr_p) == INIT_EXPR);
4611 /* Trying to simplify a clobber using normal logic doesn't work,
4612 so handle it here. */
4613 if (TREE_CLOBBER_P (*from_p))
4615 gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
4616 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4617 *expr_p = NULL;
4618 return GS_ALL_DONE;
4621 /* Insert pointer conversions required by the middle-end that are not
4622 required by the frontend. This fixes middle-end type checking for,
4623 for example, gcc.dg/redecl-6.c. */
4624 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4626 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4627 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4628 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4631 /* See if any simplifications can be done based on what the RHS is. */
4632 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4633 want_value);
4634 if (ret != GS_UNHANDLED)
4635 return ret;
4637 /* For zero sized types only gimplify the left hand side and right hand
4638 side as statements and throw away the assignment. Do this after
4639 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4640 types properly. */
4641 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4643 gimplify_stmt (from_p, pre_p);
4644 gimplify_stmt (to_p, pre_p);
4645 *expr_p = NULL_TREE;
4646 return GS_ALL_DONE;
4649 /* If the value being copied is of variable width, compute the length
4650 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4651 before gimplifying any of the operands so that we can resolve any
4652 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4653 the size of the expression to be copied, not of the destination, so
4654 that is what we must do here. */
4655 maybe_with_size_expr (from_p);
4657 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4658 if (ret == GS_ERROR)
4659 return ret;
4661 /* As a special case, we have to temporarily allow for assignments
4662 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4663 a toplevel statement, when gimplifying the GENERIC expression
4664 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4665 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4667 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4668 prevent gimplify_expr from trying to create a new temporary for
4669 foo's LHS, we tell it that it should only gimplify until it
4670 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4671 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4672 and all we need to do here is set 'a' to be its LHS. */
4673 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4674 fb_rvalue);
4675 if (ret == GS_ERROR)
4676 return ret;
4678 /* Now see if the above changed *from_p to something we handle specially. */
4679 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4680 want_value);
4681 if (ret != GS_UNHANDLED)
4682 return ret;
4684 /* If we've got a variable-sized assignment between two lvalues (i.e. one
4685 that does not involve a call), then we can make things a bit more
4686 straightforward by converting the assignment to memcpy or memset. */
4687 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4689 tree from = TREE_OPERAND (*from_p, 0);
4690 tree size = TREE_OPERAND (*from_p, 1);
4692 if (TREE_CODE (from) == CONSTRUCTOR)
4693 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4695 if (is_gimple_addressable (from))
4697 *from_p = from;
4698 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4699 pre_p);
4703 /* Transform partial stores to non-addressable complex variables into
4704 total stores. This allows us to use real instead of virtual operands
4705 for these variables, which improves optimization. */
4706 if ((TREE_CODE (*to_p) == REALPART_EXPR
4707 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4708 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4709 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4711 /* Try to alleviate the effects of the gimplification creating artificial
4712 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4713 if (!gimplify_ctxp->into_ssa
4714 && TREE_CODE (*from_p) == VAR_DECL
4715 && DECL_IGNORED_P (*from_p)
4716 && DECL_P (*to_p)
4717 && !DECL_IGNORED_P (*to_p))
4719 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4720 DECL_NAME (*from_p)
4721 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4722 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4723 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4726 if (want_value && TREE_THIS_VOLATILE (*to_p))
4727 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4729 if (TREE_CODE (*from_p) == CALL_EXPR)
4731 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4732 instead of a GIMPLE_ASSIGN. */
4733 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4734 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4735 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4736 assign = gimple_build_call_from_tree (*from_p);
4737 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4738 if (!gimple_call_noreturn_p (assign))
4739 gimple_call_set_lhs (assign, *to_p);
4741 else
4743 assign = gimple_build_assign (*to_p, *from_p);
4744 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4747 gimplify_seq_add_stmt (pre_p, assign);
4749 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4751 /* If we've somehow already got an SSA_NAME on the LHS, then
4752 we've probably modified it twice. Not good. */
4753 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4754 *to_p = make_ssa_name (*to_p, assign);
4755 gimple_set_lhs (assign, *to_p);
4758 if (want_value)
4760 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4761 return GS_OK;
4763 else
4764 *expr_p = NULL;
4766 return GS_ALL_DONE;
4769 /* Gimplify a comparison between two variable-sized objects. Do this
4770 with a call to BUILT_IN_MEMCMP. */
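/* Illustratively, for two variable-length arrays a and b whose common
   size gimplifies to SZ (a made-up name), a comparison

     a == b

   is rewritten as

     __builtin_memcmp (&a, &b, SZ) == 0

   using the implicit BUILT_IN_MEMCMP declaration.  */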
4772 static enum gimplify_status
4773 gimplify_variable_sized_compare (tree *expr_p)
4775 location_t loc = EXPR_LOCATION (*expr_p);
4776 tree op0 = TREE_OPERAND (*expr_p, 0);
4777 tree op1 = TREE_OPERAND (*expr_p, 1);
4778 tree t, arg, dest, src, expr;
4780 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4781 arg = unshare_expr (arg);
4782 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4783 src = build_fold_addr_expr_loc (loc, op1);
4784 dest = build_fold_addr_expr_loc (loc, op0);
4785 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4786 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4788 expr
4789 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4790 SET_EXPR_LOCATION (expr, loc);
4791 *expr_p = expr;
4793 return GS_OK;
4796 /* Gimplify a comparison between two aggregate objects of integral scalar
4797 mode as a comparison between the bitwise equivalent scalar values. */
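/* For example (assuming a target where the struct below has SImode),

     struct S { short a, b; } s1, s2;
     ... s1 == s2 ...

   is compared as

     VIEW_CONVERT_EXPR<int>(s1) == VIEW_CONVERT_EXPR<int>(s2)

   with the scalar type supplied by the type_for_mode language hook;
   the choice of 'int' here is only illustrative.  */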
4799 static enum gimplify_status
4800 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4802 location_t loc = EXPR_LOCATION (*expr_p);
4803 tree op0 = TREE_OPERAND (*expr_p, 0);
4804 tree op1 = TREE_OPERAND (*expr_p, 1);
4806 tree type = TREE_TYPE (op0);
4807 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4809 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4810 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4812 *expr_p
4813 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4815 return GS_OK;
4818 /* Gimplify an expression sequence. This function gimplifies each
4819 expression and rewrites the original expression with the last
4820 expression of the sequence in GIMPLE form.
4822 PRE_P points to the list where the side effects for all the
4823 expressions in the sequence will be emitted.
4825 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
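/* E.g. for the sequence (a = 1, b = 2, a + b), the first two
   assignments are gimplified into PRE_P as statements and *EXPR_P is
   rewritten to the final operand a + b (names illustrative).  */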
4827 static enum gimplify_status
4828 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4830 tree t = *expr_p;
4834 tree *sub_p = &TREE_OPERAND (t, 0);
4836 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4837 gimplify_compound_expr (sub_p, pre_p, false);
4838 else
4839 gimplify_stmt (sub_p, pre_p);
4841 t = TREE_OPERAND (t, 1);
4843 while (TREE_CODE (t) == COMPOUND_EXPR);
4845 *expr_p = t;
4846 if (want_value)
4847 return GS_OK;
4848 else
4850 gimplify_stmt (expr_p, pre_p);
4851 return GS_ALL_DONE;
4855 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4856 gimplify. After gimplification, EXPR_P will point to a new temporary
4857 that holds the original value of the SAVE_EXPR node.
4859 PRE_P points to the list where side effects that must happen before
4860 *EXPR_P should be stored. */
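/* For instance, gimplifying SAVE_EXPR <i + 1> (an illustrative operand)
   evaluates i + 1 once into a temporary, say D.1234, marks the
   SAVE_EXPR as resolved, and rewrites *EXPR_P to D.1234 so later
   occurrences reuse the same value.  */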
4862 static enum gimplify_status
4863 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4865 enum gimplify_status ret = GS_ALL_DONE;
4866 tree val;
4868 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4869 val = TREE_OPERAND (*expr_p, 0);
4871 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4872 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4874 /* The operand may be a void-valued expression, such as the SAVE_EXPRs
4875 generated by the Java frontend for class initialization. It is
4876 being executed only for its side-effects. */
4877 if (TREE_TYPE (val) == void_type_node)
4879 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4880 is_gimple_stmt, fb_none);
4881 val = NULL;
4883 else
4884 val = get_initialized_tmp_var (val, pre_p, post_p);
4886 TREE_OPERAND (*expr_p, 0) = val;
4887 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4890 *expr_p = val;
4892 return ret;
4895 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4897 unary_expr
4898 : ...
4899 | '&' varname
4902 PRE_P points to the list where side effects that must happen before
4903 *EXPR_P should be stored.
4905 POST_P points to the list where side effects that must happen after
4906 *EXPR_P should be stored. */
4908 static enum gimplify_status
4909 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4911 tree expr = *expr_p;
4912 tree op0 = TREE_OPERAND (expr, 0);
4913 enum gimplify_status ret;
4914 location_t loc = EXPR_LOCATION (*expr_p);
4916 switch (TREE_CODE (op0))
4918 case INDIRECT_REF:
4919 do_indirect_ref:
4920 /* Check if we are dealing with an expression of the form '&*ptr'.
4921 While the front end folds away '&*ptr' into 'ptr', these
4922 expressions may be generated internally by the compiler (e.g.,
4923 builtins like __builtin_va_end). */
4924 /* Caution: the silent array decomposition semantics we allow for
4925 ADDR_EXPR means we can't always discard the pair. */
4926 /* Gimplification of the ADDR_EXPR operand may drop
4927 cv-qualification conversions, so make sure we add them if
4928 needed. */
4930 tree op00 = TREE_OPERAND (op0, 0);
4931 tree t_expr = TREE_TYPE (expr);
4932 tree t_op00 = TREE_TYPE (op00);
4934 if (!useless_type_conversion_p (t_expr, t_op00))
4935 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4936 *expr_p = op00;
4937 ret = GS_OK;
4939 break;
4941 case VIEW_CONVERT_EXPR:
4942 /* Take the address of our operand and then convert it to the type of
4943 this ADDR_EXPR.
4945 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4946 all clear. The impact of this transformation is even less clear. */
4948 /* If the operand is a useless conversion, look through it. Doing so
4949 guarantees that the ADDR_EXPR and its operand will remain of the
4950 same type. */
4951 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4952 op0 = TREE_OPERAND (op0, 0);
4954 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4955 build_fold_addr_expr_loc (loc,
4956 TREE_OPERAND (op0, 0)));
4957 ret = GS_OK;
4958 break;
4960 default:
4961 /* We use fb_either here because the C frontend sometimes takes
4962 the address of a call that returns a struct; see
4963 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4964 the implied temporary explicit. */
4966 /* Make the operand addressable. */
4967 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4968 is_gimple_addressable, fb_either);
4969 if (ret == GS_ERROR)
4970 break;
4972 /* Then mark it. Beware that it may not be possible to do so directly
4973 if a temporary has been created by the gimplification. */
4974 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4976 op0 = TREE_OPERAND (expr, 0);
4978 /* For various reasons, the gimplification of the expression
4979 may have made a new INDIRECT_REF. */
4980 if (TREE_CODE (op0) == INDIRECT_REF)
4981 goto do_indirect_ref;
4983 mark_addressable (TREE_OPERAND (expr, 0));
4985 /* The FEs may end up building ADDR_EXPRs early on a decl with
4986 an incomplete type. Re-build ADDR_EXPRs in canonical form
4987 here. */
4988 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4989 *expr_p = build_fold_addr_expr (op0);
4991 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4992 recompute_tree_invariant_for_addr_expr (*expr_p);
4994 /* If we re-built the ADDR_EXPR add a conversion to the original type
4995 if required. */
4996 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4997 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4999 break;
5002 return ret;
5005 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5006 value; output operands should be a gimple lvalue. */
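/* One illustrative transformation done here: an in/out operand such as

     asm ("..." : "+r" (x));

   is split into a plain output plus a matching input, roughly

     asm ("..." : "=r" (x) : "0" (x));

   so that the optimizers can treat the two uses independently.  The
   constraint strings shown are examples only.  */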
5008 static enum gimplify_status
5009 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5011 tree expr;
5012 int noutputs;
5013 const char **oconstraints;
5014 int i;
5015 tree link;
5016 const char *constraint;
5017 bool allows_mem, allows_reg, is_inout;
5018 enum gimplify_status ret, tret;
5019 gimple stmt;
5020 VEC(tree, gc) *inputs;
5021 VEC(tree, gc) *outputs;
5022 VEC(tree, gc) *clobbers;
5023 VEC(tree, gc) *labels;
5024 tree link_next;
5026 expr = *expr_p;
5027 noutputs = list_length (ASM_OUTPUTS (expr));
5028 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5030 inputs = outputs = clobbers = labels = NULL;
5032 ret = GS_ALL_DONE;
5033 link_next = NULL_TREE;
5034 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5036 bool ok;
5037 size_t constraint_len;
5039 link_next = TREE_CHAIN (link);
5041 oconstraints[i]
5042 = constraint
5043 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5044 constraint_len = strlen (constraint);
5045 if (constraint_len == 0)
5046 continue;
5048 ok = parse_output_constraint (&constraint, i, 0, 0,
5049 &allows_mem, &allows_reg, &is_inout);
5050 if (!ok)
5052 ret = GS_ERROR;
5053 is_inout = false;
5056 if (!allows_reg && allows_mem)
5057 mark_addressable (TREE_VALUE (link));
5059 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5060 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5061 fb_lvalue | fb_mayfail);
5062 if (tret == GS_ERROR)
5064 error ("invalid lvalue in asm output %d", i);
5065 ret = tret;
5068 VEC_safe_push (tree, gc, outputs, link);
5069 TREE_CHAIN (link) = NULL_TREE;
5071 if (is_inout)
5073 /* An input/output operand. To give the optimizers more
5074 flexibility, split it into separate input and output
5075 operands. */
5076 tree input;
5077 char buf[10];
5079 /* Turn the in/out constraint into an output constraint. */
5080 char *p = xstrdup (constraint);
5081 p[0] = '=';
5082 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5084 /* And add a matching input constraint. */
5085 if (allows_reg)
5087 sprintf (buf, "%d", i);
5089 /* If there are multiple alternatives in the constraint,
5090 handle each of them individually. Those that allow a register
5091 will be replaced with the operand number; the others will stay
5092 unchanged. */
5093 if (strchr (p, ',') != NULL)
5095 size_t len = 0, buflen = strlen (buf);
5096 char *beg, *end, *str, *dst;
5098 for (beg = p + 1;;)
5100 end = strchr (beg, ',');
5101 if (end == NULL)
5102 end = strchr (beg, '\0');
5103 if ((size_t) (end - beg) < buflen)
5104 len += buflen + 1;
5105 else
5106 len += end - beg + 1;
5107 if (*end)
5108 beg = end + 1;
5109 else
5110 break;
5113 str = (char *) alloca (len);
5114 for (beg = p + 1, dst = str;;)
5116 const char *tem;
5117 bool mem_p, reg_p, inout_p;
5119 end = strchr (beg, ',');
5120 if (end)
5121 *end = '\0';
5122 beg[-1] = '=';
5123 tem = beg - 1;
5124 parse_output_constraint (&tem, i, 0, 0,
5125 &mem_p, &reg_p, &inout_p);
5126 if (dst != str)
5127 *dst++ = ',';
5128 if (reg_p)
5130 memcpy (dst, buf, buflen);
5131 dst += buflen;
5133 else
5135 if (end)
5136 len = end - beg;
5137 else
5138 len = strlen (beg);
5139 memcpy (dst, beg, len);
5140 dst += len;
5142 if (end)
5143 beg = end + 1;
5144 else
5145 break;
5147 *dst = '\0';
5148 input = build_string (dst - str, str);
5150 else
5151 input = build_string (strlen (buf), buf);
5153 else
5154 input = build_string (constraint_len - 1, constraint + 1);
5156 free (p);
5158 input = build_tree_list (build_tree_list (NULL_TREE, input),
5159 unshare_expr (TREE_VALUE (link)));
5160 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5164 link_next = NULL_TREE;
5165 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5167 link_next = TREE_CHAIN (link);
5168 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5169 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5170 oconstraints, &allows_mem, &allows_reg);
5172 /* If we can't make copies, we can only accept memory. */
5173 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5175 if (allows_mem)
5176 allows_reg = 0;
5177 else
5179 error ("impossible constraint in %<asm%>");
5180 error ("non-memory input %d must stay in memory", i);
5181 return GS_ERROR;
5185 /* If the operand is a memory input, it should be an lvalue. */
5186 if (!allows_reg && allows_mem)
5188 tree inputv = TREE_VALUE (link);
5189 STRIP_NOPS (inputv);
5190 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5191 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5192 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5193 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5194 TREE_VALUE (link) = error_mark_node;
5195 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5196 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5197 mark_addressable (TREE_VALUE (link));
5198 if (tret == GS_ERROR)
5200 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5201 input_location = EXPR_LOCATION (TREE_VALUE (link));
5202 error ("memory input %d is not directly addressable", i);
5203 ret = tret;
5206 else
5208 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5209 is_gimple_asm_val, fb_rvalue);
5210 if (tret == GS_ERROR)
5211 ret = tret;
5214 TREE_CHAIN (link) = NULL_TREE;
5215 VEC_safe_push (tree, gc, inputs, link);
5218 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5219 VEC_safe_push (tree, gc, clobbers, link);
5221 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5222 VEC_safe_push (tree, gc, labels, link);
5224 /* Do not add ASMs with errors to the gimple IL stream. */
5225 if (ret != GS_ERROR)
5227 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5228 inputs, outputs, clobbers, labels);
5230 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5231 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5233 gimplify_seq_add_stmt (pre_p, stmt);
5236 return ret;
5239 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5240 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5241 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5242 return to this function.
5244 FIXME should we complexify the prequeue handling instead? Or use flags
5245 for all the cleanups and let the optimizer tighten them up? The current
5246 code seems pretty fragile; it will break on a cleanup within any
5247 non-conditional nesting. But any such nesting would be broken, anyway;
5248 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5249 and continues out of it. We can do that at the RTL level, though, so
5250 having an optimizer to tighten up try/finally regions would be a Good
5251 Thing. */
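/* A rough sketch of the rewriting (layout only, not real IL syntax):

     CLEANUP_POINT_EXPR
       stmt1; WITH_CLEANUP_EXPR <cleanup>; stmt2;

   becomes

     stmt1;
     try { stmt2; } finally { cleanup; }

   and a cleanup that is the last statement is simply emitted inline.  */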
5253 static enum gimplify_status
5254 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5256 gimple_stmt_iterator iter;
5257 gimple_seq body_sequence = NULL;
5259 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5261 /* We only care about the number of conditions between the innermost
5262 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5263 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5264 int old_conds = gimplify_ctxp->conditions;
5265 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5266 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5267 gimplify_ctxp->conditions = 0;
5268 gimplify_ctxp->conditional_cleanups = NULL;
5269 gimplify_ctxp->in_cleanup_point_expr = true;
5271 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5273 gimplify_ctxp->conditions = old_conds;
5274 gimplify_ctxp->conditional_cleanups = old_cleanups;
5275 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5277 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5279 gimple wce = gsi_stmt (iter);
5281 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5283 if (gsi_one_before_end_p (iter))
5285 /* Note that gsi_insert_seq_before and gsi_remove do not
5286 scan operands, unlike some other sequence mutators. */
5287 if (!gimple_wce_cleanup_eh_only (wce))
5288 gsi_insert_seq_before_without_update (&iter,
5289 gimple_wce_cleanup (wce),
5290 GSI_SAME_STMT);
5291 gsi_remove (&iter, true);
5292 break;
5294 else
5296 gimple gtry;
5297 gimple_seq seq;
5298 enum gimple_try_flags kind;
5300 if (gimple_wce_cleanup_eh_only (wce))
5301 kind = GIMPLE_TRY_CATCH;
5302 else
5303 kind = GIMPLE_TRY_FINALLY;
5304 seq = gsi_split_seq_after (iter);
5306 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5307 /* Do not use gsi_replace here, as it may scan operands.
5308 We want to do a simple structural modification only. */
5309 *gsi_stmt_ptr (&iter) = gtry;
5310 iter = gsi_start (seq);
5313 else
5314 gsi_next (&iter);
5317 gimplify_seq_add_seq (pre_p, body_sequence);
5318 if (temp)
5320 *expr_p = temp;
5321 return GS_OK;
5323 else
5325 *expr_p = NULL;
5326 return GS_ALL_DONE;
5330 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5331 is the cleanup action required. EH_ONLY is true if the cleanup should
5332 only be executed if an exception is thrown, not on normal exit. */
5334 static void
5335 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5337 gimple wce;
5338 gimple_seq cleanup_stmts = NULL;
5340 /* Errors can result in improperly nested cleanups. Which results in
5341 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5342 if (seen_error ())
5343 return;
5345 if (gimple_conditional_context ())
5347 /* If we're in a conditional context, this is more complex. We only
5348 want to run the cleanup if we actually ran the initialization that
5349 necessitates it, but we want to run it after the end of the
5350 conditional context. So we wrap the try/finally around the
5351 condition and use a flag to determine whether or not to actually
5352 run the destructor. Thus
5354 test ? f(A()) : 0
5356 becomes (approximately)
5358 flag = 0;
5359 try {
5360 if (test) { A::A(temp); flag = 1; val = f(temp); }
5361 else { val = 0; }
5362 } finally {
5363 if (flag) A::~A(temp);
5367 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5368 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5369 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5371 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5372 gimplify_stmt (&cleanup, &cleanup_stmts);
5373 wce = gimple_build_wce (cleanup_stmts);
5375 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5376 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5377 gimplify_seq_add_stmt (pre_p, ftrue);
5379 /* Because of this manipulation, and the EH edges that jump
5380 threading cannot redirect, the temporary (VAR) will appear
5381 to be used uninitialized. Don't warn. */
5382 TREE_NO_WARNING (var) = 1;
5384 else
5386 gimplify_stmt (&cleanup, &cleanup_stmts);
5387 wce = gimple_build_wce (cleanup_stmts);
5388 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5389 gimplify_seq_add_stmt (pre_p, wce);
5393 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5395 static enum gimplify_status
5396 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5398 tree targ = *expr_p;
5399 tree temp = TARGET_EXPR_SLOT (targ);
5400 tree init = TARGET_EXPR_INITIAL (targ);
5401 enum gimplify_status ret;
5403 if (init)
5405 tree cleanup = NULL_TREE;
5407 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5408 to the temps list. Handle also variable length TARGET_EXPRs. */
5409 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5411 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5412 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5413 gimplify_vla_decl (temp, pre_p);
5415 else
5416 gimple_add_tmp_var (temp);
5418 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5419 expression is supposed to initialize the slot. */
5420 if (VOID_TYPE_P (TREE_TYPE (init)))
5421 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5422 else
5424 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5425 init = init_expr;
5426 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5427 init = NULL;
5428 ggc_free (init_expr);
5430 if (ret == GS_ERROR)
5432 /* PR c++/28266 Make sure this is expanded only once. */
5433 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5434 return GS_ERROR;
5436 if (init)
5437 gimplify_and_add (init, pre_p);
5439 /* If needed, push the cleanup for the temp. */
5440 if (TARGET_EXPR_CLEANUP (targ))
5442 if (CLEANUP_EH_ONLY (targ))
5443 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5444 CLEANUP_EH_ONLY (targ), pre_p);
5445 else
5446 cleanup = TARGET_EXPR_CLEANUP (targ);
5449 /* Add a clobber for the temporary going out of scope, like
5450 gimplify_bind_expr. */
5451 if (gimplify_ctxp->in_cleanup_point_expr
5452 && needs_to_live_in_memory (temp))
5454 tree clobber = build_constructor (TREE_TYPE (temp), NULL);
5455 TREE_THIS_VOLATILE (clobber) = true;
5456 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5457 if (cleanup)
5458 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5459 clobber);
5460 else
5461 cleanup = clobber;
5464 if (cleanup)
5465 gimple_push_cleanup (temp, cleanup, false, pre_p);
5467 /* Only expand this once. */
5468 TREE_OPERAND (targ, 3) = init;
5469 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5471 else
5472 /* We should have expanded this before. */
5473 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5475 *expr_p = temp;
5476 return GS_OK;
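/* Illustrative sketch (hypothetical names, not from the original sources):
   for a C++ call such as

	use (S ());

   the front end produces roughly

	use (TARGET_EXPR <D.1, S::S (&D.1)>)

   and the code above emits approximately

	S::S (&D.1);
	use (D.1);

   with S::~S (&D.1) (plus a clobber of D.1 when the slot lives in memory)
   pushed as the cleanup via gimple_push_cleanup.  */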
5479 /* Gimplification of expression trees. */
5481 /* Gimplify an expression which appears at statement context. The
5482 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5483 NULL, a new sequence is allocated.
5485 Return true if we actually added a statement to the queue. */
5487 bool
5488 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5490 gimple_seq_node last;
5492 if (!*seq_p)
5493 *seq_p = gimple_seq_alloc ();
5495 last = gimple_seq_last (*seq_p);
5496 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5497 return last != gimple_seq_last (*seq_p);
5500 /* Add FIRSTPRIVATE entries for DECL to CTX and the OpenMP parallels
5501 surrounding it.  If entries already exist, force them to be some flavor
5502 of private.  If there is no enclosing parallel, do nothing.  */
5504 void
5505 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5507 splay_tree_node n;
5509 if (decl == NULL || !DECL_P (decl))
5510 return;
5514 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5515 if (n != NULL)
5517 if (n->value & GOVD_SHARED)
5518 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5519 else
5520 return;
5522 else if (ctx->region_type != ORT_WORKSHARE)
5523 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5525 ctx = ctx->outer_context;
5527 while (ctx);
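/* For illustration (hypothetical example): when a variable-length array
   such as "int a[n]" is used inside "#pragma omp parallel", the formal
   temporaries holding its size are given FIRSTPRIVATE entries in each
   enclosing parallel context by this routine, so the size is available
   inside the region.  */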
5530 /* Similarly for each of the type sizes of TYPE. */
5532 static void
5533 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5535 if (type == NULL || type == error_mark_node)
5536 return;
5537 type = TYPE_MAIN_VARIANT (type);
5539 if (pointer_set_insert (ctx->privatized_types, type))
5540 return;
5542 switch (TREE_CODE (type))
5544 case INTEGER_TYPE:
5545 case ENUMERAL_TYPE:
5546 case BOOLEAN_TYPE:
5547 case REAL_TYPE:
5548 case FIXED_POINT_TYPE:
5549 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5550 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5551 break;
5553 case ARRAY_TYPE:
5554 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5555 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5556 break;
5558 case RECORD_TYPE:
5559 case UNION_TYPE:
5560 case QUAL_UNION_TYPE:
5562 tree field;
5563 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5564 if (TREE_CODE (field) == FIELD_DECL)
5566 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5567 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5570 break;
5572 case POINTER_TYPE:
5573 case REFERENCE_TYPE:
5574 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5575 break;
5577 default:
5578 break;
5581 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5582 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5583 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5586 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5588 static void
5589 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5591 splay_tree_node n;
5592 unsigned int nflags;
5593 tree t;
5595 if (error_operand_p (decl))
5596 return;
5598 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5599 there are constructors involved somewhere. */
5600 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5601 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5602 flags |= GOVD_SEEN;
5604 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5605 if (n != NULL)
5607 /* We shouldn't be re-adding the decl with the same data
5608 sharing class. */
5609 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5610 /* The only combination of data sharing classes we should see is
5611 FIRSTPRIVATE and LASTPRIVATE. */
5612 nflags = n->value | flags;
5613 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5614 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5615 n->value = nflags;
5616 return;
5619 /* When adding a variable-sized variable, we have to handle all sorts
5620 of additional bits of data: the pointer replacement variable, and
5621 the parameters of the type. */
5622 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5624 /* Add the pointer replacement variable as PRIVATE if the variable
5625 replacement is private, else FIRSTPRIVATE since we'll need the
5626 address of the original variable either for SHARED, or for the
5627 copy into or out of the context. */
5628 if (!(flags & GOVD_LOCAL))
5630 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5631 nflags |= flags & GOVD_SEEN;
5632 t = DECL_VALUE_EXPR (decl);
5633 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5634 t = TREE_OPERAND (t, 0);
5635 gcc_assert (DECL_P (t));
5636 omp_add_variable (ctx, t, nflags);
5639 /* Add all of the variable and type parameters (which should have
5640 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5641 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5642 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5643 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5645 /* The variable-sized variable itself is never SHARED, only some form
5646 of PRIVATE. The sharing would take place via the pointer variable
5647 which we remapped above. */
5648 if (flags & GOVD_SHARED)
5649 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5650 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5652 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5653 alloca statement we generate for the variable, so make sure it
5654 is available. This isn't automatically needed for the SHARED
5655 case, since we won't be allocating local storage then.
5656 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5657 in this case omp_notice_variable will be called later
5658 on when it is gimplified. */
5659 else if (! (flags & GOVD_LOCAL)
5660 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5661 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5663 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5665 gcc_assert ((flags & GOVD_LOCAL) == 0);
5666 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5668 /* Similar to the direct variable sized case above, we'll need the
5669 size of references being privatized. */
5670 if ((flags & GOVD_SHARED) == 0)
5672 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5673 if (TREE_CODE (t) != INTEGER_CST)
5674 omp_notice_variable (ctx, t, true);
5678 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5681 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5682 This just prints out diagnostics about threadprivate variable uses
5683 in untied tasks. If DECL2 is non-NULL, prevent this warning
5684 on that variable. */
5686 static bool
5687 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5688 tree decl2)
5690 splay_tree_node n;
5692 if (ctx->region_type != ORT_UNTIED_TASK)
5693 return false;
5694 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5695 if (n == NULL)
5697 error ("threadprivate variable %qE used in untied task",
5698 DECL_NAME (decl));
5699 error_at (ctx->location, "enclosing task");
5700 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5702 if (decl2)
5703 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5704 return false;
5707 /* Record the fact that DECL was used within the OpenMP context CTX.
5708 IN_CODE is true when real code uses DECL, and false when we should
5709 merely emit default(none) errors. Return true if DECL is going to
5710 be remapped and thus DECL shouldn't be gimplified into its
5711 DECL_VALUE_EXPR (if any). */
5713 static bool
5714 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5716 splay_tree_node n;
5717 unsigned flags = in_code ? GOVD_SEEN : 0;
5718 bool ret = false, shared;
5720 if (error_operand_p (decl))
5721 return false;
5723 /* Threadprivate variables are predetermined. */
5724 if (is_global_var (decl))
5726 if (DECL_THREAD_LOCAL_P (decl))
5727 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5729 if (DECL_HAS_VALUE_EXPR_P (decl))
5731 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5733 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5734 return omp_notice_threadprivate_variable (ctx, decl, value);
5738 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5739 if (n == NULL)
5741 enum omp_clause_default_kind default_kind, kind;
5742 struct gimplify_omp_ctx *octx;
5744 if (ctx->region_type == ORT_WORKSHARE)
5745 goto do_outer;
5747 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5748 remapped firstprivate instead of shared. To some extent this is
5749 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5750 default_kind = ctx->default_kind;
5751 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5752 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5753 default_kind = kind;
5755 switch (default_kind)
5757 case OMP_CLAUSE_DEFAULT_NONE:
5758 error ("%qE not specified in enclosing parallel",
5759 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5760 if ((ctx->region_type & ORT_TASK) != 0)
5761 error_at (ctx->location, "enclosing task");
5762 else
5763 error_at (ctx->location, "enclosing parallel");
5764 /* FALLTHRU */
5765 case OMP_CLAUSE_DEFAULT_SHARED:
5766 flags |= GOVD_SHARED;
5767 break;
5768 case OMP_CLAUSE_DEFAULT_PRIVATE:
5769 flags |= GOVD_PRIVATE;
5770 break;
5771 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5772 flags |= GOVD_FIRSTPRIVATE;
5773 break;
5774 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5775 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5776 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5777 if (ctx->outer_context)
5778 omp_notice_variable (ctx->outer_context, decl, in_code);
5779 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5781 splay_tree_node n2;
5783 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5784 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5786 flags |= GOVD_FIRSTPRIVATE;
5787 break;
5789 if ((octx->region_type & ORT_PARALLEL) != 0)
5790 break;
5792 if (flags & GOVD_FIRSTPRIVATE)
5793 break;
5794 if (octx == NULL
5795 && (TREE_CODE (decl) == PARM_DECL
5796 || (!is_global_var (decl)
5797 && DECL_CONTEXT (decl) == current_function_decl)))
5799 flags |= GOVD_FIRSTPRIVATE;
5800 break;
5802 flags |= GOVD_SHARED;
5803 break;
5804 default:
5805 gcc_unreachable ();
5808 if ((flags & GOVD_PRIVATE)
5809 && lang_hooks.decls.omp_private_outer_ref (decl))
5810 flags |= GOVD_PRIVATE_OUTER_REF;
5812 omp_add_variable (ctx, decl, flags);
5814 shared = (flags & GOVD_SHARED) != 0;
5815 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5816 goto do_outer;
5819 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5820 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5821 && DECL_SIZE (decl)
5822 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5824 splay_tree_node n2;
5825 tree t = DECL_VALUE_EXPR (decl);
5826 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5827 t = TREE_OPERAND (t, 0);
5828 gcc_assert (DECL_P (t));
5829 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5830 n2->value |= GOVD_SEEN;
5833 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5834 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5836 /* If nothing changed, there's nothing left to do. */
5837 if ((n->value & flags) == flags)
5838 return ret;
5839 flags |= n->value;
5840 n->value = flags;
5842 do_outer:
5843 /* If the variable is private in the current context, then we don't
5844 need to propagate anything to an outer context. */
5845 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5846 return ret;
5847 if (ctx->outer_context
5848 && omp_notice_variable (ctx->outer_context, decl, in_code))
5849 return true;
5850 return ret;
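/* Illustrative example (hypothetical source): with

	#pragma omp parallel default(none)
	  x++;

   and no data-sharing clause for x, the OMP_CLAUSE_DEFAULT_NONE case above
   reports that x was "not specified in enclosing parallel"; under the
   default "shared" policy the use is instead recorded as GOVD_SHARED and
   later turned into an implicit shared(x) clause by
   gimplify_adjust_omp_clauses_1.  */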
5853 /* Verify that DECL is private within CTX. If there's specific information
5854 to the contrary in the innermost scope, generate an error. */
5856 static bool
5857 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5859 splay_tree_node n;
5861 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5862 if (n != NULL)
5864 if (n->value & GOVD_SHARED)
5866 if (ctx == gimplify_omp_ctxp)
5868 error ("iteration variable %qE should be private",
5869 DECL_NAME (decl));
5870 n->value = GOVD_PRIVATE;
5871 return true;
5873 else
5874 return false;
5876 else if ((n->value & GOVD_EXPLICIT) != 0
5877 && (ctx == gimplify_omp_ctxp
5878 || (ctx->region_type == ORT_COMBINED_PARALLEL
5879 && gimplify_omp_ctxp->outer_context == ctx)))
5881 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5882 error ("iteration variable %qE should not be firstprivate",
5883 DECL_NAME (decl));
5884 else if ((n->value & GOVD_REDUCTION) != 0)
5885 error ("iteration variable %qE should not be reduction",
5886 DECL_NAME (decl));
5888 return (ctx == gimplify_omp_ctxp
5889 || (ctx->region_type == ORT_COMBINED_PARALLEL
5890 && gimplify_omp_ctxp->outer_context == ctx));
5893 if (ctx->region_type != ORT_WORKSHARE)
5894 return false;
5895 else if (ctx->outer_context)
5896 return omp_is_private (ctx->outer_context, decl);
5897 return false;
5900 /* Return true if DECL is private within a parallel region
5901 that binds to the current construct's context, or appears in that
5902 parallel region's REDUCTION clause.  */
5904 static bool
5905 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5907 splay_tree_node n;
5911 ctx = ctx->outer_context;
5912 if (ctx == NULL)
5913 return !(is_global_var (decl)
5914 /* References might be private, but might be shared too. */
5915 || lang_hooks.decls.omp_privatize_by_reference (decl));
5917 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5918 if (n != NULL)
5919 return (n->value & GOVD_SHARED) == 0;
5921 while (ctx->region_type == ORT_WORKSHARE);
5922 return false;
5925 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5926 omp context and into the previously existing enclosing contexts.  */
5928 static void
5929 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5930 enum omp_region_type region_type)
5932 struct gimplify_omp_ctx *ctx, *outer_ctx;
5933 struct gimplify_ctx gctx;
5934 tree c;
5936 ctx = new_omp_context (region_type);
5937 outer_ctx = ctx->outer_context;
5939 while ((c = *list_p) != NULL)
5941 bool remove = false;
5942 bool notice_outer = true;
5943 const char *check_non_private = NULL;
5944 unsigned int flags;
5945 tree decl;
5947 switch (OMP_CLAUSE_CODE (c))
5949 case OMP_CLAUSE_PRIVATE:
5950 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5951 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5953 flags |= GOVD_PRIVATE_OUTER_REF;
5954 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5956 else
5957 notice_outer = false;
5958 goto do_add;
5959 case OMP_CLAUSE_SHARED:
5960 flags = GOVD_SHARED | GOVD_EXPLICIT;
5961 goto do_add;
5962 case OMP_CLAUSE_FIRSTPRIVATE:
5963 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5964 check_non_private = "firstprivate";
5965 goto do_add;
5966 case OMP_CLAUSE_LASTPRIVATE:
5967 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5968 check_non_private = "lastprivate";
5969 goto do_add;
5970 case OMP_CLAUSE_REDUCTION:
5971 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5972 check_non_private = "reduction";
5973 goto do_add;
5975 do_add:
5976 decl = OMP_CLAUSE_DECL (c);
5977 if (error_operand_p (decl))
5979 remove = true;
5980 break;
5982 omp_add_variable (ctx, decl, flags);
5983 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5984 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5986 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5987 GOVD_LOCAL | GOVD_SEEN);
5988 gimplify_omp_ctxp = ctx;
5989 push_gimplify_context (&gctx);
5991 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5992 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5994 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5995 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5996 pop_gimplify_context
5997 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5998 push_gimplify_context (&gctx);
5999 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6000 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6001 pop_gimplify_context
6002 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6003 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6004 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6006 gimplify_omp_ctxp = outer_ctx;
6008 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6009 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6011 gimplify_omp_ctxp = ctx;
6012 push_gimplify_context (&gctx);
6013 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6015 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6016 NULL, NULL);
6017 TREE_SIDE_EFFECTS (bind) = 1;
6018 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6019 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6021 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6022 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6023 pop_gimplify_context
6024 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6025 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6027 gimplify_omp_ctxp = outer_ctx;
6029 if (notice_outer)
6030 goto do_notice;
6031 break;
6033 case OMP_CLAUSE_COPYIN:
6034 case OMP_CLAUSE_COPYPRIVATE:
6035 decl = OMP_CLAUSE_DECL (c);
6036 if (error_operand_p (decl))
6038 remove = true;
6039 break;
6041 do_notice:
6042 if (outer_ctx)
6043 omp_notice_variable (outer_ctx, decl, true);
6044 if (check_non_private
6045 && region_type == ORT_WORKSHARE
6046 && omp_check_private (ctx, decl))
6048 error ("%s variable %qE is private in outer context",
6049 check_non_private, DECL_NAME (decl));
6050 remove = true;
6052 break;
6054 case OMP_CLAUSE_FINAL:
6055 case OMP_CLAUSE_IF:
6056 OMP_CLAUSE_OPERAND (c, 0)
6057 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6058 /* Fall through. */
6060 case OMP_CLAUSE_SCHEDULE:
6061 case OMP_CLAUSE_NUM_THREADS:
6062 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6063 is_gimple_val, fb_rvalue) == GS_ERROR)
6064 remove = true;
6065 break;
6067 case OMP_CLAUSE_NOWAIT:
6068 case OMP_CLAUSE_ORDERED:
6069 case OMP_CLAUSE_UNTIED:
6070 case OMP_CLAUSE_COLLAPSE:
6071 case OMP_CLAUSE_MERGEABLE:
6072 break;
6074 case OMP_CLAUSE_DEFAULT:
6075 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6076 break;
6078 default:
6079 gcc_unreachable ();
6082 if (remove)
6083 *list_p = OMP_CLAUSE_CHAIN (c);
6084 else
6085 list_p = &OMP_CLAUSE_CHAIN (c);
6088 gimplify_omp_ctxp = ctx;
6091 /* For all variables that were not actually used within the context,
6092 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6094 static int
6095 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6097 tree *list_p = (tree *) data;
6098 tree decl = (tree) n->key;
6099 unsigned flags = n->value;
6100 enum omp_clause_code code;
6101 tree clause;
6102 bool private_debug;
6104 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6105 return 0;
6106 if ((flags & GOVD_SEEN) == 0)
6107 return 0;
6108 if (flags & GOVD_DEBUG_PRIVATE)
6110 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6111 private_debug = true;
6113 else
6114 private_debug
6115 = lang_hooks.decls.omp_private_debug_clause (decl,
6116 !!(flags & GOVD_SHARED));
6117 if (private_debug)
6118 code = OMP_CLAUSE_PRIVATE;
6119 else if (flags & GOVD_SHARED)
6121 if (is_global_var (decl))
6123 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6124 while (ctx != NULL)
6126 splay_tree_node on
6127 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6128 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6129 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
6130 break;
6131 ctx = ctx->outer_context;
6133 if (ctx == NULL)
6134 return 0;
6136 code = OMP_CLAUSE_SHARED;
6138 else if (flags & GOVD_PRIVATE)
6139 code = OMP_CLAUSE_PRIVATE;
6140 else if (flags & GOVD_FIRSTPRIVATE)
6141 code = OMP_CLAUSE_FIRSTPRIVATE;
6142 else
6143 gcc_unreachable ();
6145 clause = build_omp_clause (input_location, code);
6146 OMP_CLAUSE_DECL (clause) = decl;
6147 OMP_CLAUSE_CHAIN (clause) = *list_p;
6148 if (private_debug)
6149 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6150 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6151 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6152 *list_p = clause;
6153 lang_hooks.decls.omp_finish_clause (clause);
6155 return 0;
6158 static void
6159 gimplify_adjust_omp_clauses (tree *list_p)
6161 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6162 tree c, decl;
6164 while ((c = *list_p) != NULL)
6166 splay_tree_node n;
6167 bool remove = false;
6169 switch (OMP_CLAUSE_CODE (c))
6171 case OMP_CLAUSE_PRIVATE:
6172 case OMP_CLAUSE_SHARED:
6173 case OMP_CLAUSE_FIRSTPRIVATE:
6174 decl = OMP_CLAUSE_DECL (c);
6175 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6176 remove = !(n->value & GOVD_SEEN);
6177 if (! remove)
6179 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6180 if ((n->value & GOVD_DEBUG_PRIVATE)
6181 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6183 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6184 || ((n->value & GOVD_DATA_SHARE_CLASS)
6185 == GOVD_PRIVATE));
6186 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6187 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6190 break;
6192 case OMP_CLAUSE_LASTPRIVATE:
6193 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6194 accurately reflect the presence of a FIRSTPRIVATE clause. */
6195 decl = OMP_CLAUSE_DECL (c);
6196 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6197 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6198 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6199 break;
6201 case OMP_CLAUSE_REDUCTION:
6202 case OMP_CLAUSE_COPYIN:
6203 case OMP_CLAUSE_COPYPRIVATE:
6204 case OMP_CLAUSE_IF:
6205 case OMP_CLAUSE_NUM_THREADS:
6206 case OMP_CLAUSE_SCHEDULE:
6207 case OMP_CLAUSE_NOWAIT:
6208 case OMP_CLAUSE_ORDERED:
6209 case OMP_CLAUSE_DEFAULT:
6210 case OMP_CLAUSE_UNTIED:
6211 case OMP_CLAUSE_COLLAPSE:
6212 case OMP_CLAUSE_FINAL:
6213 case OMP_CLAUSE_MERGEABLE:
6214 break;
6216 default:
6217 gcc_unreachable ();
6220 if (remove)
6221 *list_p = OMP_CLAUSE_CHAIN (c);
6222 else
6223 list_p = &OMP_CLAUSE_CHAIN (c);
6226 /* Add in any implicit data sharing. */
6227 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6229 gimplify_omp_ctxp = ctx->outer_context;
6230 delete_omp_context (ctx);
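/* For illustration (hypothetical source): for

	int x = 0;
	#pragma omp parallel
	  x++;

   no explicit clause mentions x, so gimplify_adjust_omp_clauses_1 adds an
   implicit shared(x) clause (or a private/firstprivate one, depending on
   the recorded GOVD_* flags) before the region is lowered.  */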
6233 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6234 gimplification of the body, as well as scanning the body for used
6235 variables. We need to do this scan now, because variable-sized
6236 decls will be decomposed during gimplification. */
6238 static void
6239 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6241 tree expr = *expr_p;
6242 gimple g;
6243 gimple_seq body = NULL;
6244 struct gimplify_ctx gctx;
6246 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6247 OMP_PARALLEL_COMBINED (expr)
6248 ? ORT_COMBINED_PARALLEL
6249 : ORT_PARALLEL);
6251 push_gimplify_context (&gctx);
6253 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6254 if (gimple_code (g) == GIMPLE_BIND)
6255 pop_gimplify_context (g);
6256 else
6257 pop_gimplify_context (NULL);
6259 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6261 g = gimple_build_omp_parallel (body,
6262 OMP_PARALLEL_CLAUSES (expr),
6263 NULL_TREE, NULL_TREE);
6264 if (OMP_PARALLEL_COMBINED (expr))
6265 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6266 gimplify_seq_add_stmt (pre_p, g);
6267 *expr_p = NULL_TREE;
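/* Sketch of the result (approximate): an OMP_PARALLEL tree such as

	#pragma omp parallel shared(x)
	  body;

   becomes a GIMPLE_OMP_PARALLEL statement carrying the adjusted clause
   list and the gimplified body as its sequence.  */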
6270 /* Gimplify the contents of an OMP_TASK statement. This involves
6271 gimplification of the body, as well as scanning the body for used
6272 variables. We need to do this scan now, because variable-sized
6273 decls will be decomposed during gimplification. */
6275 static void
6276 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6278 tree expr = *expr_p;
6279 gimple g;
6280 gimple_seq body = NULL;
6281 struct gimplify_ctx gctx;
6283 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6284 find_omp_clause (OMP_TASK_CLAUSES (expr),
6285 OMP_CLAUSE_UNTIED)
6286 ? ORT_UNTIED_TASK : ORT_TASK);
6288 push_gimplify_context (&gctx);
6290 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6291 if (gimple_code (g) == GIMPLE_BIND)
6292 pop_gimplify_context (g);
6293 else
6294 pop_gimplify_context (NULL);
6296 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6298 g = gimple_build_omp_task (body,
6299 OMP_TASK_CLAUSES (expr),
6300 NULL_TREE, NULL_TREE,
6301 NULL_TREE, NULL_TREE, NULL_TREE);
6302 gimplify_seq_add_stmt (pre_p, g);
6303 *expr_p = NULL_TREE;
6306 /* Gimplify the gross structure of an OMP_FOR statement. */
6308 static enum gimplify_status
6309 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6311 tree for_stmt, decl, var, t;
6312 enum gimplify_status ret = GS_ALL_DONE;
6313 enum gimplify_status tret;
6314 gimple gfor;
6315 gimple_seq for_body, for_pre_body;
6316 int i;
6318 for_stmt = *expr_p;
6320 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6321 ORT_WORKSHARE);
6323 /* Handle OMP_FOR_INIT. */
6324 for_pre_body = NULL;
6325 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6326 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6328 for_body = gimple_seq_alloc ();
6329 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6330 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6331 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6332 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6333 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6335 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6336 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6337 decl = TREE_OPERAND (t, 0);
6338 gcc_assert (DECL_P (decl));
6339 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6340 || POINTER_TYPE_P (TREE_TYPE (decl)));
6342 /* Make sure the iteration variable is private. */
6343 if (omp_is_private (gimplify_omp_ctxp, decl))
6344 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6345 else
6346 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6348 /* If DECL is not a gimple register, create a temporary variable to act
6349 as an iteration counter. This is valid, since DECL cannot be
6350 modified in the body of the loop. */
6351 if (!is_gimple_reg (decl))
6353 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6354 TREE_OPERAND (t, 0) = var;
6356 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6358 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6360 else
6361 var = decl;
6363 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6364 is_gimple_val, fb_rvalue);
6365 ret = MIN (ret, tret);
6366 if (ret == GS_ERROR)
6367 return ret;
6369 /* Handle OMP_FOR_COND. */
6370 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6371 gcc_assert (COMPARISON_CLASS_P (t));
6372 gcc_assert (TREE_OPERAND (t, 0) == decl);
6374 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6375 is_gimple_val, fb_rvalue);
6376 ret = MIN (ret, tret);
6378 /* Handle OMP_FOR_INCR. */
6379 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6380 switch (TREE_CODE (t))
6382 case PREINCREMENT_EXPR:
6383 case POSTINCREMENT_EXPR:
6384 t = build_int_cst (TREE_TYPE (decl), 1);
6385 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6386 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6387 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6388 break;
6390 case PREDECREMENT_EXPR:
6391 case POSTDECREMENT_EXPR:
6392 t = build_int_cst (TREE_TYPE (decl), -1);
6393 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6394 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6395 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6396 break;
6398 case MODIFY_EXPR:
6399 gcc_assert (TREE_OPERAND (t, 0) == decl);
6400 TREE_OPERAND (t, 0) = var;
6402 t = TREE_OPERAND (t, 1);
6403 switch (TREE_CODE (t))
6405 case PLUS_EXPR:
6406 if (TREE_OPERAND (t, 1) == decl)
6408 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6409 TREE_OPERAND (t, 0) = var;
6410 break;
6413 /* Fallthru. */
6414 case MINUS_EXPR:
6415 case POINTER_PLUS_EXPR:
6416 gcc_assert (TREE_OPERAND (t, 0) == decl);
6417 TREE_OPERAND (t, 0) = var;
6418 break;
6419 default:
6420 gcc_unreachable ();
6423 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6424 is_gimple_val, fb_rvalue);
6425 ret = MIN (ret, tret);
6426 break;
6428 default:
6429 gcc_unreachable ();
6432 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6434 tree c;
6435 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6437 && OMP_CLAUSE_DECL (c) == decl
6438 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6440 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6441 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6442 gcc_assert (TREE_OPERAND (t, 0) == var);
6443 t = TREE_OPERAND (t, 1);
6444 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6445 || TREE_CODE (t) == MINUS_EXPR
6446 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6447 gcc_assert (TREE_OPERAND (t, 0) == var);
6448 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6449 TREE_OPERAND (t, 1));
6450 gimplify_assign (decl, t,
6451 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6456 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6458 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6460 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6461 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6462 for_pre_body);
6464 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6466 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6467 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6468 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6469 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6470 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6471 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6472 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6473 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6476 gimplify_seq_add_stmt (pre_p, gfor);
6477 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
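/* Illustrative example (hypothetical source): for

	#pragma omp for
	for (i = 0; i < n; i++)
	  body;

   the code above records i as the index, 0 as the initial value, i < n as
   the condition and i = i + 1 (rewritten from i++) as the increment of the
   resulting GIMPLE_OMP_FOR, with i forced to be private in the context.  */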
6480 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6481 In particular, OMP_SECTIONS and OMP_SINGLE. */
6483 static void
6484 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6486 tree expr = *expr_p;
6487 gimple stmt;
6488 gimple_seq body = NULL;
6490 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6491 gimplify_and_add (OMP_BODY (expr), &body);
6492 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6494 if (TREE_CODE (expr) == OMP_SECTIONS)
6495 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6496 else if (TREE_CODE (expr) == OMP_SINGLE)
6497 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6498 else
6499 gcc_unreachable ();
6501 gimplify_seq_add_stmt (pre_p, stmt);
6504 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6505 stabilized the lhs of the atomic operation as *ADDR. Return true if
6506 EXPR is this stabilized form. */
6508 static bool
6509 goa_lhs_expr_p (tree expr, tree addr)
6511 /* Also include casts to other type variants. The C front end is fond
6512 of adding these for e.g. volatile variables. This is like
6513 STRIP_TYPE_NOPS but includes the main variant lookup. */
6514 STRIP_USELESS_TYPE_CONVERSION (expr);
6516 if (TREE_CODE (expr) == INDIRECT_REF)
6518 expr = TREE_OPERAND (expr, 0);
6519 while (expr != addr
6520 && (CONVERT_EXPR_P (expr)
6521 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6522 && TREE_CODE (expr) == TREE_CODE (addr)
6523 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6525 expr = TREE_OPERAND (expr, 0);
6526 addr = TREE_OPERAND (addr, 0);
6528 if (expr == addr)
6529 return true;
6530 return (TREE_CODE (addr) == ADDR_EXPR
6531 && TREE_CODE (expr) == ADDR_EXPR
6532 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6534 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6535 return true;
6536 return false;
6539 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6540 expression does not involve the lhs, evaluate it into a temporary.
6541 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6542 or -1 if an error was encountered. */
6544 static int
6545 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6546 tree lhs_var)
6548 tree expr = *expr_p;
6549 int saw_lhs;
6551 if (goa_lhs_expr_p (expr, lhs_addr))
6553 *expr_p = lhs_var;
6554 return 1;
6556 if (is_gimple_val (expr))
6557 return 0;
6559 saw_lhs = 0;
6560 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6562 case tcc_binary:
6563 case tcc_comparison:
6564 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6565 lhs_var);
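      /* FALLTHRU: binary and comparison codes also need operand 0
	 stabilized by the tcc_unary case.  */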
6566 case tcc_unary:
6567 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6568 lhs_var);
6569 break;
6570 case tcc_expression:
6571 switch (TREE_CODE (expr))
6573 case TRUTH_ANDIF_EXPR:
6574 case TRUTH_ORIF_EXPR:
6575 case TRUTH_AND_EXPR:
6576 case TRUTH_OR_EXPR:
6577 case TRUTH_XOR_EXPR:
6578 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6579 lhs_addr, lhs_var);
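	  /* FALLTHRU: the binary truth codes also need operand 0
	     stabilized below.  */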
6580 case TRUTH_NOT_EXPR:
6581 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6582 lhs_addr, lhs_var);
6583 break;
6584 case COMPOUND_EXPR:
6585 /* Break out any preevaluations from cp_build_modify_expr. */
6586 for (; TREE_CODE (expr) == COMPOUND_EXPR;
6587 expr = TREE_OPERAND (expr, 1))
6588 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6589 *expr_p = expr;
6590 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
6591 default:
6592 break;
6594 break;
6595 default:
6596 break;
6599 if (saw_lhs == 0)
6601 enum gimplify_status gs;
6602 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6603 if (gs != GS_ALL_DONE)
6604 saw_lhs = -1;
6607 return saw_lhs;
6610 /* Gimplify an OMP_ATOMIC statement. */
6612 static enum gimplify_status
6613 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6615 tree addr = TREE_OPERAND (*expr_p, 0);
6616 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6617 ? NULL : TREE_OPERAND (*expr_p, 1);
6618 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6619 tree tmp_load;
6620 gimple loadstmt, storestmt;
6622 tmp_load = create_tmp_reg (type, NULL);
6623 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6624 return GS_ERROR;
6626 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6627 != GS_ALL_DONE)
6628 return GS_ERROR;
6630 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6631 gimplify_seq_add_stmt (pre_p, loadstmt);
6632 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6633 != GS_ALL_DONE)
6634 return GS_ERROR;
6636 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6637 rhs = tmp_load;
6638 storestmt = gimple_build_omp_atomic_store (rhs);
6639 gimplify_seq_add_stmt (pre_p, storestmt);
6640 switch (TREE_CODE (*expr_p))
6642 case OMP_ATOMIC_READ:
6643 case OMP_ATOMIC_CAPTURE_OLD:
6644 *expr_p = tmp_load;
6645 gimple_omp_atomic_set_need_value (loadstmt);
6646 break;
6647 case OMP_ATOMIC_CAPTURE_NEW:
6648 *expr_p = rhs;
6649 gimple_omp_atomic_set_need_value (storestmt);
6650 break;
6651 default:
6652 *expr_p = NULL;
6653 break;
6656 return GS_ALL_DONE;
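/* Illustrative example (hypothetical source): for

	#pragma omp atomic
	x = x + 1;

   the code above emits approximately

	GIMPLE_OMP_ATOMIC_LOAD  (tmp, &x)
	GIMPLE_OMP_ATOMIC_STORE (tmp + 1)

   with the occurrence of x in the rhs replaced by the temporary load via
   goa_stabilize_expr.  */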
6659 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6660 body, and adding some EH bits. */
6662 static enum gimplify_status
6663 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6665 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6666 gimple g;
6667 gimple_seq body = NULL;
6668 struct gimplify_ctx gctx;
6669 int subcode = 0;
6671 /* Wrap the transaction body in a BIND_EXPR so we have a context
6672 in which to put decls for OpenMP.  */
6673 if (TREE_CODE (tbody) != BIND_EXPR)
6675 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6676 TREE_SIDE_EFFECTS (bind) = 1;
6677 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6678 TRANSACTION_EXPR_BODY (expr) = bind;
6681 push_gimplify_context (&gctx);
6682 temp = voidify_wrapper_expr (*expr_p, NULL);
6684 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6685 pop_gimplify_context (g);
6687 g = gimple_build_transaction (body, NULL);
6688 if (TRANSACTION_EXPR_OUTER (expr))
6689 subcode = GTMA_IS_OUTER;
6690 else if (TRANSACTION_EXPR_RELAXED (expr))
6691 subcode = GTMA_IS_RELAXED;
6692 gimple_transaction_set_subcode (g, subcode);
6694 gimplify_seq_add_stmt (pre_p, g);
6696 if (temp)
6698 *expr_p = temp;
6699 return GS_OK;
6702 *expr_p = NULL_TREE;
6703 return GS_ALL_DONE;
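/* For illustration (hypothetical source): a statement like

	__transaction_atomic { x++; }

   is gimplified into a GIMPLE_TRANSACTION statement whose body is the
   gimplified compound statement, with the subcode marking outer or
   relaxed transactions.  */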
6706 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
6707 expression produces a value to be used as an operand inside a GIMPLE
6708 statement, the value will be stored back in *EXPR_P. This value will
6709 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6710 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6711 emitted in PRE_P and POST_P.
6713 Additionally, this process may overwrite parts of the input
6714 expression during gimplification. Ideally, it should be
6715 possible to do non-destructive gimplification.
6717 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6718 the expression needs to evaluate to a value to be used as
6719 an operand in a GIMPLE statement, this value will be stored in
6720 *EXPR_P on exit. This happens when the caller specifies one
6721 of fb_lvalue or fb_rvalue fallback flags.
6723 PRE_P will contain the sequence of GIMPLE statements corresponding
6724 to the evaluation of EXPR and all the side-effects that must
6725 be executed before the main expression. On exit, the last
6726 statement of PRE_P is the core statement being gimplified. For
6727 instance, when gimplifying 'if (++a)' the last statement in
6728 PRE_P will be 'if (t.1)' where t.1 is the result of
6729 pre-incrementing 'a'.
6731 POST_P will contain the sequence of GIMPLE statements corresponding
6732 to the evaluation of all the side-effects that must be executed
6733 after the main expression. If this is NULL, the post
6734 side-effects are stored at the end of PRE_P.
6736 The reason why the output is split in two is to handle post
6737 side-effects explicitly. In some cases, an expression may have
6738 inner and outer post side-effects which need to be emitted in
6739 an order different from the one given by the recursive
6740 traversal. For instance, for the expression (*p--)++ the post
6741 side-effects of '--' must actually occur *after* the post
6742 side-effects of '++'. However, gimplification will first visit
6743 the inner expression, so if a separate POST sequence was not
6744 used, the resulting sequence would be:
6746 1 t.1 = *p
6747 2 p = p - 1
6748 3 t.2 = t.1 + 1
6749 4 *p = t.2
6751 However, the post-decrement operation in line #2 must not be
6752 evaluated until after the store to *p at line #4, so the
6753 correct sequence should be:
6755 1 t.1 = *p
6756 2 t.2 = t.1 + 1
6757 3 *p = t.2
6758 4 p = p - 1
6760 So, by specifying a separate post queue, it is possible
6761 to emit the post side-effects in the correct order.
6762 If POST_P is NULL, an internal queue will be used. Before
6763 returning to the caller, the sequence POST_P is appended to
6764 the main output sequence PRE_P.
6766 GIMPLE_TEST_F points to a function that takes a tree T and
6767 returns nonzero if T is in the GIMPLE form requested by the
6768 caller. The GIMPLE predicates are in gimple.c.
6770 FALLBACK tells the function what sort of a temporary we want if
6771 gimplification cannot produce an expression that complies with
6772 GIMPLE_TEST_F.
6774 fb_none means that no temporary should be generated
6775 fb_rvalue means that an rvalue is OK to generate
6776 fb_lvalue means that an lvalue is OK to generate
6777 fb_either means that either is OK, but an lvalue is preferable.
6778 fb_mayfail means that gimplification may fail (in which case
6779 GS_ERROR will be returned)
6781 The return value is either GS_ERROR or GS_ALL_DONE, since this
6782 function iterates until EXPR is completely gimplified or an error
6783 occurs. */
6785 enum gimplify_status
6786 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6787 bool (*gimple_test_f) (tree), fallback_t fallback)
6789 tree tmp;
6790 gimple_seq internal_pre = NULL;
6791 gimple_seq internal_post = NULL;
6792 tree save_expr;
6793 bool is_statement;
6794 location_t saved_location;
6795 enum gimplify_status ret;
6796 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6798 save_expr = *expr_p;
6799 if (save_expr == NULL_TREE)
6800 return GS_ALL_DONE;
6802 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6803 is_statement = gimple_test_f == is_gimple_stmt;
6804 if (is_statement)
6805 gcc_assert (pre_p);
6807 /* Consistency checks. */
6808 if (gimple_test_f == is_gimple_reg)
6809 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6810 else if (gimple_test_f == is_gimple_val
6811 || gimple_test_f == is_gimple_call_addr
6812 || gimple_test_f == is_gimple_condexpr
6813 || gimple_test_f == is_gimple_mem_rhs
6814 || gimple_test_f == is_gimple_mem_rhs_or_call
6815 || gimple_test_f == is_gimple_reg_rhs
6816 || gimple_test_f == is_gimple_reg_rhs_or_call
6817 || gimple_test_f == is_gimple_asm_val
6818 || gimple_test_f == is_gimple_mem_ref_addr)
6819 gcc_assert (fallback & fb_rvalue);
6820 else if (gimple_test_f == is_gimple_min_lval
6821 || gimple_test_f == is_gimple_lvalue)
6822 gcc_assert (fallback & fb_lvalue);
6823 else if (gimple_test_f == is_gimple_addressable)
6824 gcc_assert (fallback & fb_either);
6825 else if (gimple_test_f == is_gimple_stmt)
6826 gcc_assert (fallback == fb_none);
6827 else
6829 /* We should have recognized the GIMPLE_TEST_F predicate to
6830 know what kind of fallback to use in case a temporary is
6831 needed to hold the value or address of *EXPR_P. */
6832 gcc_unreachable ();
6835 /* We used to check the predicate here and return immediately if it
6836 succeeds. This is wrong; the design is for gimplification to be
6837 idempotent, and for the predicates to only test for valid forms, not
6838 whether they are fully simplified. */
6839 if (pre_p == NULL)
6840 pre_p = &internal_pre;
6842 if (post_p == NULL)
6843 post_p = &internal_post;
6845 /* Remember the last statements added to PRE_P and POST_P. Every
6846 new statement added by the gimplification helpers needs to be
6847 annotated with location information. To centralize the
6848 responsibility, we remember the last statement that had been
6849 added to both queues before gimplifying *EXPR_P. If
6850 gimplification produces new statements in PRE_P and POST_P, those
6851 statements will be annotated with the same location information
6852 as *EXPR_P. */
6853 pre_last_gsi = gsi_last (*pre_p);
6854 post_last_gsi = gsi_last (*post_p);
6856 saved_location = input_location;
6857 if (save_expr != error_mark_node
6858 && EXPR_HAS_LOCATION (*expr_p))
6859 input_location = EXPR_LOCATION (*expr_p);
6861 /* Loop over the specific gimplifiers until the toplevel node
6862 remains the same. */
6865 /* Strip away as many useless type conversions as possible
6866 at the toplevel. */
6867 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6869 /* Remember the expr. */
6870 save_expr = *expr_p;
6872 /* Die, die, die, my darling. */
6873 if (save_expr == error_mark_node
6874 || (TREE_TYPE (save_expr)
6875 && TREE_TYPE (save_expr) == error_mark_node))
6877 ret = GS_ERROR;
6878 break;
6881 /* Do any language-specific gimplification. */
6882 ret = ((enum gimplify_status)
6883 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6884 if (ret == GS_OK)
6886 if (*expr_p == NULL_TREE)
6887 break;
6888 if (*expr_p != save_expr)
6889 continue;
6891 else if (ret != GS_UNHANDLED)
6892 break;
6894 /* Make sure that all the cases set 'ret' appropriately. */
6895 ret = GS_UNHANDLED;
6896 switch (TREE_CODE (*expr_p))
6898 /* First deal with the special cases. */
6900 case POSTINCREMENT_EXPR:
6901 case POSTDECREMENT_EXPR:
6902 case PREINCREMENT_EXPR:
6903 case PREDECREMENT_EXPR:
6904 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6905 fallback != fb_none);
6906 break;
6908 case ARRAY_REF:
6909 case ARRAY_RANGE_REF:
6910 case REALPART_EXPR:
6911 case IMAGPART_EXPR:
6912 case COMPONENT_REF:
6913 case VIEW_CONVERT_EXPR:
6914 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6915 fallback ? fallback : fb_rvalue);
6916 break;
6918 case COND_EXPR:
6919 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6921 /* C99 code may assign to an array in a structure value of a
6922 conditional expression, and this has undefined behavior
6923 only on execution, so create a temporary if an lvalue is
6924 required. */
6925 if (fallback == fb_lvalue)
6927 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6928 mark_addressable (*expr_p);
6929 ret = GS_OK;
6931 break;
6933 case CALL_EXPR:
6934 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6936 /* C99 code may assign to an array in a structure returned
6937 from a function, and this has undefined behavior only on
6938 execution, so create a temporary if an lvalue is
6939 required. */
6940 if (fallback == fb_lvalue)
6942 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6943 mark_addressable (*expr_p);
6944 ret = GS_OK;
6946 break;
6948 case TREE_LIST:
6949 gcc_unreachable ();
6951 case COMPOUND_EXPR:
6952 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6953 break;
6955 case COMPOUND_LITERAL_EXPR:
6956 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6957 break;
6959 case MODIFY_EXPR:
6960 case INIT_EXPR:
6961 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6962 fallback != fb_none);
6963 break;
6965 case TRUTH_ANDIF_EXPR:
6966 case TRUTH_ORIF_EXPR:
6968 /* Preserve the original type of the expression and the
6969 source location of the outer expression. */
6970 tree org_type = TREE_TYPE (*expr_p);
6971 *expr_p = gimple_boolify (*expr_p);
6972 *expr_p = build3_loc (input_location, COND_EXPR,
6973 org_type, *expr_p,
6974 fold_convert_loc
6975 (input_location,
6976 org_type, boolean_true_node),
6977 fold_convert_loc
6978 (input_location,
6979 org_type, boolean_false_node));
6980 ret = GS_OK;
6981 break;
6984 case TRUTH_NOT_EXPR:
6986 tree type = TREE_TYPE (*expr_p);
6987 /* The parsers are careful to generate TRUTH_NOT_EXPR
6988 only with operands that are always zero or one.
6989 We do not fold here but handle the only interesting case
6990 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
6991 *expr_p = gimple_boolify (*expr_p);
6992 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
6993 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
6994 TREE_TYPE (*expr_p),
6995 TREE_OPERAND (*expr_p, 0));
6996 else
6997 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
6998 TREE_TYPE (*expr_p),
6999 TREE_OPERAND (*expr_p, 0),
7000 build_int_cst (TREE_TYPE (*expr_p), 1));
7001 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7002 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7003 ret = GS_OK;
7004 break;
7007 case ADDR_EXPR:
7008 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7009 break;
7011 case VA_ARG_EXPR:
7012 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7013 break;
7015 CASE_CONVERT:
7016 if (IS_EMPTY_STMT (*expr_p))
7018 ret = GS_ALL_DONE;
7019 break;
7022 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7023 || fallback == fb_none)
7025 /* Just strip a conversion to void (or in void context) and
7026 try again. */
7027 *expr_p = TREE_OPERAND (*expr_p, 0);
7028 ret = GS_OK;
7029 break;
7032 ret = gimplify_conversion (expr_p);
7033 if (ret == GS_ERROR)
7034 break;
7035 if (*expr_p != save_expr)
7036 break;
7037 /* FALLTHRU */
7039 case FIX_TRUNC_EXPR:
7040 /* unary_expr: ... | '(' cast ')' val | ... */
7041 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7042 is_gimple_val, fb_rvalue);
7043 recalculate_side_effects (*expr_p);
7044 break;
7046 case INDIRECT_REF:
7048 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7049 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7050 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7052 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7053 if (*expr_p != save_expr)
7055 ret = GS_OK;
7056 break;
7059 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7060 is_gimple_reg, fb_rvalue);
7061 if (ret == GS_ERROR)
7062 break;
7064 recalculate_side_effects (*expr_p);
7065 *expr_p = fold_build2_loc (input_location, MEM_REF,
7066 TREE_TYPE (*expr_p),
7067 TREE_OPERAND (*expr_p, 0),
7068 build_int_cst (saved_ptr_type, 0));
7069 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7070 TREE_THIS_NOTRAP (*expr_p) = notrap;
7071 ret = GS_OK;
7072 break;
7075 /* We arrive here through the various re-gimplification paths.  */
7076 case MEM_REF:
7077 /* First try re-folding the whole thing. */
7078 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7079 TREE_OPERAND (*expr_p, 0),
7080 TREE_OPERAND (*expr_p, 1));
7081 if (tmp)
7083 *expr_p = tmp;
7084 recalculate_side_effects (*expr_p);
7085 ret = GS_OK;
7086 break;
7088 /* Avoid re-gimplifying the address operand if it is already
7089 in suitable form. Re-gimplifying would mark the address
7090 operand addressable. Always gimplify when not in SSA form
7091 as we still may have to gimplify decls with value-exprs. */
7092 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7093 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7095 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7096 is_gimple_mem_ref_addr, fb_rvalue);
7097 if (ret == GS_ERROR)
7098 break;
7100 recalculate_side_effects (*expr_p);
7101 ret = GS_ALL_DONE;
7102 break;
7104 /* Constants need not be gimplified. */
7105 case INTEGER_CST:
7106 case REAL_CST:
7107 case FIXED_CST:
7108 case STRING_CST:
7109 case COMPLEX_CST:
7110 case VECTOR_CST:
7111 ret = GS_ALL_DONE;
7112 break;
7114 case CONST_DECL:
7115 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7116 CONST_DECL node. Otherwise the decl is replaceable by its
7117 value. */
7118 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7119 if (fallback & fb_lvalue)
7120 ret = GS_ALL_DONE;
7121 else
7123 *expr_p = DECL_INITIAL (*expr_p);
7124 ret = GS_OK;
7126 break;
7128 case DECL_EXPR:
7129 ret = gimplify_decl_expr (expr_p, pre_p);
7130 break;
7132 case BIND_EXPR:
7133 ret = gimplify_bind_expr (expr_p, pre_p);
7134 break;
7136 case LOOP_EXPR:
7137 ret = gimplify_loop_expr (expr_p, pre_p);
7138 break;
7140 case SWITCH_EXPR:
7141 ret = gimplify_switch_expr (expr_p, pre_p);
7142 break;
7144 case EXIT_EXPR:
7145 ret = gimplify_exit_expr (expr_p);
7146 break;
7148 case GOTO_EXPR:
7149 /* If the target is not a LABEL_DECL, then it is a computed jump
7150 and the target needs to be gimplified. */
7151 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7153 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7154 NULL, is_gimple_val, fb_rvalue);
7155 if (ret == GS_ERROR)
7156 break;
7158 gimplify_seq_add_stmt (pre_p,
7159 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7160 ret = GS_ALL_DONE;
7161 break;
7163 case PREDICT_EXPR:
7164 gimplify_seq_add_stmt (pre_p,
7165 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7166 PREDICT_EXPR_OUTCOME (*expr_p)));
7167 ret = GS_ALL_DONE;
7168 break;
7170 case LABEL_EXPR:
7171 ret = GS_ALL_DONE;
7172 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7173 == current_function_decl);
7174 gimplify_seq_add_stmt (pre_p,
7175 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7176 break;
7178 case CASE_LABEL_EXPR:
7179 ret = gimplify_case_label_expr (expr_p, pre_p);
7180 break;
7182 case RETURN_EXPR:
7183 ret = gimplify_return_expr (*expr_p, pre_p);
7184 break;
7186 case CONSTRUCTOR:
7187 /* Don't reduce this in place; let gimplify_init_constructor work its
7188 magic.  But if we're only elaborating this for side effects, just
7189 gimplify any element that has side-effects. */
7190 if (fallback == fb_none)
7192 unsigned HOST_WIDE_INT ix;
7193 tree val;
7194 tree temp = NULL_TREE;
7195 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7196 if (TREE_SIDE_EFFECTS (val))
7197 append_to_statement_list (val, &temp);
7199 *expr_p = temp;
7200 ret = temp ? GS_OK : GS_ALL_DONE;
7202 /* C99 code may assign to an array in a constructed
7203 structure or union, and this has undefined behavior only
7204 on execution, so create a temporary if an lvalue is
7205 required. */
7206 else if (fallback == fb_lvalue)
7208 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7209 mark_addressable (*expr_p);
7210 ret = GS_OK;
7212 else
7213 ret = GS_ALL_DONE;
7214 break;
7216 /* The following are special cases that are not handled by the
7217 original GIMPLE grammar. */
7219 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7220 eliminated. */
7221 case SAVE_EXPR:
7222 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7223 break;
7225 case BIT_FIELD_REF:
7227 enum gimplify_status r0, r1, r2;
7229 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7230 post_p, is_gimple_lvalue, fb_either);
7231 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7232 post_p, is_gimple_val, fb_rvalue);
7233 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7234 post_p, is_gimple_val, fb_rvalue);
7235 recalculate_side_effects (*expr_p);
7237 ret = MIN (r0, MIN (r1, r2));
7239 break;
7241 case TARGET_MEM_REF:
7243 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7245 if (TMR_BASE (*expr_p))
7246 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7247 post_p, is_gimple_mem_ref_addr, fb_either);
7248 if (TMR_INDEX (*expr_p))
7249 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7250 post_p, is_gimple_val, fb_rvalue);
7251 if (TMR_INDEX2 (*expr_p))
7252 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7253 post_p, is_gimple_val, fb_rvalue);
7254 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7255 ret = MIN (r0, r1);
7257 break;
7259 case NON_LVALUE_EXPR:
7260 /* This should have been stripped above. */
7261 gcc_unreachable ();
7263 case ASM_EXPR:
7264 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7265 break;
7267 case TRY_FINALLY_EXPR:
7268 case TRY_CATCH_EXPR:
7270 gimple_seq eval, cleanup;
7271 gimple try_;
7273 eval = cleanup = NULL;
7274 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7275 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7276 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7277 if (gimple_seq_empty_p (cleanup))
7279 gimple_seq_add_seq (pre_p, eval);
7280 ret = GS_ALL_DONE;
7281 break;
7283 try_ = gimple_build_try (eval, cleanup,
7284 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7285 ? GIMPLE_TRY_FINALLY
7286 : GIMPLE_TRY_CATCH);
7287 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7288 gimple_try_set_catch_is_cleanup (try_,
7289 TRY_CATCH_IS_CLEANUP (*expr_p));
7290 gimplify_seq_add_stmt (pre_p, try_);
7291 ret = GS_ALL_DONE;
7292 break;
7295 case CLEANUP_POINT_EXPR:
7296 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7297 break;
7299 case TARGET_EXPR:
7300 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7301 break;
7303 case CATCH_EXPR:
7305 gimple c;
7306 gimple_seq handler = NULL;
7307 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7308 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7309 gimplify_seq_add_stmt (pre_p, c);
7310 ret = GS_ALL_DONE;
7311 break;
7314 case EH_FILTER_EXPR:
7316 gimple ehf;
7317 gimple_seq failure = NULL;
7319 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7320 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7321 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7322 gimplify_seq_add_stmt (pre_p, ehf);
7323 ret = GS_ALL_DONE;
7324 break;
7327 case OBJ_TYPE_REF:
7329 enum gimplify_status r0, r1;
7330 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7331 post_p, is_gimple_val, fb_rvalue);
7332 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7333 post_p, is_gimple_val, fb_rvalue);
7334 TREE_SIDE_EFFECTS (*expr_p) = 0;
7335 ret = MIN (r0, r1);
7337 break;
7339 case LABEL_DECL:
7340 /* We get here when taking the address of a label. We mark
7341 the label as "forced", meaning it can never be removed and
7342 it is a potential target for any computed goto. */
7343 FORCED_LABEL (*expr_p) = 1;
7344 ret = GS_ALL_DONE;
7345 break;
7347 case STATEMENT_LIST:
7348 ret = gimplify_statement_list (expr_p, pre_p);
7349 break;
7351 case WITH_SIZE_EXPR:
7353 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7354 post_p == &internal_post ? NULL : post_p,
7355 gimple_test_f, fallback);
7356 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7357 is_gimple_val, fb_rvalue);
7358 ret = GS_ALL_DONE;
7360 break;
7362 case VAR_DECL:
7363 case PARM_DECL:
7364 ret = gimplify_var_or_parm_decl (expr_p);
7365 break;
7367 case RESULT_DECL:
7368 /* When within an OpenMP context, notice uses of variables. */
7369 if (gimplify_omp_ctxp)
7370 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7371 ret = GS_ALL_DONE;
7372 break;
7374 case SSA_NAME:
7375 /* Allow callbacks into the gimplifier during optimization. */
7376 ret = GS_ALL_DONE;
7377 break;
7379 case OMP_PARALLEL:
7380 gimplify_omp_parallel (expr_p, pre_p);
7381 ret = GS_ALL_DONE;
7382 break;
7384 case OMP_TASK:
7385 gimplify_omp_task (expr_p, pre_p);
7386 ret = GS_ALL_DONE;
7387 break;
7389 case OMP_FOR:
7390 ret = gimplify_omp_for (expr_p, pre_p);
7391 break;
7393 case OMP_SECTIONS:
7394 case OMP_SINGLE:
7395 gimplify_omp_workshare (expr_p, pre_p);
7396 ret = GS_ALL_DONE;
7397 break;
7399 case OMP_SECTION:
7400 case OMP_MASTER:
7401 case OMP_ORDERED:
7402 case OMP_CRITICAL:
7404 gimple_seq body = NULL;
7405 gimple g;
7407 gimplify_and_add (OMP_BODY (*expr_p), &body);
7408 switch (TREE_CODE (*expr_p))
7410 case OMP_SECTION:
7411 g = gimple_build_omp_section (body);
7412 break;
7413 case OMP_MASTER:
7414 g = gimple_build_omp_master (body);
7415 break;
7416 case OMP_ORDERED:
7417 g = gimple_build_omp_ordered (body);
7418 break;
7419 case OMP_CRITICAL:
7420 g = gimple_build_omp_critical (body,
7421 OMP_CRITICAL_NAME (*expr_p));
7422 break;
7423 default:
7424 gcc_unreachable ();
7426 gimplify_seq_add_stmt (pre_p, g);
7427 ret = GS_ALL_DONE;
7428 break;
7431 case OMP_ATOMIC:
7432 case OMP_ATOMIC_READ:
7433 case OMP_ATOMIC_CAPTURE_OLD:
7434 case OMP_ATOMIC_CAPTURE_NEW:
7435 ret = gimplify_omp_atomic (expr_p, pre_p);
7436 break;
7438 case TRANSACTION_EXPR:
7439 ret = gimplify_transaction (expr_p, pre_p);
7440 break;
7442 case TRUTH_AND_EXPR:
7443 case TRUTH_OR_EXPR:
7444 case TRUTH_XOR_EXPR:
7446 tree orig_type = TREE_TYPE (*expr_p);
7447 tree new_type, xop0, xop1;
7448 *expr_p = gimple_boolify (*expr_p);
7449 new_type = TREE_TYPE (*expr_p);
7450 if (!useless_type_conversion_p (orig_type, new_type))
7452 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7453 ret = GS_OK;
7454 break;
7457 /* Boolified binary truth expressions are semantically equivalent
7458 to bitwise binary expressions. Canonicalize them to the
7459 bitwise variant. */
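/* Editorial note (not part of the original source): e.g. a boolified
   TRUTH_AND_EXPR <a, b> simply becomes BIT_AND_EXPR <a, b>; only the
   tree code changes here, and the operands are then converted to
   NEW_TYPE below if needed.  */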
7460 switch (TREE_CODE (*expr_p))
7462 case TRUTH_AND_EXPR:
7463 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7464 break;
7465 case TRUTH_OR_EXPR:
7466 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7467 break;
7468 case TRUTH_XOR_EXPR:
7469 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7470 break;
7471 default:
7472 break;
7474 /* Now make sure that the operands have types compatible with
7475 the expression's new_type. */
7476 xop0 = TREE_OPERAND (*expr_p, 0);
7477 xop1 = TREE_OPERAND (*expr_p, 1);
7478 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7479 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7480 new_type,
7481 xop0);
7482 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7483 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7484 new_type,
7485 xop1);
7486 /* Continue classified as tcc_binary. */
7487 goto expr_2;
7490 case FMA_EXPR:
7491 case VEC_PERM_EXPR:
7492 /* Classified as tcc_expression. */
7493 goto expr_3;
7495 case POINTER_PLUS_EXPR:
7497 enum gimplify_status r0, r1;
7498 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7499 post_p, is_gimple_val, fb_rvalue);
7500 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7501 post_p, is_gimple_val, fb_rvalue);
7502 recalculate_side_effects (*expr_p);
7503 ret = MIN (r0, r1);
7504 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7505 after gimplifying the operands; this is similar to
7506 folding all gimplified stmts on creation so that they
7507 are canonicalized, which is what we should eventually
7508 do anyway. */
7509 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7510 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7512 *expr_p = build_fold_addr_expr_with_type_loc
7513 (input_location,
7514 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7515 TREE_OPERAND (*expr_p, 0),
7516 fold_convert (ptr_type_node,
7517 TREE_OPERAND (*expr_p, 1))),
7518 TREE_TYPE (*expr_p));
7519 ret = MIN (ret, GS_OK);
7521 break;
7524 default:
7525 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7527 case tcc_comparison:
7528 /* Handle comparison of non-scalar-mode aggregate objects
7529 with a call to memcmp. It would be nice to only have to do
7530 this for variable-sized objects, but then we'd have to allow
7531 the same nest of reference nodes we allow for MODIFY_EXPR and
7532 that's too complex.
7534 Compare scalar mode aggregates as scalar mode values. Using
7535 memcmp for them would be very inefficient at best, and is
7536 plain wrong if bitfields are involved. */
7538 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7540 /* Vector comparisons need no boolification. */
7541 if (TREE_CODE (type) == VECTOR_TYPE)
7542 goto expr_2;
7543 else if (!AGGREGATE_TYPE_P (type))
7545 tree org_type = TREE_TYPE (*expr_p);
7546 *expr_p = gimple_boolify (*expr_p);
7547 if (!useless_type_conversion_p (org_type,
7548 TREE_TYPE (*expr_p)))
7550 *expr_p = fold_convert_loc (input_location,
7551 org_type, *expr_p);
7552 ret = GS_OK;
7554 else
7555 goto expr_2;
7557 else if (TYPE_MODE (type) != BLKmode)
7558 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7559 else
7560 ret = gimplify_variable_sized_compare (expr_p);
7562 break;
7565 /* If *EXPR_P does not need to be special-cased, handle it
7566 according to its class. */
7567 case tcc_unary:
7568 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7569 post_p, is_gimple_val, fb_rvalue);
7570 break;
7572 case tcc_binary:
7573 expr_2:
7575 enum gimplify_status r0, r1;
7577 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7578 post_p, is_gimple_val, fb_rvalue);
7579 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7580 post_p, is_gimple_val, fb_rvalue);
7582 ret = MIN (r0, r1);
7583 break;
7586 expr_3:
7588 enum gimplify_status r0, r1, r2;
7590 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7591 post_p, is_gimple_val, fb_rvalue);
7592 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7593 post_p, is_gimple_val, fb_rvalue);
7594 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7595 post_p, is_gimple_val, fb_rvalue);
7597 ret = MIN (MIN (r0, r1), r2);
7598 break;
7601 case tcc_declaration:
7602 case tcc_constant:
7603 ret = GS_ALL_DONE;
7604 goto dont_recalculate;
7606 default:
7607 gcc_unreachable ();
7610 recalculate_side_effects (*expr_p);
7612 dont_recalculate:
7613 break;
7616 gcc_assert (*expr_p || ret != GS_OK);
7618 while (ret == GS_OK);
7620 /* If we encountered an error_mark somewhere nested inside, either
7621 stub out the statement or propagate the error back out. */
7622 if (ret == GS_ERROR)
7624 if (is_statement)
7625 *expr_p = NULL;
7626 goto out;
7629 /* This was only valid as a return value from the langhook, which
7630 we handled. Make sure it doesn't escape from any other context. */
7631 gcc_assert (ret != GS_UNHANDLED);
7633 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7635 /* We aren't looking for a value, and we don't have a valid
7636 statement. If it doesn't have side-effects, throw it away. */
7637 if (!TREE_SIDE_EFFECTS (*expr_p))
7638 *expr_p = NULL;
7639 else if (!TREE_THIS_VOLATILE (*expr_p))
7641 /* This is probably a _REF that contains something nested that
7642 has side effects. Recurse through the operands to find it. */
7643 enum tree_code code = TREE_CODE (*expr_p);
7645 switch (code)
7647 case COMPONENT_REF:
7648 case REALPART_EXPR:
7649 case IMAGPART_EXPR:
7650 case VIEW_CONVERT_EXPR:
7651 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7652 gimple_test_f, fallback);
7653 break;
7655 case ARRAY_REF:
7656 case ARRAY_RANGE_REF:
7657 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7658 gimple_test_f, fallback);
7659 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7660 gimple_test_f, fallback);
7661 break;
7663 default:
7664 /* Anything else with side-effects must be converted to
7665 a valid statement before we get here. */
7666 gcc_unreachable ();
7669 *expr_p = NULL;
7671 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7672 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7674 /* Historically, the compiler has treated a bare reference
7675 to a non-BLKmode volatile lvalue as forcing a load. */
7676 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7678 /* Normally, we do not want to create a temporary for a
7679 TREE_ADDRESSABLE type because such a type should not be
7680 copied by bitwise-assignment. However, we make an
7681 exception here, as all we are doing here is ensuring that
7682 we read the bytes that make up the type. We use
7683 create_tmp_var_raw because create_tmp_var will abort when
7684 given a TREE_ADDRESSABLE type. */
7685 tree tmp = create_tmp_var_raw (type, "vol");
7686 gimple_add_tmp_var (tmp);
7687 gimplify_assign (tmp, *expr_p, pre_p);
7688 *expr_p = NULL;
7690 else
7691 /* We can't do anything useful with a volatile reference to
7692 an incomplete type, so just throw it away. Likewise for
7693 a BLKmode type, since any implicit inner load should
7694 already have been turned into an explicit one by the
7695 gimplification process. */
7696 *expr_p = NULL;
7699 /* If we are gimplifying at the statement level, we're done. Tack
7700 everything together and return. */
7701 if (fallback == fb_none || is_statement)
7703 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7704 it out for GC to reclaim it. */
7705 *expr_p = NULL_TREE;
7707 if (!gimple_seq_empty_p (internal_pre)
7708 || !gimple_seq_empty_p (internal_post))
7710 gimplify_seq_add_seq (&internal_pre, internal_post);
7711 gimplify_seq_add_seq (pre_p, internal_pre);
7714 /* The result of gimplifying *EXPR_P is going to be the last few
7715 statements in *PRE_P and *POST_P. Add location information
7716 to all the statements that were added by the gimplification
7717 helpers. */
7718 if (!gimple_seq_empty_p (*pre_p))
7719 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7721 if (!gimple_seq_empty_p (*post_p))
7722 annotate_all_with_location_after (*post_p, post_last_gsi,
7723 input_location);
7725 goto out;
7728 #ifdef ENABLE_GIMPLE_CHECKING
7729 if (*expr_p)
7731 enum tree_code code = TREE_CODE (*expr_p);
7732 /* These expressions should already be in gimple IR form. */
7733 gcc_assert (code != MODIFY_EXPR
7734 && code != ASM_EXPR
7735 && code != BIND_EXPR
7736 && code != CATCH_EXPR
7737 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7738 && code != EH_FILTER_EXPR
7739 && code != GOTO_EXPR
7740 && code != LABEL_EXPR
7741 && code != LOOP_EXPR
7742 && code != SWITCH_EXPR
7743 && code != TRY_FINALLY_EXPR
7744 && code != OMP_CRITICAL
7745 && code != OMP_FOR
7746 && code != OMP_MASTER
7747 && code != OMP_ORDERED
7748 && code != OMP_PARALLEL
7749 && code != OMP_SECTIONS
7750 && code != OMP_SECTION
7751 && code != OMP_SINGLE);
7753 #endif
7755 /* Otherwise we're gimplifying a subexpression, so the resulting
7756 value is interesting. If it's a valid operand that matches
7757 GIMPLE_TEST_F, we're done. Unless we are handling some
7758 post-effects internally; if that's the case, we need to copy into
7759 a temporary before adding the post-effects to POST_P. */
7760 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7761 goto out;
7763 /* Otherwise, we need to create a new temporary for the gimplified
7764 expression. */
7766 /* We can't return an lvalue if we have an internal postqueue. The
7767 object the lvalue refers to would (probably) be modified by the
7768 postqueue; we need to copy the value out first, which means an
7769 rvalue. */
7770 if ((fallback & fb_lvalue)
7771 && gimple_seq_empty_p (internal_post)
7772 && is_gimple_addressable (*expr_p))
7774 /* An lvalue will do. Take the address of the expression, store it
7775 in a temporary, and replace the expression with an INDIRECT_REF of
7776 that temporary. */
7777 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7778 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7779 *expr_p = build_simple_mem_ref (tmp);
7781 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7783 /* An rvalue will do. Assign the gimplified expression into a
7784 new temporary TMP and replace the original expression with
7785 TMP. First, make sure that the expression has a type so that
7786 it can be assigned into a temporary. */
7787 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7789 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7790 /* The postqueue might change the value of the expression between
7791 the initialization and use of the temporary, so we can't use a
7792 formal temp. FIXME do we care? */
7794 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7795 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7796 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7797 DECL_GIMPLE_REG_P (*expr_p) = 1;
7799 else
7800 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7802 else
7804 #ifdef ENABLE_GIMPLE_CHECKING
7805 if (!(fallback & fb_mayfail))
7807 fprintf (stderr, "gimplification failed:\n");
7808 print_generic_expr (stderr, *expr_p, 0);
7809 debug_tree (*expr_p);
7810 internal_error ("gimplification failed");
7812 #endif
7813 gcc_assert (fallback & fb_mayfail);
7815 /* If this is an asm statement, and the user asked for the
7816 impossible, don't die. Fail and let gimplify_asm_expr
7817 issue an error. */
7818 ret = GS_ERROR;
7819 goto out;
7822 /* Make sure the temporary matches our predicate. */
7823 gcc_assert ((*gimple_test_f) (*expr_p));
7825 if (!gimple_seq_empty_p (internal_post))
7827 annotate_all_with_location (internal_post, input_location);
7828 gimplify_seq_add_seq (pre_p, internal_post);
7831 out:
7832 input_location = saved_location;
7833 return ret;
7836 /* Look through TYPE for variable-sized objects and gimplify each such
7837 size that we find. Add to LIST_P any statements generated. */
7839 void
7840 gimplify_type_sizes (tree type, gimple_seq *list_p)
7842 tree field, t;
7844 if (type == NULL || type == error_mark_node)
7845 return;
7847 /* We first do the main variant, then copy into any other variants. */
7848 type = TYPE_MAIN_VARIANT (type);
7850 /* Avoid infinite recursion. */
7851 if (TYPE_SIZES_GIMPLIFIED (type))
7852 return;
7854 TYPE_SIZES_GIMPLIFIED (type) = 1;
7856 switch (TREE_CODE (type))
7858 case INTEGER_TYPE:
7859 case ENUMERAL_TYPE:
7860 case BOOLEAN_TYPE:
7861 case REAL_TYPE:
7862 case FIXED_POINT_TYPE:
7863 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7864 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7866 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7868 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7869 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7871 break;
7873 case ARRAY_TYPE:
7874 /* These types may not have declarations, so handle them here. */
7875 gimplify_type_sizes (TREE_TYPE (type), list_p);
7876 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7877 /* Ensure VLA bounds aren't removed; at -O0 they should be variables
7878 with assigned stack slots, and at -O1+ with -g they should be
7879 tracked by VTA. */
7880 if (!(TYPE_NAME (type)
7881 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
7882 && DECL_IGNORED_P (TYPE_NAME (type)))
7883 && TYPE_DOMAIN (type)
7884 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7886 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7887 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7888 DECL_IGNORED_P (t) = 0;
7889 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7890 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7891 DECL_IGNORED_P (t) = 0;
7893 break;
7895 case RECORD_TYPE:
7896 case UNION_TYPE:
7897 case QUAL_UNION_TYPE:
7898 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7899 if (TREE_CODE (field) == FIELD_DECL)
7901 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7902 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7903 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7904 gimplify_type_sizes (TREE_TYPE (field), list_p);
7906 break;
7908 case POINTER_TYPE:
7909 case REFERENCE_TYPE:
7910 /* We used to recurse on the pointed-to type here, which turned out to
7911 be incorrect because its definition might refer to variables not
7912 yet initialized at this point if a forward declaration is involved.
7914 It was actually useful for anonymous pointed-to types to ensure
7915 that the sizes evaluation dominates every possible later use of the
7916 values. Restricting to such types here would be safe since there
7917 is no possible forward declaration around, but would introduce an
7918 undesirable middle-end semantic to anonymity. We then defer to
7919 front-ends the responsibility of ensuring that the sizes are
7920 evaluated both early and late enough, e.g. by attaching artificial
7921 type declarations to the tree. */
7922 break;
7924 default:
7925 break;
7928 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7929 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7931 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7933 TYPE_SIZE (t) = TYPE_SIZE (type);
7934 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7935 TYPE_SIZES_GIMPLIFIED (t) = 1;
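/* Editorial note: an illustrative (not verbatim) use of gimplify_type_sizes.
   When gimplifying the declaration of a variable-length array, the size
   expressions of its type must be evaluated into a statement list before
   the variable itself can be used, roughly:

     gimple_seq stmts = NULL;
     gimplify_type_sizes (TREE_TYPE (decl), &stmts);

   after which STMTS holds the statements that compute the array bounds
   and sizes (DECL here is hypothetical).  */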
7939 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7940 a size or position, has had all of its SAVE_EXPRs evaluated.
7941 We add any required statements to *STMT_P. */
7943 void
7944 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7946 tree type, expr = *expr_p;
7948 /* We don't do anything if the value isn't there, is constant, or contains
7949 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7950 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7951 will want to replace it with a new variable, but that will cause problems
7952 if this type is from outside the function. It's OK to have that here. */
7953 if (is_gimple_sizepos (expr))
7954 return;
7956 type = TREE_TYPE (expr);
7957 *expr_p = unshare_expr (expr);
7959 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7960 expr = *expr_p;
7962 /* Verify that we've an exact type match with the original expression.
7963 In particular, we do not wish to drop a "sizetype" in favour of a
7964 type of similar dimensions. We don't want to pollute the generic
7965 type-stripping code with this knowledge because it doesn't matter
7966 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7967 and friends retain their "sizetype-ness". */
7968 if (TREE_TYPE (expr) != type
7969 && TREE_CODE (type) == INTEGER_TYPE
7970 && TYPE_IS_SIZETYPE (type))
7972 tree tmp;
7973 gimple stmt;
7975 *expr_p = create_tmp_var (type, NULL);
7976 tmp = build1 (NOP_EXPR, type, expr);
7977 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7978 gimple_set_location (stmt, EXPR_LOC_OR_HERE (expr));
7982 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node
7983 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
7984 is true, also gimplify the parameters. */
7986 gimple
7987 gimplify_body (tree fndecl, bool do_parms)
7989 location_t saved_location = input_location;
7990 gimple_seq parm_stmts, seq;
7991 gimple outer_bind;
7992 struct gimplify_ctx gctx;
7993 struct cgraph_node *cgn;
7995 timevar_push (TV_TREE_GIMPLIFY);
7997 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7998 gimplification. */
7999 default_rtl_profile ();
8001 gcc_assert (gimplify_ctxp == NULL);
8002 push_gimplify_context (&gctx);
8004 /* Unshare most shared trees in the body and in that of any nested functions.
8005 It would seem we don't have to do this for nested functions because
8006 they are supposed to be output and then the outer function gimplified
8007 first, but the g++ front end doesn't always do it that way. */
8008 unshare_body (fndecl);
8009 unvisit_body (fndecl);
8011 cgn = cgraph_get_node (fndecl);
8012 if (cgn && cgn->origin)
8013 nonlocal_vlas = pointer_set_create ();
8015 /* Make sure input_location isn't set to something weird. */
8016 input_location = DECL_SOURCE_LOCATION (fndecl);
8018 /* Resolve callee-copies. This has to be done before processing
8019 the body so that DECL_VALUE_EXPR gets processed correctly. */
8020 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8022 /* Gimplify the function's body. */
8023 seq = NULL;
8024 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8025 outer_bind = gimple_seq_first_stmt (seq);
8026 if (!outer_bind)
8028 outer_bind = gimple_build_nop ();
8029 gimplify_seq_add_stmt (&seq, outer_bind);
8032 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8033 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8034 if (gimple_code (outer_bind) == GIMPLE_BIND
8035 && gimple_seq_first (seq) == gimple_seq_last (seq))
8037 else
8038 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8040 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8042 /* If we had callee-copies statements, insert them at the beginning
8043 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8044 if (!gimple_seq_empty_p (parm_stmts))
8046 tree parm;
8048 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8049 gimple_bind_set_body (outer_bind, parm_stmts);
8051 for (parm = DECL_ARGUMENTS (current_function_decl);
8052 parm; parm = DECL_CHAIN (parm))
8053 if (DECL_HAS_VALUE_EXPR_P (parm))
8055 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8056 DECL_IGNORED_P (parm) = 0;
8060 if (nonlocal_vlas)
8062 if (nonlocal_vla_vars)
8064 /* tree-nested.c may later on call declare_vars (..., true);
8065 which relies on BLOCK_VARS chain to be the tail of the
8066 gimple_bind_vars chain. Ensure we don't violate that
8067 assumption. */
8068 if (gimple_bind_block (outer_bind)
8069 == DECL_INITIAL (current_function_decl))
8070 declare_vars (nonlocal_vla_vars, outer_bind, true);
8071 else
8072 BLOCK_VARS (DECL_INITIAL (current_function_decl))
8073 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
8074 nonlocal_vla_vars);
8075 nonlocal_vla_vars = NULL_TREE;
8077 pointer_set_destroy (nonlocal_vlas);
8078 nonlocal_vlas = NULL;
8081 pop_gimplify_context (outer_bind);
8082 gcc_assert (gimplify_ctxp == NULL);
8084 if (!seen_error ())
8085 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8087 timevar_pop (TV_TREE_GIMPLIFY);
8088 input_location = saved_location;
8090 return outer_bind;
8093 typedef char *char_p; /* For DEF_VEC_P. */
8094 DEF_VEC_P(char_p);
8095 DEF_VEC_ALLOC_P(char_p,heap);
8097 /* Return whether we should exclude FNDECL from instrumentation. */
8099 static bool
8100 flag_instrument_functions_exclude_p (tree fndecl)
8102 VEC(char_p,heap) *vec;
8104 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
8105 if (VEC_length (char_p, vec) > 0)
8107 const char *name;
8108 int i;
8109 char *s;
8111 name = lang_hooks.decl_printable_name (fndecl, 0);
8112 FOR_EACH_VEC_ELT (char_p, vec, i, s)
8113 if (strstr (name, s) != NULL)
8114 return true;
8117 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
8118 if (VEC_length (char_p, vec) > 0)
8120 const char *name;
8121 int i;
8122 char *s;
8124 name = DECL_SOURCE_FILE (fndecl);
8125 FOR_EACH_VEC_ELT (char_p, vec, i, s)
8126 if (strstr (name, s) != NULL)
8127 return true;
8130 return false;
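/* Editorial note (not part of the original source): the two vectors
   consulted above are filled in from the command-line options
   -finstrument-functions-exclude-function-list= and
   -finstrument-functions-exclude-file-list=; as the strstr calls show,
   matching is by substring, not by exact name.  */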
8133 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8134 node for the function we want to gimplify.
8136 The GENERIC body of FNDECL is replaced with the corresponding sequence
8137 of GIMPLE statements via gimple_set_body; nothing is returned. */
8139 void
8140 gimplify_function_tree (tree fndecl)
8142 tree oldfn, parm, ret;
8143 gimple_seq seq;
8144 gimple bind;
8146 gcc_assert (!gimple_body (fndecl));
8148 oldfn = current_function_decl;
8149 current_function_decl = fndecl;
8150 if (DECL_STRUCT_FUNCTION (fndecl))
8151 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8152 else
8153 push_struct_function (fndecl);
8155 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8157 /* Preliminarily mark non-addressed complex variables as eligible
8158 for promotion to gimple registers. We'll transform their uses
8159 as we find them. */
8160 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8161 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8162 && !TREE_THIS_VOLATILE (parm)
8163 && !needs_to_live_in_memory (parm))
8164 DECL_GIMPLE_REG_P (parm) = 1;
8167 ret = DECL_RESULT (fndecl);
8168 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8169 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8170 && !needs_to_live_in_memory (ret))
8171 DECL_GIMPLE_REG_P (ret) = 1;
8173 bind = gimplify_body (fndecl, true);
8175 /* The tree body of the function is no longer needed, replace it
8176 with the new GIMPLE body. */
8177 seq = gimple_seq_alloc ();
8178 gimple_seq_add_stmt (&seq, bind);
8179 gimple_set_body (fndecl, seq);
8181 /* If we're instrumenting function entry/exit, then prepend the call to
8182 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8183 catch the exit hook. */
8184 /* ??? Add some way to ignore exceptions for this TFE. */
8185 if (flag_instrument_function_entry_exit
8186 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8187 && !flag_instrument_functions_exclude_p (fndecl))
8189 tree x;
8190 gimple new_bind;
8191 gimple tf;
8192 gimple_seq cleanup = NULL, body = NULL;
8193 tree tmp_var;
8194 gimple call;
8196 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8197 call = gimple_build_call (x, 1, integer_zero_node);
8198 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8199 gimple_call_set_lhs (call, tmp_var);
8200 gimplify_seq_add_stmt (&cleanup, call);
8201 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8202 call = gimple_build_call (x, 2,
8203 build_fold_addr_expr (current_function_decl),
8204 tmp_var);
8205 gimplify_seq_add_stmt (&cleanup, call);
8206 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8208 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8209 call = gimple_build_call (x, 1, integer_zero_node);
8210 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8211 gimple_call_set_lhs (call, tmp_var);
8212 gimplify_seq_add_stmt (&body, call);
8213 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8214 call = gimple_build_call (x, 2,
8215 build_fold_addr_expr (current_function_decl),
8216 tmp_var);
8217 gimplify_seq_add_stmt (&body, call);
8218 gimplify_seq_add_stmt (&body, tf);
8219 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8220 /* Clear the block for BIND, since it is no longer directly inside
8221 the function, but within a try block. */
8222 gimple_bind_set_block (bind, NULL);
8224 /* Replace the current function body with the body
8225 wrapped in the try/finally TF. */
8226 seq = gimple_seq_alloc ();
8227 gimple_seq_add_stmt (&seq, new_bind);
8228 gimple_set_body (fndecl, seq);
8231 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8232 cfun->curr_properties = PROP_gimple_any;
8234 current_function_decl = oldfn;
8235 pop_cfun ();
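/* Editorial note: a sketch (not verbatim GIMPLE) of the shape produced by
   the instrumentation code above for -finstrument-functions:

     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn, return_addr);
     try
       {
         <original GIMPLE_BIND of the body>
       }
     finally
       {
         return_addr = __builtin_return_address (0);
         __cyg_profile_func_exit (this_fn, return_addr);
       }

   where THIS_FN is the address of the current function and the enter/exit
   hooks are the BUILT_IN_PROFILE_FUNC_ENTER/EXIT builtins used above.  */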
8238 /* Some transformations like inlining may invalidate the GIMPLE form
8239 for operands. This function traverses all the operands in STMT and
8240 gimplifies anything that is not a valid gimple operand. Any new
8241 GIMPLE statements are inserted before *GSI_P. */
8243 void
8244 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
8246 size_t i, num_ops;
8247 tree orig_lhs = NULL_TREE, lhs, t;
8248 gimple_seq pre = NULL;
8249 gimple post_stmt = NULL;
8250 struct gimplify_ctx gctx;
8252 push_gimplify_context (&gctx);
8253 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8255 switch (gimple_code (stmt))
8257 case GIMPLE_COND:
8258 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
8259 is_gimple_val, fb_rvalue);
8260 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
8261 is_gimple_val, fb_rvalue);
8262 break;
8263 case GIMPLE_SWITCH:
8264 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
8265 is_gimple_val, fb_rvalue);
8266 break;
8267 case GIMPLE_OMP_ATOMIC_LOAD:
8268 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
8269 is_gimple_val, fb_rvalue);
8270 break;
8271 case GIMPLE_ASM:
8273 size_t i, noutputs = gimple_asm_noutputs (stmt);
8274 const char *constraint, **oconstraints;
8275 bool allows_mem, allows_reg, is_inout;
8277 oconstraints
8278 = (const char **) alloca ((noutputs) * sizeof (const char *));
8279 for (i = 0; i < noutputs; i++)
8281 tree op = gimple_asm_output_op (stmt, i);
8282 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8283 oconstraints[i] = constraint;
8284 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8285 &allows_reg, &is_inout);
8286 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8287 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8288 fb_lvalue | fb_mayfail);
8290 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
8292 tree op = gimple_asm_input_op (stmt, i);
8293 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8294 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
8295 oconstraints, &allows_mem, &allows_reg);
8296 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
8297 allows_reg = 0;
8298 if (!allows_reg && allows_mem)
8299 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8300 is_gimple_lvalue, fb_lvalue | fb_mayfail);
8301 else
8302 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8303 is_gimple_asm_val, fb_rvalue);
8306 break;
8307 default:
8308 /* NOTE: We start gimplifying operands from last to first to
8309 make sure that side-effects on the RHS of calls, assignments
8310 and ASMs are executed before the LHS. The ordering is not
8311 important for other statements. */
8312 num_ops = gimple_num_ops (stmt);
8313 orig_lhs = gimple_get_lhs (stmt);
8314 for (i = num_ops; i > 0; i--)
8316 tree op = gimple_op (stmt, i - 1);
8317 if (op == NULL_TREE)
8318 continue;
8319 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
8320 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
8321 else if (i == 2
8322 && is_gimple_assign (stmt)
8323 && num_ops == 2
8324 && get_gimple_rhs_class (gimple_expr_code (stmt))
8325 == GIMPLE_SINGLE_RHS)
8326 gimplify_expr (&op, &pre, NULL,
8327 rhs_predicate_for (gimple_assign_lhs (stmt)),
8328 fb_rvalue);
8329 else if (i == 2 && is_gimple_call (stmt))
8331 if (TREE_CODE (op) == FUNCTION_DECL)
8332 continue;
8333 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
8335 else
8336 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
8337 gimple_set_op (stmt, i - 1, op);
8340 lhs = gimple_get_lhs (stmt);
8341 /* If the LHS changed in a way that requires a simple RHS,
8342 create a temporary. */
8343 if (lhs && !is_gimple_reg (lhs))
8345 bool need_temp = false;
8347 if (is_gimple_assign (stmt)
8348 && num_ops == 2
8349 && get_gimple_rhs_class (gimple_expr_code (stmt))
8350 == GIMPLE_SINGLE_RHS)
8351 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
8352 rhs_predicate_for (gimple_assign_lhs (stmt)),
8353 fb_rvalue);
8354 else if (is_gimple_reg (lhs))
8356 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8358 if (is_gimple_call (stmt))
8360 i = gimple_call_flags (stmt);
8361 if ((i & ECF_LOOPING_CONST_OR_PURE)
8362 || !(i & (ECF_CONST | ECF_PURE)))
8363 need_temp = true;
8365 if (stmt_can_throw_internal (stmt))
8366 need_temp = true;
8369 else
8371 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8372 need_temp = true;
8373 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
8375 if (is_gimple_call (stmt))
8377 tree fndecl = gimple_call_fndecl (stmt);
8379 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
8380 && !(fndecl && DECL_RESULT (fndecl)
8381 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
8382 need_temp = true;
8384 else
8385 need_temp = true;
8388 if (need_temp)
8390 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
8392 if (TREE_CODE (orig_lhs) == SSA_NAME)
8393 orig_lhs = SSA_NAME_VAR (orig_lhs);
8395 if (gimple_in_ssa_p (cfun))
8396 temp = make_ssa_name (temp, NULL);
8397 gimple_set_lhs (stmt, temp);
8398 post_stmt = gimple_build_assign (lhs, temp);
8399 if (TREE_CODE (lhs) == SSA_NAME)
8400 SSA_NAME_DEF_STMT (lhs) = post_stmt;
8403 break;
8406 if (gimple_referenced_vars (cfun))
8407 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
8408 add_referenced_var (t);
8410 if (!gimple_seq_empty_p (pre))
8412 if (gimple_in_ssa_p (cfun))
8414 gimple_stmt_iterator i;
8416 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
8417 mark_symbols_for_renaming (gsi_stmt (i));
8419 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
8421 if (post_stmt)
8422 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
8424 pop_gimplify_context (NULL);
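/* Editorial note: an illustrative (not verbatim) call site for
   gimple_regimplify_operands.  A pass that has just substituted a new
   operand into STMT, possibly leaving it in invalid GIMPLE form, would
   typically do

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     gimple_regimplify_operands (stmt, &gsi);

   so that any fix-up statements are emitted immediately before STMT.  */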
8427 /* Expand EXPR to list of gimple statements STMTS. GIMPLE_TEST_F specifies
8428 the predicate that will hold for the result. If VAR is not NULL, make the
8429 base variable of the final destination be VAR if suitable. */
8431 tree
8432 force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8433 gimple_predicate gimple_test_f, tree var)
8435 tree t;
8436 enum gimplify_status ret;
8437 struct gimplify_ctx gctx;
8439 *stmts = NULL;
8441 /* gimple_test_f might be more strict than is_gimple_val; make
8442 sure we pass both. Just checking gimple_test_f doesn't work
8443 because most gimple predicates do not work recursively. */
8444 if (is_gimple_val (expr)
8445 && (*gimple_test_f) (expr))
8446 return expr;
8448 push_gimplify_context (&gctx);
8449 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8450 gimplify_ctxp->allow_rhs_cond_expr = true;
8452 if (var)
8453 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8455 if (TREE_CODE (expr) != MODIFY_EXPR
8456 && TREE_TYPE (expr) == void_type_node)
8458 gimplify_and_add (expr, stmts);
8459 expr = NULL_TREE;
8461 else
8463 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
8464 gcc_assert (ret != GS_ERROR);
8467 if (gimple_referenced_vars (cfun))
8468 for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
8469 add_referenced_var (t);
8471 pop_gimplify_context (NULL);
8473 return expr;
8476 /* Expand EXPR to list of gimple statements STMTS. If SIMPLE is true,
8477 force the result to be either ssa_name or an invariant, otherwise
8478 just force it to be a rhs expression. If VAR is not NULL, make the
8479 base variable of the final destination be VAR if suitable. */
8481 tree
8482 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8484 return force_gimple_operand_1 (expr, stmts,
8485 simple ? is_gimple_val : is_gimple_reg_rhs,
8486 var);
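/* Editorial note: an illustrative (not verbatim) use of
   force_gimple_operand, for a pass that needs EXPR as an SSA name or
   invariant and will insert the computation itself:

     gimple_seq stmts = NULL;
     tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);

   followed by inserting STMTS at a point that dominates all uses of VAL.
   With SIMPLE false, the result may instead be any valid GIMPLE rhs.  */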
8489 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
8490 and VAR. If some statements are produced, emits them at GSI.
8491 If BEFORE is true, the statements are inserted before GSI, otherwise
8492 they are inserted after it. M specifies the way GSI moves after
8493 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
8495 tree
8496 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8497 gimple_predicate gimple_test_f,
8498 tree var, bool before,
8499 enum gsi_iterator_update m)
8501 gimple_seq stmts;
8503 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
8505 if (!gimple_seq_empty_p (stmts))
8507 if (gimple_in_ssa_p (cfun))
8509 gimple_stmt_iterator i;
8511 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
8512 mark_symbols_for_renaming (gsi_stmt (i));
8515 if (before)
8516 gsi_insert_seq_before (gsi, stmts, m);
8517 else
8518 gsi_insert_seq_after (gsi, stmts, m);
8521 return expr;
8524 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
8525 If SIMPLE is true, force the result to be either ssa_name or an invariant,
8526 otherwise just force it to be a rhs expression. If some statements are
8527 produced, emits them at GSI. If BEFORE is true, the statements are
8528 inserted before GSI, otherwise they are inserted after it. M specifies
8529 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8530 are the usual values). */
8532 tree
8533 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8534 bool simple_p, tree var, bool before,
8535 enum gsi_iterator_update m)
8537 return force_gimple_operand_gsi_1 (gsi, expr,
8538 simple_p
8539 ? is_gimple_val : is_gimple_reg_rhs,
8540 var, before, m);
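/* Editorial note: an illustrative (not verbatim) use of
   force_gimple_operand_gsi; the common pattern in optimization passes is

     op = force_gimple_operand_gsi (&gsi, op, true, NULL_TREE,
                                    true, GSI_SAME_STMT);

   which reduces OP to an SSA name or invariant and inserts any helper
   statements immediately before the statement GSI points at.  */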
8544 #include "gt-gimplify.h"