Merge trunk version 194076 into gupc branch.
[official-gcc.git] / gcc / gimplify.c
blob 5bc5ca128fc4251f74846247a1095d4cd61bbdc4
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "tree-iterator.h"
31 #include "tree-inline.h"
32 #include "tree-pretty-print.h"
33 #include "langhooks.h"
34 #include "tree-flow.h"
35 #include "cgraph.h"
36 #include "timevar.h"
37 #include "hashtab.h"
38 #include "flags.h"
39 #include "function.h"
40 #include "ggc.h"
41 #include "diagnostic-core.h"
42 #include "target.h"
43 #include "pointer-set.h"
44 #include "splay-tree.h"
45 #include "vec.h"
46 #include "gimple.h"
48 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
49 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
51 enum gimplify_omp_var_data
52 {
53 GOVD_SEEN = 1,
54 GOVD_EXPLICIT = 2,
55 GOVD_SHARED = 4,
56 GOVD_PRIVATE = 8,
57 GOVD_FIRSTPRIVATE = 16,
58 GOVD_LASTPRIVATE = 32,
59 GOVD_REDUCTION = 64,
60 GOVD_LOCAL = 128,
61 GOVD_DEBUG_PRIVATE = 256,
62 GOVD_PRIVATE_OUTER_REF = 512,
63 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
64 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
65 };
68 enum omp_region_type
69 {
70 ORT_WORKSHARE = 0,
71 ORT_PARALLEL = 2,
72 ORT_COMBINED_PARALLEL = 3,
73 ORT_TASK = 4,
74 ORT_UNTIED_TASK = 5
75 };
77 struct gimplify_omp_ctx
78 {
79 struct gimplify_omp_ctx *outer_context;
80 splay_tree variables;
81 struct pointer_set_t *privatized_types;
82 location_t location;
83 enum omp_clause_default_kind default_kind;
84 enum omp_region_type region_type;
85 };
87 static struct gimplify_ctx *gimplify_ctxp;
88 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
91 /* Formal (expression) temporary table handling: multiple occurrences of
92 the same scalar expression are evaluated into the same temporary. */
94 typedef struct gimple_temp_hash_elt
95 {
96 tree val; /* Key */
97 tree temp; /* Value */
98 } elt_t;
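/* Illustrative note (editorial addition, not part of gimplify.c): with formal
   temporaries, repeated occurrences of the same scalar expression gimplify to
   the same temporary.  Roughly,

       x = (a + b) * (a + b);

   becomes

       D.1 = a + b;
       x = D.1 * D.1;

   rather than evaluating a + b into two distinct temporaries.  The table
   above keys each expression (val) to its reusable temporary (temp).  */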
100 /* Forward declaration. */
101 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
103 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
104 form and we don't do any syntax checking. */
106 void
107 mark_addressable (tree x)
109 while (handled_component_p (x))
110 x = TREE_OPERAND (x, 0);
111 if (TREE_CODE (x) == MEM_REF
112 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
113 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
114 if (TREE_CODE (x) != VAR_DECL
115 && TREE_CODE (x) != PARM_DECL
116 && TREE_CODE (x) != RESULT_DECL)
117 return;
118 TREE_ADDRESSABLE (x) = 1;
120 /* Also mark the artificial SSA_NAME that points to the partition of X. */
121 if (TREE_CODE (x) == VAR_DECL
122 && !DECL_EXTERNAL (x)
123 && !TREE_STATIC (x)
124 && cfun->gimple_df != NULL
125 && cfun->gimple_df->decls_to_pointers != NULL)
127 void *namep
128 = pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
129 if (namep)
130 TREE_ADDRESSABLE (*(tree *)namep) = 1;
134 /* Return a hash value for a formal temporary table entry. */
136 static hashval_t
137 gimple_tree_hash (const void *p)
139 tree t = ((const elt_t *) p)->val;
140 return iterative_hash_expr (t, 0);
143 /* Compare two formal temporary table entries. */
145 static int
146 gimple_tree_eq (const void *p1, const void *p2)
148 tree t1 = ((const elt_t *) p1)->val;
149 tree t2 = ((const elt_t *) p2)->val;
150 enum tree_code code = TREE_CODE (t1);
152 if (TREE_CODE (t2) != code
153 || TREE_TYPE (t1) != TREE_TYPE (t2))
154 return 0;
156 if (!operand_equal_p (t1, t2, 0))
157 return 0;
159 #ifdef ENABLE_CHECKING
160 /* Only allow them to compare equal if they also hash equal; otherwise
161 results are nondeterministic, and we fail bootstrap comparison. */
162 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
163 #endif
165 return 1;
168 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
169 *SEQ_P is NULL, a new sequence is allocated. This function is
170 similar to gimple_seq_add_stmt, but does not scan the operands.
171 During gimplification, we need to manipulate statement sequences
172 before the def/use vectors have been constructed. */
174 void
175 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
177 gimple_stmt_iterator si;
179 if (gs == NULL)
180 return;
182 si = gsi_last (*seq_p);
183 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
186 /* Shorter alias name for the above function for use in gimplify.c
187 only. */
189 static inline void
190 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
192 gimple_seq_add_stmt_without_update (seq_p, gs);
195 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
196 NULL, a new sequence is allocated. This function is
197 similar to gimple_seq_add_seq, but does not scan the operands.
198 During gimplification, we need to manipulate statement sequences
199 before the def/use vectors have been constructed. */
201 static void
202 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
204 gimple_stmt_iterator si;
206 if (src == NULL)
207 return;
209 si = gsi_last (*dst_p);
210 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
213 /* Set up a context for the gimplifier. */
215 void
216 push_gimplify_context (struct gimplify_ctx *c)
218 memset (c, '\0', sizeof (*c));
219 c->prev_context = gimplify_ctxp;
220 gimplify_ctxp = c;
223 /* Tear down a context for the gimplifier. If BODY is non-null, then
224 put the temporaries into the outer BIND_EXPR. Otherwise, put them
225 in the local_decls.
227 BODY is not a sequence, but the first tuple in a sequence. */
229 void
230 pop_gimplify_context (gimple body)
232 struct gimplify_ctx *c = gimplify_ctxp;
234 gcc_assert (c
235 && (!c->bind_expr_stack.exists ()
236 || c->bind_expr_stack.is_empty ()));
237 c->bind_expr_stack.release ();
238 gimplify_ctxp = c->prev_context;
240 if (body)
241 declare_vars (c->temps, body, false);
242 else
243 record_vars (c->temps);
245 if (c->temp_htab)
246 htab_delete (c->temp_htab);
249 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
251 static void
252 gimple_push_bind_expr (gimple gimple_bind)
254 gimplify_ctxp->bind_expr_stack.reserve (8);
255 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
258 /* Pop the first element off the stack of bindings. */
260 static void
261 gimple_pop_bind_expr (void)
263 gimplify_ctxp->bind_expr_stack.pop ();
266 /* Return the first element of the stack of bindings. */
268 gimple
269 gimple_current_bind_expr (void)
271 return gimplify_ctxp->bind_expr_stack.last ();
274 /* Return the stack of bindings created during gimplification. */
276 vec<gimple>
277 gimple_bind_expr_stack (void)
279 return gimplify_ctxp->bind_expr_stack;
282 /* Return true iff there is a COND_EXPR between us and the innermost
283 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
285 static bool
286 gimple_conditional_context (void)
288 return gimplify_ctxp->conditions > 0;
291 /* Note that we've entered a COND_EXPR. */
293 static void
294 gimple_push_condition (void)
296 #ifdef ENABLE_GIMPLE_CHECKING
297 if (gimplify_ctxp->conditions == 0)
298 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
299 #endif
300 ++(gimplify_ctxp->conditions);
303 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
304 now, add any conditional cleanups we've seen to the prequeue. */
306 static void
307 gimple_pop_condition (gimple_seq *pre_p)
309 int conds = --(gimplify_ctxp->conditions);
311 gcc_assert (conds >= 0);
312 if (conds == 0)
314 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
315 gimplify_ctxp->conditional_cleanups = NULL;
319 /* A stable comparison routine for use with splay trees and DECLs. */
321 static int
322 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
324 tree a = (tree) xa;
325 tree b = (tree) xb;
327 return DECL_UID (a) - DECL_UID (b);
330 /* Create a new omp construct that deals with variable remapping. */
332 static struct gimplify_omp_ctx *
333 new_omp_context (enum omp_region_type region_type)
335 struct gimplify_omp_ctx *c;
337 c = XCNEW (struct gimplify_omp_ctx);
338 c->outer_context = gimplify_omp_ctxp;
339 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
340 c->privatized_types = pointer_set_create ();
341 c->location = input_location;
342 c->region_type = region_type;
343 if ((region_type & ORT_TASK) == 0)
344 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
345 else
346 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
348 return c;
351 /* Destroy an omp construct that deals with variable remapping. */
353 static void
354 delete_omp_context (struct gimplify_omp_ctx *c)
356 splay_tree_delete (c->variables);
357 pointer_set_destroy (c->privatized_types);
358 XDELETE (c);
361 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
362 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
364 /* Both gimplify the statement T and append it to *SEQ_P. This function
365 behaves exactly as gimplify_stmt, but you don't have to pass T as a
366 reference. */
368 void
369 gimplify_and_add (tree t, gimple_seq *seq_p)
371 gimplify_stmt (&t, seq_p);
374 /* Gimplify statement T into sequence *SEQ_P, and return the first
375 tuple in the sequence of generated tuples for this statement.
376 Return NULL if gimplifying T produced no tuples. */
378 static gimple
379 gimplify_and_return_first (tree t, gimple_seq *seq_p)
381 gimple_stmt_iterator last = gsi_last (*seq_p);
383 gimplify_and_add (t, seq_p);
385 if (!gsi_end_p (last))
387 gsi_next (&last);
388 return gsi_stmt (last);
390 else
391 return gimple_seq_first_stmt (*seq_p);
394 /* Strip off a legitimate source ending from the input string NAME of
395 length LEN. Rather than having to know the names used by all of
396 our front ends, we strip off an ending of a period followed by
397 up to five characters. (Java uses ".class".) */
399 static inline void
400 remove_suffix (char *name, int len)
402 int i;
404 for (i = 2; i < 8 && len > i; i++)
406 if (name[len - i] == '.')
408 name[len - i] = '\0';
409 break;
414 /* Create a new temporary name with PREFIX. Return an identifier. */
416 static GTY(()) unsigned int tmp_var_id_num;
418 tree
419 create_tmp_var_name (const char *prefix)
421 char *tmp_name;
423 if (prefix)
425 char *preftmp = ASTRDUP (prefix);
427 remove_suffix (preftmp, strlen (preftmp));
428 clean_symbol_name (preftmp);
430 prefix = preftmp;
433 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
434 return get_identifier (tmp_name);
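/* Illustrative note (editorial addition, not part of gimplify.c): what the two
   helpers above produce.  The exact spelling of the private name depends on
   the target's ASM_FORMAT_PRIVATE_NAME, so treat these as examples only.

       create_tmp_var_name ("foo.c")   -> an identifier like "foo.123"
       create_tmp_var_name (NULL)      -> an identifier like "T.124"

   remove_suffix strips a trailing ".c", ".cc", ".class", etc., so the
   temporary's name is derived from the plain "foo".  */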
437 /* Create a new temporary variable declaration of type TYPE.
438 Do NOT push it into the current binding. */
440 tree
441 create_tmp_var_raw (tree type, const char *prefix)
443 tree tmp_var;
445 /* Temporaries cannot be UPC shared-qualified. */
446 gcc_assert (!upc_shared_type_p (type));
448 tmp_var = build_decl (input_location,
449 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
450 type);
452 /* The variable was declared by the compiler. */
453 DECL_ARTIFICIAL (tmp_var) = 1;
454 /* And we don't want debug info for it. */
455 DECL_IGNORED_P (tmp_var) = 1;
457 /* Make the variable writable. */
458 TREE_READONLY (tmp_var) = 0;
460 DECL_EXTERNAL (tmp_var) = 0;
461 TREE_STATIC (tmp_var) = 0;
462 TREE_USED (tmp_var) = 1;
464 return tmp_var;
467 /* Create a new temporary variable declaration of type TYPE. DO push the
468 variable into the current binding. Further, assume that this is called
469 only from gimplification or optimization, at which point the creation of
470 certain types are bugs. */
472 tree
473 create_tmp_var (tree type, const char *prefix)
475 tree tmp_var;
477 /* We don't allow types that are addressable (meaning we can't make copies),
478 or incomplete. We also used to reject all variable-sized objects here,
479 but now support those for which a constant upper bound can be obtained.
480 The processing for variable sizes is performed in gimple_add_tmp_var,
481 the point at which it really matters and which may be reached via paths not going
482 through this function, e.g. after direct calls to create_tmp_var_raw. */
483 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
485 tmp_var = create_tmp_var_raw (type, prefix);
486 gimple_add_tmp_var (tmp_var);
487 return tmp_var;
490 /* Create a new temporary variable declaration of type TYPE by calling
491 create_tmp_var and if TYPE is a vector or a complex number, mark the new
492 temporary as gimple register. */
494 tree
495 create_tmp_reg (tree type, const char *prefix)
497 tree tmp;
499 tmp = create_tmp_var (type, prefix);
500 if (TREE_CODE (type) == COMPLEX_TYPE
501 || TREE_CODE (type) == VECTOR_TYPE)
502 DECL_GIMPLE_REG_P (tmp) = 1;
504 return tmp;
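/* Illustrative sketch (editorial addition, not part of gimplify.c): a typical
   way a gimplification helper would use the temporaries above.  The function
   name "example_square" is hypothetical.  */

static tree
example_square (tree val, gimple_seq *pre_p)
{
  /* VAL is assumed to be a GIMPLE value with a scalar (register) type.  */
  tree tmp = create_tmp_reg (TREE_TYPE (val), "sq");
  gimplify_assign (tmp, build2 (MULT_EXPR, TREE_TYPE (val), val, val), pre_p);
  return tmp;
}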
507 /* Returns true iff T is a valid RHS for an assignment to a renamed
508 user -- or front-end generated artificial -- variable. */
510 static bool
511 is_gimple_reg_rhs (tree t)
513 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
516 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
517 LHS, or for a call argument. */
519 static bool
520 is_gimple_mem_rhs (tree t)
522 /* If we're dealing with a renamable type, either source or dest must be
523 a renamed variable. */
524 if (is_gimple_reg_type (TREE_TYPE (t)))
525 return is_gimple_val (t);
526 else
527 return is_gimple_val (t) || is_gimple_lvalue (t);
530 /* Return true if T is a CALL_EXPR or an expression that can be
531 assigned to a temporary. Note that this predicate should only be
532 used during gimplification. See the rationale for this in
533 gimplify_modify_expr. */
535 static bool
536 is_gimple_reg_rhs_or_call (tree t)
538 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
539 || TREE_CODE (t) == CALL_EXPR);
542 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
543 this predicate should only be used during gimplification. See the
544 rationale for this in gimplify_modify_expr. */
546 static bool
547 is_gimple_mem_rhs_or_call (tree t)
549 /* If we're dealing with a renamable type, either source or dest must be
550 a renamed variable. */
551 if (is_gimple_reg_type (TREE_TYPE (t)))
552 return is_gimple_val (t);
553 else
554 return (is_gimple_val (t) || is_gimple_lvalue (t)
555 || TREE_CODE (t) == CALL_EXPR);
558 /* Create a temporary with a name derived from VAL. Subroutine of
559 lookup_tmp_var; nobody else should call this function. */
561 static inline tree
562 create_tmp_from_val (tree val, bool is_formal)
564 /* Drop all qualifiers and address-space information from the value type. */
565 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
566 tree var = create_tmp_var (type, get_name (val));
567 if (is_formal
568 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
569 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
570 DECL_GIMPLE_REG_P (var) = 1;
571 return var;
574 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
575 an existing expression temporary. */
577 static tree
578 lookup_tmp_var (tree val, bool is_formal)
580 tree ret;
582 /* If not optimizing, never really reuse a temporary. local-alloc
583 won't allocate any variable that is used in more than one basic
584 block, which means it will go into memory, causing much extra
585 work in reload and final and poorer code generation, outweighing
586 the extra memory allocation here. */
587 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
588 ret = create_tmp_from_val (val, is_formal);
589 else
591 elt_t elt, *elt_p;
592 void **slot;
594 elt.val = val;
595 if (gimplify_ctxp->temp_htab == NULL)
596 gimplify_ctxp->temp_htab
597 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
598 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
599 if (*slot == NULL)
601 elt_p = XNEW (elt_t);
602 elt_p->val = val;
603 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
604 *slot = (void *) elt_p;
606 else
608 elt_p = (elt_t *) *slot;
609 ret = elt_p->temp;
613 return ret;
616 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
618 static tree
619 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
620 bool is_formal)
622 tree t, mod;
624 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
625 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
626 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
627 fb_rvalue);
629 if (gimplify_ctxp->into_ssa
630 && is_gimple_reg_type (TREE_TYPE (val)))
631 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
632 else
633 t = lookup_tmp_var (val, is_formal);
635 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
637 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
639 /* gimplify_modify_expr might want to reduce this further. */
640 gimplify_and_add (mod, pre_p);
641 ggc_free (mod);
643 return t;
646 /* Return a formal temporary variable initialized with VAL. PRE_P is as
647 in gimplify_expr. Only use this function if:
649 1) The value of the unfactored expression represented by VAL will not
650 change between the initialization and use of the temporary, and
651 2) The temporary will not be otherwise modified.
653 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
654 and #2 means it is inappropriate for && temps.
656 For other cases, use get_initialized_tmp_var instead. */
658 tree
659 get_formal_tmp_var (tree val, gimple_seq *pre_p)
661 return internal_get_tmp_var (val, pre_p, NULL, true);
664 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
665 are as in gimplify_expr. */
667 tree
668 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
670 return internal_get_tmp_var (val, pre_p, post_p, false);
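/* Illustrative sketch (editorial addition, not part of gimplify.c): choosing
   between the two entry points above.  "example_uses" is hypothetical.  */

static void
example_uses (tree expr, gimple_seq *pre_p, gimple_seq *post_p)
{
  /* EXPR will not change between initialization and use, and the temporary
     will not be modified afterwards: a formal temporary may be shared with
     other occurrences of the same expression.  */
  tree t1 = get_formal_tmp_var (expr, pre_p);

  /* No such guarantees (e.g. a SAVE_EXPR or a && operand): always use an
     ordinary initialized temporary.  */
  tree t2 = get_initialized_tmp_var (expr, pre_p, post_p);

  (void) t1;
  (void) t2;
}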
673 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
674 generate debug info for them; otherwise don't. */
676 void
677 declare_vars (tree vars, gimple scope, bool debug_info)
679 tree last = vars;
680 if (last)
682 tree temps, block;
684 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
686 temps = nreverse (last);
688 block = gimple_bind_block (scope);
689 gcc_assert (!block || TREE_CODE (block) == BLOCK);
690 if (!block || !debug_info)
692 DECL_CHAIN (last) = gimple_bind_vars (scope);
693 gimple_bind_set_vars (scope, temps);
695 else
697 /* We need to attach the nodes both to the BIND_EXPR and to its
698 associated BLOCK for debugging purposes. The key point here
699 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
700 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
701 if (BLOCK_VARS (block))
702 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
703 else
705 gimple_bind_set_vars (scope,
706 chainon (gimple_bind_vars (scope), temps));
707 BLOCK_VARS (block) = temps;
713 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
714 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
715 no such upper bound can be obtained. */
717 static void
718 force_constant_size (tree var)
720 /* The only attempt we make is by querying the maximum size of objects
721 of the variable's type. */
723 HOST_WIDE_INT max_size;
725 gcc_assert (TREE_CODE (var) == VAR_DECL);
727 max_size = max_int_size_in_bytes (TREE_TYPE (var));
729 gcc_assert (max_size >= 0);
731 DECL_SIZE_UNIT (var)
732 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
733 DECL_SIZE (var)
734 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
737 /* Push the temporary variable TMP into the current binding. */
739 void
740 gimple_add_tmp_var (tree tmp)
742 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
744 /* Later processing assumes that the object size is constant, which might
745 not be true at this point. Force the use of a constant upper bound in
746 this case. */
747 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
748 force_constant_size (tmp);
750 DECL_CONTEXT (tmp) = current_function_decl;
751 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
753 if (gimplify_ctxp)
755 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
756 gimplify_ctxp->temps = tmp;
758 /* Mark temporaries local within the nearest enclosing parallel. */
759 if (gimplify_omp_ctxp)
761 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
762 while (ctx && ctx->region_type == ORT_WORKSHARE)
763 ctx = ctx->outer_context;
764 if (ctx)
765 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
768 else if (cfun)
769 record_vars (tmp);
770 else
772 gimple_seq body_seq;
774 /* This case is for nested functions. We need to expose the locals
775 they create. */
776 body_seq = gimple_body (current_function_decl);
777 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
781 /* Determine whether to assign a location to the statement GS. */
783 static bool
784 should_carry_location_p (gimple gs)
786 /* Don't emit a line note for a label. We particularly don't want to
787 emit one for the break label, since it doesn't actually correspond
788 to the beginning of the loop/switch. */
789 if (gimple_code (gs) == GIMPLE_LABEL)
790 return false;
792 return true;
795 /* Return true if a location should not be emitted for this statement
796 by annotate_one_with_location. */
798 static inline bool
799 gimple_do_not_emit_location_p (gimple g)
801 return gimple_plf (g, GF_PLF_1);
804 /* Mark statement G so a location will not be emitted by
805 annotate_one_with_location. */
807 static inline void
808 gimple_set_do_not_emit_location (gimple g)
810 /* The PLF flags are initialized to 0 when a new tuple is created,
811 so no need to initialize it anywhere. */
812 gimple_set_plf (g, GF_PLF_1, true);
815 /* Set the location for gimple statement GS to LOCATION. */
817 static void
818 annotate_one_with_location (gimple gs, location_t location)
820 if (!gimple_has_location (gs)
821 && !gimple_do_not_emit_location_p (gs)
822 && should_carry_location_p (gs))
823 gimple_set_location (gs, location);
826 /* Set LOCATION for all the statements after iterator GSI in sequence
827 SEQ. If GSI is pointing to the end of the sequence, start with the
828 first statement in SEQ. */
830 static void
831 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
832 location_t location)
834 if (gsi_end_p (gsi))
835 gsi = gsi_start (seq);
836 else
837 gsi_next (&gsi);
839 for (; !gsi_end_p (gsi); gsi_next (&gsi))
840 annotate_one_with_location (gsi_stmt (gsi), location);
843 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
845 void
846 annotate_all_with_location (gimple_seq stmt_p, location_t location)
848 gimple_stmt_iterator i;
850 if (gimple_seq_empty_p (stmt_p))
851 return;
853 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
855 gimple gs = gsi_stmt (i);
856 annotate_one_with_location (gs, location);
860 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
861 nodes that are referenced more than once in GENERIC functions. This is
862 necessary because gimplification (translation into GIMPLE) is performed
863 by modifying tree nodes in-place, so gimplification of a shared node in a
864 first context could generate an invalid GIMPLE form in a second context.
866 This is achieved with a simple mark/copy/unmark algorithm that walks the
867 GENERIC representation top-down, marks nodes with TREE_VISITED the first
868 time it encounters them, duplicates them if they already have TREE_VISITED
869 set, and finally removes the TREE_VISITED marks it has set.
871 The algorithm works only at the function level, i.e. it generates a GENERIC
872 representation of a function with no nodes shared within the function when
873 passed a GENERIC function (except for nodes that are allowed to be shared).
875 At the global level, it is also necessary to unshare tree nodes that are
876 referenced in more than one function, for the same aforementioned reason.
877 This requires some cooperation from the front-end. There are 2 strategies:
879 1. Manual unsharing. The front-end needs to call unshare_expr on every
880 expression that might end up being shared across functions.
882 2. Deep unsharing. This is an extension of regular unsharing. Instead
883 of calling unshare_expr on expressions that might be shared across
884 functions, the front-end pre-marks them with TREE_VISITED. This will
885 ensure that they are unshared on the first reference within functions
886 when the regular unsharing algorithm runs. The counterpart is that
887 this algorithm must look deeper than for manual unsharing, which is
888 specified by LANG_HOOKS_DEEP_UNSHARING.
890 If there are only a few specific cases of node sharing across functions, it is
891 probably easier for a front-end to unshare the expressions manually. On the
892 contrary, if the expressions generated at the global level are as widespread
893 as expressions generated within functions, deep unsharing is very likely the
894 way to go. */
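/* Illustrative note (editorial addition, not part of gimplify.c): why sharing
   is a problem.  If one node, say the PLUS_EXPR "a + b", is referenced from
   two places in GENERIC, gimplifying the first reference rewrites the node in
   place, e.g. replacing its operands with temporaries computed in that
   context.  The second reference then sees the already-lowered node in a
   context where those temporaries may not be defined, which is invalid
   GIMPLE.  The mark/copy/unmark walk below duplicates such nodes before that
   can happen.  */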
896 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
897 These nodes model computations that must be done once. If we were to
898 unshare something like SAVE_EXPR(i++), the gimplification process would
899 create wrong code. However, if DATA is non-null, it must hold a pointer
900 set that is used to unshare the subtrees of these nodes. */
902 static tree
903 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
905 tree t = *tp;
906 enum tree_code code = TREE_CODE (t);
908 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
909 copy their subtrees if we can make sure to do it only once. */
910 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
912 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
914 else
915 *walk_subtrees = 0;
918 /* Stop at types, decls, constants like copy_tree_r. */
919 else if (TREE_CODE_CLASS (code) == tcc_type
920 || TREE_CODE_CLASS (code) == tcc_declaration
921 || TREE_CODE_CLASS (code) == tcc_constant
922 /* We can't do anything sensible with a BLOCK used as an
923 expression, but we also can't just die when we see it
924 because of non-expression uses. So we avert our eyes
925 and cross our fingers. Silly Java. */
926 || code == BLOCK)
927 *walk_subtrees = 0;
929 /* Cope with the statement expression extension. */
930 else if (code == STATEMENT_LIST)
933 /* Leave the bulk of the work to copy_tree_r itself. */
934 else
935 copy_tree_r (tp, walk_subtrees, NULL);
937 return NULL_TREE;
940 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
941 If *TP has been visited already, then *TP is deeply copied by calling
942 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
944 static tree
945 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
947 tree t = *tp;
948 enum tree_code code = TREE_CODE (t);
950 /* Skip types, decls, and constants. But we do want to look at their
951 types and the bounds of types. Mark them as visited so we properly
952 unmark their subtrees on the unmark pass. If we've already seen them,
953 don't look down further. */
954 if (TREE_CODE_CLASS (code) == tcc_type
955 || TREE_CODE_CLASS (code) == tcc_declaration
956 || TREE_CODE_CLASS (code) == tcc_constant)
958 if (TREE_VISITED (t))
959 *walk_subtrees = 0;
960 else
961 TREE_VISITED (t) = 1;
964 /* If this node has been visited already, unshare it and don't look
965 any deeper. */
966 else if (TREE_VISITED (t))
968 walk_tree (tp, mostly_copy_tree_r, data, NULL);
969 *walk_subtrees = 0;
972 /* Otherwise, mark the node as visited and keep looking. */
973 else
974 TREE_VISITED (t) = 1;
976 return NULL_TREE;
979 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
980 copy_if_shared_r callback unmodified. */
982 static inline void
983 copy_if_shared (tree *tp, void *data)
985 walk_tree (tp, copy_if_shared_r, data, NULL);
988 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
989 any nested functions. */
991 static void
992 unshare_body (tree fndecl)
994 struct cgraph_node *cgn = cgraph_get_node (fndecl);
995 /* If the language requires deep unsharing, we need a pointer set to make
996 sure we don't repeatedly unshare subtrees of unshareable nodes. */
997 struct pointer_set_t *visited
998 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
1000 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
1001 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
1002 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
1004 if (visited)
1005 pointer_set_destroy (visited);
1007 if (cgn)
1008 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1009 unshare_body (cgn->symbol.decl);
1012 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
1013 Subtrees are walked until the first unvisited node is encountered. */
1015 static tree
1016 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1018 tree t = *tp;
1020 /* If this node has been visited, unmark it and keep looking. */
1021 if (TREE_VISITED (t))
1022 TREE_VISITED (t) = 0;
1024 /* Otherwise, don't look any deeper. */
1025 else
1026 *walk_subtrees = 0;
1028 return NULL_TREE;
1031 /* Unmark the visited trees rooted at *TP. */
1033 static inline void
1034 unmark_visited (tree *tp)
1036 walk_tree (tp, unmark_visited_r, NULL, NULL);
1039 /* Likewise, but mark all trees as not visited. */
1041 static void
1042 unvisit_body (tree fndecl)
1044 struct cgraph_node *cgn = cgraph_get_node (fndecl);
1046 unmark_visited (&DECL_SAVED_TREE (fndecl));
1047 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1048 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1050 if (cgn)
1051 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1052 unvisit_body (cgn->symbol.decl);
1055 /* Unconditionally make an unshared copy of EXPR. This is used when using
1056 stored expressions which span multiple functions, such as BINFO_VTABLE,
1057 as the normal unsharing process can't tell that they're shared. */
1059 tree
1060 unshare_expr (tree expr)
1062 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1063 return expr;
1066 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1067 contain statements and have a value. Assign its value to a temporary
1068 and give it void_type_node. Return the temporary, or NULL_TREE if
1069 WRAPPER was already void. */
1071 tree
1072 voidify_wrapper_expr (tree wrapper, tree temp)
1074 tree type = TREE_TYPE (wrapper);
1075 if (type && !VOID_TYPE_P (type))
1077 tree *p;
1079 /* Set p to point to the body of the wrapper. Loop until we find
1080 something that isn't a wrapper. */
1081 for (p = &wrapper; p && *p; )
1083 switch (TREE_CODE (*p))
1085 case BIND_EXPR:
1086 TREE_SIDE_EFFECTS (*p) = 1;
1087 TREE_TYPE (*p) = void_type_node;
1088 /* For a BIND_EXPR, the body is operand 1. */
1089 p = &BIND_EXPR_BODY (*p);
1090 break;
1092 case CLEANUP_POINT_EXPR:
1093 case TRY_FINALLY_EXPR:
1094 case TRY_CATCH_EXPR:
1095 TREE_SIDE_EFFECTS (*p) = 1;
1096 TREE_TYPE (*p) = void_type_node;
1097 p = &TREE_OPERAND (*p, 0);
1098 break;
1100 case STATEMENT_LIST:
1102 tree_stmt_iterator i = tsi_last (*p);
1103 TREE_SIDE_EFFECTS (*p) = 1;
1104 TREE_TYPE (*p) = void_type_node;
1105 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1107 break;
1109 case COMPOUND_EXPR:
1110 /* Advance to the last statement. Set all container types to
1111 void. */
1112 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1114 TREE_SIDE_EFFECTS (*p) = 1;
1115 TREE_TYPE (*p) = void_type_node;
1117 break;
1119 case TRANSACTION_EXPR:
1120 TREE_SIDE_EFFECTS (*p) = 1;
1121 TREE_TYPE (*p) = void_type_node;
1122 p = &TRANSACTION_EXPR_BODY (*p);
1123 break;
1125 default:
1126 /* Assume that any tree upon which voidify_wrapper_expr is
1127 directly called is a wrapper, and that its body is op0. */
1128 if (p == &wrapper)
1130 TREE_SIDE_EFFECTS (*p) = 1;
1131 TREE_TYPE (*p) = void_type_node;
1132 p = &TREE_OPERAND (*p, 0);
1133 break;
1135 goto out;
1139 out:
1140 if (p == NULL || IS_EMPTY_STMT (*p))
1141 temp = NULL_TREE;
1142 else if (temp)
1144 /* The wrapper is on the RHS of an assignment that we're pushing
1145 down. */
1146 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1147 || TREE_CODE (temp) == MODIFY_EXPR);
1148 TREE_OPERAND (temp, 1) = *p;
1149 *p = temp;
1151 else
1153 temp = create_tmp_var (type, "retval");
1154 *p = build2 (INIT_EXPR, type, temp, *p);
1157 return temp;
1160 return NULL_TREE;
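/* Illustrative note (editorial addition, not part of gimplify.c): the effect
   of voidify_wrapper_expr on a value-producing wrapper when TEMP is NULL.
   Roughly,

       ({ int i = f (); i + 1; })          -- a wrapper with a value

   becomes

       ({ int i = f (); retval.1 = i + 1; })   -- wrapper now has void type

   and retval.1 (a "retval" temporary created above) is returned so the
   caller can use it as the value of the whole expression.  When TEMP is an
   INIT_EXPR/MODIFY_EXPR, that assignment is pushed inside instead.  */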
1163 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1164 a temporary through which they communicate. */
1166 static void
1167 build_stack_save_restore (gimple *save, gimple *restore)
1169 tree tmp_var;
1171 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1172 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1173 gimple_call_set_lhs (*save, tmp_var);
1175 *restore
1176 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1177 1, tmp_var);
1180 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1182 static enum gimplify_status
1183 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1185 tree bind_expr = *expr_p;
1186 bool old_save_stack = gimplify_ctxp->save_stack;
1187 tree t;
1188 gimple gimple_bind;
1189 gimple_seq body, cleanup;
1190 gimple stack_save;
1192 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1194 /* Mark variables seen in this bind expr. */
1195 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1197 if (TREE_CODE (t) == VAR_DECL)
1199 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1201 /* Mark variable as local. */
1202 if (ctx && !DECL_EXTERNAL (t)
1203 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1204 || splay_tree_lookup (ctx->variables,
1205 (splay_tree_key) t) == NULL))
1206 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1208 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1210 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1211 cfun->has_local_explicit_reg_vars = true;
1214 /* Preliminarily mark non-addressed complex variables as eligible
1215 for promotion to gimple registers. We'll transform their uses
1216 as we find them. */
1217 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1218 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1219 && !TREE_THIS_VOLATILE (t)
1220 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1221 && !needs_to_live_in_memory (t))
1222 DECL_GIMPLE_REG_P (t) = 1;
1225 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1226 BIND_EXPR_BLOCK (bind_expr));
1227 gimple_push_bind_expr (gimple_bind);
1229 gimplify_ctxp->save_stack = false;
1231 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1232 body = NULL;
1233 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1234 gimple_bind_set_body (gimple_bind, body);
1236 cleanup = NULL;
1237 stack_save = NULL;
1238 if (gimplify_ctxp->save_stack)
1240 gimple stack_restore;
1242 /* Save stack on entry and restore it on exit. Add a try_finally
1243 block to achieve this. Note that mudflap depends on the
1244 format of the emitted code: see mx_register_decls(). */
1245 build_stack_save_restore (&stack_save, &stack_restore);
1247 gimplify_seq_add_stmt (&cleanup, stack_restore);
1250 /* Add clobbers for all variables that go out of scope. */
1251 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1253 if (TREE_CODE (t) == VAR_DECL
1254 && !is_global_var (t)
1255 && DECL_CONTEXT (t) == current_function_decl
1256 && !DECL_HARD_REGISTER (t)
1257 && !TREE_THIS_VOLATILE (t)
1258 && !DECL_HAS_VALUE_EXPR_P (t)
1259 /* Only care for variables that have to be in memory. Others
1260 will be rewritten into SSA names, hence moved to the top-level. */
1261 && !is_gimple_reg (t)
1262 && flag_stack_reuse != SR_NONE)
1264 tree clobber = build_constructor (TREE_TYPE (t),
1265 NULL);
1266 TREE_THIS_VOLATILE (clobber) = 1;
1267 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1271 if (cleanup)
1273 gimple gs;
1274 gimple_seq new_body;
1276 new_body = NULL;
1277 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1278 GIMPLE_TRY_FINALLY);
1280 if (stack_save)
1281 gimplify_seq_add_stmt (&new_body, stack_save);
1282 gimplify_seq_add_stmt (&new_body, gs);
1283 gimple_bind_set_body (gimple_bind, new_body);
1286 gimplify_ctxp->save_stack = old_save_stack;
1287 gimple_pop_bind_expr ();
1289 gimplify_seq_add_stmt (pre_p, gimple_bind);
1291 if (temp)
1293 *expr_p = temp;
1294 return GS_OK;
1297 *expr_p = NULL_TREE;
1298 return GS_ALL_DONE;
1301 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1302 GIMPLE value, it is assigned to a new temporary and the statement is
1303 re-written to return the temporary.
1305 PRE_P points to the sequence where side effects that must happen before
1306 STMT should be stored. */
1308 static enum gimplify_status
1309 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1311 gimple ret;
1312 tree ret_expr = TREE_OPERAND (stmt, 0);
1313 tree result_decl, result;
1315 if (ret_expr == error_mark_node)
1316 return GS_ERROR;
1318 if (!ret_expr
1319 || TREE_CODE (ret_expr) == RESULT_DECL
1320 || ret_expr == error_mark_node)
1322 gimple ret = gimple_build_return (ret_expr);
1323 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1324 gimplify_seq_add_stmt (pre_p, ret);
1325 return GS_ALL_DONE;
1328 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1329 result_decl = NULL_TREE;
1330 else
1332 result_decl = TREE_OPERAND (ret_expr, 0);
1334 /* See through a return by reference. */
1335 if (TREE_CODE (result_decl) == INDIRECT_REF)
1336 result_decl = TREE_OPERAND (result_decl, 0);
1338 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1339 || TREE_CODE (ret_expr) == INIT_EXPR)
1340 && TREE_CODE (result_decl) == RESULT_DECL);
1343 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1344 Recall that aggregate_value_p is FALSE for any aggregate type that is
1345 returned in registers. If we're returning values in registers, then
1346 we don't want to extend the lifetime of the RESULT_DECL, particularly
1347 across another call. In addition, for those aggregates for which
1348 hard_function_value generates a PARALLEL, we'll die during normal
1349 expansion of structure assignments; there's special code in expand_return
1350 to handle this case that does not exist in expand_expr. */
1351 if (!result_decl)
1352 result = NULL_TREE;
1353 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1355 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1357 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1358 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1359 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1360 should be effectively allocated by the caller, i.e. all calls to
1361 this function must be subject to the Return Slot Optimization. */
1362 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1363 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1365 result = result_decl;
1367 else if (gimplify_ctxp->return_temp)
1368 result = gimplify_ctxp->return_temp;
1369 else
1371 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1373 /* ??? With complex control flow (usually involving abnormal edges),
1374 we can wind up warning about an uninitialized value for this. Due
1375 to how this variable is constructed and initialized, this is never
1376 true. Give up and never warn. */
1377 TREE_NO_WARNING (result) = 1;
1379 gimplify_ctxp->return_temp = result;
1382 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1383 Then gimplify the whole thing. */
1384 if (result != result_decl)
1385 TREE_OPERAND (ret_expr, 0) = result;
1387 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1389 ret = gimple_build_return (result);
1390 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1391 gimplify_seq_add_stmt (pre_p, ret);
1393 return GS_ALL_DONE;
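/* Illustrative note (editorial addition, not part of gimplify.c): for a
   value returned in registers, "return a + b;" gimplifies roughly to

       retval.2 = a + b;
       return retval.2;

   with retval.2 cached in gimplify_ctxp->return_temp so that every return
   statement in the function reuses the same temporary.  */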
1396 /* Gimplify a variable-length array DECL. */
1398 static void
1399 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1401 /* This is a variable-sized decl. Simplify its size and mark it
1402 for deferred expansion. Note that mudflap depends on the format
1403 of the emitted code: see mx_register_decls(). */
1404 tree t, addr, ptr_type;
1406 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1407 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1409 /* All occurrences of this decl in final gimplified code will be
1410 replaced by indirection. Setting DECL_VALUE_EXPR does two
1411 things: First, it lets the rest of the gimplifier know what
1412 replacement to use. Second, it lets the debug info know
1413 where to find the value. */
1414 ptr_type = build_pointer_type (TREE_TYPE (decl));
1415 addr = create_tmp_var (ptr_type, get_name (decl));
1416 DECL_IGNORED_P (addr) = 0;
1417 t = build_fold_indirect_ref (addr);
1418 TREE_THIS_NOTRAP (t) = 1;
1419 SET_DECL_VALUE_EXPR (decl, t);
1420 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1422 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1423 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1424 size_int (DECL_ALIGN (decl)));
1425 /* The call has been built for a variable-sized object. */
1426 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1427 t = fold_convert (ptr_type, t);
1428 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1430 gimplify_and_add (t, seq_p);
1432 /* Indicate that we need to restore the stack level when the
1433 enclosing BIND_EXPR is exited. */
1434 gimplify_ctxp->save_stack = true;
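/* Illustrative note (editorial addition, not part of gimplify.c): for a
   declaration like "char buf[n];" the code above roughly produces

       addr.3 = __builtin_alloca_with_align (size, align);

   where addr.3 is the pointer temporary created for the decl; the call is
   marked with CALL_ALLOCA_FOR_VAR_P, and DECL_VALUE_EXPR rewrites every
   later use of "buf" as "*addr.3".  Setting save_stack makes the enclosing
   GIMPLE_BIND save and restore the stack pointer around the allocation.  */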
1437 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1438 and initialization explicit. */
1440 static enum gimplify_status
1441 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1443 tree stmt = *stmt_p;
1444 tree decl = DECL_EXPR_DECL (stmt);
1446 *stmt_p = NULL_TREE;
1448 if (TREE_TYPE (decl) == error_mark_node)
1449 return GS_ERROR;
1451 if ((TREE_CODE (decl) == TYPE_DECL
1452 || TREE_CODE (decl) == VAR_DECL)
1453 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1454 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1456 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1457 in case its size expressions contain problematic nodes like CALL_EXPR. */
1458 if (TREE_CODE (decl) == TYPE_DECL
1459 && DECL_ORIGINAL_TYPE (decl)
1460 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1461 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1463 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1465 tree init = DECL_INITIAL (decl);
1467 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1468 || (!TREE_STATIC (decl)
1469 && flag_stack_check == GENERIC_STACK_CHECK
1470 && compare_tree_int (DECL_SIZE_UNIT (decl),
1471 STACK_CHECK_MAX_VAR_SIZE) > 0))
1472 gimplify_vla_decl (decl, seq_p);
1474 /* Some front ends do not explicitly declare all anonymous
1475 artificial variables. We compensate here by declaring the
1476 variables, though it would be better if the front ends would
1477 explicitly declare them. */
1478 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1479 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1480 gimple_add_tmp_var (decl);
1482 if (init && init != error_mark_node)
1484 if (!TREE_STATIC (decl))
1486 DECL_INITIAL (decl) = NULL_TREE;
1487 init = build2 (INIT_EXPR, void_type_node, decl, init);
1488 gimplify_and_add (init, seq_p);
1489 ggc_free (init);
1491 else
1492 /* We must still examine initializers for static variables
1493 as they may contain a label address. */
1494 walk_tree (&init, force_labels_r, NULL, NULL);
1498 return GS_ALL_DONE;
1501 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1502 and replacing the LOOP_EXPR with goto, but if the loop contains an
1503 EXIT_EXPR, we need to append a label for it to jump to. */
1505 static enum gimplify_status
1506 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1508 tree saved_label = gimplify_ctxp->exit_label;
1509 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1511 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1513 gimplify_ctxp->exit_label = NULL_TREE;
1515 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1517 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1519 if (gimplify_ctxp->exit_label)
1520 gimplify_seq_add_stmt (pre_p,
1521 gimple_build_label (gimplify_ctxp->exit_label));
1523 gimplify_ctxp->exit_label = saved_label;
1525 *expr_p = NULL;
1526 return GS_ALL_DONE;
1529 /* Gimplify a statement list onto a sequence. These may be created either
1530 by an enlightened front-end, or by shortcut_cond_expr. */
1532 static enum gimplify_status
1533 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1535 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1537 tree_stmt_iterator i = tsi_start (*expr_p);
1539 while (!tsi_end_p (i))
1541 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1542 tsi_delink (&i);
1545 if (temp)
1547 *expr_p = temp;
1548 return GS_OK;
1551 return GS_ALL_DONE;
1554 /* Compare two case labels. Because the front end should already have
1555 made sure that case ranges do not overlap, it is enough to only compare
1556 the CASE_LOW values of each case label. */
1558 static int
1559 compare_case_labels (const void *p1, const void *p2)
1561 const_tree const case1 = *(const_tree const*)p1;
1562 const_tree const case2 = *(const_tree const*)p2;
1564 /* The 'default' case label always goes first. */
1565 if (!CASE_LOW (case1))
1566 return -1;
1567 else if (!CASE_LOW (case2))
1568 return 1;
1569 else
1570 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1573 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1575 void
1576 sort_case_labels (vec<tree> label_vec)
1578 label_vec.qsort (compare_case_labels);
1581 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
1583 LABELS is a vector that contains all case labels to look at.
1585 INDEX_TYPE is the type of the switch index expression. Case labels
1586 in LABELS are discarded if their values are not in the value range
1587 covered by INDEX_TYPE. The remaining case label values are folded
1588 to INDEX_TYPE.
1590 If a default case exists in LABELS, it is removed from LABELS and
1591 returned in DEFAULT_CASEP. If no default case exists, but the
1592 case labels already cover the whole range of INDEX_TYPE, a default
1593 case is returned pointing to one of the existing case labels.
1594 Otherwise DEFAULT_CASEP is set to NULL_TREE.
1596 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
1597 apply and no action is taken regardless of whether a default case is
1598 found or not. */
1600 void
1601 preprocess_case_label_vec_for_gimple (vec<tree> labels,
1602 tree index_type,
1603 tree *default_casep)
1605 tree min_value, max_value;
1606 tree default_case = NULL_TREE;
1607 size_t i, len;
1609 i = 0;
1610 min_value = TYPE_MIN_VALUE (index_type);
1611 max_value = TYPE_MAX_VALUE (index_type);
1612 while (i < labels.length ())
1614 tree elt = labels[i];
1615 tree low = CASE_LOW (elt);
1616 tree high = CASE_HIGH (elt);
1617 bool remove_element = FALSE;
1619 if (low)
1621 gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
1622 gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
1624 /* This is a non-default case label, i.e. it has a value.
1626 See if the case label is reachable within the range of
1627 the index type. Remove out-of-range case values. Turn
1628 case ranges into a canonical form (high > low strictly)
1629 and convert the case label values to the index type.
1631 NB: The type of gimple_switch_index() may be the promoted
1632 type, but the case labels retain the original type. */
1634 if (high)
1636 /* This is a case range. Discard empty ranges.
1637 If the bounds of the range are equal, turn this
1638 into a simple (one-value) case. */
1639 int cmp = tree_int_cst_compare (high, low);
1640 if (cmp < 0)
1641 remove_element = TRUE;
1642 else if (cmp == 0)
1643 high = NULL_TREE;
1646 if (! high)
1648 /* If the simple case value is unreachable, ignore it. */
1649 if ((TREE_CODE (min_value) == INTEGER_CST
1650 && tree_int_cst_compare (low, min_value) < 0)
1651 || (TREE_CODE (max_value) == INTEGER_CST
1652 && tree_int_cst_compare (low, max_value) > 0))
1653 remove_element = TRUE;
1654 else
1655 low = fold_convert (index_type, low);
1657 else
1659 /* If the entire case range is unreachable, ignore it. */
1660 if ((TREE_CODE (min_value) == INTEGER_CST
1661 && tree_int_cst_compare (high, min_value) < 0)
1662 || (TREE_CODE (max_value) == INTEGER_CST
1663 && tree_int_cst_compare (low, max_value) > 0))
1664 remove_element = TRUE;
1665 else
1667 /* If the lower bound is less than the index type's
1668 minimum value, truncate the range bounds. */
1669 if (TREE_CODE (min_value) == INTEGER_CST
1670 && tree_int_cst_compare (low, min_value) < 0)
1671 low = min_value;
1672 low = fold_convert (index_type, low);
1674 /* If the upper bound is greater than the index type's
1675 maximum value, truncate the range bounds. */
1676 if (TREE_CODE (max_value) == INTEGER_CST
1677 && tree_int_cst_compare (high, max_value) > 0)
1678 high = max_value;
1679 high = fold_convert (index_type, high);
1681 /* We may have folded a case range to a one-value case. */
1682 if (tree_int_cst_equal (low, high))
1683 high = NULL_TREE;
1687 CASE_LOW (elt) = low;
1688 CASE_HIGH (elt) = high;
1690 else
1692 gcc_assert (!default_case);
1693 default_case = elt;
1694 /* The default case must be passed separately to the
1695 gimple_build_switch routine. But if DEFAULT_CASEP
1696 is NULL, we do not remove the default case (it would
1697 be completely lost). */
1698 if (default_casep)
1699 remove_element = TRUE;
1702 if (remove_element)
1703 labels.ordered_remove (i);
1704 else
1705 i++;
1707 len = i;
1709 if (!labels.is_empty ())
1710 sort_case_labels (labels);
1712 if (default_casep && !default_case)
1714 /* If the switch has no default label, add one, so that we jump
1715 around the switch body. If the labels already cover the whole
1716 range of the switch index_type, add the default label pointing
1717 to one of the existing labels. */
1718 if (len
1719 && TYPE_MIN_VALUE (index_type)
1720 && TYPE_MAX_VALUE (index_type)
1721 && tree_int_cst_equal (CASE_LOW (labels[0]),
1722 TYPE_MIN_VALUE (index_type)))
1724 tree low, high = CASE_HIGH (labels[len - 1]);
1725 if (!high)
1726 high = CASE_LOW (labels[len - 1]);
1727 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
1729 for (i = 1; i < len; i++)
1731 high = CASE_LOW (labels[i]);
1732 low = CASE_HIGH (labels[i - 1]);
1733 if (!low)
1734 low = CASE_LOW (labels[i - 1]);
1735 if ((TREE_INT_CST_LOW (low) + 1
1736 != TREE_INT_CST_LOW (high))
1737 || (TREE_INT_CST_HIGH (low)
1738 + (TREE_INT_CST_LOW (high) == 0)
1739 != TREE_INT_CST_HIGH (high)))
1740 break;
1742 if (i == len)
1744 tree label = CASE_LABEL (labels[0]);
1745 default_case = build_case_label (NULL_TREE, NULL_TREE,
1746 label);
1752 if (default_casep)
1753 *default_casep = default_case;
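/* Illustrative note (editorial addition, not part of gimplify.c): given an
   index type of "unsigned char" (range 0..255), the preprocessing above
   would drop "case 300:" as unreachable, truncate "case 250 ... 300:" to
   250 ... 255, fold the remaining label values to the index type, sort the
   labels, and hand any "default:" label back through DEFAULT_CASEP.  */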
1756 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1757 branch to. */
1759 static enum gimplify_status
1760 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1762 tree switch_expr = *expr_p;
1763 gimple_seq switch_body_seq = NULL;
1764 enum gimplify_status ret;
1765 tree index_type = TREE_TYPE (switch_expr);
1766 if (index_type == NULL_TREE)
1767 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1769 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1770 fb_rvalue);
1771 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1772 return ret;
1774 if (SWITCH_BODY (switch_expr))
1776 vec<tree> labels;
1777 vec<tree> saved_labels;
1778 tree default_case = NULL_TREE;
1779 gimple gimple_switch;
1781 /* If someone can be bothered to fill in the labels, they can
1782 be bothered to null out the body too. */
1783 gcc_assert (!SWITCH_LABELS (switch_expr));
1785 /* Save old labels, get new ones from body, then restore the old
1786 labels. Save all the things from the switch body to append after. */
1787 saved_labels = gimplify_ctxp->case_labels;
1788 gimplify_ctxp->case_labels.create (8);
1790 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1791 labels = gimplify_ctxp->case_labels;
1792 gimplify_ctxp->case_labels = saved_labels;
1794 preprocess_case_label_vec_for_gimple (labels, index_type,
1795 &default_case);
1797 if (!default_case)
1799 gimple new_default;
1801 default_case
1802 = build_case_label (NULL_TREE, NULL_TREE,
1803 create_artificial_label (UNKNOWN_LOCATION));
1804 new_default = gimple_build_label (CASE_LABEL (default_case));
1805 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1808 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1809 default_case, labels);
1810 gimplify_seq_add_stmt (pre_p, gimple_switch);
1811 gimplify_seq_add_seq (pre_p, switch_body_seq);
1812 labels.release ();
1814 else
1815 gcc_assert (SWITCH_LABELS (switch_expr));
1817 return GS_ALL_DONE;
1820 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1822 static enum gimplify_status
1823 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1825 struct gimplify_ctx *ctxp;
1826 gimple gimple_label;
1828 /* Invalid OpenMP programs can play Duff's Device type games with
1829 #pragma omp parallel. At least in the C front end, we don't
1830 detect such invalid branches until after gimplification. */
1831 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1832 if (ctxp->case_labels.exists ())
1833 break;
1835 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1836 ctxp->case_labels.safe_push (*expr_p);
1837 gimplify_seq_add_stmt (pre_p, gimple_label);
1839 return GS_ALL_DONE;
1842 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1843 if necessary. */
1845 tree
1846 build_and_jump (tree *label_p)
1848 if (label_p == NULL)
1849 /* If there's nowhere to jump, just fall through. */
1850 return NULL_TREE;
1852 if (*label_p == NULL_TREE)
1854 tree label = create_artificial_label (UNKNOWN_LOCATION);
1855 *label_p = label;
1858 return build1 (GOTO_EXPR, void_type_node, *label_p);
1861 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1862 This also involves building a label to jump to and communicating it to
1863 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1865 static enum gimplify_status
1866 gimplify_exit_expr (tree *expr_p)
1868 tree cond = TREE_OPERAND (*expr_p, 0);
1869 tree expr;
1871 expr = build_and_jump (&gimplify_ctxp->exit_label);
1872 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1873 *expr_p = expr;
1875 return GS_OK;
1878 /* A helper function to be called via walk_tree. Mark all labels under *TP
1879 as being forced. To be called for DECL_INITIAL of static variables. */
1881 tree
1882 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1884 if (TYPE_P (*tp))
1885 *walk_subtrees = 0;
1886 if (TREE_CODE (*tp) == LABEL_DECL)
1887 FORCED_LABEL (*tp) = 1;
1889 return NULL_TREE;
1892 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1893 different from its canonical type, wrap the whole thing inside a
1894 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1895 type.
1897 The canonical type of a COMPONENT_REF is the type of the field being
1898 referenced--unless the field is a bit-field which can be read directly
1899 in a smaller mode, in which case the canonical type is the
1900 sign-appropriate type corresponding to that mode. */
1902 static void
1903 canonicalize_component_ref (tree *expr_p)
1905 tree expr = *expr_p;
1906 tree type;
1908 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1910 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1911 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1912 else
1913 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1915 /* One could argue that all the handling below is unnecessary for the
1916 non-bitfield case, and that any type adjustment needed there should
1917 instead be treated as a FE error. */
1918 if (TREE_TYPE (expr) != type)
1920 #ifdef ENABLE_TYPES_CHECKING
1921 tree old_type = TREE_TYPE (expr);
1922 #endif
1923 int type_quals;
1925 /* We need to preserve qualifiers and propagate them from
1926 operand 0. */
1927 type_quals = TYPE_QUALS (type)
1928 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1929 if (TYPE_QUALS (type) != type_quals)
1930 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1932 /* Set the type of the COMPONENT_REF to the underlying type. */
1933 TREE_TYPE (expr) = type;
1935 #ifdef ENABLE_TYPES_CHECKING
1936 /* It is now a FE error if the conversion from the canonical
1937 type to the original expression type is not useless. */
1938 gcc_assert (useless_type_conversion_p (old_type, type));
1939 #endif
1943 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1944 to foo, embed that change in the ADDR_EXPR by converting
1945 T array[U];
1946 (T *)&array
1947 ==>
1948 &array[L]
1949 where L is the lower bound. For simplicity, only do this for constant
1950 lower bound.
1951 The constraint is that the type of &array[L] is trivially convertible
1952 to T *. */
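/* A concrete instance of the transformation above, assuming the usual C
   array layout:

     int a[10];
     ... (int *) &a ...

   becomes &a[0], whose type int * is trivially convertible to the type
   of the original cast, so the NOP conversion disappears.  */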
1954 static void
1955 canonicalize_addr_expr (tree *expr_p)
1957 tree expr = *expr_p;
1958 tree addr_expr = TREE_OPERAND (expr, 0);
1959 tree datype, ddatype, pddatype;
1961 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1962 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1963 || TREE_CODE (addr_expr) != ADDR_EXPR)
1964 return;
1966 /* The addr_expr type should be a pointer to an array. */
1967 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1968 if (TREE_CODE (datype) != ARRAY_TYPE)
1969 return;
1971 /* The pointer to element type shall be trivially convertible to
1972 the expression pointer type. */
1973 ddatype = TREE_TYPE (datype);
1974 pddatype = build_pointer_type (ddatype);
1975 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1976 pddatype))
1977 return;
1979 /* The lower bound and element sizes must be constant. */
1980 if (!TYPE_SIZE_UNIT (ddatype)
1981 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1982 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1983 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1984 return;
1986 /* All checks succeeded. Build a new node to merge the cast. */
1987 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1988 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1989 NULL_TREE, NULL_TREE);
1990 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1992 /* We can have stripped a required restrict qualifier above. */
1993 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1994 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1997 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1998 underneath as appropriate. */
2000 static enum gimplify_status
2001 gimplify_conversion (tree *expr_p)
2003 location_t loc = EXPR_LOCATION (*expr_p);
2004 gcc_assert (CONVERT_EXPR_P (*expr_p));
2006 /* Then strip away all but the outermost conversion. */
2007 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2009 /* And remove the outermost conversion if it's useless. */
2010 if (tree_ssa_useless_type_conversion (*expr_p))
2011 *expr_p = TREE_OPERAND (*expr_p, 0);
2013 /* If we still have a conversion at the toplevel,
2014 then canonicalize some constructs. */
2015 if (CONVERT_EXPR_P (*expr_p))
2017 tree sub = TREE_OPERAND (*expr_p, 0);
2019 /* If a NOP conversion is changing the type of a COMPONENT_REF
2020 expression, then canonicalize its type now in order to expose more
2021 redundant conversions. */
2022 if (TREE_CODE (sub) == COMPONENT_REF)
2023 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2025 /* If a NOP conversion is changing a pointer to array of foo
2026 to a pointer to foo, embed that change in the ADDR_EXPR. */
2027 else if (TREE_CODE (sub) == ADDR_EXPR)
2028 canonicalize_addr_expr (expr_p);
2031 /* If we have a conversion to a non-register type force the
2032 use of a VIEW_CONVERT_EXPR instead. */
2033 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2034 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2035 TREE_OPERAND (*expr_p, 0));
2037 return GS_OK;
2040 /* Nonlocal VLAs seen in the current function. */
2041 static struct pointer_set_t *nonlocal_vlas;
2043 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2044 DECL_VALUE_EXPR, and it's worth re-examining things. */
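/* A typical DECL_VALUE_EXPR case is a variable-length array: the C front
   end gives such a decl a value expression of the form *<pointer temp>,
   and every use of the decl is replaced by that expression here.  (The
   exact shape of the value expression is up to the front end.)  */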
2046 static enum gimplify_status
2047 gimplify_var_or_parm_decl (tree *expr_p)
2049 tree decl = *expr_p;
2051 /* ??? If this is a local variable, and it has not been seen in any
2052 outer BIND_EXPR, then it's probably the result of a duplicate
2053 declaration, for which we've already issued an error. It would
2054 be really nice if the front end wouldn't leak these at all.
2055 Currently the only known culprit is C++ destructors, as seen
2056 in g++.old-deja/g++.jason/binding.C. */
2057 if (TREE_CODE (decl) == VAR_DECL
2058 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2059 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2060 && decl_function_context (decl) == current_function_decl)
2062 gcc_assert (seen_error ());
2063 return GS_ERROR;
2066 /* When within an OpenMP context, notice uses of variables. */
2067 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2068 return GS_ALL_DONE;
2070 /* If the decl is an alias for another expression, substitute it now. */
2071 if (DECL_HAS_VALUE_EXPR_P (decl))
2073 tree value_expr = DECL_VALUE_EXPR (decl);
2075 /* For referenced nonlocal VLAs add a decl for debugging purposes
2076 to the current function. */
2077 if (TREE_CODE (decl) == VAR_DECL
2078 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2079 && nonlocal_vlas != NULL
2080 && TREE_CODE (value_expr) == INDIRECT_REF
2081 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2082 && decl_function_context (decl) != current_function_decl)
2084 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2085 while (ctx && ctx->region_type == ORT_WORKSHARE)
2086 ctx = ctx->outer_context;
2087 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
2089 tree copy = copy_node (decl), block;
2091 lang_hooks.dup_lang_specific_decl (copy);
2092 SET_DECL_RTL (copy, 0);
2093 TREE_USED (copy) = 1;
2094 block = DECL_INITIAL (current_function_decl);
2095 DECL_CHAIN (copy) = BLOCK_VARS (block);
2096 BLOCK_VARS (block) = copy;
2097 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2098 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2102 *expr_p = unshare_expr (value_expr);
2103 return GS_OK;
2106 return GS_ALL_DONE;
2109 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2110 node *EXPR_P.
2112 compound_lval
2113 : min_lval '[' val ']'
2114 | min_lval '.' ID
2115 | compound_lval '[' val ']'
2116 | compound_lval '.' ID
2118 This is not part of the original SIMPLE definition, which separates
2119 array and member references, but it seems reasonable to handle them
2120 together. Also, this way we don't run into problems with union
2121 aliasing; gcc requires that for accesses through a union to alias, the
2122 union reference must be explicit, which was not always the case when we
2123 were splitting up array and member refs.
2125 PRE_P points to the sequence where side effects that must happen before
2126 *EXPR_P should be stored.
2128 POST_P points to the sequence where side effects that must happen after
2129 *EXPR_P should be stored. */
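/* For instance, for a reference such as

     x = a[i].f[j];

   the walk below pushes the [j], .f and [i] nodes on EXPR_STACK and
   leaves P pointing at the innermost object "a"; variable bounds and
   sizes are gimplified first, then the base "a", and finally the indices
   i and j in source order.  (Rough sketch of the three steps described
   in the code.)  */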
2131 static enum gimplify_status
2132 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2133 fallback_t fallback)
2135 tree *p;
2136 vec<tree> expr_stack;
2137 enum gimplify_status ret = GS_ALL_DONE, tret;
2138 int i;
2139 location_t loc = EXPR_LOCATION (*expr_p);
2140 tree expr = *expr_p;
2142 /* Create a stack of the subexpressions so later we can walk them in
2143 order from inner to outer. */
2144 expr_stack.create (10);
2146 /* We can handle anything that get_inner_reference can deal with. */
2147 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2149 restart:
2150 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2151 if (TREE_CODE (*p) == INDIRECT_REF)
2152 *p = fold_indirect_ref_loc (loc, *p);
2154 if (handled_component_p (*p))
2156 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2157 additional COMPONENT_REFs. */
2158 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2159 && gimplify_var_or_parm_decl (p) == GS_OK)
2160 goto restart;
2161 else
2162 break;
2164 expr_stack.safe_push (*p);
2167 gcc_assert (expr_stack.length ());
2169 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2170 walked through and P points to the innermost expression.
2172 Java requires that we elaborate nodes in source order. That
2173 means we must gimplify the inner expression followed by each of
2174 the indices, in order. But we can't gimplify the inner
2175 expression until we deal with any variable bounds, sizes, or
2176 positions in order to deal with PLACEHOLDER_EXPRs.
2178 So we do this in three steps. First we deal with the annotations
2179 for any variables in the components, then we gimplify the base,
2180 then we gimplify any indices, from left to right. */
2181 for (i = expr_stack.length () - 1; i >= 0; i--)
2183 tree t = expr_stack[i];
2185 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2187 /* Gimplify the low bound and element type size and put them into
2188 the ARRAY_REF. If these values are set, they have already been
2189 gimplified. */
2190 if (TREE_OPERAND (t, 2) == NULL_TREE)
2192 tree low = unshare_expr (array_ref_low_bound (t));
2193 if (!is_gimple_min_invariant (low))
2195 TREE_OPERAND (t, 2) = low;
2196 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2197 post_p, is_gimple_reg,
2198 fb_rvalue);
2199 ret = MIN (ret, tret);
2202 else
2204 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2205 is_gimple_reg, fb_rvalue);
2206 ret = MIN (ret, tret);
2209 if (TREE_OPERAND (t, 3) == NULL_TREE)
2211 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2212 tree elmt_size = unshare_expr (array_ref_element_size (t));
2213 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2215 /* Divide the element size by the alignment of the element
2216 type (above). */
2217 elmt_size
2218 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2220 if (!is_gimple_min_invariant (elmt_size))
2222 TREE_OPERAND (t, 3) = elmt_size;
2223 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2224 post_p, is_gimple_reg,
2225 fb_rvalue);
2226 ret = MIN (ret, tret);
2229 else
2231 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2232 is_gimple_reg, fb_rvalue);
2233 ret = MIN (ret, tret);
2236 else if (TREE_CODE (t) == COMPONENT_REF)
2238 /* Set the field offset into T and gimplify it. */
2239 if (TREE_OPERAND (t, 2) == NULL_TREE)
2241 tree offset = unshare_expr (component_ref_field_offset (t));
2242 tree field = TREE_OPERAND (t, 1);
2243 tree factor
2244 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2246 /* Divide the offset by its alignment. */
2247 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2249 if (!is_gimple_min_invariant (offset))
2251 TREE_OPERAND (t, 2) = offset;
2252 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2253 post_p, is_gimple_reg,
2254 fb_rvalue);
2255 ret = MIN (ret, tret);
2258 else
2260 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2261 is_gimple_reg, fb_rvalue);
2262 ret = MIN (ret, tret);
2267 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2268 so as to match the min_lval predicate. Failure to do so may result
2269 in the creation of large aggregate temporaries. */
2270 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2271 fallback | fb_lvalue);
2272 ret = MIN (ret, tret);
2274 /* And finally, the indices and operands of ARRAY_REF. During this
2275 loop we also remove any useless conversions. */
2276 for (; expr_stack.length () > 0; )
2278 tree t = expr_stack.pop ();
2280 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2282 /* Gimplify the dimension. */
2283 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2285 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2286 is_gimple_val, fb_rvalue);
2287 ret = MIN (ret, tret);
2291 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2293 /* The innermost expression P may have originally had
2294 TREE_SIDE_EFFECTS set which would have caused all the outer
2295 expressions in *EXPR_P leading to P to also have had
2296 TREE_SIDE_EFFECTS set. */
2297 recalculate_side_effects (t);
2300 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2301 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2303 canonicalize_component_ref (expr_p);
2306 expr_stack.release ();
2308 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2310 return ret;
2313 /* Gimplify the self modifying expression pointed to by EXPR_P
2314 (++, --, +=, -=).
2316 PRE_P points to the list where side effects that must happen before
2317 *EXPR_P should be stored.
2319 POST_P points to the list where side effects that must happen after
2320 *EXPR_P should be stored.
2322 WANT_VALUE is nonzero iff we want to use the value of this expression
2323 in another expression.
2325 ARITH_TYPE is the type the computation should be performed in. */
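/* For example, a postfix use such as

     y = x++;

   gimplifies roughly into

     x.0 = x;
     x = x.0 + 1;
     y = x.0;

   while the prefix form simply becomes x = x + 1 followed by the use of
   x.  (The temporary name x.0 is illustrative only.)  */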
2327 enum gimplify_status
2328 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2329 bool want_value, tree arith_type)
2331 enum tree_code code;
2332 tree lhs, lvalue, rhs, t1;
2333 gimple_seq post = NULL, *orig_post_p = post_p;
2334 bool postfix;
2335 enum tree_code arith_code;
2336 enum gimplify_status ret;
2337 location_t loc = EXPR_LOCATION (*expr_p);
2339 code = TREE_CODE (*expr_p);
2341 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2342 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2344 /* Prefix or postfix? */
2345 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2346 /* Faster to treat as prefix if result is not used. */
2347 postfix = want_value;
2348 else
2349 postfix = false;
2351 /* For postfix, make sure the inner expression's post side effects
2352 are executed after side effects from this expression. */
2353 if (postfix)
2354 post_p = &post;
2356 /* Add or subtract? */
2357 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2358 arith_code = PLUS_EXPR;
2359 else
2360 arith_code = MINUS_EXPR;
2362 /* Gimplify the LHS into a GIMPLE lvalue. */
2363 lvalue = TREE_OPERAND (*expr_p, 0);
2364 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2365 if (ret == GS_ERROR)
2366 return ret;
2368 /* Extract the operands to the arithmetic operation. */
2369 lhs = lvalue;
2370 rhs = TREE_OPERAND (*expr_p, 1);
2372 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2373 that as the result value and in the postqueue operation. We also
2374 make sure to make lvalue a minimal lval, see
2375 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2376 if (postfix)
2378 if (!is_gimple_min_lval (lvalue))
2380 mark_addressable (lvalue);
2381 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2382 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2383 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2385 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2386 if (ret == GS_ERROR)
2387 return ret;
2390 if (postfix)
2391 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2393 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2394 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2396 rhs = convert_to_ptrofftype_loc (loc, rhs);
2397 if (arith_code == MINUS_EXPR)
2398 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2399 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2401 else
2402 t1 = fold_convert (TREE_TYPE (*expr_p),
2403 fold_build2 (arith_code, arith_type,
2404 fold_convert (arith_type, lhs),
2405 fold_convert (arith_type, rhs)));
2407 if (postfix)
2409 gimplify_assign (lvalue, t1, pre_p);
2410 gimplify_seq_add_seq (orig_post_p, post);
2411 *expr_p = lhs;
2412 return GS_ALL_DONE;
2414 else
2416 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2417 return GS_OK;
2421 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2423 static void
2424 maybe_with_size_expr (tree *expr_p)
2426 tree expr = *expr_p;
2427 tree type = TREE_TYPE (expr);
2428 tree size;
2430 /* If we've already wrapped this or the type is error_mark_node, we can't do
2431 anything. */
2432 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2433 || type == error_mark_node)
2434 return;
2436 /* If the size isn't known or is a constant, we have nothing to do. */
2437 size = TYPE_SIZE_UNIT (type);
2438 if (!size || TREE_CODE (size) == INTEGER_CST)
2439 return;
2441 /* Otherwise, make a WITH_SIZE_EXPR. */
2442 size = unshare_expr (size);
2443 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2444 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2447 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2448 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2449 the CALL_EXPR. */
2451 static enum gimplify_status
2452 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2454 bool (*test) (tree);
2455 fallback_t fb;
2457 /* In general, we allow lvalues for function arguments to avoid
2458 extra overhead of copying large aggregates out of even larger
2459 aggregates into temporaries only to copy the temporaries to
2460 the argument list. Make optimizers happy by pulling out to
2461 temporaries those types that fit in registers. */
2462 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2463 test = is_gimple_val, fb = fb_rvalue;
2464 else
2466 test = is_gimple_lvalue, fb = fb_either;
2467 /* Also strip a TARGET_EXPR that would force an extra copy. */
2468 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2470 tree init = TARGET_EXPR_INITIAL (*arg_p);
2471 if (init
2472 && !VOID_TYPE_P (TREE_TYPE (init)))
2473 *arg_p = init;
2477 /* If this is a variable sized type, we must remember the size. */
2478 maybe_with_size_expr (arg_p);
2480 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2481 /* Make sure arguments have the same location as the function call
2482 itself. */
2483 protected_set_expr_location (*arg_p, call_location);
2485 /* There is a sequence point before a function call. Side effects in
2486 the argument list must occur before the actual call. So, when
2487 gimplifying arguments, force gimplify_expr to use an internal
2488 post queue which is then appended to the end of PRE_P. */
2489 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2492 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2493 WANT_VALUE is true if the result of the call is desired. */
2495 static enum gimplify_status
2496 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2498 tree fndecl, parms, p, fnptrtype;
2499 enum gimplify_status ret;
2500 int i, nargs;
2501 gimple call;
2502 bool builtin_va_start_p = FALSE;
2503 location_t loc = EXPR_LOCATION (*expr_p);
2505 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2507 /* For reliable diagnostics during inlining, it is necessary that
2508 every call_expr be annotated with file and line. */
2509 if (! EXPR_HAS_LOCATION (*expr_p))
2510 SET_EXPR_LOCATION (*expr_p, input_location);
2512 /* This may be a call to a builtin function.
2514 Builtin function calls may be transformed into different
2515 (and more efficient) builtin function calls under certain
2516 circumstances. Unfortunately, gimplification can muck things
2517 up enough that the builtin expanders are not aware that certain
2518 transformations are still valid.
2520 So we attempt transformation/gimplification of the call before
2521 we gimplify the CALL_EXPR. At this time we do not manage to
2522 transform all calls in the same manner as the expanders do, but
2523 we do transform most of them. */
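/* For instance, a call like strlen ("abc") can already be folded to the
   constant 3 at this point, before the CALL_EXPR itself is gimplified.  */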
2524 fndecl = get_callee_fndecl (*expr_p);
2525 if (fndecl
2526 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2527 switch (DECL_FUNCTION_CODE (fndecl))
2529 case BUILT_IN_VA_START:
2531 builtin_va_start_p = TRUE;
2532 if (call_expr_nargs (*expr_p) < 2)
2534 error ("too few arguments to function %<va_start%>");
2535 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2536 return GS_OK;
2539 if (fold_builtin_next_arg (*expr_p, true))
2541 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2542 return GS_OK;
2544 break;
2546 case BUILT_IN_LINE:
2548 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2549 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2550 return GS_OK;
2552 case BUILT_IN_FILE:
2554 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2555 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2556 return GS_OK;
2558 case BUILT_IN_FUNCTION:
2560 const char *function;
2561 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2562 *expr_p = build_string_literal (strlen (function) + 1, function);
2563 return GS_OK;
2565 default:
2568 if (fndecl && DECL_BUILT_IN (fndecl))
2570 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2571 if (new_tree && new_tree != *expr_p)
2573 /* There was a transformation of this call which computes the
2574 same value, but in a more efficient way. Return and try
2575 again. */
2576 *expr_p = new_tree;
2577 return GS_OK;
2581 /* Remember the original function pointer type. */
2582 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2584 /* There is a sequence point before the call, so any side effects in
2585 the calling expression must occur before the actual call. Force
2586 gimplify_expr to use an internal post queue. */
2587 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2588 is_gimple_call_addr, fb_rvalue);
2590 nargs = call_expr_nargs (*expr_p);
2592 /* Get argument types for verification. */
2593 fndecl = get_callee_fndecl (*expr_p);
2594 parms = NULL_TREE;
2595 if (fndecl)
2596 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2597 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2598 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2600 if (fndecl && DECL_ARGUMENTS (fndecl))
2601 p = DECL_ARGUMENTS (fndecl);
2602 else if (parms)
2603 p = parms;
2604 else
2605 p = NULL_TREE;
2606 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2609 /* If the last argument is __builtin_va_arg_pack () and it is not
2610 passed as a named argument, decrease the number of CALL_EXPR
2611 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
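/* This arises for always_inline vararg wrappers, e.g. a call such as
   foo (fmt, __builtin_va_arg_pack ()) inside such a wrapper: the trailing
   __builtin_va_arg_pack () argument is dropped here and the flag records
   that the inliner must re-append the caller's variable arguments.  */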
2612 if (!p
2613 && i < nargs
2614 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2616 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2617 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2619 if (last_arg_fndecl
2620 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2621 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2622 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2624 tree call = *expr_p;
2626 --nargs;
2627 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2628 CALL_EXPR_FN (call),
2629 nargs, CALL_EXPR_ARGP (call));
2631 /* Copy all CALL_EXPR flags, location and block, except
2632 CALL_EXPR_VA_ARG_PACK flag. */
2633 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2634 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2635 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2636 = CALL_EXPR_RETURN_SLOT_OPT (call);
2637 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2638 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2640 /* Set CALL_EXPR_VA_ARG_PACK. */
2641 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2645 /* Finally, gimplify the function arguments. */
2646 if (nargs > 0)
2648 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2649 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2650 PUSH_ARGS_REVERSED ? i-- : i++)
2652 enum gimplify_status t;
2654 /* Avoid gimplifying the second argument to va_start, which needs to
2655 be the plain PARM_DECL. */
2656 if ((i != 1) || !builtin_va_start_p)
2658 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2659 EXPR_LOCATION (*expr_p));
2661 if (t == GS_ERROR)
2662 ret = GS_ERROR;
2667 /* Verify the function result. */
2668 if (want_value && fndecl
2669 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2671 error_at (loc, "using result of function returning %<void%>");
2672 ret = GS_ERROR;
2675 /* Try this again in case gimplification exposed something. */
2676 if (ret != GS_ERROR)
2678 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2680 if (new_tree && new_tree != *expr_p)
2682 /* There was a transformation of this call which computes the
2683 same value, but in a more efficient way. Return and try
2684 again. */
2685 *expr_p = new_tree;
2686 return GS_OK;
2689 else
2691 *expr_p = error_mark_node;
2692 return GS_ERROR;
2695 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2696 call. This allows us to eliminate redundant or useless
2697 calls to "const" functions. */
2698 if (TREE_CODE (*expr_p) == CALL_EXPR)
2700 int flags = call_expr_flags (*expr_p);
2701 if (flags & (ECF_CONST | ECF_PURE)
2702 /* An infinite loop is considered a side effect. */
2703 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2704 TREE_SIDE_EFFECTS (*expr_p) = 0;
2707 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2708 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2709 form and delegate the creation of a GIMPLE_CALL to
2710 gimplify_modify_expr. This is always possible because when
2711 WANT_VALUE is true, the caller wants the result of this call into
2712 a temporary, which means that we will emit an INIT_EXPR in
2713 internal_get_tmp_var which will then be handled by
2714 gimplify_modify_expr. */
2715 if (!want_value)
2717 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2718 have to do is replicate it as a GIMPLE_CALL tuple. */
2719 gimple_stmt_iterator gsi;
2720 call = gimple_build_call_from_tree (*expr_p);
2721 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2722 gimplify_seq_add_stmt (pre_p, call);
2723 gsi = gsi_last (*pre_p);
2724 fold_stmt (&gsi);
2725 *expr_p = NULL_TREE;
2727 else
2728 /* Remember the original function type. */
2729 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2730 CALL_EXPR_FN (*expr_p));
2732 return ret;
2735 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2736 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2738 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2739 condition is true or false, respectively. If null, we should generate
2740 our own to skip over the evaluation of this specific expression.
2742 LOCUS is the source location of the COND_EXPR.
2744 This function is the tree equivalent of do_jump.
2746 shortcut_cond_r should only be called by shortcut_cond_expr. */
2748 static tree
2749 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2750 location_t locus)
2752 tree local_label = NULL_TREE;
2753 tree t, expr = NULL;
2755 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2756 retain the shortcut semantics. Just insert the gotos here;
2757 shortcut_cond_expr will append the real blocks later. */
2758 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2760 location_t new_locus;
2762 /* Turn if (a && b) into
2764 if (a); else goto no;
2765 if (b) goto yes; else goto no;
2766 (no:) */
2768 if (false_label_p == NULL)
2769 false_label_p = &local_label;
2771 /* Keep the original source location on the first 'if'. */
2772 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2773 append_to_statement_list (t, &expr);
2775 /* Set the source location of the && on the second 'if'. */
2776 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2777 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2778 new_locus);
2779 append_to_statement_list (t, &expr);
2781 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2783 location_t new_locus;
2785 /* Turn if (a || b) into
2787 if (a) goto yes;
2788 if (b) goto yes; else goto no;
2789 (yes:) */
2791 if (true_label_p == NULL)
2792 true_label_p = &local_label;
2794 /* Keep the original source location on the first 'if'. */
2795 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2796 append_to_statement_list (t, &expr);
2798 /* Set the source location of the || on the second 'if'. */
2799 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2800 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2801 new_locus);
2802 append_to_statement_list (t, &expr);
2804 else if (TREE_CODE (pred) == COND_EXPR
2805 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2806 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2808 location_t new_locus;
2810 /* As long as we're messing with gotos, turn if (a ? b : c) into
2811 if (a)
2812 if (b) goto yes; else goto no;
2813 else
2814 if (c) goto yes; else goto no;
2816 Don't do this if one of the arms has void type, which can happen
2817 in C++ when the arm is throw. */
2819 /* Keep the original source location on the first 'if'. Set the source
2820 location of the ? on the second 'if'. */
2821 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2822 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2823 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2824 false_label_p, locus),
2825 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2826 false_label_p, new_locus));
2828 else
2830 expr = build3 (COND_EXPR, void_type_node, pred,
2831 build_and_jump (true_label_p),
2832 build_and_jump (false_label_p));
2833 SET_EXPR_LOCATION (expr, locus);
2836 if (local_label)
2838 t = build1 (LABEL_EXPR, void_type_node, local_label);
2839 append_to_statement_list (t, &expr);
2842 return expr;
2845 /* Given a conditional expression EXPR with short-circuit boolean
2846 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2847 predicate apart into the equivalent sequence of conditionals. */
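/* As a simplified illustration,

     if (a && b) x (); else y ();

   ends up as something like

     if (a) ; else goto no;
     if (b) ; else goto no;
     x (); goto end;
     no: y ();
     end:
   */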
2849 static tree
2850 shortcut_cond_expr (tree expr)
2852 tree pred = TREE_OPERAND (expr, 0);
2853 tree then_ = TREE_OPERAND (expr, 1);
2854 tree else_ = TREE_OPERAND (expr, 2);
2855 tree true_label, false_label, end_label, t;
2856 tree *true_label_p;
2857 tree *false_label_p;
2858 bool emit_end, emit_false, jump_over_else;
2859 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2860 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2862 /* First do simple transformations. */
2863 if (!else_se)
2865 /* If there is no 'else', turn
2866 if (a && b) then c
2867 into
2868 if (a) if (b) then c. */
2869 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2871 /* Keep the original source location on the first 'if'. */
2872 location_t locus = EXPR_LOC_OR_HERE (expr);
2873 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2874 /* Set the source location of the && on the second 'if'. */
2875 if (EXPR_HAS_LOCATION (pred))
2876 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2877 then_ = shortcut_cond_expr (expr);
2878 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2879 pred = TREE_OPERAND (pred, 0);
2880 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2881 SET_EXPR_LOCATION (expr, locus);
2885 if (!then_se)
2887 /* If there is no 'then', turn
2888 if (a || b); else d
2889 into
2890 if (a); else if (b); else d. */
2891 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2893 /* Keep the original source location on the first 'if'. */
2894 location_t locus = EXPR_LOC_OR_HERE (expr);
2895 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2896 /* Set the source location of the || on the second 'if'. */
2897 if (EXPR_HAS_LOCATION (pred))
2898 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2899 else_ = shortcut_cond_expr (expr);
2900 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2901 pred = TREE_OPERAND (pred, 0);
2902 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2903 SET_EXPR_LOCATION (expr, locus);
2907 /* If we're done, great. */
2908 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2909 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2910 return expr;
2912 /* Otherwise we need to mess with gotos. Change
2913 if (a) c; else d;
2914 to
2915 if (a); else goto no;
2916 c; goto end;
2917 no: d; end:
2918 and recursively gimplify the condition. */
2920 true_label = false_label = end_label = NULL_TREE;
2922 /* If our arms just jump somewhere, hijack those labels so we don't
2923 generate jumps to jumps. */
2925 if (then_
2926 && TREE_CODE (then_) == GOTO_EXPR
2927 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2929 true_label = GOTO_DESTINATION (then_);
2930 then_ = NULL;
2931 then_se = false;
2934 if (else_
2935 && TREE_CODE (else_) == GOTO_EXPR
2936 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2938 false_label = GOTO_DESTINATION (else_);
2939 else_ = NULL;
2940 else_se = false;
2943 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2944 if (true_label)
2945 true_label_p = &true_label;
2946 else
2947 true_label_p = NULL;
2949 /* The 'else' branch also needs a label if it contains interesting code. */
2950 if (false_label || else_se)
2951 false_label_p = &false_label;
2952 else
2953 false_label_p = NULL;
2955 /* If there was nothing else in our arms, just forward the label(s). */
2956 if (!then_se && !else_se)
2957 return shortcut_cond_r (pred, true_label_p, false_label_p,
2958 EXPR_LOC_OR_HERE (expr));
2960 /* If our last subexpression already has a terminal label, reuse it. */
2961 if (else_se)
2962 t = expr_last (else_);
2963 else if (then_se)
2964 t = expr_last (then_);
2965 else
2966 t = NULL;
2967 if (t && TREE_CODE (t) == LABEL_EXPR)
2968 end_label = LABEL_EXPR_LABEL (t);
2970 /* If we don't care about jumping to the 'else' branch, jump to the end
2971 if the condition is false. */
2972 if (!false_label_p)
2973 false_label_p = &end_label;
2975 /* We only want to emit these labels if we aren't hijacking them. */
2976 emit_end = (end_label == NULL_TREE);
2977 emit_false = (false_label == NULL_TREE);
2979 /* We only emit the jump over the else clause if we have to--if the
2980 then clause may fall through. Otherwise we can wind up with a
2981 useless jump and a useless label at the end of gimplified code,
2982 which will cause us to think that this conditional as a whole
2983 falls through even if it doesn't. If we then inline a function
2984 which ends with such a condition, that can cause us to issue an
2985 inappropriate warning about control reaching the end of a
2986 non-void function. */
2987 jump_over_else = block_may_fallthru (then_);
2989 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2990 EXPR_LOC_OR_HERE (expr));
2992 expr = NULL;
2993 append_to_statement_list (pred, &expr);
2995 append_to_statement_list (then_, &expr);
2996 if (else_se)
2998 if (jump_over_else)
3000 tree last = expr_last (expr);
3001 t = build_and_jump (&end_label);
3002 if (EXPR_HAS_LOCATION (last))
3003 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3004 append_to_statement_list (t, &expr);
3006 if (emit_false)
3008 t = build1 (LABEL_EXPR, void_type_node, false_label);
3009 append_to_statement_list (t, &expr);
3011 append_to_statement_list (else_, &expr);
3013 if (emit_end && end_label)
3015 t = build1 (LABEL_EXPR, void_type_node, end_label);
3016 append_to_statement_list (t, &expr);
3019 return expr;
3022 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
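/* For example, a comparison such as a < b coming from the C front end has
   type int; here its type is simply rewritten to boolean_type_node, and
   for TRUTH_* operators the operands are boolified recursively as well.  */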
3024 tree
3025 gimple_boolify (tree expr)
3027 tree type = TREE_TYPE (expr);
3028 location_t loc = EXPR_LOCATION (expr);
3030 if (TREE_CODE (expr) == NE_EXPR
3031 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3032 && integer_zerop (TREE_OPERAND (expr, 1)))
3034 tree call = TREE_OPERAND (expr, 0);
3035 tree fn = get_callee_fndecl (call);
3037 /* For __builtin_expect ((long) (x), y) recurse into x as well
3038 if x is truth_value_p. */
3039 if (fn
3040 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3041 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3042 && call_expr_nargs (call) == 2)
3044 tree arg = CALL_EXPR_ARG (call, 0);
3045 if (arg)
3047 if (TREE_CODE (arg) == NOP_EXPR
3048 && TREE_TYPE (arg) == TREE_TYPE (call))
3049 arg = TREE_OPERAND (arg, 0);
3050 if (truth_value_p (TREE_CODE (arg)))
3052 arg = gimple_boolify (arg);
3053 CALL_EXPR_ARG (call, 0)
3054 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3060 switch (TREE_CODE (expr))
3062 case TRUTH_AND_EXPR:
3063 case TRUTH_OR_EXPR:
3064 case TRUTH_XOR_EXPR:
3065 case TRUTH_ANDIF_EXPR:
3066 case TRUTH_ORIF_EXPR:
3067 /* Also boolify the arguments of truth exprs. */
3068 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3069 /* FALLTHRU */
3071 case TRUTH_NOT_EXPR:
3072 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3074 /* These expressions always produce boolean results. */
3075 if (TREE_CODE (type) != BOOLEAN_TYPE)
3076 TREE_TYPE (expr) = boolean_type_node;
3077 return expr;
3079 default:
3080 if (COMPARISON_CLASS_P (expr))
3082 /* These expressions always produce boolean results. */
3083 if (TREE_CODE (type) != BOOLEAN_TYPE)
3084 TREE_TYPE (expr) = boolean_type_node;
3085 return expr;
3087 /* Other expressions that get here must have boolean values, but
3088 might need to be converted to the appropriate mode. */
3089 if (TREE_CODE (type) == BOOLEAN_TYPE)
3090 return expr;
3091 return fold_convert_loc (loc, boolean_type_node, expr);
3095 /* Given a conditional expression *EXPR_P without side effects, gimplify
3096 its operands. New statements are inserted into PRE_P. */
3098 static enum gimplify_status
3099 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3101 tree expr = *expr_p, cond;
3102 enum gimplify_status ret, tret;
3103 enum tree_code code;
3105 cond = gimple_boolify (COND_EXPR_COND (expr));
3107 /* We need to handle && and || specially, as their gimplification
3108 creates pure COND_EXPRs, which would otherwise lead us back here in an infinite cycle. */
3109 code = TREE_CODE (cond);
3110 if (code == TRUTH_ANDIF_EXPR)
3111 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3112 else if (code == TRUTH_ORIF_EXPR)
3113 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3114 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3115 COND_EXPR_COND (*expr_p) = cond;
3117 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3118 is_gimple_val, fb_rvalue);
3119 ret = MIN (ret, tret);
3120 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3121 is_gimple_val, fb_rvalue);
3123 return MIN (ret, tret);
3126 /* Return true if evaluating EXPR could trap.
3127 EXPR is GENERIC, while tree_could_trap_p can be called
3128 only on GIMPLE. */
3130 static bool
3131 generic_expr_could_trap_p (tree expr)
3133 unsigned i, n;
3135 if (!expr || is_gimple_val (expr))
3136 return false;
3138 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3139 return true;
3141 n = TREE_OPERAND_LENGTH (expr);
3142 for (i = 0; i < n; i++)
3143 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3144 return true;
3146 return false;
3149 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3150 into
3152 if (p) if (p)
3153 t1 = a; a;
3154 else or else
3155 t1 = b; b;
3156 t1;
3158 The second form is used when *EXPR_P is of type void.
3160 PRE_P points to the list where side effects that must happen before
3161 *EXPR_P should be stored. */
3163 static enum gimplify_status
3164 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3166 tree expr = *expr_p;
3167 tree type = TREE_TYPE (expr);
3168 location_t loc = EXPR_LOCATION (expr);
3169 tree tmp, arm1, arm2;
3170 enum gimplify_status ret;
3171 tree label_true, label_false, label_cont;
3172 bool have_then_clause_p, have_else_clause_p;
3173 gimple gimple_cond;
3174 enum tree_code pred_code;
3175 gimple_seq seq = NULL;
3177 /* If this COND_EXPR has a value, copy the values into a temporary within
3178 the arms. */
3179 if (!VOID_TYPE_P (type))
3181 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3182 tree result;
3184 /* If either an rvalue is ok or we do not require an lvalue, create the
3185 temporary. But we cannot do that if the type is addressable. */
3186 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3187 && !TREE_ADDRESSABLE (type))
3189 if (gimplify_ctxp->allow_rhs_cond_expr
3190 /* If either branch has side effects or could trap, it can't be
3191 evaluated unconditionally. */
3192 && !TREE_SIDE_EFFECTS (then_)
3193 && !generic_expr_could_trap_p (then_)
3194 && !TREE_SIDE_EFFECTS (else_)
3195 && !generic_expr_could_trap_p (else_))
3196 return gimplify_pure_cond_expr (expr_p, pre_p);
3198 tmp = create_tmp_var (type, "iftmp");
3199 result = tmp;
3202 /* Otherwise, only create and copy references to the values. */
3203 else
3205 type = build_pointer_type (type);
3207 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3208 then_ = build_fold_addr_expr_loc (loc, then_);
3210 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3211 else_ = build_fold_addr_expr_loc (loc, else_);
3213 expr
3214 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3216 tmp = create_tmp_var (type, "iftmp");
3217 result = build_simple_mem_ref_loc (loc, tmp);
3220 /* Build the new then clause, `tmp = then_;'. But don't build the
3221 assignment if the value is void; in C++ it can be if it's a throw. */
3222 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3223 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3225 /* Similarly, build the new else clause, `tmp = else_;'. */
3226 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3227 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3229 TREE_TYPE (expr) = void_type_node;
3230 recalculate_side_effects (expr);
3232 /* Move the COND_EXPR to the prequeue. */
3233 gimplify_stmt (&expr, pre_p);
3235 *expr_p = result;
3236 return GS_ALL_DONE;
3239 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3240 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3241 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3242 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3244 /* Make sure the condition has BOOLEAN_TYPE. */
3245 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3247 /* Break apart && and || conditions. */
3248 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3249 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3251 expr = shortcut_cond_expr (expr);
3253 if (expr != *expr_p)
3255 *expr_p = expr;
3257 /* We can't rely on gimplify_expr to re-gimplify the expanded
3258 form properly, as cleanups might cause the target labels to be
3259 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3260 set up a conditional context. */
3261 gimple_push_condition ();
3262 gimplify_stmt (expr_p, &seq);
3263 gimple_pop_condition (pre_p);
3264 gimple_seq_add_seq (pre_p, seq);
3266 return GS_ALL_DONE;
3270 /* Now do the normal gimplification. */
3272 /* Gimplify condition. */
3273 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3274 fb_rvalue);
3275 if (ret == GS_ERROR)
3276 return GS_ERROR;
3277 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3279 gimple_push_condition ();
3281 have_then_clause_p = have_else_clause_p = false;
3282 if (TREE_OPERAND (expr, 1) != NULL
3283 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3284 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3285 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3286 == current_function_decl)
3287 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3288 have different locations, otherwise we end up with incorrect
3289 location information on the branches. */
3290 && (optimize
3291 || !EXPR_HAS_LOCATION (expr)
3292 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3293 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3295 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3296 have_then_clause_p = true;
3298 else
3299 label_true = create_artificial_label (UNKNOWN_LOCATION);
3300 if (TREE_OPERAND (expr, 2) != NULL
3301 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3302 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3303 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3304 == current_function_decl)
3305 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3306 have different locations, otherwise we end up with incorrect
3307 location information on the branches. */
3308 && (optimize
3309 || !EXPR_HAS_LOCATION (expr)
3310 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3311 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3313 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3314 have_else_clause_p = true;
3316 else
3317 label_false = create_artificial_label (UNKNOWN_LOCATION);
3319 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3320 &arm2);
3322 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3323 label_false);
3325 gimplify_seq_add_stmt (&seq, gimple_cond);
3326 label_cont = NULL_TREE;
3327 if (!have_then_clause_p)
3329 /* For if (...) {} else { code; } put label_true after
3330 the else block. */
3331 if (TREE_OPERAND (expr, 1) == NULL_TREE
3332 && !have_else_clause_p
3333 && TREE_OPERAND (expr, 2) != NULL_TREE)
3334 label_cont = label_true;
3335 else
3337 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3338 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3339 /* For if (...) { code; } else {} or
3340 if (...) { code; } else goto label; or
3341 if (...) { code; return; } else { ... }
3342 label_cont isn't needed. */
3343 if (!have_else_clause_p
3344 && TREE_OPERAND (expr, 2) != NULL_TREE
3345 && gimple_seq_may_fallthru (seq))
3347 gimple g;
3348 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3350 g = gimple_build_goto (label_cont);
3352 /* GIMPLE_COND's are very low level; they have embedded
3353 gotos. This particular embedded goto should not be marked
3354 with the location of the original COND_EXPR, as it would
3355 correspond to the COND_EXPR's condition, not the ELSE or the
3356 THEN arms. To avoid marking it with the wrong location, flag
3357 it as "no location". */
3358 gimple_set_do_not_emit_location (g);
3360 gimplify_seq_add_stmt (&seq, g);
3364 if (!have_else_clause_p)
3366 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3367 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3369 if (label_cont)
3370 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3372 gimple_pop_condition (pre_p);
3373 gimple_seq_add_seq (pre_p, seq);
3375 if (ret == GS_ERROR)
3376 ; /* Do nothing. */
3377 else if (have_then_clause_p || have_else_clause_p)
3378 ret = GS_ALL_DONE;
3379 else
3381 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3382 expr = TREE_OPERAND (expr, 0);
3383 gimplify_stmt (&expr, pre_p);
3386 *expr_p = NULL;
3387 return ret;
3390 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3391 to be marked addressable.
3393 We cannot rely on such an expression being directly markable if a temporary
3394 has been created by the gimplification. In this case, we create another
3395 temporary and initialize it with a copy, which will become a store after we
3396 mark it addressable. This can happen if the front-end passed us something
3397 that it could not mark addressable yet, like a Fortran pass-by-reference
3398 parameter (int) floatvar. */
3400 static void
3401 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3403 while (handled_component_p (*expr_p))
3404 expr_p = &TREE_OPERAND (*expr_p, 0);
3405 if (is_gimple_reg (*expr_p))
3406 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3409 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3410 a call to __builtin_memcpy. */
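/* E.g. an assignment between two objects of variable size N (in bytes)

     a = b;

   is emitted roughly as

     __builtin_memcpy (&a, &b, N);

   with the call's return value re-dereferenced when the value of the
   assignment itself is needed.  */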
3412 static enum gimplify_status
3413 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3414 gimple_seq *seq_p)
3416 tree t, to, to_ptr, from, from_ptr;
3417 gimple gs;
3418 location_t loc = EXPR_LOCATION (*expr_p);
3420 to = TREE_OPERAND (*expr_p, 0);
3421 from = TREE_OPERAND (*expr_p, 1);
3423 /* Mark the RHS addressable. Beware that it may not be possible to do so
3424 directly if a temporary has been created by the gimplification. */
3425 prepare_gimple_addressable (&from, seq_p);
3427 mark_addressable (from);
3428 from_ptr = build_fold_addr_expr_loc (loc, from);
3429 gimplify_arg (&from_ptr, seq_p, loc);
3431 mark_addressable (to);
3432 to_ptr = build_fold_addr_expr_loc (loc, to);
3433 gimplify_arg (&to_ptr, seq_p, loc);
3435 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3437 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3439 if (want_value)
3441 /* tmp = memcpy() */
3442 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3443 gimple_call_set_lhs (gs, t);
3444 gimplify_seq_add_stmt (seq_p, gs);
3446 *expr_p = build_simple_mem_ref (t);
3447 return GS_ALL_DONE;
3450 gimplify_seq_add_stmt (seq_p, gs);
3451 *expr_p = NULL;
3452 return GS_ALL_DONE;
3455 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3456 a call to __builtin_memset. In this case we know that the RHS is
3457 a CONSTRUCTOR with an empty element list. */
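/* E.g. clearing an object of variable size N with an empty constructor

     a = (TYPE) {};

   is emitted roughly as __builtin_memset (&a, 0, N).  */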
3459 static enum gimplify_status
3460 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3461 gimple_seq *seq_p)
3463 tree t, from, to, to_ptr;
3464 gimple gs;
3465 location_t loc = EXPR_LOCATION (*expr_p);
3467 /* Assert our assumptions, to abort instead of producing wrong code
3468 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3469 not be immediately exposed. */
3470 from = TREE_OPERAND (*expr_p, 1);
3471 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3472 from = TREE_OPERAND (from, 0);
3474 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3475 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3477 /* Now proceed. */
3478 to = TREE_OPERAND (*expr_p, 0);
3480 to_ptr = build_fold_addr_expr_loc (loc, to);
3481 gimplify_arg (&to_ptr, seq_p, loc);
3482 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3484 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3486 if (want_value)
3488 /* tmp = memset() */
3489 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3490 gimple_call_set_lhs (gs, t);
3491 gimplify_seq_add_stmt (seq_p, gs);
3493 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3494 return GS_ALL_DONE;
3497 gimplify_seq_add_stmt (seq_p, gs);
3498 *expr_p = NULL;
3499 return GS_ALL_DONE;
3502 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3503 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3504 assignment. Return non-null if we detect a potential overlap. */
3506 struct gimplify_init_ctor_preeval_data
3508 /* The base decl of the lhs object. May be NULL, in which case we
3509 have to assume the lhs is indirect. */
3510 tree lhs_base_decl;
3512 /* The alias set of the lhs object. */
3513 alias_set_type lhs_alias_set;
3516 static tree
3517 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3519 struct gimplify_init_ctor_preeval_data *data
3520 = (struct gimplify_init_ctor_preeval_data *) xdata;
3521 tree t = *tp;
3523 /* If we find the base object, obviously we have overlap. */
3524 if (data->lhs_base_decl == t)
3525 return t;
3527 /* If the constructor component is indirect, determine if we have a
3528 potential overlap with the lhs. The only bits of information we
3529 have to go on at this point are addressability and alias sets. */
3530 if ((INDIRECT_REF_P (t)
3531 || TREE_CODE (t) == MEM_REF)
3532 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3533 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3534 return t;
3536 /* If the constructor component is a call, determine if it can hide a
3537 potential overlap with the lhs through an INDIRECT_REF like above.
3538 ??? Ugh - this is completely broken. In fact this whole analysis
3539 doesn't look conservative. */
3540 if (TREE_CODE (t) == CALL_EXPR)
3542 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3544 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3545 if (POINTER_TYPE_P (TREE_VALUE (type))
3546 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3547 && alias_sets_conflict_p (data->lhs_alias_set,
3548 get_alias_set
3549 (TREE_TYPE (TREE_VALUE (type)))))
3550 return t;
3553 if (IS_TYPE_OR_DECL_P (t))
3554 *walk_subtrees = 0;
3555 return NULL;
3558 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3559 force values that overlap with the lhs (as described by *DATA)
3560 into temporaries. */
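/* For example, in

     a = (struct S) { .x = f (), .y = a.x };

   the initializer refers to "a" itself; since the stores generated below
   may clobber a.x before it is read, its value is forced into a temporary
   up front.  (Illustrative sketch.)  */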
3562 static void
3563 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3564 struct gimplify_init_ctor_preeval_data *data)
3566 enum gimplify_status one;
3568 /* If the value is constant, then there's nothing to pre-evaluate. */
3569 if (TREE_CONSTANT (*expr_p))
3571 /* Ensure it does not have side effects, it might contain a reference to
3572 the object we're initializing. */
3573 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3574 return;
3577 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3578 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3579 return;
3581 /* Recurse for nested constructors. */
3582 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3584 unsigned HOST_WIDE_INT ix;
3585 constructor_elt *ce;
3586 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3588 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3589 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3591 return;
3594 /* If this is a variable sized type, we must remember the size. */
3595 maybe_with_size_expr (expr_p);
3597 /* Gimplify the constructor element to something appropriate for the rhs
3598 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3599 the gimplifier will consider this a store to memory. Doing this
3600 gimplification now means that we won't have to deal with complicated
3601 language-specific trees, nor trees like SAVE_EXPR that can induce
3602 exponential search behavior. */
3603 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3604 if (one == GS_ERROR)
3606 *expr_p = NULL;
3607 return;
3610 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3611 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3612 always be true for all scalars, since is_gimple_mem_rhs insists on a
3613 temporary variable for them. */
3614 if (DECL_P (*expr_p))
3615 return;
3617 /* If this is of variable size, we have no choice but to assume it doesn't
3618 overlap since we can't make a temporary for it. */
3619 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3620 return;
3622 /* Otherwise, we must search for overlap ... */
3623 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3624 return;
3626 /* ... and if found, force the value into a temporary. */
3627 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3630 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3631 a RANGE_EXPR in a CONSTRUCTOR for an array.
3633 var = lower;
3634 loop_entry:
3635 object[var] = value;
3636 if (var == upper)
3637 goto loop_exit;
3638 var = var + 1;
3639 goto loop_entry;
3640 loop_exit:
3642 We increment var _after_ the loop exit check because we might otherwise
3643 fail if upper == TYPE_MAX_VALUE (type for upper).
3645 Note that we never have to deal with SAVE_EXPRs here, because this has
3646 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3648 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3649 gimple_seq *, bool);
3651 static void
3652 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3653 tree value, tree array_elt_type,
3654 gimple_seq *pre_p, bool cleared)
3656 tree loop_entry_label, loop_exit_label, fall_thru_label;
3657 tree var, var_type, cref, tmp;
3659 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3660 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3661 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3663 /* Create and initialize the index variable. */
3664 var_type = TREE_TYPE (upper);
3665 var = create_tmp_var (var_type, NULL);
3666 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3668 /* Add the loop entry label. */
3669 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3671 /* Build the reference. */
3672 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3673 var, NULL_TREE, NULL_TREE);
3675 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3676 the store. Otherwise just assign value to the reference. */
3678 if (TREE_CODE (value) == CONSTRUCTOR)
3679    /* NB we might have to call ourselves recursively through
3680 gimplify_init_ctor_eval if the value is a constructor. */
3681 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3682 pre_p, cleared);
3683 else
3684 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3686 /* We exit the loop when the index var is equal to the upper bound. */
3687 gimplify_seq_add_stmt (pre_p,
3688 gimple_build_cond (EQ_EXPR, var, upper,
3689 loop_exit_label, fall_thru_label));
3691 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3693 /* Otherwise, increment the index var... */
3694 tmp = build2 (PLUS_EXPR, var_type, var,
3695 fold_convert (var_type, integer_one_node));
3696 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3698 /* ...and jump back to the loop entry. */
3699 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3701 /* Add the loop exit label. */
3702 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
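/* Illustrative sketch, not part of the original source: a GNU C range
   initializer such as

     int a[16] = { [4 ... 7] = 9 };

   can reach the routine above with LOWER == 4, UPPER == 7 and VALUE == 9,
   and is lowered to roughly

     var = 4;
   loop_entry:
     a[var] = 9;
     if (var == 7) goto loop_exit;
     var = var + 1;
     goto loop_entry;
   loop_exit:;

   matching the loop shape documented before the function.  */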
3705 /* Return true if FDECL is accessing a field that is zero sized. */
3707 static bool
3708 zero_sized_field_decl (const_tree fdecl)
3710 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3711 && integer_zerop (DECL_SIZE (fdecl)))
3712 return true;
3713 return false;
3716 /* Return true if TYPE is zero sized. */
3718 static bool
3719 zero_sized_type (const_tree type)
3721 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3722 && integer_zerop (TYPE_SIZE (type)))
3723 return true;
3724 return false;
3727 /* A subroutine of gimplify_init_constructor. Generate individual
3728 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3729 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3730 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3731 zeroed first. */
3733 static void
3734 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3735 gimple_seq *pre_p, bool cleared)
3737 tree array_elt_type = NULL;
3738 unsigned HOST_WIDE_INT ix;
3739 tree purpose, value;
3741 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3742 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3744 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3746 tree cref;
3748 /* NULL values are created above for gimplification errors. */
3749 if (value == NULL)
3750 continue;
3752 if (cleared && initializer_zerop (value))
3753 continue;
3755 /* ??? Here's to hoping the front end fills in all of the indices,
3756 so we don't have to figure out what's missing ourselves. */
3757 gcc_assert (purpose);
3759 /* Skip zero-sized fields, unless value has side-effects. This can
3760 happen with calls to functions returning a zero-sized type, which
3761 we shouldn't discard. As a number of downstream passes don't
3762 expect sets of zero-sized fields, we rely on the gimplification of
3763 the MODIFY_EXPR we make below to drop the assignment statement. */
3764 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3765 continue;
3767 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3768 whole range. */
3769 if (TREE_CODE (purpose) == RANGE_EXPR)
3771 tree lower = TREE_OPERAND (purpose, 0);
3772 tree upper = TREE_OPERAND (purpose, 1);
3774 /* If the lower bound is equal to upper, just treat it as if
3775 upper was the index. */
3776 if (simple_cst_equal (lower, upper))
3777 purpose = upper;
3778 else
3780 gimplify_init_ctor_eval_range (object, lower, upper, value,
3781 array_elt_type, pre_p, cleared);
3782 continue;
3786 if (array_elt_type)
3788 /* Do not use bitsizetype for ARRAY_REF indices. */
3789 if (TYPE_DOMAIN (TREE_TYPE (object)))
3790 purpose
3791 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3792 purpose);
3793 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3794 purpose, NULL_TREE, NULL_TREE);
3796 else
3798 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3799 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3800 unshare_expr (object), purpose, NULL_TREE);
3803 if (TREE_CODE (value) == CONSTRUCTOR
3804 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3805 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3806 pre_p, cleared);
3807 else
3809 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3810 gimplify_and_add (init, pre_p);
3811 ggc_free (init);
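/* Illustrative sketch, not taken from the testsuite: an aggregate
   initializer such as

     struct point { int x, y; };
     struct point p = { .x = 1, .y = 2 };

   that is neither promoted to a static constant nor block-copied is
   broken up here into the element-wise stores

     p.x = 1;
     p.y = 2;

   and, with CLEARED true, any zero-valued elements are simply skipped
   because the whole object has already been zeroed.  */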
3816 /* Return the appropriate RHS predicate for this LHS. */
3818 gimple_predicate
3819 rhs_predicate_for (tree lhs)
3821 if (is_gimple_reg (lhs))
3822 return is_gimple_reg_rhs_or_call;
3823 else
3824 return is_gimple_mem_rhs_or_call;
3827 /* Gimplify a C99 compound literal expression. This just means adding
3828 the DECL_EXPR before the current statement and using its anonymous
3829 decl instead. */
3831 static enum gimplify_status
3832 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3833 bool (*gimple_test_f) (tree),
3834 fallback_t fallback)
3836 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3837 tree decl = DECL_EXPR_DECL (decl_s);
3838 tree init = DECL_INITIAL (decl);
3839 /* Mark the decl as addressable if the compound literal
3840 expression is addressable now, otherwise it is marked too late
3841 after we gimplify the initialization expression. */
3842 if (TREE_ADDRESSABLE (*expr_p))
3843 TREE_ADDRESSABLE (decl) = 1;
3844    /* Otherwise, if we don't need an lvalue and have a literal, directly
3845 substitute it. Check if it matches the gimple predicate, as
3846 otherwise we'd generate a new temporary, and we can as well just
3847 use the decl we already have. */
3848 else if (!TREE_ADDRESSABLE (decl)
3849 && init
3850 && (fallback & fb_lvalue) == 0
3851 && gimple_test_f (init))
3853 *expr_p = init;
3854 return GS_OK;
3857 /* Preliminarily mark non-addressed complex variables as eligible
3858 for promotion to gimple registers. We'll transform their uses
3859 as we find them. */
3860 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3861 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3862 && !TREE_THIS_VOLATILE (decl)
3863 && !needs_to_live_in_memory (decl))
3864 DECL_GIMPLE_REG_P (decl) = 1;
3866 /* If the decl is not addressable, then it is being used in some
3867 expression or on the right hand side of a statement, and it can
3868 be put into a readonly data section. */
3869 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3870 TREE_READONLY (decl) = 1;
3872 /* This decl isn't mentioned in the enclosing block, so add it to the
3873 list of temps. FIXME it seems a bit of a kludge to say that
3874 anonymous artificial vars aren't pushed, but everything else is. */
3875 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3876 gimple_add_tmp_var (decl);
3878 gimplify_and_add (decl_s, pre_p);
3879 *expr_p = decl;
3880 return GS_OK;
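/* Illustrative sketch; D.1234 stands for the anonymous compound-literal
   decl and is a made-up name.  A C99 compound literal used as an rvalue,

     use ((struct S) { 1, 2 });

   is handled above by emitting the DECL_EXPR ahead of the current
   statement and referring to the decl instead, approximately

     struct S D.1234 = { 1, 2 };
     use (D.1234);

   unless the literal can be substituted by its initializer directly.  */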
3883 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3884 return a new CONSTRUCTOR if something changed. */
3886 static tree
3887 optimize_compound_literals_in_ctor (tree orig_ctor)
3889 tree ctor = orig_ctor;
3890 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3891 unsigned int idx, num = vec_safe_length (elts);
3893 for (idx = 0; idx < num; idx++)
3895 tree value = (*elts)[idx].value;
3896 tree newval = value;
3897 if (TREE_CODE (value) == CONSTRUCTOR)
3898 newval = optimize_compound_literals_in_ctor (value);
3899 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3901 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3902 tree decl = DECL_EXPR_DECL (decl_s);
3903 tree init = DECL_INITIAL (decl);
3905 if (!TREE_ADDRESSABLE (value)
3906 && !TREE_ADDRESSABLE (decl)
3907 && init
3908 && TREE_CODE (init) == CONSTRUCTOR)
3909 newval = optimize_compound_literals_in_ctor (init);
3911 if (newval == value)
3912 continue;
3914 if (ctor == orig_ctor)
3916 ctor = copy_node (orig_ctor);
3917 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3918 elts = CONSTRUCTOR_ELTS (ctor);
3920 (*elts)[idx].value = newval;
3922 return ctor;
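/* Illustrative sketch, assumed source: a constructor that embeds a
   compound literal whose address is never taken,

     struct outer o = { (struct inner) { 1, 2 }, 3 };

   is rewritten by the routine above as if the user had written

     struct outer o = { { 1, 2 }, 3 };

   i.e. the COMPOUND_LITERAL_EXPR element is replaced by the literal's
   own CONSTRUCTOR, recursively.  */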
3925 /* A subroutine of gimplify_modify_expr. Break out elements of a
3926 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3928 Note that we still need to clear any elements that don't have explicit
3929 initializers, so if not all elements are initialized we keep the
3930    original MODIFY_EXPR; we just remove all of the constructor elements.
3932 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3933 GS_ERROR if we would have to create a temporary when gimplifying
3934 this constructor. Otherwise, return GS_OK.
3936 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3938 static enum gimplify_status
3939 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3940 bool want_value, bool notify_temp_creation)
3942 tree object, ctor, type;
3943 enum gimplify_status ret;
3944 vec<constructor_elt, va_gc> *elts;
3946 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3948 if (!notify_temp_creation)
3950 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3951 is_gimple_lvalue, fb_lvalue);
3952 if (ret == GS_ERROR)
3953 return ret;
3956 object = TREE_OPERAND (*expr_p, 0);
3957 ctor = TREE_OPERAND (*expr_p, 1) =
3958 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3959 type = TREE_TYPE (ctor);
3960 elts = CONSTRUCTOR_ELTS (ctor);
3961 ret = GS_ALL_DONE;
3963 switch (TREE_CODE (type))
3965 case RECORD_TYPE:
3966 case UNION_TYPE:
3967 case QUAL_UNION_TYPE:
3968 case ARRAY_TYPE:
3970 struct gimplify_init_ctor_preeval_data preeval_data;
3971 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3972 bool cleared, complete_p, valid_const_initializer;
3974 /* Aggregate types must lower constructors to initialization of
3975 individual elements. The exception is that a CONSTRUCTOR node
3976 with no elements indicates zero-initialization of the whole. */
3977 if (vec_safe_is_empty (elts))
3979 if (notify_temp_creation)
3980 return GS_OK;
3981 break;
3984 /* Fetch information about the constructor to direct later processing.
3985 We might want to make static versions of it in various cases, and
3986       can only do so if it is known to be a valid constant initializer.  */
3987 valid_const_initializer
3988 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3989 &num_ctor_elements, &complete_p);
3991 /* If a const aggregate variable is being initialized, then it
3992       should never be a loss to promote the variable to be static.  */
3993 if (valid_const_initializer
3994 && num_nonzero_elements > 1
3995 && TREE_READONLY (object)
3996 && TREE_CODE (object) == VAR_DECL
3997 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3999 if (notify_temp_creation)
4000 return GS_ERROR;
4001 DECL_INITIAL (object) = ctor;
4002 TREE_STATIC (object) = 1;
4003 if (!DECL_NAME (object))
4004 DECL_NAME (object) = create_tmp_var_name ("C");
4005 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4007 /* ??? C++ doesn't automatically append a .<number> to the
4008 assembler name, and even when it does, it looks at FE private
4009 data structures to figure out what that number should be,
4010 which are not set for this variable. I suppose this is
4011 important for local statics for inline functions, which aren't
4012 "local" in the object file sense. So in order to get a unique
4013 TU-local symbol, we must invoke the lhd version now. */
4014 lhd_set_decl_assembler_name (object);
4016 *expr_p = NULL_TREE;
4017 break;
4020 /* If there are "lots" of initialized elements, even discounting
4021 those that are not address constants (and thus *must* be
4022 computed at runtime), then partition the constructor into
4023 constant and non-constant parts. Block copy the constant
4024 parts in, then generate code for the non-constant parts. */
4025 /* TODO. There's code in cp/typeck.c to do this. */
4027 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4028 /* store_constructor will ignore the clearing of variable-sized
4029 objects. Initializers for such objects must explicitly set
4030 every field that needs to be set. */
4031 cleared = false;
4032 else if (!complete_p)
4033 /* If the constructor isn't complete, clear the whole object
4034 beforehand.
4036 ??? This ought not to be needed. For any element not present
4037       in the initializer, we should simply set it to zero.  Except
4038 we'd need to *find* the elements that are not present, and that
4039 requires trickery to avoid quadratic compile-time behavior in
4040 large cases or excessive memory use in small cases. */
4041 cleared = true;
4042 else if (num_ctor_elements - num_nonzero_elements
4043 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4044 && num_nonzero_elements < num_ctor_elements / 4)
4045 /* If there are "lots" of zeros, it's more efficient to clear
4046 the memory and then set the nonzero elements. */
4047 cleared = true;
4048 else
4049 cleared = false;
4051 /* If there are "lots" of initialized elements, and all of them
4052 are valid address constants, then the entire initializer can
4053 be dropped to memory, and then memcpy'd out. Don't do this
4054 for sparse arrays, though, as it's more efficient to follow
4055 the standard CONSTRUCTOR behavior of memset followed by
4056 individual element initialization. Also don't do this for small
4057 all-zero initializers (which aren't big enough to merit
4058 clearing), and don't try to make bitwise copies of
4059 TREE_ADDRESSABLE types. */
4060 if (valid_const_initializer
4061 && !(cleared || num_nonzero_elements == 0)
4062 && !TREE_ADDRESSABLE (type))
4064 HOST_WIDE_INT size = int_size_in_bytes (type);
4065 unsigned int align;
4067 /* ??? We can still get unbounded array types, at least
4068 from the C++ front end. This seems wrong, but attempt
4069 to work around it for now. */
4070 if (size < 0)
4072 size = int_size_in_bytes (TREE_TYPE (object));
4073 if (size >= 0)
4074 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4077 /* Find the maximum alignment we can assume for the object. */
4078 /* ??? Make use of DECL_OFFSET_ALIGN. */
4079 if (DECL_P (object))
4080 align = DECL_ALIGN (object);
4081 else
4082 align = TYPE_ALIGN (type);
4084 /* Do a block move either if the size is so small as to make
4085 each individual move a sub-unit move on average, or if it
4086 is so large as to make individual moves inefficient. */
4087 if (size > 0
4088 && num_nonzero_elements > 1
4089 && (size < num_nonzero_elements
4090 || !can_move_by_pieces (size, align)))
4092 if (notify_temp_creation)
4093 return GS_ERROR;
4095 walk_tree (&ctor, force_labels_r, NULL, NULL);
4096 ctor = tree_output_constant_def (ctor);
4097 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4098 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4099 TREE_OPERAND (*expr_p, 1) = ctor;
4101 /* This is no longer an assignment of a CONSTRUCTOR, but
4102 we still may have processing to do on the LHS. So
4103 pretend we didn't do anything here to let that happen. */
4104 return GS_UNHANDLED;
4108    /* If the target is volatile and we have non-zero elements and more than
4109 one field to assign, initialize the target from a temporary. */
4110 if (TREE_THIS_VOLATILE (object)
4111 && !TREE_ADDRESSABLE (type)
4112 && num_nonzero_elements > 0
4113 && vec_safe_length (elts) > 1)
4115 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
4116 TREE_OPERAND (*expr_p, 0) = temp;
4117 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4118 *expr_p,
4119 build2 (MODIFY_EXPR, void_type_node,
4120 object, temp));
4121 return GS_OK;
4124 if (notify_temp_creation)
4125 return GS_OK;
4127 /* If there are nonzero elements and if needed, pre-evaluate to capture
4128 elements overlapping with the lhs into temporaries. We must do this
4129 before clearing to fetch the values before they are zeroed-out. */
4130 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4132 preeval_data.lhs_base_decl = get_base_address (object);
4133 if (!DECL_P (preeval_data.lhs_base_decl))
4134 preeval_data.lhs_base_decl = NULL;
4135 preeval_data.lhs_alias_set = get_alias_set (object);
4137 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4138 pre_p, post_p, &preeval_data);
4141 if (cleared)
4143 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4144 Note that we still have to gimplify, in order to handle the
4145 case of variable sized types. Avoid shared tree structures. */
4146 CONSTRUCTOR_ELTS (ctor) = NULL;
4147 TREE_SIDE_EFFECTS (ctor) = 0;
4148 object = unshare_expr (object);
4149 gimplify_stmt (expr_p, pre_p);
4152 /* If we have not block cleared the object, or if there are nonzero
4153 elements in the constructor, add assignments to the individual
4154 scalar fields of the object. */
4155 if (!cleared || num_nonzero_elements > 0)
4156 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4158 *expr_p = NULL_TREE;
4160 break;
4162 case COMPLEX_TYPE:
4164 tree r, i;
4166 if (notify_temp_creation)
4167 return GS_OK;
4169 /* Extract the real and imaginary parts out of the ctor. */
4170 gcc_assert (elts->length () == 2);
4171 r = (*elts)[0].value;
4172 i = (*elts)[1].value;
4173 if (r == NULL || i == NULL)
4175 tree zero = build_zero_cst (TREE_TYPE (type));
4176 if (r == NULL)
4177 r = zero;
4178 if (i == NULL)
4179 i = zero;
4182 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4183 represent creation of a complex value. */
4184 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4186 ctor = build_complex (type, r, i);
4187 TREE_OPERAND (*expr_p, 1) = ctor;
4189 else
4191 ctor = build2 (COMPLEX_EXPR, type, r, i);
4192 TREE_OPERAND (*expr_p, 1) = ctor;
4193 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4194 pre_p,
4195 post_p,
4196 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4197 fb_rvalue);
4200 break;
4202 case VECTOR_TYPE:
4204 unsigned HOST_WIDE_INT ix;
4205 constructor_elt *ce;
4207 if (notify_temp_creation)
4208 return GS_OK;
4210 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4211 if (TREE_CONSTANT (ctor))
4213 bool constant_p = true;
4214 tree value;
4216 /* Even when ctor is constant, it might contain non-*_CST
4217 elements, such as addresses or trapping values like
4218 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4219 in VECTOR_CST nodes. */
4220 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4221 if (!CONSTANT_CLASS_P (value))
4223 constant_p = false;
4224 break;
4227 if (constant_p)
4229 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4230 break;
4233 /* Don't reduce an initializer constant even if we can't
4234 make a VECTOR_CST. It won't do anything for us, and it'll
4235 prevent us from representing it as a single constant. */
4236 if (initializer_constant_valid_p (ctor, type))
4237 break;
4239 TREE_CONSTANT (ctor) = 0;
4242 /* Vector types use CONSTRUCTOR all the way through gimple
4243 compilation as a general initializer. */
4244 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4246 enum gimplify_status tret;
4247 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4248 fb_rvalue);
4249 if (tret == GS_ERROR)
4250 ret = GS_ERROR;
4252 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4253 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4255 break;
4257 default:
4258 /* So how did we get a CONSTRUCTOR for a scalar type? */
4259 gcc_unreachable ();
4262 if (ret == GS_ERROR)
4263 return GS_ERROR;
4264 else if (want_value)
4266 *expr_p = object;
4267 return GS_OK;
4269 else
4271 /* If we have gimplified both sides of the initializer but have
4272 not emitted an assignment, do so now. */
4273 if (*expr_p)
4275 tree lhs = TREE_OPERAND (*expr_p, 0);
4276 tree rhs = TREE_OPERAND (*expr_p, 1);
4277 gimple init = gimple_build_assign (lhs, rhs);
4278 gimplify_seq_add_stmt (pre_p, init);
4279 *expr_p = NULL;
4282 return GS_ALL_DONE;
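/* Illustrative sketch of the static-promotion path above, assumed
   source:

     int f (int i)
     {
       const int tbl[4] = { 1, 2, 3, 4 };
       return tbl[i];
     }

   Provided the initializer is a valid constant and the variable's
   addressability does not prevent it, the read-only local is given
   TREE_STATIC and its CONSTRUCTOR becomes DECL_INITIAL, so no
   per-element stores are emitted at run time.  */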
4286 /* Given a pointer value OP0, return a simplified version of an
4287 indirection through OP0, or NULL_TREE if no simplification is
4288 possible. Note that the resulting type may be different from
4289 the type pointed to in the sense that it is still compatible
4290 from the langhooks point of view. */
4292 tree
4293 gimple_fold_indirect_ref (tree t)
4295 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4296 tree sub = t;
4297 tree subtype;
4299 STRIP_NOPS (sub);
4300 subtype = TREE_TYPE (sub);
4301 if (!POINTER_TYPE_P (subtype))
4302 return NULL_TREE;
4304 if (TREE_CODE (sub) == ADDR_EXPR)
4306 tree op = TREE_OPERAND (sub, 0);
4307 tree optype = TREE_TYPE (op);
4308 /* *&p => p */
4309 if (useless_type_conversion_p (type, optype))
4310 return op;
4312 /* *(foo *)&fooarray => fooarray[0] */
4313 if (TREE_CODE (optype) == ARRAY_TYPE
4314 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4315 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4317 tree type_domain = TYPE_DOMAIN (optype);
4318 tree min_val = size_zero_node;
4319 if (type_domain && TYPE_MIN_VALUE (type_domain))
4320 min_val = TYPE_MIN_VALUE (type_domain);
4321 if (TREE_CODE (min_val) == INTEGER_CST)
4322 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4324 /* *(foo *)&complexfoo => __real__ complexfoo */
4325 else if (TREE_CODE (optype) == COMPLEX_TYPE
4326 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4327 return fold_build1 (REALPART_EXPR, type, op);
4328 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4329 else if (TREE_CODE (optype) == VECTOR_TYPE
4330 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4332 tree part_width = TYPE_SIZE (type);
4333 tree index = bitsize_int (0);
4334 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4338 /* *(p + CST) -> ... */
4339 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4340 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4342 tree addr = TREE_OPERAND (sub, 0);
4343 tree off = TREE_OPERAND (sub, 1);
4344 tree addrtype;
4346 STRIP_NOPS (addr);
4347 addrtype = TREE_TYPE (addr);
4349 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4350 if (TREE_CODE (addr) == ADDR_EXPR
4351 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4352 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4353 && host_integerp (off, 1))
4355 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
4356 tree part_width = TYPE_SIZE (type);
4357 unsigned HOST_WIDE_INT part_widthi
4358 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4359 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4360 tree index = bitsize_int (indexi);
4361 if (offset / part_widthi
4362 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4363 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4364 part_width, index);
4367 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4368 if (TREE_CODE (addr) == ADDR_EXPR
4369 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4370 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4372 tree size = TYPE_SIZE_UNIT (type);
4373 if (tree_int_cst_equal (size, off))
4374 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4377 /* *(p + CST) -> MEM_REF <p, CST>. */
4378 if (TREE_CODE (addr) != ADDR_EXPR
4379 || DECL_P (TREE_OPERAND (addr, 0)))
4380 return fold_build2 (MEM_REF, type,
4381 addr,
4382 build_int_cst_wide (ptype,
4383 TREE_INT_CST_LOW (off),
4384 TREE_INT_CST_HIGH (off)));
4387 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4388 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4389 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4390 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4392 tree type_domain;
4393 tree min_val = size_zero_node;
4394 tree osub = sub;
4395 sub = gimple_fold_indirect_ref (sub);
4396 if (! sub)
4397 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4398 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4399 if (type_domain && TYPE_MIN_VALUE (type_domain))
4400 min_val = TYPE_MIN_VALUE (type_domain);
4401 if (TREE_CODE (min_val) == INTEGER_CST)
4402 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4405 return NULL_TREE;
4408 /* Given a pointer value OP0, return a simplified version of an
4409 indirection through OP0, or NULL_TREE if no simplification is
4410    possible.  This may only be applied to the rhs of an expression.
4411 Note that the resulting type may be different from the type pointed
4412 to in the sense that it is still compatible from the langhooks
4413 point of view. */
4415 static tree
4416 gimple_fold_indirect_ref_rhs (tree t)
4418 return gimple_fold_indirect_ref (t);
4421 /* Subroutine of gimplify_modify_expr to do simplifications of
4422 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4423 something changes. */
4425 static enum gimplify_status
4426 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4427 gimple_seq *pre_p, gimple_seq *post_p,
4428 bool want_value)
4430 enum gimplify_status ret = GS_UNHANDLED;
4431 bool changed;
4435 changed = false;
4436 switch (TREE_CODE (*from_p))
4438 case VAR_DECL:
4439 /* If we're assigning from a read-only variable initialized with
4440 a constructor, do the direct assignment from the constructor,
4441 but only if neither source nor target are volatile since this
4442 latter assignment might end up being done on a per-field basis. */
4443 if (DECL_INITIAL (*from_p)
4444 && TREE_READONLY (*from_p)
4445 && !TREE_THIS_VOLATILE (*from_p)
4446 && !TREE_THIS_VOLATILE (*to_p)
4447 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4449 tree old_from = *from_p;
4450 enum gimplify_status subret;
4452 /* Move the constructor into the RHS. */
4453 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4455 /* Let's see if gimplify_init_constructor will need to put
4456 it in memory. */
4457 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4458 false, true);
4459 if (subret == GS_ERROR)
4461 /* If so, revert the change. */
4462 *from_p = old_from;
4464 else
4466 ret = GS_OK;
4467 changed = true;
4470 break;
4471 case INDIRECT_REF:
4473 /* If we have code like
4475 *(const A*)(A*)&x
4477 where the type of "x" is a (possibly cv-qualified variant
4478 of "A"), treat the entire expression as identical to "x".
4479 This kind of code arises in C++ when an object is bound
4480 to a const reference, and if "x" is a TARGET_EXPR we want
4481 to take advantage of the optimization below. */
4482 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4483 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4484 if (t)
4486 if (TREE_THIS_VOLATILE (t) != volatile_p)
4488 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4489 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4490 build_fold_addr_expr (t));
4491 if (REFERENCE_CLASS_P (t))
4492 TREE_THIS_VOLATILE (t) = volatile_p;
4494 *from_p = t;
4495 ret = GS_OK;
4496 changed = true;
4498 break;
4501 case TARGET_EXPR:
4503 /* If we are initializing something from a TARGET_EXPR, strip the
4504 TARGET_EXPR and initialize it directly, if possible. This can't
4505 be done if the initializer is void, since that implies that the
4506 temporary is set in some non-trivial way.
4508 ??? What about code that pulls out the temp and uses it
4509 elsewhere? I think that such code never uses the TARGET_EXPR as
4510 an initializer. If I'm wrong, we'll die because the temp won't
4511 have any RTL. In that case, I guess we'll need to replace
4512 references somehow. */
4513 tree init = TARGET_EXPR_INITIAL (*from_p);
4515 if (init
4516 && !VOID_TYPE_P (TREE_TYPE (init)))
4518 *from_p = init;
4519 ret = GS_OK;
4520 changed = true;
4523 break;
4525 case COMPOUND_EXPR:
4526 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4527 caught. */
4528 gimplify_compound_expr (from_p, pre_p, true);
4529 ret = GS_OK;
4530 changed = true;
4531 break;
4533 case CONSTRUCTOR:
4534 /* If we already made some changes, let the front end have a
4535 crack at this before we break it down. */
4536 if (ret != GS_UNHANDLED)
4537 break;
4538 /* If we're initializing from a CONSTRUCTOR, break this into
4539 individual MODIFY_EXPRs. */
4540 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4541 false);
4543 case COND_EXPR:
4544 /* If we're assigning to a non-register type, push the assignment
4545 down into the branches. This is mandatory for ADDRESSABLE types,
4546 since we cannot generate temporaries for such, but it saves a
4547 copy in other cases as well. */
4548 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4550 /* This code should mirror the code in gimplify_cond_expr. */
4551 enum tree_code code = TREE_CODE (*expr_p);
4552 tree cond = *from_p;
4553 tree result = *to_p;
4555 ret = gimplify_expr (&result, pre_p, post_p,
4556 is_gimple_lvalue, fb_lvalue);
4557 if (ret != GS_ERROR)
4558 ret = GS_OK;
4560 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4561 TREE_OPERAND (cond, 1)
4562 = build2 (code, void_type_node, result,
4563 TREE_OPERAND (cond, 1));
4564 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4565 TREE_OPERAND (cond, 2)
4566 = build2 (code, void_type_node, unshare_expr (result),
4567 TREE_OPERAND (cond, 2));
4569 TREE_TYPE (cond) = void_type_node;
4570 recalculate_side_effects (cond);
4572 if (want_value)
4574 gimplify_and_add (cond, pre_p);
4575 *expr_p = unshare_expr (result);
4577 else
4578 *expr_p = cond;
4579 return ret;
4581 break;
4583 case CALL_EXPR:
4584 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4585 return slot so that we don't generate a temporary. */
4586 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4587 && aggregate_value_p (*from_p, *from_p))
4589 bool use_target;
4591 if (!(rhs_predicate_for (*to_p))(*from_p))
4592 /* If we need a temporary, *to_p isn't accurate. */
4593 use_target = false;
4594 /* It's OK to use the return slot directly unless it's an NRV. */
4595 else if (TREE_CODE (*to_p) == RESULT_DECL
4596 && DECL_NAME (*to_p) == NULL_TREE
4597 && needs_to_live_in_memory (*to_p))
4598 use_target = true;
4599 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4600 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4601 /* Don't force regs into memory. */
4602 use_target = false;
4603 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4604 /* It's OK to use the target directly if it's being
4605 initialized. */
4606 use_target = true;
4607 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4608 /* Always use the target and thus RSO for variable-sized types.
4609 GIMPLE cannot deal with a variable-sized assignment
4610 embedded in a call statement. */
4611 use_target = true;
4612 else if (TREE_CODE (*to_p) != SSA_NAME
4613 && (!is_gimple_variable (*to_p)
4614 || needs_to_live_in_memory (*to_p)))
4615 /* Don't use the original target if it's already addressable;
4616 if its address escapes, and the called function uses the
4617 NRV optimization, a conforming program could see *to_p
4618 change before the called function returns; see c++/19317.
4619 When optimizing, the return_slot pass marks more functions
4620 as safe after we have escape info. */
4621 use_target = false;
4622 else
4623 use_target = true;
4625 if (use_target)
4627 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4628 mark_addressable (*to_p);
4631 break;
4633 case WITH_SIZE_EXPR:
4634 /* Likewise for calls that return an aggregate of non-constant size,
4635 since we would not be able to generate a temporary at all. */
4636 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4638 *from_p = TREE_OPERAND (*from_p, 0);
4639 /* We don't change ret in this case because the
4640 WITH_SIZE_EXPR might have been added in
4641 gimplify_modify_expr, so returning GS_OK would lead to an
4642 infinite loop. */
4643 changed = true;
4645 break;
4647 /* If we're initializing from a container, push the initialization
4648 inside it. */
4649 case CLEANUP_POINT_EXPR:
4650 case BIND_EXPR:
4651 case STATEMENT_LIST:
4653 tree wrap = *from_p;
4654 tree t;
4656 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4657 fb_lvalue);
4658 if (ret != GS_ERROR)
4659 ret = GS_OK;
4661 t = voidify_wrapper_expr (wrap, *expr_p);
4662 gcc_assert (t == *expr_p);
4664 if (want_value)
4666 gimplify_and_add (wrap, pre_p);
4667 *expr_p = unshare_expr (*to_p);
4669 else
4670 *expr_p = wrap;
4671 return GS_OK;
4674 case COMPOUND_LITERAL_EXPR:
4676 tree complit = TREE_OPERAND (*expr_p, 1);
4677 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4678 tree decl = DECL_EXPR_DECL (decl_s);
4679 tree init = DECL_INITIAL (decl);
4681 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4682 into struct T x = { 0, 1, 2 } if the address of the
4683 compound literal has never been taken. */
4684 if (!TREE_ADDRESSABLE (complit)
4685 && !TREE_ADDRESSABLE (decl)
4686 && init)
4688 *expr_p = copy_node (*expr_p);
4689 TREE_OPERAND (*expr_p, 1) = init;
4690 return GS_OK;
4694 default:
4695 break;
4698 while (changed);
4700 return ret;
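/* Illustrative sketch of the TARGET_EXPR case above; D.1800 is a
   made-up temporary.  An initialization written in GENERIC as

     a = TARGET_EXPR <D.1800, foo ()>;

   is rewritten, when the TARGET_EXPR has a non-void initializer, to
   initialize the destination directly:

     a = foo ();

   so the intermediate temporary D.1800 is never materialized.  */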
4704 /* Return true if T looks like a valid GIMPLE statement. */
4706 static bool
4707 is_gimple_stmt (tree t)
4709 const enum tree_code code = TREE_CODE (t);
4711 switch (code)
4713 case NOP_EXPR:
4714 /* The only valid NOP_EXPR is the empty statement. */
4715 return IS_EMPTY_STMT (t);
4717 case BIND_EXPR:
4718 case COND_EXPR:
4719 /* These are only valid if they're void. */
4720 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4722 case SWITCH_EXPR:
4723 case GOTO_EXPR:
4724 case RETURN_EXPR:
4725 case LABEL_EXPR:
4726 case CASE_LABEL_EXPR:
4727 case TRY_CATCH_EXPR:
4728 case TRY_FINALLY_EXPR:
4729 case EH_FILTER_EXPR:
4730 case CATCH_EXPR:
4731 case ASM_EXPR:
4732 case STATEMENT_LIST:
4733 case OMP_PARALLEL:
4734 case OMP_FOR:
4735 case OMP_SECTIONS:
4736 case OMP_SECTION:
4737 case OMP_SINGLE:
4738 case OMP_MASTER:
4739 case OMP_ORDERED:
4740 case OMP_CRITICAL:
4741 case OMP_TASK:
4742 /* These are always void. */
4743 return true;
4745 case CALL_EXPR:
4746 case MODIFY_EXPR:
4747 case PREDICT_EXPR:
4748 /* These are valid regardless of their type. */
4749 return true;
4751 default:
4752 return false;
4757 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4758 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4759 DECL_GIMPLE_REG_P set.
4761 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4762 other, unmodified part of the complex object just before the total store.
4763 As a consequence, if the object is still uninitialized, an undefined value
4764 will be loaded into a register, which may result in a spurious exception
4765 if the register is floating-point and the value happens to be a signaling
4766 NaN for example. Then the fully-fledged complex operations lowering pass
4767 followed by a DCE pass are necessary in order to fix things up. */
4769 static enum gimplify_status
4770 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4771 bool want_value)
4773 enum tree_code code, ocode;
4774 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4776 lhs = TREE_OPERAND (*expr_p, 0);
4777 rhs = TREE_OPERAND (*expr_p, 1);
4778 code = TREE_CODE (lhs);
4779 lhs = TREE_OPERAND (lhs, 0);
4781 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4782 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4783 TREE_NO_WARNING (other) = 1;
4784 other = get_formal_tmp_var (other, pre_p);
4786 realpart = code == REALPART_EXPR ? rhs : other;
4787 imagpart = code == REALPART_EXPR ? other : rhs;
4789 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4790 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4791 else
4792 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4794 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4795 *expr_p = (want_value) ? rhs : NULL_TREE;
4797 return GS_ALL_DONE;
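/* Illustrative sketch; c is assumed to be a non-addressed complex
   variable with DECL_GIMPLE_REG_P set and D.1900 is a made-up
   temporary.  The partial store

     __real__ c = x;

   is widened above into the total store

     D.1900 = __imag__ c;
     c = COMPLEX_EXPR <x, D.1900>;

   which lets c remain a gimple register.  */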
4800 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4802 modify_expr
4803 : varname '=' rhs
4804 | '*' ID '=' rhs
4806 PRE_P points to the list where side effects that must happen before
4807 *EXPR_P should be stored.
4809 POST_P points to the list where side effects that must happen after
4810 *EXPR_P should be stored.
4812 WANT_VALUE is nonzero iff we want to use the value of this expression
4813 in another expression. */
4815 static enum gimplify_status
4816 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4817 bool want_value)
4819 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4820 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4821 enum gimplify_status ret = GS_UNHANDLED;
4822 gimple assign;
4823 location_t loc = EXPR_LOCATION (*expr_p);
4824 gimple_stmt_iterator gsi;
4826 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4827 || TREE_CODE (*expr_p) == INIT_EXPR);
4829 /* Trying to simplify a clobber using normal logic doesn't work,
4830 so handle it here. */
4831 if (TREE_CLOBBER_P (*from_p))
4833 gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
4834 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4835 *expr_p = NULL;
4836 return GS_ALL_DONE;
4839 /* Insert pointer conversions required by the middle-end that are not
4840 required by the frontend. This fixes middle-end type checking for
4841    cases such as gcc.dg/redecl-6.c.  */
4842 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4844 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4845 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4846 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4849 /* See if any simplifications can be done based on what the RHS is. */
4850 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4851 want_value);
4852 if (ret != GS_UNHANDLED)
4853 return ret;
4855 /* For zero sized types only gimplify the left hand side and right hand
4856 side as statements and throw away the assignment. Do this after
4857 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4858 types properly. */
4859 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4861 gimplify_stmt (from_p, pre_p);
4862 gimplify_stmt (to_p, pre_p);
4863 *expr_p = NULL_TREE;
4864 return GS_ALL_DONE;
4867 /* If the value being copied is of variable width, compute the length
4868 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4869 before gimplifying any of the operands so that we can resolve any
4870 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4871 the size of the expression to be copied, not of the destination, so
4872 that is what we must do here. */
4873 maybe_with_size_expr (from_p);
4875 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4876 if (ret == GS_ERROR)
4877 return ret;
4879 /* As a special case, we have to temporarily allow for assignments
4880 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4881 a toplevel statement, when gimplifying the GENERIC expression
4882 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4883 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4885 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4886 prevent gimplify_expr from trying to create a new temporary for
4887 foo's LHS, we tell it that it should only gimplify until it
4888 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4889 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4890 and all we need to do here is set 'a' to be its LHS. */
4891 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4892 fb_rvalue);
4893 if (ret == GS_ERROR)
4894 return ret;
4896 /* Now see if the above changed *from_p to something we handle specially. */
4897 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4898 want_value);
4899 if (ret != GS_UNHANDLED)
4900 return ret;
4902 /* If we've got a variable sized assignment between two lvalues (i.e. does
4903 not involve a call), then we can make things a bit more straightforward
4904 by converting the assignment to memcpy or memset. */
4905 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4907 tree from = TREE_OPERAND (*from_p, 0);
4908 tree size = TREE_OPERAND (*from_p, 1);
4910 if (TREE_CODE (from) == CONSTRUCTOR)
4911 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4913 if (is_gimple_addressable (from))
4915 *from_p = from;
4916 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4917 pre_p);
4921 /* Transform partial stores to non-addressable complex variables into
4922 total stores. This allows us to use real instead of virtual operands
4923 for these variables, which improves optimization. */
4924 if ((TREE_CODE (*to_p) == REALPART_EXPR
4925 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4926 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4927 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4929 /* Try to alleviate the effects of the gimplification creating artificial
4930 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4931 if (!gimplify_ctxp->into_ssa
4932 && TREE_CODE (*from_p) == VAR_DECL
4933 && DECL_IGNORED_P (*from_p)
4934 && DECL_P (*to_p)
4935 && !DECL_IGNORED_P (*to_p))
4937 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4938 DECL_NAME (*from_p)
4939 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4940 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4941 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4944 if (want_value && TREE_THIS_VOLATILE (*to_p))
4945 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4947 if (TREE_CODE (*from_p) == CALL_EXPR)
4949 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4950 instead of a GIMPLE_ASSIGN. */
4951 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4952 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4953 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4954 assign = gimple_build_call_from_tree (*from_p);
4955 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4956 if (!gimple_call_noreturn_p (assign))
4957 gimple_call_set_lhs (assign, *to_p);
4959 else
4961 assign = gimple_build_assign (*to_p, *from_p);
4962 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4965 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4967 /* We should have got an SSA name from the start. */
4968 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4971 gimplify_seq_add_stmt (pre_p, assign);
4972 gsi = gsi_last (*pre_p);
4973 fold_stmt (&gsi);
4975 if (want_value)
4977 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4978 return GS_OK;
4980 else
4981 *expr_p = NULL;
4983 return GS_ALL_DONE;
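/* Illustrative sketch, with D.2000 a made-up temporary: the GENERIC
   assignment

     a = b + c * d;

   leaves this function as the two gimple assignments

     D.2000 = c * d;
     a = b + D.2000;

   while "a = foo ();" becomes a single GIMPLE_CALL whose LHS is a,
   as explained in the CALL_EXPR comment above.  */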
4986 /* Gimplify a comparison between two variable-sized objects. Do this
4987 with a call to BUILT_IN_MEMCMP. */
4989 static enum gimplify_status
4990 gimplify_variable_sized_compare (tree *expr_p)
4992 location_t loc = EXPR_LOCATION (*expr_p);
4993 tree op0 = TREE_OPERAND (*expr_p, 0);
4994 tree op1 = TREE_OPERAND (*expr_p, 1);
4995 tree t, arg, dest, src, expr;
4997 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4998 arg = unshare_expr (arg);
4999 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5000 src = build_fold_addr_expr_loc (loc, op1);
5001 dest = build_fold_addr_expr_loc (loc, op0);
5002 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5003 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5005 expr
5006 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5007 SET_EXPR_LOCATION (expr, loc);
5008 *expr_p = expr;
5010 return GS_OK;
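/* Illustrative sketch, assumed variable-sized operands: a comparison

     if (a == b) ...

   where the size of a's type is not a compile-time constant becomes
   approximately

     if (__builtin_memcmp (&a, &b, <size of a's type>) == 0) ...

   with the size expression taken from TYPE_SIZE_UNIT and any
   PLACEHOLDER_EXPRs in it substituted with a itself.  */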
5013 /* Gimplify a comparison between two aggregate objects of integral scalar
5014 mode as a comparison between the bitwise equivalent scalar values. */
5016 static enum gimplify_status
5017 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5019 location_t loc = EXPR_LOCATION (*expr_p);
5020 tree op0 = TREE_OPERAND (*expr_p, 0);
5021 tree op1 = TREE_OPERAND (*expr_p, 1);
5023 tree type = TREE_TYPE (op0);
5024 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5026 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5027 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5029 *expr_p
5030 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5032 return GS_OK;
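/* Illustrative sketch: for a 4-byte struct whose type has an integral
   scalar mode, the comparison

     s1 == s2

   is rewritten above as a comparison of the bitwise-equivalent
   scalar values,

     VIEW_CONVERT_EXPR<unsigned int>(s1) == VIEW_CONVERT_EXPR<unsigned int>(s2)

   assuming the target provides an unsigned integer type for that mode.  */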
5035 /* Gimplify an expression sequence. This function gimplifies each
5036 expression and rewrites the original expression with the last
5037 expression of the sequence in GIMPLE form.
5039 PRE_P points to the list where the side effects for all the
5040 expressions in the sequence will be emitted.
5042 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5044 static enum gimplify_status
5045 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5047 tree t = *expr_p;
5051 tree *sub_p = &TREE_OPERAND (t, 0);
5053 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5054 gimplify_compound_expr (sub_p, pre_p, false);
5055 else
5056 gimplify_stmt (sub_p, pre_p);
5058 t = TREE_OPERAND (t, 1);
5060 while (TREE_CODE (t) == COMPOUND_EXPR);
5062 *expr_p = t;
5063 if (want_value)
5064 return GS_OK;
5065 else
5067 gimplify_stmt (expr_p, pre_p);
5068 return GS_ALL_DONE;
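/* Illustrative sketch: in

     t = (f (), g (), h ());

   the calls f () and g () are gimplified as statements into PRE_P and
   only "t = h ();" remains as the value-producing assignment.  */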
5072 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5073 gimplify. After gimplification, EXPR_P will point to a new temporary
5074 that holds the original value of the SAVE_EXPR node.
5076 PRE_P points to the list where side effects that must happen before
5077 *EXPR_P should be stored. */
5079 static enum gimplify_status
5080 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5082 enum gimplify_status ret = GS_ALL_DONE;
5083 tree val;
5085 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5086 val = TREE_OPERAND (*expr_p, 0);
5088 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5089 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5091       /* The operand may be a void-valued expression, such as the SAVE_EXPRs
5092 generated by the Java frontend for class initialization. It is
5093 being executed only for its side-effects. */
5094 if (TREE_TYPE (val) == void_type_node)
5096 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5097 is_gimple_stmt, fb_none);
5098 val = NULL;
5100 else
5101 val = get_initialized_tmp_var (val, pre_p, post_p);
5103 TREE_OPERAND (*expr_p, 0) = val;
5104 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5107 *expr_p = val;
5109 return ret;
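/* Illustrative sketch; D.1950 is a made-up temporary.  A tree such as
   SAVE_EXPR <n * 4>, shared between several references, is evaluated
   once,

     D.1950 = n * 4;

   and every later occurrence of the SAVE_EXPR reuses D.1950, which is
   what SAVE_EXPR_RESOLVED_P records.  */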
5112 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5114 unary_expr
5115 : ...
5116 | '&' varname
5119 PRE_P points to the list where side effects that must happen before
5120 *EXPR_P should be stored.
5122 POST_P points to the list where side effects that must happen after
5123 *EXPR_P should be stored. */
5125 static enum gimplify_status
5126 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5128 tree expr = *expr_p;
5129 tree op0 = TREE_OPERAND (expr, 0);
5130 enum gimplify_status ret;
5131 location_t loc = EXPR_LOCATION (*expr_p);
5133 switch (TREE_CODE (op0))
5135 case INDIRECT_REF:
5136 do_indirect_ref:
5137 /* Check if we are dealing with an expression of the form '&*ptr'.
5138 While the front end folds away '&*ptr' into 'ptr', these
5139 expressions may be generated internally by the compiler (e.g.,
5140 builtins like __builtin_va_end). */
5141 /* Caution: the silent array decomposition semantics we allow for
5142 ADDR_EXPR means we can't always discard the pair. */
5143 /* Gimplification of the ADDR_EXPR operand may drop
5144 cv-qualification conversions, so make sure we add them if
5145 needed. */
5147 tree op00 = TREE_OPERAND (op0, 0);
5148 tree t_expr = TREE_TYPE (expr);
5149 tree t_op00 = TREE_TYPE (op00);
5151 if (!useless_type_conversion_p (t_expr, t_op00))
5152 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5153 *expr_p = op00;
5154 ret = GS_OK;
5156 break;
5158 case VIEW_CONVERT_EXPR:
5159 /* Take the address of our operand and then convert it to the type of
5160 this ADDR_EXPR.
5162 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5163 all clear. The impact of this transformation is even less clear. */
5165 /* If the operand is a useless conversion, look through it. Doing so
5166 guarantees that the ADDR_EXPR and its operand will remain of the
5167 same type. */
5168 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5169 op0 = TREE_OPERAND (op0, 0);
5171 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5172 build_fold_addr_expr_loc (loc,
5173 TREE_OPERAND (op0, 0)));
5174 ret = GS_OK;
5175 break;
5177 default:
5178 /* We use fb_either here because the C frontend sometimes takes
5179 the address of a call that returns a struct; see
5180 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5181 the implied temporary explicit. */
5183 /* Make the operand addressable. */
5184 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5185 is_gimple_addressable, fb_either);
5186 if (ret == GS_ERROR)
5187 break;
5189 /* Then mark it. Beware that it may not be possible to do so directly
5190 if a temporary has been created by the gimplification. */
5191 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5193 op0 = TREE_OPERAND (expr, 0);
5195 /* For various reasons, the gimplification of the expression
5196 may have made a new INDIRECT_REF. */
5197 if (TREE_CODE (op0) == INDIRECT_REF)
5198 goto do_indirect_ref;
5200 mark_addressable (TREE_OPERAND (expr, 0));
5202 /* The FEs may end up building ADDR_EXPRs early on a decl with
5203 an incomplete type. Re-build ADDR_EXPRs in canonical form
5204 here. */
5205 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5206 *expr_p = build_fold_addr_expr (op0);
5208 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5209 recompute_tree_invariant_for_addr_expr (*expr_p);
5211 /* If we re-built the ADDR_EXPR add a conversion to the original type
5212 if required. */
5213 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5214 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5216 break;
5219 return ret;
5222 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5223 value; output operands should be a gimple lvalue. */
5225 static enum gimplify_status
5226 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5228 tree expr;
5229 int noutputs;
5230 const char **oconstraints;
5231 int i;
5232 tree link;
5233 const char *constraint;
5234 bool allows_mem, allows_reg, is_inout;
5235 enum gimplify_status ret, tret;
5236 gimple stmt;
5237 vec<tree, va_gc> *inputs;
5238 vec<tree, va_gc> *outputs;
5239 vec<tree, va_gc> *clobbers;
5240 vec<tree, va_gc> *labels;
5241 tree link_next;
5243 expr = *expr_p;
5244 noutputs = list_length (ASM_OUTPUTS (expr));
5245 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5247 inputs = NULL;
5248 outputs = NULL;
5249 clobbers = NULL;
5250 labels = NULL;
5252 ret = GS_ALL_DONE;
5253 link_next = NULL_TREE;
5254 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5256 bool ok;
5257 size_t constraint_len;
5259 link_next = TREE_CHAIN (link);
5261 oconstraints[i]
5262 = constraint
5263 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5264 constraint_len = strlen (constraint);
5265 if (constraint_len == 0)
5266 continue;
5268 ok = parse_output_constraint (&constraint, i, 0, 0,
5269 &allows_mem, &allows_reg, &is_inout);
5270 if (!ok)
5272 ret = GS_ERROR;
5273 is_inout = false;
5276 if (!allows_reg && allows_mem)
5277 mark_addressable (TREE_VALUE (link));
5279 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5280 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5281 fb_lvalue | fb_mayfail);
5282 if (tret == GS_ERROR)
5284 error ("invalid lvalue in asm output %d", i);
5285 ret = tret;
5288 vec_safe_push (outputs, link);
5289 TREE_CHAIN (link) = NULL_TREE;
5291 if (is_inout)
5293 /* An input/output operand. To give the optimizers more
5294 flexibility, split it into separate input and output
5295 operands. */
5296 tree input;
5297 char buf[10];
5299 /* Turn the in/out constraint into an output constraint. */
5300 char *p = xstrdup (constraint);
5301 p[0] = '=';
5302 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5304 /* And add a matching input constraint. */
5305 if (allows_reg)
5307 sprintf (buf, "%d", i);
5309 /* If there are multiple alternatives in the constraint,
5310                   handle each of them individually.  Those that allow a register
5311                   will be replaced with the operand number; the others will stay
5312 unchanged. */
5313 if (strchr (p, ',') != NULL)
5315 size_t len = 0, buflen = strlen (buf);
5316 char *beg, *end, *str, *dst;
5318 for (beg = p + 1;;)
5320 end = strchr (beg, ',');
5321 if (end == NULL)
5322 end = strchr (beg, '\0');
5323 if ((size_t) (end - beg) < buflen)
5324 len += buflen + 1;
5325 else
5326 len += end - beg + 1;
5327 if (*end)
5328 beg = end + 1;
5329 else
5330 break;
5333 str = (char *) alloca (len);
5334 for (beg = p + 1, dst = str;;)
5336 const char *tem;
5337 bool mem_p, reg_p, inout_p;
5339 end = strchr (beg, ',');
5340 if (end)
5341 *end = '\0';
5342 beg[-1] = '=';
5343 tem = beg - 1;
5344 parse_output_constraint (&tem, i, 0, 0,
5345 &mem_p, &reg_p, &inout_p);
5346 if (dst != str)
5347 *dst++ = ',';
5348 if (reg_p)
5350 memcpy (dst, buf, buflen);
5351 dst += buflen;
5353 else
5355 if (end)
5356 len = end - beg;
5357 else
5358 len = strlen (beg);
5359 memcpy (dst, beg, len);
5360 dst += len;
5362 if (end)
5363 beg = end + 1;
5364 else
5365 break;
5367 *dst = '\0';
5368 input = build_string (dst - str, str);
5370 else
5371 input = build_string (strlen (buf), buf);
5373 else
5374 input = build_string (constraint_len - 1, constraint + 1);
5376 free (p);
5378 input = build_tree_list (build_tree_list (NULL_TREE, input),
5379 unshare_expr (TREE_VALUE (link)));
5380 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5384 link_next = NULL_TREE;
5385 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5387 link_next = TREE_CHAIN (link);
5388 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5389 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5390 oconstraints, &allows_mem, &allows_reg);
5392 /* If we can't make copies, we can only accept memory. */
5393 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5395 if (allows_mem)
5396 allows_reg = 0;
5397 else
5399 error ("impossible constraint in %<asm%>");
5400 error ("non-memory input %d must stay in memory", i);
5401 return GS_ERROR;
5405 /* If the operand is a memory input, it should be an lvalue. */
5406 if (!allows_reg && allows_mem)
5408 tree inputv = TREE_VALUE (link);
5409 STRIP_NOPS (inputv);
5410 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5411 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5412 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5413 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5414 TREE_VALUE (link) = error_mark_node;
5415 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5416 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5417 mark_addressable (TREE_VALUE (link));
5418 if (tret == GS_ERROR)
5420 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5421 input_location = EXPR_LOCATION (TREE_VALUE (link));
5422 error ("memory input %d is not directly addressable", i);
5423 ret = tret;
5426 else
5428 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5429 is_gimple_asm_val, fb_rvalue);
5430 if (tret == GS_ERROR)
5431 ret = tret;
5434 TREE_CHAIN (link) = NULL_TREE;
5435 vec_safe_push (inputs, link);
5438 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5439 vec_safe_push (clobbers, link);
5441 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5442 vec_safe_push (labels, link);
5444 /* Do not add ASMs with errors to the gimple IL stream. */
5445 if (ret != GS_ERROR)
5447 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5448 inputs, outputs, clobbers, labels);
5450 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5451 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5453 gimplify_seq_add_stmt (pre_p, stmt);
5456 return ret;
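/* Illustrative sketch of the in/out splitting above: an operand written
   as

     asm ("..." : "+r" (x));

   is rewritten, roughly, into a separate output and a matching
   numbered input,

     asm ("..." : "=r" (x) : "0" (x));

   which is the form the rest of the compiler expects.  */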
5459 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5460 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5461 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5462 return to this function.
5464 FIXME should we complexify the prequeue handling instead? Or use flags
5465 for all the cleanups and let the optimizer tighten them up? The current
5466 code seems pretty fragile; it will break on a cleanup within any
5467 non-conditional nesting. But any such nesting would be broken, anyway;
5468 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5469 and continues out of it. We can do that at the RTL level, though, so
5470 having an optimizer to tighten up try/finally regions would be a Good
5471 Thing. */
5473 static enum gimplify_status
5474 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5476 gimple_stmt_iterator iter;
5477 gimple_seq body_sequence = NULL;
5479 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5481 /* We only care about the number of conditions between the innermost
5482 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5483 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5484 int old_conds = gimplify_ctxp->conditions;
5485 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5486 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5487 gimplify_ctxp->conditions = 0;
5488 gimplify_ctxp->conditional_cleanups = NULL;
5489 gimplify_ctxp->in_cleanup_point_expr = true;
5491 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5493 gimplify_ctxp->conditions = old_conds;
5494 gimplify_ctxp->conditional_cleanups = old_cleanups;
5495 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5497 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5499 gimple wce = gsi_stmt (iter);
5501 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5503 if (gsi_one_before_end_p (iter))
5505 /* Note that gsi_insert_seq_before and gsi_remove do not
5506 scan operands, unlike some other sequence mutators. */
5507 if (!gimple_wce_cleanup_eh_only (wce))
5508 gsi_insert_seq_before_without_update (&iter,
5509 gimple_wce_cleanup (wce),
5510 GSI_SAME_STMT);
5511 gsi_remove (&iter, true);
5512 break;
5514 else
5516 gimple gtry;
5517 gimple_seq seq;
5518 enum gimple_try_flags kind;
5520 if (gimple_wce_cleanup_eh_only (wce))
5521 kind = GIMPLE_TRY_CATCH;
5522 else
5523 kind = GIMPLE_TRY_FINALLY;
5524 seq = gsi_split_seq_after (iter);
5526 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5527 /* Do not use gsi_replace here, as it may scan operands.
5528 We want to do a simple structural modification only. */
5529 gsi_set_stmt (&iter, gtry);
5530 iter = gsi_start (gtry->gimple_try.eval);
5533 else
5534 gsi_next (&iter);
5537 gimplify_seq_add_seq (pre_p, body_sequence);
5538 if (temp)
5540 *expr_p = temp;
5541 return GS_OK;
5543 else
5545 *expr_p = NULL;
5546 return GS_ALL_DONE;
5550 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5551 is the cleanup action required. EH_ONLY is true if the cleanup should
5552 only be executed if an exception is thrown, not on normal exit. */
5554 static void
5555 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5557 gimple wce;
5558 gimple_seq cleanup_stmts = NULL;
5560 /* Errors can result in improperly nested cleanups, which in turn cause
5561 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5562 if (seen_error ())
5563 return;
5565 if (gimple_conditional_context ())
5567 /* If we're in a conditional context, this is more complex. We only
5568 want to run the cleanup if we actually ran the initialization that
5569 necessitates it, but we want to run it after the end of the
5570 conditional context. So we wrap the try/finally around the
5571 condition and use a flag to determine whether or not to actually
5572 run the destructor. Thus
5574 test ? f(A()) : 0
5576 becomes (approximately)
5578 flag = 0;
5579 try {
5580 if (test) { A::A(temp); flag = 1; val = f(temp); }
5581 else { val = 0; }
5582 } finally {
5583 if (flag) A::~A(temp);
5587 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5588 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5589 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5591 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5592 gimplify_stmt (&cleanup, &cleanup_stmts);
5593 wce = gimple_build_wce (cleanup_stmts);
5595 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5596 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5597 gimplify_seq_add_stmt (pre_p, ftrue);
5599 /* Because of this manipulation, and the EH edges that jump
5600 threading cannot redirect, the temporary (VAR) will appear
5601 to be used uninitialized. Don't warn. */
5602 TREE_NO_WARNING (var) = 1;
5604 else
5606 gimplify_stmt (&cleanup, &cleanup_stmts);
5607 wce = gimple_build_wce (cleanup_stmts);
5608 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5609 gimplify_seq_add_stmt (pre_p, wce);
5613 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
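/* For illustration (a rough sketch): a TARGET_EXPR with slot D.1 and
   initializer INIT is lowered by registering D.1 as a temporary (or as a
   VLA decl), emitting the gimplified form of "D.1 = INIT" into PRE_P
   (or just INIT when it is a void expression that initializes the slot
   itself), pushing any TARGET_EXPR_CLEANUP as a cleanup for D.1, and
   replacing the whole expression with D.1.  */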
5615 static enum gimplify_status
5616 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5618 tree targ = *expr_p;
5619 tree temp = TARGET_EXPR_SLOT (targ);
5620 tree init = TARGET_EXPR_INITIAL (targ);
5621 enum gimplify_status ret;
5623 if (init)
5625 tree cleanup = NULL_TREE;
5627 /* TARGET_EXPR temps aren't part of the enclosing block, so add this one
5628 to the temps list. Also handle variable-length TARGET_EXPRs. */
5629 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5631 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5632 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5633 gimplify_vla_decl (temp, pre_p);
5635 else
5636 gimple_add_tmp_var (temp);
5638 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5639 expression is supposed to initialize the slot. */
5640 if (VOID_TYPE_P (TREE_TYPE (init)))
5641 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5642 else
5644 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5645 init = init_expr;
5646 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5647 init = NULL;
5648 ggc_free (init_expr);
5650 if (ret == GS_ERROR)
5652 /* PR c++/28266 Make sure this is expanded only once. */
5653 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5654 return GS_ERROR;
5656 if (init)
5657 gimplify_and_add (init, pre_p);
5659 /* If needed, push the cleanup for the temp. */
5660 if (TARGET_EXPR_CLEANUP (targ))
5662 if (CLEANUP_EH_ONLY (targ))
5663 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5664 CLEANUP_EH_ONLY (targ), pre_p);
5665 else
5666 cleanup = TARGET_EXPR_CLEANUP (targ);
5669 /* Add a clobber for the temporary going out of scope, like
5670 gimplify_bind_expr. */
5671 if (gimplify_ctxp->in_cleanup_point_expr
5672 && needs_to_live_in_memory (temp)
5673 && flag_stack_reuse == SR_ALL)
5675 tree clobber = build_constructor (TREE_TYPE (temp),
5676 NULL);
5677 TREE_THIS_VOLATILE (clobber) = true;
5678 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5679 if (cleanup)
5680 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5681 clobber);
5682 else
5683 cleanup = clobber;
5686 if (cleanup)
5687 gimple_push_cleanup (temp, cleanup, false, pre_p);
5689 /* Only expand this once. */
5690 TREE_OPERAND (targ, 3) = init;
5691 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5693 else
5694 /* We should have expanded this before. */
5695 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5697 *expr_p = temp;
5698 return GS_OK;
5701 /* Gimplification of expression trees. */
5703 /* Gimplify an expression which appears in statement context. The
5704 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5705 NULL, a new sequence is allocated.
5707 Return true if we actually added a statement to the queue. */
5709 bool
5710 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5712 gimple_seq_node last;
5714 last = gimple_seq_last (*seq_p);
5715 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5716 return last != gimple_seq_last (*seq_p);
5719 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
5720 parallels. If entries already exist, force them to be some flavor of private.
5721 If there is no enclosing parallel, do nothing. */
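/* For example (informally): the gimplified size of a variable-length
   array used inside a parallel typically lives in a temporary computed
   outside the region; this routine walks the enclosing parallel/task
   contexts and records that temporary as GOVD_FIRSTPRIVATE (downgrading
   an existing GOVD_SHARED entry) so each thread receives its value.  */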
5723 void
5724 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5726 splay_tree_node n;
5728 if (decl == NULL || !DECL_P (decl))
5729 return;
5733 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5734 if (n != NULL)
5736 if (n->value & GOVD_SHARED)
5737 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5738 else
5739 return;
5741 else if (ctx->region_type != ORT_WORKSHARE)
5742 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5744 ctx = ctx->outer_context;
5746 while (ctx);
5749 /* Similarly for each of the type sizes of TYPE. */
5751 static void
5752 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5754 if (type == NULL || type == error_mark_node)
5755 return;
5756 type = TYPE_MAIN_VARIANT (type);
5758 if (pointer_set_insert (ctx->privatized_types, type))
5759 return;
5761 switch (TREE_CODE (type))
5763 case INTEGER_TYPE:
5764 case ENUMERAL_TYPE:
5765 case BOOLEAN_TYPE:
5766 case REAL_TYPE:
5767 case FIXED_POINT_TYPE:
5768 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5769 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5770 break;
5772 case ARRAY_TYPE:
5773 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5774 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5775 break;
5777 case RECORD_TYPE:
5778 case UNION_TYPE:
5779 case QUAL_UNION_TYPE:
5781 tree field;
5782 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5783 if (TREE_CODE (field) == FIELD_DECL)
5785 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5786 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5789 break;
5791 case POINTER_TYPE:
5792 case REFERENCE_TYPE:
5793 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5794 break;
5796 default:
5797 break;
5800 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5801 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5802 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5805 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5807 static void
5808 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5810 splay_tree_node n;
5811 unsigned int nflags;
5812 tree t;
5814 if (error_operand_p (decl))
5815 return;
5817 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5818 there are constructors involved somewhere. */
5819 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5820 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5821 flags |= GOVD_SEEN;
5823 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5824 if (n != NULL)
5826 /* We shouldn't be re-adding the decl with the same data
5827 sharing class. */
5828 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5829 /* The only combination of data sharing classes we should see is
5830 FIRSTPRIVATE and LASTPRIVATE. */
5831 nflags = n->value | flags;
5832 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5833 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5834 n->value = nflags;
5835 return;
5838 /* When adding a variable-sized variable, we have to handle all sorts
5839 of additional bits of data: the pointer replacement variable, and
5840 the parameters of the type. */
5841 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5843 /* Add the pointer replacement variable as PRIVATE if the variable
5844 replacement is private, else FIRSTPRIVATE since we'll need the
5845 address of the original variable either for SHARED, or for the
5846 copy into or out of the context. */
5847 if (!(flags & GOVD_LOCAL))
5849 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5850 nflags |= flags & GOVD_SEEN;
5851 t = DECL_VALUE_EXPR (decl);
5852 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5853 t = TREE_OPERAND (t, 0);
5854 gcc_assert (DECL_P (t));
5855 omp_add_variable (ctx, t, nflags);
5858 /* Add all of the variable and type parameters (which should have
5859 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5860 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5861 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5862 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5864 /* The variable-sized variable itself is never SHARED, only some form
5865 of PRIVATE. The sharing would take place via the pointer variable
5866 which we remapped above. */
5867 if (flags & GOVD_SHARED)
5868 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5869 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5871 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5872 alloca statement we generate for the variable, so make sure it
5873 is available. This isn't automatically needed for the SHARED
5874 case, since we won't be allocating local storage then.
5875 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5876 in this case omp_notice_variable will be called later
5877 on when it is gimplified. */
5878 else if (! (flags & GOVD_LOCAL)
5879 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5880 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5882 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5884 gcc_assert ((flags & GOVD_LOCAL) == 0);
5885 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5887 /* Similar to the direct variable sized case above, we'll need the
5888 size of references being privatized. */
5889 if ((flags & GOVD_SHARED) == 0)
5891 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5892 if (TREE_CODE (t) != INTEGER_CST)
5893 omp_notice_variable (ctx, t, true);
5897 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5900 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5901 This just prints out diagnostics about threadprivate variable uses
5902 in untied tasks. If DECL2 is non-NULL, also suppress this diagnostic
5903 on that variable. */
5905 static bool
5906 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5907 tree decl2)
5909 splay_tree_node n;
5911 if (ctx->region_type != ORT_UNTIED_TASK)
5912 return false;
5913 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5914 if (n == NULL)
5916 error ("threadprivate variable %qE used in untied task",
5917 DECL_NAME (decl));
5918 error_at (ctx->location, "enclosing task");
5919 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5921 if (decl2)
5922 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5923 return false;
5926 /* Record the fact that DECL was used within the OpenMP context CTX.
5927 IN_CODE is true when real code uses DECL, and false when we should
5928 merely emit default(none) errors. Return true if DECL is going to
5929 be remapped and thus DECL shouldn't be gimplified into its
5930 DECL_VALUE_EXPR (if any). */
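/* For example: under default(shared), a variable referenced in the body
   but not named in any clause is entered here as GOVD_SHARED | GOVD_SEEN;
   under default(none) an error is reported first and the code then falls
   through to the shared case; default(private) and default(firstprivate)
   use GOVD_PRIVATE and GOVD_FIRSTPRIVATE respectively.  */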
5932 static bool
5933 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5935 splay_tree_node n;
5936 unsigned flags = in_code ? GOVD_SEEN : 0;
5937 bool ret = false, shared;
5939 if (error_operand_p (decl))
5940 return false;
5942 /* Threadprivate variables are predetermined. */
5943 if (is_global_var (decl))
5945 if (DECL_THREAD_LOCAL_P (decl))
5946 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5948 if (DECL_HAS_VALUE_EXPR_P (decl))
5950 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5952 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5953 return omp_notice_threadprivate_variable (ctx, decl, value);
5957 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5958 if (n == NULL)
5960 enum omp_clause_default_kind default_kind, kind;
5961 struct gimplify_omp_ctx *octx;
5963 if (ctx->region_type == ORT_WORKSHARE)
5964 goto do_outer;
5966 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5967 remapped firstprivate instead of shared. To some extent this is
5968 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5969 default_kind = ctx->default_kind;
5970 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5971 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5972 default_kind = kind;
5974 switch (default_kind)
5976 case OMP_CLAUSE_DEFAULT_NONE:
5977 error ("%qE not specified in enclosing parallel",
5978 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5979 if ((ctx->region_type & ORT_TASK) != 0)
5980 error_at (ctx->location, "enclosing task");
5981 else
5982 error_at (ctx->location, "enclosing parallel");
5983 /* FALLTHRU */
5984 case OMP_CLAUSE_DEFAULT_SHARED:
5985 flags |= GOVD_SHARED;
5986 break;
5987 case OMP_CLAUSE_DEFAULT_PRIVATE:
5988 flags |= GOVD_PRIVATE;
5989 break;
5990 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5991 flags |= GOVD_FIRSTPRIVATE;
5992 break;
5993 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5994 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5995 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5996 if (ctx->outer_context)
5997 omp_notice_variable (ctx->outer_context, decl, in_code);
5998 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
6000 splay_tree_node n2;
6002 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6003 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6005 flags |= GOVD_FIRSTPRIVATE;
6006 break;
6008 if ((octx->region_type & ORT_PARALLEL) != 0)
6009 break;
6011 if (flags & GOVD_FIRSTPRIVATE)
6012 break;
6013 if (octx == NULL
6014 && (TREE_CODE (decl) == PARM_DECL
6015 || (!is_global_var (decl)
6016 && DECL_CONTEXT (decl) == current_function_decl)))
6018 flags |= GOVD_FIRSTPRIVATE;
6019 break;
6021 flags |= GOVD_SHARED;
6022 break;
6023 default:
6024 gcc_unreachable ();
6027 if ((flags & GOVD_PRIVATE)
6028 && lang_hooks.decls.omp_private_outer_ref (decl))
6029 flags |= GOVD_PRIVATE_OUTER_REF;
6031 omp_add_variable (ctx, decl, flags);
6033 shared = (flags & GOVD_SHARED) != 0;
6034 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6035 goto do_outer;
6038 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6039 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6040 && DECL_SIZE (decl)
6041 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6043 splay_tree_node n2;
6044 tree t = DECL_VALUE_EXPR (decl);
6045 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6046 t = TREE_OPERAND (t, 0);
6047 gcc_assert (DECL_P (t));
6048 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6049 n2->value |= GOVD_SEEN;
6052 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6053 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6055 /* If nothing changed, there's nothing left to do. */
6056 if ((n->value & flags) == flags)
6057 return ret;
6058 flags |= n->value;
6059 n->value = flags;
6061 do_outer:
6062 /* If the variable is private in the current context, then we don't
6063 need to propagate anything to an outer context. */
6064 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6065 return ret;
6066 if (ctx->outer_context
6067 && omp_notice_variable (ctx->outer_context, decl, in_code))
6068 return true;
6069 return ret;
6072 /* Verify that DECL is private within CTX. If there's specific information
6073 to the contrary in the innermost scope, generate an error. */
6075 static bool
6076 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
6078 splay_tree_node n;
6080 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6081 if (n != NULL)
6083 if (n->value & GOVD_SHARED)
6085 if (ctx == gimplify_omp_ctxp)
6087 error ("iteration variable %qE should be private",
6088 DECL_NAME (decl));
6089 n->value = GOVD_PRIVATE;
6090 return true;
6092 else
6093 return false;
6095 else if ((n->value & GOVD_EXPLICIT) != 0
6096 && (ctx == gimplify_omp_ctxp
6097 || (ctx->region_type == ORT_COMBINED_PARALLEL
6098 && gimplify_omp_ctxp->outer_context == ctx)))
6100 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6101 error ("iteration variable %qE should not be firstprivate",
6102 DECL_NAME (decl));
6103 else if ((n->value & GOVD_REDUCTION) != 0)
6104 error ("iteration variable %qE should not be reduction",
6105 DECL_NAME (decl));
6107 return (ctx == gimplify_omp_ctxp
6108 || (ctx->region_type == ORT_COMBINED_PARALLEL
6109 && gimplify_omp_ctxp->outer_context == ctx));
6112 if (ctx->region_type != ORT_WORKSHARE)
6113 return false;
6114 else if (ctx->outer_context)
6115 return omp_is_private (ctx->outer_context, decl);
6116 return false;
6119 /* Return true if DECL is private within a parallel region
6120 that binds to the current construct's context, or appears in that
6121 region's REDUCTION clause. */
6123 static bool
6124 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
6126 splay_tree_node n;
6130 ctx = ctx->outer_context;
6131 if (ctx == NULL)
6132 return !(is_global_var (decl)
6133 /* References might be private, but might be shared too. */
6134 || lang_hooks.decls.omp_privatize_by_reference (decl));
6136 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6137 if (n != NULL)
6138 return (n->value & GOVD_SHARED) == 0;
6140 while (ctx->region_type == ORT_WORKSHARE);
6141 return false;
6144 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
6145 omp context and into the enclosing omp contexts. */
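/* For example (a rough sketch): for "#pragma omp parallel private(x)
   firstprivate(y)", a new context is created for the parallel, x is
   entered as GOVD_PRIVATE | GOVD_EXPLICIT, y as GOVD_FIRSTPRIVATE
   | GOVD_EXPLICIT, and y is additionally noticed in the enclosing
   context since its original value must be available there.  */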
6147 static void
6148 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6149 enum omp_region_type region_type)
6151 struct gimplify_omp_ctx *ctx, *outer_ctx;
6152 struct gimplify_ctx gctx;
6153 tree c;
6155 ctx = new_omp_context (region_type);
6156 outer_ctx = ctx->outer_context;
6158 while ((c = *list_p) != NULL)
6160 bool remove = false;
6161 bool notice_outer = true;
6162 const char *check_non_private = NULL;
6163 unsigned int flags;
6164 tree decl;
6166 switch (OMP_CLAUSE_CODE (c))
6168 case OMP_CLAUSE_PRIVATE:
6169 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6170 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6172 flags |= GOVD_PRIVATE_OUTER_REF;
6173 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6175 else
6176 notice_outer = false;
6177 goto do_add;
6178 case OMP_CLAUSE_SHARED:
6179 flags = GOVD_SHARED | GOVD_EXPLICIT;
6180 goto do_add;
6181 case OMP_CLAUSE_FIRSTPRIVATE:
6182 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6183 check_non_private = "firstprivate";
6184 goto do_add;
6185 case OMP_CLAUSE_LASTPRIVATE:
6186 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6187 check_non_private = "lastprivate";
6188 goto do_add;
6189 case OMP_CLAUSE_REDUCTION:
6190 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6191 check_non_private = "reduction";
6192 goto do_add;
6194 do_add:
6195 decl = OMP_CLAUSE_DECL (c);
6196 if (error_operand_p (decl))
6198 remove = true;
6199 break;
6201 omp_add_variable (ctx, decl, flags);
6202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6203 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6205 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6206 GOVD_LOCAL | GOVD_SEEN);
6207 gimplify_omp_ctxp = ctx;
6208 push_gimplify_context (&gctx);
6210 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6211 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6213 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6214 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6215 pop_gimplify_context
6216 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6217 push_gimplify_context (&gctx);
6218 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6219 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6220 pop_gimplify_context
6221 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6222 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6223 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6225 gimplify_omp_ctxp = outer_ctx;
6227 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6228 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6230 gimplify_omp_ctxp = ctx;
6231 push_gimplify_context (&gctx);
6232 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6234 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6235 NULL, NULL);
6236 TREE_SIDE_EFFECTS (bind) = 1;
6237 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6238 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6240 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6241 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6242 pop_gimplify_context
6243 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6244 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6246 gimplify_omp_ctxp = outer_ctx;
6248 if (notice_outer)
6249 goto do_notice;
6250 break;
6252 case OMP_CLAUSE_COPYIN:
6253 case OMP_CLAUSE_COPYPRIVATE:
6254 decl = OMP_CLAUSE_DECL (c);
6255 if (error_operand_p (decl))
6257 remove = true;
6258 break;
6260 do_notice:
6261 if (outer_ctx)
6262 omp_notice_variable (outer_ctx, decl, true);
6263 if (check_non_private
6264 && region_type == ORT_WORKSHARE
6265 && omp_check_private (ctx, decl))
6267 error ("%s variable %qE is private in outer context",
6268 check_non_private, DECL_NAME (decl));
6269 remove = true;
6271 break;
6273 case OMP_CLAUSE_FINAL:
6274 case OMP_CLAUSE_IF:
6275 OMP_CLAUSE_OPERAND (c, 0)
6276 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6277 /* Fall through. */
6279 case OMP_CLAUSE_SCHEDULE:
6280 case OMP_CLAUSE_NUM_THREADS:
6281 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6282 is_gimple_val, fb_rvalue) == GS_ERROR)
6283 remove = true;
6284 break;
6286 case OMP_CLAUSE_NOWAIT:
6287 case OMP_CLAUSE_ORDERED:
6288 case OMP_CLAUSE_UNTIED:
6289 case OMP_CLAUSE_COLLAPSE:
6290 case OMP_CLAUSE_MERGEABLE:
6291 break;
6293 case OMP_CLAUSE_DEFAULT:
6294 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6295 break;
6297 default:
6298 gcc_unreachable ();
6301 if (remove)
6302 *list_p = OMP_CLAUSE_CHAIN (c);
6303 else
6304 list_p = &OMP_CLAUSE_CHAIN (c);
6307 gimplify_omp_ctxp = ctx;
6310 /* For all variables that were not actually used within the context,
6311 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
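/* For example: a variable that was only implicitly seen (GOVD_SEEN
   without GOVD_EXPLICIT or GOVD_LOCAL) has a clause materialized for it
   here (OMP_CLAUSE_SHARED, OMP_CLAUSE_PRIVATE or OMP_CLAUSE_FIRSTPRIVATE,
   depending on its GOVD_* class) and prepended to the clause list,
   while gimplify_adjust_omp_clauses below drops explicit PRIVATE, SHARED
   and FIRSTPRIVATE clauses whose decl was never actually used.  */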
6313 static int
6314 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6316 tree *list_p = (tree *) data;
6317 tree decl = (tree) n->key;
6318 unsigned flags = n->value;
6319 enum omp_clause_code code;
6320 tree clause;
6321 bool private_debug;
6323 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6324 return 0;
6325 if ((flags & GOVD_SEEN) == 0)
6326 return 0;
6327 if (flags & GOVD_DEBUG_PRIVATE)
6329 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6330 private_debug = true;
6332 else
6333 private_debug
6334 = lang_hooks.decls.omp_private_debug_clause (decl,
6335 !!(flags & GOVD_SHARED));
6336 if (private_debug)
6337 code = OMP_CLAUSE_PRIVATE;
6338 else if (flags & GOVD_SHARED)
6340 if (is_global_var (decl))
6342 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6343 while (ctx != NULL)
6345 splay_tree_node on
6346 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6347 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6348 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
6349 break;
6350 ctx = ctx->outer_context;
6352 if (ctx == NULL)
6353 return 0;
6355 code = OMP_CLAUSE_SHARED;
6357 else if (flags & GOVD_PRIVATE)
6358 code = OMP_CLAUSE_PRIVATE;
6359 else if (flags & GOVD_FIRSTPRIVATE)
6360 code = OMP_CLAUSE_FIRSTPRIVATE;
6361 else
6362 gcc_unreachable ();
6364 clause = build_omp_clause (input_location, code);
6365 OMP_CLAUSE_DECL (clause) = decl;
6366 OMP_CLAUSE_CHAIN (clause) = *list_p;
6367 if (private_debug)
6368 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6369 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6370 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6371 *list_p = clause;
6372 lang_hooks.decls.omp_finish_clause (clause);
6374 return 0;
6377 static void
6378 gimplify_adjust_omp_clauses (tree *list_p)
6380 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6381 tree c, decl;
6383 while ((c = *list_p) != NULL)
6385 splay_tree_node n;
6386 bool remove = false;
6388 switch (OMP_CLAUSE_CODE (c))
6390 case OMP_CLAUSE_PRIVATE:
6391 case OMP_CLAUSE_SHARED:
6392 case OMP_CLAUSE_FIRSTPRIVATE:
6393 decl = OMP_CLAUSE_DECL (c);
6394 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6395 remove = !(n->value & GOVD_SEEN);
6396 if (! remove)
6398 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6399 if ((n->value & GOVD_DEBUG_PRIVATE)
6400 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6402 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6403 || ((n->value & GOVD_DATA_SHARE_CLASS)
6404 == GOVD_PRIVATE));
6405 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6406 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6409 break;
6411 case OMP_CLAUSE_LASTPRIVATE:
6412 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6413 accurately reflect the presence of a FIRSTPRIVATE clause. */
6414 decl = OMP_CLAUSE_DECL (c);
6415 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6416 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6417 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6418 break;
6420 case OMP_CLAUSE_REDUCTION:
6421 case OMP_CLAUSE_COPYIN:
6422 case OMP_CLAUSE_COPYPRIVATE:
6423 case OMP_CLAUSE_IF:
6424 case OMP_CLAUSE_NUM_THREADS:
6425 case OMP_CLAUSE_SCHEDULE:
6426 case OMP_CLAUSE_NOWAIT:
6427 case OMP_CLAUSE_ORDERED:
6428 case OMP_CLAUSE_DEFAULT:
6429 case OMP_CLAUSE_UNTIED:
6430 case OMP_CLAUSE_COLLAPSE:
6431 case OMP_CLAUSE_FINAL:
6432 case OMP_CLAUSE_MERGEABLE:
6433 break;
6435 default:
6436 gcc_unreachable ();
6439 if (remove)
6440 *list_p = OMP_CLAUSE_CHAIN (c);
6441 else
6442 list_p = &OMP_CLAUSE_CHAIN (c);
6445 /* Add in any implicit data sharing. */
6446 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6448 gimplify_omp_ctxp = ctx->outer_context;
6449 delete_omp_context (ctx);
6452 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6453 gimplification of the body, as well as scanning the body for used
6454 variables. We need to do this scan now, because variable-sized
6455 decls will be decomposed during gimplification. */
6457 static void
6458 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6460 tree expr = *expr_p;
6461 gimple g;
6462 gimple_seq body = NULL;
6463 struct gimplify_ctx gctx;
6465 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6466 OMP_PARALLEL_COMBINED (expr)
6467 ? ORT_COMBINED_PARALLEL
6468 : ORT_PARALLEL);
6470 push_gimplify_context (&gctx);
6472 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6473 if (gimple_code (g) == GIMPLE_BIND)
6474 pop_gimplify_context (g);
6475 else
6476 pop_gimplify_context (NULL);
6478 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6480 g = gimple_build_omp_parallel (body,
6481 OMP_PARALLEL_CLAUSES (expr),
6482 NULL_TREE, NULL_TREE);
6483 if (OMP_PARALLEL_COMBINED (expr))
6484 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6485 gimplify_seq_add_stmt (pre_p, g);
6486 *expr_p = NULL_TREE;
6489 /* Gimplify the contents of an OMP_TASK statement. This involves
6490 gimplification of the body, as well as scanning the body for used
6491 variables. We need to do this scan now, because variable-sized
6492 decls will be decomposed during gimplification. */
6494 static void
6495 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6497 tree expr = *expr_p;
6498 gimple g;
6499 gimple_seq body = NULL;
6500 struct gimplify_ctx gctx;
6502 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6503 find_omp_clause (OMP_TASK_CLAUSES (expr),
6504 OMP_CLAUSE_UNTIED)
6505 ? ORT_UNTIED_TASK : ORT_TASK);
6507 push_gimplify_context (&gctx);
6509 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6510 if (gimple_code (g) == GIMPLE_BIND)
6511 pop_gimplify_context (g);
6512 else
6513 pop_gimplify_context (NULL);
6515 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6517 g = gimple_build_omp_task (body,
6518 OMP_TASK_CLAUSES (expr),
6519 NULL_TREE, NULL_TREE,
6520 NULL_TREE, NULL_TREE, NULL_TREE);
6521 gimplify_seq_add_stmt (pre_p, g);
6522 *expr_p = NULL_TREE;
6525 /* Gimplify the gross structure of an OMP_FOR statement. */
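/* For example: the increment of the canonical loop form is normalized
   below, so "i++" or "++i" in OMP_FOR_INCR becomes the MODIFY_EXPR
   "i = i + 1" and "i--" becomes "i = i + -1"; if the iteration variable
   is not a gimple register, a fresh temporary takes its place and an
   assignment back to the original decl is added to the loop body.  */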
6527 static enum gimplify_status
6528 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6530 tree for_stmt, decl, var, t;
6531 enum gimplify_status ret = GS_ALL_DONE;
6532 enum gimplify_status tret;
6533 gimple gfor;
6534 gimple_seq for_body, for_pre_body;
6535 int i;
6537 for_stmt = *expr_p;
6539 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6540 ORT_WORKSHARE);
6542 /* Handle OMP_FOR_INIT. */
6543 for_pre_body = NULL;
6544 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6545 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6547 for_body = NULL;
6548 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6549 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6550 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6551 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6552 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6554 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6555 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6556 decl = TREE_OPERAND (t, 0);
6557 gcc_assert (DECL_P (decl));
6558 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6559 || POINTER_TYPE_P (TREE_TYPE (decl)));
6561 /* Make sure the iteration variable is private. */
6562 if (omp_is_private (gimplify_omp_ctxp, decl))
6563 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6564 else
6565 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6567 /* If DECL is not a gimple register, create a temporary variable to act
6568 as an iteration counter. This is valid, since DECL cannot be
6569 modified in the body of the loop. */
6570 if (!is_gimple_reg (decl))
6572 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6573 TREE_OPERAND (t, 0) = var;
6575 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6577 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6579 else
6580 var = decl;
6582 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6583 is_gimple_val, fb_rvalue);
6584 ret = MIN (ret, tret);
6585 if (ret == GS_ERROR)
6586 return ret;
6588 /* Handle OMP_FOR_COND. */
6589 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6590 gcc_assert (COMPARISON_CLASS_P (t));
6591 gcc_assert (TREE_OPERAND (t, 0) == decl);
6593 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6594 is_gimple_val, fb_rvalue);
6595 ret = MIN (ret, tret);
6597 /* Handle OMP_FOR_INCR. */
6598 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6599 switch (TREE_CODE (t))
6601 case PREINCREMENT_EXPR:
6602 case POSTINCREMENT_EXPR:
6603 t = build_int_cst (TREE_TYPE (decl), 1);
6604 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6605 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6606 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6607 break;
6609 case PREDECREMENT_EXPR:
6610 case POSTDECREMENT_EXPR:
6611 t = build_int_cst (TREE_TYPE (decl), -1);
6612 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6613 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6614 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6615 break;
6617 case MODIFY_EXPR:
6618 gcc_assert (TREE_OPERAND (t, 0) == decl);
6619 TREE_OPERAND (t, 0) = var;
6621 t = TREE_OPERAND (t, 1);
6622 switch (TREE_CODE (t))
6624 case PLUS_EXPR:
6625 if (TREE_OPERAND (t, 1) == decl)
6627 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6628 TREE_OPERAND (t, 0) = var;
6629 break;
6632 /* Fallthru. */
6633 case MINUS_EXPR:
6634 case POINTER_PLUS_EXPR:
6635 gcc_assert (TREE_OPERAND (t, 0) == decl);
6636 TREE_OPERAND (t, 0) = var;
6637 break;
6638 default:
6639 gcc_unreachable ();
6642 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6643 is_gimple_val, fb_rvalue);
6644 ret = MIN (ret, tret);
6645 break;
6647 default:
6648 gcc_unreachable ();
6651 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6653 tree c;
6654 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6655 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6656 && OMP_CLAUSE_DECL (c) == decl
6657 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6659 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6660 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6661 gcc_assert (TREE_OPERAND (t, 0) == var);
6662 t = TREE_OPERAND (t, 1);
6663 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6664 || TREE_CODE (t) == MINUS_EXPR
6665 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6666 gcc_assert (TREE_OPERAND (t, 0) == var);
6667 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6668 TREE_OPERAND (t, 1));
6669 gimplify_assign (decl, t,
6670 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6675 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6677 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6679 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6680 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6681 for_pre_body);
6683 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6685 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6686 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6687 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6688 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6689 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6690 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6691 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6692 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6695 gimplify_seq_add_stmt (pre_p, gfor);
6696 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6699 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6700 In particular, OMP_SECTIONS and OMP_SINGLE. */
6702 static void
6703 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6705 tree expr = *expr_p;
6706 gimple stmt;
6707 gimple_seq body = NULL;
6709 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6710 gimplify_and_add (OMP_BODY (expr), &body);
6711 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6713 if (TREE_CODE (expr) == OMP_SECTIONS)
6714 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6715 else if (TREE_CODE (expr) == OMP_SINGLE)
6716 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6717 else
6718 gcc_unreachable ();
6720 gimplify_seq_add_stmt (pre_p, stmt);
6723 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6724 stabilized the lhs of the atomic operation as *ADDR. Return true if
6725 EXPR is this stabilized form. */
6727 static bool
6728 goa_lhs_expr_p (tree expr, tree addr)
6730 /* Also include casts to other type variants. The C front end is fond
6731 of adding these for e.g. volatile variables. This is like
6732 STRIP_TYPE_NOPS but includes the main variant lookup. */
6733 STRIP_USELESS_TYPE_CONVERSION (expr);
6735 if (TREE_CODE (expr) == INDIRECT_REF)
6737 expr = TREE_OPERAND (expr, 0);
6738 while (expr != addr
6739 && (CONVERT_EXPR_P (expr)
6740 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6741 && TREE_CODE (expr) == TREE_CODE (addr)
6742 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6744 expr = TREE_OPERAND (expr, 0);
6745 addr = TREE_OPERAND (addr, 0);
6747 if (expr == addr)
6748 return true;
6749 return (TREE_CODE (addr) == ADDR_EXPR
6750 && TREE_CODE (expr) == ADDR_EXPR
6751 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6753 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6754 return true;
6755 return false;
6758 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6759 expression does not involve the lhs, evaluate it into a temporary.
6760 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6761 or -1 if an error was encountered. */
6763 static int
6764 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6765 tree lhs_var)
6767 tree expr = *expr_p;
6768 int saw_lhs;
6770 if (goa_lhs_expr_p (expr, lhs_addr))
6772 *expr_p = lhs_var;
6773 return 1;
6775 if (is_gimple_val (expr))
6776 return 0;
6778 saw_lhs = 0;
6779 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6781 case tcc_binary:
6782 case tcc_comparison:
6783 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6784 lhs_var);
6785 case tcc_unary:
6786 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6787 lhs_var);
6788 break;
6789 case tcc_expression:
6790 switch (TREE_CODE (expr))
6792 case TRUTH_ANDIF_EXPR:
6793 case TRUTH_ORIF_EXPR:
6794 case TRUTH_AND_EXPR:
6795 case TRUTH_OR_EXPR:
6796 case TRUTH_XOR_EXPR:
6797 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6798 lhs_addr, lhs_var);
6799 case TRUTH_NOT_EXPR:
6800 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6801 lhs_addr, lhs_var);
6802 break;
6803 case COMPOUND_EXPR:
6804 /* Break out any preevaluations from cp_build_modify_expr. */
6805 for (; TREE_CODE (expr) == COMPOUND_EXPR;
6806 expr = TREE_OPERAND (expr, 1))
6807 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6808 *expr_p = expr;
6809 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
6810 default:
6811 break;
6813 break;
6814 default:
6815 break;
6818 if (saw_lhs == 0)
6820 enum gimplify_status gs;
6821 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6822 if (gs != GS_ALL_DONE)
6823 saw_lhs = -1;
6826 return saw_lhs;
6829 /* Gimplify an OMP_ATOMIC statement. */
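/* For illustration (approximate): "#pragma omp atomic  x = x + n", with
   the lhs stabilized by the front end as *ADDR, is lowered to

	tmp = GIMPLE_OMP_ATOMIC_LOAD <*addr>
	GIMPLE_OMP_ATOMIC_STORE <tmp + n>

   where occurrences of the lhs in the rhs have been replaced by the
   temporary via goa_stabilize_expr; for the capture forms the whole
   expression is replaced by the loaded or stored value.  */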
6831 static enum gimplify_status
6832 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6834 tree addr = TREE_OPERAND (*expr_p, 0);
6835 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6836 ? NULL : TREE_OPERAND (*expr_p, 1);
6837 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6838 tree tmp_load;
6839 gimple loadstmt, storestmt;
6841 tmp_load = create_tmp_reg (type, NULL);
6842 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6843 return GS_ERROR;
6845 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6846 != GS_ALL_DONE)
6847 return GS_ERROR;
6849 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6850 gimplify_seq_add_stmt (pre_p, loadstmt);
6851 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6852 != GS_ALL_DONE)
6853 return GS_ERROR;
6855 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6856 rhs = tmp_load;
6857 storestmt = gimple_build_omp_atomic_store (rhs);
6858 gimplify_seq_add_stmt (pre_p, storestmt);
6859 switch (TREE_CODE (*expr_p))
6861 case OMP_ATOMIC_READ:
6862 case OMP_ATOMIC_CAPTURE_OLD:
6863 *expr_p = tmp_load;
6864 gimple_omp_atomic_set_need_value (loadstmt);
6865 break;
6866 case OMP_ATOMIC_CAPTURE_NEW:
6867 *expr_p = rhs;
6868 gimple_omp_atomic_set_need_value (storestmt);
6869 break;
6870 default:
6871 *expr_p = NULL;
6872 break;
6875 return GS_ALL_DONE;
6878 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6879 body, and adding some EH bits. */
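/* For illustration: "__transaction_atomic { BODY }" has its body wrapped
   in a BIND_EXPR if needed, gimplified, and emitted as a GIMPLE_TRANSACTION
   statement whose subcode records GTMA_IS_OUTER or GTMA_IS_RELAXED for the
   outer and relaxed variants.  */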
6881 static enum gimplify_status
6882 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6884 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6885 gimple g;
6886 gimple_seq body = NULL;
6887 struct gimplify_ctx gctx;
6888 int subcode = 0;
6890 /* Wrap the transaction body in a BIND_EXPR so we have a context
6891 in which to put decls for OpenMP. */
6892 if (TREE_CODE (tbody) != BIND_EXPR)
6894 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6895 TREE_SIDE_EFFECTS (bind) = 1;
6896 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6897 TRANSACTION_EXPR_BODY (expr) = bind;
6900 push_gimplify_context (&gctx);
6901 temp = voidify_wrapper_expr (*expr_p, NULL);
6903 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6904 pop_gimplify_context (g);
6906 g = gimple_build_transaction (body, NULL);
6907 if (TRANSACTION_EXPR_OUTER (expr))
6908 subcode = GTMA_IS_OUTER;
6909 else if (TRANSACTION_EXPR_RELAXED (expr))
6910 subcode = GTMA_IS_RELAXED;
6911 gimple_transaction_set_subcode (g, subcode);
6913 gimplify_seq_add_stmt (pre_p, g);
6915 if (temp)
6917 *expr_p = temp;
6918 return GS_OK;
6921 *expr_p = NULL_TREE;
6922 return GS_ALL_DONE;
6925 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
6926 expression produces a value to be used as an operand inside a GIMPLE
6927 statement, the value will be stored back in *EXPR_P. This value will
6928 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6929 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6930 emitted in PRE_P and POST_P.
6932 Additionally, this process may overwrite parts of the input
6933 expression during gimplification. Ideally, it should be
6934 possible to do non-destructive gimplification.
6936 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6937 the expression needs to evaluate to a value to be used as
6938 an operand in a GIMPLE statement, this value will be stored in
6939 *EXPR_P on exit. This happens when the caller specifies one
6940 of fb_lvalue or fb_rvalue fallback flags.
6942 PRE_P will contain the sequence of GIMPLE statements corresponding
6943 to the evaluation of EXPR and all the side-effects that must
6944 be executed before the main expression. On exit, the last
6945 statement of PRE_P is the core statement being gimplified. For
6946 instance, when gimplifying 'if (++a)' the last statement in
6947 PRE_P will be 'if (t.1)' where t.1 is the result of
6948 pre-incrementing 'a'.
6950 POST_P will contain the sequence of GIMPLE statements corresponding
6951 to the evaluation of all the side-effects that must be executed
6952 after the main expression. If this is NULL, the post
6953 side-effects are stored at the end of PRE_P.
6955 The reason why the output is split in two is to handle post
6956 side-effects explicitly. In some cases, an expression may have
6957 inner and outer post side-effects which need to be emitted in
6958 an order different from the one given by the recursive
6959 traversal. For instance, for the expression (*p--)++ the post
6960 side-effects of '--' must actually occur *after* the post
6961 side-effects of '++'. However, gimplification will first visit
6962 the inner expression, so if a separate POST sequence was not
6963 used, the resulting sequence would be:
6965 1 t.1 = *p
6966 2 p = p - 1
6967 3 t.2 = t.1 + 1
6968 4 *p = t.2
6970 However, the post-decrement operation in line #2 must not be
6971 evaluated until after the store to *p at line #4, so the
6972 correct sequence should be:
6974 1 t.1 = *p
6975 2 t.2 = t.1 + 1
6976 3 *p = t.2
6977 4 p = p - 1
6979 So, by specifying a separate post queue, it is possible
6980 to emit the post side-effects in the correct order.
6981 If POST_P is NULL, an internal queue will be used. Before
6982 returning to the caller, the sequence POST_P is appended to
6983 the main output sequence PRE_P.
6985 GIMPLE_TEST_F points to a function that takes a tree T and
6986 returns nonzero if T is in the GIMPLE form requested by the
6987 caller. The GIMPLE predicates are in gimple.c.
6989 FALLBACK tells the function what sort of a temporary we want if
6990 gimplification cannot produce an expression that complies with
6991 GIMPLE_TEST_F.
6993 fb_none means that no temporary should be generated
6994 fb_rvalue means that an rvalue is OK to generate
6995 fb_lvalue means that an lvalue is OK to generate
6996 fb_either means that either is OK, but an lvalue is preferable.
6997 fb_mayfail means that gimplification may fail (in which case
6998 GS_ERROR will be returned)
7000 The return value is either GS_ERROR or GS_ALL_DONE, since this
7001 function iterates until EXPR is completely gimplified or an error
7002 occurs. */
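/* For example, a typical caller that needs a GIMPLE rvalue does

	ret = gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

   after which T is a constant, a declaration or an SSA name, and any
   statements needed to compute it have been appended to *PRE_P.  */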
7004 enum gimplify_status
7005 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7006 bool (*gimple_test_f) (tree), fallback_t fallback)
7008 tree tmp;
7009 gimple_seq internal_pre = NULL;
7010 gimple_seq internal_post = NULL;
7011 tree save_expr;
7012 bool is_statement;
7013 location_t saved_location;
7014 enum gimplify_status ret;
7015 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7017 save_expr = *expr_p;
7018 if (save_expr == NULL_TREE)
7019 return GS_ALL_DONE;
7021 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7022 is_statement = gimple_test_f == is_gimple_stmt;
7023 if (is_statement)
7024 gcc_assert (pre_p);
7026 /* Consistency checks. */
7027 if (gimple_test_f == is_gimple_reg)
7028 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7029 else if (gimple_test_f == is_gimple_val
7030 || gimple_test_f == is_gimple_call_addr
7031 || gimple_test_f == is_gimple_condexpr
7032 || gimple_test_f == is_gimple_mem_rhs
7033 || gimple_test_f == is_gimple_mem_rhs_or_call
7034 || gimple_test_f == is_gimple_reg_rhs
7035 || gimple_test_f == is_gimple_reg_rhs_or_call
7036 || gimple_test_f == is_gimple_asm_val
7037 || gimple_test_f == is_gimple_mem_ref_addr)
7038 gcc_assert (fallback & fb_rvalue);
7039 else if (gimple_test_f == is_gimple_min_lval
7040 || gimple_test_f == is_gimple_lvalue)
7041 gcc_assert (fallback & fb_lvalue);
7042 else if (gimple_test_f == is_gimple_addressable)
7043 gcc_assert (fallback & fb_either);
7044 else if (gimple_test_f == is_gimple_stmt)
7045 gcc_assert (fallback == fb_none);
7046 else
7048 /* We should have recognized the GIMPLE_TEST_F predicate to
7049 know what kind of fallback to use in case a temporary is
7050 needed to hold the value or address of *EXPR_P. */
7051 gcc_unreachable ();
7054 /* We used to check the predicate here and return immediately if it
7055 succeeds. This is wrong; the design is for gimplification to be
7056 idempotent, and for the predicates to only test for valid forms, not
7057 whether they are fully simplified. */
7058 if (pre_p == NULL)
7059 pre_p = &internal_pre;
7061 if (post_p == NULL)
7062 post_p = &internal_post;
7064 /* Remember the last statements added to PRE_P and POST_P. Every
7065 new statement added by the gimplification helpers needs to be
7066 annotated with location information. To centralize the
7067 responsibility, we remember the last statement that had been
7068 added to both queues before gimplifying *EXPR_P. If
7069 gimplification produces new statements in PRE_P and POST_P, those
7070 statements will be annotated with the same location information
7071 as *EXPR_P. */
7072 pre_last_gsi = gsi_last (*pre_p);
7073 post_last_gsi = gsi_last (*post_p);
7075 saved_location = input_location;
7076 if (save_expr != error_mark_node
7077 && EXPR_HAS_LOCATION (*expr_p))
7078 input_location = EXPR_LOCATION (*expr_p);
7080 /* Loop over the specific gimplifiers until the toplevel node
7081 remains the same. */
7084 /* Strip away as many useless type conversions as possible
7085 at the toplevel. */
7086 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7088 /* Remember the expr. */
7089 save_expr = *expr_p;
7091 /* Die, die, die, my darling. */
7092 if (save_expr == error_mark_node
7093 || (TREE_TYPE (save_expr)
7094 && TREE_TYPE (save_expr) == error_mark_node))
7096 ret = GS_ERROR;
7097 break;
7100 /* Do any language-specific gimplification. */
7101 ret = ((enum gimplify_status)
7102 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7103 if (ret == GS_OK)
7105 if (*expr_p == NULL_TREE)
7106 break;
7107 if (*expr_p != save_expr)
7108 continue;
7110 else if (ret != GS_UNHANDLED)
7111 break;
7113 /* Make sure that all the cases set 'ret' appropriately. */
7114 ret = GS_UNHANDLED;
7115 switch (TREE_CODE (*expr_p))
7117 /* First deal with the special cases. */
7119 case POSTINCREMENT_EXPR:
7120 case POSTDECREMENT_EXPR:
7121 case PREINCREMENT_EXPR:
7122 case PREDECREMENT_EXPR:
7123 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7124 fallback != fb_none,
7125 TREE_TYPE (*expr_p));
7126 break;
7128 case ARRAY_REF:
7129 case ARRAY_RANGE_REF:
7130 case REALPART_EXPR:
7131 case IMAGPART_EXPR:
7132 case COMPONENT_REF:
7133 case VIEW_CONVERT_EXPR:
7134 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7135 fallback ? fallback : fb_rvalue);
7136 break;
7138 case COND_EXPR:
7139 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7141 /* C99 code may assign to an array in a structure value of a
7142 conditional expression, and this has undefined behavior
7143 only on execution, so create a temporary if an lvalue is
7144 required. */
7145 if (fallback == fb_lvalue)
7147 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7148 mark_addressable (*expr_p);
7149 ret = GS_OK;
7151 break;
7153 case CALL_EXPR:
7154 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7156 /* C99 code may assign to an array in a structure returned
7157 from a function, and this has undefined behavior only on
7158 execution, so create a temporary if an lvalue is
7159 required. */
7160 if (fallback == fb_lvalue)
7162 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7163 mark_addressable (*expr_p);
7164 ret = GS_OK;
7166 break;
7168 case TREE_LIST:
7169 gcc_unreachable ();
7171 case COMPOUND_EXPR:
7172 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7173 break;
7175 case COMPOUND_LITERAL_EXPR:
7176 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7177 gimple_test_f, fallback);
7178 break;
7180 case MODIFY_EXPR:
7181 case INIT_EXPR:
7182 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7183 fallback != fb_none);
7184 break;
7186 case TRUTH_ANDIF_EXPR:
7187 case TRUTH_ORIF_EXPR:
7189 /* Preserve the original type of the expression and the
7190 source location of the outer expression. */
7191 tree org_type = TREE_TYPE (*expr_p);
7192 *expr_p = gimple_boolify (*expr_p);
7193 *expr_p = build3_loc (input_location, COND_EXPR,
7194 org_type, *expr_p,
7195 fold_convert_loc
7196 (input_location,
7197 org_type, boolean_true_node),
7198 fold_convert_loc
7199 (input_location,
7200 org_type, boolean_false_node));
7201 ret = GS_OK;
7202 break;
7205 case TRUTH_NOT_EXPR:
7207 tree type = TREE_TYPE (*expr_p);
7208 /* The parsers are careful to generate TRUTH_NOT_EXPR
7209 only with operands that are always zero or one.
7210 We do not fold here but handle the only interesting case
7211 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7212 *expr_p = gimple_boolify (*expr_p);
7213 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7214 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7215 TREE_TYPE (*expr_p),
7216 TREE_OPERAND (*expr_p, 0));
7217 else
7218 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7219 TREE_TYPE (*expr_p),
7220 TREE_OPERAND (*expr_p, 0),
7221 build_int_cst (TREE_TYPE (*expr_p), 1));
7222 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7223 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7224 ret = GS_OK;
7225 break;
7228 case ADDR_EXPR:
7229 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7230 break;
7232 case VA_ARG_EXPR:
7233 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7234 break;
7236 CASE_CONVERT:
7237 if (IS_EMPTY_STMT (*expr_p))
7239 ret = GS_ALL_DONE;
7240 break;
7243 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7244 || fallback == fb_none)
7246 /* Just strip a conversion to void (or in void context) and
7247 try again. */
7248 *expr_p = TREE_OPERAND (*expr_p, 0);
7249 ret = GS_OK;
7250 break;
7253 ret = gimplify_conversion (expr_p);
7254 if (ret == GS_ERROR)
7255 break;
7256 if (*expr_p != save_expr)
7257 break;
7258 /* FALLTHRU */
7260 case FIX_TRUNC_EXPR:
7261 /* unary_expr: ... | '(' cast ')' val | ... */
7262 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7263 is_gimple_val, fb_rvalue);
7264 recalculate_side_effects (*expr_p);
7265 break;
7267 case INDIRECT_REF:
7269 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7270 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7271 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7273 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7274 if (*expr_p != save_expr)
7276 ret = GS_OK;
7277 break;
7280 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7281 is_gimple_reg, fb_rvalue);
7282 if (ret == GS_ERROR)
7283 break;
7285 recalculate_side_effects (*expr_p);
7286 *expr_p = fold_build2_loc (input_location, MEM_REF,
7287 TREE_TYPE (*expr_p),
7288 TREE_OPERAND (*expr_p, 0),
7289 build_int_cst (saved_ptr_type, 0));
7290 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7291 TREE_THIS_NOTRAP (*expr_p) = notrap;
7292 ret = GS_OK;
7293 break;
7296 /* We arrive here through the various re-gimplification paths. */
7297 case MEM_REF:
7298 /* First try re-folding the whole thing. */
7299 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7300 TREE_OPERAND (*expr_p, 0),
7301 TREE_OPERAND (*expr_p, 1));
7302 if (tmp)
7304 *expr_p = tmp;
7305 recalculate_side_effects (*expr_p);
7306 ret = GS_OK;
7307 break;
7309 /* Avoid re-gimplifying the address operand if it is already
7310 in suitable form. Re-gimplifying would mark the address
7311 operand addressable. Always gimplify when not in SSA form
7312 as we still may have to gimplify decls with value-exprs. */
7313 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7314 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7316 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7317 is_gimple_mem_ref_addr, fb_rvalue);
7318 if (ret == GS_ERROR)
7319 break;
7321 recalculate_side_effects (*expr_p);
7322 ret = GS_ALL_DONE;
7323 break;
7325 /* Constants need not be gimplified. */
7326 case INTEGER_CST:
7327 case REAL_CST:
7328 case FIXED_CST:
7329 case STRING_CST:
7330 case COMPLEX_CST:
7331 case VECTOR_CST:
7332 ret = GS_ALL_DONE;
7333 break;
7335 case CONST_DECL:
7336 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7337 CONST_DECL node. Otherwise the decl is replaceable by its
7338 value. */
7339 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7340 if (fallback & fb_lvalue)
7341 ret = GS_ALL_DONE;
7342 else
7344 *expr_p = DECL_INITIAL (*expr_p);
7345 ret = GS_OK;
7347 break;
7349 case DECL_EXPR:
7350 ret = gimplify_decl_expr (expr_p, pre_p);
7351 break;
7353 case BIND_EXPR:
7354 ret = gimplify_bind_expr (expr_p, pre_p);
7355 break;
7357 case LOOP_EXPR:
7358 ret = gimplify_loop_expr (expr_p, pre_p);
7359 break;
7361 case SWITCH_EXPR:
7362 ret = gimplify_switch_expr (expr_p, pre_p);
7363 break;
7365 case EXIT_EXPR:
7366 ret = gimplify_exit_expr (expr_p);
7367 break;
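/* E.g. a GNU C computed goto, goto *p;, has a destination that is not a
   LABEL_DECL, so the destination expression itself must be gimplified
   into a GIMPLE value before the GIMPLE_GOTO is built.  */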
7369 case GOTO_EXPR:
7370 /* If the target is not a LABEL_DECL, then it is a computed jump
7371 and the target needs to be gimplified. */
7372 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7374 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7375 NULL, is_gimple_val, fb_rvalue);
7376 if (ret == GS_ERROR)
7377 break;
7379 gimplify_seq_add_stmt (pre_p,
7380 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7381 ret = GS_ALL_DONE;
7382 break;
7384 case PREDICT_EXPR:
7385 gimplify_seq_add_stmt (pre_p,
7386 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7387 PREDICT_EXPR_OUTCOME (*expr_p)));
7388 ret = GS_ALL_DONE;
7389 break;
7391 case LABEL_EXPR:
7392 ret = GS_ALL_DONE;
7393 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7394 == current_function_decl);
7395 gimplify_seq_add_stmt (pre_p,
7396 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7397 break;
7399 case CASE_LABEL_EXPR:
7400 ret = gimplify_case_label_expr (expr_p, pre_p);
7401 break;
7403 case RETURN_EXPR:
7404 ret = gimplify_return_expr (*expr_p, pre_p);
7405 break;
7407 case CONSTRUCTOR:
7408 /* Don't reduce this in place; let gimplify_init_constructor work its
7409 magic.  But if we're just elaborating this for side effects, only
7410 gimplify the elements that have side effects.  */
7411 if (fallback == fb_none)
7413 unsigned HOST_WIDE_INT ix;
7414 tree val;
7415 tree temp = NULL_TREE;
7416 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7417 if (TREE_SIDE_EFFECTS (val))
7418 append_to_statement_list (val, &temp);
7420 *expr_p = temp;
7421 ret = temp ? GS_OK : GS_ALL_DONE;
7423 /* C99 code may assign to an array in a constructed
7424 structure or union, and this has undefined behavior only
7425 on execution, so create a temporary if an lvalue is
7426 required. */
7427 else if (fallback == fb_lvalue)
7429 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7430 mark_addressable (*expr_p);
7431 ret = GS_OK;
7433 else
7434 ret = GS_ALL_DONE;
7435 break;
7437 /* The following are special cases that are not handled by the
7438 original GIMPLE grammar. */
7440 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7441 eliminated. */
7442 case SAVE_EXPR:
7443 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7444 break;
7446 case BIT_FIELD_REF:
7447 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7448 post_p, is_gimple_lvalue, fb_either);
7449 recalculate_side_effects (*expr_p);
7450 break;
7452 case TARGET_MEM_REF:
7454 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7456 if (TMR_BASE (*expr_p))
7457 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7458 post_p, is_gimple_mem_ref_addr, fb_either);
7459 if (TMR_INDEX (*expr_p))
7460 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7461 post_p, is_gimple_val, fb_rvalue);
7462 if (TMR_INDEX2 (*expr_p))
7463 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7464 post_p, is_gimple_val, fb_rvalue);
7465 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7466 ret = MIN (r0, r1);
7468 break;
7470 case NON_LVALUE_EXPR:
7471 /* This should have been stripped above. */
7472 gcc_unreachable ();
7474 case ASM_EXPR:
7475 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7476 break;
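/* Cleanup constructs such as C++ destructors and try/catch bodies reach
   here as TRY_FINALLY_EXPR or TRY_CATCH_EXPR.  Both operands are
   gimplified below into a single GIMPLE_TRY tuple, unless the cleanup
   sequence turns out to be empty, in which case only the body is kept.  */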
7478 case TRY_FINALLY_EXPR:
7479 case TRY_CATCH_EXPR:
7481 gimple_seq eval, cleanup;
7482 gimple try_;
7484 /* Calls to destructors are generated automatically in FINALLY/CATCH
7485 blocks.  Their location should be UNKNOWN_LOCATION.  However,
7486 gimplify_call_expr will reset such call stmts to input_location
7487 if it finds the stmt's location is unknown.  To prevent this
7488 resetting for destructors, we set input_location to UNKNOWN_LOCATION here.
7489 Note that this only affects destructor calls in the FINALLY/CATCH
7490 block; input_location is automatically restored to its original
7491 value by the end of gimplify_expr.  */
7492 input_location = UNKNOWN_LOCATION;
7493 eval = cleanup = NULL;
7494 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7495 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7496 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7497 if (gimple_seq_empty_p (cleanup))
7499 gimple_seq_add_seq (pre_p, eval);
7500 ret = GS_ALL_DONE;
7501 break;
7503 try_ = gimple_build_try (eval, cleanup,
7504 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7505 ? GIMPLE_TRY_FINALLY
7506 : GIMPLE_TRY_CATCH);
7507 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7508 gimple_set_location (try_, saved_location);
7509 else
7510 gimple_set_location (try_, EXPR_LOCATION (save_expr));
7511 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7512 gimple_try_set_catch_is_cleanup (try_,
7513 TRY_CATCH_IS_CLEANUP (*expr_p));
7514 gimplify_seq_add_stmt (pre_p, try_);
7515 ret = GS_ALL_DONE;
7516 break;
7519 case CLEANUP_POINT_EXPR:
7520 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7521 break;
7523 case TARGET_EXPR:
7524 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7525 break;
7527 case CATCH_EXPR:
7529 gimple c;
7530 gimple_seq handler = NULL;
7531 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7532 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7533 gimplify_seq_add_stmt (pre_p, c);
7534 ret = GS_ALL_DONE;
7535 break;
7538 case EH_FILTER_EXPR:
7540 gimple ehf;
7541 gimple_seq failure = NULL;
7543 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7544 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7545 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7546 gimplify_seq_add_stmt (pre_p, ehf);
7547 ret = GS_ALL_DONE;
7548 break;
7551 case OBJ_TYPE_REF:
7553 enum gimplify_status r0, r1;
7554 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7555 post_p, is_gimple_val, fb_rvalue);
7556 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7557 post_p, is_gimple_val, fb_rvalue);
7558 TREE_SIDE_EFFECTS (*expr_p) = 0;
7559 ret = MIN (r0, r1);
7561 break;
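/* E.g. the GNU C label-as-value extension, void *p = &&lab;, takes the
   address of a label and ends up here with a bare LABEL_DECL.  */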
7563 case LABEL_DECL:
7564 /* We get here when taking the address of a label. We mark
7565 the label as "forced", meaning it can never be removed and
7566 is a potential target for any computed goto.  */
7567 FORCED_LABEL (*expr_p) = 1;
7568 ret = GS_ALL_DONE;
7569 break;
7571 case STATEMENT_LIST:
7572 ret = gimplify_statement_list (expr_p, pre_p);
7573 break;
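/* In a WITH_SIZE_EXPR, operand 0 is the value and operand 1 its
   (possibly variable) size; the size must end up as a GIMPLE value,
   while the value is gimplified against the caller's predicate.  */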
7575 case WITH_SIZE_EXPR:
7577 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7578 post_p == &internal_post ? NULL : post_p,
7579 gimple_test_f, fallback);
7580 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7581 is_gimple_val, fb_rvalue);
7582 ret = GS_ALL_DONE;
7584 break;
7586 case VAR_DECL:
7587 case PARM_DECL:
7588 ret = gimplify_var_or_parm_decl (expr_p);
7589 break;
7591 case RESULT_DECL:
7592 /* When within an OpenMP context, notice uses of variables. */
7593 if (gimplify_omp_ctxp)
7594 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7595 ret = GS_ALL_DONE;
7596 break;
7598 case SSA_NAME:
7599 /* Allow callbacks into the gimplifier during optimization. */
7600 ret = GS_ALL_DONE;
7601 break;
7603 case OMP_PARALLEL:
7604 gimplify_omp_parallel (expr_p, pre_p);
7605 ret = GS_ALL_DONE;
7606 break;
7608 case OMP_TASK:
7609 gimplify_omp_task (expr_p, pre_p);
7610 ret = GS_ALL_DONE;
7611 break;
7613 case OMP_FOR:
7614 ret = gimplify_omp_for (expr_p, pre_p);
7615 break;
7617 case OMP_SECTIONS:
7618 case OMP_SINGLE:
7619 gimplify_omp_workshare (expr_p, pre_p);
7620 ret = GS_ALL_DONE;
7621 break;
7623 case OMP_SECTION:
7624 case OMP_MASTER:
7625 case OMP_ORDERED:
7626 case OMP_CRITICAL:
7628 gimple_seq body = NULL;
7629 gimple g;
7631 gimplify_and_add (OMP_BODY (*expr_p), &body);
7632 switch (TREE_CODE (*expr_p))
7634 case OMP_SECTION:
7635 g = gimple_build_omp_section (body);
7636 break;
7637 case OMP_MASTER:
7638 g = gimple_build_omp_master (body);
7639 break;
7640 case OMP_ORDERED:
7641 g = gimple_build_omp_ordered (body);
7642 break;
7643 case OMP_CRITICAL:
7644 g = gimple_build_omp_critical (body,
7645 OMP_CRITICAL_NAME (*expr_p));
7646 break;
7647 default:
7648 gcc_unreachable ();
7650 gimplify_seq_add_stmt (pre_p, g);
7651 ret = GS_ALL_DONE;
7652 break;
7655 case OMP_ATOMIC:
7656 case OMP_ATOMIC_READ:
7657 case OMP_ATOMIC_CAPTURE_OLD:
7658 case OMP_ATOMIC_CAPTURE_NEW:
7659 ret = gimplify_omp_atomic (expr_p, pre_p);
7660 break;
7662 case TRANSACTION_EXPR:
7663 ret = gimplify_transaction (expr_p, pre_p);
7664 break;
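/* E.g. a non-short-circuiting TRUTH_AND_EXPR on boolified operands is
   canonicalized below to BIT_AND_EXPR; likewise TRUTH_OR_EXPR and
   TRUTH_XOR_EXPR become BIT_IOR_EXPR and BIT_XOR_EXPR.  */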
7666 case TRUTH_AND_EXPR:
7667 case TRUTH_OR_EXPR:
7668 case TRUTH_XOR_EXPR:
7670 tree orig_type = TREE_TYPE (*expr_p);
7671 tree new_type, xop0, xop1;
7672 *expr_p = gimple_boolify (*expr_p);
7673 new_type = TREE_TYPE (*expr_p);
7674 if (!useless_type_conversion_p (orig_type, new_type))
7676 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7677 ret = GS_OK;
7678 break;
7681 /* Boolified binary truth expressions are semantically equivalent
7682 to bitwise binary expressions. Canonicalize them to the
7683 bitwise variant. */
7684 switch (TREE_CODE (*expr_p))
7686 case TRUTH_AND_EXPR:
7687 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7688 break;
7689 case TRUTH_OR_EXPR:
7690 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7691 break;
7692 case TRUTH_XOR_EXPR:
7693 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7694 break;
7695 default:
7696 break;
7698 /* Now make sure that the operands have types compatible with the
7699 expression's new_type.  */
7700 xop0 = TREE_OPERAND (*expr_p, 0);
7701 xop1 = TREE_OPERAND (*expr_p, 1);
7702 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7703 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7704 new_type,
7705 xop0);
7706 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7707 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7708 new_type,
7709 xop1);
7710 /* Continue classified as tcc_binary. */
7711 goto expr_2;
7714 case FMA_EXPR:
7715 case VEC_COND_EXPR:
7716 case VEC_PERM_EXPR:
7717 /* Classified as tcc_expression. */
7718 goto expr_3;
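/* E.g. &a + 4, with &a invariant, folds below into the invariant
   address &MEM[&a, 4] (in the notation of the comment that follows).  */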
7720 case POINTER_PLUS_EXPR:
7722 enum gimplify_status r0, r1;
7723 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7724 post_p, is_gimple_val, fb_rvalue);
7725 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7726 post_p, is_gimple_val, fb_rvalue);
7727 recalculate_side_effects (*expr_p);
7728 ret = MIN (r0, r1);
7729 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7730 after gimplifying the operands; this is similar to folding
7731 all gimplified stmts on creation so that they are
7732 canonicalized, which is what we should eventually
7733 do anyway.  */
7734 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7735 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7737 *expr_p = build_fold_addr_expr_with_type_loc
7738 (input_location,
7739 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7740 TREE_OPERAND (*expr_p, 0),
7741 fold_convert (ptr_type_node,
7742 TREE_OPERAND (*expr_p, 1))),
7743 TREE_TYPE (*expr_p));
7744 ret = MIN (ret, GS_OK);
7746 break;
7749 default:
7750 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7752 case tcc_comparison:
7753 /* Handle comparisons of non-scalar-mode aggregates
7754 with a call to memcmp. It would be nice to only have to do
7755 this for variable-sized objects, but then we'd have to allow
7756 the same nest of reference nodes we allow for MODIFY_EXPR and
7757 that's too complex.
7759 Compare scalar mode aggregates as scalar mode values. Using
7760 memcmp for them would be very inefficient at best, and is
7761 plain wrong if bitfields are involved. */
7763 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7765 /* Vector comparisons need no boolification. */
7766 if (TREE_CODE (type) == VECTOR_TYPE)
7767 goto expr_2;
7768 else if (!AGGREGATE_TYPE_P (type))
7770 tree org_type = TREE_TYPE (*expr_p);
7771 *expr_p = gimple_boolify (*expr_p);
7772 if (!useless_type_conversion_p (org_type,
7773 TREE_TYPE (*expr_p)))
7775 *expr_p = fold_convert_loc (input_location,
7776 org_type, *expr_p);
7777 ret = GS_OK;
7779 else
7780 goto expr_2;
7782 else if (TYPE_MODE (type) != BLKmode)
7783 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7784 else
7785 ret = gimplify_variable_sized_compare (expr_p);
7787 break;
7790 /* If *EXPR_P does not need to be special-cased, handle it
7791 according to its class. */
7792 case tcc_unary:
7793 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7794 post_p, is_gimple_val, fb_rvalue);
7795 break;
7797 case tcc_binary:
7798 expr_2:
7800 enum gimplify_status r0, r1;
7802 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7803 post_p, is_gimple_val, fb_rvalue);
7804 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7805 post_p, is_gimple_val, fb_rvalue);
7807 ret = MIN (r0, r1);
7808 break;
7811 expr_3:
7813 enum gimplify_status r0, r1, r2;
7815 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7816 post_p, is_gimple_val, fb_rvalue);
7817 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7818 post_p, is_gimple_val, fb_rvalue);
7819 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7820 post_p, is_gimple_val, fb_rvalue);
7822 ret = MIN (MIN (r0, r1), r2);
7823 break;
7826 case tcc_declaration:
7827 case tcc_constant:
7828 ret = GS_ALL_DONE;
7829 goto dont_recalculate;
7831 default:
7832 gcc_unreachable ();
7835 recalculate_side_effects (*expr_p);
7837 dont_recalculate:
7838 break;
7841 gcc_assert (*expr_p || ret != GS_OK);
7843 while (ret == GS_OK);
7845 /* If we encountered an error_mark somewhere nested inside, either
7846 stub out the statement or propagate the error back out. */
7847 if (ret == GS_ERROR)
7849 if (is_statement)
7850 *expr_p = NULL;
7851 goto out;
7854 /* This was only valid as a return value from the langhook, which
7855 we handled. Make sure it doesn't escape from any other context. */
7856 gcc_assert (ret != GS_UNHANDLED);
7858 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7860 /* We aren't looking for a value, and we don't have a valid
7861 statement. If it doesn't have side-effects, throw it away. */
7862 if (!TREE_SIDE_EFFECTS (*expr_p))
7863 *expr_p = NULL;
7864 else if (!TREE_THIS_VOLATILE (*expr_p))
7866 /* This is probably a _REF that contains something nested that
7867 has side effects. Recurse through the operands to find it. */
7868 enum tree_code code = TREE_CODE (*expr_p);
7870 switch (code)
7872 case COMPONENT_REF:
7873 case REALPART_EXPR:
7874 case IMAGPART_EXPR:
7875 case VIEW_CONVERT_EXPR:
7876 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7877 gimple_test_f, fallback);
7878 break;
7880 case ARRAY_REF:
7881 case ARRAY_RANGE_REF:
7882 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7883 gimple_test_f, fallback);
7884 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7885 gimple_test_f, fallback);
7886 break;
7888 default:
7889 /* Anything else with side-effects must be converted to
7890 a valid statement before we get here. */
7891 gcc_unreachable ();
7894 *expr_p = NULL;
7896 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7897 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7899 /* Historically, the compiler has treated a bare reference
7900 to a non-BLKmode volatile lvalue as forcing a load. */
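/* E.g. a statement consisting of just `v;' where v is a volatile int
   is kept as a load of v into a fresh "vol" temporary below.  */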
7901 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7903 /* Normally, we do not want to create a temporary for a
7904 TREE_ADDRESSABLE type because such a type should not be
7905 copied by bitwise-assignment. However, we make an
7906 exception here, as all we are doing here is ensuring that
7907 we read the bytes that make up the type. We use
7908 create_tmp_var_raw because create_tmp_var will abort when
7909 given a TREE_ADDRESSABLE type. */
7910 tree tmp = create_tmp_var_raw (type, "vol");
7911 gimple_add_tmp_var (tmp);
7912 gimplify_assign (tmp, *expr_p, pre_p);
7913 *expr_p = NULL;
7915 else
7916 /* We can't do anything useful with a volatile reference to
7917 an incomplete type, so just throw it away. Likewise for
7918 a BLKmode type, since any implicit inner load should
7919 already have been turned into an explicit one by the
7920 gimplification process. */
7921 *expr_p = NULL;
7924 /* If we are gimplifying at the statement level, we're done. Tack
7925 everything together and return. */
7926 if (fallback == fb_none || is_statement)
7928 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7929 it out for GC to reclaim it. */
7930 *expr_p = NULL_TREE;
7932 if (!gimple_seq_empty_p (internal_pre)
7933 || !gimple_seq_empty_p (internal_post))
7935 gimplify_seq_add_seq (&internal_pre, internal_post);
7936 gimplify_seq_add_seq (pre_p, internal_pre);
7939 /* The result of gimplifying *EXPR_P is going to be the last few
7940 statements in *PRE_P and *POST_P. Add location information
7941 to all the statements that were added by the gimplification
7942 helpers. */
7943 if (!gimple_seq_empty_p (*pre_p))
7944 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7946 if (!gimple_seq_empty_p (*post_p))
7947 annotate_all_with_location_after (*post_p, post_last_gsi,
7948 input_location);
7950 goto out;
7953 #ifdef ENABLE_GIMPLE_CHECKING
7954 if (*expr_p)
7956 enum tree_code code = TREE_CODE (*expr_p);
7957 /* These expressions should already be in gimple IR form. */
7958 gcc_assert (code != MODIFY_EXPR
7959 && code != ASM_EXPR
7960 && code != BIND_EXPR
7961 && code != CATCH_EXPR
7962 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7963 && code != EH_FILTER_EXPR
7964 && code != GOTO_EXPR
7965 && code != LABEL_EXPR
7966 && code != LOOP_EXPR
7967 && code != SWITCH_EXPR
7968 && code != TRY_FINALLY_EXPR
7969 && code != OMP_CRITICAL
7970 && code != OMP_FOR
7971 && code != OMP_MASTER
7972 && code != OMP_ORDERED
7973 && code != OMP_PARALLEL
7974 && code != OMP_SECTIONS
7975 && code != OMP_SECTION
7976 && code != OMP_SINGLE);
7978 #endif
7980 /* Otherwise we're gimplifying a subexpression, so the resulting
7981 value is interesting. If it's a valid operand that matches
7982 GIMPLE_TEST_F, we're done. Unless we are handling some
7983 post-effects internally; if that's the case, we need to copy into
7984 a temporary before adding the post-effects to POST_P. */
7985 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7986 goto out;
7988 /* Otherwise, we need to create a new temporary for the gimplified
7989 expression. */
7991 /* We can't return an lvalue if we have an internal postqueue. The
7992 object the lvalue refers to would (probably) be modified by the
7993 postqueue; we need to copy the value out first, which means an
7994 rvalue. */
7995 if ((fallback & fb_lvalue)
7996 && gimple_seq_empty_p (internal_post)
7997 && is_gimple_addressable (*expr_p))
7999 /* An lvalue will do. Take the address of the expression, store it
8000 in a temporary, and replace the expression with a MEM_REF of
8001 that temporary.  */
8002 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8003 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8004 *expr_p = build_simple_mem_ref (tmp);
8006 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8008 /* An rvalue will do. Assign the gimplified expression into a
8009 new temporary TMP and replace the original expression with
8010 TMP. First, make sure that the expression has a type so that
8011 it can be assigned into a temporary. */
8012 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8013 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8015 else
8017 #ifdef ENABLE_GIMPLE_CHECKING
8018 if (!(fallback & fb_mayfail))
8020 fprintf (stderr, "gimplification failed:\n");
8021 print_generic_expr (stderr, *expr_p, 0);
8022 debug_tree (*expr_p);
8023 internal_error ("gimplification failed");
8025 #endif
8026 gcc_assert (fallback & fb_mayfail);
8028 /* If this is an asm statement, and the user asked for the
8029 impossible, don't die. Fail and let gimplify_asm_expr
8030 issue an error. */
8031 ret = GS_ERROR;
8032 goto out;
8035 /* Make sure the temporary matches our predicate. */
8036 gcc_assert ((*gimple_test_f) (*expr_p));
8038 if (!gimple_seq_empty_p (internal_post))
8040 annotate_all_with_location (internal_post, input_location);
8041 gimplify_seq_add_seq (pre_p, internal_post);
8044 out:
8045 input_location = saved_location;
8046 return ret;
8049 /* Look through TYPE for variable-sized objects and gimplify each such
8050 size that we find. Add to LIST_P any statements generated. */
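/* E.g. for a C99 variably modified type such as int[n], the array
   type's size expression refers to n; the statements that compute such
   sizes are what get added to LIST_P.  */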
8052 void
8053 gimplify_type_sizes (tree type, gimple_seq *list_p)
8055 tree field, t;
8057 if (type == NULL || type == error_mark_node)
8058 return;
8060 /* We first do the main variant, then copy into any other variants. */
8061 type = TYPE_MAIN_VARIANT (type);
8063 /* Avoid infinite recursion. */
8064 if (TYPE_SIZES_GIMPLIFIED (type))
8065 return;
8067 TYPE_SIZES_GIMPLIFIED (type) = 1;
8069 switch (TREE_CODE (type))
8071 case INTEGER_TYPE:
8072 case ENUMERAL_TYPE:
8073 case BOOLEAN_TYPE:
8074 case REAL_TYPE:
8075 case FIXED_POINT_TYPE:
8076 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8077 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8079 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8081 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8082 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8084 break;
8086 case ARRAY_TYPE:
8087 /* These types may not have declarations, so handle them here. */
8088 gimplify_type_sizes (TREE_TYPE (type), list_p);
8089 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8090 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
8091 with assigned stack slots, and at -O1+ with -g they should be
8092 tracked by VTA.  */
8093 if (!(TYPE_NAME (type)
8094 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8095 && DECL_IGNORED_P (TYPE_NAME (type)))
8096 && TYPE_DOMAIN (type)
8097 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8099 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8100 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8101 DECL_IGNORED_P (t) = 0;
8102 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8103 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8104 DECL_IGNORED_P (t) = 0;
8106 break;
8108 case RECORD_TYPE:
8109 case UNION_TYPE:
8110 case QUAL_UNION_TYPE:
8111 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8112 if (TREE_CODE (field) == FIELD_DECL)
8114 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8115 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8116 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8117 gimplify_type_sizes (TREE_TYPE (field), list_p);
8119 break;
8121 case POINTER_TYPE:
8122 case REFERENCE_TYPE:
8123 /* We used to recurse on the pointed-to type here, which turned out to
8124 be incorrect because its definition might refer to variables not
8125 yet initialized at this point if a forward declaration is involved.
8127 It was actually useful for anonymous pointed-to types to ensure
8128 that the sizes evaluation dominates every possible later use of the
8129 values. Restricting to such types here would be safe since there
8130 is no possible forward declaration around, but would introduce an
8131 undesirable middle-end semantic to anonymity. We then defer to
8132 front-ends the responsibility of ensuring that the sizes are
8133 evaluated both early and late enough, e.g. by attaching artificial
8134 type declarations to the tree. */
8135 break;
8137 default:
8138 break;
8141 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8142 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8144 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8146 TYPE_SIZE (t) = TYPE_SIZE (type);
8147 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8148 TYPE_SIZES_GIMPLIFIED (t) = 1;
8152 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8153 a size or position, has had all of its SAVE_EXPRs evaluated.
8154 We add any required statements to *STMT_P. */
8156 void
8157 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8159 tree expr = *expr_p;
8161 /* We don't do anything if the value isn't there, is constant, or contains
8162 a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
8163 a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
8164 will want to replace it with a new variable, but that would cause problems
8165 if this type is from outside the function, so it's OK to have it here.  */
8166 if (is_gimple_sizepos (expr))
8167 return;
8169 *expr_p = unshare_expr (expr);
8171 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8174 /* Gimplify the statements in the body of FNDECL and return a GIMPLE_BIND node
8175 containing the corresponding sequence of GIMPLE statements.  If DO_PARMS
8176 is true, also gimplify the parameters. */
8178 gimple
8179 gimplify_body (tree fndecl, bool do_parms)
8181 location_t saved_location = input_location;
8182 gimple_seq parm_stmts, seq;
8183 gimple outer_bind;
8184 struct gimplify_ctx gctx;
8185 struct cgraph_node *cgn;
8187 timevar_push (TV_TREE_GIMPLIFY);
8189 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8190 gimplification. */
8191 default_rtl_profile ();
8193 gcc_assert (gimplify_ctxp == NULL);
8194 push_gimplify_context (&gctx);
8196 /* Unshare most shared trees in the body and in that of any nested functions.
8197 It would seem we don't have to do this for nested functions because
8198 they are supposed to be output and then the outer function gimplified
8199 first, but the g++ front end doesn't always do it that way. */
8200 unshare_body (fndecl);
8201 unvisit_body (fndecl);
8203 cgn = cgraph_get_node (fndecl);
8204 if (cgn && cgn->origin)
8205 nonlocal_vlas = pointer_set_create ();
8207 /* Make sure input_location isn't set to something weird. */
8208 input_location = DECL_SOURCE_LOCATION (fndecl);
8210 /* Resolve callee-copies. This has to be done before processing
8211 the body so that DECL_VALUE_EXPR gets processed correctly. */
8212 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8214 /* Gimplify the function's body. */
8215 seq = NULL;
8216 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8217 outer_bind = gimple_seq_first_stmt (seq);
8218 if (!outer_bind)
8220 outer_bind = gimple_build_nop ();
8221 gimplify_seq_add_stmt (&seq, outer_bind);
8224 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8225 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8226 if (gimple_code (outer_bind) == GIMPLE_BIND
8227 && gimple_seq_first (seq) == gimple_seq_last (seq))
8229 else
8230 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8232 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8234 /* If we had callee-copies statements, insert them at the beginning
8235 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8236 if (!gimple_seq_empty_p (parm_stmts))
8238 tree parm;
8240 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8241 gimple_bind_set_body (outer_bind, parm_stmts);
8243 for (parm = DECL_ARGUMENTS (current_function_decl);
8244 parm; parm = DECL_CHAIN (parm))
8245 if (DECL_HAS_VALUE_EXPR_P (parm))
8247 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8248 DECL_IGNORED_P (parm) = 0;
8252 if (nonlocal_vlas)
8254 pointer_set_destroy (nonlocal_vlas);
8255 nonlocal_vlas = NULL;
8258 pop_gimplify_context (outer_bind);
8259 gcc_assert (gimplify_ctxp == NULL);
8261 #ifdef ENABLE_CHECKING
8262 if (!seen_error ())
8263 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8264 #endif
8266 timevar_pop (TV_TREE_GIMPLIFY);
8267 input_location = saved_location;
8269 return outer_bind;
8272 typedef char *char_p; /* For DEF_VEC_P. */
8274 /* Return whether we should exclude FNDECL from instrumentation. */
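/* E.g. with -finstrument-functions-exclude-function-list=foo, any
   function whose printable name contains the substring "foo" is
   excluded; the matching below is a plain strstr, not a glob.  */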
8276 bool
8277 flag_instrument_functions_exclude_p (tree fndecl)
8279 vec<char_p> *v;
8281 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8282 if (v && v->length () > 0)
8284 const char *name;
8285 int i;
8286 char *s;
8288 name = lang_hooks.decl_printable_name (fndecl, 0);
8289 FOR_EACH_VEC_ELT (*v, i, s)
8290 if (strstr (name, s) != NULL)
8291 return true;
8294 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8295 if (v && v->length () > 0)
8297 const char *name;
8298 int i;
8299 char *s;
8301 name = DECL_SOURCE_FILE (fndecl);
8302 FOR_EACH_VEC_ELT (*v, i, s)
8303 if (strstr (name, s) != NULL)
8304 return true;
8307 return false;
8310 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8311 node for the function we want to gimplify.
8313 The sequence of GIMPLE statements corresponding to the body of FNDECL
8314 is attached to FNDECL as its GIMPLE body; nothing is returned.  */
8316 void
8317 gimplify_function_tree (tree fndecl)
8319 tree parm, ret;
8320 gimple_seq seq;
8321 gimple bind;
8323 gcc_assert (!gimple_body (fndecl));
8325 if (DECL_STRUCT_FUNCTION (fndecl))
8326 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8327 else
8328 push_struct_function (fndecl);
8330 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8332 /* Preliminarily mark non-addressed complex variables as eligible
8333 for promotion to gimple registers. We'll transform their uses
8334 as we find them. */
8335 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8336 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8337 && !TREE_THIS_VOLATILE (parm)
8338 && !needs_to_live_in_memory (parm))
8339 DECL_GIMPLE_REG_P (parm) = 1;
8342 ret = DECL_RESULT (fndecl);
8343 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8344 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8345 && !needs_to_live_in_memory (ret))
8346 DECL_GIMPLE_REG_P (ret) = 1;
8348 bind = gimplify_body (fndecl, true);
8350 /* The tree body of the function is no longer needed; replace it
8351 with the new GIMPLE body. */
8352 seq = NULL;
8353 gimple_seq_add_stmt (&seq, bind);
8354 gimple_set_body (fndecl, seq);
8356 /* If we're instrumenting function entry/exit, then prepend the call to
8357 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8358 catch the exit hook. */
8359 /* ??? Add some way to ignore exceptions for this TFE. */
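/* Schematically, the instrumented body built below looks like:

     t = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn, t);
     try
       {
         <original body>
       }
     finally
       {
         t = __builtin_return_address (0);
         __cyg_profile_func_exit (this_fn, t);
       }

   where this_fn is the address of the current function.  */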
8360 if (flag_instrument_function_entry_exit
8361 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8362 && !flag_instrument_functions_exclude_p (fndecl))
8364 tree x;
8365 gimple new_bind;
8366 gimple tf;
8367 gimple_seq cleanup = NULL, body = NULL;
8368 tree tmp_var;
8369 gimple call;
8371 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8372 call = gimple_build_call (x, 1, integer_zero_node);
8373 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8374 gimple_call_set_lhs (call, tmp_var);
8375 gimplify_seq_add_stmt (&cleanup, call);
8376 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8377 call = gimple_build_call (x, 2,
8378 build_fold_addr_expr (current_function_decl),
8379 tmp_var);
8380 gimplify_seq_add_stmt (&cleanup, call);
8381 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8383 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8384 call = gimple_build_call (x, 1, integer_zero_node);
8385 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8386 gimple_call_set_lhs (call, tmp_var);
8387 gimplify_seq_add_stmt (&body, call);
8388 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8389 call = gimple_build_call (x, 2,
8390 build_fold_addr_expr (current_function_decl),
8391 tmp_var);
8392 gimplify_seq_add_stmt (&body, call);
8393 gimplify_seq_add_stmt (&body, tf);
8394 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8395 /* Clear the block for BIND, since it is no longer directly inside
8396 the function, but within a try block. */
8397 gimple_bind_set_block (bind, NULL);
8399 /* Replace the current function body with the body
8400 wrapped in the try/finally TF. */
8401 seq = NULL;
8402 gimple_seq_add_stmt (&seq, new_bind);
8403 gimple_set_body (fndecl, seq);
8406 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8407 cfun->curr_properties = PROP_gimple_any;
8409 pop_cfun ();
8412 /* Some transformations like inlining may invalidate the GIMPLE form
8413 for operands. This function traverses all the operands in STMT and
8414 gimplifies anything that is not a valid gimple operand. Any new
8415 GIMPLE statements are inserted before *GSI_P. */
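/* E.g. after inlining substitutes a parameter with a more complex
   expression, a call argument or assignment operand may no longer be a
   valid GIMPLE operand; each operand is re-gimplified below, and if the
   LHS is no longer a register and a simple result is required, the
   statement's result goes through a fresh temporary that is then
   assigned to the original LHS after *GSI_P.  */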
8417 void
8418 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
8420 size_t i, num_ops;
8421 tree lhs;
8422 gimple_seq pre = NULL;
8423 gimple post_stmt = NULL;
8424 struct gimplify_ctx gctx;
8426 push_gimplify_context (&gctx);
8427 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8429 switch (gimple_code (stmt))
8431 case GIMPLE_COND:
8432 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
8433 is_gimple_val, fb_rvalue);
8434 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
8435 is_gimple_val, fb_rvalue);
8436 break;
8437 case GIMPLE_SWITCH:
8438 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
8439 is_gimple_val, fb_rvalue);
8440 break;
8441 case GIMPLE_OMP_ATOMIC_LOAD:
8442 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
8443 is_gimple_val, fb_rvalue);
8444 break;
8445 case GIMPLE_ASM:
8447 size_t i, noutputs = gimple_asm_noutputs (stmt);
8448 const char *constraint, **oconstraints;
8449 bool allows_mem, allows_reg, is_inout;
8451 oconstraints
8452 = (const char **) alloca ((noutputs) * sizeof (const char *));
8453 for (i = 0; i < noutputs; i++)
8455 tree op = gimple_asm_output_op (stmt, i);
8456 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8457 oconstraints[i] = constraint;
8458 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8459 &allows_reg, &is_inout);
8460 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8461 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8462 fb_lvalue | fb_mayfail);
8464 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
8466 tree op = gimple_asm_input_op (stmt, i);
8467 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8468 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
8469 oconstraints, &allows_mem, &allows_reg);
8470 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
8471 allows_reg = 0;
8472 if (!allows_reg && allows_mem)
8473 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8474 is_gimple_lvalue, fb_lvalue | fb_mayfail);
8475 else
8476 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8477 is_gimple_asm_val, fb_rvalue);
8480 break;
8481 default:
8482 /* NOTE: We start gimplifying operands from last to first to
8483 make sure that side-effects on the RHS of calls, assignments
8484 and ASMs are executed before the LHS. The ordering is not
8485 important for other statements. */
8486 num_ops = gimple_num_ops (stmt);
8487 for (i = num_ops; i > 0; i--)
8489 tree op = gimple_op (stmt, i - 1);
8490 if (op == NULL_TREE)
8491 continue;
8492 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
8493 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
8494 else if (i == 2
8495 && is_gimple_assign (stmt)
8496 && num_ops == 2
8497 && get_gimple_rhs_class (gimple_expr_code (stmt))
8498 == GIMPLE_SINGLE_RHS)
8499 gimplify_expr (&op, &pre, NULL,
8500 rhs_predicate_for (gimple_assign_lhs (stmt)),
8501 fb_rvalue);
8502 else if (i == 2 && is_gimple_call (stmt))
8504 if (TREE_CODE (op) == FUNCTION_DECL)
8505 continue;
8506 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
8508 else
8509 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
8510 gimple_set_op (stmt, i - 1, op);
8513 lhs = gimple_get_lhs (stmt);
8514 /* If the LHS has changed in a way that requires a simple RHS,
8515 create a temporary.  */
8516 if (lhs && !is_gimple_reg (lhs))
8518 bool need_temp = false;
8520 if (is_gimple_assign (stmt)
8521 && num_ops == 2
8522 && get_gimple_rhs_class (gimple_expr_code (stmt))
8523 == GIMPLE_SINGLE_RHS)
8524 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
8525 rhs_predicate_for (gimple_assign_lhs (stmt)),
8526 fb_rvalue);
8527 else if (is_gimple_reg (lhs))
8529 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8531 if (is_gimple_call (stmt))
8533 i = gimple_call_flags (stmt);
8534 if ((i & ECF_LOOPING_CONST_OR_PURE)
8535 || !(i & (ECF_CONST | ECF_PURE)))
8536 need_temp = true;
8538 if (stmt_can_throw_internal (stmt))
8539 need_temp = true;
8542 else
8544 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8545 need_temp = true;
8546 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
8548 if (is_gimple_call (stmt))
8550 tree fndecl = gimple_call_fndecl (stmt);
8552 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
8553 && !(fndecl && DECL_RESULT (fndecl)
8554 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
8555 need_temp = true;
8557 else
8558 need_temp = true;
8561 if (need_temp)
8563 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
8564 if (gimple_in_ssa_p (cfun))
8565 temp = make_ssa_name (temp, NULL);
8566 gimple_set_lhs (stmt, temp);
8567 post_stmt = gimple_build_assign (lhs, temp);
8568 if (TREE_CODE (lhs) == SSA_NAME)
8569 SSA_NAME_DEF_STMT (lhs) = post_stmt;
8572 break;
8575 if (!gimple_seq_empty_p (pre))
8576 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
8577 if (post_stmt)
8578 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
8580 pop_gimplify_context (NULL);
8583 /* Expand EXPR to a list of GIMPLE statements STMTS.  GIMPLE_TEST_F specifies
8584 the predicate that will hold for the result. If VAR is not NULL, make the
8585 base variable of the final destination be VAR if suitable. */
8587 tree
8588 force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8589 gimple_predicate gimple_test_f, tree var)
8591 enum gimplify_status ret;
8592 struct gimplify_ctx gctx;
8594 *stmts = NULL;
8596 /* gimple_test_f might be more strict than is_gimple_val, make
8597 sure we pass both. Just checking gimple_test_f doesn't work
8598 because most gimple predicates do not work recursively. */
8599 if (is_gimple_val (expr)
8600 && (*gimple_test_f) (expr))
8601 return expr;
8603 push_gimplify_context (&gctx);
8604 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8605 gimplify_ctxp->allow_rhs_cond_expr = true;
8607 if (var)
8609 if (gimplify_ctxp->into_ssa
8610 && is_gimple_reg (var))
8611 var = make_ssa_name (var, NULL);
8612 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8615 if (TREE_CODE (expr) != MODIFY_EXPR
8616 && TREE_TYPE (expr) == void_type_node)
8618 gimplify_and_add (expr, stmts);
8619 expr = NULL_TREE;
8621 else
8623 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
8624 gcc_assert (ret != GS_ERROR);
8627 pop_gimplify_context (NULL);
8629 return expr;
8632 /* Expand EXPR to a list of GIMPLE statements STMTS.  If SIMPLE is true,
8633 force the result to be either ssa_name or an invariant, otherwise
8634 just force it to be a rhs expression. If VAR is not NULL, make the
8635 base variable of the final destination be VAR if suitable. */
8637 tree
8638 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8640 return force_gimple_operand_1 (expr, stmts,
8641 simple ? is_gimple_val : is_gimple_reg_rhs,
8642 var);
8645 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
8646 and VAR. If some statements are produced, emits them at GSI.
8647 If BEFORE is true, the statements are appended before GSI, otherwise
8648 they are appended after it. M specifies the way GSI moves after
8649 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
8651 tree
8652 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8653 gimple_predicate gimple_test_f,
8654 tree var, bool before,
8655 enum gsi_iterator_update m)
8657 gimple_seq stmts;
8659 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
8661 if (!gimple_seq_empty_p (stmts))
8663 if (before)
8664 gsi_insert_seq_before (gsi, stmts, m);
8665 else
8666 gsi_insert_seq_after (gsi, stmts, m);
8669 return expr;
8672 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
8673 If SIMPLE is true, force the result to be either ssa_name or an invariant,
8674 otherwise just force it to be a rhs expression. If some statements are
8675 produced, emits them at GSI. If BEFORE is true, the statements are
8676 appended before GSI, otherwise they are appended after it. M specifies
8677 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8678 are the usual values). */
8680 tree
8681 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8682 bool simple_p, tree var, bool before,
8683 enum gsi_iterator_update m)
8685 return force_gimple_operand_gsi_1 (gsi, expr,
8686 simple_p
8687 ? is_gimple_val : is_gimple_reg_rhs,
8688 var, before, m);
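/* A typical use from a pass (a schematic sketch, not part of this file's
   interfaces): with GSI positioned at a statement that needs a simple
   operand for EXPR,

     tree val = force_gimple_operand_gsi (&gsi, expr, true, NULL_TREE,
                                          true, GSI_SAME_STMT);

   gimplifies EXPR, inserts any new statements before the iterator and
   returns a value satisfying is_gimple_val.  */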
8692 #include "gt-gimplify.h"