/* Extraction provenance: gcc/gimplify.c, blob 9bc42e46a7d742f0960718a737859ce93d684137,
   as of 2013-10-11 (Marc Glisse <marc.glisse@inria.fr>).  */
/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-pretty-print.h"
#include "langhooks.h"
#include "tree-ssa.h"
#include "cgraph.h"
#include "timevar.h"
#include "hashtab.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "target.h"
#include "pointer-set.h"
#include "splay-tree.h"
#include "vec.h"

#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "tree-mudflap.h"
#include "expr.h"
#include "tm_p.h"
/* Per-variable flags kept in the splay tree of a gimplify_omp_ctx,
   describing how each decl is referenced/shared inside an OpenMP region.
   Individual bits so a variable may carry several properties at once.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,

  /* Mask of the bits that determine the data-sharing class of a decl.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kinds of OpenMP regions a gimplify_omp_ctx can represent.  Some values
   are bit-tested (e.g. ORT_TASK in new_omp_context), so keep the encoding.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
87 struct gimplify_omp_ctx
89 struct gimplify_omp_ctx *outer_context;
90 splay_tree variables;
91 struct pointer_set_t *privatized_types;
92 location_t location;
93 enum omp_clause_default_kind default_kind;
94 enum omp_region_type region_type;
95 bool combined_loop;
98 static struct gimplify_ctx *gimplify_ctxp;
99 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
102 /* Forward declaration. */
103 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
105 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
106 form and we don't do any syntax checking. */
108 void
109 mark_addressable (tree x)
111 while (handled_component_p (x))
112 x = TREE_OPERAND (x, 0);
113 if (TREE_CODE (x) == MEM_REF
114 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
115 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
116 if (TREE_CODE (x) != VAR_DECL
117 && TREE_CODE (x) != PARM_DECL
118 && TREE_CODE (x) != RESULT_DECL)
119 return;
120 TREE_ADDRESSABLE (x) = 1;
122 /* Also mark the artificial SSA_NAME that points to the partition of X. */
123 if (TREE_CODE (x) == VAR_DECL
124 && !DECL_EXTERNAL (x)
125 && !TREE_STATIC (x)
126 && cfun->gimple_df != NULL
127 && cfun->gimple_df->decls_to_pointers != NULL)
129 void *namep
130 = pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
131 if (namep)
132 TREE_ADDRESSABLE (*(tree *)namep) = 1;
136 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
137 *SEQ_P is NULL, a new sequence is allocated. This function is
138 similar to gimple_seq_add_stmt, but does not scan the operands.
139 During gimplification, we need to manipulate statement sequences
140 before the def/use vectors have been constructed. */
142 void
143 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
145 gimple_stmt_iterator si;
147 if (gs == NULL)
148 return;
150 si = gsi_last (*seq_p);
151 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
154 /* Shorter alias name for the above function for use in gimplify.c
155 only. */
157 static inline void
158 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
160 gimple_seq_add_stmt_without_update (seq_p, gs);
163 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
164 NULL, a new sequence is allocated. This function is
165 similar to gimple_seq_add_seq, but does not scan the operands.
166 During gimplification, we need to manipulate statement sequences
167 before the def/use vectors have been constructed. */
169 static void
170 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
172 gimple_stmt_iterator si;
174 if (src == NULL)
175 return;
177 si = gsi_last (*dst_p);
178 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
181 /* Set up a context for the gimplifier. */
183 void
184 push_gimplify_context (struct gimplify_ctx *c)
186 memset (c, '\0', sizeof (*c));
187 c->prev_context = gimplify_ctxp;
188 gimplify_ctxp = c;
191 /* Tear down a context for the gimplifier. If BODY is non-null, then
192 put the temporaries into the outer BIND_EXPR. Otherwise, put them
193 in the local_decls.
195 BODY is not a sequence, but the first tuple in a sequence. */
197 void
198 pop_gimplify_context (gimple body)
200 struct gimplify_ctx *c = gimplify_ctxp;
202 gcc_assert (c
203 && (!c->bind_expr_stack.exists ()
204 || c->bind_expr_stack.is_empty ()));
205 c->bind_expr_stack.release ();
206 gimplify_ctxp = c->prev_context;
208 if (body)
209 declare_vars (c->temps, body, false);
210 else
211 record_vars (c->temps);
213 if (c->temp_htab.is_created ())
214 c->temp_htab.dispose ();
217 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
219 static void
220 gimple_push_bind_expr (gimple gimple_bind)
222 gimplify_ctxp->bind_expr_stack.reserve (8);
223 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
226 /* Pop the first element off the stack of bindings. */
228 static void
229 gimple_pop_bind_expr (void)
231 gimplify_ctxp->bind_expr_stack.pop ();
234 /* Return the first element of the stack of bindings. */
236 gimple
237 gimple_current_bind_expr (void)
239 return gimplify_ctxp->bind_expr_stack.last ();
242 /* Return the stack of bindings created during gimplification. */
244 vec<gimple>
245 gimple_bind_expr_stack (void)
247 return gimplify_ctxp->bind_expr_stack;
250 /* Return true iff there is a COND_EXPR between us and the innermost
251 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
253 static bool
254 gimple_conditional_context (void)
256 return gimplify_ctxp->conditions > 0;
259 /* Note that we've entered a COND_EXPR. */
261 static void
262 gimple_push_condition (void)
264 #ifdef ENABLE_GIMPLE_CHECKING
265 if (gimplify_ctxp->conditions == 0)
266 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
267 #endif
268 ++(gimplify_ctxp->conditions);
271 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
272 now, add any conditional cleanups we've seen to the prequeue. */
274 static void
275 gimple_pop_condition (gimple_seq *pre_p)
277 int conds = --(gimplify_ctxp->conditions);
279 gcc_assert (conds >= 0);
280 if (conds == 0)
282 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
283 gimplify_ctxp->conditional_cleanups = NULL;
287 /* A stable comparison routine for use with splay trees and DECLs. */
289 static int
290 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
292 tree a = (tree) xa;
293 tree b = (tree) xb;
295 return DECL_UID (a) - DECL_UID (b);
298 /* Create a new omp construct that deals with variable remapping. */
300 static struct gimplify_omp_ctx *
301 new_omp_context (enum omp_region_type region_type)
303 struct gimplify_omp_ctx *c;
305 c = XCNEW (struct gimplify_omp_ctx);
306 c->outer_context = gimplify_omp_ctxp;
307 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
308 c->privatized_types = pointer_set_create ();
309 c->location = input_location;
310 c->region_type = region_type;
311 if ((region_type & ORT_TASK) == 0)
312 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
313 else
314 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
316 return c;
319 /* Destroy an omp construct that deals with variable remapping. */
321 static void
322 delete_omp_context (struct gimplify_omp_ctx *c)
324 splay_tree_delete (c->variables);
325 pointer_set_destroy (c->privatized_types);
326 XDELETE (c);
329 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
330 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
332 /* Both gimplify the statement T and append it to *SEQ_P. This function
333 behaves exactly as gimplify_stmt, but you don't have to pass T as a
334 reference. */
336 void
337 gimplify_and_add (tree t, gimple_seq *seq_p)
339 gimplify_stmt (&t, seq_p);
342 /* Gimplify statement T into sequence *SEQ_P, and return the first
343 tuple in the sequence of generated tuples for this statement.
344 Return NULL if gimplifying T produced no tuples. */
346 static gimple
347 gimplify_and_return_first (tree t, gimple_seq *seq_p)
349 gimple_stmt_iterator last = gsi_last (*seq_p);
351 gimplify_and_add (t, seq_p);
353 if (!gsi_end_p (last))
355 gsi_next (&last);
356 return gsi_stmt (last);
358 else
359 return gimple_seq_first_stmt (*seq_p);
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int i;

  /* Scan backwards from the end for a '.'; truncate at the first found.  */
  for (i = 2; i < 8 && len > i; i++)
    {
      if (name[len - i] == '.')
	{
	  name[len - i] = '\0';
	  break;
	}
    }
}
382 /* Create a new temporary name with PREFIX. Return an identifier. */
384 static GTY(()) unsigned int tmp_var_id_num;
386 tree
387 create_tmp_var_name (const char *prefix)
389 char *tmp_name;
391 if (prefix)
393 char *preftmp = ASTRDUP (prefix);
395 remove_suffix (preftmp, strlen (preftmp));
396 clean_symbol_name (preftmp);
398 prefix = preftmp;
401 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
402 return get_identifier (tmp_name);
405 /* Create a new temporary variable declaration of type TYPE.
406 Do NOT push it into the current binding. */
408 tree
409 create_tmp_var_raw (tree type, const char *prefix)
411 tree tmp_var;
413 tmp_var = build_decl (input_location,
414 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
415 type);
417 /* The variable was declared by the compiler. */
418 DECL_ARTIFICIAL (tmp_var) = 1;
419 /* And we don't want debug info for it. */
420 DECL_IGNORED_P (tmp_var) = 1;
422 /* Make the variable writable. */
423 TREE_READONLY (tmp_var) = 0;
425 DECL_EXTERNAL (tmp_var) = 0;
426 TREE_STATIC (tmp_var) = 0;
427 TREE_USED (tmp_var) = 1;
429 return tmp_var;
432 /* Create a new temporary variable declaration of type TYPE. DO push the
433 variable into the current binding. Further, assume that this is called
434 only from gimplification or optimization, at which point the creation of
435 certain types are bugs. */
437 tree
438 create_tmp_var (tree type, const char *prefix)
440 tree tmp_var;
442 /* We don't allow types that are addressable (meaning we can't make copies),
443 or incomplete. We also used to reject every variable size objects here,
444 but now support those for which a constant upper bound can be obtained.
445 The processing for variable sizes is performed in gimple_add_tmp_var,
446 point at which it really matters and possibly reached via paths not going
447 through this function, e.g. after direct calls to create_tmp_var_raw. */
448 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
450 tmp_var = create_tmp_var_raw (type, prefix);
451 gimple_add_tmp_var (tmp_var);
452 return tmp_var;
455 /* Create a new temporary variable declaration of type TYPE by calling
456 create_tmp_var and if TYPE is a vector or a complex number, mark the new
457 temporary as gimple register. */
459 tree
460 create_tmp_reg (tree type, const char *prefix)
462 tree tmp;
464 tmp = create_tmp_var (type, prefix);
465 if (TREE_CODE (type) == COMPLEX_TYPE
466 || TREE_CODE (type) == VECTOR_TYPE)
467 DECL_GIMPLE_REG_P (tmp) = 1;
469 return tmp;
472 /* Returns true iff T is a valid RHS for an assignment to a renamed
473 user -- or front-end generated artificial -- variable. */
475 static bool
476 is_gimple_reg_rhs (tree t)
478 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
481 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
482 LHS, or for a call argument. */
484 static bool
485 is_gimple_mem_rhs (tree t)
487 /* If we're dealing with a renamable type, either source or dest must be
488 a renamed variable. */
489 if (is_gimple_reg_type (TREE_TYPE (t)))
490 return is_gimple_val (t);
491 else
492 return is_gimple_val (t) || is_gimple_lvalue (t);
495 /* Return true if T is a CALL_EXPR or an expression that can be
496 assigned to a temporary. Note that this predicate should only be
497 used during gimplification. See the rationale for this in
498 gimplify_modify_expr. */
500 static bool
501 is_gimple_reg_rhs_or_call (tree t)
503 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
504 || TREE_CODE (t) == CALL_EXPR);
507 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
508 this predicate should only be used during gimplification. See the
509 rationale for this in gimplify_modify_expr. */
511 static bool
512 is_gimple_mem_rhs_or_call (tree t)
514 /* If we're dealing with a renamable type, either source or dest must be
515 a renamed variable. */
516 if (is_gimple_reg_type (TREE_TYPE (t)))
517 return is_gimple_val (t);
518 else
519 return (is_gimple_val (t) || is_gimple_lvalue (t)
520 || TREE_CODE (t) == CALL_EXPR);
523 /* Create a temporary with a name derived from VAL. Subroutine of
524 lookup_tmp_var; nobody else should call this function. */
526 static inline tree
527 create_tmp_from_val (tree val, bool is_formal)
529 /* Drop all qualifiers and address-space information from the value type. */
530 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
531 tree var = create_tmp_var (type, get_name (val));
532 if (is_formal
533 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
534 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
535 DECL_GIMPLE_REG_P (var) = 1;
536 return var;
539 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
540 an existing expression temporary. */
542 static tree
543 lookup_tmp_var (tree val, bool is_formal)
545 tree ret;
547 /* If not optimizing, never really reuse a temporary. local-alloc
548 won't allocate any variable that is used in more than one basic
549 block, which means it will go into memory, causing much extra
550 work in reload and final and poorer code generation, outweighing
551 the extra memory allocation here. */
552 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
553 ret = create_tmp_from_val (val, is_formal);
554 else
556 elt_t elt, *elt_p;
557 elt_t **slot;
559 elt.val = val;
560 if (!gimplify_ctxp->temp_htab.is_created ())
561 gimplify_ctxp->temp_htab.create (1000);
562 slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
563 if (*slot == NULL)
565 elt_p = XNEW (elt_t);
566 elt_p->val = val;
567 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
568 *slot = elt_p;
570 else
572 elt_p = *slot;
573 ret = elt_p->temp;
577 return ret;
580 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
582 static tree
583 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
584 bool is_formal)
586 tree t, mod;
588 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
589 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
590 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
591 fb_rvalue);
593 if (gimplify_ctxp->into_ssa
594 && is_gimple_reg_type (TREE_TYPE (val)))
595 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
596 else
597 t = lookup_tmp_var (val, is_formal);
599 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
601 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
603 /* gimplify_modify_expr might want to reduce this further. */
604 gimplify_and_add (mod, pre_p);
605 ggc_free (mod);
607 return t;
610 /* Return a formal temporary variable initialized with VAL. PRE_P is as
611 in gimplify_expr. Only use this function if:
613 1) The value of the unfactored expression represented by VAL will not
614 change between the initialization and use of the temporary, and
615 2) The temporary will not be otherwise modified.
617 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
618 and #2 means it is inappropriate for && temps.
620 For other cases, use get_initialized_tmp_var instead. */
622 tree
623 get_formal_tmp_var (tree val, gimple_seq *pre_p)
625 return internal_get_tmp_var (val, pre_p, NULL, true);
628 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
629 are as in gimplify_expr. */
631 tree
632 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
634 return internal_get_tmp_var (val, pre_p, post_p, false);
637 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
638 generate debug info for them; otherwise don't. */
640 void
641 declare_vars (tree vars, gimple scope, bool debug_info)
643 tree last = vars;
644 if (last)
646 tree temps, block;
648 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
650 temps = nreverse (last);
652 block = gimple_bind_block (scope);
653 gcc_assert (!block || TREE_CODE (block) == BLOCK);
654 if (!block || !debug_info)
656 DECL_CHAIN (last) = gimple_bind_vars (scope);
657 gimple_bind_set_vars (scope, temps);
659 else
661 /* We need to attach the nodes both to the BIND_EXPR and to its
662 associated BLOCK for debugging purposes. The key point here
663 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
664 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
665 if (BLOCK_VARS (block))
666 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
667 else
669 gimple_bind_set_vars (scope,
670 chainon (gimple_bind_vars (scope), temps));
671 BLOCK_VARS (block) = temps;
677 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
678 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
679 no such upper bound can be obtained. */
681 static void
682 force_constant_size (tree var)
684 /* The only attempt we make is by querying the maximum size of objects
685 of the variable's type. */
687 HOST_WIDE_INT max_size;
689 gcc_assert (TREE_CODE (var) == VAR_DECL);
691 max_size = max_int_size_in_bytes (TREE_TYPE (var));
693 gcc_assert (max_size >= 0);
695 DECL_SIZE_UNIT (var)
696 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
697 DECL_SIZE (var)
698 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
701 /* Push the temporary variable TMP into the current binding. */
703 void
704 gimple_add_tmp_var (tree tmp)
706 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
708 /* Later processing assumes that the object size is constant, which might
709 not be true at this point. Force the use of a constant upper bound in
710 this case. */
711 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
712 force_constant_size (tmp);
714 DECL_CONTEXT (tmp) = current_function_decl;
715 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
717 if (gimplify_ctxp)
719 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
720 gimplify_ctxp->temps = tmp;
722 /* Mark temporaries local within the nearest enclosing parallel. */
723 if (gimplify_omp_ctxp)
725 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
726 while (ctx
727 && (ctx->region_type == ORT_WORKSHARE
728 || ctx->region_type == ORT_SIMD))
729 ctx = ctx->outer_context;
730 if (ctx)
731 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
734 else if (cfun)
735 record_vars (tmp);
736 else
738 gimple_seq body_seq;
740 /* This case is for nested functions. We need to expose the locals
741 they create. */
742 body_seq = gimple_body (current_function_decl);
743 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
747 /* Determine whether to assign a location to the statement GS. */
749 static bool
750 should_carry_location_p (gimple gs)
752 /* Don't emit a line note for a label. We particularly don't want to
753 emit one for the break label, since it doesn't actually correspond
754 to the beginning of the loop/switch. */
755 if (gimple_code (gs) == GIMPLE_LABEL)
756 return false;
758 return true;
761 /* Return true if a location should not be emitted for this statement
762 by annotate_one_with_location. */
764 static inline bool
765 gimple_do_not_emit_location_p (gimple g)
767 return gimple_plf (g, GF_PLF_1);
770 /* Mark statement G so a location will not be emitted by
771 annotate_one_with_location. */
773 static inline void
774 gimple_set_do_not_emit_location (gimple g)
776 /* The PLF flags are initialized to 0 when a new tuple is created,
777 so no need to initialize it anywhere. */
778 gimple_set_plf (g, GF_PLF_1, true);
781 /* Set the location for gimple statement GS to LOCATION. */
783 static void
784 annotate_one_with_location (gimple gs, location_t location)
786 if (!gimple_has_location (gs)
787 && !gimple_do_not_emit_location_p (gs)
788 && should_carry_location_p (gs))
789 gimple_set_location (gs, location);
792 /* Set LOCATION for all the statements after iterator GSI in sequence
793 SEQ. If GSI is pointing to the end of the sequence, start with the
794 first statement in SEQ. */
796 static void
797 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
798 location_t location)
800 if (gsi_end_p (gsi))
801 gsi = gsi_start (seq);
802 else
803 gsi_next (&gsi);
805 for (; !gsi_end_p (gsi); gsi_next (&gsi))
806 annotate_one_with_location (gsi_stmt (gsi), location);
809 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
811 void
812 annotate_all_with_location (gimple_seq stmt_p, location_t location)
814 gimple_stmt_iterator i;
816 if (gimple_seq_empty_p (stmt_p))
817 return;
819 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
821 gimple gs = gsi_stmt (i);
822 annotate_one_with_location (gs, location);
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
862 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
863 These nodes model computations that must be done once. If we were to
864 unshare something like SAVE_EXPR(i++), the gimplification process would
865 create wrong code. However, if DATA is non-null, it must hold a pointer
866 set that is used to unshare the subtrees of these nodes. */
868 static tree
869 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
871 tree t = *tp;
872 enum tree_code code = TREE_CODE (t);
874 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
875 copy their subtrees if we can make sure to do it only once. */
876 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
878 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
880 else
881 *walk_subtrees = 0;
884 /* Stop at types, decls, constants like copy_tree_r. */
885 else if (TREE_CODE_CLASS (code) == tcc_type
886 || TREE_CODE_CLASS (code) == tcc_declaration
887 || TREE_CODE_CLASS (code) == tcc_constant
888 /* We can't do anything sensible with a BLOCK used as an
889 expression, but we also can't just die when we see it
890 because of non-expression uses. So we avert our eyes
891 and cross our fingers. Silly Java. */
892 || code == BLOCK)
893 *walk_subtrees = 0;
895 /* Cope with the statement expression extension. */
896 else if (code == STATEMENT_LIST)
899 /* Leave the bulk of the work to copy_tree_r itself. */
900 else
901 copy_tree_r (tp, walk_subtrees, NULL);
903 return NULL_TREE;
906 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
907 If *TP has been visited already, then *TP is deeply copied by calling
908 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
910 static tree
911 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
913 tree t = *tp;
914 enum tree_code code = TREE_CODE (t);
916 /* Skip types, decls, and constants. But we do want to look at their
917 types and the bounds of types. Mark them as visited so we properly
918 unmark their subtrees on the unmark pass. If we've already seen them,
919 don't look down further. */
920 if (TREE_CODE_CLASS (code) == tcc_type
921 || TREE_CODE_CLASS (code) == tcc_declaration
922 || TREE_CODE_CLASS (code) == tcc_constant)
924 if (TREE_VISITED (t))
925 *walk_subtrees = 0;
926 else
927 TREE_VISITED (t) = 1;
930 /* If this node has been visited already, unshare it and don't look
931 any deeper. */
932 else if (TREE_VISITED (t))
934 walk_tree (tp, mostly_copy_tree_r, data, NULL);
935 *walk_subtrees = 0;
938 /* Otherwise, mark the node as visited and keep looking. */
939 else
940 TREE_VISITED (t) = 1;
942 return NULL_TREE;
945 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
946 copy_if_shared_r callback unmodified. */
948 static inline void
949 copy_if_shared (tree *tp, void *data)
951 walk_tree (tp, copy_if_shared_r, data, NULL);
954 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
955 any nested functions. */
957 static void
958 unshare_body (tree fndecl)
960 struct cgraph_node *cgn = cgraph_get_node (fndecl);
961 /* If the language requires deep unsharing, we need a pointer set to make
962 sure we don't repeatedly unshare subtrees of unshareable nodes. */
963 struct pointer_set_t *visited
964 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
966 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
967 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
968 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
970 if (visited)
971 pointer_set_destroy (visited);
973 if (cgn)
974 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
975 unshare_body (cgn->symbol.decl);
978 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
979 Subtrees are walked until the first unvisited node is encountered. */
981 static tree
982 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
984 tree t = *tp;
986 /* If this node has been visited, unmark it and keep looking. */
987 if (TREE_VISITED (t))
988 TREE_VISITED (t) = 0;
990 /* Otherwise, don't look any deeper. */
991 else
992 *walk_subtrees = 0;
994 return NULL_TREE;
997 /* Unmark the visited trees rooted at *TP. */
999 static inline void
1000 unmark_visited (tree *tp)
1002 walk_tree (tp, unmark_visited_r, NULL, NULL);
1005 /* Likewise, but mark all trees as not visited. */
1007 static void
1008 unvisit_body (tree fndecl)
1010 struct cgraph_node *cgn = cgraph_get_node (fndecl);
1012 unmark_visited (&DECL_SAVED_TREE (fndecl));
1013 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1014 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1016 if (cgn)
1017 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1018 unvisit_body (cgn->symbol.decl);
1021 /* Unconditionally make an unshared copy of EXPR. This is used when using
1022 stored expressions which span multiple functions, such as BINFO_VTABLE,
1023 as the normal unsharing process can't tell that they're shared. */
1025 tree
1026 unshare_expr (tree expr)
1028 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1029 return expr;
1032 /* Worker for unshare_expr_without_location. */
1034 static tree
1035 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1037 if (EXPR_P (*tp))
1038 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1039 else
1040 *walk_subtrees = 0;
1041 return NULL_TREE;
1044 /* Similar to unshare_expr but also prune all expression locations
1045 from EXPR. */
1047 tree
1048 unshare_expr_without_location (tree expr)
1050 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1051 if (EXPR_P (expr))
1052 walk_tree (&expr, prune_expr_location, NULL, NULL);
1053 return expr;
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-null, is an INIT_EXPR/MODIFY_EXPR whose RHS is WRAPPER;
   it is pushed down onto the innermost value-producing expression.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper visited on the way
	 down is voidified and marked as having side effects.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		/* The value of a statement list is its last statement;
		   an empty list yields no value at all.  */
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      /* *P is the innermost value-producing expression; stop.  */
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment was supplied: capture the value in a fresh
	     temporary via an INIT_EXPR spliced in place of the value.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1153 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1154 a temporary through which they communicate. */
1156 static void
1157 build_stack_save_restore (gimple *save, gimple *restore)
1159 tree tmp_var;
1161 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1162 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1163 gimple_call_set_lhs (*save, tmp_var);
1165 *restore
1166 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1167 1, tmp_var);
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  Returns GS_OK with
   *EXPR_P set to the value temporary if the BIND_EXPR had a value,
   otherwise GS_ALL_DONE with *EXPR_P cleared.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;

  /* If the BIND_EXPR has a value, capture it in a temporary now.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Reset save_stack so we can tell whether the body itself (e.g. a VLA
     declaration) requests a stack save/restore; restored below.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* An empty CONSTRUCTOR marked volatile acts as a clobber.  */
	  tree clobber = build_constructor (TREE_TYPE (t),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
	}
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      /* Wrap the body in a try/finally running the cleanup (stack
	 restore and/or clobbers) on every exit path.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1291 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1292 GIMPLE value, it is assigned to a new temporary and the statement is
1293 re-written to return the temporary.
1295 PRE_P points to the sequence where side effects that must happen before
1296 STMT should be stored. */
1298 static enum gimplify_status
1299 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1301 gimple ret;
1302 tree ret_expr = TREE_OPERAND (stmt, 0);
1303 tree result_decl, result;
1305 if (ret_expr == error_mark_node)
1306 return GS_ERROR;
1308 if (!ret_expr
1309 || TREE_CODE (ret_expr) == RESULT_DECL
1310 || ret_expr == error_mark_node)
1312 gimple ret = gimple_build_return (ret_expr);
1313 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1314 gimplify_seq_add_stmt (pre_p, ret);
1315 return GS_ALL_DONE;
1318 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1319 result_decl = NULL_TREE;
1320 else
1322 result_decl = TREE_OPERAND (ret_expr, 0);
1324 /* See through a return by reference. */
1325 if (TREE_CODE (result_decl) == INDIRECT_REF)
1326 result_decl = TREE_OPERAND (result_decl, 0);
1328 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1329 || TREE_CODE (ret_expr) == INIT_EXPR)
1330 && TREE_CODE (result_decl) == RESULT_DECL);
1333 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1334 Recall that aggregate_value_p is FALSE for any aggregate type that is
1335 returned in registers. If we're returning values in registers, then
1336 we don't want to extend the lifetime of the RESULT_DECL, particularly
1337 across another call. In addition, for those aggregates for which
1338 hard_function_value generates a PARALLEL, we'll die during normal
1339 expansion of structure assignments; there's special code in expand_return
1340 to handle this case that does not exist in expand_expr. */
1341 if (!result_decl)
1342 result = NULL_TREE;
1343 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1345 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1347 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1348 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1349 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1350 should be effectively allocated by the caller, i.e. all calls to
1351 this function must be subject to the Return Slot Optimization. */
1352 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1353 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1355 result = result_decl;
1357 else if (gimplify_ctxp->return_temp)
1358 result = gimplify_ctxp->return_temp;
1359 else
1361 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1363 /* ??? With complex control flow (usually involving abnormal edges),
1364 we can wind up warning about an uninitialized value for this. Due
1365 to how this variable is constructed and initialized, this is never
1366 true. Give up and never warn. */
1367 TREE_NO_WARNING (result) = 1;
1369 gimplify_ctxp->return_temp = result;
1372 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1373 Then gimplify the whole thing. */
1374 if (result != result_decl)
1375 TREE_OPERAND (ret_expr, 0) = result;
1377 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1379 ret = gimple_build_return (result);
1380 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1381 gimplify_seq_add_stmt (pre_p, ret);
1383 return GS_ALL_DONE;
1386 /* Gimplify a variable-length array DECL. */
1388 static void
1389 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1391 /* This is a variable-sized decl. Simplify its size and mark it
1392 for deferred expansion. Note that mudflap depends on the format
1393 of the emitted code: see mx_register_decls(). */
1394 tree t, addr, ptr_type;
1396 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1397 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1399 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1400 if (DECL_HAS_VALUE_EXPR_P (decl))
1401 return;
1403 /* All occurrences of this decl in final gimplified code will be
1404 replaced by indirection. Setting DECL_VALUE_EXPR does two
1405 things: First, it lets the rest of the gimplifier know what
1406 replacement to use. Second, it lets the debug info know
1407 where to find the value. */
1408 ptr_type = build_pointer_type (TREE_TYPE (decl));
1409 addr = create_tmp_var (ptr_type, get_name (decl));
1410 DECL_IGNORED_P (addr) = 0;
1411 t = build_fold_indirect_ref (addr);
1412 TREE_THIS_NOTRAP (t) = 1;
1413 SET_DECL_VALUE_EXPR (decl, t);
1414 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1416 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1417 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1418 size_int (DECL_ALIGN (decl)));
1419 /* The call has been built for a variable-sized object. */
1420 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1421 t = fold_convert (ptr_type, t);
1422 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1424 gimplify_and_add (t, seq_p);
1426 /* Indicate that we need to restore the stack level when the
1427 enclosing BIND_EXPR is exited. */
1428 gimplify_ctxp->save_stack = true;
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; any generated statements
     go onto SEQ_P.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* A variable-sized decl, or a large one under generic stack
	 checking, gets deferred (alloca-style) allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR
		 statement; the temporary INIT_EXPR node can be
		 returned to the GC pool once gimplified.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1495 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1496 and replacing the LOOP_EXPR with goto, but if the loop contains an
1497 EXIT_EXPR, we need to append a label for it to jump to. */
1499 static enum gimplify_status
1500 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1502 tree saved_label = gimplify_ctxp->exit_label;
1503 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1505 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1507 gimplify_ctxp->exit_label = NULL_TREE;
1509 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1511 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1513 if (gimplify_ctxp->exit_label)
1514 gimplify_seq_add_stmt (pre_p,
1515 gimple_build_label (gimplify_ctxp->exit_label));
1517 gimplify_ctxp->exit_label = saved_label;
1519 *expr_p = NULL;
1520 return GS_ALL_DONE;
1523 /* Gimplify a statement list onto a sequence. These may be created either
1524 by an enlightened front-end, or by shortcut_cond_expr. */
1526 static enum gimplify_status
1527 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1529 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1531 tree_stmt_iterator i = tsi_start (*expr_p);
1533 while (!tsi_end_p (i))
1535 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1536 tsi_delink (&i);
1539 if (temp)
1541 *expr_p = temp;
1542 return GS_OK;
1545 return GS_ALL_DONE;
1548 /* Compare two case labels. Because the front end should already have
1549 made sure that case ranges do not overlap, it is enough to only compare
1550 the CASE_LOW values of each case label. */
1552 static int
1553 compare_case_labels (const void *p1, const void *p2)
1555 const_tree const case1 = *(const_tree const*)p1;
1556 const_tree const case2 = *(const_tree const*)p2;
1558 /* The 'default' case label always goes first. */
1559 if (!CASE_LOW (case1))
1560 return -1;
1561 else if (!CASE_LOW (case2))
1562 return 1;
1563 else
1564 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
/* Sort the case labels in LABEL_VEC in place in ascending order of
   CASE_LOW value, with any default label (null CASE_LOW) placed first
   (see compare_case_labels).  */

void
sort_case_labels (vec<tree> label_vec)
{
  label_vec.qsort (compare_case_labels);
}
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  /* Null CASE_LOW marks the default case; there can be only one.  */
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* I is now the number of labels kept in LABELS.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Check that consecutive labels leave no gap: each label's
		 CASE_LOW must be exactly one more than the previous
		 label's upper bound, computed in double-word (LOW/HIGH
		 word pair) arithmetic.  */
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);
		  if ((TREE_INT_CST_LOW (low) + 1
		       != TREE_INT_CST_LOW (high))
		      || (TREE_INT_CST_HIGH (low)
			  + (TREE_INT_CST_LOW (high) == 0)
			  != TREE_INT_CST_HIGH (high)))
		    break;
		}
	      if (i == len)
		{
		  /* Dense cover: reuse the first label as the default.  */
		  tree label = CASE_LABEL (labels[0]);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The switch condition must become a GIMPLE value first.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.
	 Gimplifying the body pushes each CASE_LABEL_EXPR it encounters
	 onto gimplify_ctxp->case_labels (see gimplify_case_label_expr).  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Canonicalize the labels and extract the default case.  */
      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  gimple new_default;

	  /* No default label: synthesize one at the end of the body so
	     the switch falls through past it.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1814 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1816 static enum gimplify_status
1817 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1819 struct gimplify_ctx *ctxp;
1820 gimple gimple_label;
1822 /* Invalid OpenMP programs can play Duff's Device type games with
1823 #pragma omp parallel. At least in the C front end, we don't
1824 detect such invalid branches until after gimplification. */
1825 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1826 if (ctxp->case_labels.exists ())
1827 break;
1829 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1830 ctxp->case_labels.safe_push (*expr_p);
1831 gimplify_seq_add_stmt (pre_p, gimple_label);
1833 return GS_ALL_DONE;
1836 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1837 if necessary. */
1839 tree
1840 build_and_jump (tree *label_p)
1842 if (label_p == NULL)
1843 /* If there's nowhere to jump, just fall through. */
1844 return NULL_TREE;
1846 if (*label_p == NULL_TREE)
1848 tree label = create_artificial_label (UNKNOWN_LOCATION);
1849 *label_p = label;
1852 return build1 (GOTO_EXPR, void_type_node, *label_p);
1855 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1856 This also involves building a label to jump to and communicating it to
1857 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1859 static enum gimplify_status
1860 gimplify_exit_expr (tree *expr_p)
1862 tree cond = TREE_OPERAND (*expr_p, 0);
1863 tree expr;
1865 expr = build_and_jump (&gimplify_ctxp->exit_label);
1866 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1867 *expr_p = expr;
1869 return GS_OK;
1872 /* A helper function to be called via walk_tree. Mark all labels under *TP
1873 as being forced. To be called for DECL_INITIAL of static variables. */
1875 tree
1876 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1878 if (TYPE_P (*tp))
1879 *walk_subtrees = 0;
1880 if (TREE_CODE (*tp) == LABEL_DECL)
1881 FORCED_LABEL (*tp) = 1;
1883 return NULL_TREE;
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral types, let get_unwidened pick the narrower mode a
     bit-field can be read in; otherwise use the field's own type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1937 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1938 to foo, embed that change in the ADDR_EXPR by converting
1939 T array[U];
1940 (T *)&array
1942 &array[L]
1943 where L is the lower bound. For simplicity, only do this for constant
1944 lower bound.
1945 The constraint is that the type of &array[L] is trivially convertible
1946 to T *. */
1948 static void
1949 canonicalize_addr_expr (tree *expr_p)
1951 tree expr = *expr_p;
1952 tree addr_expr = TREE_OPERAND (expr, 0);
1953 tree datype, ddatype, pddatype;
1955 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1956 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1957 || TREE_CODE (addr_expr) != ADDR_EXPR)
1958 return;
1960 /* The addr_expr type should be a pointer to an array. */
1961 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1962 if (TREE_CODE (datype) != ARRAY_TYPE)
1963 return;
1965 /* The pointer to element type shall be trivially convertible to
1966 the expression pointer type. */
1967 ddatype = TREE_TYPE (datype);
1968 pddatype = build_pointer_type (ddatype);
1969 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1970 pddatype))
1971 return;
1973 /* The lower bound and element sizes must be constant. */
1974 if (!TYPE_SIZE_UNIT (ddatype)
1975 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1976 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1977 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1978 return;
1980 /* All checks succeeded. Build a new node to merge the cast. */
1981 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1982 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1983 NULL_TREE, NULL_TREE);
1984 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1986 /* We can have stripped a required restrict qualifier above. */
1987 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1988 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1991 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1992 underneath as appropriate. */
1994 static enum gimplify_status
1995 gimplify_conversion (tree *expr_p)
1997 location_t loc = EXPR_LOCATION (*expr_p);
1998 gcc_assert (CONVERT_EXPR_P (*expr_p));
2000 /* Then strip away all but the outermost conversion. */
2001 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2003 /* And remove the outermost conversion if it's useless. */
2004 if (tree_ssa_useless_type_conversion (*expr_p))
2005 *expr_p = TREE_OPERAND (*expr_p, 0);
2007 /* If we still have a conversion at the toplevel,
2008 then canonicalize some constructs. */
2009 if (CONVERT_EXPR_P (*expr_p))
2011 tree sub = TREE_OPERAND (*expr_p, 0);
2013 /* If a NOP conversion is changing the type of a COMPONENT_REF
2014 expression, then canonicalize its type now in order to expose more
2015 redundant conversions. */
2016 if (TREE_CODE (sub) == COMPONENT_REF)
2017 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2019 /* If a NOP conversion is changing a pointer to array of foo
2020 to a pointer to foo, embed that change in the ADDR_EXPR. */
2021 else if (TREE_CODE (sub) == ADDR_EXPR)
2022 canonicalize_addr_expr (expr_p);
2025 /* If we have a conversion to a non-register type force the
2026 use of a VIEW_CONVERT_EXPR instead. */
2027 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2028 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2029 TREE_OPERAND (*expr_p, 0));
2031 return GS_OK;
/* Nonlocal VLAs seen in the current function.  Used by
   gimplify_var_or_parm_decl to avoid creating more than one debug copy
   per enclosing-function VLA; NOTE(review): presumably allocated and
   freed around gimplification of the function body — confirm against
   the gimplify_body code outside this view.  */
static struct pointer_set_t *nonlocal_vlas;
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip enclosing workshare/simd OpenMP regions; only add the
	     debug copy when not inside such a region.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* pointer_set_insert returns nonzero if DECL was already
	     present, so each VLA gets at most one debug copy.  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy into the outermost block of the current
		 function so debug info can find it.  */
	      block = DECL_INITIAL (current_function_decl);
	      DECL_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      /* Replace the decl with an unshared copy of its value expression
	 and ask the caller to gimplify the result again.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2105 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2106 node *EXPR_P.
2108 compound_lval
2109 : min_lval '[' val ']'
2110 | min_lval '.' ID
2111 | compound_lval '[' val ']'
2112 | compound_lval '.' ID
2114 This is not part of the original SIMPLE definition, which separates
2115 array and member references, but it seems reasonable to handle them
2116 together. Also, this way we don't run into problems with union
2117 aliasing; gcc requires that for accesses through a union to alias, the
2118 union reference must be explicit, which was not always the case when we
2119 were splitting up array and member refs.
2121 PRE_P points to the sequence where side effects that must happen before
2122 *EXPR_P should be stored.
2124 POST_P points to the sequence where side effects that must happen after
2125 *EXPR_P should be stored. */
2127 static enum gimplify_status
2128 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2129 fallback_t fallback)
2131 tree *p;
2132 vec<tree> expr_stack;
2133 enum gimplify_status ret = GS_ALL_DONE, tret;
2134 int i;
2135 location_t loc = EXPR_LOCATION (*expr_p);
2136 tree expr = *expr_p;
2138 /* Create a stack of the subexpressions so later we can walk them in
2139 order from inner to outer. */
2140 expr_stack.create (10);
2142 /* We can handle anything that get_inner_reference can deal with. */
2143 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2145 restart:
2146 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2147 if (TREE_CODE (*p) == INDIRECT_REF)
2148 *p = fold_indirect_ref_loc (loc, *p);
2150 if (handled_component_p (*p))
/* Handled components fall through and get pushed on EXPR_STACK below.  */
2152 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2153 additional COMPONENT_REFs. */
2154 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2155 && gimplify_var_or_parm_decl (p) == GS_OK)
2156 goto restart;
2157 else
2158 break;
2160 expr_stack.safe_push (*p);
2163 gcc_assert (expr_stack.length ());
2165 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2166 walked through and P points to the innermost expression.
2168 Java requires that we elaborated nodes in source order. That
2169 means we must gimplify the inner expression followed by each of
2170 the indices, in order. But we can't gimplify the inner
2171 expression until we deal with any variable bounds, sizes, or
2172 positions in order to deal with PLACEHOLDER_EXPRs.
2174 So we do this in three steps. First we deal with the annotations
2175 for any variables in the components, then we gimplify the base,
2176 then we gimplify any indices, from left to right. */
2177 for (i = expr_stack.length () - 1; i >= 0; i--)
2179 tree t = expr_stack[i];
2181 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2183 /* Gimplify the low bound and element type size and put them into
2184 the ARRAY_REF. If these values are set, they have already been
2185 gimplified. */
2186 if (TREE_OPERAND (t, 2) == NULL_TREE)
2188 tree low = unshare_expr (array_ref_low_bound (t));
2189 if (!is_gimple_min_invariant (low))
2191 TREE_OPERAND (t, 2) = low;
2192 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2193 post_p, is_gimple_reg,
2194 fb_rvalue);
2195 ret = MIN (ret, tret);
2198 else
2200 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2201 is_gimple_reg, fb_rvalue);
2202 ret = MIN (ret, tret);
2205 if (TREE_OPERAND (t, 3) == NULL_TREE)
2207 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2208 tree elmt_size = unshare_expr (array_ref_element_size (t));
2209 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2211 /* Divide the element size by the alignment of the element
2212 type (above). */
2213 elmt_size
2214 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2216 if (!is_gimple_min_invariant (elmt_size))
2218 TREE_OPERAND (t, 3) = elmt_size;
2219 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2220 post_p, is_gimple_reg,
2221 fb_rvalue);
2222 ret = MIN (ret, tret);
2225 else
2227 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2228 is_gimple_reg, fb_rvalue);
2229 ret = MIN (ret, tret);
2232 else if (TREE_CODE (t) == COMPONENT_REF)
2234 /* Set the field offset into T and gimplify it. */
2235 if (TREE_OPERAND (t, 2) == NULL_TREE)
2237 tree offset = unshare_expr (component_ref_field_offset (t));
2238 tree field = TREE_OPERAND (t, 1);
2239 tree factor
2240 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2242 /* Divide the offset by its alignment. */
2243 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2245 if (!is_gimple_min_invariant (offset))
2247 TREE_OPERAND (t, 2) = offset;
2248 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2249 post_p, is_gimple_reg,
2250 fb_rvalue);
2251 ret = MIN (ret, tret);
2254 else
2256 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2257 is_gimple_reg, fb_rvalue);
2258 ret = MIN (ret, tret);
2263 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2264 so as to match the min_lval predicate. Failure to do so may result
2265 in the creation of large aggregate temporaries. */
2266 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2267 fallback | fb_lvalue);
2268 ret = MIN (ret, tret);
2270 /* And finally, the indices and operands of ARRAY_REF. During this
2271 loop we also remove any useless conversions. */
2272 for (; expr_stack.length () > 0; )
2274 tree t = expr_stack.pop ();
2276 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2278 /* Gimplify the dimension. */
2279 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2281 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2282 is_gimple_val, fb_rvalue);
2283 ret = MIN (ret, tret);
/* Gimplification may have made conversions on the base operand useless;
   strip them now.  */
2287 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2289 /* The innermost expression P may have originally had
2290 TREE_SIDE_EFFECTS set which would have caused all the outer
2291 expressions in *EXPR_P leading to P to also have had
2292 TREE_SIDE_EFFECTS set. */
2293 recalculate_side_effects (t);
2296 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2297 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2299 canonicalize_component_ref (expr_p);
2302 expr_stack.release ();
/* Invariant: either the expression tree is unchanged, or some
   gimplification progress (other than GS_ALL_DONE) was reported.  */
2304 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2306 return ret;
2309 /* Gimplify the self modifying expression pointed to by EXPR_P
2310 (++, --, +=, -=).
2312 PRE_P points to the list where side effects that must happen before
2313 *EXPR_P should be stored.
2315 POST_P points to the list where side effects that must happen after
2316 *EXPR_P should be stored.
2318 WANT_VALUE is nonzero iff we want to use the value of this expression
2319 in another expression.
2321 ARITH_TYPE is the type the computation should be performed in. */
2323 enum gimplify_status
2324 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2325 bool want_value, tree arith_type)
2327 enum tree_code code;
2328 tree lhs, lvalue, rhs, t1;
2329 gimple_seq post = NULL, *orig_post_p = post_p;
2330 bool postfix;
2331 enum tree_code arith_code;
2332 enum gimplify_status ret;
2333 location_t loc = EXPR_LOCATION (*expr_p);
2335 code = TREE_CODE (*expr_p);
2337 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2338 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2340 /* Prefix or postfix? */
2341 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2342 /* Faster to treat as prefix if result is not used. */
2343 postfix = want_value;
2344 else
2345 postfix = false;
2347 /* For postfix, make sure the inner expression's post side effects
2348 are executed after side effects from this expression. */
2349 if (postfix)
2350 post_p = &post;
2352 /* Add or subtract? */
2353 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2354 arith_code = PLUS_EXPR;
2355 else
2356 arith_code = MINUS_EXPR;
2358 /* Gimplify the LHS into a GIMPLE lvalue. */
2359 lvalue = TREE_OPERAND (*expr_p, 0);
2360 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2361 if (ret == GS_ERROR)
2362 return ret;
2364 /* Extract the operands to the arithmetic operation. */
2365 lhs = lvalue;
2366 rhs = TREE_OPERAND (*expr_p, 1);
2368 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2369 that as the result value and in the postqueue operation. */
2370 if (postfix)
2372 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2373 if (ret == GS_ERROR)
2374 return ret;
2376 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2379 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2380 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2382 rhs = convert_to_ptrofftype_loc (loc, rhs);
/* Pointer decrement is expressed as addition of the negated offset,
   since there is no POINTER_MINUS_EXPR.  */
2383 if (arith_code == MINUS_EXPR)
2384 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2385 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2387 else
2388 t1 = fold_convert (TREE_TYPE (*expr_p),
2389 fold_build2 (arith_code, arith_type,
2390 fold_convert (arith_type, lhs),
2391 fold_convert (arith_type, rhs)));
2393 if (postfix)
/* Postfix: emit the update, append the saved inner post-queue after it,
   and yield the previously captured value as the result.  */
2395 gimplify_assign (lvalue, t1, pre_p);
2396 gimplify_seq_add_seq (orig_post_p, post);
2397 *expr_p = lhs;
2398 return GS_ALL_DONE;
2400 else
/* Prefix (or a postfix whose value is unused): the assignment itself
   is the result and still needs further gimplification.  */
2402 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2403 return GS_OK;
2407 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2409 static void
2410 maybe_with_size_expr (tree *expr_p)
2412 tree expr = *expr_p;
2413 tree type = TREE_TYPE (expr);
2414 tree size;
2416 /* If we've already wrapped this or the type is error_mark_node, we can't do
2417 anything. */
2418 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2419 || type == error_mark_node)
2420 return;
2422 /* If the size isn't known or is a constant, we have nothing to do. */
2423 size = TYPE_SIZE_UNIT (type);
2424 if (!size || TREE_CODE (size) == INTEGER_CST)
2425 return;
2427 /* Otherwise, make a WITH_SIZE_EXPR. */
2428 size = unshare_expr (size);
2429 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2430 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2433 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2434 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2435 the CALL_EXPR. */
2437 static enum gimplify_status
2438 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2440 bool (*test) (tree);
2441 fallback_t fb;
2443 /* In general, we allow lvalues for function arguments to avoid
2444 extra overhead of copying large aggregates out of even larger
2445 aggregates into temporaries only to copy the temporaries to
2446 the argument list. Make optimizers happy by pulling out to
2447 temporaries those types that fit in registers. */
2448 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2449 test = is_gimple_val, fb = fb_rvalue;
2450 else
2452 test = is_gimple_lvalue, fb = fb_either;
2453 /* Also strip a TARGET_EXPR that would force an extra copy. */
2454 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2456 tree init = TARGET_EXPR_INITIAL (*arg_p);
2457 if (init
2458 && !VOID_TYPE_P (TREE_TYPE (init)))
2459 *arg_p = init;
2463 /* If this is a variable sized type, we must remember the size. */
2464 maybe_with_size_expr (arg_p);
2466 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2467 /* Make sure arguments have the same location as the function call
2468 itself. */
2469 protected_set_expr_location (*arg_p, call_location);
2471 /* There is a sequence point before a function call. Side effects in
2472 the argument list must occur before the actual call. So, when
2473 gimplifying arguments, force gimplify_expr to use an internal
2474 post queue which is then appended to the end of PRE_P. */
2475 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2478 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2479 WANT_VALUE is true if the result of the call is desired. */
2481 static enum gimplify_status
2482 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2484 tree fndecl, parms, p, fnptrtype;
2485 enum gimplify_status ret;
2486 int i, nargs;
2487 gimple call;
2488 bool builtin_va_start_p = FALSE;
2489 location_t loc = EXPR_LOCATION (*expr_p);
2491 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2493 /* For reliable diagnostics during inlining, it is necessary that
2494 every call_expr be annotated with file and line. */
2495 if (! EXPR_HAS_LOCATION (*expr_p))
2496 SET_EXPR_LOCATION (*expr_p, input_location);
2498 /* This may be a call to a builtin function.
2500 Builtin function calls may be transformed into different
2501 (and more efficient) builtin function calls under certain
2502 circumstances. Unfortunately, gimplification can muck things
2503 up enough that the builtin expanders are not aware that certain
2504 transformations are still valid.
2506 So we attempt transformation/gimplification of the call before
2507 we gimplify the CALL_EXPR. At this time we do not manage to
2508 transform all calls in the same manner as the expanders do, but
2509 we do transform most of them. */
2510 fndecl = get_callee_fndecl (*expr_p);
2511 if (fndecl
2512 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2513 switch (DECL_FUNCTION_CODE (fndecl))
2515 case BUILT_IN_VA_START:
2517 builtin_va_start_p = TRUE;
2518 if (call_expr_nargs (*expr_p) < 2)
2520 error ("too few arguments to function %<va_start%>")
2521 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2522 return GS_OK;
2525 if (fold_builtin_next_arg (*expr_p, true))
2527 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2528 return GS_OK;
2530 break;
2532 case BUILT_IN_LINE:
2534 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2535 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2536 return GS_OK;
2538 case BUILT_IN_FILE:
2540 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2541 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2542 return GS_OK;
2544 case BUILT_IN_FUNCTION:
2546 const char *function;
2547 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2548 *expr_p = build_string_literal (strlen (function) + 1, function);
2549 return GS_OK;
2551 default:
2554 if (fndecl && DECL_BUILT_IN (fndecl))
2556 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2557 if (new_tree && new_tree != *expr_p)
2559 /* There was a transformation of this call which computes the
2560 same value, but in a more efficient way. Return and try
2561 again. */
2562 *expr_p = new_tree;
2563 return GS_OK;
2567 /* Remember the original function pointer type. */
2568 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2570 /* There is a sequence point before the call, so any side effects in
2571 the calling expression must occur before the actual call. Force
2572 gimplify_expr to use an internal post queue. */
2573 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2574 is_gimple_call_addr, fb_rvalue);
2576 nargs = call_expr_nargs (*expr_p);
2578 /* Get argument types for verification. */
2579 fndecl = get_callee_fndecl (*expr_p);
2580 parms = NULL_TREE;
2581 if (fndecl)
2582 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2583 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2584 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2586 if (fndecl && DECL_ARGUMENTS (fndecl))
2587 p = DECL_ARGUMENTS (fndecl);
2588 else if (parms)
2589 p = parms;
2590 else
2591 p = NULL_TREE;
/* Advance P in parallel with the first NARGS arguments; if P runs out
   first, the trailing arguments are not named parameters.  */
2592 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2595 /* If the last argument is __builtin_va_arg_pack () and it is not
2596 passed as a named argument, decrease the number of CALL_EXPR
2597 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2598 if (!p
2599 && i < nargs
2600 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2602 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2603 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2605 if (last_arg_fndecl
2606 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2607 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2608 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2610 tree call = *expr_p;
2612 --nargs;
2613 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2614 CALL_EXPR_FN (call),
2615 nargs, CALL_EXPR_ARGP (call));
2617 /* Copy all CALL_EXPR flags, location and block, except
2618 CALL_EXPR_VA_ARG_PACK flag. */
2619 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2620 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2621 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2622 = CALL_EXPR_RETURN_SLOT_OPT (call);
2623 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2624 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2626 /* Set CALL_EXPR_VA_ARG_PACK. */
2627 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2631 /* Finally, gimplify the function arguments. */
2632 if (nargs > 0)
2634 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2635 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2636 PUSH_ARGS_REVERSED ? i-- : i++)
2638 enum gimplify_status t;
2640 /* Avoid gimplifying the second argument to va_start, which needs to
2641 be the plain PARM_DECL. */
2642 if ((i != 1) || !builtin_va_start_p)
2644 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2645 EXPR_LOCATION (*expr_p));
2647 if (t == GS_ERROR)
2648 ret = GS_ERROR;
2653 /* Verify the function result. */
2654 if (want_value && fndecl
2655 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2657 error_at (loc, "using result of function returning %<void%>");
2658 ret = GS_ERROR;
2661 /* Try this again in case gimplification exposed something. */
2662 if (ret != GS_ERROR)
2664 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2666 if (new_tree && new_tree != *expr_p)
2668 /* There was a transformation of this call which computes the
2669 same value, but in a more efficient way. Return and try
2670 again. */
2671 *expr_p = new_tree;
2672 return GS_OK;
2675 else
2677 *expr_p = error_mark_node;
2678 return GS_ERROR;
2681 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2682 decl. This allows us to eliminate redundant or useless
2683 calls to "const" functions. */
2684 if (TREE_CODE (*expr_p) == CALL_EXPR)
2686 int flags = call_expr_flags (*expr_p);
2687 if (flags & (ECF_CONST | ECF_PURE)
2688 /* An infinite loop is considered a side effect. */
2689 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2690 TREE_SIDE_EFFECTS (*expr_p) = 0;
2693 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2694 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2695 form and delegate the creation of a GIMPLE_CALL to
2696 gimplify_modify_expr. This is always possible because when
2697 WANT_VALUE is true, the caller wants the result of this call into
2698 a temporary, which means that we will emit an INIT_EXPR in
2699 internal_get_tmp_var which will then be handled by
2700 gimplify_modify_expr. */
2701 if (!want_value)
2703 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2704 have to do is replicate it as a GIMPLE_CALL tuple. */
2705 gimple_stmt_iterator gsi;
2706 call = gimple_build_call_from_tree (*expr_p);
2707 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2708 notice_special_calls (call);
2709 gimplify_seq_add_stmt (pre_p, call);
2710 gsi = gsi_last (*pre_p);
2711 /* Don't fold stmts inside of target construct. We'll do it
2712 during omplower pass instead. */
2713 struct gimplify_omp_ctx *ctx;
2714 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2715 if (ctx->region_type == ORT_TARGET)
2716 break;
2717 if (ctx == NULL)
2718 fold_stmt (&gsi);
2719 *expr_p = NULL_TREE;
2721 else
2722 /* Remember the original function type. */
2723 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2724 CALL_EXPR_FN (*expr_p));
2726 return ret;
2729 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2730 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2732 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2733 condition is true or false, respectively. If null, we should generate
2734 our own to skip over the evaluation of this specific expression.
2736 LOCUS is the source location of the COND_EXPR.
2738 This function is the tree equivalent of do_jump.
2740 shortcut_cond_r should only be called by shortcut_cond_expr. */
2742 static tree
2743 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2744 location_t locus)
2746 tree local_label = NULL_TREE;
2747 tree t, expr = NULL;
2749 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2750 retain the shortcut semantics. Just insert the gotos here;
2751 shortcut_cond_expr will append the real blocks later. */
2752 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2754 location_t new_locus;
2756 /* Turn if (a && b) into
2758 if (a); else goto no;
2759 if (b) goto yes; else goto no;
2760 (no:) */
2762 if (false_label_p == NULL)
2763 false_label_p = &local_label;
2765 /* Keep the original source location on the first 'if'. */
2766 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2767 append_to_statement_list (t, &expr);
2769 /* Set the source location of the && on the second 'if'. */
2770 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2771 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2772 new_locus);
2773 append_to_statement_list (t, &expr);
2775 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2777 location_t new_locus;
2779 /* Turn if (a || b) into
2781 if (a) goto yes;
2782 if (b) goto yes; else goto no;
2783 (yes:) */
2785 if (true_label_p == NULL)
2786 true_label_p = &local_label;
2788 /* Keep the original source location on the first 'if'. */
2789 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2790 append_to_statement_list (t, &expr);
2792 /* Set the source location of the || on the second 'if'. */
2793 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2794 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2795 new_locus);
2796 append_to_statement_list (t, &expr);
2798 else if (TREE_CODE (pred) == COND_EXPR
2799 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2800 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2802 location_t new_locus;
2804 /* As long as we're messing with gotos, turn if (a ? b : c) into
2805 if (a)
2806 if (b) goto yes; else goto no;
2807 else
2808 if (c) goto yes; else goto no;
2810 Don't do this if one of the arms has void type, which can happen
2811 in C++ when the arm is throw. */
2813 /* Keep the original source location on the first 'if'. Set the source
2814 location of the ? on the second 'if'. */
2815 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2816 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2817 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2818 false_label_p, locus),
2819 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2820 false_label_p, new_locus));
2822 else
/* Leaf predicate: emit a single two-way conditional jump.  */
2824 expr = build3 (COND_EXPR, void_type_node, pred,
2825 build_and_jump (true_label_p),
2826 build_and_jump (false_label_p));
2827 SET_EXPR_LOCATION (expr, locus);
/* If one of the recursions above requested a local fall-through label,
   emit it after the jumps.  */
2830 if (local_label)
2832 t = build1 (LABEL_EXPR, void_type_node, local_label);
2833 append_to_statement_list (t, &expr);
2836 return expr;
2839 /* Given a conditional expression EXPR with short-circuit boolean
2840 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2841 predicate apart into the equivalent sequence of conditionals. */
2843 static tree
2844 shortcut_cond_expr (tree expr)
2846 tree pred = TREE_OPERAND (expr, 0);
2847 tree then_ = TREE_OPERAND (expr, 1);
2848 tree else_ = TREE_OPERAND (expr, 2);
2849 tree true_label, false_label, end_label, t;
2850 tree *true_label_p;
2851 tree *false_label_p;
2852 bool emit_end, emit_false, jump_over_else;
2853 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2854 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2856 /* First do simple transformations. */
2857 if (!else_se)
2859 /* If there is no 'else', turn
2860 if (a && b) then c
2861 into
2862 if (a) if (b) then c. */
2863 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2865 /* Keep the original source location on the first 'if'. */
2866 location_t locus = EXPR_LOC_OR_HERE (expr);
2867 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2868 /* Set the source location of the && on the second 'if'. */
2869 if (EXPR_HAS_LOCATION (pred))
2870 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2871 then_ = shortcut_cond_expr (expr);
2872 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2873 pred = TREE_OPERAND (pred, 0);
2874 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2875 SET_EXPR_LOCATION (expr, locus);
2879 if (!then_se)
2881 /* If there is no 'then', turn
2882 if (a || b); else d
2883 into
2884 if (a); else if (b); else d. */
2885 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2887 /* Keep the original source location on the first 'if'. */
2888 location_t locus = EXPR_LOC_OR_HERE (expr);
2889 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2890 /* Set the source location of the || on the second 'if'. */
2891 if (EXPR_HAS_LOCATION (pred))
2892 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2893 else_ = shortcut_cond_expr (expr);
2894 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2895 pred = TREE_OPERAND (pred, 0);
2896 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2897 SET_EXPR_LOCATION (expr, locus);
2901 /* If we're done, great. */
2902 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2903 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2904 return expr;
2906 /* Otherwise we need to mess with gotos. Change
2907 if (a) c; else d;
2909 if (a); else goto no;
2910 c; goto end;
2911 no: d; end:
2912 and recursively gimplify the condition. */
2914 true_label = false_label = end_label = NULL_TREE;
2916 /* If our arms just jump somewhere, hijack those labels so we don't
2917 generate jumps to jumps. */
2919 if (then_
2920 && TREE_CODE (then_) == GOTO_EXPR
2921 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2923 true_label = GOTO_DESTINATION (then_);
2924 then_ = NULL;
2925 then_se = false;
2928 if (else_
2929 && TREE_CODE (else_) == GOTO_EXPR
2930 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2932 false_label = GOTO_DESTINATION (else_);
2933 else_ = NULL;
2934 else_se = false;
2937 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2938 if (true_label)
2939 true_label_p = &true_label;
2940 else
2941 true_label_p = NULL;
2943 /* The 'else' branch also needs a label if it contains interesting code. */
2944 if (false_label || else_se)
2945 false_label_p = &false_label;
2946 else
2947 false_label_p = NULL;
2949 /* If there was nothing else in our arms, just forward the label(s). */
2950 if (!then_se && !else_se)
2951 return shortcut_cond_r (pred, true_label_p, false_label_p,
2952 EXPR_LOC_OR_HERE (expr));
2954 /* If our last subexpression already has a terminal label, reuse it. */
2955 if (else_se)
2956 t = expr_last (else_);
2957 else if (then_se)
2958 t = expr_last (then_);
2959 else
2960 t = NULL;
2961 if (t && TREE_CODE (t) == LABEL_EXPR)
2962 end_label = LABEL_EXPR_LABEL (t);
2964 /* If we don't care about jumping to the 'else' branch, jump to the end
2965 if the condition is false. */
2966 if (!false_label_p)
2967 false_label_p = &end_label;
2969 /* We only want to emit these labels if we aren't hijacking them. */
2970 emit_end = (end_label == NULL_TREE);
2971 emit_false = (false_label == NULL_TREE);
2973 /* We only emit the jump over the else clause if we have to--if the
2974 then clause may fall through. Otherwise we can wind up with a
2975 useless jump and a useless label at the end of gimplified code,
2976 which will cause us to think that this conditional as a whole
2977 falls through even if it doesn't. If we then inline a function
2978 which ends with such a condition, that can cause us to issue an
2979 inappropriate warning about control reaching the end of a
2980 non-void function. */
2981 jump_over_else = block_may_fallthru (then_);
2983 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2984 EXPR_LOC_OR_HERE (expr));
2986 expr = NULL;
2987 append_to_statement_list (pred, &expr);
2989 append_to_statement_list (then_, &expr);
2990 if (else_se)
2992 if (jump_over_else)
2994 tree last = expr_last (expr);
2995 t = build_and_jump (&end_label);
2996 if (EXPR_HAS_LOCATION (last))
2997 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2998 append_to_statement_list (t, &expr);
3000 if (emit_false)
3002 t = build1 (LABEL_EXPR, void_type_node, false_label);
3003 append_to_statement_list (t, &expr);
3005 append_to_statement_list (else_, &expr);
/* END_LABEL may still be NULL_TREE here if nothing above ever needed to
   jump to the end; only emit the label when one was actually created.  */
3007 if (emit_end && end_label)
3009 t = build1 (LABEL_EXPR, void_type_node, end_label);
3010 append_to_statement_list (t, &expr);
3013 return expr;
3016 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3018 tree
3019 gimple_boolify (tree expr)
3021 tree type = TREE_TYPE (expr);
3022 location_t loc = EXPR_LOCATION (expr);
3024 if (TREE_CODE (expr) == NE_EXPR
3025 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3026 && integer_zerop (TREE_OPERAND (expr, 1)))
3028 tree call = TREE_OPERAND (expr, 0);
3029 tree fn = get_callee_fndecl (call);
3031 /* For __builtin_expect ((long) (x), y) recurse into x as well
3032 if x is truth_value_p. */
3033 if (fn
3034 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3035 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3036 && call_expr_nargs (call) == 2)
3038 tree arg = CALL_EXPR_ARG (call, 0);
3039 if (arg)
3041 if (TREE_CODE (arg) == NOP_EXPR
3042 && TREE_TYPE (arg) == TREE_TYPE (call))
3043 arg = TREE_OPERAND (arg, 0);
3044 if (truth_value_p (TREE_CODE (arg)))
3046 arg = gimple_boolify (arg);
3047 CALL_EXPR_ARG (call, 0)
3048 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3054 switch (TREE_CODE (expr))
3056 case TRUTH_AND_EXPR:
3057 case TRUTH_OR_EXPR:
3058 case TRUTH_XOR_EXPR:
3059 case TRUTH_ANDIF_EXPR:
3060 case TRUTH_ORIF_EXPR:
3061 /* Also boolify the arguments of truth exprs. */
3062 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3063 /* FALLTHRU */
3065 case TRUTH_NOT_EXPR:
3066 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3068 /* These expressions always produce boolean results. */
3069 if (TREE_CODE (type) != BOOLEAN_TYPE)
3070 TREE_TYPE (expr) = boolean_type_node;
3071 return expr;
3073 default:
3074 if (COMPARISON_CLASS_P (expr))
3076 /* These expressions always produce boolean results. */
3077 if (TREE_CODE (type) != BOOLEAN_TYPE)
3078 TREE_TYPE (expr) = boolean_type_node;
3079 return expr;
3081 /* Other expressions that get here must have boolean values, but
3082 might need to be converted to the appropriate mode. */
3083 if (TREE_CODE (type) == BOOLEAN_TYPE)
3084 return expr;
3085 return fold_convert_loc (loc, boolean_type_node, expr);
3089 /* Given a conditional expression *EXPR_P without side effects, gimplify
3090 its operands. New statements are inserted to PRE_P. */
3092 static enum gimplify_status
3093 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3095 tree expr = *expr_p, cond;
3096 enum gimplify_status ret, tret;
3097 enum tree_code code;
3099 cond = gimple_boolify (COND_EXPR_COND (expr));
3101 /* We need to handle && and || specially, as their gimplification
3102 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3103 code = TREE_CODE (cond);
3104 if (code == TRUTH_ANDIF_EXPR)
3105 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3106 else if (code == TRUTH_ORIF_EXPR)
3107 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3108 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3109 COND_EXPR_COND (*expr_p) = cond;
3111 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3112 is_gimple_val, fb_rvalue);
3113 ret = MIN (ret, tret);
3114 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3115 is_gimple_val, fb_rvalue);
3117 return MIN (ret, tret);
3120 /* Return true if evaluating EXPR could trap.
3121 EXPR is GENERIC, while tree_could_trap_p can be called
3122 only on GIMPLE. */
3124 static bool
3125 generic_expr_could_trap_p (tree expr)
3127 unsigned i, n;
3129 if (!expr || is_gimple_val (expr))
3130 return false;
3132 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3133 return true;
3135 n = TREE_OPERAND_LENGTH (expr);
3136 for (i = 0; i < n; i++)
3137 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3138 return true;
3140 return false;
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else			or	else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  FALLBACK describes what kind of value the
   caller can accept (rvalue, lvalue, ...).  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    /* Both arms are safe to evaluate; keep the COND_EXPR in
	       rvalue form instead of lowering it to control flow.  */
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* An lvalue is required: take the address of each arm and
	     dereference the selected pointer afterwards.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      /* The COND_EXPR now produces no value of its own; its value lives
	 in TMP.  */
      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is already a goto to a local label, branch there directly
     instead of synthesizing a new label plus a goto.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
				   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);

  /* LABEL_CONT, when set, is the join point emitted after both arms.  */
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3384 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3385 to be marked addressable.
3387 We cannot rely on such an expression being directly markable if a temporary
3388 has been created by the gimplification. In this case, we create another
3389 temporary and initialize it with a copy, which will become a store after we
3390 mark it addressable. This can happen if the front-end passed us something
3391 that it could not mark addressable yet, like a Fortran pass-by-reference
3392 parameter (int) floatvar. */
3394 static void
3395 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3397 while (handled_component_p (*expr_p))
3398 expr_p = &TREE_OPERAND (*expr_p, 0);
3399 if (is_gimple_reg (*expr_p))
3400 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3403 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3404 a call to __builtin_memcpy. */
3406 static enum gimplify_status
3407 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3408 gimple_seq *seq_p)
3410 tree t, to, to_ptr, from, from_ptr;
3411 gimple gs;
3412 location_t loc = EXPR_LOCATION (*expr_p);
3414 to = TREE_OPERAND (*expr_p, 0);
3415 from = TREE_OPERAND (*expr_p, 1);
3417 /* Mark the RHS addressable. Beware that it may not be possible to do so
3418 directly if a temporary has been created by the gimplification. */
3419 prepare_gimple_addressable (&from, seq_p);
3421 mark_addressable (from);
3422 from_ptr = build_fold_addr_expr_loc (loc, from);
3423 gimplify_arg (&from_ptr, seq_p, loc);
3425 mark_addressable (to);
3426 to_ptr = build_fold_addr_expr_loc (loc, to);
3427 gimplify_arg (&to_ptr, seq_p, loc);
3429 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3431 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3433 if (want_value)
3435 /* tmp = memcpy() */
3436 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3437 gimple_call_set_lhs (gs, t);
3438 gimplify_seq_add_stmt (seq_p, gs);
3440 *expr_p = build_simple_mem_ref (t);
3441 return GS_ALL_DONE;
3444 gimplify_seq_add_stmt (seq_p, gs);
3445 *expr_p = NULL;
3446 return GS_ALL_DONE;
3449 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3450 a call to __builtin_memset. In this case we know that the RHS is
3451 a CONSTRUCTOR with an empty element list. */
3453 static enum gimplify_status
3454 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3455 gimple_seq *seq_p)
3457 tree t, from, to, to_ptr;
3458 gimple gs;
3459 location_t loc = EXPR_LOCATION (*expr_p);
3461 /* Assert our assumptions, to abort instead of producing wrong code
3462 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3463 not be immediately exposed. */
3464 from = TREE_OPERAND (*expr_p, 1);
3465 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3466 from = TREE_OPERAND (from, 0);
3468 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3469 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3471 /* Now proceed. */
3472 to = TREE_OPERAND (*expr_p, 0);
3474 to_ptr = build_fold_addr_expr_loc (loc, to);
3475 gimplify_arg (&to_ptr, seq_p, loc);
3476 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3478 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3480 if (want_value)
3482 /* tmp = memset() */
3483 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3484 gimple_call_set_lhs (gs, t);
3485 gimplify_seq_add_stmt (seq_p, gs);
3487 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3488 return GS_ALL_DONE;
3491 gimplify_seq_add_stmt (seq_p, gs);
3492 *expr_p = NULL;
3493 return GS_ALL_DONE;
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context passed (as XDATA) to the gimplify_init_ctor_preeval_1
   walk_tree callback, describing the lhs being initialized.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3510 static tree
3511 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3513 struct gimplify_init_ctor_preeval_data *data
3514 = (struct gimplify_init_ctor_preeval_data *) xdata;
3515 tree t = *tp;
3517 /* If we find the base object, obviously we have overlap. */
3518 if (data->lhs_base_decl == t)
3519 return t;
3521 /* If the constructor component is indirect, determine if we have a
3522 potential overlap with the lhs. The only bits of information we
3523 have to go on at this point are addressability and alias sets. */
3524 if ((INDIRECT_REF_P (t)
3525 || TREE_CODE (t) == MEM_REF)
3526 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3527 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3528 return t;
3530 /* If the constructor component is a call, determine if it can hide a
3531 potential overlap with the lhs through an INDIRECT_REF like above.
3532 ??? Ugh - this is completely broken. In fact this whole analysis
3533 doesn't look conservative. */
3534 if (TREE_CODE (t) == CALL_EXPR)
3536 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3538 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3539 if (POINTER_TYPE_P (TREE_VALUE (type))
3540 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3541 && alias_sets_conflict_p (data->lhs_alias_set,
3542 get_alias_set
3543 (TREE_TYPE (TREE_VALUE (type)))))
3544 return t;
3547 if (IS_TYPE_OR_DECL_P (t))
3548 *walk_subtrees = 0;
3549 return NULL;
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   PRE_P/POST_P receive any statements generated by the gimplification
   of the element.  On gimplification failure *EXPR_P is replaced with
   NULL; gimplify_init_ctor_eval later skips such markers.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* Leave a NULL marker for the caller to skip.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

/* Forward declaration; gimplify_init_ctor_eval_range and
   gimplify_init_ctor_eval are mutually recursive.  */
static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type, NULL);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3699 /* Return true if FDECL is accessing a field that is zero sized. */
3701 static bool
3702 zero_sized_field_decl (const_tree fdecl)
3704 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3705 && integer_zerop (DECL_SIZE (fdecl)))
3706 return true;
3707 return false;
3710 /* Return true if TYPE is zero sized. */
3712 static bool
3713 zero_sized_type (const_tree type)
3715 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3716 && integer_zerop (TYPE_SIZE (type)))
3717 return true;
3718 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* Remember the element type when initializing an array, so indices
     can be converted and ARRAY_REFs built below.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was zeroed already, stores of zero are
	 redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested aggregate constructors recurse; everything else becomes
	 an INIT_EXPR that is gimplified (and may be dropped, see the
	 zero-sized-field comment above).  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3810 /* Return the appropriate RHS predicate for this LHS. */
3812 gimple_predicate
3813 rhs_predicate_for (tree lhs)
3815 if (is_gimple_reg (lhs))
3816 return is_gimple_reg_rhs_or_call;
3817 else
3818 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F and FALLBACK describe what the caller will accept, so
   we can substitute the literal's initializer directly when possible.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer) and use the
     anonymous decl in place of the literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3877 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3878 return a new CONSTRUCTOR if something changed. */
3880 static tree
3881 optimize_compound_literals_in_ctor (tree orig_ctor)
3883 tree ctor = orig_ctor;
3884 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3885 unsigned int idx, num = vec_safe_length (elts);
3887 for (idx = 0; idx < num; idx++)
3889 tree value = (*elts)[idx].value;
3890 tree newval = value;
3891 if (TREE_CODE (value) == CONSTRUCTOR)
3892 newval = optimize_compound_literals_in_ctor (value);
3893 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3895 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3896 tree decl = DECL_EXPR_DECL (decl_s);
3897 tree init = DECL_INITIAL (decl);
3899 if (!TREE_ADDRESSABLE (value)
3900 && !TREE_ADDRESSABLE (decl)
3901 && init
3902 && TREE_CODE (init) == CONSTRUCTOR)
3903 newval = optimize_compound_literals_in_ctor (init);
3905 if (newval == value)
3906 continue;
3908 if (ctor == orig_ctor)
3910 ctor = copy_node (orig_ctor);
3911 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3912 elts = CONSTRUCTOR_ELTS (ctor);
3914 (*elts)[idx].value = newval;
3916 return ctor;
3919 /* A subroutine of gimplify_modify_expr. Break out elements of a
3920 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3922 Note that we still need to clear any elements that don't have explicit
3923 initializers, so if not all elements are initialized we keep the
3924 original MODIFY_EXPR, we just remove all of the constructor elements.
3926 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3927 GS_ERROR if we would have to create a temporary when gimplifying
3928 this constructor. Otherwise, return GS_OK.
3930 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3932 static enum gimplify_status
3933 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3934 bool want_value, bool notify_temp_creation)
3936 tree object, ctor, type;
3937 enum gimplify_status ret;
3938 vec<constructor_elt, va_gc> *elts;
3940 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3942 if (!notify_temp_creation)
3944 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3945 is_gimple_lvalue, fb_lvalue);
3946 if (ret == GS_ERROR)
3947 return ret;
3950 object = TREE_OPERAND (*expr_p, 0);
3951 ctor = TREE_OPERAND (*expr_p, 1) =
3952 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3953 type = TREE_TYPE (ctor);
3954 elts = CONSTRUCTOR_ELTS (ctor);
3955 ret = GS_ALL_DONE;
3957 switch (TREE_CODE (type))
3959 case RECORD_TYPE:
3960 case UNION_TYPE:
3961 case QUAL_UNION_TYPE:
3962 case ARRAY_TYPE:
3964 struct gimplify_init_ctor_preeval_data preeval_data;
3965 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3966 bool cleared, complete_p, valid_const_initializer;
3968 /* Aggregate types must lower constructors to initialization of
3969 individual elements. The exception is that a CONSTRUCTOR node
3970 with no elements indicates zero-initialization of the whole. */
3971 if (vec_safe_is_empty (elts))
3973 if (notify_temp_creation)
3974 return GS_OK;
3975 break;
3978 /* Fetch information about the constructor to direct later processing.
3979 We might want to make static versions of it in various cases, and
3980 can only do so if it known to be a valid constant initializer. */
3981 valid_const_initializer
3982 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3983 &num_ctor_elements, &complete_p);
3985 /* If a const aggregate variable is being initialized, then it
3986 should never be a lose to promote the variable to be static. */
3987 if (valid_const_initializer
3988 && num_nonzero_elements > 1
3989 && TREE_READONLY (object)
3990 && TREE_CODE (object) == VAR_DECL
3991 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3993 if (notify_temp_creation)
3994 return GS_ERROR;
3995 DECL_INITIAL (object) = ctor;
3996 TREE_STATIC (object) = 1;
3997 if (!DECL_NAME (object))
3998 DECL_NAME (object) = create_tmp_var_name ("C");
3999 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4001 /* ??? C++ doesn't automatically append a .<number> to the
4002 assembler name, and even when it does, it looks at FE private
4003 data structures to figure out what that number should be,
4004 which are not set for this variable. I suppose this is
4005 important for local statics for inline functions, which aren't
4006 "local" in the object file sense. So in order to get a unique
4007 TU-local symbol, we must invoke the lhd version now. */
4008 lhd_set_decl_assembler_name (object);
4010 *expr_p = NULL_TREE;
4011 break;
4014 /* If there are "lots" of initialized elements, even discounting
4015 those that are not address constants (and thus *must* be
4016 computed at runtime), then partition the constructor into
4017 constant and non-constant parts. Block copy the constant
4018 parts in, then generate code for the non-constant parts. */
4019 /* TODO. There's code in cp/typeck.c to do this. */
4021 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4022 /* store_constructor will ignore the clearing of variable-sized
4023 objects. Initializers for such objects must explicitly set
4024 every field that needs to be set. */
4025 cleared = false;
4026 else if (!complete_p)
4027 /* If the constructor isn't complete, clear the whole object
4028 beforehand.
4030 ??? This ought not to be needed. For any element not present
4031 in the initializer, we should simply set them to zero. Except
4032 we'd need to *find* the elements that are not present, and that
4033 requires trickery to avoid quadratic compile-time behavior in
4034 large cases or excessive memory use in small cases. */
4035 cleared = true;
4036 else if (num_ctor_elements - num_nonzero_elements
4037 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4038 && num_nonzero_elements < num_ctor_elements / 4)
4039 /* If there are "lots" of zeros, it's more efficient to clear
4040 the memory and then set the nonzero elements. */
4041 cleared = true;
4042 else
4043 cleared = false;
4045 /* If there are "lots" of initialized elements, and all of them
4046 are valid address constants, then the entire initializer can
4047 be dropped to memory, and then memcpy'd out. Don't do this
4048 for sparse arrays, though, as it's more efficient to follow
4049 the standard CONSTRUCTOR behavior of memset followed by
4050 individual element initialization. Also don't do this for small
4051 all-zero initializers (which aren't big enough to merit
4052 clearing), and don't try to make bitwise copies of
4053 TREE_ADDRESSABLE types. */
4054 if (valid_const_initializer
4055 && !(cleared || num_nonzero_elements == 0)
4056 && !TREE_ADDRESSABLE (type))
4058 HOST_WIDE_INT size = int_size_in_bytes (type);
4059 unsigned int align;
4061 /* ??? We can still get unbounded array types, at least
4062 from the C++ front end. This seems wrong, but attempt
4063 to work around it for now. */
4064 if (size < 0)
4066 size = int_size_in_bytes (TREE_TYPE (object));
4067 if (size >= 0)
4068 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4071 /* Find the maximum alignment we can assume for the object. */
4072 /* ??? Make use of DECL_OFFSET_ALIGN. */
4073 if (DECL_P (object))
4074 align = DECL_ALIGN (object);
4075 else
4076 align = TYPE_ALIGN (type);
4078 /* Do a block move either if the size is so small as to make
4079 each individual move a sub-unit move on average, or if it
4080 is so large as to make individual moves inefficient. */
4081 if (size > 0
4082 && num_nonzero_elements > 1
4083 && (size < num_nonzero_elements
4084 || !can_move_by_pieces (size, align)))
4086 if (notify_temp_creation)
4087 return GS_ERROR;
4089 walk_tree (&ctor, force_labels_r, NULL, NULL);
4090 ctor = tree_output_constant_def (ctor);
4091 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4092 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4093 TREE_OPERAND (*expr_p, 1) = ctor;
4095 /* This is no longer an assignment of a CONSTRUCTOR, but
4096 we still may have processing to do on the LHS. So
4097 pretend we didn't do anything here to let that happen. */
4098 return GS_UNHANDLED;
4102 /* If the target is volatile, we have non-zero elements and more than
4103 one field to assign, initialize the target from a temporary. */
4104 if (TREE_THIS_VOLATILE (object)
4105 && !TREE_ADDRESSABLE (type)
4106 && num_nonzero_elements > 0
4107 && vec_safe_length (elts) > 1)
4109 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
4110 TREE_OPERAND (*expr_p, 0) = temp;
4111 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4112 *expr_p,
4113 build2 (MODIFY_EXPR, void_type_node,
4114 object, temp));
4115 return GS_OK;
4118 if (notify_temp_creation)
4119 return GS_OK;
4121 /* If there are nonzero elements and if needed, pre-evaluate to capture
4122 elements overlapping with the lhs into temporaries. We must do this
4123 before clearing to fetch the values before they are zeroed-out. */
4124 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4126 preeval_data.lhs_base_decl = get_base_address (object);
4127 if (!DECL_P (preeval_data.lhs_base_decl))
4128 preeval_data.lhs_base_decl = NULL;
4129 preeval_data.lhs_alias_set = get_alias_set (object);
4131 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4132 pre_p, post_p, &preeval_data);
4135 if (cleared)
4137 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4138 Note that we still have to gimplify, in order to handle the
4139 case of variable sized types. Avoid shared tree structures. */
4140 CONSTRUCTOR_ELTS (ctor) = NULL;
4141 TREE_SIDE_EFFECTS (ctor) = 0;
4142 object = unshare_expr (object);
4143 gimplify_stmt (expr_p, pre_p);
4146 /* If we have not block cleared the object, or if there are nonzero
4147 elements in the constructor, add assignments to the individual
4148 scalar fields of the object. */
4149 if (!cleared || num_nonzero_elements > 0)
4150 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4152 *expr_p = NULL_TREE;
4154 break;
4156 case COMPLEX_TYPE:
4158 tree r, i;
4160 if (notify_temp_creation)
4161 return GS_OK;
4163 /* Extract the real and imaginary parts out of the ctor. */
4164 gcc_assert (elts->length () == 2);
4165 r = (*elts)[0].value;
4166 i = (*elts)[1].value;
4167 if (r == NULL || i == NULL)
4169 tree zero = build_zero_cst (TREE_TYPE (type));
4170 if (r == NULL)
4171 r = zero;
4172 if (i == NULL)
4173 i = zero;
4176 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4177 represent creation of a complex value. */
4178 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4180 ctor = build_complex (type, r, i);
4181 TREE_OPERAND (*expr_p, 1) = ctor;
4183 else
4185 ctor = build2 (COMPLEX_EXPR, type, r, i);
4186 TREE_OPERAND (*expr_p, 1) = ctor;
4187 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4188 pre_p,
4189 post_p,
4190 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4191 fb_rvalue);
4194 break;
4196 case VECTOR_TYPE:
4198 unsigned HOST_WIDE_INT ix;
4199 constructor_elt *ce;
4201 if (notify_temp_creation)
4202 return GS_OK;
4204 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4205 if (TREE_CONSTANT (ctor))
4207 bool constant_p = true;
4208 tree value;
4210 /* Even when ctor is constant, it might contain non-*_CST
4211 elements, such as addresses or trapping values like
4212 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4213 in VECTOR_CST nodes. */
4214 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4215 if (!CONSTANT_CLASS_P (value))
4217 constant_p = false;
4218 break;
4221 if (constant_p)
4223 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4224 break;
4227 /* Don't reduce an initializer constant even if we can't
4228 make a VECTOR_CST. It won't do anything for us, and it'll
4229 prevent us from representing it as a single constant. */
4230 if (initializer_constant_valid_p (ctor, type))
4231 break;
4233 TREE_CONSTANT (ctor) = 0;
4236 /* Vector types use CONSTRUCTOR all the way through gimple
4237 compilation as a general initializer. */
4238 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4240 enum gimplify_status tret;
4241 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4242 fb_rvalue);
4243 if (tret == GS_ERROR)
4244 ret = GS_ERROR;
4246 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4247 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4249 break;
4251 default:
4252 /* So how did we get a CONSTRUCTOR for a scalar type? */
4253 gcc_unreachable ();
4256 if (ret == GS_ERROR)
4257 return GS_ERROR;
4258 else if (want_value)
4260 *expr_p = object;
4261 return GS_OK;
4263 else
4265 /* If we have gimplified both sides of the initializer but have
4266 not emitted an assignment, do so now. */
4267 if (*expr_p)
4269 tree lhs = TREE_OPERAND (*expr_p, 0);
4270 tree rhs = TREE_OPERAND (*expr_p, 1);
4271 gimple init = gimple_build_assign (lhs, rhs);
4272 gimplify_seq_add_stmt (pre_p, init);
4273 *expr_p = NULL;
4276 return GS_ALL_DONE;
4280 /* Given a pointer value OP0, return a simplified version of an
4281 indirection through OP0, or NULL_TREE if no simplification is
4282 possible. This may only be applied to a rhs of an expression.
4283 Note that the resulting type may be different from the type pointed
4284 to in the sense that it is still compatible from the langhooks
4285 point of view. */
4287 static tree
4288 gimple_fold_indirect_ref_rhs (tree t)
4290 return gimple_fold_indirect_ref (t);
4293 /* Subroutine of gimplify_modify_expr to do simplifications of
4294 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4295 something changes. */
/* EXPR_P points to the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P point
   to its RHS and LHS operands respectively.  PRE_P/POST_P collect side
   effects; WANT_VALUE is true when the value of the assignment is itself
   used in an enclosing expression.  Returns GS_UNHANDLED when no
   simplification applied, otherwise the status of the last one.  */
4297 static enum gimplify_status
4298 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4299 gimple_seq *pre_p, gimple_seq *post_p,
4300 bool want_value)
4302 enum gimplify_status ret = GS_UNHANDLED;
4303 bool changed;
/* Body of a do/while loop: the switch below is re-run as long as one of
   its cases makes progress (sets CHANGED).  */
4307 changed = false;
4308 switch (TREE_CODE (*from_p))
4310 case VAR_DECL:
4311 /* If we're assigning from a read-only variable initialized with
4312 a constructor, do the direct assignment from the constructor,
4313 but only if neither source nor target are volatile since this
4314 latter assignment might end up being done on a per-field basis. */
4315 if (DECL_INITIAL (*from_p)
4316 && TREE_READONLY (*from_p)
4317 && !TREE_THIS_VOLATILE (*from_p)
4318 && !TREE_THIS_VOLATILE (*to_p)
4319 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4321 tree old_from = *from_p;
4322 enum gimplify_status subret;
4324 /* Move the constructor into the RHS. */
4325 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4327 /* Let's see if gimplify_init_constructor will need to put
4328 it in memory. */
4329 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4330 false, true);
4331 if (subret == GS_ERROR)
4333 /* If so, revert the change. */
4334 *from_p = old_from;
4336 else
4338 ret = GS_OK;
4339 changed = true;
4342 break;
4343 case INDIRECT_REF:
4345 /* If we have code like
4347 *(const A*)(A*)&x
4349 where the type of "x" is a (possibly cv-qualified variant
4350 of "A"), treat the entire expression as identical to "x".
4351 This kind of code arises in C++ when an object is bound
4352 to a const reference, and if "x" is a TARGET_EXPR we want
4353 to take advantage of the optimization below. */
4354 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4355 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4356 if (t)
4358 if (TREE_THIS_VOLATILE (t) != volatile_p)
/* Folding lost (or gained) a volatile qualifier.  A bare decl cannot
   carry one, so re-materialize the access as a MEM_REF first, then
   copy the original volatility onto the reference.  */
4360 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4361 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4362 build_fold_addr_expr (t));
4363 if (REFERENCE_CLASS_P (t))
4364 TREE_THIS_VOLATILE (t) = volatile_p;
4366 *from_p = t;
4367 ret = GS_OK;
4368 changed = true;
4370 break;
4373 case TARGET_EXPR:
4375 /* If we are initializing something from a TARGET_EXPR, strip the
4376 TARGET_EXPR and initialize it directly, if possible. This can't
4377 be done if the initializer is void, since that implies that the
4378 temporary is set in some non-trivial way.
4380 ??? What about code that pulls out the temp and uses it
4381 elsewhere? I think that such code never uses the TARGET_EXPR as
4382 an initializer. If I'm wrong, we'll die because the temp won't
4383 have any RTL. In that case, I guess we'll need to replace
4384 references somehow. */
4385 tree init = TARGET_EXPR_INITIAL (*from_p);
4387 if (init
4388 && !VOID_TYPE_P (TREE_TYPE (init)))
4390 *from_p = init;
4391 ret = GS_OK;
4392 changed = true;
4395 break;
4397 case COMPOUND_EXPR:
4398 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4399 caught. */
4400 gimplify_compound_expr (from_p, pre_p, true);
4401 ret = GS_OK;
4402 changed = true;
4403 break;
4405 case CONSTRUCTOR:
4406 /* If we already made some changes, let the front end have a
4407 crack at this before we break it down. */
4408 if (ret != GS_UNHANDLED)
4409 break;
4410 /* If we're initializing from a CONSTRUCTOR, break this into
4411 individual MODIFY_EXPRs. */
4412 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4413 false);
4415 case COND_EXPR:
4416 /* If we're assigning to a non-register type, push the assignment
4417 down into the branches. This is mandatory for ADDRESSABLE types,
4418 since we cannot generate temporaries for such, but it saves a
4419 copy in other cases as well. */
4420 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4422 /* This code should mirror the code in gimplify_cond_expr. */
4423 enum tree_code code = TREE_CODE (*expr_p);
4424 tree cond = *from_p;
4425 tree result = *to_p;
4427 ret = gimplify_expr (&result, pre_p, post_p,
4428 is_gimple_lvalue, fb_lvalue);
4429 if (ret != GS_ERROR)
4430 ret = GS_OK;
/* Push the assignment into each non-void arm of the COND_EXPR, making
   the COND_EXPR itself void.  */
4432 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4433 TREE_OPERAND (cond, 1)
4434 = build2 (code, void_type_node, result,
4435 TREE_OPERAND (cond, 1));
4436 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4437 TREE_OPERAND (cond, 2)
4438 = build2 (code, void_type_node, unshare_expr (result),
4439 TREE_OPERAND (cond, 2));
4441 TREE_TYPE (cond) = void_type_node;
4442 recalculate_side_effects (cond);
4444 if (want_value)
4446 gimplify_and_add (cond, pre_p);
4447 *expr_p = unshare_expr (result);
4449 else
4450 *expr_p = cond;
4451 return ret;
4453 break;
4455 case CALL_EXPR:
4456 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4457 return slot so that we don't generate a temporary. */
4458 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4459 && aggregate_value_p (*from_p, *from_p))
4461 bool use_target;
/* Decide whether *TO_P may safely serve as the call's return slot.  */
4463 if (!(rhs_predicate_for (*to_p))(*from_p))
4464 /* If we need a temporary, *to_p isn't accurate. */
4465 use_target = false;
4466 /* It's OK to use the return slot directly unless it's an NRV. */
4467 else if (TREE_CODE (*to_p) == RESULT_DECL
4468 && DECL_NAME (*to_p) == NULL_TREE
4469 && needs_to_live_in_memory (*to_p))
4470 use_target = true;
4471 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4472 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4473 /* Don't force regs into memory. */
4474 use_target = false;
4475 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4476 /* It's OK to use the target directly if it's being
4477 initialized. */
4478 use_target = true;
4479 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4480 /* Always use the target and thus RSO for variable-sized types.
4481 GIMPLE cannot deal with a variable-sized assignment
4482 embedded in a call statement. */
4483 use_target = true;
4484 else if (TREE_CODE (*to_p) != SSA_NAME
4485 && (!is_gimple_variable (*to_p)
4486 || needs_to_live_in_memory (*to_p)))
4487 /* Don't use the original target if it's already addressable;
4488 if its address escapes, and the called function uses the
4489 NRV optimization, a conforming program could see *to_p
4490 change before the called function returns; see c++/19317.
4491 When optimizing, the return_slot pass marks more functions
4492 as safe after we have escape info. */
4493 use_target = false;
4494 else
4495 use_target = true;
4497 if (use_target)
4499 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4500 mark_addressable (*to_p);
4503 break;
4505 case WITH_SIZE_EXPR:
4506 /* Likewise for calls that return an aggregate of non-constant size,
4507 since we would not be able to generate a temporary at all. */
4508 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4510 *from_p = TREE_OPERAND (*from_p, 0);
4511 /* We don't change ret in this case because the
4512 WITH_SIZE_EXPR might have been added in
4513 gimplify_modify_expr, so returning GS_OK would lead to an
4514 infinite loop. */
4515 changed = true;
4517 break;
4519 /* If we're initializing from a container, push the initialization
4520 inside it. */
4521 case CLEANUP_POINT_EXPR:
4522 case BIND_EXPR:
4523 case STATEMENT_LIST:
4525 tree wrap = *from_p;
4526 tree t;
4528 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4529 fb_lvalue);
4530 if (ret != GS_ERROR)
4531 ret = GS_OK;
4533 t = voidify_wrapper_expr (wrap, *expr_p);
4534 gcc_assert (t == *expr_p);
4536 if (want_value)
4538 gimplify_and_add (wrap, pre_p);
4539 *expr_p = unshare_expr (*to_p);
4541 else
4542 *expr_p = wrap;
4543 return GS_OK;
4546 case COMPOUND_LITERAL_EXPR:
4548 tree complit = TREE_OPERAND (*expr_p, 1);
4549 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4550 tree decl = DECL_EXPR_DECL (decl_s);
4551 tree init = DECL_INITIAL (decl);
4553 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4554 into struct T x = { 0, 1, 2 } if the address of the
4555 compound literal has never been taken. */
4556 if (!TREE_ADDRESSABLE (complit)
4557 && !TREE_ADDRESSABLE (decl)
4558 && init)
4560 *expr_p = copy_node (*expr_p);
4561 TREE_OPERAND (*expr_p, 1) = init;
4562 return GS_OK;
4566 default:
4567 break;
4570 while (changed);
4572 return ret;
4576 /* Return true if T looks like a valid GIMPLE statement. */
4578 static bool
4579 is_gimple_stmt (tree t)
4581 const enum tree_code code = TREE_CODE (t);
4583 switch (code)
4585 case NOP_EXPR:
4586 /* The only valid NOP_EXPR is the empty statement. */
4587 return IS_EMPTY_STMT (t);
4589 case BIND_EXPR:
4590 case COND_EXPR:
4591 /* These are only valid if they're void. */
4592 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4594 case SWITCH_EXPR:
4595 case GOTO_EXPR:
4596 case RETURN_EXPR:
4597 case LABEL_EXPR:
4598 case CASE_LABEL_EXPR:
4599 case TRY_CATCH_EXPR:
4600 case TRY_FINALLY_EXPR:
4601 case EH_FILTER_EXPR:
4602 case CATCH_EXPR:
4603 case ASM_EXPR:
4604 case STATEMENT_LIST:
4605 case OMP_PARALLEL:
4606 case OMP_FOR:
4607 case OMP_SIMD:
4608 case OMP_DISTRIBUTE:
4609 case OMP_SECTIONS:
4610 case OMP_SECTION:
4611 case OMP_SINGLE:
4612 case OMP_MASTER:
4613 case OMP_TASKGROUP:
4614 case OMP_ORDERED:
4615 case OMP_CRITICAL:
4616 case OMP_TASK:
4617 /* These are always void. */
4618 return true;
4620 case CALL_EXPR:
4621 case MODIFY_EXPR:
4622 case PREDICT_EXPR:
4623 /* These are valid regardless of their type. */
4624 return true;
4626 default:
4627 return false;
4632 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4633 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4634 DECL_GIMPLE_REG_P set.
4636 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4637 other, unmodified part of the complex object just before the total store.
4638 As a consequence, if the object is still uninitialized, an undefined value
4639 will be loaded into a register, which may result in a spurious exception
4640 if the register is floating-point and the value happens to be a signaling
4641 NaN for example. Then the fully-fledged complex operations lowering pass
4642 followed by a DCE pass are necessary in order to fix things up. */
4644 static enum gimplify_status
4645 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4646 bool want_value)
4648 enum tree_code code, ocode;
4649 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4651 lhs = TREE_OPERAND (*expr_p, 0);
4652 rhs = TREE_OPERAND (*expr_p, 1);
4653 code = TREE_CODE (lhs);
4654 lhs = TREE_OPERAND (lhs, 0);
4656 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4657 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4658 TREE_NO_WARNING (other) = 1;
4659 other = get_formal_tmp_var (other, pre_p);
4661 realpart = code == REALPART_EXPR ? rhs : other;
4662 imagpart = code == REALPART_EXPR ? other : rhs;
4664 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4665 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4666 else
4667 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4669 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4670 *expr_p = (want_value) ? rhs : NULL_TREE;
4672 return GS_ALL_DONE;
4675 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4677 modify_expr
4678 : varname '=' rhs
4679 | '*' ID '=' rhs
4681 PRE_P points to the list where side effects that must happen before
4682 *EXPR_P should be stored.
4684 POST_P points to the list where side effects that must happen after
4685 *EXPR_P should be stored.
4687 WANT_VALUE is nonzero iff we want to use the value of this expression
4688 in another expression. */
/* Returns GS_ALL_DONE when the assignment has been emitted into PRE_P
   and *EXPR_P cleared, GS_OK when WANT_VALUE is set and *EXPR_P now
   holds the value of the assignment, or GS_ERROR on failure.  */
4690 static enum gimplify_status
4691 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4692 bool want_value)
4694 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4695 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4696 enum gimplify_status ret = GS_UNHANDLED;
4697 gimple assign;
4698 location_t loc = EXPR_LOCATION (*expr_p);
4699 gimple_stmt_iterator gsi;
4701 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4702 || TREE_CODE (*expr_p) == INIT_EXPR);
4704 /* Trying to simplify a clobber using normal logic doesn't work,
4705 so handle it here. */
4706 if (TREE_CLOBBER_P (*from_p))
4708 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4709 if (ret == GS_ERROR)
4710 return ret;
4711 gcc_assert (!want_value
4712 && (TREE_CODE (*to_p) == VAR_DECL
4713 || TREE_CODE (*to_p) == MEM_REF));
4714 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4715 *expr_p = NULL;
4716 return GS_ALL_DONE;
4719 /* Insert pointer conversions required by the middle-end that are not
4720 required by the frontend. This fixes middle-end type checking for
4721 for example gcc.dg/redecl-6.c. */
4722 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4724 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4725 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4726 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4729 /* See if any simplifications can be done based on what the RHS is. */
4730 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4731 want_value);
4732 if (ret != GS_UNHANDLED)
4733 return ret;
4735 /* For zero sized types only gimplify the left hand side and right hand
4736 side as statements and throw away the assignment. Do this after
4737 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4738 types properly. */
4739 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4741 gimplify_stmt (from_p, pre_p);
4742 gimplify_stmt (to_p, pre_p);
4743 *expr_p = NULL_TREE;
4744 return GS_ALL_DONE;
4747 /* If the value being copied is of variable width, compute the length
4748 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4749 before gimplifying any of the operands so that we can resolve any
4750 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4751 the size of the expression to be copied, not of the destination, so
4752 that is what we must do here. */
4753 maybe_with_size_expr (from_p);
4755 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4756 if (ret == GS_ERROR)
4757 return ret;
4759 /* As a special case, we have to temporarily allow for assignments
4760 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4761 a toplevel statement, when gimplifying the GENERIC expression
4762 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4763 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4765 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4766 prevent gimplify_expr from trying to create a new temporary for
4767 foo's LHS, we tell it that it should only gimplify until it
4768 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4769 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4770 and all we need to do here is set 'a' to be its LHS. */
4771 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4772 fb_rvalue);
4773 if (ret == GS_ERROR)
4774 return ret;
4776 /* Now see if the above changed *from_p to something we handle specially. */
4777 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4778 want_value);
4779 if (ret != GS_UNHANDLED)
4780 return ret;
4782 /* If we've got a variable sized assignment between two lvalues (i.e. does
4783 not involve a call), then we can make things a bit more straightforward
4784 by converting the assignment to memcpy or memset. */
4785 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4787 tree from = TREE_OPERAND (*from_p, 0);
4788 tree size = TREE_OPERAND (*from_p, 1);
4790 if (TREE_CODE (from) == CONSTRUCTOR)
4791 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4793 if (is_gimple_addressable (from))
4795 *from_p = from;
4796 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4797 pre_p);
4801 /* Transform partial stores to non-addressable complex variables into
4802 total stores. This allows us to use real instead of virtual operands
4803 for these variables, which improves optimization. */
4804 if ((TREE_CODE (*to_p) == REALPART_EXPR
4805 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4806 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4807 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4809 /* Try to alleviate the effects of the gimplification creating artificial
4810 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4811 if (!gimplify_ctxp->into_ssa
4812 && TREE_CODE (*from_p) == VAR_DECL
4813 && DECL_IGNORED_P (*from_p)
4814 && DECL_P (*to_p)
4815 && !DECL_IGNORED_P (*to_p))
4817 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4818 DECL_NAME (*from_p)
4819 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4820 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4821 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
/* Re-reading a volatile LHS for the value would add a second volatile
   access, so evaluate the RHS into a temporary and return that instead
   (see the want_value code at the end).  */
4824 if (want_value && TREE_THIS_VOLATILE (*to_p))
4825 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4827 if (TREE_CODE (*from_p) == CALL_EXPR)
4829 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4830 instead of a GIMPLE_ASSIGN. */
4831 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4832 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4833 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4834 assign = gimple_build_call_from_tree (*from_p);
4835 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4836 notice_special_calls (assign);
/* A call that cannot return never provides a result, so don't give
   such a call a LHS.  */
4837 if (!gimple_call_noreturn_p (assign))
4838 gimple_call_set_lhs (assign, *to_p)
4840 else
4842 assign = gimple_build_assign (*to_p, *from_p);
4843 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4846 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4848 /* We should have got an SSA name from the start. */
4849 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4852 gimplify_seq_add_stmt (pre_p, assign);
4853 gsi = gsi_last (*pre_p);
4854 /* Don't fold stmts inside of target construct. We'll do it
4855 during omplower pass instead. */
4856 struct gimplify_omp_ctx *ctx;
4857 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
4858 if (ctx->region_type == ORT_TARGET)
4859 break;
4860 if (ctx == NULL)
4861 fold_stmt (&gsi);
4863 if (want_value)
4865 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4866 return GS_OK;
4868 else
4869 *expr_p = NULL;
4871 return GS_ALL_DONE;
4874 /* Gimplify a comparison between two variable-sized objects. Do this
4875 with a call to BUILT_IN_MEMCMP. */
4877 static enum gimplify_status
4878 gimplify_variable_sized_compare (tree *expr_p)
4880 location_t loc = EXPR_LOCATION (*expr_p);
4881 tree op0 = TREE_OPERAND (*expr_p, 0);
4882 tree op1 = TREE_OPERAND (*expr_p, 1);
4883 tree t, arg, dest, src, expr;
4885 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4886 arg = unshare_expr (arg);
4887 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4888 src = build_fold_addr_expr_loc (loc, op1);
4889 dest = build_fold_addr_expr_loc (loc, op0);
4890 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4891 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4893 expr
4894 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4895 SET_EXPR_LOCATION (expr, loc);
4896 *expr_p = expr;
4898 return GS_OK;
4901 /* Gimplify a comparison between two aggregate objects of integral scalar
4902 mode as a comparison between the bitwise equivalent scalar values. */
4904 static enum gimplify_status
4905 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4907 location_t loc = EXPR_LOCATION (*expr_p);
4908 tree op0 = TREE_OPERAND (*expr_p, 0);
4909 tree op1 = TREE_OPERAND (*expr_p, 1);
4911 tree type = TREE_TYPE (op0);
4912 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4914 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4915 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4917 *expr_p
4918 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4920 return GS_OK;
4923 /* Gimplify an expression sequence. This function gimplifies each
4924 expression and rewrites the original expression with the last
4925 expression of the sequence in GIMPLE form.
4927 PRE_P points to the list where the side effects for all the
4928 expressions in the sequence will be emitted.
4930 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4932 static enum gimplify_status
4933 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4935 tree t = *expr_p;
4939 tree *sub_p = &TREE_OPERAND (t, 0);
4941 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4942 gimplify_compound_expr (sub_p, pre_p, false);
4943 else
4944 gimplify_stmt (sub_p, pre_p);
4946 t = TREE_OPERAND (t, 1);
4948 while (TREE_CODE (t) == COMPOUND_EXPR);
4950 *expr_p = t;
4951 if (want_value)
4952 return GS_OK;
4953 else
4955 gimplify_stmt (expr_p, pre_p);
4956 return GS_ALL_DONE;
4960 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4961 gimplify. After gimplification, EXPR_P will point to a new temporary
4962 that holds the original value of the SAVE_EXPR node.
4964 PRE_P points to the list where side effects that must happen before
4965 *EXPR_P should be stored. */
4967 static enum gimplify_status
4968 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4970 enum gimplify_status ret = GS_ALL_DONE;
4971 tree val;
4973 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4974 val = TREE_OPERAND (*expr_p, 0);
4976 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4977 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4979 /* The operand may be a void-valued expression such as SAVE_EXPRs
4980 generated by the Java frontend for class initialization. It is
4981 being executed only for its side-effects. */
4982 if (TREE_TYPE (val) == void_type_node)
4984 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4985 is_gimple_stmt, fb_none);
4986 val = NULL;
4988 else
4989 val = get_initialized_tmp_var (val, pre_p, post_p);
4991 TREE_OPERAND (*expr_p, 0) = val;
4992 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4995 *expr_p = val;
4997 return ret;
5000 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5002 unary_expr
5003 : ...
5004 | '&' varname
5007 PRE_P points to the list where side effects that must happen before
5008 *EXPR_P should be stored.
5010 POST_P points to the list where side effects that must happen after
5011 *EXPR_P should be stored. */
/* Returns GS_ERROR if the operand cannot be gimplified into an
   addressable form; GS_OK otherwise.  */
5013 static enum gimplify_status
5014 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5016 tree expr = *expr_p;
5017 tree op0 = TREE_OPERAND (expr, 0);
5018 enum gimplify_status ret;
5019 location_t loc = EXPR_LOCATION (*expr_p);
5021 switch (TREE_CODE (op0))
5023 case INDIRECT_REF:
/* Shared tail: also reached via goto from the default case when
   gimplification of the operand produced a new INDIRECT_REF.  */
5024 do_indirect_ref:
5025 /* Check if we are dealing with an expression of the form '&*ptr'.
5026 While the front end folds away '&*ptr' into 'ptr', these
5027 expressions may be generated internally by the compiler (e.g.,
5028 builtins like __builtin_va_end). */
5029 /* Caution: the silent array decomposition semantics we allow for
5030 ADDR_EXPR means we can't always discard the pair. */
5031 /* Gimplification of the ADDR_EXPR operand may drop
5032 cv-qualification conversions, so make sure we add them if
5033 needed. */
5035 tree op00 = TREE_OPERAND (op0, 0);
5036 tree t_expr = TREE_TYPE (expr);
5037 tree t_op00 = TREE_TYPE (op00);
5039 if (!useless_type_conversion_p (t_expr, t_op00))
5040 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5041 *expr_p = op00;
5042 ret = GS_OK;
5044 break;
5046 case VIEW_CONVERT_EXPR:
5047 /* Take the address of our operand and then convert it to the type of
5048 this ADDR_EXPR.
5050 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5051 all clear. The impact of this transformation is even less clear. */
5053 /* If the operand is a useless conversion, look through it. Doing so
5054 guarantees that the ADDR_EXPR and its operand will remain of the
5055 same type. */
5056 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5057 op0 = TREE_OPERAND (op0, 0);
5059 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5060 build_fold_addr_expr_loc (loc,
5061 TREE_OPERAND (op0, 0)));
5062 ret = GS_OK;
5063 break;
5065 default:
5066 /* We use fb_either here because the C frontend sometimes takes
5067 the address of a call that returns a struct; see
5068 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5069 the implied temporary explicit. */
5071 /* Make the operand addressable. */
5072 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5073 is_gimple_addressable, fb_either);
5074 if (ret == GS_ERROR)
5075 break;
5077 /* Then mark it. Beware that it may not be possible to do so directly
5078 if a temporary has been created by the gimplification. */
5079 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5081 op0 = TREE_OPERAND (expr, 0);
5083 /* For various reasons, the gimplification of the expression
5084 may have made a new INDIRECT_REF. */
5085 if (TREE_CODE (op0) == INDIRECT_REF)
5086 goto do_indirect_ref;
5088 mark_addressable (TREE_OPERAND (expr, 0));
5090 /* The FEs may end up building ADDR_EXPRs early on a decl with
5091 an incomplete type. Re-build ADDR_EXPRs in canonical form
5092 here. */
5093 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5094 *expr_p = build_fold_addr_expr (op0);
5096 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5097 recompute_tree_invariant_for_addr_expr (*expr_p);
5099 /* If we re-built the ADDR_EXPR add a conversion to the original type
5100 if required. */
5101 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5102 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5104 break;
5107 return ret;
5110 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5111 value; output operands should be a gimple lvalue. */
/* On success the built GIMPLE_ASM is appended to *PRE_P and GS_ALL_DONE is
   returned; GS_ERROR is returned (and no asm is emitted) when an operand
   or constraint is invalid.  */
5113 static enum gimplify_status
5114 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5116 tree expr;
5117 int noutputs;
5118 const char **oconstraints;
5119 int i;
5120 tree link;
5121 const char *constraint;
5122 bool allows_mem, allows_reg, is_inout;
5123 enum gimplify_status ret, tret;
5124 gimple stmt;
5125 vec<tree, va_gc> *inputs;
5126 vec<tree, va_gc> *outputs;
5127 vec<tree, va_gc> *clobbers;
5128 vec<tree, va_gc> *labels;
5129 tree link_next;
5131 expr = *expr_p;
5132 noutputs = list_length (ASM_OUTPUTS (expr));
/* Record every output constraint so parse_input_constraint below can
   validate matching-operand references against them.  */
5133 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5135 inputs = NULL;
5136 outputs = NULL;
5137 clobbers = NULL;
5138 labels = NULL;
5140 ret = GS_ALL_DONE;
5141 link_next = NULL_TREE;
/* First pass: gimplify each output operand to an lvalue.  I is the asm
   operand number; it keeps counting into the input loop below, matching
   the numbering used in operand constraints and diagnostics.  */
5142 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5144 bool ok;
5145 size_t constraint_len;
5147 link_next = TREE_CHAIN (link);
5149 oconstraints[i]
5150 = constraint
5151 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5152 constraint_len = strlen (constraint);
5153 if (constraint_len == 0)
5154 continue;
5156 ok = parse_output_constraint (&constraint, i, 0, 0,
5157 &allows_mem, &allows_reg, &is_inout);
5158 if (!ok)
5160 ret = GS_ERROR;
5161 is_inout = false;
5164 if (!allows_reg && allows_mem)
5165 mark_addressable (TREE_VALUE (link));
5167 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5168 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5169 fb_lvalue | fb_mayfail);
5170 if (tret == GS_ERROR)
5172 error ("invalid lvalue in asm output %d", i);
5173 ret = tret;
/* Detach the operand from the TREE_LIST chain and collect it.  */
5176 vec_safe_push (outputs, link);
5177 TREE_CHAIN (link) = NULL_TREE;
5179 if (is_inout)
5181 /* An input/output operand. To give the optimizers more
5182 flexibility, split it into separate input and output
5183 operands. */
5184 tree input;
5185 char buf[10];
5187 /* Turn the in/out constraint into an output constraint. */
5188 char *p = xstrdup (constraint);
5189 p[0] = '=';
5190 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5192 /* And add a matching input constraint. */
5193 if (allows_reg)
5195 sprintf (buf, "%d", i);
5197 /* If there are multiple alternatives in the constraint,
5198 handle each of them individually. Those that allow register
5199 will be replaced with operand number, the others will stay
5200 unchanged. */
5201 if (strchr (p, ',') != NULL)
/* First scan computes how much space the rewritten constraint
   needs; the second scan fills it in.  */
5203 size_t len = 0, buflen = strlen (buf);
5204 char *beg, *end, *str, *dst;
5206 for (beg = p + 1;;)
5208 end = strchr (beg, ',');
5209 if (end == NULL)
5210 end = strchr (beg, '\0');
5211 if ((size_t) (end - beg) < buflen)
5212 len += buflen + 1;
5213 else
5214 len += end - beg + 1;
5215 if (*end)
5216 beg = end + 1;
5217 else
5218 break;
5221 str = (char *) alloca (len);
5222 for (beg = p + 1, dst = str;;)
5224 const char *tem;
5225 bool mem_p, reg_p, inout_p;
5227 end = strchr (beg, ',');
5228 if (end)
5229 *end = '\0';
/* Temporarily prefix '=' so the alternative parses as an
   output constraint; beg - 1 is writable scratch in P.  */
5230 beg[-1] = '=';
5231 tem = beg - 1;
5232 parse_output_constraint (&tem, i, 0, 0,
5233 &mem_p, &reg_p, &inout_p);
5234 if (dst != str)
5235 *dst++ = ',';
5236 if (reg_p)
5238 memcpy (dst, buf, buflen);
5239 dst += buflen;
5241 else
5243 if (end)
5244 len = end - beg;
5245 else
5246 len = strlen (beg);
5247 memcpy (dst, beg, len);
5248 dst += len;
5250 if (end)
5251 beg = end + 1;
5252 else
5253 break;
5255 *dst = '\0';
5256 input = build_string (dst - str, str);
5258 else
5259 input = build_string (strlen (buf), buf);
5261 else
/* No register alternatives: reuse the original constraint text
   minus the leading '+'.  */
5262 input = build_string (constraint_len - 1, constraint + 1);
5264 free (p);
5266 input = build_tree_list (build_tree_list (NULL_TREE, input),
5267 unshare_expr (TREE_VALUE (link)));
5268 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
/* Second pass: gimplify the input operands (including the matching
   inputs synthesized for "+" outputs above).  */
5272 link_next = NULL_TREE;
5273 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5275 link_next = TREE_CHAIN (link);
5276 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5277 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5278 oconstraints, &allows_mem, &allows_reg);
5280 /* If we can't make copies, we can only accept memory. */
5281 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5283 if (allows_mem)
5284 allows_reg = 0;
5285 else
5287 error ("impossible constraint in %<asm%>");
5288 error ("non-memory input %d must stay in memory", i);
5289 return GS_ERROR;
5293 /* If the operand is a memory input, it should be an lvalue. */
5294 if (!allows_reg && allows_mem)
5296 tree inputv = TREE_VALUE (link);
5297 STRIP_NOPS (inputv);
/* Pre/post increment and decrement are not lvalues; replace them
   so gimplify_expr reports the error below.  */
5298 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5299 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5300 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5301 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5302 TREE_VALUE (link) = error_mark_node;
5303 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5304 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5305 mark_addressable (TREE_VALUE (link));
5306 if (tret == GS_ERROR)
5308 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5309 input_location = EXPR_LOCATION (TREE_VALUE (link));
5310 error ("memory input %d is not directly addressable", i);
5311 ret = tret;
5314 else
5316 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5317 is_gimple_asm_val, fb_rvalue);
5318 if (tret == GS_ERROR)
5319 ret = tret;
5322 TREE_CHAIN (link) = NULL_TREE;
5323 vec_safe_push (inputs, link);
/* Clobbers and labels need no gimplification; just split their
   TREE_LIST chains into vectors for gimple_build_asm_vec.  */
5326 link_next = NULL_TREE;
5327 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5329 link_next = TREE_CHAIN (link);
5330 TREE_CHAIN (link) = NULL_TREE;
5331 vec_safe_push (clobbers, link);
5334 link_next = NULL_TREE;
5335 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5337 link_next = TREE_CHAIN (link);
5338 TREE_CHAIN (link) = NULL_TREE;
5339 vec_safe_push (labels, link);
5342 /* Do not add ASMs with errors to the gimple IL stream. */
5343 if (ret != GS_ERROR)
5345 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5346 inputs, outputs, clobbers, labels);
5348 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5349 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5351 gimplify_seq_add_stmt (pre_p, stmt);
5354 return ret;
5357 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5358 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5359 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5360 return to this function.
5362 FIXME should we complexify the prequeue handling instead? Or use flags
5363 for all the cleanups and let the optimizer tighten them up? The current
5364 code seems pretty fragile; it will break on a cleanup within any
5365 non-conditional nesting. But any such nesting would be broken, anyway;
5366 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5367 and continues out of it. We can do that at the RTL level, though, so
5368 having an optimizer to tighten up try/finally regions would be a Good
5369 Thing. */
5371 static enum gimplify_status
5372 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5374 gimple_stmt_iterator iter;
5375 gimple_seq body_sequence = NULL;
/* If the wrapped expression yields a value, arrange for it to be
   stored in a temporary we can return via *EXPR_P (NULL otherwise).  */
5377 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5379 /* We only care about the number of conditions between the innermost
5380 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5381 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5382 int old_conds = gimplify_ctxp->conditions;
5383 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5384 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5385 gimplify_ctxp->conditions = 0;
5386 gimplify_ctxp->conditional_cleanups = NULL;
5387 gimplify_ctxp->in_cleanup_point_expr = true;
5389 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5391 gimplify_ctxp->conditions = old_conds;
5392 gimplify_ctxp->conditional_cleanups = old_cleanups;
5393 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Walk the gimplified body: each GIMPLE_WITH_CLEANUP_EXPR marker is
   replaced by a GIMPLE_TRY protecting every statement that follows it.  */
5395 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5397 gimple wce = gsi_stmt (iter);
5399 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
/* A cleanup marker as the last statement protects nothing, so just
   emit the cleanup inline (unless it runs only on exceptions).  */
5401 if (gsi_one_before_end_p (iter))
5403 /* Note that gsi_insert_seq_before and gsi_remove do not
5404 scan operands, unlike some other sequence mutators. */
5405 if (!gimple_wce_cleanup_eh_only (wce))
5406 gsi_insert_seq_before_without_update (&iter,
5407 gimple_wce_cleanup (wce),
5408 GSI_SAME_STMT);
5409 gsi_remove (&iter, true);
5410 break;
5412 else
5414 gimple gtry;
5415 gimple_seq seq;
5416 enum gimple_try_flags kind;
5418 if (gimple_wce_cleanup_eh_only (wce))
5419 kind = GIMPLE_TRY_CATCH;
5420 else
5421 kind = GIMPLE_TRY_FINALLY;
/* Everything after the marker becomes the protected body.  */
5422 seq = gsi_split_seq_after (iter);
5424 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5425 /* Do not use gsi_replace here, as it may scan operands.
5426 We want to do a simple structural modification only. */
5427 gsi_set_stmt (&iter, gtry);
/* Continue scanning inside the new try body for further markers.  */
5428 iter = gsi_start (gtry->gimple_try.eval);
5431 else
5432 gsi_next (&iter);
5435 gimplify_seq_add_seq (pre_p, body_sequence);
5436 if (temp)
5438 *expr_p = temp;
5439 return GS_OK;
5441 else
5443 *expr_p = NULL;
5444 return GS_ALL_DONE;
5448 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5449 is the cleanup action required. EH_ONLY is true if the cleanup should
5450 only be executed if an exception is thrown, not on normal exit. */
/* VAR is the variable the cleanup protects; it is only used here to
   suppress a spurious uninitialized-use warning in the conditional case.
   The marker statement is appended to *PRE_P.  */
5452 static void
5453 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5455 gimple wce;
5456 gimple_seq cleanup_stmts = NULL;
5458 /* Errors can result in improperly nested cleanups. Which results in
5459 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5460 if (seen_error ())
5461 return;
5463 if (gimple_conditional_context ())
5465 /* If we're in a conditional context, this is more complex. We only
5466 want to run the cleanup if we actually ran the initialization that
5467 necessitates it, but we want to run it after the end of the
5468 conditional context. So we wrap the try/finally around the
5469 condition and use a flag to determine whether or not to actually
5470 run the destructor. Thus
5472 test ? f(A()) : 0
5474 becomes (approximately)
5476 flag = 0;
5477 try {
5478 if (test) { A::A(temp); flag = 1; val = f(temp); }
5479 else { val = 0; }
5480 } finally {
5481 if (flag) A::~A(temp);
5485 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5486 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5487 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5489 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5490 gimplify_stmt (&cleanup, &cleanup_stmts);
5491 wce = gimple_build_wce (cleanup_stmts);
/* FFALSE and the flag-guarded cleanup go to the enclosing
   (unconditional) cleanup sequence; FTRUE is emitted at the current
   point so the flag is set only if this conditional path runs.  */
5493 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5494 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5495 gimplify_seq_add_stmt (pre_p, ftrue);
5497 /* Because of this manipulation, and the EH edges that jump
5498 threading cannot redirect, the temporary (VAR) will appear
5499 to be used uninitialized. Don't warn. */
5500 TREE_NO_WARNING (var) = 1;
5502 else
5504 gimplify_stmt (&cleanup, &cleanup_stmts);
5505 wce = gimple_build_wce (cleanup_stmts);
5506 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5507 gimplify_seq_add_stmt (pre_p, wce);
5511 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
/* *EXPR_P is replaced by the TARGET_EXPR's temporary slot.  Returns GS_OK,
   or GS_ERROR if gimplifying the initializer failed.  */
5513 static enum gimplify_status
5514 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5516 tree targ = *expr_p;
5517 tree temp = TARGET_EXPR_SLOT (targ);
5518 tree init = TARGET_EXPR_INITIAL (targ);
5519 enum gimplify_status ret;
5521 if (init)
5523 tree cleanup = NULL_TREE;
5525 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5526 to the temps list. Handle also variable length TARGET_EXPRs. */
5527 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5529 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5530 gimplify_type_sizes (TREE_TYPE (temp), pre_p)
5531 gimplify_vla_decl (temp, pre_p);
5533 else
5534 gimple_add_tmp_var (temp);
5536 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5537 expression is supposed to initialize the slot. */
5538 if (VOID_TYPE_P (TREE_TYPE (init)))
5539 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5540 else
/* Otherwise build "temp = init" and gimplify that as a statement;
   the INIT_EXPR node itself can be returned to the GC afterwards.  */
5542 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5543 init = init_expr;
5544 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5545 init = NULL;
5546 ggc_free (init_expr);
5548 if (ret == GS_ERROR)
5550 /* PR c++/28266 Make sure this is expanded only once. */
5551 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5552 return GS_ERROR;
5554 if (init)
5555 gimplify_and_add (init, pre_p);
5557 /* If needed, push the cleanup for the temp. */
5558 if (TARGET_EXPR_CLEANUP (targ))
5560 if (CLEANUP_EH_ONLY (targ))
5561 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5562 CLEANUP_EH_ONLY (targ), pre_p);
5563 else
5564 cleanup = TARGET_EXPR_CLEANUP (targ);
5567 /* Add a clobber for the temporary going out of scope, like
5568 gimplify_bind_expr. */
5569 if (gimplify_ctxp->in_cleanup_point_expr
5570 && needs_to_live_in_memory (temp)
5571 && flag_stack_reuse == SR_ALL)
5573 tree clobber = build_constructor (TREE_TYPE (temp),
5574 NULL);
5575 TREE_THIS_VOLATILE (clobber) = true;
5576 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
/* Run the clobber after any user cleanup for the same slot.  */
5577 if (cleanup)
5578 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5579 clobber);
5580 else
5581 cleanup = clobber;
5584 if (cleanup)
5585 gimple_push_cleanup (temp, cleanup, false, pre_p);
5587 /* Only expand this once. */
5588 TREE_OPERAND (targ, 3) = init;
5589 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5591 else
5592 /* We should have expanded this before. */
5593 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5595 *expr_p = temp;
5596 return GS_OK;
5599 /* Gimplification of expression trees. */
5601 /* Gimplify an expression which appears at statement context. The
5602 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5603 NULL, a new sequence is allocated.
5605 Return true if we actually added a statement to the queue. */
5607 bool
5608 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5610 gimple_seq_node last;
5612 last = gimple_seq_last (*seq_p);
5613 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5614 return last != gimple_seq_last (*seq_p);
5617 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5618 to CTX. If entries already exist, force them to be some flavor of private.
5619 If there is no enclosing parallel, do nothing. */
5621 void
5622 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5624 splay_tree_node n;
5626 if (decl == NULL || !DECL_P (decl))
5627 return;
5631 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5632 if (n != NULL)
5634 if (n->value & GOVD_SHARED)
5635 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5636 else if (n->value & GOVD_MAP)
5637 n->value |= GOVD_MAP_TO_ONLY;
5638 else
5639 return;
5641 else if (ctx->region_type == ORT_TARGET)
5642 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5643 else if (ctx->region_type != ORT_WORKSHARE
5644 && ctx->region_type != ORT_SIMD
5645 && ctx->region_type != ORT_TARGET_DATA)
5646 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5648 ctx = ctx->outer_context;
5650 while (ctx);
5653 /* Similarly for each of the type sizes of TYPE. */
5655 static void
5656 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5658 if (type == NULL || type == error_mark_node)
5659 return;
5660 type = TYPE_MAIN_VARIANT (type);
5662 if (pointer_set_insert (ctx->privatized_types, type))
5663 return;
5665 switch (TREE_CODE (type))
5667 case INTEGER_TYPE:
5668 case ENUMERAL_TYPE:
5669 case BOOLEAN_TYPE:
5670 case REAL_TYPE:
5671 case FIXED_POINT_TYPE:
5672 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5673 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5674 break;
5676 case ARRAY_TYPE:
5677 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5678 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5679 break;
5681 case RECORD_TYPE:
5682 case UNION_TYPE:
5683 case QUAL_UNION_TYPE:
5685 tree field;
5686 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5687 if (TREE_CODE (field) == FIELD_DECL)
5689 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5690 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5693 break;
5695 case POINTER_TYPE:
5696 case REFERENCE_TYPE:
5697 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5698 break;
5700 default:
5701 break;
5704 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5705 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5706 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5709 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
/* FLAGS is a mask of GOVD_* bits giving the data-sharing class and
   modifiers for DECL.  */
5711 static void
5712 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5714 splay_tree_node n;
5715 unsigned int nflags;
5716 tree t;
5718 if (error_operand_p (decl))
5719 return;
5721 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5722 there are constructors involved somewhere. */
5723 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5724 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5725 flags |= GOVD_SEEN;
5727 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* A pre-existing entry that is exactly GOVD_ALIGNED carries no
   data-sharing class; it falls through and is merged with FLAGS at
   the bottom rather than handled here.  */
5728 if (n != NULL && n->value != GOVD_ALIGNED)
5730 /* We shouldn't be re-adding the decl with the same data
5731 sharing class. */
5732 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5733 /* The only combination of data sharing classes we should see is
5734 FIRSTPRIVATE and LASTPRIVATE. */
5735 nflags = n->value | flags;
5736 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5737 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5738 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5739 n->value = nflags;
5740 return;
5743 /* When adding a variable-sized variable, we have to handle all sorts
5744 of additional bits of data: the pointer replacement variable, and
5745 the parameters of the type. */
5746 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5748 /* Add the pointer replacement variable as PRIVATE if the variable
5749 replacement is private, else FIRSTPRIVATE since we'll need the
5750 address of the original variable either for SHARED, or for the
5751 copy into or out of the context. */
5752 if (!(flags & GOVD_LOCAL))
5754 nflags = flags & GOVD_MAP
5755 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5756 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5757 nflags |= flags & GOVD_SEEN;
/* The pointer replacement is the base of DECL's DECL_VALUE_EXPR,
   which gimplify_vla_decl built as *ptr.  */
5758 t = DECL_VALUE_EXPR (decl);
5759 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5760 t = TREE_OPERAND (t, 0);
5761 gcc_assert (DECL_P (t));
5762 omp_add_variable (ctx, t, nflags);
5765 /* Add all of the variable and type parameters (which should have
5766 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5767 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5768 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5769 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5771 /* The variable-sized variable itself is never SHARED, only some form
5772 of PRIVATE. The sharing would take place via the pointer variable
5773 which we remapped above. */
5774 if (flags & GOVD_SHARED)
5775 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5776 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5778 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5779 alloca statement we generate for the variable, so make sure it
5780 is available. This isn't automatically needed for the SHARED
5781 case, since we won't be allocating local storage then.
5782 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5783 in this case omp_notice_variable will be called later
5784 on when it is gimplified. */
5785 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5786 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5787 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5789 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5790 && lang_hooks.decls.omp_privatize_by_reference (decl))
5792 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5794 /* Similar to the direct variable sized case above, we'll need the
5795 size of references being privatized. */
5796 if ((flags & GOVD_SHARED) == 0)
5798 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5799 if (TREE_CODE (t) != INTEGER_CST)
5800 omp_notice_variable (ctx, t, true);
/* Finally record (or merge) the flags for DECL itself.  */
5804 if (n != NULL)
5805 n->value |= flags;
5806 else
5807 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5810 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5811 This just prints out diagnostics about threadprivate variable uses
5812 in untied tasks. If DECL2 is non-NULL, prevent this warning
5813 on that variable. */
/* Also diagnoses uses inside any enclosing target region.  Inserting the
   decl with value 0 suppresses duplicate diagnostics on later uses.
   Always returns false (the decl is never remapped here).  */
5815 static bool
5816 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5817 tree decl2)
5819 splay_tree_node n;
5820 struct gimplify_omp_ctx *octx;
/* First, diagnose a use within any enclosing target region.  */
5822 for (octx = ctx; octx; octx = octx->outer_context)
5823 if (octx->region_type == ORT_TARGET)
5825 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5826 if (n == NULL)
5828 error ("threadprivate variable %qE used in target region",
5829 DECL_NAME (decl));
5830 error_at (octx->location, "enclosing target region");
5831 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5833 if (decl2)
5834 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* The untied-task diagnostic only applies to the innermost context.  */
5837 if (ctx->region_type != ORT_UNTIED_TASK)
5838 return false;
5839 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5840 if (n == NULL)
5842 error ("threadprivate variable %qE used in untied task",
5843 DECL_NAME (decl));
5844 error_at (ctx->location, "enclosing task");
5845 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5847 if (decl2)
5848 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5849 return false;
5852 /* Record the fact that DECL was used within the OpenMP context CTX.
5853 IN_CODE is true when real code uses DECL, and false when we should
5854 merely emit default(none) errors. Return true if DECL is going to
5855 be remapped and thus DECL shouldn't be gimplified into its
5856 DECL_VALUE_EXPR (if any). */
5858 static bool
5859 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5861 splay_tree_node n;
5862 unsigned flags = in_code ? GOVD_SEEN : 0;
5863 bool ret = false, shared;
5865 if (error_operand_p (decl))
5866 return false;
5868 /* Threadprivate variables are predetermined. */
5869 if (is_global_var (decl))
5871 if (DECL_THREAD_LOCAL_P (decl))
5872 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
/* An emutls-style global whose DECL_VALUE_EXPR is based on a
   thread-local proxy counts as threadprivate too.  */
5874 if (DECL_HAS_VALUE_EXPR_P (decl))
5876 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5878 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5879 return omp_notice_threadprivate_variable (ctx, decl, value);
5883 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* In a target region every referenced object must get a map entry;
   diagnose types the language hook says cannot be mapped.  */
5884 if (ctx->region_type == ORT_TARGET)
5886 if (n == NULL)
5888 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5890 error ("%qD referenced in target region does not have "
5891 "a mappable type", decl);
5892 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5894 else
5895 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5897 else
5898 n->value |= flags;
5899 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5900 goto do_outer;
/* DECL not yet seen in this context: derive its data-sharing class
   from the default clause (or the language's predetermined sharing).  */
5903 if (n == NULL)
5905 enum omp_clause_default_kind default_kind, kind;
5906 struct gimplify_omp_ctx *octx;
5908 if (ctx->region_type == ORT_WORKSHARE
5909 || ctx->region_type == ORT_SIMD
5910 || ctx->region_type == ORT_TARGET_DATA)
5911 goto do_outer;
5913 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5914 remapped firstprivate instead of shared. To some extent this is
5915 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5916 default_kind = ctx->default_kind;
5917 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5918 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5919 default_kind = kind;
5921 switch (default_kind)
5923 case OMP_CLAUSE_DEFAULT_NONE:
5924 if ((ctx->region_type & ORT_TASK) != 0)
5926 error ("%qE not specified in enclosing task",
5927 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5928 error_at (ctx->location, "enclosing task");
5930 else if (ctx->region_type == ORT_TEAMS)
5932 error ("%qE not specified in enclosing teams construct",
5933 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5934 error_at (ctx->location, "enclosing teams construct");
5936 else
5938 error ("%qE not specified in enclosing parallel",
5939 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5940 error_at (ctx->location, "enclosing parallel");
5942 /* FALLTHRU */
5943 case OMP_CLAUSE_DEFAULT_SHARED:
5944 flags |= GOVD_SHARED;
5945 break;
5946 case OMP_CLAUSE_DEFAULT_PRIVATE:
5947 flags |= GOVD_PRIVATE;
5948 break;
5949 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5950 flags |= GOVD_FIRSTPRIVATE;
5951 break;
5952 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5953 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5954 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5955 if (ctx->outer_context)
5956 omp_notice_variable (ctx->outer_context, decl, in_code);
/* Scan outward: if DECL is non-shared in any enclosing context
   up to (and including) the binding parallel/teams region, the
   task must capture it firstprivate.  */
5957 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5959 splay_tree_node n2;
5961 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5962 continue;
5963 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5964 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5966 flags |= GOVD_FIRSTPRIVATE;
5967 break;
5969 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5970 break;
5972 if (flags & GOVD_FIRSTPRIVATE)
5973 break;
5974 if (octx == NULL
5975 && (TREE_CODE (decl) == PARM_DECL
5976 || (!is_global_var (decl)
5977 && DECL_CONTEXT (decl) == current_function_decl)))
5979 flags |= GOVD_FIRSTPRIVATE;
5980 break;
5982 flags |= GOVD_SHARED;
5983 break;
5984 default:
5985 gcc_unreachable ();
5988 if ((flags & GOVD_PRIVATE)
5989 && lang_hooks.decls.omp_private_outer_ref (decl))
5990 flags |= GOVD_PRIVATE_OUTER_REF;
5992 omp_add_variable (ctx, decl, flags);
5994 shared = (flags & GOVD_SHARED) != 0;
5995 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5996 goto do_outer;
/* DECL already had an entry.  When a variable-sized decl is now really
   used, also mark its pointer replacement variable (the base of its
   DECL_VALUE_EXPR) as seen.  */
5999 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6000 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6001 && DECL_SIZE (decl)
6002 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6004 splay_tree_node n2;
6005 tree t = DECL_VALUE_EXPR (decl);
6006 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6007 t = TREE_OPERAND (t, 0);
6008 gcc_assert (DECL_P (t));
6009 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6010 n2->value |= GOVD_SEEN;
6013 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6014 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6016 /* If nothing changed, there's nothing left to do. */
6017 if ((n->value & flags) == flags)
6018 return ret;
6019 flags |= n->value;
6020 n->value = flags;
6022 do_outer:
6023 /* If the variable is private in the current context, then we don't
6024 need to propagate anything to an outer context. */
6025 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6026 return ret;
6027 if (ctx->outer_context
6028 && omp_notice_variable (ctx->outer_context, decl, in_code))
6029 return true;
6030 return ret;
6033 /* Verify that DECL is private within CTX. If there's specific information
6034 to the contrary in the innermost scope, generate an error. */
/* SIMD is true when DECL is the iteration variable of a simd construct,
   whose predetermined sharing is linear rather than private; the
   diagnostics differ accordingly.  */
6036 static bool
6037 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
6039 splay_tree_node n;
6041 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6042 if (n != NULL)
6044 if (n->value & GOVD_SHARED)
/* Only complain in the innermost context; then force the entry
   private so the error is not repeated.  */
6046 if (ctx == gimplify_omp_ctxp)
6048 if (simd)
6049 error ("iteration variable %qE is predetermined linear",
6050 DECL_NAME (decl));
6051 else
6052 error ("iteration variable %qE should be private",
6053 DECL_NAME (decl));
6054 n->value = GOVD_PRIVATE;
6055 return true;
6057 else
6058 return false;
/* An explicit clause on this construct (or on the parallel a
   combined construct was split from) may conflict with the
   predetermined sharing of the iteration variable.  */
6060 else if ((n->value & GOVD_EXPLICIT) != 0
6061 && (ctx == gimplify_omp_ctxp
6062 || (ctx->region_type == ORT_COMBINED_PARALLEL
6063 && gimplify_omp_ctxp->outer_context == ctx)))
6065 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6066 error ("iteration variable %qE should not be firstprivate",
6067 DECL_NAME (decl));
6068 else if ((n->value & GOVD_REDUCTION) != 0)
6069 error ("iteration variable %qE should not be reduction",
6070 DECL_NAME (decl));
6071 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
6072 error ("iteration variable %qE should not be lastprivate",
6073 DECL_NAME (decl));
6074 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6075 error ("iteration variable %qE should not be private",
6076 DECL_NAME (decl));
6077 else if (simd && (n->value & GOVD_LINEAR) != 0)
6078 error ("iteration variable %qE is predetermined linear",
6079 DECL_NAME (decl));
6081 return (ctx == gimplify_omp_ctxp
6082 || (ctx->region_type == ORT_COMBINED_PARALLEL
6083 && gimplify_omp_ctxp->outer_context == ctx));
/* No entry for DECL here: keep looking outward, but only through
   workshare and simd contexts.  */
6086 if (ctx->region_type != ORT_WORKSHARE
6087 && ctx->region_type != ORT_SIMD)
6088 return false;
6089 else if (ctx->outer_context)
6090 return omp_is_private (ctx->outer_context, decl, simd);
6091 return false;
6094 /* Return true if DECL is private within a parallel region
6095 that binds to the current construct's context or in parallel
6096 region's REDUCTION clause. */
6098 static bool
6099 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
6101 splay_tree_node n;
6105 ctx = ctx->outer_context;
6106 if (ctx == NULL)
6107 return !(is_global_var (decl)
6108 /* References might be private, but might be shared too. */
6109 || lang_hooks.decls.omp_privatize_by_reference (decl));
6111 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
6112 continue;
6114 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6115 if (n != NULL)
6116 return (n->value & GOVD_SHARED) == 0;
6118 while (ctx->region_type == ORT_WORKSHARE
6119 || ctx->region_type == ORT_SIMD);
6120 return false;
6123 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
6124 and previous omp contexts. */
6126 static void
6127 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6128 enum omp_region_type region_type)
/* Scan the OMP clause list *LIST_P for a construct of REGION_TYPE:
   open a new gimplify-omp context, record each explicitly named
   variable in it with the appropriate GOVD_* flags, and gimplify
   clause operands into PRE_P.  Clauses whose operands fail to
   gimplify (or name error_mark operands) are unlinked from the
   list.  */
6130 struct gimplify_omp_ctx *ctx, *outer_ctx;
6131 struct gimplify_ctx gctx;
6132 tree c;
/* Enter a fresh context; OUTER_CTX is the enclosing OMP context,
   if any (may be NULL).  */
6134 ctx = new_omp_context (region_type);
6135 outer_ctx = ctx->outer_context;
/* Walk the clause chain.  REMOVE unlinks the current clause;
   NOTICE_OUTER controls whether do_add falls through to do_notice;
   CHECK_NON_PRIVATE names the clause kind for the private-in-outer
   diagnostic below.  */
6137 while ((c = *list_p) != NULL)
6139 bool remove = false;
6140 bool notice_outer = true;
6141 const char *check_non_private = NULL;
6142 unsigned int flags;
6143 tree decl;
6145 switch (OMP_CLAUSE_CODE (c))
6147 case OMP_CLAUSE_PRIVATE:
6148 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6149 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6151 flags |= GOVD_PRIVATE_OUTER_REF;
6152 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6154 else
6155 notice_outer = false;
6156 goto do_add;
6157 case OMP_CLAUSE_SHARED:
6158 flags = GOVD_SHARED | GOVD_EXPLICIT;
6159 goto do_add;
6160 case OMP_CLAUSE_FIRSTPRIVATE:
6161 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6162 check_non_private = "firstprivate";
6163 goto do_add;
6164 case OMP_CLAUSE_LASTPRIVATE:
6165 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6166 check_non_private = "lastprivate";
6167 goto do_add;
6168 case OMP_CLAUSE_REDUCTION:
6169 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6170 check_non_private = "reduction";
6171 goto do_add;
6172 case OMP_CLAUSE_LINEAR:
/* The linear step must become a gimple value before the clause can
   be kept.  */
6173 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6174 is_gimple_val, fb_rvalue) == GS_ERROR)
6176 remove = true;
6177 break;
6179 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6180 goto do_add;
6182 case OMP_CLAUSE_MAP:
6183 if (OMP_CLAUSE_SIZE (c)
6184 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6185 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6187 remove = true;
6188 break;
6190 decl = OMP_CLAUSE_DECL (c);
/* A non-DECL map operand (e.g. an array section lowered earlier) is
   only gimplified as an lvalue, not entered into the context.  */
6191 if (!DECL_P (decl))
6193 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6194 NULL, is_gimple_lvalue, fb_lvalue)
6195 == GS_ERROR)
6197 remove = true;
6198 break;
6200 break;
6202 flags = GOVD_MAP | GOVD_EXPLICIT;
6203 goto do_add;
6205 case OMP_CLAUSE_DEPEND:
/* A COMPOUND_EXPR decl carries a side-effect part to evaluate first;
   afterwards the depend operand is replaced by its address, which
   must gimplify to a value.  */
6206 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6208 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6209 NULL, is_gimple_val, fb_rvalue);
6210 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6212 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6214 remove = true;
6215 break;
6217 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6218 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6219 is_gimple_val, fb_rvalue) == GS_ERROR)
6221 remove = true;
6222 break;
6224 break;
6226 case OMP_CLAUSE_TO:
6227 case OMP_CLAUSE_FROM:
6228 if (OMP_CLAUSE_SIZE (c)
6229 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6230 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6232 remove = true;
6233 break;
6235 decl = OMP_CLAUSE_DECL (c);
6236 if (error_operand_p (decl))
6238 remove = true;
6239 break;
6241 if (!DECL_P (decl))
6243 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6244 NULL, is_gimple_lvalue, fb_lvalue)
6245 == GS_ERROR)
6247 remove = true;
6248 break;
6250 break;
/* to/from clauses are noticed in the outer context but not added to
   this one.  */
6252 goto do_notice;
/* Common path for data-sharing clauses: record DECL with FLAGS in
   the new context, with extra work for reduction placeholders and
   lastprivate statements.  */
6254 do_add:
6255 decl = OMP_CLAUSE_DECL (c);
6256 if (error_operand_p (decl))
6258 remove = true;
6259 break;
6261 omp_add_variable (ctx, decl, flags);
6262 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6263 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6265 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6266 GOVD_LOCAL | GOVD_SEEN);
6267 gimplify_omp_ctxp = ctx;
6268 push_gimplify_context (&gctx);
/* Gimplify the user-defined reduction's INIT and MERGE trees into
   their GIMPLE_SEQ slots, each inside its own gimplify context, then
   clear the tree forms.  */
6270 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6271 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6273 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6274 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6275 pop_gimplify_context
6276 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6277 push_gimplify_context (&gctx);
6278 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6279 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6280 pop_gimplify_context
6281 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6282 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6283 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6285 gimplify_omp_ctxp = outer_ctx;
6287 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6288 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6290 gimplify_omp_ctxp = ctx;
6291 push_gimplify_context (&gctx);
/* Wrap a bare lastprivate statement in a BIND_EXPR so gimplification
   produces a GIMPLE_BIND to pop the context with.  */
6292 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6294 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6295 NULL, NULL);
6296 TREE_SIDE_EFFECTS (bind) = 1;
6297 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6298 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6300 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6301 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6302 pop_gimplify_context
6303 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6304 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6306 gimplify_omp_ctxp = outer_ctx;
6308 if (notice_outer)
6309 goto do_notice;
6310 break;
6312 case OMP_CLAUSE_COPYIN:
6313 case OMP_CLAUSE_COPYPRIVATE:
6314 decl = OMP_CLAUSE_DECL (c);
6315 if (error_operand_p (decl))
6317 remove = true;
6318 break;
/* Propagate DECL's use to the enclosing context, and diagnose
   firstprivate/lastprivate/reduction of a variable that is private
   in the outer context of a worksharing region.  */
6320 do_notice:
6321 if (outer_ctx)
6322 omp_notice_variable (outer_ctx, decl, true);
6323 if (check_non_private
6324 && region_type == ORT_WORKSHARE
6325 && omp_check_private (ctx, decl))
6327 error ("%s variable %qE is private in outer context",
6328 check_non_private, DECL_NAME (decl));
6329 remove = true;
6331 break;
6333 case OMP_CLAUSE_FINAL:
6334 case OMP_CLAUSE_IF:
/* final/if operands are conditions: boolify before gimplifying.  */
6335 OMP_CLAUSE_OPERAND (c, 0)
6336 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0))
/* Fall through. */
6339 case OMP_CLAUSE_SCHEDULE:
6340 case OMP_CLAUSE_NUM_THREADS:
6341 case OMP_CLAUSE_NUM_TEAMS:
6342 case OMP_CLAUSE_THREAD_LIMIT:
6343 case OMP_CLAUSE_DIST_SCHEDULE:
6344 case OMP_CLAUSE_DEVICE:
6345 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6346 is_gimple_val, fb_rvalue) == GS_ERROR)
6347 remove = true;
6348 break;
/* Operand-less clauses need no processing here.  */
6350 case OMP_CLAUSE_NOWAIT:
6351 case OMP_CLAUSE_ORDERED:
6352 case OMP_CLAUSE_UNTIED:
6353 case OMP_CLAUSE_COLLAPSE:
6354 case OMP_CLAUSE_MERGEABLE:
6355 case OMP_CLAUSE_PROC_BIND:
6356 case OMP_CLAUSE_SAFELEN:
6357 break;
6359 case OMP_CLAUSE_ALIGNED:
6360 decl = OMP_CLAUSE_DECL (c);
6361 if (error_operand_p (decl))
6363 remove = true;
6364 break;
6366 if (!is_global_var (decl)
6367 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6368 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6369 break;
6371 case OMP_CLAUSE_DEFAULT:
6372 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6373 break;
6375 default:
6376 gcc_unreachable ();
/* Unlink the clause or advance to the next one.  */
6379 if (remove)
6380 *list_p = OMP_CLAUSE_CHAIN (c);
6381 else
6382 list_p = &OMP_CLAUSE_CHAIN (c);
/* Leave the new context installed; gimplify_adjust_omp_clauses pops
   and deletes it later.  */
6385 gimplify_omp_ctxp = ctx;
6388 /* For all variables that were not actually used within the context,
6389 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6391 static int
6392 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
/* splay_tree_foreach callback: for one context variable N (key = decl,
   value = GOVD_* flags) build an implicit data-sharing clause and
   prepend it to the clause list passed via DATA.  Returns 0 always so
   the traversal continues.  */
6394 tree *list_p = (tree *) data;
6395 tree decl = (tree) n->key;
6396 unsigned flags = n->value;
6397 enum omp_clause_code code;
6398 tree clause;
6399 bool private_debug;
/* Explicit clauses were already emitted by the user; local and unseen
   variables need no implicit clause.  */
6401 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6402 return 0;
6403 if ((flags & GOVD_SEEN) == 0)
6404 return 0;
6405 if (flags & GOVD_DEBUG_PRIVATE)
6407 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6408 private_debug = true;
6410 else if (flags & GOVD_MAP)
6411 private_debug = false;
6412 else
6413 private_debug
6414 = lang_hooks.decls.omp_private_debug_clause (decl,
6415 !!(flags & GOVD_SHARED));
/* Pick the clause code from the recorded sharing class.  */
6416 if (private_debug)
6417 code = OMP_CLAUSE_PRIVATE;
6418 else if (flags & GOVD_MAP)
6419 code = OMP_CLAUSE_MAP;
6420 else if (flags & GOVD_SHARED)
6422 if (is_global_var (decl))
/* A global is implicitly shared anyway; only emit an explicit shared
   clause if some enclosing context privatizes it.  */
6424 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6425 while (ctx != NULL)
6427 splay_tree_node on
6428 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6429 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6430 | GOVD_PRIVATE | GOVD_REDUCTION
6431 | GOVD_LINEAR)) != 0)
6432 break;
6433 ctx = ctx->outer_context;
6435 if (ctx == NULL)
6436 return 0;
6438 code = OMP_CLAUSE_SHARED;
6440 else if (flags & GOVD_PRIVATE)
6441 code = OMP_CLAUSE_PRIVATE;
6442 else if (flags & GOVD_FIRSTPRIVATE)
6443 code = OMP_CLAUSE_FIRSTPRIVATE;
6444 else if (flags & GOVD_LASTPRIVATE)
6445 code = OMP_CLAUSE_LASTPRIVATE;
6446 else if (flags & GOVD_ALIGNED)
6447 return 0;
6448 else
6449 gcc_unreachable ();
6451 clause = build_omp_clause (input_location, code);
6452 OMP_CLAUSE_DECL (clause) = decl;
6453 OMP_CLAUSE_CHAIN (clause) = *list_p;
6454 if (private_debug)
6455 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6456 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6457 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6458 else if (code == OMP_CLAUSE_MAP)
6460 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6461 ? OMP_CLAUSE_MAP_TO
6462 : OMP_CLAUSE_MAP_TOFROM;
/* For a variable-sized decl, map the pointed-to storage via its
   DECL_VALUE_EXPR (*ptr) and add a second clause mapping the pointer
   itself with OMP_CLAUSE_MAP_POINTER.  */
6463 if (DECL_SIZE (decl)
6464 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6466 tree decl2 = DECL_VALUE_EXPR (decl);
6467 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6468 decl2 = TREE_OPERAND (decl2, 0);
6469 gcc_assert (DECL_P (decl2));
6470 tree mem = build_simple_mem_ref (decl2);
6471 OMP_CLAUSE_DECL (clause) = mem;
6472 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6473 if (gimplify_omp_ctxp->outer_context)
6475 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6476 omp_notice_variable (ctx, decl2, true);
6477 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6479 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6480 OMP_CLAUSE_MAP);
6481 OMP_CLAUSE_DECL (nc) = decl;
6482 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6483 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6484 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6485 OMP_CLAUSE_CHAIN (clause) = nc;
6488 *list_p = clause;
/* Let the front end finalize the new implicit clause.  */
6489 lang_hooks.decls.omp_finish_clause (clause);
6491 return 0;
6494 static void
6495 gimplify_adjust_omp_clauses (tree *list_p)
/* Post-gimplification fixups of the clause list *LIST_P for the
   current gimplify-omp context: drop clauses for variables never
   actually used in the region, rewrite debug-privates, fix up
   variable-sized map/to/from operands, append implicit data-sharing
   clauses, and finally pop and delete the context that
   gimplify_scan_omp_clauses pushed.  */
6497 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6498 tree c, decl;
6500 while ((c = *list_p) != NULL)
6502 splay_tree_node n;
6503 bool remove = false;
6505 switch (OMP_CLAUSE_CODE (c))
6507 case OMP_CLAUSE_PRIVATE:
6508 case OMP_CLAUSE_SHARED:
6509 case OMP_CLAUSE_FIRSTPRIVATE:
6510 case OMP_CLAUSE_LINEAR:
6511 decl = OMP_CLAUSE_DECL (c);
6512 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* Unused in the region: drop the clause entirely.  */
6513 remove = !(n->value & GOVD_SEEN);
6514 if (! remove)
6516 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
/* Turn the clause into a debug-only private when the front end
   requests it (keeps the variable visible to the debugger).  */
6517 if ((n->value & GOVD_DEBUG_PRIVATE)
6518 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6520 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6521 || ((n->value & GOVD_DATA_SHARE_CLASS)
6522 == GOVD_PRIVATE));
6523 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6524 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
/* A linear variable that is copied in or out must also get a
   data-sharing entry in the enclosing combined-parallel context.  */
6526 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6527 && ctx->outer_context
6528 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6529 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6530 && !is_global_var (decl))
6532 if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
6534 n = splay_tree_lookup (ctx->outer_context->variables,
6535 (splay_tree_key) decl);
6536 if (n == NULL
6537 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6539 int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6540 ? GOVD_LASTPRIVATE : GOVD_SHARED;
6541 if (n == NULL)
6542 omp_add_variable (ctx->outer_context, decl,
6543 flags | GOVD_SEEN);
6544 else
6545 n->value |= flags | GOVD_SEEN;
6548 else
6549 omp_notice_variable (ctx->outer_context, decl, true);
6552 break;
6554 case OMP_CLAUSE_LASTPRIVATE:
6555 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6556 accurately reflect the presence of a FIRSTPRIVATE clause. */
6557 decl = OMP_CLAUSE_DECL (c);
6558 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6559 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6560 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6561 break;
6563 case OMP_CLAUSE_ALIGNED:
6564 decl = OMP_CLAUSE_DECL (c);
6565 if (!is_global_var (decl))
6567 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6568 remove = n == NULL || !(n->value & GOVD_SEEN);
6569 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6571 struct gimplify_omp_ctx *octx;
6572 if (n != NULL
6573 && (n->value & (GOVD_DATA_SHARE_CLASS
6574 & ~GOVD_FIRSTPRIVATE)))
6575 remove = true;
6576 else
6577 for (octx = ctx->outer_context; octx;
6578 octx = octx->outer_context)
6580 n = splay_tree_lookup (octx->variables,
6581 (splay_tree_key) decl);
6582 if (n == NULL)
6583 continue;
6584 if (n->value & GOVD_LOCAL)
6585 break;
6586 /* We have to avoid assigning a shared variable
6587 to itself when trying to add
6588 __builtin_assume_aligned. */
6589 if (n->value & GOVD_SHARED)
6591 remove = true;
6592 break;
6597 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6599 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6600 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6601 remove = true;
6603 break;
6605 case OMP_CLAUSE_MAP:
6606 decl = OMP_CLAUSE_DECL (c);
6607 if (!DECL_P (decl))
6608 break;
6609 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* On a target region, drop maps of variables never referenced.  */
6610 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6611 remove = true;
6612 else if (DECL_SIZE (decl)
6613 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6614 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
/* Variable-sized decl: map the pointed-to storage and chain an
   additional OMP_CLAUSE_MAP_POINTER clause for the pointer itself
   (same shape as in gimplify_adjust_omp_clauses_1).  */
6616 tree decl2 = DECL_VALUE_EXPR (decl);
6617 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6618 decl2 = TREE_OPERAND (decl2, 0);
6619 gcc_assert (DECL_P (decl2));
6620 tree mem = build_simple_mem_ref (decl2);
6621 OMP_CLAUSE_DECL (c) = mem;
6622 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6623 if (ctx->outer_context)
6625 omp_notice_variable (ctx->outer_context, decl2, true);
6626 omp_notice_variable (ctx->outer_context,
6627 OMP_CLAUSE_SIZE (c), true);
6629 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6630 OMP_CLAUSE_MAP);
6631 OMP_CLAUSE_DECL (nc) = decl;
6632 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6633 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6634 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6635 OMP_CLAUSE_CHAIN (c) = nc;
/* Skip over the freshly inserted pointer clause.  */
6636 c = nc;
6638 break;
6640 case OMP_CLAUSE_TO:
6641 case OMP_CLAUSE_FROM:
6642 decl = OMP_CLAUSE_DECL (c);
6643 if (!DECL_P (decl))
6644 break;
6645 if (DECL_SIZE (decl)
6646 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6648 tree decl2 = DECL_VALUE_EXPR (decl);
6649 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6650 decl2 = TREE_OPERAND (decl2, 0);
6651 gcc_assert (DECL_P (decl2));
6652 tree mem = build_simple_mem_ref (decl2);
6653 OMP_CLAUSE_DECL (c) = mem;
6654 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6655 if (ctx->outer_context)
6657 omp_notice_variable (ctx->outer_context, decl2, true);
6658 omp_notice_variable (ctx->outer_context,
6659 OMP_CLAUSE_SIZE (c), true);
6662 break;
/* The remaining clause kinds need no adjustment.  */
6664 case OMP_CLAUSE_REDUCTION:
6665 case OMP_CLAUSE_COPYIN:
6666 case OMP_CLAUSE_COPYPRIVATE:
6667 case OMP_CLAUSE_IF:
6668 case OMP_CLAUSE_NUM_THREADS:
6669 case OMP_CLAUSE_NUM_TEAMS:
6670 case OMP_CLAUSE_THREAD_LIMIT:
6671 case OMP_CLAUSE_DIST_SCHEDULE:
6672 case OMP_CLAUSE_DEVICE:
6673 case OMP_CLAUSE_SCHEDULE:
6674 case OMP_CLAUSE_NOWAIT:
6675 case OMP_CLAUSE_ORDERED:
6676 case OMP_CLAUSE_DEFAULT:
6677 case OMP_CLAUSE_UNTIED:
6678 case OMP_CLAUSE_COLLAPSE:
6679 case OMP_CLAUSE_FINAL:
6680 case OMP_CLAUSE_MERGEABLE:
6681 case OMP_CLAUSE_PROC_BIND:
6682 case OMP_CLAUSE_SAFELEN:
6683 case OMP_CLAUSE_DEPEND:
6684 break;
6686 default:
6687 gcc_unreachable ();
6690 if (remove)
6691 *list_p = OMP_CLAUSE_CHAIN (c);
6692 else
6693 list_p = &OMP_CLAUSE_CHAIN (c);
6696 /* Add in any implicit data sharing. */
6697 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
/* Pop and destroy the context pushed by gimplify_scan_omp_clauses.  */
6699 gimplify_omp_ctxp = ctx->outer_context;
6700 delete_omp_context (ctx);
6703 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6704 gimplification of the body, as well as scanning the body for used
6705 variables. We need to do this scan now, because variable-sized
6706 decls will be decomposed during gimplification. */
6708 static void
6709 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6711 tree expr = *expr_p;
6712 gimple g;
6713 gimple_seq body = NULL;
6714 struct gimplify_ctx gctx;
6716 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6717 OMP_PARALLEL_COMBINED (expr)
6718 ? ORT_COMBINED_PARALLEL
6719 : ORT_PARALLEL);
6721 push_gimplify_context (&gctx);
6723 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6724 if (gimple_code (g) == GIMPLE_BIND)
6725 pop_gimplify_context (g);
6726 else
6727 pop_gimplify_context (NULL);
6729 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6731 g = gimple_build_omp_parallel (body,
6732 OMP_PARALLEL_CLAUSES (expr),
6733 NULL_TREE, NULL_TREE);
6734 if (OMP_PARALLEL_COMBINED (expr))
6735 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6736 gimplify_seq_add_stmt (pre_p, g);
6737 *expr_p = NULL_TREE;
6740 /* Gimplify the contents of an OMP_TASK statement. This involves
6741 gimplification of the body, as well as scanning the body for used
6742 variables. We need to do this scan now, because variable-sized
6743 decls will be decomposed during gimplification. */
6745 static void
6746 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6748 tree expr = *expr_p;
6749 gimple g;
6750 gimple_seq body = NULL;
6751 struct gimplify_ctx gctx;
6753 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6754 find_omp_clause (OMP_TASK_CLAUSES (expr),
6755 OMP_CLAUSE_UNTIED)
6756 ? ORT_UNTIED_TASK : ORT_TASK);
6758 push_gimplify_context (&gctx);
6760 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6761 if (gimple_code (g) == GIMPLE_BIND)
6762 pop_gimplify_context (g);
6763 else
6764 pop_gimplify_context (NULL);
6766 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6768 g = gimple_build_omp_task (body,
6769 OMP_TASK_CLAUSES (expr),
6770 NULL_TREE, NULL_TREE,
6771 NULL_TREE, NULL_TREE, NULL_TREE);
6772 gimplify_seq_add_stmt (pre_p, g);
6773 *expr_p = NULL_TREE;
6776 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6777 with non-NULL OMP_FOR_INIT. */
6779 static tree
6780 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6782 *walk_subtrees = 0;
6783 switch (TREE_CODE (*tp))
6785 case OMP_FOR:
6786 *walk_subtrees = 1;
6787 /* FALLTHRU */
6788 case OMP_SIMD:
6789 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6790 return *tp;
6791 break;
6792 case BIND_EXPR:
6793 case STATEMENT_LIST:
6794 case OMP_PARALLEL:
6795 *walk_subtrees = 1;
6796 break;
6797 default:
6798 break;
6800 return NULL_TREE;
6803 /* Gimplify the gross structure of an OMP_FOR statement. */
6805 static enum gimplify_status
6806 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
/* Gimplify the gross structure of an OMP_FOR/OMP_SIMD/OMP_DISTRIBUTE
   in *EXPR_P: privatize the iteration variables, gimplify the
   init/cond/incr vectors and the body, and emit a GIMPLE_OMP_FOR into
   PRE_P.  Returns GS_ALL_DONE on success, GS_ERROR otherwise.  */
6808 tree for_stmt, orig_for_stmt, decl, var, t;
6809 enum gimplify_status ret = GS_ALL_DONE;
6810 enum gimplify_status tret;
6811 gimple gfor;
6812 gimple_seq for_body, for_pre_body;
6813 int i;
6814 bool simd;
6815 bitmap has_decl_expr = NULL;
6817 orig_for_stmt = for_stmt = *expr_p;
6819 simd = TREE_CODE (for_stmt) == OMP_SIMD;
6820 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6821 simd ? ORT_SIMD : ORT_WORKSHARE);
6823 /* Handle OMP_FOR_INIT. */
6824 for_pre_body = NULL;
/* For simd, note which iteration variables were declared in the
   pre-body (in the for-init-statement); those get NO_COPYOUT /
   non-lastprivate treatment below.  */
6825 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6827 has_decl_expr = BITMAP_ALLOC (NULL);
6828 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6829 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6830 == VAR_DECL)
6832 t = OMP_FOR_PRE_BODY (for_stmt);
6833 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6835 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6837 tree_stmt_iterator si;
6838 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6839 tsi_next (&si))
6841 t = tsi_stmt (si);
6842 if (TREE_CODE (t) == DECL_EXPR
6843 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6844 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6848 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6849 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
/* A NULL OMP_FOR_INIT means this is the outer statement of a combined
   construct: the real loop with the init vector is inside the body.  */
6851 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6853 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6854 NULL, NULL);
6855 gcc_assert (for_stmt != NULL_TREE);
6856 gimplify_omp_ctxp->combined_loop = true;
6859 for_body = NULL;
6860 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6861 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6862 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6863 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
/* Process each collapsed loop dimension.  */
6864 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6866 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6867 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6868 decl = TREE_OPERAND (t, 0);
6869 gcc_assert (DECL_P (decl));
6870 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6871 || POINTER_TYPE_P (TREE_TYPE (decl)));
6873 /* Make sure the iteration variable is private. */
6874 tree c = NULL_TREE;
6875 if (orig_for_stmt != for_stmt)
6876 /* Do this only on innermost construct for combined ones. */;
6877 else if (simd)
/* simd iteration variables become linear (single loop) or
   lastprivate/private (collapsed loops) rather than plain private.  */
6879 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6880 (splay_tree_key)decl);
6881 omp_is_private (gimplify_omp_ctxp, decl, simd);
6882 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6883 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6884 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6886 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6887 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6888 if (has_decl_expr
6889 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6890 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6891 OMP_CLAUSE_DECL (c) = decl;
6892 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6893 OMP_FOR_CLAUSES (for_stmt) = c;
6894 omp_add_variable (gimplify_omp_ctxp, decl,
6895 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6897 else
6899 bool lastprivate
6900 = (!has_decl_expr
6901 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6902 c = build_omp_clause (input_location,
6903 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6904 : OMP_CLAUSE_PRIVATE);
6905 OMP_CLAUSE_DECL (c) = decl;
6906 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6907 omp_add_variable (gimplify_omp_ctxp, decl,
6908 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6909 | GOVD_SEEN);
6910 c = NULL_TREE;
6913 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
6914 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6915 else
6916 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6918 /* If DECL is not a gimple register, create a temporary variable to act
6919 as an iteration counter. This is valid, since DECL cannot be
6920 modified in the body of the loop. */
6921 if (orig_for_stmt != for_stmt)
6922 var = decl;
6923 else if (!is_gimple_reg (decl))
6925 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6926 TREE_OPERAND (t, 0) = var;
6928 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6930 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6932 else
6933 var = decl;
/* Gimplify the initial value into the pre-body.  */
6935 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6936 is_gimple_val, fb_rvalue);
6937 ret = MIN (ret, tret);
6938 if (ret == GS_ERROR)
6939 return ret;
6941 /* Handle OMP_FOR_COND. */
6942 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6943 gcc_assert (COMPARISON_CLASS_P (t));
6944 gcc_assert (TREE_OPERAND (t, 0) == decl);
6946 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6947 is_gimple_val, fb_rvalue);
6948 ret = MIN (ret, tret);
6950 /* Handle OMP_FOR_INCR. */
/* Canonicalize the increment to VAR = VAR +/- STEP and, for a linear
   simd clause C created above, record the step.  */
6951 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6952 switch (TREE_CODE (t))
6954 case PREINCREMENT_EXPR:
6955 case POSTINCREMENT_EXPR:
6956 if (orig_for_stmt != for_stmt)
6957 break;
6958 t = build_int_cst (TREE_TYPE (decl), 1);
6959 if (c)
6960 OMP_CLAUSE_LINEAR_STEP (c) = t;
6961 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6962 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6963 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6964 break;
6966 case PREDECREMENT_EXPR:
6967 case POSTDECREMENT_EXPR:
6968 if (orig_for_stmt != for_stmt)
6969 break;
6970 t = build_int_cst (TREE_TYPE (decl), -1);
6971 if (c)
6972 OMP_CLAUSE_LINEAR_STEP (c) = t;
6973 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6974 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6975 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6976 break;
6978 case MODIFY_EXPR:
6979 gcc_assert (TREE_OPERAND (t, 0) == decl);
6980 TREE_OPERAND (t, 0) = var;
6982 t = TREE_OPERAND (t, 1);
6983 switch (TREE_CODE (t))
6985 case PLUS_EXPR:
/* DECL may appear on either side of PLUS; move it to operand 0.  */
6986 if (TREE_OPERAND (t, 1) == decl)
6988 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6989 TREE_OPERAND (t, 0) = var;
6990 break;
6993 /* Fallthru. */
6994 case MINUS_EXPR:
6995 case POINTER_PLUS_EXPR:
6996 gcc_assert (TREE_OPERAND (t, 0) == decl);
6997 TREE_OPERAND (t, 0) = var;
6998 break;
6999 default:
7000 gcc_unreachable ();
7003 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7004 is_gimple_val, fb_rvalue);
7005 ret = MIN (ret, tret);
7006 if (c)
7008 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
/* A MINUS increment means the linear step is the negated operand.  */
7009 if (TREE_CODE (t) == MINUS_EXPR)
7011 t = TREE_OPERAND (t, 1);
7012 OMP_CLAUSE_LINEAR_STEP (c)
7013 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
7014 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
7015 &for_pre_body, NULL,
7016 is_gimple_val, fb_rvalue);
7017 ret = MIN (ret, tret);
7020 break;
7022 default:
7023 gcc_unreachable ();
/* When a temporary VAR replaced DECL (or loops are collapsed), a
   lastprivate clause for DECL must store the final value back into
   DECL itself, not VAR; synthesize that assignment here.  */
7026 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7027 && orig_for_stmt == for_stmt)
7029 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
7030 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7031 && OMP_CLAUSE_DECL (c) == decl
7032 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7034 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7035 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7036 gcc_assert (TREE_OPERAND (t, 0) == var);
7037 t = TREE_OPERAND (t, 1);
7038 gcc_assert (TREE_CODE (t) == PLUS_EXPR
7039 || TREE_CODE (t) == MINUS_EXPR
7040 || TREE_CODE (t) == POINTER_PLUS_EXPR);
7041 gcc_assert (TREE_OPERAND (t, 0) == var);
7042 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7043 TREE_OPERAND (t, 1));
7044 gimplify_assign (decl, t,
7045 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7050 BITMAP_FREE (has_decl_expr);
7052 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
/* For combined constructs, give the outer statement fresh private
   iteration temporaries distinct from the inner loop's.  */
7054 if (orig_for_stmt != for_stmt)
7055 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7057 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7058 decl = TREE_OPERAND (t, 0);
7059 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7060 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7061 TREE_OPERAND (t, 0) = var;
7062 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7063 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7064 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7067 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
/* Build the GIMPLE_OMP_FOR with the kind matching the source
   construct, and mark combined loops appropriately.  */
7069 int kind;
7070 switch (TREE_CODE (orig_for_stmt))
7072 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7073 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7074 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7075 default:
7076 gcc_unreachable ();
7078 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7079 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7080 for_pre_body);
7081 if (orig_for_stmt != for_stmt)
7082 gimple_omp_for_set_combined_p (gfor, true);
7083 if (gimplify_omp_ctxp
7084 && (gimplify_omp_ctxp->combined_loop
7085 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7086 && gimplify_omp_ctxp->outer_context
7087 && gimplify_omp_ctxp->outer_context->combined_loop)))
7089 gimple_omp_for_set_combined_into_p (gfor, true);
7090 if (gimplify_omp_ctxp->combined_loop)
7091 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7092 else
7093 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
/* Copy the canonicalized init/cond/final/incr expressions into the
   gimple statement's per-dimension operands.  */
7096 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7098 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7099 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7100 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7101 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7102 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7103 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7104 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7105 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7108 gimplify_seq_add_stmt (pre_p, gfor);
7109 if (ret != GS_ALL_DONE)
7110 return GS_ERROR;
7111 *expr_p = NULL_TREE;
7112 return GS_ALL_DONE;
7115 /* Gimplify the gross structure of other OpenMP constructs.
7116 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7117 and OMP_TEAMS. */
7119 static void
7120 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
/* Gimplify an OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
   or OMP_TEAMS construct: scan its clauses, gimplify its body, then
   emit the matching GIMPLE_OMP_* statement into PRE_P.  */
7122 tree expr = *expr_p;
7123 gimple stmt;
7124 gimple_seq body = NULL;
7125 enum omp_region_type ort = ORT_WORKSHARE;
/* Map the tree code to a region type for clause scanning.  */
7127 switch (TREE_CODE (expr))
7129 case OMP_SECTIONS:
7130 case OMP_SINGLE:
7131 break;
7132 case OMP_TARGET:
7133 ort = ORT_TARGET;
7134 break;
7135 case OMP_TARGET_DATA:
7136 ort = ORT_TARGET_DATA;
7137 break;
7138 case OMP_TEAMS:
7139 ort = ORT_TEAMS;
7140 break;
7141 default:
7142 gcc_unreachable ();
7144 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
/* Target regions gimplify their body in a dedicated gimplify
   context; everything else uses the current one.  */
7145 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7147 struct gimplify_ctx gctx;
7148 push_gimplify_context (&gctx);
7149 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7150 if (gimple_code (g) == GIMPLE_BIND)
7151 pop_gimplify_context (g);
7152 else
7153 pop_gimplify_context (NULL);
/* "target data" wraps its body in a try/finally that calls
   GOMP_target_end_data so the device mapping is always undone.  */
7154 if (ort == ORT_TARGET_DATA)
7156 gimple_seq cleanup = NULL;
7157 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
7158 g = gimple_build_call (fn, 0);
7159 gimple_seq_add_stmt (&cleanup, g);
7160 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7161 body = NULL;
7162 gimple_seq_add_stmt (&body, g);
7165 else
7166 gimplify_and_add (OMP_BODY (expr), &body);
7167 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
/* Build the gimple statement matching the construct.  */
7169 switch (TREE_CODE (expr))
7171 case OMP_SECTIONS:
7172 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7173 break;
7174 case OMP_SINGLE:
7175 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7176 break;
7177 case OMP_TARGET:
7178 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7179 OMP_CLAUSES (expr));
7180 break;
7181 case OMP_TARGET_DATA:
7182 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7183 OMP_CLAUSES (expr));
7184 break;
7185 case OMP_TEAMS:
7186 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7187 break;
7188 default:
7189 gcc_unreachable ();
7192 gimplify_seq_add_stmt (pre_p, stmt);
7193 *expr_p = NULL_TREE;
7196 /* Gimplify the gross structure of OpenMP target update construct. */
7198 static void
7199 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7201 tree expr = *expr_p;
7202 gimple stmt;
7204 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7205 ORT_WORKSHARE);
7206 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
7207 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7208 OMP_TARGET_UPDATE_CLAUSES (expr));
7210 gimplify_seq_add_stmt (pre_p, stmt);
7211 *expr_p = NULL_TREE;
7214 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
7215 stabilized the lhs of the atomic operation as *ADDR. Return true if
7216 EXPR is this stabilized form. */
7218 static bool
7219 goa_lhs_expr_p (tree expr, tree addr)
7221 /* Also include casts to other type variants. The C front end is fond
7222 of adding these for e.g. volatile variables. This is like
7223 STRIP_TYPE_NOPS but includes the main variant lookup. */
7224 STRIP_USELESS_TYPE_CONVERSION (expr);
7226 if (TREE_CODE (expr) == INDIRECT_REF)
7228 expr = TREE_OPERAND (expr, 0);
7229 while (expr != addr
7230 && (CONVERT_EXPR_P (expr)
7231 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7232 && TREE_CODE (expr) == TREE_CODE (addr)
7233 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7235 expr = TREE_OPERAND (expr, 0);
7236 addr = TREE_OPERAND (addr, 0);
7238 if (expr == addr)
7239 return true;
7240 return (TREE_CODE (addr) == ADDR_EXPR
7241 && TREE_CODE (expr) == ADDR_EXPR
7242 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7244 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7245 return true;
7246 return false;
7249 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
7250 expression does not involve the lhs, evaluate it into a temporary.
7251 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7252 or -1 if an error was encountered. */
7254 static int
7255 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7256 tree lhs_var)
/* Walk *EXPR_P replacing appearances of *LHS_ADDR with LHS_VAR;
   subexpressions that do not involve the lhs are gimplified into
   temporaries in PRE_P.  Returns 1 if the lhs appeared, 0 if not,
   -1 on gimplification error.  */
7258 tree expr = *expr_p;
7259 int saw_lhs;
7261 if (goa_lhs_expr_p (expr, lhs_addr))
7263 *expr_p = lhs_var;
7264 return 1;
7266 if (is_gimple_val (expr))
7267 return 0;
7269 saw_lhs = 0;
7270 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7272 case tcc_binary:
7273 case tcc_comparison:
7274 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7275 lhs_var);
/* Intentional fall-through: binary/comparison nodes recurse into
   operand 0 via the unary case below.  */
7276 case tcc_unary:
7277 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7278 lhs_var);
7279 break;
7280 case tcc_expression:
7281 switch (TREE_CODE (expr))
7283 case TRUTH_ANDIF_EXPR:
7284 case TRUTH_ORIF_EXPR:
7285 case TRUTH_AND_EXPR:
7286 case TRUTH_OR_EXPR:
7287 case TRUTH_XOR_EXPR:
7288 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7289 lhs_addr, lhs_var);
/* Intentional fall-through: the two-operand truth codes also
   recurse into operand 0 here.  */
7290 case TRUTH_NOT_EXPR:
7291 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7292 lhs_addr, lhs_var);
7293 break;
7294 case COMPOUND_EXPR:
7295 /* Break out any preevaluations from cp_build_modify_expr. */
7296 for (; TREE_CODE (expr) == COMPOUND_EXPR;
7297 expr = TREE_OPERAND (expr, 1))
7298 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7299 *expr_p = expr;
7300 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
7301 default:
7302 break;
7304 break;
7305 default:
7306 break;
/* Expression does not involve the lhs: evaluate it to a temporary.  */
7309 if (saw_lhs == 0)
7311 enum gimplify_status gs;
7312 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7313 if (gs != GS_ALL_DONE)
7314 saw_lhs = -1;
7317 return saw_lhs;
7320 /* Gimplify an OMP_ATOMIC statement. */
7322 static enum gimplify_status
7323 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7325 tree addr = TREE_OPERAND (*expr_p, 0);
7326 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7327 ? NULL : TREE_OPERAND (*expr_p, 1);
7328 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7329 tree tmp_load;
7330 gimple loadstmt, storestmt;
7332 tmp_load = create_tmp_reg (type, NULL);
7333 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7334 return GS_ERROR;
7336 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7337 != GS_ALL_DONE)
7338 return GS_ERROR;
7340 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7341 gimplify_seq_add_stmt (pre_p, loadstmt);
7342 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7343 != GS_ALL_DONE)
7344 return GS_ERROR;
7346 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7347 rhs = tmp_load;
7348 storestmt = gimple_build_omp_atomic_store (rhs);
7349 gimplify_seq_add_stmt (pre_p, storestmt);
7350 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7352 gimple_omp_atomic_set_seq_cst (loadstmt);
7353 gimple_omp_atomic_set_seq_cst (storestmt);
7355 switch (TREE_CODE (*expr_p))
7357 case OMP_ATOMIC_READ:
7358 case OMP_ATOMIC_CAPTURE_OLD:
7359 *expr_p = tmp_load;
7360 gimple_omp_atomic_set_need_value (loadstmt);
7361 break;
7362 case OMP_ATOMIC_CAPTURE_NEW:
7363 *expr_p = rhs;
7364 gimple_omp_atomic_set_need_value (storestmt);
7365 break;
7366 default:
7367 *expr_p = NULL;
7368 break;
7371 return GS_ALL_DONE;
7374 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7375 body, and adding some EH bits. */
7377 static enum gimplify_status
7378 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7380 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7381 gimple g;
7382 gimple_seq body = NULL;
7383 struct gimplify_ctx gctx;
7384 int subcode = 0;
7386 /* Wrap the transaction body in a BIND_EXPR so we have a context
7387 where to put decls for OpenMP. */
7388 if (TREE_CODE (tbody) != BIND_EXPR)
7390 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7391 TREE_SIDE_EFFECTS (bind) = 1;
7392 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7393 TRANSACTION_EXPR_BODY (expr) = bind;
7396 push_gimplify_context (&gctx);
7397 temp = voidify_wrapper_expr (*expr_p, NULL);
7399 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7400 pop_gimplify_context (g);
7402 g = gimple_build_transaction (body, NULL);
7403 if (TRANSACTION_EXPR_OUTER (expr))
7404 subcode = GTMA_IS_OUTER;
7405 else if (TRANSACTION_EXPR_RELAXED (expr))
7406 subcode = GTMA_IS_RELAXED;
7407 gimple_transaction_set_subcode (g, subcode);
7409 gimplify_seq_add_stmt (pre_p, g);
7411 if (temp)
7413 *expr_p = temp;
7414 return GS_OK;
7417 *expr_p = NULL_TREE;
7418 return GS_ALL_DONE;
7421 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7422 expression produces a value to be used as an operand inside a GIMPLE
7423 statement, the value will be stored back in *EXPR_P. This value will
7424 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7425 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7426 emitted in PRE_P and POST_P.
7428 Additionally, this process may overwrite parts of the input
7429 expression during gimplification. Ideally, it should be
7430 possible to do non-destructive gimplification.
7432 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7433 the expression needs to evaluate to a value to be used as
7434 an operand in a GIMPLE statement, this value will be stored in
7435 *EXPR_P on exit. This happens when the caller specifies one
7436 of fb_lvalue or fb_rvalue fallback flags.
7438 PRE_P will contain the sequence of GIMPLE statements corresponding
7439 to the evaluation of EXPR and all the side-effects that must
7440 be executed before the main expression. On exit, the last
7441 statement of PRE_P is the core statement being gimplified. For
7442 instance, when gimplifying 'if (++a)' the last statement in
7443 PRE_P will be 'if (t.1)' where t.1 is the result of
7444 pre-incrementing 'a'.
7446 POST_P will contain the sequence of GIMPLE statements corresponding
7447 to the evaluation of all the side-effects that must be executed
7448 after the main expression. If this is NULL, the post
7449 side-effects are stored at the end of PRE_P.
7451 The reason why the output is split in two is to handle post
7452 side-effects explicitly. In some cases, an expression may have
7453 inner and outer post side-effects which need to be emitted in
7454 an order different from the one given by the recursive
7455 traversal. For instance, for the expression (*p--)++ the post
7456 side-effects of '--' must actually occur *after* the post
7457 side-effects of '++'. However, gimplification will first visit
7458 the inner expression, so if a separate POST sequence was not
7459 used, the resulting sequence would be:
7461 1 t.1 = *p
7462 2 p = p - 1
7463 3 t.2 = t.1 + 1
7464 4 *p = t.2
7466 However, the post-decrement operation in line #2 must not be
7467 evaluated until after the store to *p at line #4, so the
7468 correct sequence should be:
7470 1 t.1 = *p
7471 2 t.2 = t.1 + 1
7472 3 *p = t.2
7473 4 p = p - 1
7475 So, by specifying a separate post queue, it is possible
7476 to emit the post side-effects in the correct order.
7477 If POST_P is NULL, an internal queue will be used. Before
7478 returning to the caller, the sequence POST_P is appended to
7479 the main output sequence PRE_P.
7481 GIMPLE_TEST_F points to a function that takes a tree T and
7482 returns nonzero if T is in the GIMPLE form requested by the
7483 caller. The GIMPLE predicates are in gimple.c.
7485 FALLBACK tells the function what sort of a temporary we want if
7486 gimplification cannot produce an expression that complies with
7487 GIMPLE_TEST_F.
7489 fb_none means that no temporary should be generated
7490 fb_rvalue means that an rvalue is OK to generate
7491 fb_lvalue means that an lvalue is OK to generate
7492 fb_either means that either is OK, but an lvalue is preferable.
7493 fb_mayfail means that gimplification may fail (in which case
7494 GS_ERROR will be returned)
7496 The return value is either GS_ERROR or GS_ALL_DONE, since this
7497 function iterates until EXPR is completely gimplified or an error
7498 occurs. */
7500 enum gimplify_status
7501 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7502 bool (*gimple_test_f) (tree), fallback_t fallback)
7504 tree tmp;
7505 gimple_seq internal_pre = NULL;
7506 gimple_seq internal_post = NULL;
7507 tree save_expr;
7508 bool is_statement;
7509 location_t saved_location;
7510 enum gimplify_status ret;
7511 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7513 save_expr = *expr_p;
7514 if (save_expr == NULL_TREE)
7515 return GS_ALL_DONE;
7517 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7518 is_statement = gimple_test_f == is_gimple_stmt;
7519 if (is_statement)
7520 gcc_assert (pre_p);
7522 /* Consistency checks. */
7523 if (gimple_test_f == is_gimple_reg)
7524 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7525 else if (gimple_test_f == is_gimple_val
7526 || gimple_test_f == is_gimple_call_addr
7527 || gimple_test_f == is_gimple_condexpr
7528 || gimple_test_f == is_gimple_mem_rhs
7529 || gimple_test_f == is_gimple_mem_rhs_or_call
7530 || gimple_test_f == is_gimple_reg_rhs
7531 || gimple_test_f == is_gimple_reg_rhs_or_call
7532 || gimple_test_f == is_gimple_asm_val
7533 || gimple_test_f == is_gimple_mem_ref_addr)
7534 gcc_assert (fallback & fb_rvalue);
7535 else if (gimple_test_f == is_gimple_min_lval
7536 || gimple_test_f == is_gimple_lvalue)
7537 gcc_assert (fallback & fb_lvalue);
7538 else if (gimple_test_f == is_gimple_addressable)
7539 gcc_assert (fallback & fb_either);
7540 else if (gimple_test_f == is_gimple_stmt)
7541 gcc_assert (fallback == fb_none);
7542 else
7544 /* We should have recognized the GIMPLE_TEST_F predicate to
7545 know what kind of fallback to use in case a temporary is
7546 needed to hold the value or address of *EXPR_P. */
7547 gcc_unreachable ();
7550 /* We used to check the predicate here and return immediately if it
7551 succeeds. This is wrong; the design is for gimplification to be
7552 idempotent, and for the predicates to only test for valid forms, not
7553 whether they are fully simplified. */
7554 if (pre_p == NULL)
7555 pre_p = &internal_pre;
7557 if (post_p == NULL)
7558 post_p = &internal_post;
7560 /* Remember the last statements added to PRE_P and POST_P. Every
7561 new statement added by the gimplification helpers needs to be
7562 annotated with location information. To centralize the
7563 responsibility, we remember the last statement that had been
7564 added to both queues before gimplifying *EXPR_P. If
7565 gimplification produces new statements in PRE_P and POST_P, those
7566 statements will be annotated with the same location information
7567 as *EXPR_P. */
7568 pre_last_gsi = gsi_last (*pre_p);
7569 post_last_gsi = gsi_last (*post_p);
7571 saved_location = input_location;
7572 if (save_expr != error_mark_node
7573 && EXPR_HAS_LOCATION (*expr_p))
7574 input_location = EXPR_LOCATION (*expr_p);
7576 /* Loop over the specific gimplifiers until the toplevel node
7577 remains the same. */
7580 /* Strip away as many useless type conversions as possible
7581 at the toplevel. */
7582 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7584 /* Remember the expr. */
7585 save_expr = *expr_p;
7587 /* Die, die, die, my darling. */
7588 if (save_expr == error_mark_node
7589 || (TREE_TYPE (save_expr)
7590 && TREE_TYPE (save_expr) == error_mark_node))
7592 ret = GS_ERROR;
7593 break;
7596 /* Do any language-specific gimplification. */
7597 ret = ((enum gimplify_status)
7598 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7599 if (ret == GS_OK)
7601 if (*expr_p == NULL_TREE)
7602 break;
7603 if (*expr_p != save_expr)
7604 continue;
7606 else if (ret != GS_UNHANDLED)
7607 break;
7609 /* Make sure that all the cases set 'ret' appropriately. */
7610 ret = GS_UNHANDLED;
7611 switch (TREE_CODE (*expr_p))
7613 /* First deal with the special cases. */
7615 case POSTINCREMENT_EXPR:
7616 case POSTDECREMENT_EXPR:
7617 case PREINCREMENT_EXPR:
7618 case PREDECREMENT_EXPR:
7619 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7620 fallback != fb_none,
7621 TREE_TYPE (*expr_p));
7622 break;
7624 case ARRAY_REF:
7625 case ARRAY_RANGE_REF:
7626 case REALPART_EXPR:
7627 case IMAGPART_EXPR:
7628 case COMPONENT_REF:
7629 case VIEW_CONVERT_EXPR:
7630 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7631 fallback ? fallback : fb_rvalue);
7632 break;
7634 case COND_EXPR:
7635 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7637 /* C99 code may assign to an array in a structure value of a
7638 conditional expression, and this has undefined behavior
7639 only on execution, so create a temporary if an lvalue is
7640 required. */
7641 if (fallback == fb_lvalue)
7643 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7644 mark_addressable (*expr_p);
7645 ret = GS_OK;
7647 break;
7649 case CALL_EXPR:
7650 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7652 /* C99 code may assign to an array in a structure returned
7653 from a function, and this has undefined behavior only on
7654 execution, so create a temporary if an lvalue is
7655 required. */
7656 if (fallback == fb_lvalue)
7658 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7659 mark_addressable (*expr_p);
7660 ret = GS_OK;
7662 break;
7664 case TREE_LIST:
7665 gcc_unreachable ();
7667 case COMPOUND_EXPR:
7668 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7669 break;
7671 case COMPOUND_LITERAL_EXPR:
7672 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7673 gimple_test_f, fallback);
7674 break;
7676 case MODIFY_EXPR:
7677 case INIT_EXPR:
7678 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7679 fallback != fb_none);
7680 break;
7682 case TRUTH_ANDIF_EXPR:
7683 case TRUTH_ORIF_EXPR:
7685 /* Preserve the original type of the expression and the
7686 source location of the outer expression. */
7687 tree org_type = TREE_TYPE (*expr_p);
7688 *expr_p = gimple_boolify (*expr_p);
7689 *expr_p = build3_loc (input_location, COND_EXPR,
7690 org_type, *expr_p,
7691 fold_convert_loc
7692 (input_location,
7693 org_type, boolean_true_node),
7694 fold_convert_loc
7695 (input_location,
7696 org_type, boolean_false_node));
7697 ret = GS_OK;
7698 break;
7701 case TRUTH_NOT_EXPR:
7703 tree type = TREE_TYPE (*expr_p);
7704 /* The parsers are careful to generate TRUTH_NOT_EXPR
7705 only with operands that are always zero or one.
7706 We do not fold here but handle the only interesting case
7707 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7708 *expr_p = gimple_boolify (*expr_p);
7709 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7710 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7711 TREE_TYPE (*expr_p),
7712 TREE_OPERAND (*expr_p, 0));
7713 else
7714 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7715 TREE_TYPE (*expr_p),
7716 TREE_OPERAND (*expr_p, 0),
7717 build_int_cst (TREE_TYPE (*expr_p), 1));
7718 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7719 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7720 ret = GS_OK;
7721 break;
7724 case ADDR_EXPR:
7725 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7726 break;
7728 case VA_ARG_EXPR:
7729 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7730 break;
7732 CASE_CONVERT:
7733 if (IS_EMPTY_STMT (*expr_p))
7735 ret = GS_ALL_DONE;
7736 break;
7739 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7740 || fallback == fb_none)
7742 /* Just strip a conversion to void (or in void context) and
7743 try again. */
7744 *expr_p = TREE_OPERAND (*expr_p, 0);
7745 ret = GS_OK;
7746 break;
7749 ret = gimplify_conversion (expr_p);
7750 if (ret == GS_ERROR)
7751 break;
7752 if (*expr_p != save_expr)
7753 break;
7754 /* FALLTHRU */
7756 case FIX_TRUNC_EXPR:
7757 /* unary_expr: ... | '(' cast ')' val | ... */
7758 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7759 is_gimple_val, fb_rvalue);
7760 recalculate_side_effects (*expr_p);
7761 break;
7763 case INDIRECT_REF:
7765 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7766 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7767 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7769 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7770 if (*expr_p != save_expr)
7772 ret = GS_OK;
7773 break;
7776 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7777 is_gimple_reg, fb_rvalue);
7778 if (ret == GS_ERROR)
7779 break;
7781 recalculate_side_effects (*expr_p);
7782 *expr_p = fold_build2_loc (input_location, MEM_REF,
7783 TREE_TYPE (*expr_p),
7784 TREE_OPERAND (*expr_p, 0),
7785 build_int_cst (saved_ptr_type, 0));
7786 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7787 TREE_THIS_NOTRAP (*expr_p) = notrap;
7788 ret = GS_OK;
7789 break;
7792 /* We arrive here through the various re-gimplifcation paths. */
7793 case MEM_REF:
7794 /* First try re-folding the whole thing. */
7795 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7796 TREE_OPERAND (*expr_p, 0),
7797 TREE_OPERAND (*expr_p, 1));
7798 if (tmp)
7800 *expr_p = tmp;
7801 recalculate_side_effects (*expr_p);
7802 ret = GS_OK;
7803 break;
7805 /* Avoid re-gimplifying the address operand if it is already
7806 in suitable form. Re-gimplifying would mark the address
7807 operand addressable. Always gimplify when not in SSA form
7808 as we still may have to gimplify decls with value-exprs. */
7809 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7810 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7812 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7813 is_gimple_mem_ref_addr, fb_rvalue);
7814 if (ret == GS_ERROR)
7815 break;
7817 recalculate_side_effects (*expr_p);
7818 ret = GS_ALL_DONE;
7819 break;
7821 /* Constants need not be gimplified. */
7822 case INTEGER_CST:
7823 case REAL_CST:
7824 case FIXED_CST:
7825 case STRING_CST:
7826 case COMPLEX_CST:
7827 case VECTOR_CST:
7828 ret = GS_ALL_DONE;
7829 break;
7831 case CONST_DECL:
7832 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7833 CONST_DECL node. Otherwise the decl is replaceable by its
7834 value. */
7835 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7836 if (fallback & fb_lvalue)
7837 ret = GS_ALL_DONE;
7838 else
7840 *expr_p = DECL_INITIAL (*expr_p);
7841 ret = GS_OK;
7843 break;
7845 case DECL_EXPR:
7846 ret = gimplify_decl_expr (expr_p, pre_p);
7847 break;
7849 case BIND_EXPR:
7850 ret = gimplify_bind_expr (expr_p, pre_p);
7851 break;
7853 case LOOP_EXPR:
7854 ret = gimplify_loop_expr (expr_p, pre_p);
7855 break;
7857 case SWITCH_EXPR:
7858 ret = gimplify_switch_expr (expr_p, pre_p);
7859 break;
7861 case EXIT_EXPR:
7862 ret = gimplify_exit_expr (expr_p);
7863 break;
7865 case GOTO_EXPR:
7866 /* If the target is not LABEL, then it is a computed jump
7867 and the target needs to be gimplified. */
7868 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7870 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7871 NULL, is_gimple_val, fb_rvalue);
7872 if (ret == GS_ERROR)
7873 break;
7875 gimplify_seq_add_stmt (pre_p,
7876 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7877 ret = GS_ALL_DONE;
7878 break;
7880 case PREDICT_EXPR:
7881 gimplify_seq_add_stmt (pre_p,
7882 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7883 PREDICT_EXPR_OUTCOME (*expr_p)));
7884 ret = GS_ALL_DONE;
7885 break;
7887 case LABEL_EXPR:
7888 ret = GS_ALL_DONE;
7889 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7890 == current_function_decl);
7891 gimplify_seq_add_stmt (pre_p,
7892 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7893 break;
7895 case CASE_LABEL_EXPR:
7896 ret = gimplify_case_label_expr (expr_p, pre_p);
7897 break;
7899 case RETURN_EXPR:
7900 ret = gimplify_return_expr (*expr_p, pre_p);
7901 break;
7903 case CONSTRUCTOR:
7904 /* Don't reduce this in place; let gimplify_init_constructor work its
7905 magic. Buf if we're just elaborating this for side effects, just
7906 gimplify any element that has side-effects. */
7907 if (fallback == fb_none)
7909 unsigned HOST_WIDE_INT ix;
7910 tree val;
7911 tree temp = NULL_TREE;
7912 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7913 if (TREE_SIDE_EFFECTS (val))
7914 append_to_statement_list (val, &temp);
7916 *expr_p = temp;
7917 ret = temp ? GS_OK : GS_ALL_DONE;
7919 /* C99 code may assign to an array in a constructed
7920 structure or union, and this has undefined behavior only
7921 on execution, so create a temporary if an lvalue is
7922 required. */
7923 else if (fallback == fb_lvalue)
7925 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7926 mark_addressable (*expr_p);
7927 ret = GS_OK;
7929 else
7930 ret = GS_ALL_DONE;
7931 break;
7933 /* The following are special cases that are not handled by the
7934 original GIMPLE grammar. */
7936 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7937 eliminated. */
7938 case SAVE_EXPR:
7939 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7940 break;
7942 case BIT_FIELD_REF:
7943 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7944 post_p, is_gimple_lvalue, fb_either);
7945 recalculate_side_effects (*expr_p);
7946 break;
7948 case TARGET_MEM_REF:
7950 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7952 if (TMR_BASE (*expr_p))
7953 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7954 post_p, is_gimple_mem_ref_addr, fb_either);
7955 if (TMR_INDEX (*expr_p))
7956 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7957 post_p, is_gimple_val, fb_rvalue);
7958 if (TMR_INDEX2 (*expr_p))
7959 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7960 post_p, is_gimple_val, fb_rvalue);
7961 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7962 ret = MIN (r0, r1);
7964 break;
7966 case NON_LVALUE_EXPR:
7967 /* This should have been stripped above. */
7968 gcc_unreachable ();
7970 case ASM_EXPR:
7971 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7972 break;
7974 case TRY_FINALLY_EXPR:
7975 case TRY_CATCH_EXPR:
7977 gimple_seq eval, cleanup;
7978 gimple try_;
7980 /* Calls to destructors are generated automatically in FINALLY/CATCH
7981 block. They should have location as UNKNOWN_LOCATION. However,
7982 gimplify_call_expr will reset these call stmts to input_location
7983 if it finds stmt's location is unknown. To prevent resetting for
7984 destructors, we set the input_location to unknown.
7985 Note that this only affects the destructor calls in FINALLY/CATCH
7986 block, and will automatically reset to its original value by the
7987 end of gimplify_expr. */
7988 input_location = UNKNOWN_LOCATION;
7989 eval = cleanup = NULL;
7990 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7991 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7992 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7993 if (gimple_seq_empty_p (cleanup))
7995 gimple_seq_add_seq (pre_p, eval);
7996 ret = GS_ALL_DONE;
7997 break;
7999 try_ = gimple_build_try (eval, cleanup,
8000 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8001 ? GIMPLE_TRY_FINALLY
8002 : GIMPLE_TRY_CATCH);
8003 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8004 gimple_set_location (try_, saved_location);
8005 else
8006 gimple_set_location (try_, EXPR_LOCATION (save_expr));
8007 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8008 gimple_try_set_catch_is_cleanup (try_,
8009 TRY_CATCH_IS_CLEANUP (*expr_p));
8010 gimplify_seq_add_stmt (pre_p, try_);
8011 ret = GS_ALL_DONE;
8012 break;
8015 case CLEANUP_POINT_EXPR:
8016 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8017 break;
8019 case TARGET_EXPR:
8020 ret = gimplify_target_expr (expr_p, pre_p, post_p);
8021 break;
8023 case CATCH_EXPR:
8025 gimple c;
8026 gimple_seq handler = NULL;
8027 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8028 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8029 gimplify_seq_add_stmt (pre_p, c);
8030 ret = GS_ALL_DONE;
8031 break;
8034 case EH_FILTER_EXPR:
8036 gimple ehf;
8037 gimple_seq failure = NULL;
8039 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8040 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8041 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8042 gimplify_seq_add_stmt (pre_p, ehf);
8043 ret = GS_ALL_DONE;
8044 break;
8047 case OBJ_TYPE_REF:
8049 enum gimplify_status r0, r1;
8050 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8051 post_p, is_gimple_val, fb_rvalue);
8052 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8053 post_p, is_gimple_val, fb_rvalue);
8054 TREE_SIDE_EFFECTS (*expr_p) = 0;
8055 ret = MIN (r0, r1);
8057 break;
8059 case LABEL_DECL:
8060 /* We get here when taking the address of a label. We mark
8061 the label as "forced"; meaning it can never be removed and
8062 it is a potential target for any computed goto. */
8063 FORCED_LABEL (*expr_p) = 1;
8064 ret = GS_ALL_DONE;
8065 break;
8067 case STATEMENT_LIST:
8068 ret = gimplify_statement_list (expr_p, pre_p);
8069 break;
8071 case WITH_SIZE_EXPR:
8073 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8074 post_p == &internal_post ? NULL : post_p,
8075 gimple_test_f, fallback);
8076 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8077 is_gimple_val, fb_rvalue);
8078 ret = GS_ALL_DONE;
8080 break;
8082 case VAR_DECL:
8083 case PARM_DECL:
8084 ret = gimplify_var_or_parm_decl (expr_p);
8085 break;
8087 case RESULT_DECL:
8088 /* When within an OpenMP context, notice uses of variables. */
8089 if (gimplify_omp_ctxp)
8090 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8091 ret = GS_ALL_DONE;
8092 break;
8094 case SSA_NAME:
8095 /* Allow callbacks into the gimplifier during optimization. */
8096 ret = GS_ALL_DONE;
8097 break;
8099 case OMP_PARALLEL:
8100 gimplify_omp_parallel (expr_p, pre_p);
8101 ret = GS_ALL_DONE;
8102 break;
8104 case OMP_TASK:
8105 gimplify_omp_task (expr_p, pre_p);
8106 ret = GS_ALL_DONE;
8107 break;
8109 case OMP_FOR:
8110 case OMP_SIMD:
8111 case OMP_DISTRIBUTE:
8112 ret = gimplify_omp_for (expr_p, pre_p);
8113 break;
8115 case OMP_SECTIONS:
8116 case OMP_SINGLE:
8117 case OMP_TARGET:
8118 case OMP_TARGET_DATA:
8119 case OMP_TEAMS:
8120 gimplify_omp_workshare (expr_p, pre_p);
8121 ret = GS_ALL_DONE;
8122 break;
8124 case OMP_TARGET_UPDATE:
8125 gimplify_omp_target_update (expr_p, pre_p);
8126 ret = GS_ALL_DONE;
8127 break;
8129 case OMP_SECTION:
8130 case OMP_MASTER:
8131 case OMP_TASKGROUP:
8132 case OMP_ORDERED:
8133 case OMP_CRITICAL:
8135 gimple_seq body = NULL;
8136 gimple g;
8138 gimplify_and_add (OMP_BODY (*expr_p), &body);
8139 switch (TREE_CODE (*expr_p))
8141 case OMP_SECTION:
8142 g = gimple_build_omp_section (body);
8143 break;
8144 case OMP_MASTER:
8145 g = gimple_build_omp_master (body);
8146 break;
8147 case OMP_TASKGROUP:
8149 gimple_seq cleanup = NULL;
8150 tree fn
8151 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8152 g = gimple_build_call (fn, 0);
8153 gimple_seq_add_stmt (&cleanup, g);
8154 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8155 body = NULL;
8156 gimple_seq_add_stmt (&body, g);
8157 g = gimple_build_omp_taskgroup (body);
8159 break;
8160 case OMP_ORDERED:
8161 g = gimple_build_omp_ordered (body);
8162 break;
8163 case OMP_CRITICAL:
8164 g = gimple_build_omp_critical (body,
8165 OMP_CRITICAL_NAME (*expr_p));
8166 break;
8167 default:
8168 gcc_unreachable ();
8170 gimplify_seq_add_stmt (pre_p, g);
8171 ret = GS_ALL_DONE;
8172 break;
8175 case OMP_ATOMIC:
8176 case OMP_ATOMIC_READ:
8177 case OMP_ATOMIC_CAPTURE_OLD:
8178 case OMP_ATOMIC_CAPTURE_NEW:
8179 ret = gimplify_omp_atomic (expr_p, pre_p);
8180 break;
8182 case TRANSACTION_EXPR:
8183 ret = gimplify_transaction (expr_p, pre_p);
8184 break;
8186 case TRUTH_AND_EXPR:
8187 case TRUTH_OR_EXPR:
8188 case TRUTH_XOR_EXPR:
8190 tree orig_type = TREE_TYPE (*expr_p);
8191 tree new_type, xop0, xop1;
8192 *expr_p = gimple_boolify (*expr_p);
8193 new_type = TREE_TYPE (*expr_p);
8194 if (!useless_type_conversion_p (orig_type, new_type))
8196 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8197 ret = GS_OK;
8198 break;
8201 /* Boolified binary truth expressions are semantically equivalent
8202 to bitwise binary expressions. Canonicalize them to the
8203 bitwise variant. */
8204 switch (TREE_CODE (*expr_p))
8206 case TRUTH_AND_EXPR:
8207 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8208 break;
8209 case TRUTH_OR_EXPR:
8210 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8211 break;
8212 case TRUTH_XOR_EXPR:
8213 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8214 break;
8215 default:
8216 break;
8218 /* Now make sure that operands have compatible type to
8219 expression's new_type. */
8220 xop0 = TREE_OPERAND (*expr_p, 0);
8221 xop1 = TREE_OPERAND (*expr_p, 1);
8222 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8223 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8224 new_type,
8225 xop0);
8226 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8227 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8228 new_type,
8229 xop1);
8230 /* Continue classified as tcc_binary. */
8231 goto expr_2;
8234 case FMA_EXPR:
8235 case VEC_COND_EXPR:
8236 case VEC_PERM_EXPR:
8237 /* Classified as tcc_expression. */
8238 goto expr_3;
8240 case POINTER_PLUS_EXPR:
8242 enum gimplify_status r0, r1;
8243 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8244 post_p, is_gimple_val, fb_rvalue);
8245 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8246 post_p, is_gimple_val, fb_rvalue);
8247 recalculate_side_effects (*expr_p);
8248 ret = MIN (r0, r1);
8249 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8250 after gimplifying operands - this is similar to how
8251 it would be folding all gimplified stmts on creation
8252 to have them canonicalized, which is what we eventually
8253 should do anyway. */
8254 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8255 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8257 *expr_p = build_fold_addr_expr_with_type_loc
8258 (input_location,
8259 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8260 TREE_OPERAND (*expr_p, 0),
8261 fold_convert (ptr_type_node,
8262 TREE_OPERAND (*expr_p, 1))),
8263 TREE_TYPE (*expr_p));
8264 ret = MIN (ret, GS_OK);
8266 break;
8269 default:
8270 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8272 case tcc_comparison:
8273 /* Handle comparison of objects of non scalar mode aggregates
8274 with a call to memcmp. It would be nice to only have to do
8275 this for variable-sized objects, but then we'd have to allow
8276 the same nest of reference nodes we allow for MODIFY_EXPR and
8277 that's too complex.
8279 Compare scalar mode aggregates as scalar mode values. Using
8280 memcmp for them would be very inefficient at best, and is
8281 plain wrong if bitfields are involved. */
8283 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8285 /* Vector comparisons need no boolification. */
8286 if (TREE_CODE (type) == VECTOR_TYPE)
8287 goto expr_2;
8288 else if (!AGGREGATE_TYPE_P (type))
8290 tree org_type = TREE_TYPE (*expr_p);
8291 *expr_p = gimple_boolify (*expr_p);
8292 if (!useless_type_conversion_p (org_type,
8293 TREE_TYPE (*expr_p)))
8295 *expr_p = fold_convert_loc (input_location,
8296 org_type, *expr_p);
8297 ret = GS_OK;
8299 else
8300 goto expr_2;
8302 else if (TYPE_MODE (type) != BLKmode)
8303 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8304 else
8305 ret = gimplify_variable_sized_compare (expr_p);
8307 break;
8310 /* If *EXPR_P does not need to be special-cased, handle it
8311 according to its class. */
8312 case tcc_unary:
8313 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8314 post_p, is_gimple_val, fb_rvalue);
8315 break;
8317 case tcc_binary:
8318 expr_2:
8320 enum gimplify_status r0, r1;
8322 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8323 post_p, is_gimple_val, fb_rvalue);
8324 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8325 post_p, is_gimple_val, fb_rvalue);
8327 ret = MIN (r0, r1);
8328 break;
8331 expr_3:
8333 enum gimplify_status r0, r1, r2;
8335 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8336 post_p, is_gimple_val, fb_rvalue);
8337 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8338 post_p, is_gimple_val, fb_rvalue);
8339 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8340 post_p, is_gimple_val, fb_rvalue);
8342 ret = MIN (MIN (r0, r1), r2);
8343 break;
8346 case tcc_declaration:
8347 case tcc_constant:
8348 ret = GS_ALL_DONE;
8349 goto dont_recalculate;
8351 default:
8352 gcc_unreachable ();
8355 recalculate_side_effects (*expr_p);
8357 dont_recalculate:
8358 break;
8361 gcc_assert (*expr_p || ret != GS_OK);
8363 while (ret == GS_OK);
8365 /* If we encountered an error_mark somewhere nested inside, either
8366 stub out the statement or propagate the error back out. */
8367 if (ret == GS_ERROR)
8369 if (is_statement)
8370 *expr_p = NULL;
8371 goto out;
8374 /* This was only valid as a return value from the langhook, which
8375 we handled. Make sure it doesn't escape from any other context. */
8376 gcc_assert (ret != GS_UNHANDLED);
8378 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8380 /* We aren't looking for a value, and we don't have a valid
8381 statement. If it doesn't have side-effects, throw it away. */
8382 if (!TREE_SIDE_EFFECTS (*expr_p))
8383 *expr_p = NULL;
8384 else if (!TREE_THIS_VOLATILE (*expr_p))
8386 /* This is probably a _REF that contains something nested that
8387 has side effects. Recurse through the operands to find it. */
8388 enum tree_code code = TREE_CODE (*expr_p);
8390 switch (code)
8392 case COMPONENT_REF:
8393 case REALPART_EXPR:
8394 case IMAGPART_EXPR:
8395 case VIEW_CONVERT_EXPR:
8396 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8397 gimple_test_f, fallback);
8398 break;
8400 case ARRAY_REF:
8401 case ARRAY_RANGE_REF:
8402 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8403 gimple_test_f, fallback);
8404 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8405 gimple_test_f, fallback);
8406 break;
8408 default:
8409 /* Anything else with side-effects must be converted to
8410 a valid statement before we get here. */
8411 gcc_unreachable ();
8414 *expr_p = NULL;
8416 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8417 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8419 /* Historically, the compiler has treated a bare reference
8420 to a non-BLKmode volatile lvalue as forcing a load. */
8421 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8423 /* Normally, we do not want to create a temporary for a
8424 TREE_ADDRESSABLE type because such a type should not be
8425 copied by bitwise-assignment. However, we make an
8426 exception here, as all we are doing here is ensuring that
8427 we read the bytes that make up the type. We use
8428 create_tmp_var_raw because create_tmp_var will abort when
8429 given a TREE_ADDRESSABLE type. */
8430 tree tmp = create_tmp_var_raw (type, "vol");
8431 gimple_add_tmp_var (tmp);
8432 gimplify_assign (tmp, *expr_p, pre_p);
8433 *expr_p = NULL;
8435 else
8436 /* We can't do anything useful with a volatile reference to
8437 an incomplete type, so just throw it away. Likewise for
8438 a BLKmode type, since any implicit inner load should
8439 already have been turned into an explicit one by the
8440 gimplification process. */
8441 *expr_p = NULL;
8444 /* If we are gimplifying at the statement level, we're done. Tack
8445 everything together and return. */
8446 if (fallback == fb_none || is_statement)
8448 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8449 it out for GC to reclaim it. */
8450 *expr_p = NULL_TREE;
8452 if (!gimple_seq_empty_p (internal_pre)
8453 || !gimple_seq_empty_p (internal_post))
8455 gimplify_seq_add_seq (&internal_pre, internal_post);
8456 gimplify_seq_add_seq (pre_p, internal_pre);
8459 /* The result of gimplifying *EXPR_P is going to be the last few
8460 statements in *PRE_P and *POST_P. Add location information
8461 to all the statements that were added by the gimplification
8462 helpers. */
8463 if (!gimple_seq_empty_p (*pre_p))
8464 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8466 if (!gimple_seq_empty_p (*post_p))
8467 annotate_all_with_location_after (*post_p, post_last_gsi,
8468 input_location);
8470 goto out;
8473 #ifdef ENABLE_GIMPLE_CHECKING
8474 if (*expr_p)
8476 enum tree_code code = TREE_CODE (*expr_p);
8477 /* These expressions should already be in gimple IR form. */
8478 gcc_assert (code != MODIFY_EXPR
8479 && code != ASM_EXPR
8480 && code != BIND_EXPR
8481 && code != CATCH_EXPR
8482 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8483 && code != EH_FILTER_EXPR
8484 && code != GOTO_EXPR
8485 && code != LABEL_EXPR
8486 && code != LOOP_EXPR
8487 && code != SWITCH_EXPR
8488 && code != TRY_FINALLY_EXPR
8489 && code != OMP_CRITICAL
8490 && code != OMP_FOR
8491 && code != OMP_MASTER
8492 && code != OMP_TASKGROUP
8493 && code != OMP_ORDERED
8494 && code != OMP_PARALLEL
8495 && code != OMP_SECTIONS
8496 && code != OMP_SECTION
8497 && code != OMP_SINGLE);
8499 #endif
8501 /* Otherwise we're gimplifying a subexpression, so the resulting
8502 value is interesting. If it's a valid operand that matches
8503 GIMPLE_TEST_F, we're done. Unless we are handling some
8504 post-effects internally; if that's the case, we need to copy into
8505 a temporary before adding the post-effects to POST_P. */
8506 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8507 goto out;
8509 /* Otherwise, we need to create a new temporary for the gimplified
8510 expression. */
8512 /* We can't return an lvalue if we have an internal postqueue. The
8513 object the lvalue refers to would (probably) be modified by the
8514 postqueue; we need to copy the value out first, which means an
8515 rvalue. */
8516 if ((fallback & fb_lvalue)
8517 && gimple_seq_empty_p (internal_post)
8518 && is_gimple_addressable (*expr_p))
8520 /* An lvalue will do. Take the address of the expression, store it
8521 in a temporary, and replace the expression with an INDIRECT_REF of
8522 that temporary. */
8523 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8524 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8525 *expr_p = build_simple_mem_ref (tmp);
8527 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8529 /* An rvalue will do. Assign the gimplified expression into a
8530 new temporary TMP and replace the original expression with
8531 TMP. First, make sure that the expression has a type so that
8532 it can be assigned into a temporary. */
8533 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8534 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8536 else
8538 #ifdef ENABLE_GIMPLE_CHECKING
8539 if (!(fallback & fb_mayfail))
8541 fprintf (stderr, "gimplification failed:\n");
8542 print_generic_expr (stderr, *expr_p, 0);
8543 debug_tree (*expr_p);
8544 internal_error ("gimplification failed");
8546 #endif
8547 gcc_assert (fallback & fb_mayfail);
8549 /* If this is an asm statement, and the user asked for the
8550 impossible, don't die. Fail and let gimplify_asm_expr
8551 issue an error. */
8552 ret = GS_ERROR;
8553 goto out;
8556 /* Make sure the temporary matches our predicate. */
8557 gcc_assert ((*gimple_test_f) (*expr_p));
8559 if (!gimple_seq_empty_p (internal_post))
8561 annotate_all_with_location (internal_post, input_location);
8562 gimplify_seq_add_seq (pre_p, internal_post);
8565 out:
8566 input_location = saved_location;
8567 return ret;
8570 /* Look through TYPE for variable-sized objects and gimplify each such
8571 size that we find. Add to LIST_P any statements generated. */
void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Share the gimplified bounds with every other variant of the
         type, so that all variants agree on the same trees.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
        {
          TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
          TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
        }
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
         with assigned stack slots, for -O1+ -g they should be tracked
         by VTA.  */
      if (!(TYPE_NAME (type)
            && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
            && DECL_IGNORED_P (TYPE_NAME (type)))
          && TYPE_DOMAIN (type)
          && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
        {
          /* Clear DECL_IGNORED_P on artificial bound variables so the
             debugger can still see VLA bounds.  */
          t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
            DECL_IGNORED_P (t) = 0;
          t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
            DECL_IGNORED_P (t) = 0;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the offset and sizes of each field, then recurse on the
         field's own type.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          {
            gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
            gimplify_one_sizepos (&DECL_SIZE (field), list_p);
            gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
            gimplify_type_sizes (TREE_TYPE (field), list_p);
          }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We used to recurse on the pointed-to type here, which turned out to
         be incorrect because its definition might refer to variables not
         yet initialized at this point if a forward declaration is involved.

         It was actually useful for anonymous pointed-to types to ensure
         that the sizes evaluation dominates every possible later use of the
         values.  Restricting to such types here would be safe since there
         is no possible forward declaration around, but would introduce an
         undesirable middle-end semantic to anonymity.  We then defer to
         front-ends the responsibility of ensuring that the sizes are
         evaluated both early and late enough, e.g. by attaching artificial
         type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Copy the gimplified sizes into every variant and mark each as
     done, matching the TYPE_SIZES_GIMPLIFIED guard above.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
8673 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8674 a size or position, has had all of its SAVE_EXPRs evaluated.
8675 We add any required statements to *STMT_P. */
void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (is_gimple_sizepos (expr))
    return;

  /* Unshare before gimplifying so the gimplified form does not alias
     trees still referenced from elsewhere (e.g. other types).  */
  *expr_p = unshare_expr (expr);

  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
}
8695 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8696 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8697 is true, also gimplify the parameters. */
gimple
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  if (flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* Functions marked "omp declare target" get an implicit target
         context for the whole body.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
        gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Nested functions may reference VLAs of the enclosing function; track
     those in NONLOCAL_VLAS while gimplifying.  */
  cgn = cgraph_get_node (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body still needs a statement to hang the bind on.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
           parm; parm = DECL_CHAIN (parm))
        if (DECL_HAS_VALUE_EXPR_P (parm))
          {
            DECL_HAS_VALUE_EXPR_P (parm) = 0;
            DECL_IGNORED_P (parm) = 0;
          }
    }

  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  /* Tear down any implicit target context created above.  */
  if (flag_openmp && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
8806 typedef char *char_p; /* For DEF_VEC_P. */
8808 /* Return whether we should exclude FNDECL from instrumentation. */
8810 static bool
8811 flag_instrument_functions_exclude_p (tree fndecl)
8813 vec<char_p> *v;
8815 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8816 if (v && v->length () > 0)
8818 const char *name;
8819 int i;
8820 char *s;
8822 name = lang_hooks.decl_printable_name (fndecl, 0);
8823 FOR_EACH_VEC_ELT (*v, i, s)
8824 if (strstr (name, s) != NULL)
8825 return true;
8828 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8829 if (v && v->length () > 0)
8831 const char *name;
8832 int i;
8833 char *s;
8835 name = DECL_SOURCE_FILE (fndecl);
8836 FOR_EACH_VEC_ELT (*v, i, s)
8837 if (strstr (name, s) != NULL)
8838 return true;
8841 return false;
8844 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8845 node for the function we want to gimplify.
8847 Return the sequence of GIMPLE statements corresponding to the body
8848 of FNDECL. */
void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gimple bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the result decl.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      /* Build the exit probe:
         return_addr = __builtin_return_address (0);
         __cyg_profile_func_exit (this_fn, return_addr);
         It goes in the cleanup (finally) sequence.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
                                build_fold_addr_expr (current_function_decl),
                                tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the matching entry probe calling
         __cyg_profile_func_enter, placed before the try body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
                                build_fold_addr_expr (current_function_decl),
                                tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
8946 /* Some transformations like inlining may invalidate the GIMPLE form
8947 for operands. This function traverses all the operands in STMT and
8948 gimplifies anything that is not a valid gimple operand. Any new
8949 GIMPLE statements are inserted before *GSI_P. */
void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree lhs;
  gimple_seq pre = NULL;
  gimple post_stmt = NULL;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands of a GIMPLE_COND must be gimple values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
        size_t i, noutputs = gimple_asm_noutputs (stmt);
        const char *constraint, **oconstraints;
        bool allows_mem, allows_reg, is_inout;

        oconstraints
          = (const char **) alloca ((noutputs) * sizeof (const char *));
        /* Re-gimplify each output operand according to its constraint;
           remember the output constraints for parsing the inputs below.  */
        for (i = 0; i < noutputs; i++)
          {
            tree op = gimple_asm_output_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            oconstraints[i] = constraint;
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                     &allows_reg, &is_inout);
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
                           fb_lvalue | fb_mayfail);
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
          {
            tree op = gimple_asm_input_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            /* TREE_ADDRESSABLE types must stay in memory.  */
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
              allows_reg = 0;
            if (!allows_reg && allows_mem)
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                             is_gimple_lvalue, fb_lvalue | fb_mayfail);
            else
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                             is_gimple_asm_val, fb_rvalue);
          }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
         make sure that side-effects on the RHS of calls, assignments
         and ASMs are executed before the LHS.  The ordering is not
         important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      for (i = num_ops; i > 0; i--)
        {
          tree op = gimple_op (stmt, i - 1);
          if (op == NULL_TREE)
            continue;
          if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
            /* Operand 0 of a call or assignment is the LHS.  */
            gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
          else if (i == 2
                   && is_gimple_assign (stmt)
                   && num_ops == 2
                   && get_gimple_rhs_class (gimple_expr_code (stmt))
                      == GIMPLE_SINGLE_RHS)
            /* Single RHS of an assignment: predicate depends on the LHS.  */
            gimplify_expr (&op, &pre, NULL,
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
                           fb_rvalue);
          else if (i == 2 && is_gimple_call (stmt))
            {
              /* Operand 1 of a call is the function being called.  */
              if (TREE_CODE (op) == FUNCTION_DECL)
                continue;
              gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
            }
          else
            gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
          gimple_set_op (stmt, i - 1, op);
        }

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
         create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
        {
          bool need_temp = false;

          if (is_gimple_assign (stmt)
              && num_ops == 2
              && get_gimple_rhs_class (gimple_expr_code (stmt))
                 == GIMPLE_SINGLE_RHS)
            gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
                           fb_rvalue);
          else if (is_gimple_reg (lhs))
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                {
                  if (is_gimple_call (stmt))
                    {
                      /* Calls that are not const/pure (or are looping
                         const/pure) may clobber memory, so the result
                         must go through a temporary.  */
                      i = gimple_call_flags (stmt);
                      if ((i & ECF_LOOPING_CONST_OR_PURE)
                          || !(i & (ECF_CONST | ECF_PURE)))
                        need_temp = true;
                    }
                  if (stmt_can_throw_internal (stmt))
                    need_temp = true;
                }
            }
          else
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                need_temp = true;
              else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
                {
                  if (is_gimple_call (stmt))
                    {
                      tree fndecl = gimple_call_fndecl (stmt);

                      /* No temporary needed when the callee returns the
                         aggregate by invisible reference anyway.  */
                      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
                          && !(fndecl && DECL_RESULT (fndecl)
                               && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
                        need_temp = true;
                    }
                  else
                    need_temp = true;
                }
            }
          if (need_temp)
            {
              /* Store into a fresh temporary and copy it to the original
                 LHS afterwards (POST_STMT).  */
              tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
              if (gimple_in_ssa_p (cfun))
                temp = make_ssa_name (temp, NULL);
              gimple_set_lhs (stmt, temp);
              post_stmt = gimple_build_assign (lhs, temp);
              if (TREE_CODE (lhs) == SSA_NAME)
                SSA_NAME_DEF_STMT (lhs) = post_stmt;
            }
        }
      break;
    }

  if (!gimple_seq_empty_p (pre))
    gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
9117 /* Expand EXPR to list of gimple statements STMTS. GIMPLE_TEST_F specifies
9118 the predicate that will hold for the result. If VAR is not NULL, make the
9119 base variable of the final destination be VAR if suitable. */
tree
force_gimple_operand_1 (tree expr, gimple_seq *stmts,
                        gimple_predicate gimple_test_f, tree var)
{
  enum gimplify_status ret;
  struct gimplify_ctx gctx;
  location_t saved_location;

  *stmts = NULL;

  /* gimple_test_f might be more strict than is_gimple_val, make
     sure we pass both.  Just checking gimple_test_f doesn't work
     because most gimple predicates do not work recursively.  */
  if (is_gimple_val (expr)
      && (*gimple_test_f) (expr))
    return expr;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
  gimplify_ctxp->allow_rhs_cond_expr = true;
  /* Avoid attributing the generated statements to an unrelated source
     location; restore the real location before returning.  */
  saved_location = input_location;
  input_location = UNKNOWN_LOCATION;

  if (var)
    {
      /* Direct the result into VAR by gimplifying "var = expr".  */
      if (gimplify_ctxp->into_ssa
          && is_gimple_reg (var))
        var = make_ssa_name (var, NULL);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
    }

  if (TREE_CODE (expr) != MODIFY_EXPR
      && TREE_TYPE (expr) == void_type_node)
    {
      /* A void expression produces no value: emit it for its side
         effects only.  */
      gimplify_and_add (expr, stmts);
      expr = NULL_TREE;
    }
  else
    {
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
      gcc_assert (ret != GS_ERROR);
    }

  input_location = saved_location;
  pop_gimplify_context (NULL);

  return expr;
}
9170 /* Expand EXPR to list of gimple statements STMTS. If SIMPLE is true,
9171 force the result to be either ssa_name or an invariant, otherwise
9172 just force it to be a rhs expression. If VAR is not NULL, make the
9173 base variable of the final destination be VAR if suitable. */
9175 tree
9176 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
9178 return force_gimple_operand_1 (expr, stmts,
9179 simple ? is_gimple_val : is_gimple_reg_rhs,
9180 var);
9183 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
9184 and VAR. If some statements are produced, emits them at GSI.
9185 If BEFORE is true. the statements are appended before GSI, otherwise
9186 they are appended after it. M specifies the way GSI moves after
9187 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
9189 tree
9190 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
9191 gimple_predicate gimple_test_f,
9192 tree var, bool before,
9193 enum gsi_iterator_update m)
9195 gimple_seq stmts;
9197 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
9199 if (!gimple_seq_empty_p (stmts))
9201 if (before)
9202 gsi_insert_seq_before (gsi, stmts, m);
9203 else
9204 gsi_insert_seq_after (gsi, stmts, m);
9207 return expr;
9210 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
9211 If SIMPLE is true, force the result to be either ssa_name or an invariant,
9212 otherwise just force it to be a rhs expression. If some statements are
9213 produced, emits them at GSI. If BEFORE is true, the statements are
9214 appended before GSI, otherwise they are appended after it. M specifies
9215 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
9216 are the usual values). */
9218 tree
9219 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
9220 bool simple_p, tree var, bool before,
9221 enum gsi_iterator_update m)
9223 return force_gimple_operand_gsi_1 (gsi, expr,
9224 simple_p
9225 ? is_gimple_val : is_gimple_reg_rhs,
9226 var, before, m);
9229 #ifndef PAD_VARARGS_DOWN
9230 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
9231 #endif
9233 /* Build an indirect-ref expression over the given TREE, which represents a
9234 piece of a va_arg() expansion. */
9235 tree
9236 build_va_arg_indirect_ref (tree addr)
9238 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
9240 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
9241 mf_mark (addr);
9243 return addr;
9246 /* The "standard" implementation of va_arg: read the value from the
9247 current (padded) address and increment by the (padded) size. */
tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                          gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by invisible reference: fetch a pointer instead,
     and dereference it at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, i.e.
         round the pointer up to the required boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
                               valist_tmp,
                               build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
                           rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
                       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
9343 #include "gt-gimplify.h"