* include/bits/stl_list.h (_M_resize_pos(size_type&)): Declare.
[official-gcc.git] / gcc / gimplify.c
blobc5eccf0628015a32fcf9b619117ecf1184f12014
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "options.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "hashtab.h"
39 #include "tm.h"
40 #include "hard-reg-set.h"
41 #include "function.h"
42 #include "rtl.h"
43 #include "flags.h"
44 #include "statistics.h"
45 #include "real.h"
46 #include "fixed-value.h"
47 #include "insn-config.h"
48 #include "expmed.h"
49 #include "dojump.h"
50 #include "explow.h"
51 #include "calls.h"
52 #include "emit-rtl.h"
53 #include "varasm.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "predict.h"
57 #include "basic-block.h"
58 #include "tree-ssa-alias.h"
59 #include "internal-fn.h"
60 #include "gimple-fold.h"
61 #include "tree-eh.h"
62 #include "gimple-expr.h"
63 #include "is-a.h"
64 #include "gimple.h"
65 #include "gimplify.h"
66 #include "gimple-iterator.h"
67 #include "stringpool.h"
68 #include "stor-layout.h"
69 #include "print-tree.h"
70 #include "tree-iterator.h"
71 #include "tree-inline.h"
72 #include "tree-pretty-print.h"
73 #include "langhooks.h"
74 #include "bitmap.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-cfg.h"
81 #include "tree-ssanames.h"
82 #include "tree-ssa.h"
83 #include "diagnostic-core.h"
84 #include "target.h"
85 #include "splay-tree.h"
86 #include "omp-low.h"
87 #include "gimple-low.h"
88 #include "cilk.h"
89 #include "gomp-constants.h"
91 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
92 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
93 #include "builtins.h"
95 enum gimplify_omp_var_data
97 GOVD_SEEN = 1,
98 GOVD_EXPLICIT = 2,
99 GOVD_SHARED = 4,
100 GOVD_PRIVATE = 8,
101 GOVD_FIRSTPRIVATE = 16,
102 GOVD_LASTPRIVATE = 32,
103 GOVD_REDUCTION = 64,
104 GOVD_LOCAL = 128,
105 GOVD_MAP = 256,
106 GOVD_DEBUG_PRIVATE = 512,
107 GOVD_PRIVATE_OUTER_REF = 1024,
108 GOVD_LINEAR = 2048,
109 GOVD_ALIGNED = 4096,
111 /* Flag for GOVD_MAP: don't copy back. */
112 GOVD_MAP_TO_ONLY = 8192,
114 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
115 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
117 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
118 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
119 | GOVD_LOCAL)
123 enum omp_region_type
125 ORT_WORKSHARE = 0,
126 ORT_SIMD = 1,
127 ORT_PARALLEL = 2,
128 ORT_COMBINED_PARALLEL = 3,
129 ORT_TASK = 4,
130 ORT_UNTIED_TASK = 5,
131 ORT_TEAMS = 8,
132 ORT_COMBINED_TEAMS = 9,
133 /* Data region. */
134 ORT_TARGET_DATA = 16,
135 /* Data region with offloading. */
136 ORT_TARGET = 32
139 /* Gimplify hashtable helper. */
141 struct gimplify_hasher : typed_free_remove <elt_t>
143 typedef elt_t *value_type;
144 typedef elt_t *compare_type;
145 static inline hashval_t hash (const elt_t *);
146 static inline bool equal (const elt_t *, const elt_t *);
149 struct gimplify_ctx
151 struct gimplify_ctx *prev_context;
153 vec<gbind *> bind_expr_stack;
154 tree temps;
155 gimple_seq conditional_cleanups;
156 tree exit_label;
157 tree return_temp;
159 vec<tree> case_labels;
160 /* The formal temporary table. Should this be persistent? */
161 hash_table<gimplify_hasher> *temp_htab;
163 int conditions;
164 bool save_stack;
165 bool into_ssa;
166 bool allow_rhs_cond_expr;
167 bool in_cleanup_point_expr;
170 struct gimplify_omp_ctx
172 struct gimplify_omp_ctx *outer_context;
173 splay_tree variables;
174 hash_set<tree> *privatized_types;
175 location_t location;
176 enum omp_clause_default_kind default_kind;
177 enum omp_region_type region_type;
178 bool combined_loop;
179 bool distribute;
182 static struct gimplify_ctx *gimplify_ctxp;
183 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
185 /* Forward declaration. */
186 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
188 /* Shorter alias name for the above function for use in gimplify.c
189 only. */
191 static inline void
192 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
194 gimple_seq_add_stmt_without_update (seq_p, gs);
197 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
198 NULL, a new sequence is allocated. This function is
199 similar to gimple_seq_add_seq, but does not scan the operands.
200 During gimplification, we need to manipulate statement sequences
201 before the def/use vectors have been constructed. */
203 static void
204 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
206 gimple_stmt_iterator si;
208 if (src == NULL)
209 return;
211 si = gsi_last (*dst_p);
212 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
216 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
217 and popping gimplify contexts. */
219 static struct gimplify_ctx *ctx_pool = NULL;
221 /* Return a gimplify context struct from the pool. */
223 static inline struct gimplify_ctx *
224 ctx_alloc (void)
226 struct gimplify_ctx * c = ctx_pool;
228 if (c)
229 ctx_pool = c->prev_context;
230 else
231 c = XNEW (struct gimplify_ctx);
233 memset (c, '\0', sizeof (*c));
234 return c;
237 /* Put gimplify context C back into the pool. */
239 static inline void
240 ctx_free (struct gimplify_ctx *c)
242 c->prev_context = ctx_pool;
243 ctx_pool = c;
246 /* Free allocated ctx stack memory. */
248 void
249 free_gimplify_stack (void)
251 struct gimplify_ctx *c;
253 while ((c = ctx_pool))
255 ctx_pool = c->prev_context;
256 free (c);
261 /* Set up a context for the gimplifier. */
263 void
264 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
266 struct gimplify_ctx *c = ctx_alloc ();
268 c->prev_context = gimplify_ctxp;
269 gimplify_ctxp = c;
270 gimplify_ctxp->into_ssa = in_ssa;
271 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
274 /* Tear down a context for the gimplifier. If BODY is non-null, then
275 put the temporaries into the outer BIND_EXPR. Otherwise, put them
276 in the local_decls.
278 BODY is not a sequence, but the first tuple in a sequence. */
280 void
281 pop_gimplify_context (gimple body)
283 struct gimplify_ctx *c = gimplify_ctxp;
285 gcc_assert (c
286 && (!c->bind_expr_stack.exists ()
287 || c->bind_expr_stack.is_empty ()));
288 c->bind_expr_stack.release ();
289 gimplify_ctxp = c->prev_context;
291 if (body)
292 declare_vars (c->temps, body, false);
293 else
294 record_vars (c->temps);
296 delete c->temp_htab;
297 c->temp_htab = NULL;
298 ctx_free (c);
301 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
303 static void
304 gimple_push_bind_expr (gbind *bind_stmt)
306 gimplify_ctxp->bind_expr_stack.reserve (8);
307 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
310 /* Pop the first element off the stack of bindings. */
312 static void
313 gimple_pop_bind_expr (void)
315 gimplify_ctxp->bind_expr_stack.pop ();
318 /* Return the first element of the stack of bindings. */
320 gbind *
321 gimple_current_bind_expr (void)
323 return gimplify_ctxp->bind_expr_stack.last ();
326 /* Return the stack of bindings created during gimplification. */
328 vec<gbind *>
329 gimple_bind_expr_stack (void)
331 return gimplify_ctxp->bind_expr_stack;
334 /* Return true iff there is a COND_EXPR between us and the innermost
335 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
337 static bool
338 gimple_conditional_context (void)
340 return gimplify_ctxp->conditions > 0;
343 /* Note that we've entered a COND_EXPR. */
345 static void
346 gimple_push_condition (void)
348 #ifdef ENABLE_GIMPLE_CHECKING
349 if (gimplify_ctxp->conditions == 0)
350 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
351 #endif
352 ++(gimplify_ctxp->conditions);
355 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
356 now, add any conditional cleanups we've seen to the prequeue. */
358 static void
359 gimple_pop_condition (gimple_seq *pre_p)
361 int conds = --(gimplify_ctxp->conditions);
363 gcc_assert (conds >= 0);
364 if (conds == 0)
366 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
367 gimplify_ctxp->conditional_cleanups = NULL;
371 /* A stable comparison routine for use with splay trees and DECLs. */
373 static int
374 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
376 tree a = (tree) xa;
377 tree b = (tree) xb;
379 return DECL_UID (a) - DECL_UID (b);
382 /* Create a new omp construct that deals with variable remapping. */
384 static struct gimplify_omp_ctx *
385 new_omp_context (enum omp_region_type region_type)
387 struct gimplify_omp_ctx *c;
389 c = XCNEW (struct gimplify_omp_ctx);
390 c->outer_context = gimplify_omp_ctxp;
391 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
392 c->privatized_types = new hash_set<tree>;
393 c->location = input_location;
394 c->region_type = region_type;
395 if ((region_type & ORT_TASK) == 0)
396 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
397 else
398 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
400 return c;
403 /* Destroy an omp construct that deals with variable remapping. */
405 static void
406 delete_omp_context (struct gimplify_omp_ctx *c)
408 splay_tree_delete (c->variables);
409 delete c->privatized_types;
410 XDELETE (c);
413 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
414 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
416 /* Both gimplify the statement T and append it to *SEQ_P. This function
417 behaves exactly as gimplify_stmt, but you don't have to pass T as a
418 reference. */
420 void
421 gimplify_and_add (tree t, gimple_seq *seq_p)
423 gimplify_stmt (&t, seq_p);
426 /* Gimplify statement T into sequence *SEQ_P, and return the first
427 tuple in the sequence of generated tuples for this statement.
428 Return NULL if gimplifying T produced no tuples. */
430 static gimple
431 gimplify_and_return_first (tree t, gimple_seq *seq_p)
433 gimple_stmt_iterator last = gsi_last (*seq_p);
435 gimplify_and_add (t, seq_p);
437 if (!gsi_end_p (last))
439 gsi_next (&last);
440 return gsi_stmt (last);
442 else
443 return gimple_seq_first_stmt (*seq_p);
446 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
447 LHS, or for a call argument. */
449 static bool
450 is_gimple_mem_rhs (tree t)
452 /* If we're dealing with a renamable type, either source or dest must be
453 a renamed variable. */
454 if (is_gimple_reg_type (TREE_TYPE (t)))
455 return is_gimple_val (t);
456 else
457 return is_gimple_val (t) || is_gimple_lvalue (t);
460 /* Return true if T is a CALL_EXPR or an expression that can be
461 assigned to a temporary. Note that this predicate should only be
462 used during gimplification. See the rationale for this in
463 gimplify_modify_expr. */
465 static bool
466 is_gimple_reg_rhs_or_call (tree t)
468 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
469 || TREE_CODE (t) == CALL_EXPR);
472 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
473 this predicate should only be used during gimplification. See the
474 rationale for this in gimplify_modify_expr. */
476 static bool
477 is_gimple_mem_rhs_or_call (tree t)
479 /* If we're dealing with a renamable type, either source or dest must be
480 a renamed variable. */
481 if (is_gimple_reg_type (TREE_TYPE (t)))
482 return is_gimple_val (t);
483 else
484 return (is_gimple_val (t) || is_gimple_lvalue (t)
485 || TREE_CODE (t) == CALL_EXPR);
488 /* Create a temporary with a name derived from VAL. Subroutine of
489 lookup_tmp_var; nobody else should call this function. */
491 static inline tree
492 create_tmp_from_val (tree val)
494 /* Drop all qualifiers and address-space information from the value type. */
495 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
496 tree var = create_tmp_var (type, get_name (val));
497 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
498 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
499 DECL_GIMPLE_REG_P (var) = 1;
500 return var;
503 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
504 an existing expression temporary. */
506 static tree
507 lookup_tmp_var (tree val, bool is_formal)
509 tree ret;
511 /* If not optimizing, never really reuse a temporary. local-alloc
512 won't allocate any variable that is used in more than one basic
513 block, which means it will go into memory, causing much extra
514 work in reload and final and poorer code generation, outweighing
515 the extra memory allocation here. */
516 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
517 ret = create_tmp_from_val (val);
518 else
520 elt_t elt, *elt_p;
521 elt_t **slot;
523 elt.val = val;
524 if (!gimplify_ctxp->temp_htab)
525 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
526 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
527 if (*slot == NULL)
529 elt_p = XNEW (elt_t);
530 elt_p->val = val;
531 elt_p->temp = ret = create_tmp_from_val (val);
532 *slot = elt_p;
534 else
536 elt_p = *slot;
537 ret = elt_p->temp;
541 return ret;
544 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
546 static tree
547 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
548 bool is_formal)
550 tree t, mod;
552 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
553 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
554 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
555 fb_rvalue);
557 if (gimplify_ctxp->into_ssa
558 && is_gimple_reg_type (TREE_TYPE (val)))
559 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
560 else
561 t = lookup_tmp_var (val, is_formal);
563 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
565 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
567 /* gimplify_modify_expr might want to reduce this further. */
568 gimplify_and_add (mod, pre_p);
569 ggc_free (mod);
571 return t;
574 /* Return a formal temporary variable initialized with VAL. PRE_P is as
575 in gimplify_expr. Only use this function if:
577 1) The value of the unfactored expression represented by VAL will not
578 change between the initialization and use of the temporary, and
579 2) The temporary will not be otherwise modified.
581 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
582 and #2 means it is inappropriate for && temps.
584 For other cases, use get_initialized_tmp_var instead. */
586 tree
587 get_formal_tmp_var (tree val, gimple_seq *pre_p)
589 return internal_get_tmp_var (val, pre_p, NULL, true);
592 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
593 are as in gimplify_expr. */
595 tree
596 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
598 return internal_get_tmp_var (val, pre_p, post_p, false);
601 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
602 generate debug info for them; otherwise don't. */
604 void
605 declare_vars (tree vars, gimple gs, bool debug_info)
607 tree last = vars;
608 if (last)
610 tree temps, block;
612 gbind *scope = as_a <gbind *> (gs);
614 temps = nreverse (last);
616 block = gimple_bind_block (scope);
617 gcc_assert (!block || TREE_CODE (block) == BLOCK);
618 if (!block || !debug_info)
620 DECL_CHAIN (last) = gimple_bind_vars (scope);
621 gimple_bind_set_vars (scope, temps);
623 else
625 /* We need to attach the nodes both to the BIND_EXPR and to its
626 associated BLOCK for debugging purposes. The key point here
627 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
628 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
629 if (BLOCK_VARS (block))
630 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
631 else
633 gimple_bind_set_vars (scope,
634 chainon (gimple_bind_vars (scope), temps));
635 BLOCK_VARS (block) = temps;
641 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
642 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
643 no such upper bound can be obtained. */
645 static void
646 force_constant_size (tree var)
648 /* The only attempt we make is by querying the maximum size of objects
649 of the variable's type. */
651 HOST_WIDE_INT max_size;
653 gcc_assert (TREE_CODE (var) == VAR_DECL);
655 max_size = max_int_size_in_bytes (TREE_TYPE (var));
657 gcc_assert (max_size >= 0);
659 DECL_SIZE_UNIT (var)
660 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
661 DECL_SIZE (var)
662 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
665 /* Push the temporary variable TMP into the current binding. */
667 void
668 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
670 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
672 /* Later processing assumes that the object size is constant, which might
673 not be true at this point. Force the use of a constant upper bound in
674 this case. */
675 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
676 force_constant_size (tmp);
678 DECL_CONTEXT (tmp) = fn->decl;
679 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
681 record_vars_into (tmp, fn->decl);
684 /* Push the temporary variable TMP into the current binding. */
686 void
687 gimple_add_tmp_var (tree tmp)
689 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
691 /* Later processing assumes that the object size is constant, which might
692 not be true at this point. Force the use of a constant upper bound in
693 this case. */
694 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
695 force_constant_size (tmp);
697 DECL_CONTEXT (tmp) = current_function_decl;
698 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
700 if (gimplify_ctxp)
702 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
703 gimplify_ctxp->temps = tmp;
705 /* Mark temporaries local within the nearest enclosing parallel. */
706 if (gimplify_omp_ctxp)
708 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
709 while (ctx
710 && (ctx->region_type == ORT_WORKSHARE
711 || ctx->region_type == ORT_SIMD))
712 ctx = ctx->outer_context;
713 if (ctx)
714 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
717 else if (cfun)
718 record_vars (tmp);
719 else
721 gimple_seq body_seq;
723 /* This case is for nested functions. We need to expose the locals
724 they create. */
725 body_seq = gimple_body (current_function_decl);
726 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
732 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
733 nodes that are referenced more than once in GENERIC functions. This is
734 necessary because gimplification (translation into GIMPLE) is performed
735 by modifying tree nodes in-place, so gimplication of a shared node in a
736 first context could generate an invalid GIMPLE form in a second context.
738 This is achieved with a simple mark/copy/unmark algorithm that walks the
739 GENERIC representation top-down, marks nodes with TREE_VISITED the first
740 time it encounters them, duplicates them if they already have TREE_VISITED
741 set, and finally removes the TREE_VISITED marks it has set.
743 The algorithm works only at the function level, i.e. it generates a GENERIC
744 representation of a function with no nodes shared within the function when
745 passed a GENERIC function (except for nodes that are allowed to be shared).
747 At the global level, it is also necessary to unshare tree nodes that are
748 referenced in more than one function, for the same aforementioned reason.
749 This requires some cooperation from the front-end. There are 2 strategies:
751 1. Manual unsharing. The front-end needs to call unshare_expr on every
752 expression that might end up being shared across functions.
754 2. Deep unsharing. This is an extension of regular unsharing. Instead
755 of calling unshare_expr on expressions that might be shared across
756 functions, the front-end pre-marks them with TREE_VISITED. This will
757 ensure that they are unshared on the first reference within functions
758 when the regular unsharing algorithm runs. The counterpart is that
759 this algorithm must look deeper than for manual unsharing, which is
760 specified by LANG_HOOKS_DEEP_UNSHARING.
762 If there are only few specific cases of node sharing across functions, it is
763 probably easier for a front-end to unshare the expressions manually. On the
764 contrary, if the expressions generated at the global level are as widespread
765 as expressions generated within functions, deep unsharing is very likely the
766 way to go. */
768 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
769 These nodes model computations that must be done once. If we were to
770 unshare something like SAVE_EXPR(i++), the gimplification process would
771 create wrong code. However, if DATA is non-null, it must hold a pointer
772 set that is used to unshare the subtrees of these nodes. */
774 static tree
775 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
777 tree t = *tp;
778 enum tree_code code = TREE_CODE (t);
780 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
781 copy their subtrees if we can make sure to do it only once. */
782 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
784 if (data && !((hash_set<tree> *)data)->add (t))
786 else
787 *walk_subtrees = 0;
790 /* Stop at types, decls, constants like copy_tree_r. */
791 else if (TREE_CODE_CLASS (code) == tcc_type
792 || TREE_CODE_CLASS (code) == tcc_declaration
793 || TREE_CODE_CLASS (code) == tcc_constant
794 /* We can't do anything sensible with a BLOCK used as an
795 expression, but we also can't just die when we see it
796 because of non-expression uses. So we avert our eyes
797 and cross our fingers. Silly Java. */
798 || code == BLOCK)
799 *walk_subtrees = 0;
801 /* Cope with the statement expression extension. */
802 else if (code == STATEMENT_LIST)
805 /* Leave the bulk of the work to copy_tree_r itself. */
806 else
807 copy_tree_r (tp, walk_subtrees, NULL);
809 return NULL_TREE;
812 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
813 If *TP has been visited already, then *TP is deeply copied by calling
814 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
816 static tree
817 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
819 tree t = *tp;
820 enum tree_code code = TREE_CODE (t);
822 /* Skip types, decls, and constants. But we do want to look at their
823 types and the bounds of types. Mark them as visited so we properly
824 unmark their subtrees on the unmark pass. If we've already seen them,
825 don't look down further. */
826 if (TREE_CODE_CLASS (code) == tcc_type
827 || TREE_CODE_CLASS (code) == tcc_declaration
828 || TREE_CODE_CLASS (code) == tcc_constant)
830 if (TREE_VISITED (t))
831 *walk_subtrees = 0;
832 else
833 TREE_VISITED (t) = 1;
836 /* If this node has been visited already, unshare it and don't look
837 any deeper. */
838 else if (TREE_VISITED (t))
840 walk_tree (tp, mostly_copy_tree_r, data, NULL);
841 *walk_subtrees = 0;
844 /* Otherwise, mark the node as visited and keep looking. */
845 else
846 TREE_VISITED (t) = 1;
848 return NULL_TREE;
851 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
852 copy_if_shared_r callback unmodified. */
854 static inline void
855 copy_if_shared (tree *tp, void *data)
857 walk_tree (tp, copy_if_shared_r, data, NULL);
860 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
861 any nested functions. */
863 static void
864 unshare_body (tree fndecl)
866 struct cgraph_node *cgn = cgraph_node::get (fndecl);
867 /* If the language requires deep unsharing, we need a pointer set to make
868 sure we don't repeatedly unshare subtrees of unshareable nodes. */
869 hash_set<tree> *visited
870 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
872 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
873 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
874 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
876 delete visited;
878 if (cgn)
879 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
880 unshare_body (cgn->decl);
883 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
884 Subtrees are walked until the first unvisited node is encountered. */
886 static tree
887 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
889 tree t = *tp;
891 /* If this node has been visited, unmark it and keep looking. */
892 if (TREE_VISITED (t))
893 TREE_VISITED (t) = 0;
895 /* Otherwise, don't look any deeper. */
896 else
897 *walk_subtrees = 0;
899 return NULL_TREE;
902 /* Unmark the visited trees rooted at *TP. */
904 static inline void
905 unmark_visited (tree *tp)
907 walk_tree (tp, unmark_visited_r, NULL, NULL);
910 /* Likewise, but mark all trees as not visited. */
912 static void
913 unvisit_body (tree fndecl)
915 struct cgraph_node *cgn = cgraph_node::get (fndecl);
917 unmark_visited (&DECL_SAVED_TREE (fndecl));
918 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
919 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
921 if (cgn)
922 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
923 unvisit_body (cgn->decl);
926 /* Unconditionally make an unshared copy of EXPR. This is used when using
927 stored expressions which span multiple functions, such as BINFO_VTABLE,
928 as the normal unsharing process can't tell that they're shared. */
930 tree
931 unshare_expr (tree expr)
933 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
934 return expr;
937 /* Worker for unshare_expr_without_location. */
939 static tree
940 prune_expr_location (tree *tp, int *walk_subtrees, void *)
942 if (EXPR_P (*tp))
943 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
944 else
945 *walk_subtrees = 0;
946 return NULL_TREE;
949 /* Similar to unshare_expr but also prune all expression locations
950 from EXPR. */
952 tree
953 unshare_expr_without_location (tree expr)
955 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
956 if (EXPR_P (expr))
957 walk_tree (&expr, prune_expr_location, NULL, NULL);
958 return expr;
961 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
962 contain statements and have a value. Assign its value to a temporary
963 and give it void_type_node. Return the temporary, or NULL_TREE if
964 WRAPPER was already void. */
966 tree
967 voidify_wrapper_expr (tree wrapper, tree temp)
969 tree type = TREE_TYPE (wrapper);
970 if (type && !VOID_TYPE_P (type))
972 tree *p;
974 /* Set p to point to the body of the wrapper. Loop until we find
975 something that isn't a wrapper. */
976 for (p = &wrapper; p && *p; )
978 switch (TREE_CODE (*p))
980 case BIND_EXPR:
981 TREE_SIDE_EFFECTS (*p) = 1;
982 TREE_TYPE (*p) = void_type_node;
983 /* For a BIND_EXPR, the body is operand 1. */
984 p = &BIND_EXPR_BODY (*p);
985 break;
987 case CLEANUP_POINT_EXPR:
988 case TRY_FINALLY_EXPR:
989 case TRY_CATCH_EXPR:
990 TREE_SIDE_EFFECTS (*p) = 1;
991 TREE_TYPE (*p) = void_type_node;
992 p = &TREE_OPERAND (*p, 0);
993 break;
995 case STATEMENT_LIST:
997 tree_stmt_iterator i = tsi_last (*p);
998 TREE_SIDE_EFFECTS (*p) = 1;
999 TREE_TYPE (*p) = void_type_node;
1000 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1002 break;
1004 case COMPOUND_EXPR:
1005 /* Advance to the last statement. Set all container types to
1006 void. */
1007 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1009 TREE_SIDE_EFFECTS (*p) = 1;
1010 TREE_TYPE (*p) = void_type_node;
1012 break;
1014 case TRANSACTION_EXPR:
1015 TREE_SIDE_EFFECTS (*p) = 1;
1016 TREE_TYPE (*p) = void_type_node;
1017 p = &TRANSACTION_EXPR_BODY (*p);
1018 break;
1020 default:
1021 /* Assume that any tree upon which voidify_wrapper_expr is
1022 directly called is a wrapper, and that its body is op0. */
1023 if (p == &wrapper)
1025 TREE_SIDE_EFFECTS (*p) = 1;
1026 TREE_TYPE (*p) = void_type_node;
1027 p = &TREE_OPERAND (*p, 0);
1028 break;
1030 goto out;
1034 out:
1035 if (p == NULL || IS_EMPTY_STMT (*p))
1036 temp = NULL_TREE;
1037 else if (temp)
1039 /* The wrapper is on the RHS of an assignment that we're pushing
1040 down. */
1041 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1042 || TREE_CODE (temp) == MODIFY_EXPR);
1043 TREE_OPERAND (temp, 1) = *p;
1044 *p = temp;
1046 else
1048 temp = create_tmp_var (type, "retval");
1049 *p = build2 (INIT_EXPR, type, temp, *p);
1052 return temp;
1055 return NULL_TREE;
1058 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1059 a temporary through which they communicate. */
1061 static void
1062 build_stack_save_restore (gcall **save, gcall **restore)
1064 tree tmp_var;
1066 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1067 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1068 gimple_call_set_lhs (*save, tmp_var);
1070 *restore
1071 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1072 1, tmp_var);
1075 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1077 static enum gimplify_status
1078 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1080 tree bind_expr = *expr_p;
1081 bool old_save_stack = gimplify_ctxp->save_stack;
1082 tree t;
1083 gbind *bind_stmt;
1084 gimple_seq body, cleanup;
1085 gcall *stack_save;
1086 location_t start_locus = 0, end_locus = 0;
1088 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1090 /* Mark variables seen in this bind expr. */
1091 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1093 if (TREE_CODE (t) == VAR_DECL)
1095 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1097 /* Mark variable as local. */
1098 if (ctx && !DECL_EXTERNAL (t)
1099 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1100 || splay_tree_lookup (ctx->variables,
1101 (splay_tree_key) t) == NULL))
1103 if (ctx->region_type == ORT_SIMD
1104 && TREE_ADDRESSABLE (t)
1105 && !TREE_STATIC (t))
1106 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1107 else
1108 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1111 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1113 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1114 cfun->has_local_explicit_reg_vars = true;
1117 /* Preliminarily mark non-addressed complex variables as eligible
1118 for promotion to gimple registers. We'll transform their uses
1119 as we find them. */
1120 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1121 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1122 && !TREE_THIS_VOLATILE (t)
1123 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1124 && !needs_to_live_in_memory (t))
1125 DECL_GIMPLE_REG_P (t) = 1;
1128 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1129 BIND_EXPR_BLOCK (bind_expr));
1130 gimple_push_bind_expr (bind_stmt);
1132 gimplify_ctxp->save_stack = false;
1134 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1135 body = NULL;
1136 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1137 gimple_bind_set_body (bind_stmt, body);
1139 /* Source location wise, the cleanup code (stack_restore and clobbers)
1140 belongs to the end of the block, so propagate what we have. The
1141 stack_save operation belongs to the beginning of block, which we can
1142 infer from the bind_expr directly if the block has no explicit
1143 assignment. */
1144 if (BIND_EXPR_BLOCK (bind_expr))
1146 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1147 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1149 if (start_locus == 0)
1150 start_locus = EXPR_LOCATION (bind_expr);
1152 cleanup = NULL;
1153 stack_save = NULL;
1154 if (gimplify_ctxp->save_stack)
1156 gcall *stack_restore;
1158 /* Save stack on entry and restore it on exit. Add a try_finally
1159 block to achieve this. */
1160 build_stack_save_restore (&stack_save, &stack_restore);
1162 gimple_set_location (stack_save, start_locus);
1163 gimple_set_location (stack_restore, end_locus);
1165 gimplify_seq_add_stmt (&cleanup, stack_restore);
1168 /* Add clobbers for all variables that go out of scope. */
1169 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1171 if (TREE_CODE (t) == VAR_DECL
1172 && !is_global_var (t)
1173 && DECL_CONTEXT (t) == current_function_decl
1174 && !DECL_HARD_REGISTER (t)
1175 && !TREE_THIS_VOLATILE (t)
1176 && !DECL_HAS_VALUE_EXPR_P (t)
1177 /* Only care for variables that have to be in memory. Others
1178 will be rewritten into SSA names, hence moved to the top-level. */
1179 && !is_gimple_reg (t)
1180 && flag_stack_reuse != SR_NONE)
1182 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1183 gimple clobber_stmt;
1184 TREE_THIS_VOLATILE (clobber) = 1;
1185 clobber_stmt = gimple_build_assign (t, clobber);
1186 gimple_set_location (clobber_stmt, end_locus);
1187 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1191 if (cleanup)
1193 gtry *gs;
1194 gimple_seq new_body;
1196 new_body = NULL;
1197 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1198 GIMPLE_TRY_FINALLY);
1200 if (stack_save)
1201 gimplify_seq_add_stmt (&new_body, stack_save);
1202 gimplify_seq_add_stmt (&new_body, gs);
1203 gimple_bind_set_body (bind_stmt, new_body);
1206 gimplify_ctxp->save_stack = old_save_stack;
1207 gimple_pop_bind_expr ();
1209 gimplify_seq_add_stmt (pre_p, bind_stmt);
1211 if (temp)
1213 *expr_p = temp;
1214 return GS_OK;
1217 *expr_p = NULL_TREE;
1218 return GS_ALL_DONE;
1221 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1222 GIMPLE value, it is assigned to a new temporary and the statement is
1223 re-written to return the temporary.
1225 PRE_P points to the sequence where side effects that must happen before
1226 STMT should be stored. */
1228 static enum gimplify_status
1229 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1231 greturn *ret;
1232 tree ret_expr = TREE_OPERAND (stmt, 0);
1233 tree result_decl, result;
1235 if (ret_expr == error_mark_node)
1236 return GS_ERROR;
1238 /* Implicit _Cilk_sync must be inserted right before any return statement
1239 if there is a _Cilk_spawn in the function. If the user has provided a
1240 _Cilk_sync, the optimizer should remove this duplicate one. */
1241 if (fn_contains_cilk_spawn_p (cfun))
1243 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1244 gimplify_and_add (impl_sync, pre_p);
1247 if (!ret_expr
1248 || TREE_CODE (ret_expr) == RESULT_DECL
1249 || ret_expr == error_mark_node)
1251 greturn *ret = gimple_build_return (ret_expr);
1252 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1253 gimplify_seq_add_stmt (pre_p, ret);
1254 return GS_ALL_DONE;
1257 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1258 result_decl = NULL_TREE;
1259 else
1261 result_decl = TREE_OPERAND (ret_expr, 0);
1263 /* See through a return by reference. */
1264 if (TREE_CODE (result_decl) == INDIRECT_REF)
1265 result_decl = TREE_OPERAND (result_decl, 0);
1267 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1268 || TREE_CODE (ret_expr) == INIT_EXPR)
1269 && TREE_CODE (result_decl) == RESULT_DECL);
1272 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1273 Recall that aggregate_value_p is FALSE for any aggregate type that is
1274 returned in registers. If we're returning values in registers, then
1275 we don't want to extend the lifetime of the RESULT_DECL, particularly
1276 across another call. In addition, for those aggregates for which
1277 hard_function_value generates a PARALLEL, we'll die during normal
1278 expansion of structure assignments; there's special code in expand_return
1279 to handle this case that does not exist in expand_expr. */
1280 if (!result_decl)
1281 result = NULL_TREE;
1282 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1284 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1286 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1287 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1288 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1289 should be effectively allocated by the caller, i.e. all calls to
1290 this function must be subject to the Return Slot Optimization. */
1291 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1292 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1294 result = result_decl;
1296 else if (gimplify_ctxp->return_temp)
1297 result = gimplify_ctxp->return_temp;
1298 else
1300 result = create_tmp_reg (TREE_TYPE (result_decl));
1302 /* ??? With complex control flow (usually involving abnormal edges),
1303 we can wind up warning about an uninitialized value for this. Due
1304 to how this variable is constructed and initialized, this is never
1305 true. Give up and never warn. */
1306 TREE_NO_WARNING (result) = 1;
1308 gimplify_ctxp->return_temp = result;
1311 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1312 Then gimplify the whole thing. */
1313 if (result != result_decl)
1314 TREE_OPERAND (ret_expr, 0) = result;
1316 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1318 ret = gimple_build_return (result);
1319 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1320 gimplify_seq_add_stmt (pre_p, ret);
1322 return GS_ALL_DONE;
1325 /* Gimplify a variable-length array DECL. */
1327 static void
1328 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1330 /* This is a variable-sized decl. Simplify its size and mark it
1331 for deferred expansion. */
1332 tree t, addr, ptr_type;
1334 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1335 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1337 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1338 if (DECL_HAS_VALUE_EXPR_P (decl))
1339 return;
1341 /* All occurrences of this decl in final gimplified code will be
1342 replaced by indirection. Setting DECL_VALUE_EXPR does two
1343 things: First, it lets the rest of the gimplifier know what
1344 replacement to use. Second, it lets the debug info know
1345 where to find the value. */
1346 ptr_type = build_pointer_type (TREE_TYPE (decl));
1347 addr = create_tmp_var (ptr_type, get_name (decl));
1348 DECL_IGNORED_P (addr) = 0;
1349 t = build_fold_indirect_ref (addr);
1350 TREE_THIS_NOTRAP (t) = 1;
1351 SET_DECL_VALUE_EXPR (decl, t);
1352 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1354 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1355 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1356 size_int (DECL_ALIGN (decl)));
1357 /* The call has been built for a variable-sized object. */
1358 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1359 t = fold_convert (ptr_type, t);
1360 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1362 gimplify_and_add (t, seq_p);
1364 /* Indicate that we need to restore the stack level when the
1365 enclosing BIND_EXPR is exited. */
1366 gimplify_ctxp->save_stack = true;
1369 /* A helper function to be called via walk_tree. Mark all labels under *TP
1370 as being forced. To be called for DECL_INITIAL of static variables. */
1372 static tree
1373 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1375 if (TYPE_P (*tp))
1376 *walk_subtrees = 0;
1377 if (TREE_CODE (*tp) == LABEL_DECL)
1378 FORCED_LABEL (*tp) = 1;
1380 return NULL_TREE;
1383 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1384 and initialization explicit. */
1386 static enum gimplify_status
1387 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1389 tree stmt = *stmt_p;
1390 tree decl = DECL_EXPR_DECL (stmt);
1392 *stmt_p = NULL_TREE;
1394 if (TREE_TYPE (decl) == error_mark_node)
1395 return GS_ERROR;
1397 if ((TREE_CODE (decl) == TYPE_DECL
1398 || TREE_CODE (decl) == VAR_DECL)
1399 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1400 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1402 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1403 in case its size expressions contain problematic nodes like CALL_EXPR. */
1404 if (TREE_CODE (decl) == TYPE_DECL
1405 && DECL_ORIGINAL_TYPE (decl)
1406 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1407 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1409 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1411 tree init = DECL_INITIAL (decl);
1413 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1414 || (!TREE_STATIC (decl)
1415 && flag_stack_check == GENERIC_STACK_CHECK
1416 && compare_tree_int (DECL_SIZE_UNIT (decl),
1417 STACK_CHECK_MAX_VAR_SIZE) > 0))
1418 gimplify_vla_decl (decl, seq_p);
1420 /* Some front ends do not explicitly declare all anonymous
1421 artificial variables. We compensate here by declaring the
1422 variables, though it would be better if the front ends would
1423 explicitly declare them. */
1424 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1425 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1426 gimple_add_tmp_var (decl);
1428 if (init && init != error_mark_node)
1430 if (!TREE_STATIC (decl))
1432 DECL_INITIAL (decl) = NULL_TREE;
1433 init = build2 (INIT_EXPR, void_type_node, decl, init);
1434 gimplify_and_add (init, seq_p);
1435 ggc_free (init);
1437 else
1438 /* We must still examine initializers for static variables
1439 as they may contain a label address. */
1440 walk_tree (&init, force_labels_r, NULL, NULL);
1444 return GS_ALL_DONE;
1447 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1448 and replacing the LOOP_EXPR with goto, but if the loop contains an
1449 EXIT_EXPR, we need to append a label for it to jump to. */
1451 static enum gimplify_status
1452 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1454 tree saved_label = gimplify_ctxp->exit_label;
1455 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1457 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1459 gimplify_ctxp->exit_label = NULL_TREE;
1461 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1463 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1465 if (gimplify_ctxp->exit_label)
1466 gimplify_seq_add_stmt (pre_p,
1467 gimple_build_label (gimplify_ctxp->exit_label));
1469 gimplify_ctxp->exit_label = saved_label;
1471 *expr_p = NULL;
1472 return GS_ALL_DONE;
1475 /* Gimplify a statement list onto a sequence. These may be created either
1476 by an enlightened front-end, or by shortcut_cond_expr. */
1478 static enum gimplify_status
1479 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1481 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1483 tree_stmt_iterator i = tsi_start (*expr_p);
1485 while (!tsi_end_p (i))
1487 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1488 tsi_delink (&i);
1491 if (temp)
1493 *expr_p = temp;
1494 return GS_OK;
1497 return GS_ALL_DONE;
1501 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1502 branch to. */
1504 static enum gimplify_status
1505 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1507 tree switch_expr = *expr_p;
1508 gimple_seq switch_body_seq = NULL;
1509 enum gimplify_status ret;
1510 tree index_type = TREE_TYPE (switch_expr);
1511 if (index_type == NULL_TREE)
1512 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1514 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1515 fb_rvalue);
1516 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1517 return ret;
1519 if (SWITCH_BODY (switch_expr))
1521 vec<tree> labels;
1522 vec<tree> saved_labels;
1523 tree default_case = NULL_TREE;
1524 gswitch *switch_stmt;
1526 /* If someone can be bothered to fill in the labels, they can
1527 be bothered to null out the body too. */
1528 gcc_assert (!SWITCH_LABELS (switch_expr));
1530 /* Save old labels, get new ones from body, then restore the old
1531 labels. Save all the things from the switch body to append after. */
1532 saved_labels = gimplify_ctxp->case_labels;
1533 gimplify_ctxp->case_labels.create (8);
1535 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1536 labels = gimplify_ctxp->case_labels;
1537 gimplify_ctxp->case_labels = saved_labels;
1539 preprocess_case_label_vec_for_gimple (labels, index_type,
1540 &default_case);
1542 if (!default_case)
1544 glabel *new_default;
1546 default_case
1547 = build_case_label (NULL_TREE, NULL_TREE,
1548 create_artificial_label (UNKNOWN_LOCATION));
1549 new_default = gimple_build_label (CASE_LABEL (default_case));
1550 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1553 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
1554 default_case, labels);
1555 gimplify_seq_add_stmt (pre_p, switch_stmt);
1556 gimplify_seq_add_seq (pre_p, switch_body_seq);
1557 labels.release ();
1559 else
1560 gcc_assert (SWITCH_LABELS (switch_expr));
1562 return GS_ALL_DONE;
1565 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1567 static enum gimplify_status
1568 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1570 struct gimplify_ctx *ctxp;
1571 glabel *label_stmt;
1573 /* Invalid programs can play Duff's Device type games with, for example,
1574 #pragma omp parallel. At least in the C front end, we don't
1575 detect such invalid branches until after gimplification, in the
1576 diagnose_omp_blocks pass. */
1577 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1578 if (ctxp->case_labels.exists ())
1579 break;
1581 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1582 ctxp->case_labels.safe_push (*expr_p);
1583 gimplify_seq_add_stmt (pre_p, label_stmt);
1585 return GS_ALL_DONE;
1588 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1589 if necessary. */
1591 tree
1592 build_and_jump (tree *label_p)
1594 if (label_p == NULL)
1595 /* If there's nowhere to jump, just fall through. */
1596 return NULL_TREE;
1598 if (*label_p == NULL_TREE)
1600 tree label = create_artificial_label (UNKNOWN_LOCATION);
1601 *label_p = label;
1604 return build1 (GOTO_EXPR, void_type_node, *label_p);
1607 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1608 This also involves building a label to jump to and communicating it to
1609 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1611 static enum gimplify_status
1612 gimplify_exit_expr (tree *expr_p)
1614 tree cond = TREE_OPERAND (*expr_p, 0);
1615 tree expr;
1617 expr = build_and_jump (&gimplify_ctxp->exit_label);
1618 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1619 *expr_p = expr;
1621 return GS_OK;
1624 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1625 different from its canonical type, wrap the whole thing inside a
1626 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1627 type.
1629 The canonical type of a COMPONENT_REF is the type of the field being
1630 referenced--unless the field is a bit-field which can be read directly
1631 in a smaller mode, in which case the canonical type is the
1632 sign-appropriate type corresponding to that mode. */
1634 static void
1635 canonicalize_component_ref (tree *expr_p)
1637 tree expr = *expr_p;
1638 tree type;
1640 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1642 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1643 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1644 else
1645 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1647 /* One could argue that all the stuff below is not necessary for
1648 the non-bitfield case and declare it a FE error if type
1649 adjustment would be needed. */
1650 if (TREE_TYPE (expr) != type)
1652 #ifdef ENABLE_TYPES_CHECKING
1653 tree old_type = TREE_TYPE (expr);
1654 #endif
1655 int type_quals;
1657 /* We need to preserve qualifiers and propagate them from
1658 operand 0. */
1659 type_quals = TYPE_QUALS (type)
1660 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1661 if (TYPE_QUALS (type) != type_quals)
1662 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1664 /* Set the type of the COMPONENT_REF to the underlying type. */
1665 TREE_TYPE (expr) = type;
1667 #ifdef ENABLE_TYPES_CHECKING
1668 /* It is now a FE error, if the conversion from the canonical
1669 type to the original expression type is not useless. */
1670 gcc_assert (useless_type_conversion_p (old_type, type));
1671 #endif
1675 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1676 to foo, embed that change in the ADDR_EXPR by converting
1677 T array[U];
1678 (T *)&array
1680 &array[L]
1681 where L is the lower bound. For simplicity, only do this for constant
1682 lower bound.
1683 The constraint is that the type of &array[L] is trivially convertible
1684 to T *. */
1686 static void
1687 canonicalize_addr_expr (tree *expr_p)
1689 tree expr = *expr_p;
1690 tree addr_expr = TREE_OPERAND (expr, 0);
1691 tree datype, ddatype, pddatype;
1693 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1694 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1695 || TREE_CODE (addr_expr) != ADDR_EXPR)
1696 return;
1698 /* The addr_expr type should be a pointer to an array. */
1699 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1700 if (TREE_CODE (datype) != ARRAY_TYPE)
1701 return;
1703 /* The pointer to element type shall be trivially convertible to
1704 the expression pointer type. */
1705 ddatype = TREE_TYPE (datype);
1706 pddatype = build_pointer_type (ddatype);
1707 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1708 pddatype))
1709 return;
1711 /* The lower bound and element sizes must be constant. */
1712 if (!TYPE_SIZE_UNIT (ddatype)
1713 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1714 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1715 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1716 return;
1718 /* All checks succeeded. Build a new node to merge the cast. */
1719 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1720 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1721 NULL_TREE, NULL_TREE);
1722 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1724 /* We can have stripped a required restrict qualifier above. */
1725 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1726 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1729 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1730 underneath as appropriate. */
1732 static enum gimplify_status
1733 gimplify_conversion (tree *expr_p)
1735 location_t loc = EXPR_LOCATION (*expr_p);
1736 gcc_assert (CONVERT_EXPR_P (*expr_p));
1738 /* Then strip away all but the outermost conversion. */
1739 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1741 /* And remove the outermost conversion if it's useless. */
1742 if (tree_ssa_useless_type_conversion (*expr_p))
1743 *expr_p = TREE_OPERAND (*expr_p, 0);
1745 /* If we still have a conversion at the toplevel,
1746 then canonicalize some constructs. */
1747 if (CONVERT_EXPR_P (*expr_p))
1749 tree sub = TREE_OPERAND (*expr_p, 0);
1751 /* If a NOP conversion is changing the type of a COMPONENT_REF
1752 expression, then canonicalize its type now in order to expose more
1753 redundant conversions. */
1754 if (TREE_CODE (sub) == COMPONENT_REF)
1755 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1757 /* If a NOP conversion is changing a pointer to array of foo
1758 to a pointer to foo, embed that change in the ADDR_EXPR. */
1759 else if (TREE_CODE (sub) == ADDR_EXPR)
1760 canonicalize_addr_expr (expr_p);
1763 /* If we have a conversion to a non-register type force the
1764 use of a VIEW_CONVERT_EXPR instead. */
1765 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1766 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1767 TREE_OPERAND (*expr_p, 0));
1769 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1770 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1771 TREE_SET_CODE (*expr_p, NOP_EXPR);
1773 return GS_OK;
1776 /* Nonlocal VLAs seen in the current function. */
1777 static hash_set<tree> *nonlocal_vlas;
1779 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1780 static tree nonlocal_vla_vars;
1782 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1783 DECL_VALUE_EXPR, and it's worth re-examining things. */
1785 static enum gimplify_status
1786 gimplify_var_or_parm_decl (tree *expr_p)
1788 tree decl = *expr_p;
1790 /* ??? If this is a local variable, and it has not been seen in any
1791 outer BIND_EXPR, then it's probably the result of a duplicate
1792 declaration, for which we've already issued an error. It would
1793 be really nice if the front end wouldn't leak these at all.
1794 Currently the only known culprit is C++ destructors, as seen
1795 in g++.old-deja/g++.jason/binding.C. */
1796 if (TREE_CODE (decl) == VAR_DECL
1797 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1798 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1799 && decl_function_context (decl) == current_function_decl)
1801 gcc_assert (seen_error ());
1802 return GS_ERROR;
1805 /* When within an OMP context, notice uses of variables. */
1806 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1807 return GS_ALL_DONE;
1809 /* If the decl is an alias for another expression, substitute it now. */
1810 if (DECL_HAS_VALUE_EXPR_P (decl))
1812 tree value_expr = DECL_VALUE_EXPR (decl);
1814 /* For referenced nonlocal VLAs add a decl for debugging purposes
1815 to the current function. */
1816 if (TREE_CODE (decl) == VAR_DECL
1817 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1818 && nonlocal_vlas != NULL
1819 && TREE_CODE (value_expr) == INDIRECT_REF
1820 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1821 && decl_function_context (decl) != current_function_decl)
1823 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1824 while (ctx
1825 && (ctx->region_type == ORT_WORKSHARE
1826 || ctx->region_type == ORT_SIMD))
1827 ctx = ctx->outer_context;
1828 if (!ctx && !nonlocal_vlas->add (decl))
1830 tree copy = copy_node (decl);
1832 lang_hooks.dup_lang_specific_decl (copy);
1833 SET_DECL_RTL (copy, 0);
1834 TREE_USED (copy) = 1;
1835 DECL_CHAIN (copy) = nonlocal_vla_vars;
1836 nonlocal_vla_vars = copy;
1837 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1838 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1842 *expr_p = unshare_expr (value_expr);
1843 return GS_OK;
1846 return GS_ALL_DONE;
1849 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1851 static void
1852 recalculate_side_effects (tree t)
1854 enum tree_code code = TREE_CODE (t);
1855 int len = TREE_OPERAND_LENGTH (t);
1856 int i;
1858 switch (TREE_CODE_CLASS (code))
1860 case tcc_expression:
1861 switch (code)
1863 case INIT_EXPR:
1864 case MODIFY_EXPR:
1865 case VA_ARG_EXPR:
1866 case PREDECREMENT_EXPR:
1867 case PREINCREMENT_EXPR:
1868 case POSTDECREMENT_EXPR:
1869 case POSTINCREMENT_EXPR:
1870 /* All of these have side-effects, no matter what their
1871 operands are. */
1872 return;
1874 default:
1875 break;
1877 /* Fall through. */
1879 case tcc_comparison: /* a comparison expression */
1880 case tcc_unary: /* a unary arithmetic expression */
1881 case tcc_binary: /* a binary arithmetic expression */
1882 case tcc_reference: /* a reference */
1883 case tcc_vl_exp: /* a function call */
1884 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1885 for (i = 0; i < len; ++i)
1887 tree op = TREE_OPERAND (t, i);
1888 if (op && TREE_SIDE_EFFECTS (op))
1889 TREE_SIDE_EFFECTS (t) = 1;
1891 break;
1893 case tcc_constant:
1894 /* No side-effects. */
1895 return;
1897 default:
1898 gcc_unreachable ();
1902 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1903 node *EXPR_P.
1905 compound_lval
1906 : min_lval '[' val ']'
1907 | min_lval '.' ID
1908 | compound_lval '[' val ']'
1909 | compound_lval '.' ID
1911 This is not part of the original SIMPLE definition, which separates
1912 array and member references, but it seems reasonable to handle them
1913 together. Also, this way we don't run into problems with union
1914 aliasing; gcc requires that for accesses through a union to alias, the
1915 union reference must be explicit, which was not always the case when we
1916 were splitting up array and member refs.
1918 PRE_P points to the sequence where side effects that must happen before
1919 *EXPR_P should be stored.
1921 POST_P points to the sequence where side effects that must happen after
1922 *EXPR_P should be stored. */
1924 static enum gimplify_status
1925 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1926 fallback_t fallback)
1928 tree *p;
1929 enum gimplify_status ret = GS_ALL_DONE, tret;
1930 int i;
1931 location_t loc = EXPR_LOCATION (*expr_p);
1932 tree expr = *expr_p;
1934 /* Create a stack of the subexpressions so later we can walk them in
1935 order from inner to outer. */
1936 auto_vec<tree, 10> expr_stack;
1938 /* We can handle anything that get_inner_reference can deal with. */
1939 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1941 restart:
1942 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1943 if (TREE_CODE (*p) == INDIRECT_REF)
1944 *p = fold_indirect_ref_loc (loc, *p);
1946 if (handled_component_p (*p))
1948 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1949 additional COMPONENT_REFs. */
1950 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1951 && gimplify_var_or_parm_decl (p) == GS_OK)
1952 goto restart;
1953 else
1954 break;
1956 expr_stack.safe_push (*p);
1959 gcc_assert (expr_stack.length ());
1961 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1962 walked through and P points to the innermost expression.
1964 Java requires that we elaborated nodes in source order. That
1965 means we must gimplify the inner expression followed by each of
1966 the indices, in order. But we can't gimplify the inner
1967 expression until we deal with any variable bounds, sizes, or
1968 positions in order to deal with PLACEHOLDER_EXPRs.
1970 So we do this in three steps. First we deal with the annotations
1971 for any variables in the components, then we gimplify the base,
1972 then we gimplify any indices, from left to right. */
1973 for (i = expr_stack.length () - 1; i >= 0; i--)
1975 tree t = expr_stack[i];
1977 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1979 /* Gimplify the low bound and element type size and put them into
1980 the ARRAY_REF. If these values are set, they have already been
1981 gimplified. */
1982 if (TREE_OPERAND (t, 2) == NULL_TREE)
1984 tree low = unshare_expr (array_ref_low_bound (t));
1985 if (!is_gimple_min_invariant (low))
1987 TREE_OPERAND (t, 2) = low;
1988 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1989 post_p, is_gimple_reg,
1990 fb_rvalue);
1991 ret = MIN (ret, tret);
1994 else
1996 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1997 is_gimple_reg, fb_rvalue);
1998 ret = MIN (ret, tret);
2001 if (TREE_OPERAND (t, 3) == NULL_TREE)
2003 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2004 tree elmt_size = unshare_expr (array_ref_element_size (t));
2005 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2007 /* Divide the element size by the alignment of the element
2008 type (above). */
2009 elmt_size
2010 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2012 if (!is_gimple_min_invariant (elmt_size))
2014 TREE_OPERAND (t, 3) = elmt_size;
2015 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2016 post_p, is_gimple_reg,
2017 fb_rvalue);
2018 ret = MIN (ret, tret);
2021 else
2023 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2024 is_gimple_reg, fb_rvalue);
2025 ret = MIN (ret, tret);
2028 else if (TREE_CODE (t) == COMPONENT_REF)
2030 /* Set the field offset into T and gimplify it. */
2031 if (TREE_OPERAND (t, 2) == NULL_TREE)
2033 tree offset = unshare_expr (component_ref_field_offset (t));
2034 tree field = TREE_OPERAND (t, 1);
2035 tree factor
2036 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2038 /* Divide the offset by its alignment. */
2039 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2041 if (!is_gimple_min_invariant (offset))
2043 TREE_OPERAND (t, 2) = offset;
2044 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2045 post_p, is_gimple_reg,
2046 fb_rvalue);
2047 ret = MIN (ret, tret);
2050 else
2052 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2053 is_gimple_reg, fb_rvalue);
2054 ret = MIN (ret, tret);
2059 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2060 so as to match the min_lval predicate. Failure to do so may result
2061 in the creation of large aggregate temporaries. */
2062 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2063 fallback | fb_lvalue);
2064 ret = MIN (ret, tret);
2066 /* And finally, the indices and operands of ARRAY_REF. During this
2067 loop we also remove any useless conversions. */
2068 for (; expr_stack.length () > 0; )
2070 tree t = expr_stack.pop ();
2072 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2074 /* Gimplify the dimension. */
2075 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2077 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2078 is_gimple_val, fb_rvalue);
2079 ret = MIN (ret, tret);
2083 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2085 /* The innermost expression P may have originally had
2086 TREE_SIDE_EFFECTS set which would have caused all the outer
2087 expressions in *EXPR_P leading to P to also have had
2088 TREE_SIDE_EFFECTS set. */
2089 recalculate_side_effects (t);
2092 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2093 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2095 canonicalize_component_ref (expr_p);
2098 expr_stack.release ();
2100 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2102 return ret;
2105 /* Gimplify the self modifying expression pointed to by EXPR_P
2106 (++, --, +=, -=).
2108 PRE_P points to the list where side effects that must happen before
2109 *EXPR_P should be stored.
2111 POST_P points to the list where side effects that must happen after
2112 *EXPR_P should be stored.
2114 WANT_VALUE is nonzero iff we want to use the value of this expression
2115 in another expression.
2117 ARITH_TYPE is the type the computation should be performed in. */
2119 enum gimplify_status
2120 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2121 bool want_value, tree arith_type)
2123 enum tree_code code;
2124 tree lhs, lvalue, rhs, t1;
2125 gimple_seq post = NULL, *orig_post_p = post_p;
2126 bool postfix;
2127 enum tree_code arith_code;
2128 enum gimplify_status ret;
2129 location_t loc = EXPR_LOCATION (*expr_p);
2131 code = TREE_CODE (*expr_p);
2133 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2134 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2136 /* Prefix or postfix? */
2137 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2138 /* Faster to treat as prefix if result is not used. */
2139 postfix = want_value;
2140 else
2141 postfix = false;
2143 /* For postfix, make sure the inner expression's post side effects
2144 are executed after side effects from this expression. */
2145 if (postfix)
2146 post_p = &post;
2148 /* Add or subtract? */
2149 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2150 arith_code = PLUS_EXPR;
2151 else
2152 arith_code = MINUS_EXPR;
2154 /* Gimplify the LHS into a GIMPLE lvalue. */
2155 lvalue = TREE_OPERAND (*expr_p, 0);
2156 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2157 if (ret == GS_ERROR)
2158 return ret;
2160 /* Extract the operands to the arithmetic operation. */
2161 lhs = lvalue;
2162 rhs = TREE_OPERAND (*expr_p, 1);
2164 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2165 that as the result value and in the postqueue operation. */
2166 if (postfix)
2168 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2169 if (ret == GS_ERROR)
2170 return ret;
2172 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2175 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2176 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2178 rhs = convert_to_ptrofftype_loc (loc, rhs);
2179 if (arith_code == MINUS_EXPR)
2180 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2181 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2183 else
2184 t1 = fold_convert (TREE_TYPE (*expr_p),
2185 fold_build2 (arith_code, arith_type,
2186 fold_convert (arith_type, lhs),
2187 fold_convert (arith_type, rhs)));
2189 if (postfix)
2191 gimplify_assign (lvalue, t1, pre_p);
2192 gimplify_seq_add_seq (orig_post_p, post);
2193 *expr_p = lhs;
2194 return GS_ALL_DONE;
2196 else
2198 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2199 return GS_OK;
2203 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2205 static void
2206 maybe_with_size_expr (tree *expr_p)
2208 tree expr = *expr_p;
2209 tree type = TREE_TYPE (expr);
2210 tree size;
2212 /* If we've already wrapped this or the type is error_mark_node, we can't do
2213 anything. */
2214 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2215 || type == error_mark_node)
2216 return;
2218 /* If the size isn't known or is a constant, we have nothing to do. */
2219 size = TYPE_SIZE_UNIT (type);
2220 if (!size || TREE_CODE (size) == INTEGER_CST)
2221 return;
2223 /* Otherwise, make a WITH_SIZE_EXPR. */
2224 size = unshare_expr (size);
2225 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2226 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2229 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2230 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2231 the CALL_EXPR. */
2233 enum gimplify_status
2234 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2236 bool (*test) (tree);
2237 fallback_t fb;
2239 /* In general, we allow lvalues for function arguments to avoid
2240 extra overhead of copying large aggregates out of even larger
2241 aggregates into temporaries only to copy the temporaries to
2242 the argument list. Make optimizers happy by pulling out to
2243 temporaries those types that fit in registers. */
2244 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2245 test = is_gimple_val, fb = fb_rvalue;
2246 else
2248 test = is_gimple_lvalue, fb = fb_either;
2249 /* Also strip a TARGET_EXPR that would force an extra copy. */
2250 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2252 tree init = TARGET_EXPR_INITIAL (*arg_p);
2253 if (init
2254 && !VOID_TYPE_P (TREE_TYPE (init)))
2255 *arg_p = init;
2259 /* If this is a variable sized type, we must remember the size. */
2260 maybe_with_size_expr (arg_p);
2262 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2263 /* Make sure arguments have the same location as the function call
2264 itself. */
2265 protected_set_expr_location (*arg_p, call_location);
2267 /* There is a sequence point before a function call. Side effects in
2268 the argument list must occur before the actual call. So, when
2269 gimplifying arguments, force gimplify_expr to use an internal
2270 post queue which is then appended to the end of PRE_P. */
2271 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2274 /* Don't fold inside offloading regions: it can break code by adding decl
2275 references that weren't in the source. We'll do it during omplower pass
2276 instead. */
2278 static bool
2279 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2281 struct gimplify_omp_ctx *ctx;
2282 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2283 if (ctx->region_type == ORT_TARGET)
2284 return false;
2285 return fold_stmt (gsi);
2288 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2289 WANT_VALUE is true if the result of the call is desired. */
2291 static enum gimplify_status
2292 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2294 tree fndecl, parms, p, fnptrtype;
2295 enum gimplify_status ret;
2296 int i, nargs;
2297 gcall *call;
2298 bool builtin_va_start_p = false;
2299 location_t loc = EXPR_LOCATION (*expr_p);
2301 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2303 /* For reliable diagnostics during inlining, it is necessary that
2304 every call_expr be annotated with file and line. */
2305 if (! EXPR_HAS_LOCATION (*expr_p))
2306 SET_EXPR_LOCATION (*expr_p, input_location);
2308 /* Gimplify internal functions created in the FEs. */
2309 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2311 if (want_value)
2312 return GS_ALL_DONE;
2314 nargs = call_expr_nargs (*expr_p);
2315 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2316 auto_vec<tree> vargs (nargs);
2318 for (i = 0; i < nargs; i++)
2320 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2321 EXPR_LOCATION (*expr_p));
2322 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2324 gimple call = gimple_build_call_internal_vec (ifn, vargs);
2325 gimplify_seq_add_stmt (pre_p, call);
2326 return GS_ALL_DONE;
2329 /* This may be a call to a builtin function.
2331 Builtin function calls may be transformed into different
2332 (and more efficient) builtin function calls under certain
2333 circumstances. Unfortunately, gimplification can muck things
2334 up enough that the builtin expanders are not aware that certain
2335 transformations are still valid.
2337 So we attempt transformation/gimplification of the call before
2338 we gimplify the CALL_EXPR. At this time we do not manage to
2339 transform all calls in the same manner as the expanders do, but
2340 we do transform most of them. */
2341 fndecl = get_callee_fndecl (*expr_p);
2342 if (fndecl
2343 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2344 switch (DECL_FUNCTION_CODE (fndecl))
2346 case BUILT_IN_VA_START:
2348 builtin_va_start_p = TRUE;
2349 if (call_expr_nargs (*expr_p) < 2)
2351 error ("too few arguments to function %<va_start%>");
2352 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2353 return GS_OK;
2356 if (fold_builtin_next_arg (*expr_p, true))
2358 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2359 return GS_OK;
2361 break;
2363 case BUILT_IN_LINE:
2365 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
2366 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
2367 return GS_OK;
2369 case BUILT_IN_FILE:
2371 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
2372 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
2373 return GS_OK;
2375 case BUILT_IN_FUNCTION:
2377 const char *function;
2378 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2379 *expr_p = build_string_literal (strlen (function) + 1, function);
2380 return GS_OK;
2382 default:
2385 if (fndecl && DECL_BUILT_IN (fndecl))
2387 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2388 if (new_tree && new_tree != *expr_p)
2390 /* There was a transformation of this call which computes the
2391 same value, but in a more efficient way. Return and try
2392 again. */
2393 *expr_p = new_tree;
2394 return GS_OK;
2398 /* Remember the original function pointer type. */
2399 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2401 /* There is a sequence point before the call, so any side effects in
2402 the calling expression must occur before the actual call. Force
2403 gimplify_expr to use an internal post queue. */
2404 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2405 is_gimple_call_addr, fb_rvalue);
2407 nargs = call_expr_nargs (*expr_p);
2409 /* Get argument types for verification. */
2410 fndecl = get_callee_fndecl (*expr_p);
2411 parms = NULL_TREE;
2412 if (fndecl)
2413 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2414 else
2415 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2417 if (fndecl && DECL_ARGUMENTS (fndecl))
2418 p = DECL_ARGUMENTS (fndecl);
2419 else if (parms)
2420 p = parms;
2421 else
2422 p = NULL_TREE;
2423 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2426 /* If the last argument is __builtin_va_arg_pack () and it is not
2427 passed as a named argument, decrease the number of CALL_EXPR
2428 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2429 if (!p
2430 && i < nargs
2431 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2433 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2434 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2436 if (last_arg_fndecl
2437 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2438 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2439 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2441 tree call = *expr_p;
2443 --nargs;
2444 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2445 CALL_EXPR_FN (call),
2446 nargs, CALL_EXPR_ARGP (call));
2448 /* Copy all CALL_EXPR flags, location and block, except
2449 CALL_EXPR_VA_ARG_PACK flag. */
2450 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2451 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2452 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2453 = CALL_EXPR_RETURN_SLOT_OPT (call);
2454 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2455 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2457 /* Set CALL_EXPR_VA_ARG_PACK. */
2458 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2462 /* Gimplify the function arguments. */
2463 if (nargs > 0)
2465 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2466 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2467 PUSH_ARGS_REVERSED ? i-- : i++)
2469 enum gimplify_status t;
2471 /* Avoid gimplifying the second argument to va_start, which needs to
2472 be the plain PARM_DECL. */
2473 if ((i != 1) || !builtin_va_start_p)
2475 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2476 EXPR_LOCATION (*expr_p));
2478 if (t == GS_ERROR)
2479 ret = GS_ERROR;
2484 /* Gimplify the static chain. */
2485 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2487 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2488 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2489 else
2491 enum gimplify_status t;
2492 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2493 EXPR_LOCATION (*expr_p));
2494 if (t == GS_ERROR)
2495 ret = GS_ERROR;
2499 /* Verify the function result. */
2500 if (want_value && fndecl
2501 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2503 error_at (loc, "using result of function returning %<void%>");
2504 ret = GS_ERROR;
2507 /* Try this again in case gimplification exposed something. */
2508 if (ret != GS_ERROR)
2510 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2512 if (new_tree && new_tree != *expr_p)
2514 /* There was a transformation of this call which computes the
2515 same value, but in a more efficient way. Return and try
2516 again. */
2517 *expr_p = new_tree;
2518 return GS_OK;
2521 else
2523 *expr_p = error_mark_node;
2524 return GS_ERROR;
2527 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2528 decl. This allows us to eliminate redundant or useless
2529 calls to "const" functions. */
2530 if (TREE_CODE (*expr_p) == CALL_EXPR)
2532 int flags = call_expr_flags (*expr_p);
2533 if (flags & (ECF_CONST | ECF_PURE)
2534 /* An infinite loop is considered a side effect. */
2535 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2536 TREE_SIDE_EFFECTS (*expr_p) = 0;
2539 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2540 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2541 form and delegate the creation of a GIMPLE_CALL to
2542 gimplify_modify_expr. This is always possible because when
2543 WANT_VALUE is true, the caller wants the result of this call into
2544 a temporary, which means that we will emit an INIT_EXPR in
2545 internal_get_tmp_var which will then be handled by
2546 gimplify_modify_expr. */
2547 if (!want_value)
2549 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2550 have to do is replicate it as a GIMPLE_CALL tuple. */
2551 gimple_stmt_iterator gsi;
2552 call = gimple_build_call_from_tree (*expr_p);
2553 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2554 notice_special_calls (call);
2555 gimplify_seq_add_stmt (pre_p, call);
2556 gsi = gsi_last (*pre_p);
2557 maybe_fold_stmt (&gsi);
2558 *expr_p = NULL_TREE;
2560 else
2561 /* Remember the original function type. */
2562 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2563 CALL_EXPR_FN (*expr_p));
2565 return ret;
2568 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2569 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2571 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2572 condition is true or false, respectively. If null, we should generate
2573 our own to skip over the evaluation of this specific expression.
2575 LOCUS is the source location of the COND_EXPR.
2577 This function is the tree equivalent of do_jump.
2579 shortcut_cond_r should only be called by shortcut_cond_expr. */
2581 static tree
2582 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2583 location_t locus)
2585 tree local_label = NULL_TREE;
2586 tree t, expr = NULL;
2588 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2589 retain the shortcut semantics. Just insert the gotos here;
2590 shortcut_cond_expr will append the real blocks later. */
2591 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2593 location_t new_locus;
2595 /* Turn if (a && b) into
2597 if (a); else goto no;
2598 if (b) goto yes; else goto no;
2599 (no:) */
2601 if (false_label_p == NULL)
2602 false_label_p = &local_label;
2604 /* Keep the original source location on the first 'if'. */
2605 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2606 append_to_statement_list (t, &expr);
2608 /* Set the source location of the && on the second 'if'. */
2609 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2610 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2611 new_locus);
2612 append_to_statement_list (t, &expr);
2614 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2616 location_t new_locus;
2618 /* Turn if (a || b) into
2620 if (a) goto yes;
2621 if (b) goto yes; else goto no;
2622 (yes:) */
2624 if (true_label_p == NULL)
2625 true_label_p = &local_label;
2627 /* Keep the original source location on the first 'if'. */
2628 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2629 append_to_statement_list (t, &expr);
2631 /* Set the source location of the || on the second 'if'. */
2632 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2633 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2634 new_locus);
2635 append_to_statement_list (t, &expr);
2637 else if (TREE_CODE (pred) == COND_EXPR
2638 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2639 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2641 location_t new_locus;
2643 /* As long as we're messing with gotos, turn if (a ? b : c) into
2644 if (a)
2645 if (b) goto yes; else goto no;
2646 else
2647 if (c) goto yes; else goto no;
2649 Don't do this if one of the arms has void type, which can happen
2650 in C++ when the arm is throw. */
2652 /* Keep the original source location on the first 'if'. Set the source
2653 location of the ? on the second 'if'. */
2654 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2655 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2656 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2657 false_label_p, locus),
2658 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2659 false_label_p, new_locus));
2661 else
2663 expr = build3 (COND_EXPR, void_type_node, pred,
2664 build_and_jump (true_label_p),
2665 build_and_jump (false_label_p));
2666 SET_EXPR_LOCATION (expr, locus);
2669 if (local_label)
2671 t = build1 (LABEL_EXPR, void_type_node, local_label);
2672 append_to_statement_list (t, &expr);
2675 return expr;
2678 /* Given a conditional expression EXPR with short-circuit boolean
2679 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2680 predicate apart into the equivalent sequence of conditionals. */
2682 static tree
2683 shortcut_cond_expr (tree expr)
2685 tree pred = TREE_OPERAND (expr, 0);
2686 tree then_ = TREE_OPERAND (expr, 1);
2687 tree else_ = TREE_OPERAND (expr, 2);
2688 tree true_label, false_label, end_label, t;
2689 tree *true_label_p;
2690 tree *false_label_p;
2691 bool emit_end, emit_false, jump_over_else;
2692 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2693 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2695 /* First do simple transformations. */
2696 if (!else_se)
2698 /* If there is no 'else', turn
2699 if (a && b) then c
2700 into
2701 if (a) if (b) then c. */
2702 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2704 /* Keep the original source location on the first 'if'. */
2705 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2706 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2707 /* Set the source location of the && on the second 'if'. */
2708 if (EXPR_HAS_LOCATION (pred))
2709 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2710 then_ = shortcut_cond_expr (expr);
2711 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2712 pred = TREE_OPERAND (pred, 0);
2713 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2714 SET_EXPR_LOCATION (expr, locus);
2718 if (!then_se)
2720 /* If there is no 'then', turn
2721 if (a || b); else d
2722 into
2723 if (a); else if (b); else d. */
2724 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2726 /* Keep the original source location on the first 'if'. */
2727 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2728 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2729 /* Set the source location of the || on the second 'if'. */
2730 if (EXPR_HAS_LOCATION (pred))
2731 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2732 else_ = shortcut_cond_expr (expr);
2733 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2734 pred = TREE_OPERAND (pred, 0);
2735 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2736 SET_EXPR_LOCATION (expr, locus);
2740 /* If we're done, great. */
2741 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2742 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2743 return expr;
2745 /* Otherwise we need to mess with gotos. Change
2746 if (a) c; else d;
2748 if (a); else goto no;
2749 c; goto end;
2750 no: d; end:
2751 and recursively gimplify the condition. */
2753 true_label = false_label = end_label = NULL_TREE;
2755 /* If our arms just jump somewhere, hijack those labels so we don't
2756 generate jumps to jumps. */
2758 if (then_
2759 && TREE_CODE (then_) == GOTO_EXPR
2760 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2762 true_label = GOTO_DESTINATION (then_);
2763 then_ = NULL;
2764 then_se = false;
2767 if (else_
2768 && TREE_CODE (else_) == GOTO_EXPR
2769 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2771 false_label = GOTO_DESTINATION (else_);
2772 else_ = NULL;
2773 else_se = false;
2776 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2777 if (true_label)
2778 true_label_p = &true_label;
2779 else
2780 true_label_p = NULL;
2782 /* The 'else' branch also needs a label if it contains interesting code. */
2783 if (false_label || else_se)
2784 false_label_p = &false_label;
2785 else
2786 false_label_p = NULL;
2788 /* If there was nothing else in our arms, just forward the label(s). */
2789 if (!then_se && !else_se)
2790 return shortcut_cond_r (pred, true_label_p, false_label_p,
2791 EXPR_LOC_OR_LOC (expr, input_location));
2793 /* If our last subexpression already has a terminal label, reuse it. */
2794 if (else_se)
2795 t = expr_last (else_);
2796 else if (then_se)
2797 t = expr_last (then_);
2798 else
2799 t = NULL;
2800 if (t && TREE_CODE (t) == LABEL_EXPR)
2801 end_label = LABEL_EXPR_LABEL (t);
2803 /* If we don't care about jumping to the 'else' branch, jump to the end
2804 if the condition is false. */
2805 if (!false_label_p)
2806 false_label_p = &end_label;
2808 /* We only want to emit these labels if we aren't hijacking them. */
2809 emit_end = (end_label == NULL_TREE);
2810 emit_false = (false_label == NULL_TREE);
2812 /* We only emit the jump over the else clause if we have to--if the
2813 then clause may fall through. Otherwise we can wind up with a
2814 useless jump and a useless label at the end of gimplified code,
2815 which will cause us to think that this conditional as a whole
2816 falls through even if it doesn't. If we then inline a function
2817 which ends with such a condition, that can cause us to issue an
2818 inappropriate warning about control reaching the end of a
2819 non-void function. */
2820 jump_over_else = block_may_fallthru (then_);
2822 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2823 EXPR_LOC_OR_LOC (expr, input_location));
2825 expr = NULL;
2826 append_to_statement_list (pred, &expr);
2828 append_to_statement_list (then_, &expr);
2829 if (else_se)
2831 if (jump_over_else)
2833 tree last = expr_last (expr);
2834 t = build_and_jump (&end_label);
2835 if (EXPR_HAS_LOCATION (last))
2836 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2837 append_to_statement_list (t, &expr);
2839 if (emit_false)
2841 t = build1 (LABEL_EXPR, void_type_node, false_label);
2842 append_to_statement_list (t, &expr);
2844 append_to_statement_list (else_, &expr);
2846 if (emit_end && end_label)
2848 t = build1 (LABEL_EXPR, void_type_node, end_label);
2849 append_to_statement_list (t, &expr);
2852 return expr;
2855 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2857 tree
2858 gimple_boolify (tree expr)
2860 tree type = TREE_TYPE (expr);
2861 location_t loc = EXPR_LOCATION (expr);
2863 if (TREE_CODE (expr) == NE_EXPR
2864 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2865 && integer_zerop (TREE_OPERAND (expr, 1)))
2867 tree call = TREE_OPERAND (expr, 0);
2868 tree fn = get_callee_fndecl (call);
2870 /* For __builtin_expect ((long) (x), y) recurse into x as well
2871 if x is truth_value_p. */
2872 if (fn
2873 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2874 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2875 && call_expr_nargs (call) == 2)
2877 tree arg = CALL_EXPR_ARG (call, 0);
2878 if (arg)
2880 if (TREE_CODE (arg) == NOP_EXPR
2881 && TREE_TYPE (arg) == TREE_TYPE (call))
2882 arg = TREE_OPERAND (arg, 0);
2883 if (truth_value_p (TREE_CODE (arg)))
2885 arg = gimple_boolify (arg);
2886 CALL_EXPR_ARG (call, 0)
2887 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2893 switch (TREE_CODE (expr))
2895 case TRUTH_AND_EXPR:
2896 case TRUTH_OR_EXPR:
2897 case TRUTH_XOR_EXPR:
2898 case TRUTH_ANDIF_EXPR:
2899 case TRUTH_ORIF_EXPR:
2900 /* Also boolify the arguments of truth exprs. */
2901 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2902 /* FALLTHRU */
2904 case TRUTH_NOT_EXPR:
2905 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2907 /* These expressions always produce boolean results. */
2908 if (TREE_CODE (type) != BOOLEAN_TYPE)
2909 TREE_TYPE (expr) = boolean_type_node;
2910 return expr;
2912 case ANNOTATE_EXPR:
2913 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
2915 case annot_expr_ivdep_kind:
2916 case annot_expr_no_vector_kind:
2917 case annot_expr_vector_kind:
2918 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2919 if (TREE_CODE (type) != BOOLEAN_TYPE)
2920 TREE_TYPE (expr) = boolean_type_node;
2921 return expr;
2922 default:
2923 gcc_unreachable ();
2926 default:
2927 if (COMPARISON_CLASS_P (expr))
2929 /* There expressions always prduce boolean results. */
2930 if (TREE_CODE (type) != BOOLEAN_TYPE)
2931 TREE_TYPE (expr) = boolean_type_node;
2932 return expr;
2934 /* Other expressions that get here must have boolean values, but
2935 might need to be converted to the appropriate mode. */
2936 if (TREE_CODE (type) == BOOLEAN_TYPE)
2937 return expr;
2938 return fold_convert_loc (loc, boolean_type_node, expr);
2942 /* Given a conditional expression *EXPR_P without side effects, gimplify
2943 its operands. New statements are inserted to PRE_P. */
2945 static enum gimplify_status
2946 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2948 tree expr = *expr_p, cond;
2949 enum gimplify_status ret, tret;
2950 enum tree_code code;
2952 cond = gimple_boolify (COND_EXPR_COND (expr));
2954 /* We need to handle && and || specially, as their gimplification
2955 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2956 code = TREE_CODE (cond);
2957 if (code == TRUTH_ANDIF_EXPR)
2958 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2959 else if (code == TRUTH_ORIF_EXPR)
2960 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2961 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2962 COND_EXPR_COND (*expr_p) = cond;
2964 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2965 is_gimple_val, fb_rvalue);
2966 ret = MIN (ret, tret);
2967 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2968 is_gimple_val, fb_rvalue);
2970 return MIN (ret, tret);
2973 /* Return true if evaluating EXPR could trap.
2974 EXPR is GENERIC, while tree_could_trap_p can be called
2975 only on GIMPLE. */
2977 static bool
2978 generic_expr_could_trap_p (tree expr)
2980 unsigned i, n;
2982 if (!expr || is_gimple_val (expr))
2983 return false;
2985 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2986 return true;
2988 n = TREE_OPERAND_LENGTH (expr);
2989 for (i = 0; i < n; i++)
2990 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2991 return true;
2993 return false;
2996 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2997 into
2999 if (p) if (p)
3000 t1 = a; a;
3001 else or else
3002 t1 = b; b;
3005 The second form is used when *EXPR_P is of type void.
3007 PRE_P points to the list where side effects that must happen before
3008 *EXPR_P should be stored. */
3010 static enum gimplify_status
3011 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3013 tree expr = *expr_p;
3014 tree type = TREE_TYPE (expr);
3015 location_t loc = EXPR_LOCATION (expr);
3016 tree tmp, arm1, arm2;
3017 enum gimplify_status ret;
3018 tree label_true, label_false, label_cont;
3019 bool have_then_clause_p, have_else_clause_p;
3020 gcond *cond_stmt;
3021 enum tree_code pred_code;
3022 gimple_seq seq = NULL;
3024 /* If this COND_EXPR has a value, copy the values into a temporary within
3025 the arms. */
3026 if (!VOID_TYPE_P (type))
3028 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3029 tree result;
3031 /* If either an rvalue is ok or we do not require an lvalue, create the
3032 temporary. But we cannot do that if the type is addressable. */
3033 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3034 && !TREE_ADDRESSABLE (type))
3036 if (gimplify_ctxp->allow_rhs_cond_expr
3037 /* If either branch has side effects or could trap, it can't be
3038 evaluated unconditionally. */
3039 && !TREE_SIDE_EFFECTS (then_)
3040 && !generic_expr_could_trap_p (then_)
3041 && !TREE_SIDE_EFFECTS (else_)
3042 && !generic_expr_could_trap_p (else_))
3043 return gimplify_pure_cond_expr (expr_p, pre_p);
3045 tmp = create_tmp_var (type, "iftmp");
3046 result = tmp;
3049 /* Otherwise, only create and copy references to the values. */
3050 else
3052 type = build_pointer_type (type);
3054 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3055 then_ = build_fold_addr_expr_loc (loc, then_);
3057 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3058 else_ = build_fold_addr_expr_loc (loc, else_);
3060 expr
3061 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3063 tmp = create_tmp_var (type, "iftmp");
3064 result = build_simple_mem_ref_loc (loc, tmp);
3067 /* Build the new then clause, `tmp = then_;'. But don't build the
3068 assignment if the value is void; in C++ it can be if it's a throw. */
3069 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3070 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3072 /* Similarly, build the new else clause, `tmp = else_;'. */
3073 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3074 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3076 TREE_TYPE (expr) = void_type_node;
3077 recalculate_side_effects (expr);
3079 /* Move the COND_EXPR to the prequeue. */
3080 gimplify_stmt (&expr, pre_p);
3082 *expr_p = result;
3083 return GS_ALL_DONE;
3086 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3087 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3088 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3089 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3091 /* Make sure the condition has BOOLEAN_TYPE. */
3092 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3094 /* Break apart && and || conditions. */
3095 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3096 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3098 expr = shortcut_cond_expr (expr);
3100 if (expr != *expr_p)
3102 *expr_p = expr;
3104 /* We can't rely on gimplify_expr to re-gimplify the expanded
3105 form properly, as cleanups might cause the target labels to be
3106 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3107 set up a conditional context. */
3108 gimple_push_condition ();
3109 gimplify_stmt (expr_p, &seq);
3110 gimple_pop_condition (pre_p);
3111 gimple_seq_add_seq (pre_p, seq);
3113 return GS_ALL_DONE;
3117 /* Now do the normal gimplification. */
3119 /* Gimplify condition. */
3120 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3121 fb_rvalue);
3122 if (ret == GS_ERROR)
3123 return GS_ERROR;
3124 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3126 gimple_push_condition ();
3128 have_then_clause_p = have_else_clause_p = false;
3129 if (TREE_OPERAND (expr, 1) != NULL
3130 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3131 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3132 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3133 == current_function_decl)
3134 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3135 have different locations, otherwise we end up with incorrect
3136 location information on the branches. */
3137 && (optimize
3138 || !EXPR_HAS_LOCATION (expr)
3139 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3140 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3142 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3143 have_then_clause_p = true;
3145 else
3146 label_true = create_artificial_label (UNKNOWN_LOCATION);
3147 if (TREE_OPERAND (expr, 2) != NULL
3148 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3149 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3150 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3151 == current_function_decl)
3152 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3153 have different locations, otherwise we end up with incorrect
3154 location information on the branches. */
3155 && (optimize
3156 || !EXPR_HAS_LOCATION (expr)
3157 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3158 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3160 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3161 have_else_clause_p = true;
3163 else
3164 label_false = create_artificial_label (UNKNOWN_LOCATION);
3166 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3167 &arm2);
3169 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3170 label_false);
3172 gimplify_seq_add_stmt (&seq, cond_stmt);
3173 label_cont = NULL_TREE;
3174 if (!have_then_clause_p)
3176 /* For if (...) {} else { code; } put label_true after
3177 the else block. */
3178 if (TREE_OPERAND (expr, 1) == NULL_TREE
3179 && !have_else_clause_p
3180 && TREE_OPERAND (expr, 2) != NULL_TREE)
3181 label_cont = label_true;
3182 else
3184 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3185 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3186 /* For if (...) { code; } else {} or
3187 if (...) { code; } else goto label; or
3188 if (...) { code; return; } else { ... }
3189 label_cont isn't needed. */
3190 if (!have_else_clause_p
3191 && TREE_OPERAND (expr, 2) != NULL_TREE
3192 && gimple_seq_may_fallthru (seq))
3194 gimple g;
3195 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3197 g = gimple_build_goto (label_cont);
3199 /* GIMPLE_COND's are very low level; they have embedded
3200 gotos. This particular embedded goto should not be marked
3201 with the location of the original COND_EXPR, as it would
3202 correspond to the COND_EXPR's condition, not the ELSE or the
3203 THEN arms. To avoid marking it with the wrong location, flag
3204 it as "no location". */
3205 gimple_set_do_not_emit_location (g);
3207 gimplify_seq_add_stmt (&seq, g);
3211 if (!have_else_clause_p)
3213 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3214 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3216 if (label_cont)
3217 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3219 gimple_pop_condition (pre_p);
3220 gimple_seq_add_seq (pre_p, seq);
3222 if (ret == GS_ERROR)
3223 ; /* Do nothing. */
3224 else if (have_then_clause_p || have_else_clause_p)
3225 ret = GS_ALL_DONE;
3226 else
3228 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3229 expr = TREE_OPERAND (expr, 0);
3230 gimplify_stmt (&expr, pre_p);
3233 *expr_p = NULL;
3234 return ret;
3237 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3238 to be marked addressable.
3240 We cannot rely on such an expression being directly markable if a temporary
3241 has been created by the gimplification. In this case, we create another
3242 temporary and initialize it with a copy, which will become a store after we
3243 mark it addressable. This can happen if the front-end passed us something
3244 that it could not mark addressable yet, like a Fortran pass-by-reference
3245 parameter (int) floatvar. */
3247 static void
3248 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3250 while (handled_component_p (*expr_p))
3251 expr_p = &TREE_OPERAND (*expr_p, 0);
3252 if (is_gimple_reg (*expr_p))
3254 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3255 DECL_GIMPLE_REG_P (var) = 0;
3256 *expr_p = var;
3260 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3261 a call to __builtin_memcpy. */
3263 static enum gimplify_status
3264 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3265 gimple_seq *seq_p)
3267 tree t, to, to_ptr, from, from_ptr;
3268 gcall *gs;
3269 location_t loc = EXPR_LOCATION (*expr_p);
3271 to = TREE_OPERAND (*expr_p, 0);
3272 from = TREE_OPERAND (*expr_p, 1);
3274 /* Mark the RHS addressable. Beware that it may not be possible to do so
3275 directly if a temporary has been created by the gimplification. */
3276 prepare_gimple_addressable (&from, seq_p);
3278 mark_addressable (from);
3279 from_ptr = build_fold_addr_expr_loc (loc, from);
3280 gimplify_arg (&from_ptr, seq_p, loc);
3282 mark_addressable (to);
3283 to_ptr = build_fold_addr_expr_loc (loc, to);
3284 gimplify_arg (&to_ptr, seq_p, loc);
3286 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3288 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3290 if (want_value)
3292 /* tmp = memcpy() */
3293 t = create_tmp_var (TREE_TYPE (to_ptr));
3294 gimple_call_set_lhs (gs, t);
3295 gimplify_seq_add_stmt (seq_p, gs);
3297 *expr_p = build_simple_mem_ref (t);
3298 return GS_ALL_DONE;
3301 gimplify_seq_add_stmt (seq_p, gs);
3302 *expr_p = NULL;
3303 return GS_ALL_DONE;
3306 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3307 a call to __builtin_memset. In this case we know that the RHS is
3308 a CONSTRUCTOR with an empty element list. */
3310 static enum gimplify_status
3311 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3312 gimple_seq *seq_p)
3314 tree t, from, to, to_ptr;
3315 gcall *gs;
3316 location_t loc = EXPR_LOCATION (*expr_p);
3318 /* Assert our assumptions, to abort instead of producing wrong code
3319 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3320 not be immediately exposed. */
3321 from = TREE_OPERAND (*expr_p, 1);
3322 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3323 from = TREE_OPERAND (from, 0);
3325 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3326 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3328 /* Now proceed. */
3329 to = TREE_OPERAND (*expr_p, 0);
3331 to_ptr = build_fold_addr_expr_loc (loc, to);
3332 gimplify_arg (&to_ptr, seq_p, loc);
3333 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3335 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3337 if (want_value)
3339 /* tmp = memset() */
3340 t = create_tmp_var (TREE_TYPE (to_ptr));
3341 gimple_call_set_lhs (gs, t);
3342 gimplify_seq_add_stmt (seq_p, gs);
3344 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3345 return GS_ALL_DONE;
3348 gimplify_seq_add_stmt (seq_p, gs);
3349 *expr_p = NULL;
3350 return GS_ALL_DONE;
3353 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3354 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3355 assignment. Return non-null if we detect a potential overlap. */
3357 struct gimplify_init_ctor_preeval_data
3359 /* The base decl of the lhs object. May be NULL, in which case we
3360 have to assume the lhs is indirect. */
3361 tree lhs_base_decl;
3363 /* The alias set of the lhs object. */
3364 alias_set_type lhs_alias_set;
3367 static tree
3368 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3370 struct gimplify_init_ctor_preeval_data *data
3371 = (struct gimplify_init_ctor_preeval_data *) xdata;
3372 tree t = *tp;
3374 /* If we find the base object, obviously we have overlap. */
3375 if (data->lhs_base_decl == t)
3376 return t;
3378 /* If the constructor component is indirect, determine if we have a
3379 potential overlap with the lhs. The only bits of information we
3380 have to go on at this point are addressability and alias sets. */
3381 if ((INDIRECT_REF_P (t)
3382 || TREE_CODE (t) == MEM_REF)
3383 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3384 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3385 return t;
3387 /* If the constructor component is a call, determine if it can hide a
3388 potential overlap with the lhs through an INDIRECT_REF like above.
3389 ??? Ugh - this is completely broken. In fact this whole analysis
3390 doesn't look conservative. */
3391 if (TREE_CODE (t) == CALL_EXPR)
3393 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3395 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3396 if (POINTER_TYPE_P (TREE_VALUE (type))
3397 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3398 && alias_sets_conflict_p (data->lhs_alias_set,
3399 get_alias_set
3400 (TREE_TYPE (TREE_VALUE (type)))))
3401 return t;
3404 if (IS_TYPE_OR_DECL_P (t))
3405 *walk_subtrees = 0;
3406 return NULL;
3409 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3410 force values that overlap with the lhs (as described by *DATA)
3411 into temporaries. */
3413 static void
3414 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3415 struct gimplify_init_ctor_preeval_data *data)
3417 enum gimplify_status one;
3419 /* If the value is constant, then there's nothing to pre-evaluate. */
3420 if (TREE_CONSTANT (*expr_p))
3422 /* Ensure it does not have side effects, it might contain a reference to
3423 the object we're initializing. */
3424 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3425 return;
3428 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3429 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3430 return;
3432 /* Recurse for nested constructors. */
3433 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3435 unsigned HOST_WIDE_INT ix;
3436 constructor_elt *ce;
3437 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3439 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3440 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3442 return;
3445 /* If this is a variable sized type, we must remember the size. */
3446 maybe_with_size_expr (expr_p);
3448 /* Gimplify the constructor element to something appropriate for the rhs
3449 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3450 the gimplifier will consider this a store to memory. Doing this
3451 gimplification now means that we won't have to deal with complicated
3452 language-specific trees, nor trees like SAVE_EXPR that can induce
3453 exponential search behavior. */
3454 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3455 if (one == GS_ERROR)
3457 *expr_p = NULL;
3458 return;
3461 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3462 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3463 always be true for all scalars, since is_gimple_mem_rhs insists on a
3464 temporary variable for them. */
3465 if (DECL_P (*expr_p))
3466 return;
3468 /* If this is of variable size, we have no choice but to assume it doesn't
3469 overlap since we can't make a temporary for it. */
3470 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3471 return;
3473 /* Otherwise, we must search for overlap ... */
3474 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3475 return;
3477 /* ... and if found, force the value into a temporary. */
3478 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3481 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3482 a RANGE_EXPR in a CONSTRUCTOR for an array.
3484 var = lower;
3485 loop_entry:
3486 object[var] = value;
3487 if (var == upper)
3488 goto loop_exit;
3489 var = var + 1;
3490 goto loop_entry;
3491 loop_exit:
3493 We increment var _after_ the loop exit check because we might otherwise
3494 fail if upper == TYPE_MAX_VALUE (type for upper).
3496 Note that we never have to deal with SAVE_EXPRs here, because this has
3497 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3499 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3500 gimple_seq *, bool);
3502 static void
3503 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3504 tree value, tree array_elt_type,
3505 gimple_seq *pre_p, bool cleared)
3507 tree loop_entry_label, loop_exit_label, fall_thru_label;
3508 tree var, var_type, cref, tmp;
3510 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3511 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3512 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3514 /* Create and initialize the index variable. */
3515 var_type = TREE_TYPE (upper);
3516 var = create_tmp_var (var_type);
3517 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3519 /* Add the loop entry label. */
3520 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3522 /* Build the reference. */
3523 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3524 var, NULL_TREE, NULL_TREE);
3526 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3527 the store. Otherwise just assign value to the reference. */
3529 if (TREE_CODE (value) == CONSTRUCTOR)
3530 /* NB we might have to call ourself recursively through
3531 gimplify_init_ctor_eval if the value is a constructor. */
3532 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3533 pre_p, cleared);
3534 else
3535 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3537 /* We exit the loop when the index var is equal to the upper bound. */
3538 gimplify_seq_add_stmt (pre_p,
3539 gimple_build_cond (EQ_EXPR, var, upper,
3540 loop_exit_label, fall_thru_label));
3542 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3544 /* Otherwise, increment the index var... */
3545 tmp = build2 (PLUS_EXPR, var_type, var,
3546 fold_convert (var_type, integer_one_node));
3547 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3549 /* ...and jump back to the loop entry. */
3550 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3552 /* Add the loop exit label. */
3553 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3556 /* Return true if FDECL is accessing a field that is zero sized. */
3558 static bool
3559 zero_sized_field_decl (const_tree fdecl)
3561 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3562 && integer_zerop (DECL_SIZE (fdecl)))
3563 return true;
3564 return false;
3567 /* Return true if TYPE is zero sized. */
3569 static bool
3570 zero_sized_type (const_tree type)
3572 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3573 && integer_zerop (TYPE_SIZE (type)))
3574 return true;
3575 return false;
3578 /* A subroutine of gimplify_init_constructor. Generate individual
3579 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3580 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3581 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3582 zeroed first. */
3584 static void
3585 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3586 gimple_seq *pre_p, bool cleared)
3588 tree array_elt_type = NULL;
3589 unsigned HOST_WIDE_INT ix;
3590 tree purpose, value;
3592 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3593 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3595 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3597 tree cref;
3599 /* NULL values are created above for gimplification errors. */
3600 if (value == NULL)
3601 continue;
3603 if (cleared && initializer_zerop (value))
3604 continue;
3606 /* ??? Here's to hoping the front end fills in all of the indices,
3607 so we don't have to figure out what's missing ourselves. */
3608 gcc_assert (purpose);
3610 /* Skip zero-sized fields, unless value has side-effects. This can
3611 happen with calls to functions returning a zero-sized type, which
3612 we shouldn't discard. As a number of downstream passes don't
3613 expect sets of zero-sized fields, we rely on the gimplification of
3614 the MODIFY_EXPR we make below to drop the assignment statement. */
3615 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3616 continue;
3618 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3619 whole range. */
3620 if (TREE_CODE (purpose) == RANGE_EXPR)
3622 tree lower = TREE_OPERAND (purpose, 0);
3623 tree upper = TREE_OPERAND (purpose, 1);
3625 /* If the lower bound is equal to upper, just treat it as if
3626 upper was the index. */
3627 if (simple_cst_equal (lower, upper))
3628 purpose = upper;
3629 else
3631 gimplify_init_ctor_eval_range (object, lower, upper, value,
3632 array_elt_type, pre_p, cleared);
3633 continue;
3637 if (array_elt_type)
3639 /* Do not use bitsizetype for ARRAY_REF indices. */
3640 if (TYPE_DOMAIN (TREE_TYPE (object)))
3641 purpose
3642 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3643 purpose);
3644 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3645 purpose, NULL_TREE, NULL_TREE);
3647 else
3649 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3650 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3651 unshare_expr (object), purpose, NULL_TREE);
3654 if (TREE_CODE (value) == CONSTRUCTOR
3655 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3656 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3657 pre_p, cleared);
3658 else
3660 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3661 gimplify_and_add (init, pre_p);
3662 ggc_free (init);
3667 /* Return the appropriate RHS predicate for this LHS. */
3669 gimple_predicate
3670 rhs_predicate_for (tree lhs)
3672 if (is_gimple_reg (lhs))
3673 return is_gimple_reg_rhs_or_call;
3674 else
3675 return is_gimple_mem_rhs_or_call;
3678 /* Gimplify a C99 compound literal expression. This just means adding
3679 the DECL_EXPR before the current statement and using its anonymous
3680 decl instead. */
3682 static enum gimplify_status
3683 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3684 bool (*gimple_test_f) (tree),
3685 fallback_t fallback)
3687 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3688 tree decl = DECL_EXPR_DECL (decl_s);
3689 tree init = DECL_INITIAL (decl);
3690 /* Mark the decl as addressable if the compound literal
3691 expression is addressable now, otherwise it is marked too late
3692 after we gimplify the initialization expression. */
3693 if (TREE_ADDRESSABLE (*expr_p))
3694 TREE_ADDRESSABLE (decl) = 1;
3695 /* Otherwise, if we don't need an lvalue and have a literal directly
3696 substitute it. Check if it matches the gimple predicate, as
3697 otherwise we'd generate a new temporary, and we can as well just
3698 use the decl we already have. */
3699 else if (!TREE_ADDRESSABLE (decl)
3700 && init
3701 && (fallback & fb_lvalue) == 0
3702 && gimple_test_f (init))
3704 *expr_p = init;
3705 return GS_OK;
3708 /* Preliminarily mark non-addressed complex variables as eligible
3709 for promotion to gimple registers. We'll transform their uses
3710 as we find them. */
3711 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3712 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3713 && !TREE_THIS_VOLATILE (decl)
3714 && !needs_to_live_in_memory (decl))
3715 DECL_GIMPLE_REG_P (decl) = 1;
3717 /* If the decl is not addressable, then it is being used in some
3718 expression or on the right hand side of a statement, and it can
3719 be put into a readonly data section. */
3720 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3721 TREE_READONLY (decl) = 1;
3723 /* This decl isn't mentioned in the enclosing block, so add it to the
3724 list of temps. FIXME it seems a bit of a kludge to say that
3725 anonymous artificial vars aren't pushed, but everything else is. */
3726 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3727 gimple_add_tmp_var (decl);
3729 gimplify_and_add (decl_s, pre_p);
3730 *expr_p = decl;
3731 return GS_OK;
3734 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3735 return a new CONSTRUCTOR if something changed. */
3737 static tree
3738 optimize_compound_literals_in_ctor (tree orig_ctor)
3740 tree ctor = orig_ctor;
3741 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3742 unsigned int idx, num = vec_safe_length (elts);
3744 for (idx = 0; idx < num; idx++)
3746 tree value = (*elts)[idx].value;
3747 tree newval = value;
3748 if (TREE_CODE (value) == CONSTRUCTOR)
3749 newval = optimize_compound_literals_in_ctor (value);
3750 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3752 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3753 tree decl = DECL_EXPR_DECL (decl_s);
3754 tree init = DECL_INITIAL (decl);
3756 if (!TREE_ADDRESSABLE (value)
3757 && !TREE_ADDRESSABLE (decl)
3758 && init
3759 && TREE_CODE (init) == CONSTRUCTOR)
3760 newval = optimize_compound_literals_in_ctor (init);
3762 if (newval == value)
3763 continue;
3765 if (ctor == orig_ctor)
3767 ctor = copy_node (orig_ctor);
3768 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3769 elts = CONSTRUCTOR_ELTS (ctor);
3771 (*elts)[idx].value = newval;
3773 return ctor;
3776 /* A subroutine of gimplify_modify_expr. Break out elements of a
3777 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3779 Note that we still need to clear any elements that don't have explicit
3780 initializers, so if not all elements are initialized we keep the
3781 original MODIFY_EXPR, we just remove all of the constructor elements.
3783 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3784 GS_ERROR if we would have to create a temporary when gimplifying
3785 this constructor. Otherwise, return GS_OK.
3787 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3789 static enum gimplify_status
3790 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3791 bool want_value, bool notify_temp_creation)
3793 tree object, ctor, type;
3794 enum gimplify_status ret;
3795 vec<constructor_elt, va_gc> *elts;
3797 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3799 if (!notify_temp_creation)
3801 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3802 is_gimple_lvalue, fb_lvalue);
3803 if (ret == GS_ERROR)
3804 return ret;
3807 object = TREE_OPERAND (*expr_p, 0);
3808 ctor = TREE_OPERAND (*expr_p, 1) =
3809 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3810 type = TREE_TYPE (ctor);
3811 elts = CONSTRUCTOR_ELTS (ctor);
3812 ret = GS_ALL_DONE;
3814 switch (TREE_CODE (type))
3816 case RECORD_TYPE:
3817 case UNION_TYPE:
3818 case QUAL_UNION_TYPE:
3819 case ARRAY_TYPE:
3821 struct gimplify_init_ctor_preeval_data preeval_data;
3822 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3823 bool cleared, complete_p, valid_const_initializer;
3825 /* Aggregate types must lower constructors to initialization of
3826 individual elements. The exception is that a CONSTRUCTOR node
3827 with no elements indicates zero-initialization of the whole. */
3828 if (vec_safe_is_empty (elts))
3830 if (notify_temp_creation)
3831 return GS_OK;
3832 break;
3835 /* Fetch information about the constructor to direct later processing.
3836 We might want to make static versions of it in various cases, and
3837 can only do so if it known to be a valid constant initializer. */
3838 valid_const_initializer
3839 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3840 &num_ctor_elements, &complete_p);
3842 /* If a const aggregate variable is being initialized, then it
3843 should never be a lose to promote the variable to be static. */
3844 if (valid_const_initializer
3845 && num_nonzero_elements > 1
3846 && TREE_READONLY (object)
3847 && TREE_CODE (object) == VAR_DECL
3848 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3850 if (notify_temp_creation)
3851 return GS_ERROR;
3852 DECL_INITIAL (object) = ctor;
3853 TREE_STATIC (object) = 1;
3854 if (!DECL_NAME (object))
3855 DECL_NAME (object) = create_tmp_var_name ("C");
3856 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3858 /* ??? C++ doesn't automatically append a .<number> to the
3859 assembler name, and even when it does, it looks at FE private
3860 data structures to figure out what that number should be,
3861 which are not set for this variable. I suppose this is
3862 important for local statics for inline functions, which aren't
3863 "local" in the object file sense. So in order to get a unique
3864 TU-local symbol, we must invoke the lhd version now. */
3865 lhd_set_decl_assembler_name (object);
3867 *expr_p = NULL_TREE;
3868 break;
3871 /* If there are "lots" of initialized elements, even discounting
3872 those that are not address constants (and thus *must* be
3873 computed at runtime), then partition the constructor into
3874 constant and non-constant parts. Block copy the constant
3875 parts in, then generate code for the non-constant parts. */
3876 /* TODO. There's code in cp/typeck.c to do this. */
3878 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3879 /* store_constructor will ignore the clearing of variable-sized
3880 objects. Initializers for such objects must explicitly set
3881 every field that needs to be set. */
3882 cleared = false;
3883 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3884 /* If the constructor isn't complete, clear the whole object
3885 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3887 ??? This ought not to be needed. For any element not present
3888 in the initializer, we should simply set them to zero. Except
3889 we'd need to *find* the elements that are not present, and that
3890 requires trickery to avoid quadratic compile-time behavior in
3891 large cases or excessive memory use in small cases. */
3892 cleared = true;
3893 else if (num_ctor_elements - num_nonzero_elements
3894 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3895 && num_nonzero_elements < num_ctor_elements / 4)
3896 /* If there are "lots" of zeros, it's more efficient to clear
3897 the memory and then set the nonzero elements. */
3898 cleared = true;
3899 else
3900 cleared = false;
3902 /* If there are "lots" of initialized elements, and all of them
3903 are valid address constants, then the entire initializer can
3904 be dropped to memory, and then memcpy'd out. Don't do this
3905 for sparse arrays, though, as it's more efficient to follow
3906 the standard CONSTRUCTOR behavior of memset followed by
3907 individual element initialization. Also don't do this for small
3908 all-zero initializers (which aren't big enough to merit
3909 clearing), and don't try to make bitwise copies of
3910 TREE_ADDRESSABLE types.
3912 We cannot apply such transformation when compiling chkp static
3913 initializer because creation of initializer image in the memory
3914 will require static initialization of bounds for it. It should
3915 result in another gimplification of similar initializer and we
3916 may fall into infinite loop. */
3917 if (valid_const_initializer
3918 && !(cleared || num_nonzero_elements == 0)
3919 && !TREE_ADDRESSABLE (type)
3920 && (!current_function_decl
3921 || !lookup_attribute ("chkp ctor",
3922 DECL_ATTRIBUTES (current_function_decl))))
3924 HOST_WIDE_INT size = int_size_in_bytes (type);
3925 unsigned int align;
3927 /* ??? We can still get unbounded array types, at least
3928 from the C++ front end. This seems wrong, but attempt
3929 to work around it for now. */
3930 if (size < 0)
3932 size = int_size_in_bytes (TREE_TYPE (object));
3933 if (size >= 0)
3934 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3937 /* Find the maximum alignment we can assume for the object. */
3938 /* ??? Make use of DECL_OFFSET_ALIGN. */
3939 if (DECL_P (object))
3940 align = DECL_ALIGN (object);
3941 else
3942 align = TYPE_ALIGN (type);
3944 /* Do a block move either if the size is so small as to make
3945 each individual move a sub-unit move on average, or if it
3946 is so large as to make individual moves inefficient. */
3947 if (size > 0
3948 && num_nonzero_elements > 1
3949 && (size < num_nonzero_elements
3950 || !can_move_by_pieces (size, align)))
3952 if (notify_temp_creation)
3953 return GS_ERROR;
3955 walk_tree (&ctor, force_labels_r, NULL, NULL);
3956 ctor = tree_output_constant_def (ctor);
3957 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3958 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3959 TREE_OPERAND (*expr_p, 1) = ctor;
3961 /* This is no longer an assignment of a CONSTRUCTOR, but
3962 we still may have processing to do on the LHS. So
3963 pretend we didn't do anything here to let that happen. */
3964 return GS_UNHANDLED;
3968 /* If the target is volatile, we have non-zero elements and more than
3969 one field to assign, initialize the target from a temporary. */
3970 if (TREE_THIS_VOLATILE (object)
3971 && !TREE_ADDRESSABLE (type)
3972 && num_nonzero_elements > 0
3973 && vec_safe_length (elts) > 1)
3975 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
3976 TREE_OPERAND (*expr_p, 0) = temp;
3977 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3978 *expr_p,
3979 build2 (MODIFY_EXPR, void_type_node,
3980 object, temp));
3981 return GS_OK;
3984 if (notify_temp_creation)
3985 return GS_OK;
3987 /* If there are nonzero elements and if needed, pre-evaluate to capture
3988 elements overlapping with the lhs into temporaries. We must do this
3989 before clearing to fetch the values before they are zeroed-out. */
3990 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3992 preeval_data.lhs_base_decl = get_base_address (object);
3993 if (!DECL_P (preeval_data.lhs_base_decl))
3994 preeval_data.lhs_base_decl = NULL;
3995 preeval_data.lhs_alias_set = get_alias_set (object);
3997 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3998 pre_p, post_p, &preeval_data);
4001 bool ctor_has_side_effects_p
4002 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4004 if (cleared)
4006 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4007 Note that we still have to gimplify, in order to handle the
4008 case of variable sized types. Avoid shared tree structures. */
4009 CONSTRUCTOR_ELTS (ctor) = NULL;
4010 TREE_SIDE_EFFECTS (ctor) = 0;
4011 object = unshare_expr (object);
4012 gimplify_stmt (expr_p, pre_p);
4015 /* If we have not block cleared the object, or if there are nonzero
4016 elements in the constructor, or if the constructor has side effects,
4017 add assignments to the individual scalar fields of the object. */
4018 if (!cleared
4019 || num_nonzero_elements > 0
4020 || ctor_has_side_effects_p)
4021 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4023 *expr_p = NULL_TREE;
4025 break;
4027 case COMPLEX_TYPE:
4029 tree r, i;
4031 if (notify_temp_creation)
4032 return GS_OK;
4034 /* Extract the real and imaginary parts out of the ctor. */
4035 gcc_assert (elts->length () == 2);
4036 r = (*elts)[0].value;
4037 i = (*elts)[1].value;
4038 if (r == NULL || i == NULL)
4040 tree zero = build_zero_cst (TREE_TYPE (type));
4041 if (r == NULL)
4042 r = zero;
4043 if (i == NULL)
4044 i = zero;
4047 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4048 represent creation of a complex value. */
4049 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4051 ctor = build_complex (type, r, i);
4052 TREE_OPERAND (*expr_p, 1) = ctor;
4054 else
4056 ctor = build2 (COMPLEX_EXPR, type, r, i);
4057 TREE_OPERAND (*expr_p, 1) = ctor;
4058 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4059 pre_p,
4060 post_p,
4061 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4062 fb_rvalue);
4065 break;
4067 case VECTOR_TYPE:
4069 unsigned HOST_WIDE_INT ix;
4070 constructor_elt *ce;
4072 if (notify_temp_creation)
4073 return GS_OK;
4075 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4076 if (TREE_CONSTANT (ctor))
4078 bool constant_p = true;
4079 tree value;
4081 /* Even when ctor is constant, it might contain non-*_CST
4082 elements, such as addresses or trapping values like
4083 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4084 in VECTOR_CST nodes. */
4085 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4086 if (!CONSTANT_CLASS_P (value))
4088 constant_p = false;
4089 break;
4092 if (constant_p)
4094 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4095 break;
4098 TREE_CONSTANT (ctor) = 0;
4101 /* Vector types use CONSTRUCTOR all the way through gimple
4102 compilation as a general initializer. */
4103 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4105 enum gimplify_status tret;
4106 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4107 fb_rvalue);
4108 if (tret == GS_ERROR)
4109 ret = GS_ERROR;
4111 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4112 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4114 break;
4116 default:
4117 /* So how did we get a CONSTRUCTOR for a scalar type? */
4118 gcc_unreachable ();
4121 if (ret == GS_ERROR)
4122 return GS_ERROR;
4123 else if (want_value)
4125 *expr_p = object;
4126 return GS_OK;
4128 else
4130 /* If we have gimplified both sides of the initializer but have
4131 not emitted an assignment, do so now. */
4132 if (*expr_p)
4134 tree lhs = TREE_OPERAND (*expr_p, 0);
4135 tree rhs = TREE_OPERAND (*expr_p, 1);
4136 gassign *init = gimple_build_assign (lhs, rhs);
4137 gimplify_seq_add_stmt (pre_p, init);
4138 *expr_p = NULL;
4141 return GS_ALL_DONE;
4145 /* Given a pointer value OP0, return a simplified version of an
4146 indirection through OP0, or NULL_TREE if no simplification is
4147 possible. This may only be applied to a rhs of an expression.
4148 Note that the resulting type may be different from the type pointed
4149 to in the sense that it is still compatible from the langhooks
4150 point of view. */
4152 static tree
4153 gimple_fold_indirect_ref_rhs (tree t)
4155 return gimple_fold_indirect_ref (t);
4158 /* Subroutine of gimplify_modify_expr to do simplifications of
4159 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4160 something changes. */
4162 static enum gimplify_status
4163 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4164 gimple_seq *pre_p, gimple_seq *post_p,
4165 bool want_value)
4167 enum gimplify_status ret = GS_UNHANDLED;
4168 bool changed;
4172 changed = false;
4173 switch (TREE_CODE (*from_p))
4175 case VAR_DECL:
4176 /* If we're assigning from a read-only variable initialized with
4177 a constructor, do the direct assignment from the constructor,
4178 but only if neither source nor target are volatile since this
4179 latter assignment might end up being done on a per-field basis. */
4180 if (DECL_INITIAL (*from_p)
4181 && TREE_READONLY (*from_p)
4182 && !TREE_THIS_VOLATILE (*from_p)
4183 && !TREE_THIS_VOLATILE (*to_p)
4184 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4186 tree old_from = *from_p;
4187 enum gimplify_status subret;
4189 /* Move the constructor into the RHS. */
4190 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4192 /* Let's see if gimplify_init_constructor will need to put
4193 it in memory. */
4194 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4195 false, true);
4196 if (subret == GS_ERROR)
4198 /* If so, revert the change. */
4199 *from_p = old_from;
4201 else
4203 ret = GS_OK;
4204 changed = true;
4207 break;
4208 case INDIRECT_REF:
4210 /* If we have code like
4212 *(const A*)(A*)&x
4214 where the type of "x" is a (possibly cv-qualified variant
4215 of "A"), treat the entire expression as identical to "x".
4216 This kind of code arises in C++ when an object is bound
4217 to a const reference, and if "x" is a TARGET_EXPR we want
4218 to take advantage of the optimization below. */
4219 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4220 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4221 if (t)
4223 if (TREE_THIS_VOLATILE (t) != volatile_p)
4225 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4226 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4227 build_fold_addr_expr (t));
4228 if (REFERENCE_CLASS_P (t))
4229 TREE_THIS_VOLATILE (t) = volatile_p;
4231 *from_p = t;
4232 ret = GS_OK;
4233 changed = true;
4235 break;
4238 case TARGET_EXPR:
4240 /* If we are initializing something from a TARGET_EXPR, strip the
4241 TARGET_EXPR and initialize it directly, if possible. This can't
4242 be done if the initializer is void, since that implies that the
4243 temporary is set in some non-trivial way.
4245 ??? What about code that pulls out the temp and uses it
4246 elsewhere? I think that such code never uses the TARGET_EXPR as
4247 an initializer. If I'm wrong, we'll die because the temp won't
4248 have any RTL. In that case, I guess we'll need to replace
4249 references somehow. */
4250 tree init = TARGET_EXPR_INITIAL (*from_p);
4252 if (init
4253 && !VOID_TYPE_P (TREE_TYPE (init)))
4255 *from_p = init;
4256 ret = GS_OK;
4257 changed = true;
4260 break;
4262 case COMPOUND_EXPR:
4263 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4264 caught. */
4265 gimplify_compound_expr (from_p, pre_p, true);
4266 ret = GS_OK;
4267 changed = true;
4268 break;
4270 case CONSTRUCTOR:
4271 /* If we already made some changes, let the front end have a
4272 crack at this before we break it down. */
4273 if (ret != GS_UNHANDLED)
4274 break;
4275 /* If we're initializing from a CONSTRUCTOR, break this into
4276 individual MODIFY_EXPRs. */
4277 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4278 false);
4280 case COND_EXPR:
4281 /* If we're assigning to a non-register type, push the assignment
4282 down into the branches. This is mandatory for ADDRESSABLE types,
4283 since we cannot generate temporaries for such, but it saves a
4284 copy in other cases as well. */
4285 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4287 /* This code should mirror the code in gimplify_cond_expr. */
4288 enum tree_code code = TREE_CODE (*expr_p);
4289 tree cond = *from_p;
4290 tree result = *to_p;
4292 ret = gimplify_expr (&result, pre_p, post_p,
4293 is_gimple_lvalue, fb_lvalue);
4294 if (ret != GS_ERROR)
4295 ret = GS_OK;
4297 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4298 TREE_OPERAND (cond, 1)
4299 = build2 (code, void_type_node, result,
4300 TREE_OPERAND (cond, 1));
4301 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4302 TREE_OPERAND (cond, 2)
4303 = build2 (code, void_type_node, unshare_expr (result),
4304 TREE_OPERAND (cond, 2));
4306 TREE_TYPE (cond) = void_type_node;
4307 recalculate_side_effects (cond);
4309 if (want_value)
4311 gimplify_and_add (cond, pre_p);
4312 *expr_p = unshare_expr (result);
4314 else
4315 *expr_p = cond;
4316 return ret;
4318 break;
4320 case CALL_EXPR:
4321 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4322 return slot so that we don't generate a temporary. */
4323 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4324 && aggregate_value_p (*from_p, *from_p))
4326 bool use_target;
4328 if (!(rhs_predicate_for (*to_p))(*from_p))
4329 /* If we need a temporary, *to_p isn't accurate. */
4330 use_target = false;
4331 /* It's OK to use the return slot directly unless it's an NRV. */
4332 else if (TREE_CODE (*to_p) == RESULT_DECL
4333 && DECL_NAME (*to_p) == NULL_TREE
4334 && needs_to_live_in_memory (*to_p))
4335 use_target = true;
4336 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4337 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4338 /* Don't force regs into memory. */
4339 use_target = false;
4340 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4341 /* It's OK to use the target directly if it's being
4342 initialized. */
4343 use_target = true;
4344 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4345 /* Always use the target and thus RSO for variable-sized types.
4346 GIMPLE cannot deal with a variable-sized assignment
4347 embedded in a call statement. */
4348 use_target = true;
4349 else if (TREE_CODE (*to_p) != SSA_NAME
4350 && (!is_gimple_variable (*to_p)
4351 || needs_to_live_in_memory (*to_p)))
4352 /* Don't use the original target if it's already addressable;
4353 if its address escapes, and the called function uses the
4354 NRV optimization, a conforming program could see *to_p
4355 change before the called function returns; see c++/19317.
4356 When optimizing, the return_slot pass marks more functions
4357 as safe after we have escape info. */
4358 use_target = false;
4359 else
4360 use_target = true;
4362 if (use_target)
4364 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4365 mark_addressable (*to_p);
4368 break;
4370 case WITH_SIZE_EXPR:
4371 /* Likewise for calls that return an aggregate of non-constant size,
4372 since we would not be able to generate a temporary at all. */
4373 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4375 *from_p = TREE_OPERAND (*from_p, 0);
4376 /* We don't change ret in this case because the
4377 WITH_SIZE_EXPR might have been added in
4378 gimplify_modify_expr, so returning GS_OK would lead to an
4379 infinite loop. */
4380 changed = true;
4382 break;
4384 /* If we're initializing from a container, push the initialization
4385 inside it. */
4386 case CLEANUP_POINT_EXPR:
4387 case BIND_EXPR:
4388 case STATEMENT_LIST:
4390 tree wrap = *from_p;
4391 tree t;
4393 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4394 fb_lvalue);
4395 if (ret != GS_ERROR)
4396 ret = GS_OK;
4398 t = voidify_wrapper_expr (wrap, *expr_p);
4399 gcc_assert (t == *expr_p);
4401 if (want_value)
4403 gimplify_and_add (wrap, pre_p);
4404 *expr_p = unshare_expr (*to_p);
4406 else
4407 *expr_p = wrap;
4408 return GS_OK;
4411 case COMPOUND_LITERAL_EXPR:
4413 tree complit = TREE_OPERAND (*expr_p, 1);
4414 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4415 tree decl = DECL_EXPR_DECL (decl_s);
4416 tree init = DECL_INITIAL (decl);
4418 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4419 into struct T x = { 0, 1, 2 } if the address of the
4420 compound literal has never been taken. */
4421 if (!TREE_ADDRESSABLE (complit)
4422 && !TREE_ADDRESSABLE (decl)
4423 && init)
4425 *expr_p = copy_node (*expr_p);
4426 TREE_OPERAND (*expr_p, 1) = init;
4427 return GS_OK;
4431 default:
4432 break;
4435 while (changed);
4437 return ret;
4441 /* Return true if T looks like a valid GIMPLE statement. */
4443 static bool
4444 is_gimple_stmt (tree t)
4446 const enum tree_code code = TREE_CODE (t);
4448 switch (code)
4450 case NOP_EXPR:
4451 /* The only valid NOP_EXPR is the empty statement. */
4452 return IS_EMPTY_STMT (t);
4454 case BIND_EXPR:
4455 case COND_EXPR:
4456 /* These are only valid if they're void. */
4457 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4459 case SWITCH_EXPR:
4460 case GOTO_EXPR:
4461 case RETURN_EXPR:
4462 case LABEL_EXPR:
4463 case CASE_LABEL_EXPR:
4464 case TRY_CATCH_EXPR:
4465 case TRY_FINALLY_EXPR:
4466 case EH_FILTER_EXPR:
4467 case CATCH_EXPR:
4468 case ASM_EXPR:
4469 case STATEMENT_LIST:
4470 case OACC_PARALLEL:
4471 case OACC_KERNELS:
4472 case OACC_DATA:
4473 case OACC_HOST_DATA:
4474 case OACC_DECLARE:
4475 case OACC_UPDATE:
4476 case OACC_ENTER_DATA:
4477 case OACC_EXIT_DATA:
4478 case OACC_CACHE:
4479 case OMP_PARALLEL:
4480 case OMP_FOR:
4481 case OMP_SIMD:
4482 case CILK_SIMD:
4483 case OMP_DISTRIBUTE:
4484 case OACC_LOOP:
4485 case OMP_SECTIONS:
4486 case OMP_SECTION:
4487 case OMP_SINGLE:
4488 case OMP_MASTER:
4489 case OMP_TASKGROUP:
4490 case OMP_ORDERED:
4491 case OMP_CRITICAL:
4492 case OMP_TASK:
4493 /* These are always void. */
4494 return true;
4496 case CALL_EXPR:
4497 case MODIFY_EXPR:
4498 case PREDICT_EXPR:
4499 /* These are valid regardless of their type. */
4500 return true;
4502 default:
4503 return false;
4508 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4509 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4510 DECL_GIMPLE_REG_P set.
4512 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4513 other, unmodified part of the complex object just before the total store.
4514 As a consequence, if the object is still uninitialized, an undefined value
4515 will be loaded into a register, which may result in a spurious exception
4516 if the register is floating-point and the value happens to be a signaling
4517 NaN for example. Then the fully-fledged complex operations lowering pass
4518 followed by a DCE pass are necessary in order to fix things up. */
4520 static enum gimplify_status
4521 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4522 bool want_value)
4524 enum tree_code code, ocode;
4525 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4527 lhs = TREE_OPERAND (*expr_p, 0);
4528 rhs = TREE_OPERAND (*expr_p, 1);
4529 code = TREE_CODE (lhs);
4530 lhs = TREE_OPERAND (lhs, 0);
4532 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4533 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4534 TREE_NO_WARNING (other) = 1;
4535 other = get_formal_tmp_var (other, pre_p);
4537 realpart = code == REALPART_EXPR ? rhs : other;
4538 imagpart = code == REALPART_EXPR ? other : rhs;
4540 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4541 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4542 else
4543 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4545 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4546 *expr_p = (want_value) ? rhs : NULL_TREE;
4548 return GS_ALL_DONE;
4551 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4553 modify_expr
4554 : varname '=' rhs
4555 | '*' ID '=' rhs
4557 PRE_P points to the list where side effects that must happen before
4558 *EXPR_P should be stored.
4560 POST_P points to the list where side effects that must happen after
4561 *EXPR_P should be stored.
4563 WANT_VALUE is nonzero iff we want to use the value of this expression
4564 in another expression. */
4566 static enum gimplify_status
4567 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4568 bool want_value)
4570 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4571 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4572 enum gimplify_status ret = GS_UNHANDLED;
4573 gimple assign;
4574 location_t loc = EXPR_LOCATION (*expr_p);
4575 gimple_stmt_iterator gsi;
4577 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4578 || TREE_CODE (*expr_p) == INIT_EXPR);
4580 /* Trying to simplify a clobber using normal logic doesn't work,
4581 so handle it here. */
4582 if (TREE_CLOBBER_P (*from_p))
4584 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4585 if (ret == GS_ERROR)
4586 return ret;
4587 gcc_assert (!want_value
4588 && (TREE_CODE (*to_p) == VAR_DECL
4589 || TREE_CODE (*to_p) == MEM_REF));
4590 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4591 *expr_p = NULL;
4592 return GS_ALL_DONE;
4595 /* Insert pointer conversions required by the middle-end that are not
4596 required by the frontend. This fixes middle-end type checking for
4597 for example gcc.dg/redecl-6.c. */
4598 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4600 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4601 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4602 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4605 /* See if any simplifications can be done based on what the RHS is. */
4606 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4607 want_value);
4608 if (ret != GS_UNHANDLED)
4609 return ret;
4611 /* For zero sized types only gimplify the left hand side and right hand
4612 side as statements and throw away the assignment. Do this after
4613 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4614 types properly. */
4615 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4617 gimplify_stmt (from_p, pre_p);
4618 gimplify_stmt (to_p, pre_p);
4619 *expr_p = NULL_TREE;
4620 return GS_ALL_DONE;
4623 /* If the value being copied is of variable width, compute the length
4624 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4625 before gimplifying any of the operands so that we can resolve any
4626 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4627 the size of the expression to be copied, not of the destination, so
4628 that is what we must do here. */
4629 maybe_with_size_expr (from_p);
4631 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4632 if (ret == GS_ERROR)
4633 return ret;
4635 /* As a special case, we have to temporarily allow for assignments
4636 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4637 a toplevel statement, when gimplifying the GENERIC expression
4638 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4639 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4641 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4642 prevent gimplify_expr from trying to create a new temporary for
4643 foo's LHS, we tell it that it should only gimplify until it
4644 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4645 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4646 and all we need to do here is set 'a' to be its LHS. */
4647 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4648 fb_rvalue);
4649 if (ret == GS_ERROR)
4650 return ret;
4652 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
4653 size as argument to the the call. */
4654 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4656 tree call = TREE_OPERAND (*from_p, 0);
4657 tree vlasize = TREE_OPERAND (*from_p, 1);
4659 if (TREE_CODE (call) == CALL_EXPR
4660 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4662 int nargs = call_expr_nargs (call);
4663 tree type = TREE_TYPE (call);
4664 tree ap = CALL_EXPR_ARG (call, 0);
4665 tree tag = CALL_EXPR_ARG (call, 1);
4666 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
4667 IFN_VA_ARG, type,
4668 nargs + 1, ap, tag,
4669 vlasize);
4670 tree *call_p = &(TREE_OPERAND (*from_p, 0));
4671 *call_p = newcall;
4675 /* Now see if the above changed *from_p to something we handle specially. */
4676 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4677 want_value);
4678 if (ret != GS_UNHANDLED)
4679 return ret;
4681 /* If we've got a variable sized assignment between two lvalues (i.e. does
4682 not involve a call), then we can make things a bit more straightforward
4683 by converting the assignment to memcpy or memset. */
4684 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4686 tree from = TREE_OPERAND (*from_p, 0);
4687 tree size = TREE_OPERAND (*from_p, 1);
4689 if (TREE_CODE (from) == CONSTRUCTOR)
4690 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4692 if (is_gimple_addressable (from))
4694 *from_p = from;
4695 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4696 pre_p);
4700 /* Transform partial stores to non-addressable complex variables into
4701 total stores. This allows us to use real instead of virtual operands
4702 for these variables, which improves optimization. */
4703 if ((TREE_CODE (*to_p) == REALPART_EXPR
4704 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4705 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4706 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4708 /* Try to alleviate the effects of the gimplification creating artificial
4709 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4710 if (!gimplify_ctxp->into_ssa
4711 && TREE_CODE (*from_p) == VAR_DECL
4712 && DECL_IGNORED_P (*from_p)
4713 && DECL_P (*to_p)
4714 && !DECL_IGNORED_P (*to_p))
4716 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4717 DECL_NAME (*from_p)
4718 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4719 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4720 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4723 if (want_value && TREE_THIS_VOLATILE (*to_p))
4724 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4726 if (TREE_CODE (*from_p) == CALL_EXPR)
4728 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4729 instead of a GIMPLE_ASSIGN. */
4730 gcall *call_stmt;
4731 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4733 /* Gimplify internal functions created in the FEs. */
4734 int nargs = call_expr_nargs (*from_p), i;
4735 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4736 auto_vec<tree> vargs (nargs);
4738 for (i = 0; i < nargs; i++)
4740 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4741 EXPR_LOCATION (*from_p));
4742 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4744 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4745 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
4747 else
4749 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4750 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4751 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4752 tree fndecl = get_callee_fndecl (*from_p);
4753 if (fndecl
4754 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4755 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4756 && call_expr_nargs (*from_p) == 3)
4757 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4758 CALL_EXPR_ARG (*from_p, 0),
4759 CALL_EXPR_ARG (*from_p, 1),
4760 CALL_EXPR_ARG (*from_p, 2));
4761 else
4763 call_stmt = gimple_build_call_from_tree (*from_p);
4764 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4767 notice_special_calls (call_stmt);
4768 if (!gimple_call_noreturn_p (call_stmt))
4769 gimple_call_set_lhs (call_stmt, *to_p);
4770 assign = call_stmt;
4772 else
4774 assign = gimple_build_assign (*to_p, *from_p);
4775 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4778 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4780 /* We should have got an SSA name from the start. */
4781 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4784 gimplify_seq_add_stmt (pre_p, assign);
4785 gsi = gsi_last (*pre_p);
4786 maybe_fold_stmt (&gsi);
4788 if (want_value)
4790 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4791 return GS_OK;
4793 else
4794 *expr_p = NULL;
4796 return GS_ALL_DONE;
4799 /* Gimplify a comparison between two variable-sized objects. Do this
4800 with a call to BUILT_IN_MEMCMP. */
4802 static enum gimplify_status
4803 gimplify_variable_sized_compare (tree *expr_p)
4805 location_t loc = EXPR_LOCATION (*expr_p);
4806 tree op0 = TREE_OPERAND (*expr_p, 0);
4807 tree op1 = TREE_OPERAND (*expr_p, 1);
4808 tree t, arg, dest, src, expr;
4810 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4811 arg = unshare_expr (arg);
4812 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4813 src = build_fold_addr_expr_loc (loc, op1);
4814 dest = build_fold_addr_expr_loc (loc, op0);
4815 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4816 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4818 expr
4819 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4820 SET_EXPR_LOCATION (expr, loc);
4821 *expr_p = expr;
4823 return GS_OK;
4826 /* Gimplify a comparison between two aggregate objects of integral scalar
4827 mode as a comparison between the bitwise equivalent scalar values. */
4829 static enum gimplify_status
4830 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4832 location_t loc = EXPR_LOCATION (*expr_p);
4833 tree op0 = TREE_OPERAND (*expr_p, 0);
4834 tree op1 = TREE_OPERAND (*expr_p, 1);
4836 tree type = TREE_TYPE (op0);
4837 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4839 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4840 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4842 *expr_p
4843 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4845 return GS_OK;
4848 /* Gimplify an expression sequence. This function gimplifies each
4849 expression and rewrites the original expression with the last
4850 expression of the sequence in GIMPLE form.
4852 PRE_P points to the list where the side effects for all the
4853 expressions in the sequence will be emitted.
4855 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4857 static enum gimplify_status
4858 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4860 tree t = *expr_p;
4864 tree *sub_p = &TREE_OPERAND (t, 0);
4866 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4867 gimplify_compound_expr (sub_p, pre_p, false);
4868 else
4869 gimplify_stmt (sub_p, pre_p);
4871 t = TREE_OPERAND (t, 1);
4873 while (TREE_CODE (t) == COMPOUND_EXPR);
4875 *expr_p = t;
4876 if (want_value)
4877 return GS_OK;
4878 else
4880 gimplify_stmt (expr_p, pre_p);
4881 return GS_ALL_DONE;
4885 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4886 gimplify. After gimplification, EXPR_P will point to a new temporary
4887 that holds the original value of the SAVE_EXPR node.
4889 PRE_P points to the list where side effects that must happen before
4890 *EXPR_P should be stored. */
4892 static enum gimplify_status
4893 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4895 enum gimplify_status ret = GS_ALL_DONE;
4896 tree val;
4898 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4899 val = TREE_OPERAND (*expr_p, 0);
4901 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4902 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4904 /* The operand may be a void-valued expression such as SAVE_EXPRs
4905 generated by the Java frontend for class initialization. It is
4906 being executed only for its side-effects. */
4907 if (TREE_TYPE (val) == void_type_node)
4909 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4910 is_gimple_stmt, fb_none);
4911 val = NULL;
4913 else
4914 val = get_initialized_tmp_var (val, pre_p, post_p);
4916 TREE_OPERAND (*expr_p, 0) = val;
4917 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4920 *expr_p = val;
4922 return ret;
4925 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4927 unary_expr
4928 : ...
4929 | '&' varname
4932 PRE_P points to the list where side effects that must happen before
4933 *EXPR_P should be stored.
4935 POST_P points to the list where side effects that must happen after
4936 *EXPR_P should be stored. */
4938 static enum gimplify_status
4939 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4941 tree expr = *expr_p;
4942 tree op0 = TREE_OPERAND (expr, 0);
4943 enum gimplify_status ret;
4944 location_t loc = EXPR_LOCATION (*expr_p);
4946 switch (TREE_CODE (op0))
4948 case INDIRECT_REF:
4949 do_indirect_ref:
4950 /* Check if we are dealing with an expression of the form '&*ptr'.
4951 While the front end folds away '&*ptr' into 'ptr', these
4952 expressions may be generated internally by the compiler (e.g.,
4953 builtins like __builtin_va_end). */
4954 /* Caution: the silent array decomposition semantics we allow for
4955 ADDR_EXPR means we can't always discard the pair. */
4956 /* Gimplification of the ADDR_EXPR operand may drop
4957 cv-qualification conversions, so make sure we add them if
4958 needed. */
4960 tree op00 = TREE_OPERAND (op0, 0);
4961 tree t_expr = TREE_TYPE (expr);
4962 tree t_op00 = TREE_TYPE (op00);
4964 if (!useless_type_conversion_p (t_expr, t_op00))
4965 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4966 *expr_p = op00;
4967 ret = GS_OK;
4969 break;
4971 case VIEW_CONVERT_EXPR:
4972 /* Take the address of our operand and then convert it to the type of
4973 this ADDR_EXPR.
4975 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4976 all clear. The impact of this transformation is even less clear. */
4978 /* If the operand is a useless conversion, look through it. Doing so
4979 guarantees that the ADDR_EXPR and its operand will remain of the
4980 same type. */
4981 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4982 op0 = TREE_OPERAND (op0, 0);
4984 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4985 build_fold_addr_expr_loc (loc,
4986 TREE_OPERAND (op0, 0)));
4987 ret = GS_OK;
4988 break;
4990 default:
4991 /* If we see a call to a declared builtin or see its address
4992 being taken (we can unify those cases here) then we can mark
4993 the builtin for implicit generation by GCC. */
4994 if (TREE_CODE (op0) == FUNCTION_DECL
4995 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
4996 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
4997 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
4999 /* We use fb_either here because the C frontend sometimes takes
5000 the address of a call that returns a struct; see
5001 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5002 the implied temporary explicit. */
5004 /* Make the operand addressable. */
5005 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5006 is_gimple_addressable, fb_either);
5007 if (ret == GS_ERROR)
5008 break;
5010 /* Then mark it. Beware that it may not be possible to do so directly
5011 if a temporary has been created by the gimplification. */
5012 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5014 op0 = TREE_OPERAND (expr, 0);
5016 /* For various reasons, the gimplification of the expression
5017 may have made a new INDIRECT_REF. */
5018 if (TREE_CODE (op0) == INDIRECT_REF)
5019 goto do_indirect_ref;
5021 mark_addressable (TREE_OPERAND (expr, 0));
5023 /* The FEs may end up building ADDR_EXPRs early on a decl with
5024 an incomplete type. Re-build ADDR_EXPRs in canonical form
5025 here. */
5026 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5027 *expr_p = build_fold_addr_expr (op0);
5029 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5030 recompute_tree_invariant_for_addr_expr (*expr_p);
5032 /* If we re-built the ADDR_EXPR add a conversion to the original type
5033 if required. */
5034 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5035 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5037 break;
5040 return ret;
5043 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5044 value; output operands should be a gimple lvalue. */
5046 static enum gimplify_status
5047 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5049 tree expr;
5050 int noutputs;
5051 const char **oconstraints;
5052 int i;
5053 tree link;
5054 const char *constraint;
5055 bool allows_mem, allows_reg, is_inout;
5056 enum gimplify_status ret, tret;
5057 gasm *stmt;
5058 vec<tree, va_gc> *inputs;
5059 vec<tree, va_gc> *outputs;
5060 vec<tree, va_gc> *clobbers;
5061 vec<tree, va_gc> *labels;
5062 tree link_next;
5064 expr = *expr_p;
5065 noutputs = list_length (ASM_OUTPUTS (expr));
5066 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5068 inputs = NULL;
5069 outputs = NULL;
5070 clobbers = NULL;
5071 labels = NULL;
5073 ret = GS_ALL_DONE;
5074 link_next = NULL_TREE;
5075 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5077 bool ok;
5078 size_t constraint_len;
5080 link_next = TREE_CHAIN (link);
5082 oconstraints[i]
5083 = constraint
5084 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5085 constraint_len = strlen (constraint);
5086 if (constraint_len == 0)
5087 continue;
5089 ok = parse_output_constraint (&constraint, i, 0, 0,
5090 &allows_mem, &allows_reg, &is_inout);
5091 if (!ok)
5093 ret = GS_ERROR;
5094 is_inout = false;
5097 if (!allows_reg && allows_mem)
5098 mark_addressable (TREE_VALUE (link));
5100 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5101 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5102 fb_lvalue | fb_mayfail);
5103 if (tret == GS_ERROR)
5105 error ("invalid lvalue in asm output %d", i);
5106 ret = tret;
5109 vec_safe_push (outputs, link);
5110 TREE_CHAIN (link) = NULL_TREE;
5112 if (is_inout)
5114 /* An input/output operand. To give the optimizers more
5115 flexibility, split it into separate input and output
5116 operands. */
5117 tree input;
5118 char buf[10];
5120 /* Turn the in/out constraint into an output constraint. */
5121 char *p = xstrdup (constraint);
5122 p[0] = '=';
5123 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5125 /* And add a matching input constraint. */
5126 if (allows_reg)
5128 sprintf (buf, "%d", i);
5130 /* If there are multiple alternatives in the constraint,
5131 handle each of them individually. Those that allow register
5132 will be replaced with operand number, the others will stay
5133 unchanged. */
5134 if (strchr (p, ',') != NULL)
5136 size_t len = 0, buflen = strlen (buf);
5137 char *beg, *end, *str, *dst;
5139 for (beg = p + 1;;)
5141 end = strchr (beg, ',');
5142 if (end == NULL)
5143 end = strchr (beg, '\0');
5144 if ((size_t) (end - beg) < buflen)
5145 len += buflen + 1;
5146 else
5147 len += end - beg + 1;
5148 if (*end)
5149 beg = end + 1;
5150 else
5151 break;
5154 str = (char *) alloca (len);
5155 for (beg = p + 1, dst = str;;)
5157 const char *tem;
5158 bool mem_p, reg_p, inout_p;
5160 end = strchr (beg, ',');
5161 if (end)
5162 *end = '\0';
5163 beg[-1] = '=';
5164 tem = beg - 1;
5165 parse_output_constraint (&tem, i, 0, 0,
5166 &mem_p, &reg_p, &inout_p);
5167 if (dst != str)
5168 *dst++ = ',';
5169 if (reg_p)
5171 memcpy (dst, buf, buflen);
5172 dst += buflen;
5174 else
5176 if (end)
5177 len = end - beg;
5178 else
5179 len = strlen (beg);
5180 memcpy (dst, beg, len);
5181 dst += len;
5183 if (end)
5184 beg = end + 1;
5185 else
5186 break;
5188 *dst = '\0';
5189 input = build_string (dst - str, str);
5191 else
5192 input = build_string (strlen (buf), buf);
5194 else
5195 input = build_string (constraint_len - 1, constraint + 1);
5197 free (p);
5199 input = build_tree_list (build_tree_list (NULL_TREE, input),
5200 unshare_expr (TREE_VALUE (link)));
5201 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5205 link_next = NULL_TREE;
5206 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5208 link_next = TREE_CHAIN (link);
5209 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5210 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5211 oconstraints, &allows_mem, &allows_reg);
5213 /* If we can't make copies, we can only accept memory. */
5214 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5216 if (allows_mem)
5217 allows_reg = 0;
5218 else
5220 error ("impossible constraint in %<asm%>");
5221 error ("non-memory input %d must stay in memory", i);
5222 return GS_ERROR;
5226 /* If the operand is a memory input, it should be an lvalue. */
5227 if (!allows_reg && allows_mem)
5229 tree inputv = TREE_VALUE (link);
5230 STRIP_NOPS (inputv);
5231 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5232 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5233 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5234 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5235 TREE_VALUE (link) = error_mark_node;
5236 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5237 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5238 mark_addressable (TREE_VALUE (link));
5239 if (tret == GS_ERROR)
5241 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5242 input_location = EXPR_LOCATION (TREE_VALUE (link));
5243 error ("memory input %d is not directly addressable", i);
5244 ret = tret;
5247 else
5249 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5250 is_gimple_asm_val, fb_rvalue);
5251 if (tret == GS_ERROR)
5252 ret = tret;
5255 TREE_CHAIN (link) = NULL_TREE;
5256 vec_safe_push (inputs, link);
5259 link_next = NULL_TREE;
5260 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5262 link_next = TREE_CHAIN (link);
5263 TREE_CHAIN (link) = NULL_TREE;
5264 vec_safe_push (clobbers, link);
5267 link_next = NULL_TREE;
5268 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5270 link_next = TREE_CHAIN (link);
5271 TREE_CHAIN (link) = NULL_TREE;
5272 vec_safe_push (labels, link);
5275 /* Do not add ASMs with errors to the gimple IL stream. */
5276 if (ret != GS_ERROR)
5278 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5279 inputs, outputs, clobbers, labels);
5281 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5282 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5284 gimplify_seq_add_stmt (pre_p, stmt);
5287 return ret;
5290 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5291 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5292 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5293 return to this function.
5295 FIXME should we complexify the prequeue handling instead? Or use flags
5296 for all the cleanups and let the optimizer tighten them up? The current
5297 code seems pretty fragile; it will break on a cleanup within any
5298 non-conditional nesting. But any such nesting would be broken, anyway;
5299 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5300 and continues out of it. We can do that at the RTL level, though, so
5301 having an optimizer to tighten up try/finally regions would be a Good
5302 Thing. */
5304 static enum gimplify_status
5305 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5307 gimple_stmt_iterator iter;
5308 gimple_seq body_sequence = NULL;
5310 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5312 /* We only care about the number of conditions between the innermost
5313 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5314 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5315 int old_conds = gimplify_ctxp->conditions;
5316 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5317 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5318 gimplify_ctxp->conditions = 0;
5319 gimplify_ctxp->conditional_cleanups = NULL;
5320 gimplify_ctxp->in_cleanup_point_expr = true;
5322 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5324 gimplify_ctxp->conditions = old_conds;
5325 gimplify_ctxp->conditional_cleanups = old_cleanups;
5326 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5328 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5330 gimple wce = gsi_stmt (iter);
5332 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5334 if (gsi_one_before_end_p (iter))
5336 /* Note that gsi_insert_seq_before and gsi_remove do not
5337 scan operands, unlike some other sequence mutators. */
5338 if (!gimple_wce_cleanup_eh_only (wce))
5339 gsi_insert_seq_before_without_update (&iter,
5340 gimple_wce_cleanup (wce),
5341 GSI_SAME_STMT);
5342 gsi_remove (&iter, true);
5343 break;
5345 else
5347 gtry *gtry;
5348 gimple_seq seq;
5349 enum gimple_try_flags kind;
5351 if (gimple_wce_cleanup_eh_only (wce))
5352 kind = GIMPLE_TRY_CATCH;
5353 else
5354 kind = GIMPLE_TRY_FINALLY;
5355 seq = gsi_split_seq_after (iter);
5357 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5358 /* Do not use gsi_replace here, as it may scan operands.
5359 We want to do a simple structural modification only. */
5360 gsi_set_stmt (&iter, gtry);
5361 iter = gsi_start (gtry->eval);
5364 else
5365 gsi_next (&iter);
5368 gimplify_seq_add_seq (pre_p, body_sequence);
5369 if (temp)
5371 *expr_p = temp;
5372 return GS_OK;
5374 else
5376 *expr_p = NULL;
5377 return GS_ALL_DONE;
5381 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5382 is the cleanup action required. EH_ONLY is true if the cleanup should
5383 only be executed if an exception is thrown, not on normal exit. */
5385 static void
5386 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5388 gimple wce;
5389 gimple_seq cleanup_stmts = NULL;
5391 /* Errors can result in improperly nested cleanups. Which results in
5392 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5393 if (seen_error ())
5394 return;
5396 if (gimple_conditional_context ())
5398 /* If we're in a conditional context, this is more complex. We only
5399 want to run the cleanup if we actually ran the initialization that
5400 necessitates it, but we want to run it after the end of the
5401 conditional context. So we wrap the try/finally around the
5402 condition and use a flag to determine whether or not to actually
5403 run the destructor. Thus
5405 test ? f(A()) : 0
5407 becomes (approximately)
5409 flag = 0;
5410 try {
5411 if (test) { A::A(temp); flag = 1; val = f(temp); }
5412 else { val = 0; }
5413 } finally {
5414 if (flag) A::~A(temp);
5418 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5419 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5420 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5422 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5423 gimplify_stmt (&cleanup, &cleanup_stmts);
5424 wce = gimple_build_wce (cleanup_stmts);
5426 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5427 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5428 gimplify_seq_add_stmt (pre_p, ftrue);
5430 /* Because of this manipulation, and the EH edges that jump
5431 threading cannot redirect, the temporary (VAR) will appear
5432 to be used uninitialized. Don't warn. */
5433 TREE_NO_WARNING (var) = 1;
5435 else
5437 gimplify_stmt (&cleanup, &cleanup_stmts);
5438 wce = gimple_build_wce (cleanup_stmts);
5439 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5440 gimplify_seq_add_stmt (pre_p, wce);
5444 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5446 static enum gimplify_status
5447 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5449 tree targ = *expr_p;
5450 tree temp = TARGET_EXPR_SLOT (targ);
5451 tree init = TARGET_EXPR_INITIAL (targ);
5452 enum gimplify_status ret;
5454 if (init)
5456 tree cleanup = NULL_TREE;
5458 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5459 to the temps list. Handle also variable length TARGET_EXPRs. */
5460 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5462 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5463 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5464 gimplify_vla_decl (temp, pre_p);
5466 else
5467 gimple_add_tmp_var (temp);
5469 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5470 expression is supposed to initialize the slot. */
5471 if (VOID_TYPE_P (TREE_TYPE (init)))
5472 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5473 else
5475 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5476 init = init_expr;
5477 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5478 init = NULL;
5479 ggc_free (init_expr);
5481 if (ret == GS_ERROR)
5483 /* PR c++/28266 Make sure this is expanded only once. */
5484 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5485 return GS_ERROR;
5487 if (init)
5488 gimplify_and_add (init, pre_p);
5490 /* If needed, push the cleanup for the temp. */
5491 if (TARGET_EXPR_CLEANUP (targ))
5493 if (CLEANUP_EH_ONLY (targ))
5494 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5495 CLEANUP_EH_ONLY (targ), pre_p);
5496 else
5497 cleanup = TARGET_EXPR_CLEANUP (targ);
5500 /* Add a clobber for the temporary going out of scope, like
5501 gimplify_bind_expr. */
5502 if (gimplify_ctxp->in_cleanup_point_expr
5503 && needs_to_live_in_memory (temp)
5504 && flag_stack_reuse == SR_ALL)
5506 tree clobber = build_constructor (TREE_TYPE (temp),
5507 NULL);
5508 TREE_THIS_VOLATILE (clobber) = true;
5509 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5510 if (cleanup)
5511 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5512 clobber);
5513 else
5514 cleanup = clobber;
5517 if (cleanup)
5518 gimple_push_cleanup (temp, cleanup, false, pre_p);
5520 /* Only expand this once. */
5521 TREE_OPERAND (targ, 3) = init;
5522 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5524 else
5525 /* We should have expanded this before. */
5526 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5528 *expr_p = temp;
5529 return GS_OK;
5532 /* Gimplification of expression trees. */
5534 /* Gimplify an expression which appears at statement context. The
5535 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5536 NULL, a new sequence is allocated.
5538 Return true if we actually added a statement to the queue. */
5540 bool
5541 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5543 gimple_seq_node last;
5545 last = gimple_seq_last (*seq_p);
5546 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5547 return last != gimple_seq_last (*seq_p);
5550 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5551 to CTX. If entries already exist, force them to be some flavor of private.
5552 If there is no enclosing parallel, do nothing. */
5554 void
5555 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5557 splay_tree_node n;
5559 if (decl == NULL || !DECL_P (decl))
5560 return;
5564 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5565 if (n != NULL)
5567 if (n->value & GOVD_SHARED)
5568 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5569 else if (n->value & GOVD_MAP)
5570 n->value |= GOVD_MAP_TO_ONLY;
5571 else
5572 return;
5574 else if (ctx->region_type == ORT_TARGET)
5575 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5576 else if (ctx->region_type != ORT_WORKSHARE
5577 && ctx->region_type != ORT_SIMD
5578 && ctx->region_type != ORT_TARGET_DATA)
5579 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5581 ctx = ctx->outer_context;
5583 while (ctx);
5586 /* Similarly for each of the type sizes of TYPE. */
5588 static void
5589 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5591 if (type == NULL || type == error_mark_node)
5592 return;
5593 type = TYPE_MAIN_VARIANT (type);
5595 if (ctx->privatized_types->add (type))
5596 return;
5598 switch (TREE_CODE (type))
5600 case INTEGER_TYPE:
5601 case ENUMERAL_TYPE:
5602 case BOOLEAN_TYPE:
5603 case REAL_TYPE:
5604 case FIXED_POINT_TYPE:
5605 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5606 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5607 break;
5609 case ARRAY_TYPE:
5610 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5611 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5612 break;
5614 case RECORD_TYPE:
5615 case UNION_TYPE:
5616 case QUAL_UNION_TYPE:
5618 tree field;
5619 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5620 if (TREE_CODE (field) == FIELD_DECL)
5622 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5623 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5626 break;
5628 case POINTER_TYPE:
5629 case REFERENCE_TYPE:
5630 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5631 break;
5633 default:
5634 break;
5637 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5638 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5639 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5642 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5644 static void
5645 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5647 splay_tree_node n;
5648 unsigned int nflags;
5649 tree t;
5651 if (error_operand_p (decl))
5652 return;
5654 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5655 there are constructors involved somewhere. */
5656 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5657 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5658 flags |= GOVD_SEEN;
5660 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5661 if (n != NULL && n->value != GOVD_ALIGNED)
5663 /* We shouldn't be re-adding the decl with the same data
5664 sharing class. */
5665 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5666 /* The only combination of data sharing classes we should see is
5667 FIRSTPRIVATE and LASTPRIVATE. */
5668 nflags = n->value | flags;
5669 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5670 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5671 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5672 n->value = nflags;
5673 return;
5676 /* When adding a variable-sized variable, we have to handle all sorts
5677 of additional bits of data: the pointer replacement variable, and
5678 the parameters of the type. */
5679 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5681 /* Add the pointer replacement variable as PRIVATE if the variable
5682 replacement is private, else FIRSTPRIVATE since we'll need the
5683 address of the original variable either for SHARED, or for the
5684 copy into or out of the context. */
5685 if (!(flags & GOVD_LOCAL))
5687 if (flags & GOVD_MAP)
5688 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5689 else if (flags & GOVD_PRIVATE)
5690 nflags = GOVD_PRIVATE;
5691 else
5692 nflags = GOVD_FIRSTPRIVATE;
5693 nflags |= flags & GOVD_SEEN;
5694 t = DECL_VALUE_EXPR (decl);
5695 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5696 t = TREE_OPERAND (t, 0);
5697 gcc_assert (DECL_P (t));
5698 omp_add_variable (ctx, t, nflags);
5701 /* Add all of the variable and type parameters (which should have
5702 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5703 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5704 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5705 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5707 /* The variable-sized variable itself is never SHARED, only some form
5708 of PRIVATE. The sharing would take place via the pointer variable
5709 which we remapped above. */
5710 if (flags & GOVD_SHARED)
5711 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5712 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5714 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5715 alloca statement we generate for the variable, so make sure it
5716 is available. This isn't automatically needed for the SHARED
5717 case, since we won't be allocating local storage then.
5718 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5719 in this case omp_notice_variable will be called later
5720 on when it is gimplified. */
5721 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5722 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5723 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5725 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5726 && lang_hooks.decls.omp_privatize_by_reference (decl))
5728 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5730 /* Similar to the direct variable sized case above, we'll need the
5731 size of references being privatized. */
5732 if ((flags & GOVD_SHARED) == 0)
5734 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5735 if (TREE_CODE (t) != INTEGER_CST)
5736 omp_notice_variable (ctx, t, true);
5740 if (n != NULL)
5741 n->value |= flags;
5742 else
5743 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5746 /* Notice a threadprivate variable DECL used in OMP context CTX.
5747 This just prints out diagnostics about threadprivate variable uses
5748 in untied tasks. If DECL2 is non-NULL, prevent this warning
5749 on that variable. */
5751 static bool
5752 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5753 tree decl2)
5755 splay_tree_node n;
5756 struct gimplify_omp_ctx *octx;
5758 for (octx = ctx; octx; octx = octx->outer_context)
5759 if (octx->region_type == ORT_TARGET)
5761 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5762 if (n == NULL)
5764 error ("threadprivate variable %qE used in target region",
5765 DECL_NAME (decl));
5766 error_at (octx->location, "enclosing target region");
5767 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5769 if (decl2)
5770 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5773 if (ctx->region_type != ORT_UNTIED_TASK)
5774 return false;
5775 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5776 if (n == NULL)
5778 error ("threadprivate variable %qE used in untied task",
5779 DECL_NAME (decl));
5780 error_at (ctx->location, "enclosing task");
5781 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5783 if (decl2)
5784 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5785 return false;
5788 /* Record the fact that DECL was used within the OMP context CTX.
5789 IN_CODE is true when real code uses DECL, and false when we should
5790 merely emit default(none) errors. Return true if DECL is going to
5791 be remapped and thus DECL shouldn't be gimplified into its
5792 DECL_VALUE_EXPR (if any). */
5794 static bool
5795 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5797 splay_tree_node n;
5798 unsigned flags = in_code ? GOVD_SEEN : 0;
5799 bool ret = false, shared;
5801 if (error_operand_p (decl))
5802 return false;
5804 /* Threadprivate variables are predetermined. */
5805 if (is_global_var (decl))
5807 if (DECL_THREAD_LOCAL_P (decl))
5808 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5810 if (DECL_HAS_VALUE_EXPR_P (decl))
5812 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5814 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5815 return omp_notice_threadprivate_variable (ctx, decl, value);
5819 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5820 if (ctx->region_type == ORT_TARGET)
5822 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5823 if (n == NULL)
5825 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5827 error ("%qD referenced in target region does not have "
5828 "a mappable type", decl);
5829 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5831 else
5832 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5834 else
5836 /* If nothing changed, there's nothing left to do. */
5837 if ((n->value & flags) == flags)
5838 return ret;
5839 n->value |= flags;
5841 goto do_outer;
5844 if (n == NULL)
5846 enum omp_clause_default_kind default_kind, kind;
5847 struct gimplify_omp_ctx *octx;
5849 if (ctx->region_type == ORT_WORKSHARE
5850 || ctx->region_type == ORT_SIMD
5851 || ctx->region_type == ORT_TARGET_DATA)
5852 goto do_outer;
5854 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5855 remapped firstprivate instead of shared. To some extent this is
5856 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5857 default_kind = ctx->default_kind;
5858 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5859 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5860 default_kind = kind;
5862 switch (default_kind)
5864 case OMP_CLAUSE_DEFAULT_NONE:
5865 if ((ctx->region_type & ORT_PARALLEL) != 0)
5867 error ("%qE not specified in enclosing parallel",
5868 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5869 error_at (ctx->location, "enclosing parallel");
5871 else if ((ctx->region_type & ORT_TASK) != 0)
5873 error ("%qE not specified in enclosing task",
5874 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5875 error_at (ctx->location, "enclosing task");
5877 else if (ctx->region_type & ORT_TEAMS)
5879 error ("%qE not specified in enclosing teams construct",
5880 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5881 error_at (ctx->location, "enclosing teams construct");
5883 else
5884 gcc_unreachable ();
5885 /* FALLTHRU */
5886 case OMP_CLAUSE_DEFAULT_SHARED:
5887 flags |= GOVD_SHARED;
5888 break;
5889 case OMP_CLAUSE_DEFAULT_PRIVATE:
5890 flags |= GOVD_PRIVATE;
5891 break;
5892 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5893 flags |= GOVD_FIRSTPRIVATE;
5894 break;
5895 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5896 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5897 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5898 if (ctx->outer_context)
5899 omp_notice_variable (ctx->outer_context, decl, in_code);
5900 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5902 splay_tree_node n2;
5904 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5905 continue;
5906 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5907 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5909 flags |= GOVD_FIRSTPRIVATE;
5910 break;
5912 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5913 break;
5915 if (flags & GOVD_FIRSTPRIVATE)
5916 break;
5917 if (octx == NULL
5918 && (TREE_CODE (decl) == PARM_DECL
5919 || (!is_global_var (decl)
5920 && DECL_CONTEXT (decl) == current_function_decl)))
5922 flags |= GOVD_FIRSTPRIVATE;
5923 break;
5925 flags |= GOVD_SHARED;
5926 break;
5927 default:
5928 gcc_unreachable ();
5931 if ((flags & GOVD_PRIVATE)
5932 && lang_hooks.decls.omp_private_outer_ref (decl))
5933 flags |= GOVD_PRIVATE_OUTER_REF;
5935 omp_add_variable (ctx, decl, flags);
5937 shared = (flags & GOVD_SHARED) != 0;
5938 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5939 goto do_outer;
5942 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5943 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5944 && DECL_SIZE (decl)
5945 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5947 splay_tree_node n2;
5948 tree t = DECL_VALUE_EXPR (decl);
5949 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5950 t = TREE_OPERAND (t, 0);
5951 gcc_assert (DECL_P (t));
5952 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5953 n2->value |= GOVD_SEEN;
5956 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5957 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5959 /* If nothing changed, there's nothing left to do. */
5960 if ((n->value & flags) == flags)
5961 return ret;
5962 flags |= n->value;
5963 n->value = flags;
5965 do_outer:
5966 /* If the variable is private in the current context, then we don't
5967 need to propagate anything to an outer context. */
5968 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5969 return ret;
5970 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
5971 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
5972 return ret;
5973 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5974 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
5975 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
5976 return ret;
5977 if (ctx->outer_context
5978 && omp_notice_variable (ctx->outer_context, decl, in_code))
5979 return true;
5980 return ret;
5983 /* Verify that DECL is private within CTX. If there's specific information
5984 to the contrary in the innermost scope, generate an error. */
5986 static bool
5987 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
5989 splay_tree_node n;
5991 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5992 if (n != NULL)
5994 if (n->value & GOVD_SHARED)
5996 if (ctx == gimplify_omp_ctxp)
5998 if (simd)
5999 error ("iteration variable %qE is predetermined linear",
6000 DECL_NAME (decl));
6001 else
6002 error ("iteration variable %qE should be private",
6003 DECL_NAME (decl));
6004 n->value = GOVD_PRIVATE;
6005 return true;
6007 else
6008 return false;
6010 else if ((n->value & GOVD_EXPLICIT) != 0
6011 && (ctx == gimplify_omp_ctxp
6012 || (ctx->region_type == ORT_COMBINED_PARALLEL
6013 && gimplify_omp_ctxp->outer_context == ctx)))
6015 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6016 error ("iteration variable %qE should not be firstprivate",
6017 DECL_NAME (decl));
6018 else if ((n->value & GOVD_REDUCTION) != 0)
6019 error ("iteration variable %qE should not be reduction",
6020 DECL_NAME (decl));
6021 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6022 error ("iteration variable %qE should not be lastprivate",
6023 DECL_NAME (decl));
6024 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6025 error ("iteration variable %qE should not be private",
6026 DECL_NAME (decl));
6027 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6028 error ("iteration variable %qE is predetermined linear",
6029 DECL_NAME (decl));
6031 return (ctx == gimplify_omp_ctxp
6032 || (ctx->region_type == ORT_COMBINED_PARALLEL
6033 && gimplify_omp_ctxp->outer_context == ctx));
6036 if (ctx->region_type != ORT_WORKSHARE
6037 && ctx->region_type != ORT_SIMD)
6038 return false;
6039 else if (ctx->outer_context)
6040 return omp_is_private (ctx->outer_context, decl, simd);
6041 return false;
6044 /* Return true if DECL is private within a parallel region
6045 that binds to the current construct's context or in parallel
6046 region's REDUCTION clause. */
6048 static bool
6049 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6051 splay_tree_node n;
6055 ctx = ctx->outer_context;
6056 if (ctx == NULL)
6057 return !(is_global_var (decl)
6058 /* References might be private, but might be shared too,
6059 when checking for copyprivate, assume they might be
6060 private, otherwise assume they might be shared. */
6061 || (!copyprivate
6062 && lang_hooks.decls.omp_privatize_by_reference (decl)));
6064 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
6065 continue;
6067 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6068 if (n != NULL)
6069 return (n->value & GOVD_SHARED) == 0;
6071 while (ctx->region_type == ORT_WORKSHARE
6072 || ctx->region_type == ORT_SIMD);
6073 return false;
6076 /* Return true if the CTX is combined with distribute and thus
6077 lastprivate can't be supported. */
6079 static bool
6080 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6084 if (ctx->outer_context == NULL)
6085 return false;
6086 ctx = ctx->outer_context;
6087 switch (ctx->region_type)
6089 case ORT_WORKSHARE:
6090 if (!ctx->combined_loop)
6091 return false;
6092 if (ctx->distribute)
6093 return true;
6094 break;
6095 case ORT_COMBINED_PARALLEL:
6096 break;
6097 case ORT_COMBINED_TEAMS:
6098 return true;
6099 default:
6100 return false;
6103 while (1);
6106 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6107 and previous omp contexts. */
6109 static void
6110 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6111 enum omp_region_type region_type)
6113 struct gimplify_omp_ctx *ctx, *outer_ctx;
6114 tree c;
6116 ctx = new_omp_context (region_type);
6117 outer_ctx = ctx->outer_context;
6119 while ((c = *list_p) != NULL)
6121 bool remove = false;
6122 bool notice_outer = true;
6123 const char *check_non_private = NULL;
6124 unsigned int flags;
6125 tree decl;
6127 switch (OMP_CLAUSE_CODE (c))
6129 case OMP_CLAUSE_PRIVATE:
6130 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6131 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6133 flags |= GOVD_PRIVATE_OUTER_REF;
6134 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6136 else
6137 notice_outer = false;
6138 goto do_add;
6139 case OMP_CLAUSE_SHARED:
6140 flags = GOVD_SHARED | GOVD_EXPLICIT;
6141 goto do_add;
6142 case OMP_CLAUSE_FIRSTPRIVATE:
6143 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6144 check_non_private = "firstprivate";
6145 goto do_add;
6146 case OMP_CLAUSE_LASTPRIVATE:
6147 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6148 check_non_private = "lastprivate";
6149 decl = OMP_CLAUSE_DECL (c);
6150 if (omp_no_lastprivate (ctx))
6152 notice_outer = false;
6153 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6155 else if (error_operand_p (decl))
6156 goto do_add;
6157 else if (outer_ctx
6158 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
6159 && splay_tree_lookup (outer_ctx->variables,
6160 (splay_tree_key) decl) == NULL)
6161 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6162 else if (outer_ctx
6163 && outer_ctx->region_type == ORT_WORKSHARE
6164 && outer_ctx->combined_loop
6165 && splay_tree_lookup (outer_ctx->variables,
6166 (splay_tree_key) decl) == NULL
6167 && !omp_check_private (outer_ctx, decl, false))
6169 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6170 if (outer_ctx->outer_context
6171 && (outer_ctx->outer_context->region_type
6172 == ORT_COMBINED_PARALLEL)
6173 && splay_tree_lookup (outer_ctx->outer_context->variables,
6174 (splay_tree_key) decl) == NULL)
6175 omp_add_variable (outer_ctx->outer_context, decl,
6176 GOVD_SHARED | GOVD_SEEN);
6178 goto do_add;
6179 case OMP_CLAUSE_REDUCTION:
6180 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6181 check_non_private = "reduction";
6182 goto do_add;
6183 case OMP_CLAUSE_LINEAR:
6184 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6185 is_gimple_val, fb_rvalue) == GS_ERROR)
6187 remove = true;
6188 break;
6190 else
6192 /* For combined #pragma omp parallel for simd, need to put
6193 lastprivate and perhaps firstprivate too on the
6194 parallel. Similarly for #pragma omp for simd. */
6195 struct gimplify_omp_ctx *octx = outer_ctx;
6196 decl = NULL_TREE;
6197 if (omp_no_lastprivate (ctx))
6198 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6201 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6202 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6203 break;
6204 decl = OMP_CLAUSE_DECL (c);
6205 if (error_operand_p (decl))
6207 decl = NULL_TREE;
6208 break;
6210 if (octx
6211 && octx->region_type == ORT_WORKSHARE
6212 && octx->combined_loop)
6214 if (octx->outer_context
6215 && (octx->outer_context->region_type
6216 == ORT_COMBINED_PARALLEL
6217 || (octx->outer_context->region_type
6218 == ORT_COMBINED_TEAMS)))
6219 octx = octx->outer_context;
6220 else if (omp_check_private (octx, decl, false))
6221 break;
6223 else
6224 break;
6225 gcc_checking_assert (splay_tree_lookup (octx->variables,
6226 (splay_tree_key)
6227 decl) == NULL);
6228 flags = GOVD_SEEN;
6229 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6230 flags |= GOVD_FIRSTPRIVATE;
6231 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6232 flags |= GOVD_LASTPRIVATE;
6233 omp_add_variable (octx, decl, flags);
6234 if (octx->outer_context == NULL)
6235 break;
6236 octx = octx->outer_context;
6238 while (1);
6239 if (octx
6240 && decl
6241 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6242 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6243 omp_notice_variable (octx, decl, true);
6245 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6246 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6247 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6249 notice_outer = false;
6250 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6252 goto do_add;
6254 case OMP_CLAUSE_MAP:
6255 decl = OMP_CLAUSE_DECL (c);
6256 if (error_operand_p (decl))
6258 remove = true;
6259 break;
6261 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6262 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6263 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6264 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6265 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6267 remove = true;
6268 break;
6270 if (!DECL_P (decl))
6272 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6273 NULL, is_gimple_lvalue, fb_lvalue)
6274 == GS_ERROR)
6276 remove = true;
6277 break;
6279 break;
6281 flags = GOVD_MAP | GOVD_EXPLICIT;
6282 goto do_add;
6284 case OMP_CLAUSE_DEPEND:
6285 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6287 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6288 NULL, is_gimple_val, fb_rvalue);
6289 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6291 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6293 remove = true;
6294 break;
6296 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6297 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6298 is_gimple_val, fb_rvalue) == GS_ERROR)
6300 remove = true;
6301 break;
6303 break;
6305 case OMP_CLAUSE_TO:
6306 case OMP_CLAUSE_FROM:
6307 case OMP_CLAUSE__CACHE_:
6308 decl = OMP_CLAUSE_DECL (c);
6309 if (error_operand_p (decl))
6311 remove = true;
6312 break;
6314 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6315 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6316 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6317 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6318 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6320 remove = true;
6321 break;
6323 if (!DECL_P (decl))
6325 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6326 NULL, is_gimple_lvalue, fb_lvalue)
6327 == GS_ERROR)
6329 remove = true;
6330 break;
6332 break;
6334 goto do_notice;
6336 do_add:
6337 decl = OMP_CLAUSE_DECL (c);
6338 if (error_operand_p (decl))
6340 remove = true;
6341 break;
6343 omp_add_variable (ctx, decl, flags);
6344 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6345 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6347 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6348 GOVD_LOCAL | GOVD_SEEN);
6349 gimplify_omp_ctxp = ctx;
6350 push_gimplify_context ();
6352 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6353 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6355 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6356 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6357 pop_gimplify_context
6358 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6359 push_gimplify_context ();
6360 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6361 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6362 pop_gimplify_context
6363 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6364 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6365 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6367 gimplify_omp_ctxp = outer_ctx;
6369 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6370 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6372 gimplify_omp_ctxp = ctx;
6373 push_gimplify_context ();
6374 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6376 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6377 NULL, NULL);
6378 TREE_SIDE_EFFECTS (bind) = 1;
6379 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6380 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6382 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6383 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6384 pop_gimplify_context
6385 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6386 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6388 gimplify_omp_ctxp = outer_ctx;
6390 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6391 && OMP_CLAUSE_LINEAR_STMT (c))
6393 gimplify_omp_ctxp = ctx;
6394 push_gimplify_context ();
6395 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
6397 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6398 NULL, NULL);
6399 TREE_SIDE_EFFECTS (bind) = 1;
6400 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
6401 OMP_CLAUSE_LINEAR_STMT (c) = bind;
6403 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
6404 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6405 pop_gimplify_context
6406 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
6407 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
6409 gimplify_omp_ctxp = outer_ctx;
6411 if (notice_outer)
6412 goto do_notice;
6413 break;
6415 case OMP_CLAUSE_COPYIN:
6416 case OMP_CLAUSE_COPYPRIVATE:
6417 decl = OMP_CLAUSE_DECL (c);
6418 if (error_operand_p (decl))
6420 remove = true;
6421 break;
6423 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6424 && !remove
6425 && !omp_check_private (ctx, decl, true))
6427 remove = true;
6428 if (is_global_var (decl))
6430 if (DECL_THREAD_LOCAL_P (decl))
6431 remove = false;
6432 else if (DECL_HAS_VALUE_EXPR_P (decl))
6434 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6436 if (value
6437 && DECL_P (value)
6438 && DECL_THREAD_LOCAL_P (value))
6439 remove = false;
6442 if (remove)
6443 error_at (OMP_CLAUSE_LOCATION (c),
6444 "copyprivate variable %qE is not threadprivate"
6445 " or private in outer context", DECL_NAME (decl));
6447 do_notice:
6448 if (outer_ctx)
6449 omp_notice_variable (outer_ctx, decl, true);
6450 if (check_non_private
6451 && region_type == ORT_WORKSHARE
6452 && omp_check_private (ctx, decl, false))
6454 error ("%s variable %qE is private in outer context",
6455 check_non_private, DECL_NAME (decl));
6456 remove = true;
6458 break;
6460 case OMP_CLAUSE_FINAL:
6461 case OMP_CLAUSE_IF:
6462 OMP_CLAUSE_OPERAND (c, 0)
6463 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6464 /* Fall through. */
6466 case OMP_CLAUSE_SCHEDULE:
6467 case OMP_CLAUSE_NUM_THREADS:
6468 case OMP_CLAUSE_NUM_TEAMS:
6469 case OMP_CLAUSE_THREAD_LIMIT:
6470 case OMP_CLAUSE_DIST_SCHEDULE:
6471 case OMP_CLAUSE_DEVICE:
6472 case OMP_CLAUSE__CILK_FOR_COUNT_:
6473 case OMP_CLAUSE_ASYNC:
6474 case OMP_CLAUSE_WAIT:
6475 case OMP_CLAUSE_NUM_GANGS:
6476 case OMP_CLAUSE_NUM_WORKERS:
6477 case OMP_CLAUSE_VECTOR_LENGTH:
6478 case OMP_CLAUSE_GANG:
6479 case OMP_CLAUSE_WORKER:
6480 case OMP_CLAUSE_VECTOR:
6481 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6482 is_gimple_val, fb_rvalue) == GS_ERROR)
6483 remove = true;
6484 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_GANG
6485 && gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
6486 is_gimple_val, fb_rvalue) == GS_ERROR)
6487 remove = true;
6488 break;
6490 case OMP_CLAUSE_DEVICE_RESIDENT:
6491 case OMP_CLAUSE_USE_DEVICE:
6492 case OMP_CLAUSE_INDEPENDENT:
6493 remove = true;
6494 break;
6496 case OMP_CLAUSE_NOWAIT:
6497 case OMP_CLAUSE_ORDERED:
6498 case OMP_CLAUSE_UNTIED:
6499 case OMP_CLAUSE_COLLAPSE:
6500 case OMP_CLAUSE_AUTO:
6501 case OMP_CLAUSE_SEQ:
6502 case OMP_CLAUSE_MERGEABLE:
6503 case OMP_CLAUSE_PROC_BIND:
6504 case OMP_CLAUSE_SAFELEN:
6505 break;
6507 case OMP_CLAUSE_ALIGNED:
6508 decl = OMP_CLAUSE_DECL (c);
6509 if (error_operand_p (decl))
6511 remove = true;
6512 break;
6514 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
6515 is_gimple_val, fb_rvalue) == GS_ERROR)
6517 remove = true;
6518 break;
6520 if (!is_global_var (decl)
6521 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6522 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6523 break;
6525 case OMP_CLAUSE_DEFAULT:
6526 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6527 break;
6529 default:
6530 gcc_unreachable ();
6533 if (remove)
6534 *list_p = OMP_CLAUSE_CHAIN (c);
6535 else
6536 list_p = &OMP_CLAUSE_CHAIN (c);
6539 gimplify_omp_ctxp = ctx;
6542 struct gimplify_adjust_omp_clauses_data
6544 tree *list_p;
6545 gimple_seq *pre_p;
6548 /* For all variables that were not actually used within the context,
6549 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6551 static int
6552 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6554 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
6555 gimple_seq *pre_p
6556 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
6557 tree decl = (tree) n->key;
6558 unsigned flags = n->value;
6559 enum omp_clause_code code;
6560 tree clause;
6561 bool private_debug;
6563 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6564 return 0;
6565 if ((flags & GOVD_SEEN) == 0)
6566 return 0;
6567 if (flags & GOVD_DEBUG_PRIVATE)
6569 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6570 private_debug = true;
6572 else if (flags & GOVD_MAP)
6573 private_debug = false;
6574 else
6575 private_debug
6576 = lang_hooks.decls.omp_private_debug_clause (decl,
6577 !!(flags & GOVD_SHARED));
6578 if (private_debug)
6579 code = OMP_CLAUSE_PRIVATE;
6580 else if (flags & GOVD_MAP)
6581 code = OMP_CLAUSE_MAP;
6582 else if (flags & GOVD_SHARED)
6584 if (is_global_var (decl))
6586 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6587 while (ctx != NULL)
6589 splay_tree_node on
6590 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6591 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6592 | GOVD_PRIVATE | GOVD_REDUCTION
6593 | GOVD_LINEAR | GOVD_MAP)) != 0)
6594 break;
6595 ctx = ctx->outer_context;
6597 if (ctx == NULL)
6598 return 0;
6600 code = OMP_CLAUSE_SHARED;
6602 else if (flags & GOVD_PRIVATE)
6603 code = OMP_CLAUSE_PRIVATE;
6604 else if (flags & GOVD_FIRSTPRIVATE)
6605 code = OMP_CLAUSE_FIRSTPRIVATE;
6606 else if (flags & GOVD_LASTPRIVATE)
6607 code = OMP_CLAUSE_LASTPRIVATE;
6608 else if (flags & GOVD_ALIGNED)
6609 return 0;
6610 else
6611 gcc_unreachable ();
6613 clause = build_omp_clause (input_location, code);
6614 OMP_CLAUSE_DECL (clause) = decl;
6615 OMP_CLAUSE_CHAIN (clause) = *list_p;
6616 if (private_debug)
6617 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6618 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6619 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6620 else if (code == OMP_CLAUSE_MAP)
6622 OMP_CLAUSE_SET_MAP_KIND (clause,
6623 flags & GOVD_MAP_TO_ONLY
6624 ? GOMP_MAP_TO
6625 : GOMP_MAP_TOFROM);
6626 if (DECL_SIZE (decl)
6627 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6629 tree decl2 = DECL_VALUE_EXPR (decl);
6630 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6631 decl2 = TREE_OPERAND (decl2, 0);
6632 gcc_assert (DECL_P (decl2));
6633 tree mem = build_simple_mem_ref (decl2);
6634 OMP_CLAUSE_DECL (clause) = mem;
6635 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6636 if (gimplify_omp_ctxp->outer_context)
6638 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6639 omp_notice_variable (ctx, decl2, true);
6640 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6642 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6643 OMP_CLAUSE_MAP);
6644 OMP_CLAUSE_DECL (nc) = decl;
6645 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6646 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
6647 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6648 OMP_CLAUSE_CHAIN (clause) = nc;
6650 else
6651 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
6653 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
6655 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
6656 OMP_CLAUSE_DECL (nc) = decl;
6657 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
6658 OMP_CLAUSE_CHAIN (nc) = *list_p;
6659 OMP_CLAUSE_CHAIN (clause) = nc;
6660 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6661 gimplify_omp_ctxp = ctx->outer_context;
6662 lang_hooks.decls.omp_finish_clause (nc, pre_p);
6663 gimplify_omp_ctxp = ctx;
6665 *list_p = clause;
6666 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6667 gimplify_omp_ctxp = ctx->outer_context;
6668 lang_hooks.decls.omp_finish_clause (clause, pre_p);
6669 gimplify_omp_ctxp = ctx;
6670 return 0;
6673 static void
6674 gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
6676 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6677 tree c, decl;
6679 while ((c = *list_p) != NULL)
6681 splay_tree_node n;
6682 bool remove = false;
6684 switch (OMP_CLAUSE_CODE (c))
6686 case OMP_CLAUSE_PRIVATE:
6687 case OMP_CLAUSE_SHARED:
6688 case OMP_CLAUSE_FIRSTPRIVATE:
6689 case OMP_CLAUSE_LINEAR:
6690 decl = OMP_CLAUSE_DECL (c);
6691 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6692 remove = !(n->value & GOVD_SEEN);
6693 if (! remove)
6695 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6696 if ((n->value & GOVD_DEBUG_PRIVATE)
6697 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6699 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6700 || ((n->value & GOVD_DATA_SHARE_CLASS)
6701 == GOVD_PRIVATE));
6702 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6703 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6706 break;
6708 case OMP_CLAUSE_LASTPRIVATE:
6709 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6710 accurately reflect the presence of a FIRSTPRIVATE clause. */
6711 decl = OMP_CLAUSE_DECL (c);
6712 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6713 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6714 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6715 if (omp_no_lastprivate (ctx))
6717 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6718 remove = true;
6719 else
6720 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
6722 break;
6724 case OMP_CLAUSE_ALIGNED:
6725 decl = OMP_CLAUSE_DECL (c);
6726 if (!is_global_var (decl))
6728 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6729 remove = n == NULL || !(n->value & GOVD_SEEN);
6730 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6732 struct gimplify_omp_ctx *octx;
6733 if (n != NULL
6734 && (n->value & (GOVD_DATA_SHARE_CLASS
6735 & ~GOVD_FIRSTPRIVATE)))
6736 remove = true;
6737 else
6738 for (octx = ctx->outer_context; octx;
6739 octx = octx->outer_context)
6741 n = splay_tree_lookup (octx->variables,
6742 (splay_tree_key) decl);
6743 if (n == NULL)
6744 continue;
6745 if (n->value & GOVD_LOCAL)
6746 break;
6747 /* We have to avoid assigning a shared variable
6748 to itself when trying to add
6749 __builtin_assume_aligned. */
6750 if (n->value & GOVD_SHARED)
6752 remove = true;
6753 break;
6758 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6760 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6761 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6762 remove = true;
6764 break;
6766 case OMP_CLAUSE_MAP:
6767 decl = OMP_CLAUSE_DECL (c);
6768 if (!DECL_P (decl))
6769 break;
6770 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6771 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6772 remove = true;
6773 else if (DECL_SIZE (decl)
6774 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6775 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER)
6777 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
6778 for these, TREE_CODE (DECL_SIZE (decl)) will always be
6779 INTEGER_CST. */
6780 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
6782 tree decl2 = DECL_VALUE_EXPR (decl);
6783 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6784 decl2 = TREE_OPERAND (decl2, 0);
6785 gcc_assert (DECL_P (decl2));
6786 tree mem = build_simple_mem_ref (decl2);
6787 OMP_CLAUSE_DECL (c) = mem;
6788 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6789 if (ctx->outer_context)
6791 omp_notice_variable (ctx->outer_context, decl2, true);
6792 omp_notice_variable (ctx->outer_context,
6793 OMP_CLAUSE_SIZE (c), true);
6795 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6796 OMP_CLAUSE_MAP);
6797 OMP_CLAUSE_DECL (nc) = decl;
6798 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6799 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
6800 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6801 OMP_CLAUSE_CHAIN (c) = nc;
6802 c = nc;
6804 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6805 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6806 break;
6808 case OMP_CLAUSE_TO:
6809 case OMP_CLAUSE_FROM:
6810 case OMP_CLAUSE__CACHE_:
6811 decl = OMP_CLAUSE_DECL (c);
6812 if (!DECL_P (decl))
6813 break;
6814 if (DECL_SIZE (decl)
6815 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6817 tree decl2 = DECL_VALUE_EXPR (decl);
6818 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6819 decl2 = TREE_OPERAND (decl2, 0);
6820 gcc_assert (DECL_P (decl2));
6821 tree mem = build_simple_mem_ref (decl2);
6822 OMP_CLAUSE_DECL (c) = mem;
6823 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6824 if (ctx->outer_context)
6826 omp_notice_variable (ctx->outer_context, decl2, true);
6827 omp_notice_variable (ctx->outer_context,
6828 OMP_CLAUSE_SIZE (c), true);
6831 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6832 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6833 break;
6835 case OMP_CLAUSE_REDUCTION:
6836 case OMP_CLAUSE_COPYIN:
6837 case OMP_CLAUSE_COPYPRIVATE:
6838 case OMP_CLAUSE_IF:
6839 case OMP_CLAUSE_NUM_THREADS:
6840 case OMP_CLAUSE_NUM_TEAMS:
6841 case OMP_CLAUSE_THREAD_LIMIT:
6842 case OMP_CLAUSE_DIST_SCHEDULE:
6843 case OMP_CLAUSE_DEVICE:
6844 case OMP_CLAUSE_SCHEDULE:
6845 case OMP_CLAUSE_NOWAIT:
6846 case OMP_CLAUSE_ORDERED:
6847 case OMP_CLAUSE_DEFAULT:
6848 case OMP_CLAUSE_UNTIED:
6849 case OMP_CLAUSE_COLLAPSE:
6850 case OMP_CLAUSE_FINAL:
6851 case OMP_CLAUSE_MERGEABLE:
6852 case OMP_CLAUSE_PROC_BIND:
6853 case OMP_CLAUSE_SAFELEN:
6854 case OMP_CLAUSE_DEPEND:
6855 case OMP_CLAUSE__CILK_FOR_COUNT_:
6856 case OMP_CLAUSE_ASYNC:
6857 case OMP_CLAUSE_WAIT:
6858 case OMP_CLAUSE_DEVICE_RESIDENT:
6859 case OMP_CLAUSE_USE_DEVICE:
6860 case OMP_CLAUSE_INDEPENDENT:
6861 case OMP_CLAUSE_NUM_GANGS:
6862 case OMP_CLAUSE_NUM_WORKERS:
6863 case OMP_CLAUSE_VECTOR_LENGTH:
6864 case OMP_CLAUSE_GANG:
6865 case OMP_CLAUSE_WORKER:
6866 case OMP_CLAUSE_VECTOR:
6867 case OMP_CLAUSE_AUTO:
6868 case OMP_CLAUSE_SEQ:
6869 break;
6871 default:
6872 gcc_unreachable ();
6875 if (remove)
6876 *list_p = OMP_CLAUSE_CHAIN (c);
6877 else
6878 list_p = &OMP_CLAUSE_CHAIN (c);
6881 /* Add in any implicit data sharing. */
6882 struct gimplify_adjust_omp_clauses_data data;
6883 data.list_p = list_p;
6884 data.pre_p = pre_p;
6885 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
6887 gimplify_omp_ctxp = ctx->outer_context;
6888 delete_omp_context (ctx);
6891 /* Gimplify OACC_CACHE. */
6893 static void
6894 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
6896 tree expr = *expr_p;
6898 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6899 gimplify_adjust_omp_clauses (pre_p, &OACC_CACHE_CLAUSES (expr));
6901 /* TODO: Do something sensible with this information. */
6903 *expr_p = NULL_TREE;
6906 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6907 gimplification of the body, as well as scanning the body for used
6908 variables. We need to do this scan now, because variable-sized
6909 decls will be decomposed during gimplification. */
6911 static void
6912 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6914 tree expr = *expr_p;
6915 gimple g;
6916 gimple_seq body = NULL;
6918 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6919 OMP_PARALLEL_COMBINED (expr)
6920 ? ORT_COMBINED_PARALLEL
6921 : ORT_PARALLEL);
6923 push_gimplify_context ();
6925 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6926 if (gimple_code (g) == GIMPLE_BIND)
6927 pop_gimplify_context (g);
6928 else
6929 pop_gimplify_context (NULL);
6931 gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
6933 g = gimple_build_omp_parallel (body,
6934 OMP_PARALLEL_CLAUSES (expr),
6935 NULL_TREE, NULL_TREE);
6936 if (OMP_PARALLEL_COMBINED (expr))
6937 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6938 gimplify_seq_add_stmt (pre_p, g);
6939 *expr_p = NULL_TREE;
6942 /* Gimplify the contents of an OMP_TASK statement. This involves
6943 gimplification of the body, as well as scanning the body for used
6944 variables. We need to do this scan now, because variable-sized
6945 decls will be decomposed during gimplification. */
6947 static void
6948 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6950 tree expr = *expr_p;
6951 gimple g;
6952 gimple_seq body = NULL;
6954 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6955 find_omp_clause (OMP_TASK_CLAUSES (expr),
6956 OMP_CLAUSE_UNTIED)
6957 ? ORT_UNTIED_TASK : ORT_TASK);
6959 push_gimplify_context ();
6961 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6962 if (gimple_code (g) == GIMPLE_BIND)
6963 pop_gimplify_context (g);
6964 else
6965 pop_gimplify_context (NULL);
6967 gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
6969 g = gimple_build_omp_task (body,
6970 OMP_TASK_CLAUSES (expr),
6971 NULL_TREE, NULL_TREE,
6972 NULL_TREE, NULL_TREE, NULL_TREE);
6973 gimplify_seq_add_stmt (pre_p, g);
6974 *expr_p = NULL_TREE;
6977 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6978 with non-NULL OMP_FOR_INIT. */
6980 static tree
6981 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6983 *walk_subtrees = 0;
6984 switch (TREE_CODE (*tp))
6986 case OMP_FOR:
6987 *walk_subtrees = 1;
6988 /* FALLTHRU */
6989 case OMP_SIMD:
6990 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6991 return *tp;
6992 break;
6993 case BIND_EXPR:
6994 case STATEMENT_LIST:
6995 case OMP_PARALLEL:
6996 *walk_subtrees = 1;
6997 break;
6998 default:
6999 break;
7001 return NULL_TREE;
7004 /* Gimplify the gross structure of an OMP_FOR statement. */
7006 static enum gimplify_status
7007 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
7009 tree for_stmt, orig_for_stmt, decl, var, t;
7010 enum gimplify_status ret = GS_ALL_DONE;
7011 enum gimplify_status tret;
7012 gomp_for *gfor;
7013 gimple_seq for_body, for_pre_body;
7014 int i;
7015 bool simd;
7016 bitmap has_decl_expr = NULL;
7018 orig_for_stmt = for_stmt = *expr_p;
7020 switch (TREE_CODE (for_stmt))
7022 case OMP_FOR:
7023 case CILK_FOR:
7024 case OMP_DISTRIBUTE:
7025 case OACC_LOOP:
7026 simd = false;
7027 break;
7028 case OMP_SIMD:
7029 case CILK_SIMD:
7030 simd = true;
7031 break;
7032 default:
7033 gcc_unreachable ();
7036 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
7037 clause for the IV. */
7038 if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7040 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
7041 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7042 decl = TREE_OPERAND (t, 0);
7043 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7044 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7045 && OMP_CLAUSE_DECL (c) == decl)
7047 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
7048 break;
7052 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
7053 simd ? ORT_SIMD : ORT_WORKSHARE);
7054 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
7055 gimplify_omp_ctxp->distribute = true;
7057 /* Handle OMP_FOR_INIT. */
7058 for_pre_body = NULL;
7059 if (simd && OMP_FOR_PRE_BODY (for_stmt))
7061 has_decl_expr = BITMAP_ALLOC (NULL);
7062 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
7063 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
7064 == VAR_DECL)
7066 t = OMP_FOR_PRE_BODY (for_stmt);
7067 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7069 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
7071 tree_stmt_iterator si;
7072 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
7073 tsi_next (&si))
7075 t = tsi_stmt (si);
7076 if (TREE_CODE (t) == DECL_EXPR
7077 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
7078 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7082 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
7083 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
7085 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
7087 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
7088 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
7089 NULL, NULL);
7090 gcc_assert (for_stmt != NULL_TREE);
7091 gimplify_omp_ctxp->combined_loop = true;
7094 for_body = NULL;
7095 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7096 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
7097 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7098 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
7099 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7101 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7102 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7103 decl = TREE_OPERAND (t, 0);
7104 gcc_assert (DECL_P (decl));
7105 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
7106 || POINTER_TYPE_P (TREE_TYPE (decl)));
7108 /* Make sure the iteration variable is private. */
7109 tree c = NULL_TREE;
7110 tree c2 = NULL_TREE;
7111 if (orig_for_stmt != for_stmt)
7112 /* Do this only on innermost construct for combined ones. */;
7113 else if (simd)
7115 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
7116 (splay_tree_key)decl);
7117 omp_is_private (gimplify_omp_ctxp, decl,
7118 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7119 != 1));
7120 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7121 omp_notice_variable (gimplify_omp_ctxp, decl, true);
7122 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7124 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
7125 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
7126 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
7127 if ((has_decl_expr
7128 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
7129 || omp_no_lastprivate (gimplify_omp_ctxp))
7131 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
7132 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7134 OMP_CLAUSE_DECL (c) = decl;
7135 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
7136 OMP_FOR_CLAUSES (for_stmt) = c;
7138 omp_add_variable (gimplify_omp_ctxp, decl, flags);
7139 struct gimplify_omp_ctx *outer
7140 = gimplify_omp_ctxp->outer_context;
7141 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7143 if (outer->region_type == ORT_WORKSHARE
7144 && outer->combined_loop)
7146 if (outer->outer_context
7147 && (outer->outer_context->region_type
7148 == ORT_COMBINED_PARALLEL))
7149 outer = outer->outer_context;
7150 else if (omp_check_private (outer, decl, false))
7151 outer = NULL;
7153 else if (outer->region_type != ORT_COMBINED_PARALLEL)
7154 outer = NULL;
7155 if (outer)
7157 omp_add_variable (outer, decl,
7158 GOVD_LASTPRIVATE | GOVD_SEEN);
7159 if (outer->outer_context)
7160 omp_notice_variable (outer->outer_context, decl, true);
7164 else
7166 bool lastprivate
7167 = (!has_decl_expr
7168 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
7169 && !omp_no_lastprivate (gimplify_omp_ctxp);
7170 struct gimplify_omp_ctx *outer
7171 = gimplify_omp_ctxp->outer_context;
7172 if (outer && lastprivate)
7174 if (outer->region_type == ORT_WORKSHARE
7175 && outer->combined_loop)
7177 if (outer->outer_context
7178 && (outer->outer_context->region_type
7179 == ORT_COMBINED_PARALLEL))
7180 outer = outer->outer_context;
7181 else if (omp_check_private (outer, decl, false))
7182 outer = NULL;
7184 else if (outer->region_type != ORT_COMBINED_PARALLEL)
7185 outer = NULL;
7186 if (outer)
7188 omp_add_variable (outer, decl,
7189 GOVD_LASTPRIVATE | GOVD_SEEN);
7190 if (outer->outer_context)
7191 omp_notice_variable (outer->outer_context, decl, true);
7195 c = build_omp_clause (input_location,
7196 lastprivate ? OMP_CLAUSE_LASTPRIVATE
7197 : OMP_CLAUSE_PRIVATE);
7198 OMP_CLAUSE_DECL (c) = decl;
7199 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
7200 OMP_FOR_CLAUSES (for_stmt) = c;
7201 omp_add_variable (gimplify_omp_ctxp, decl,
7202 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
7203 | GOVD_EXPLICIT | GOVD_SEEN);
7204 c = NULL_TREE;
7207 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
7208 omp_notice_variable (gimplify_omp_ctxp, decl, true);
7209 else
7210 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
7212 /* If DECL is not a gimple register, create a temporary variable to act
7213 as an iteration counter. This is valid, since DECL cannot be
7214 modified in the body of the loop. Similarly for any iteration vars
7215 in simd with collapse > 1 where the iterator vars must be
7216 lastprivate. */
7217 if (orig_for_stmt != for_stmt)
7218 var = decl;
7219 else if (!is_gimple_reg (decl)
7220 || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
7222 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7223 TREE_OPERAND (t, 0) = var;
7225 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
7227 if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7229 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
7230 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
7231 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
7232 OMP_CLAUSE_DECL (c2) = var;
7233 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
7234 OMP_FOR_CLAUSES (for_stmt) = c2;
7235 omp_add_variable (gimplify_omp_ctxp, var,
7236 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
7237 if (c == NULL_TREE)
7239 c = c2;
7240 c2 = NULL_TREE;
7243 else
7244 omp_add_variable (gimplify_omp_ctxp, var,
7245 GOVD_PRIVATE | GOVD_SEEN);
7247 else
7248 var = decl;
7250 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7251 is_gimple_val, fb_rvalue);
7252 ret = MIN (ret, tret);
7253 if (ret == GS_ERROR)
7254 return ret;
7256 /* Handle OMP_FOR_COND. */
7257 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7258 gcc_assert (COMPARISON_CLASS_P (t));
7259 gcc_assert (TREE_OPERAND (t, 0) == decl);
7261 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7262 is_gimple_val, fb_rvalue);
7263 ret = MIN (ret, tret);
7265 /* Handle OMP_FOR_INCR. */
7266 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7267 switch (TREE_CODE (t))
7269 case PREINCREMENT_EXPR:
7270 case POSTINCREMENT_EXPR:
7272 tree decl = TREE_OPERAND (t, 0);
7273 /* c_omp_for_incr_canonicalize_ptr() should have been
7274 called to massage things appropriately. */
7275 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
7277 if (orig_for_stmt != for_stmt)
7278 break;
7279 t = build_int_cst (TREE_TYPE (decl), 1);
7280 if (c)
7281 OMP_CLAUSE_LINEAR_STEP (c) = t;
7282 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
7283 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
7284 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
7285 break;
7288 case PREDECREMENT_EXPR:
7289 case POSTDECREMENT_EXPR:
7290 /* c_omp_for_incr_canonicalize_ptr() should have been
7291 called to massage things appropriately. */
7292 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
7293 if (orig_for_stmt != for_stmt)
7294 break;
7295 t = build_int_cst (TREE_TYPE (decl), -1);
7296 if (c)
7297 OMP_CLAUSE_LINEAR_STEP (c) = t;
7298 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
7299 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
7300 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
7301 break;
7303 case MODIFY_EXPR:
7304 gcc_assert (TREE_OPERAND (t, 0) == decl);
7305 TREE_OPERAND (t, 0) = var;
7307 t = TREE_OPERAND (t, 1);
7308 switch (TREE_CODE (t))
7310 case PLUS_EXPR:
7311 if (TREE_OPERAND (t, 1) == decl)
7313 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
7314 TREE_OPERAND (t, 0) = var;
7315 break;
7318 /* Fallthru. */
7319 case MINUS_EXPR:
7320 case POINTER_PLUS_EXPR:
7321 gcc_assert (TREE_OPERAND (t, 0) == decl);
7322 TREE_OPERAND (t, 0) = var;
7323 break;
7324 default:
7325 gcc_unreachable ();
7328 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7329 is_gimple_val, fb_rvalue);
7330 ret = MIN (ret, tret);
7331 if (c)
7333 tree step = TREE_OPERAND (t, 1);
7334 tree stept = TREE_TYPE (decl);
7335 if (POINTER_TYPE_P (stept))
7336 stept = sizetype;
7337 step = fold_convert (stept, step);
7338 if (TREE_CODE (t) == MINUS_EXPR)
7339 step = fold_build1 (NEGATE_EXPR, stept, step);
7340 OMP_CLAUSE_LINEAR_STEP (c) = step;
7341 if (step != TREE_OPERAND (t, 1))
7343 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
7344 &for_pre_body, NULL,
7345 is_gimple_val, fb_rvalue);
7346 ret = MIN (ret, tret);
7349 break;
7351 default:
7352 gcc_unreachable ();
7355 if (c2)
7357 gcc_assert (c);
7358 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
7361 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7362 && orig_for_stmt == for_stmt)
7364 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
7365 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7366 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7367 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7368 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
7369 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
7370 && OMP_CLAUSE_DECL (c) == decl)
7372 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7373 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7374 gcc_assert (TREE_OPERAND (t, 0) == var);
7375 t = TREE_OPERAND (t, 1);
7376 gcc_assert (TREE_CODE (t) == PLUS_EXPR
7377 || TREE_CODE (t) == MINUS_EXPR
7378 || TREE_CODE (t) == POINTER_PLUS_EXPR);
7379 gcc_assert (TREE_OPERAND (t, 0) == var);
7380 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7381 TREE_OPERAND (t, 1));
7382 gimple_seq *seq;
7383 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
7384 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
7385 else
7386 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
7387 gimplify_assign (decl, t, seq);
7392 BITMAP_FREE (has_decl_expr);
7394 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
7396 if (orig_for_stmt != for_stmt)
7397 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7399 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7400 decl = TREE_OPERAND (t, 0);
7401 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7402 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7403 TREE_OPERAND (t, 0) = var;
7404 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7405 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7406 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7409 gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
7411 int kind;
7412 switch (TREE_CODE (orig_for_stmt))
7414 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7415 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7416 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
7417 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
7418 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7419 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
7420 default:
7421 gcc_unreachable ();
7423 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7424 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7425 for_pre_body);
7426 if (orig_for_stmt != for_stmt)
7427 gimple_omp_for_set_combined_p (gfor, true);
7428 if (gimplify_omp_ctxp
7429 && (gimplify_omp_ctxp->combined_loop
7430 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7431 && gimplify_omp_ctxp->outer_context
7432 && gimplify_omp_ctxp->outer_context->combined_loop)))
7434 gimple_omp_for_set_combined_into_p (gfor, true);
7435 if (gimplify_omp_ctxp->combined_loop)
7436 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7437 else
7438 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7441 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7443 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7444 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7445 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7446 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7447 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7448 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7449 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7450 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7453 gimplify_seq_add_stmt (pre_p, gfor);
7454 if (ret != GS_ALL_DONE)
7455 return GS_ERROR;
7456 *expr_p = NULL_TREE;
7457 return GS_ALL_DONE;
7460 /* Gimplify the gross structure of several OMP constructs. */
7462 static void
7463 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
7465 tree expr = *expr_p;
7466 gimple stmt;
7467 gimple_seq body = NULL;
7468 enum omp_region_type ort;
7470 switch (TREE_CODE (expr))
7472 case OMP_SECTIONS:
7473 case OMP_SINGLE:
7474 ort = ORT_WORKSHARE;
7475 break;
7476 case OACC_KERNELS:
7477 case OACC_PARALLEL:
7478 case OMP_TARGET:
7479 ort = ORT_TARGET;
7480 break;
7481 case OACC_DATA:
7482 case OMP_TARGET_DATA:
7483 ort = ORT_TARGET_DATA;
7484 break;
7485 case OMP_TEAMS:
7486 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
7487 break;
7488 default:
7489 gcc_unreachable ();
7491 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
7492 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7494 push_gimplify_context ();
7495 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7496 if (gimple_code (g) == GIMPLE_BIND)
7497 pop_gimplify_context (g);
7498 else
7499 pop_gimplify_context (NULL);
7500 if (ort == ORT_TARGET_DATA)
7502 enum built_in_function end_ix;
7503 switch (TREE_CODE (expr))
7505 case OACC_DATA:
7506 end_ix = BUILT_IN_GOACC_DATA_END;
7507 break;
7508 case OMP_TARGET_DATA:
7509 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
7510 break;
7511 default:
7512 gcc_unreachable ();
7514 tree fn = builtin_decl_explicit (end_ix);
7515 g = gimple_build_call (fn, 0);
7516 gimple_seq cleanup = NULL;
7517 gimple_seq_add_stmt (&cleanup, g);
7518 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7519 body = NULL;
7520 gimple_seq_add_stmt (&body, g);
7523 else
7524 gimplify_and_add (OMP_BODY (expr), &body);
7525 gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));
7527 switch (TREE_CODE (expr))
7529 case OACC_DATA:
7530 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
7531 OMP_CLAUSES (expr));
7532 break;
7533 case OACC_KERNELS:
7534 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
7535 OMP_CLAUSES (expr));
7536 break;
7537 case OACC_PARALLEL:
7538 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
7539 OMP_CLAUSES (expr));
7540 break;
7541 case OMP_SECTIONS:
7542 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7543 break;
7544 case OMP_SINGLE:
7545 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7546 break;
7547 case OMP_TARGET:
7548 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7549 OMP_CLAUSES (expr));
7550 break;
7551 case OMP_TARGET_DATA:
7552 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7553 OMP_CLAUSES (expr));
7554 break;
7555 case OMP_TEAMS:
7556 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7557 break;
7558 default:
7559 gcc_unreachable ();
7562 gimplify_seq_add_stmt (pre_p, stmt);
7563 *expr_p = NULL_TREE;
7566 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
7567 target update constructs. */
7569 static void
7570 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7572 tree expr = *expr_p;
7573 int kind;
7574 gomp_target *stmt;
7576 switch (TREE_CODE (expr))
7578 case OACC_ENTER_DATA:
7579 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
7580 break;
7581 case OACC_EXIT_DATA:
7582 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
7583 break;
7584 case OACC_UPDATE:
7585 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
7586 break;
7587 case OMP_TARGET_UPDATE:
7588 kind = GF_OMP_TARGET_KIND_UPDATE;
7589 break;
7590 default:
7591 gcc_unreachable ();
7593 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
7594 ORT_WORKSHARE);
7595 gimplify_adjust_omp_clauses (pre_p, &OMP_STANDALONE_CLAUSES (expr));
7596 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
7598 gimplify_seq_add_stmt (pre_p, stmt);
7599 *expr_p = NULL_TREE;
7602 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
7603 stabilized the lhs of the atomic operation as *ADDR. Return true if
7604 EXPR is this stabilized form. */
7606 static bool
7607 goa_lhs_expr_p (tree expr, tree addr)
7609 /* Also include casts to other type variants. The C front end is fond
7610 of adding these for e.g. volatile variables. This is like
7611 STRIP_TYPE_NOPS but includes the main variant lookup. */
7612 STRIP_USELESS_TYPE_CONVERSION (expr);
7614 if (TREE_CODE (expr) == INDIRECT_REF)
7616 expr = TREE_OPERAND (expr, 0);
7617 while (expr != addr
7618 && (CONVERT_EXPR_P (expr)
7619 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7620 && TREE_CODE (expr) == TREE_CODE (addr)
7621 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7623 expr = TREE_OPERAND (expr, 0);
7624 addr = TREE_OPERAND (addr, 0);
7626 if (expr == addr)
7627 return true;
7628 return (TREE_CODE (addr) == ADDR_EXPR
7629 && TREE_CODE (expr) == ADDR_EXPR
7630 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7632 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7633 return true;
7634 return false;
7637 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
7638 expression does not involve the lhs, evaluate it into a temporary.
7639 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7640 or -1 if an error was encountered. */
7642 static int
7643 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7644 tree lhs_var)
7646 tree expr = *expr_p;
7647 int saw_lhs;
7649 if (goa_lhs_expr_p (expr, lhs_addr))
7651 *expr_p = lhs_var;
7652 return 1;
7654 if (is_gimple_val (expr))
7655 return 0;
7657 saw_lhs = 0;
7658 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7660 case tcc_binary:
7661 case tcc_comparison:
7662 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7663 lhs_var);
7664 case tcc_unary:
7665 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7666 lhs_var);
7667 break;
7668 case tcc_expression:
7669 switch (TREE_CODE (expr))
7671 case TRUTH_ANDIF_EXPR:
7672 case TRUTH_ORIF_EXPR:
7673 case TRUTH_AND_EXPR:
7674 case TRUTH_OR_EXPR:
7675 case TRUTH_XOR_EXPR:
7676 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7677 lhs_addr, lhs_var);
7678 case TRUTH_NOT_EXPR:
7679 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7680 lhs_addr, lhs_var);
7681 break;
7682 case COMPOUND_EXPR:
7683 /* Break out any preevaluations from cp_build_modify_expr. */
7684 for (; TREE_CODE (expr) == COMPOUND_EXPR;
7685 expr = TREE_OPERAND (expr, 1))
7686 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7687 *expr_p = expr;
7688 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
7689 default:
7690 break;
7692 break;
7693 default:
7694 break;
7697 if (saw_lhs == 0)
7699 enum gimplify_status gs;
7700 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7701 if (gs != GS_ALL_DONE)
7702 saw_lhs = -1;
7705 return saw_lhs;
7708 /* Gimplify an OMP_ATOMIC statement. */
7710 static enum gimplify_status
7711 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7713 tree addr = TREE_OPERAND (*expr_p, 0);
7714 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7715 ? NULL : TREE_OPERAND (*expr_p, 1);
7716 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7717 tree tmp_load;
7718 gomp_atomic_load *loadstmt;
7719 gomp_atomic_store *storestmt;
7721 tmp_load = create_tmp_reg (type);
7722 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7723 return GS_ERROR;
7725 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7726 != GS_ALL_DONE)
7727 return GS_ERROR;
7729 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7730 gimplify_seq_add_stmt (pre_p, loadstmt);
7731 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7732 != GS_ALL_DONE)
7733 return GS_ERROR;
7735 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7736 rhs = tmp_load;
7737 storestmt = gimple_build_omp_atomic_store (rhs);
7738 gimplify_seq_add_stmt (pre_p, storestmt);
7739 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7741 gimple_omp_atomic_set_seq_cst (loadstmt);
7742 gimple_omp_atomic_set_seq_cst (storestmt);
7744 switch (TREE_CODE (*expr_p))
7746 case OMP_ATOMIC_READ:
7747 case OMP_ATOMIC_CAPTURE_OLD:
7748 *expr_p = tmp_load;
7749 gimple_omp_atomic_set_need_value (loadstmt);
7750 break;
7751 case OMP_ATOMIC_CAPTURE_NEW:
7752 *expr_p = rhs;
7753 gimple_omp_atomic_set_need_value (storestmt);
7754 break;
7755 default:
7756 *expr_p = NULL;
7757 break;
7760 return GS_ALL_DONE;
7763 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7764 body, and adding some EH bits. */
7766 static enum gimplify_status
7767 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7769 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7770 gimple body_stmt;
7771 gtransaction *trans_stmt;
7772 gimple_seq body = NULL;
7773 int subcode = 0;
7775 /* Wrap the transaction body in a BIND_EXPR so we have a context
7776 where to put decls for OMP. */
7777 if (TREE_CODE (tbody) != BIND_EXPR)
7779 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7780 TREE_SIDE_EFFECTS (bind) = 1;
7781 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7782 TRANSACTION_EXPR_BODY (expr) = bind;
7785 push_gimplify_context ();
7786 temp = voidify_wrapper_expr (*expr_p, NULL);
7788 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7789 pop_gimplify_context (body_stmt);
7791 trans_stmt = gimple_build_transaction (body, NULL);
7792 if (TRANSACTION_EXPR_OUTER (expr))
7793 subcode = GTMA_IS_OUTER;
7794 else if (TRANSACTION_EXPR_RELAXED (expr))
7795 subcode = GTMA_IS_RELAXED;
7796 gimple_transaction_set_subcode (trans_stmt, subcode);
7798 gimplify_seq_add_stmt (pre_p, trans_stmt);
7800 if (temp)
7802 *expr_p = temp;
7803 return GS_OK;
7806 *expr_p = NULL_TREE;
7807 return GS_ALL_DONE;
7810 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7811 expression produces a value to be used as an operand inside a GIMPLE
7812 statement, the value will be stored back in *EXPR_P. This value will
7813 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7814 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7815 emitted in PRE_P and POST_P.
7817 Additionally, this process may overwrite parts of the input
7818 expression during gimplification. Ideally, it should be
7819 possible to do non-destructive gimplification.
7821 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7822 the expression needs to evaluate to a value to be used as
7823 an operand in a GIMPLE statement, this value will be stored in
7824 *EXPR_P on exit. This happens when the caller specifies one
7825 of fb_lvalue or fb_rvalue fallback flags.
7827 PRE_P will contain the sequence of GIMPLE statements corresponding
7828 to the evaluation of EXPR and all the side-effects that must
7829 be executed before the main expression. On exit, the last
7830 statement of PRE_P is the core statement being gimplified. For
7831 instance, when gimplifying 'if (++a)' the last statement in
7832 PRE_P will be 'if (t.1)' where t.1 is the result of
7833 pre-incrementing 'a'.
7835 POST_P will contain the sequence of GIMPLE statements corresponding
7836 to the evaluation of all the side-effects that must be executed
7837 after the main expression. If this is NULL, the post
7838 side-effects are stored at the end of PRE_P.
7840 The reason why the output is split in two is to handle post
7841 side-effects explicitly. In some cases, an expression may have
7842 inner and outer post side-effects which need to be emitted in
7843 an order different from the one given by the recursive
7844 traversal. For instance, for the expression (*p--)++ the post
7845 side-effects of '--' must actually occur *after* the post
7846 side-effects of '++'. However, gimplification will first visit
7847 the inner expression, so if a separate POST sequence was not
7848 used, the resulting sequence would be:
7850 1 t.1 = *p
7851 2 p = p - 1
7852 3 t.2 = t.1 + 1
7853 4 *p = t.2
7855 However, the post-decrement operation in line #2 must not be
7856 evaluated until after the store to *p at line #4, so the
7857 correct sequence should be:
7859 1 t.1 = *p
7860 2 t.2 = t.1 + 1
7861 3 *p = t.2
7862 4 p = p - 1
7864 So, by specifying a separate post queue, it is possible
7865 to emit the post side-effects in the correct order.
7866 If POST_P is NULL, an internal queue will be used. Before
7867 returning to the caller, the sequence POST_P is appended to
7868 the main output sequence PRE_P.
7870 GIMPLE_TEST_F points to a function that takes a tree T and
7871 returns nonzero if T is in the GIMPLE form requested by the
7872 caller. The GIMPLE predicates are in gimple.c.
7874 FALLBACK tells the function what sort of a temporary we want if
7875 gimplification cannot produce an expression that complies with
7876 GIMPLE_TEST_F.
7878 fb_none means that no temporary should be generated
7879 fb_rvalue means that an rvalue is OK to generate
7880 fb_lvalue means that an lvalue is OK to generate
7881 fb_either means that either is OK, but an lvalue is preferable.
7882 fb_mayfail means that gimplification may fail (in which case
7883 GS_ERROR will be returned)
7885 The return value is either GS_ERROR or GS_ALL_DONE, since this
7886 function iterates until EXPR is completely gimplified or an error
7887 occurs. */
7889 enum gimplify_status
7890 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7891 bool (*gimple_test_f) (tree), fallback_t fallback)
7893 tree tmp;
7894 gimple_seq internal_pre = NULL;
7895 gimple_seq internal_post = NULL;
7896 tree save_expr;
7897 bool is_statement;
7898 location_t saved_location;
7899 enum gimplify_status ret;
7900 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7902 save_expr = *expr_p;
7903 if (save_expr == NULL_TREE)
7904 return GS_ALL_DONE;
7906 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7907 is_statement = gimple_test_f == is_gimple_stmt;
7908 if (is_statement)
7909 gcc_assert (pre_p);
7911 /* Consistency checks. */
7912 if (gimple_test_f == is_gimple_reg)
7913 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7914 else if (gimple_test_f == is_gimple_val
7915 || gimple_test_f == is_gimple_call_addr
7916 || gimple_test_f == is_gimple_condexpr
7917 || gimple_test_f == is_gimple_mem_rhs
7918 || gimple_test_f == is_gimple_mem_rhs_or_call
7919 || gimple_test_f == is_gimple_reg_rhs
7920 || gimple_test_f == is_gimple_reg_rhs_or_call
7921 || gimple_test_f == is_gimple_asm_val
7922 || gimple_test_f == is_gimple_mem_ref_addr)
7923 gcc_assert (fallback & fb_rvalue);
7924 else if (gimple_test_f == is_gimple_min_lval
7925 || gimple_test_f == is_gimple_lvalue)
7926 gcc_assert (fallback & fb_lvalue);
7927 else if (gimple_test_f == is_gimple_addressable)
7928 gcc_assert (fallback & fb_either);
7929 else if (gimple_test_f == is_gimple_stmt)
7930 gcc_assert (fallback == fb_none);
7931 else
7933 /* We should have recognized the GIMPLE_TEST_F predicate to
7934 know what kind of fallback to use in case a temporary is
7935 needed to hold the value or address of *EXPR_P. */
7936 gcc_unreachable ();
7939 /* We used to check the predicate here and return immediately if it
7940 succeeds. This is wrong; the design is for gimplification to be
7941 idempotent, and for the predicates to only test for valid forms, not
7942 whether they are fully simplified. */
7943 if (pre_p == NULL)
7944 pre_p = &internal_pre;
7946 if (post_p == NULL)
7947 post_p = &internal_post;
7949 /* Remember the last statements added to PRE_P and POST_P. Every
7950 new statement added by the gimplification helpers needs to be
7951 annotated with location information. To centralize the
7952 responsibility, we remember the last statement that had been
7953 added to both queues before gimplifying *EXPR_P. If
7954 gimplification produces new statements in PRE_P and POST_P, those
7955 statements will be annotated with the same location information
7956 as *EXPR_P. */
7957 pre_last_gsi = gsi_last (*pre_p);
7958 post_last_gsi = gsi_last (*post_p);
7960 saved_location = input_location;
7961 if (save_expr != error_mark_node
7962 && EXPR_HAS_LOCATION (*expr_p))
7963 input_location = EXPR_LOCATION (*expr_p);
7965 /* Loop over the specific gimplifiers until the toplevel node
7966 remains the same. */
7969 /* Strip away as many useless type conversions as possible
7970 at the toplevel. */
7971 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7973 /* Remember the expr. */
7974 save_expr = *expr_p;
7976 /* Die, die, die, my darling. */
7977 if (save_expr == error_mark_node
7978 || (TREE_TYPE (save_expr)
7979 && TREE_TYPE (save_expr) == error_mark_node))
7981 ret = GS_ERROR;
7982 break;
7985 /* Do any language-specific gimplification. */
7986 ret = ((enum gimplify_status)
7987 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7988 if (ret == GS_OK)
7990 if (*expr_p == NULL_TREE)
7991 break;
7992 if (*expr_p != save_expr)
7993 continue;
7995 else if (ret != GS_UNHANDLED)
7996 break;
7998 /* Make sure that all the cases set 'ret' appropriately. */
7999 ret = GS_UNHANDLED;
8000 switch (TREE_CODE (*expr_p))
8002 /* First deal with the special cases. */
8004 case POSTINCREMENT_EXPR:
8005 case POSTDECREMENT_EXPR:
8006 case PREINCREMENT_EXPR:
8007 case PREDECREMENT_EXPR:
8008 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
8009 fallback != fb_none,
8010 TREE_TYPE (*expr_p));
8011 break;
8013 case VIEW_CONVERT_EXPR:
8014 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
8015 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
8017 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8018 post_p, is_gimple_val, fb_rvalue);
8019 recalculate_side_effects (*expr_p);
8020 break;
8022 /* Fallthru. */
8024 case ARRAY_REF:
8025 case ARRAY_RANGE_REF:
8026 case REALPART_EXPR:
8027 case IMAGPART_EXPR:
8028 case COMPONENT_REF:
8029 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
8030 fallback ? fallback : fb_rvalue);
8031 break;
8033 case COND_EXPR:
8034 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
8036 /* C99 code may assign to an array in a structure value of a
8037 conditional expression, and this has undefined behavior
8038 only on execution, so create a temporary if an lvalue is
8039 required. */
8040 if (fallback == fb_lvalue)
8042 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8043 mark_addressable (*expr_p);
8044 ret = GS_OK;
8046 break;
8048 case CALL_EXPR:
8049 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
8051 /* C99 code may assign to an array in a structure returned
8052 from a function, and this has undefined behavior only on
8053 execution, so create a temporary if an lvalue is
8054 required. */
8055 if (fallback == fb_lvalue)
8057 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8058 mark_addressable (*expr_p);
8059 ret = GS_OK;
8061 break;
8063 case TREE_LIST:
8064 gcc_unreachable ();
8066 case COMPOUND_EXPR:
8067 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
8068 break;
8070 case COMPOUND_LITERAL_EXPR:
8071 ret = gimplify_compound_literal_expr (expr_p, pre_p,
8072 gimple_test_f, fallback);
8073 break;
8075 case MODIFY_EXPR:
8076 case INIT_EXPR:
8077 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
8078 fallback != fb_none);
8079 break;
8081 case TRUTH_ANDIF_EXPR:
8082 case TRUTH_ORIF_EXPR:
8084 /* Preserve the original type of the expression and the
8085 source location of the outer expression. */
8086 tree org_type = TREE_TYPE (*expr_p);
8087 *expr_p = gimple_boolify (*expr_p);
8088 *expr_p = build3_loc (input_location, COND_EXPR,
8089 org_type, *expr_p,
8090 fold_convert_loc
8091 (input_location,
8092 org_type, boolean_true_node),
8093 fold_convert_loc
8094 (input_location,
8095 org_type, boolean_false_node));
8096 ret = GS_OK;
8097 break;
8100 case TRUTH_NOT_EXPR:
8102 tree type = TREE_TYPE (*expr_p);
8103 /* The parsers are careful to generate TRUTH_NOT_EXPR
8104 only with operands that are always zero or one.
8105 We do not fold here but handle the only interesting case
8106 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
8107 *expr_p = gimple_boolify (*expr_p);
8108 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
8109 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
8110 TREE_TYPE (*expr_p),
8111 TREE_OPERAND (*expr_p, 0));
8112 else
8113 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
8114 TREE_TYPE (*expr_p),
8115 TREE_OPERAND (*expr_p, 0),
8116 build_int_cst (TREE_TYPE (*expr_p), 1));
8117 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
8118 *expr_p = fold_convert_loc (input_location, type, *expr_p);
8119 ret = GS_OK;
8120 break;
8123 case ADDR_EXPR:
8124 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
8125 break;
8127 case ANNOTATE_EXPR:
8129 tree cond = TREE_OPERAND (*expr_p, 0);
8130 tree kind = TREE_OPERAND (*expr_p, 1);
8131 tree type = TREE_TYPE (cond);
8132 if (!INTEGRAL_TYPE_P (type))
8134 *expr_p = cond;
8135 ret = GS_OK;
8136 break;
8138 tree tmp = create_tmp_var (type);
8139 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
8140 gcall *call
8141 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
8142 gimple_call_set_lhs (call, tmp);
8143 gimplify_seq_add_stmt (pre_p, call);
8144 *expr_p = tmp;
8145 ret = GS_ALL_DONE;
8146 break;
8149 case VA_ARG_EXPR:
8150 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
8151 break;
8153 CASE_CONVERT:
8154 if (IS_EMPTY_STMT (*expr_p))
8156 ret = GS_ALL_DONE;
8157 break;
8160 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
8161 || fallback == fb_none)
8163 /* Just strip a conversion to void (or in void context) and
8164 try again. */
8165 *expr_p = TREE_OPERAND (*expr_p, 0);
8166 ret = GS_OK;
8167 break;
8170 ret = gimplify_conversion (expr_p);
8171 if (ret == GS_ERROR)
8172 break;
8173 if (*expr_p != save_expr)
8174 break;
8175 /* FALLTHRU */
8177 case FIX_TRUNC_EXPR:
8178 /* unary_expr: ... | '(' cast ')' val | ... */
8179 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8180 is_gimple_val, fb_rvalue);
8181 recalculate_side_effects (*expr_p);
8182 break;
8184 case INDIRECT_REF:
8186 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
8187 bool notrap = TREE_THIS_NOTRAP (*expr_p);
8188 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
8190 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
8191 if (*expr_p != save_expr)
8193 ret = GS_OK;
8194 break;
8197 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8198 is_gimple_reg, fb_rvalue);
8199 if (ret == GS_ERROR)
8200 break;
8202 recalculate_side_effects (*expr_p);
8203 *expr_p = fold_build2_loc (input_location, MEM_REF,
8204 TREE_TYPE (*expr_p),
8205 TREE_OPERAND (*expr_p, 0),
8206 build_int_cst (saved_ptr_type, 0));
8207 TREE_THIS_VOLATILE (*expr_p) = volatilep;
8208 TREE_THIS_NOTRAP (*expr_p) = notrap;
8209 ret = GS_OK;
8210 break;
8213 /* We arrive here through the various re-gimplifcation paths. */
8214 case MEM_REF:
8215 /* First try re-folding the whole thing. */
8216 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
8217 TREE_OPERAND (*expr_p, 0),
8218 TREE_OPERAND (*expr_p, 1));
8219 if (tmp)
8221 *expr_p = tmp;
8222 recalculate_side_effects (*expr_p);
8223 ret = GS_OK;
8224 break;
8226 /* Avoid re-gimplifying the address operand if it is already
8227 in suitable form. Re-gimplifying would mark the address
8228 operand addressable. Always gimplify when not in SSA form
8229 as we still may have to gimplify decls with value-exprs. */
8230 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
8231 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
8233 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8234 is_gimple_mem_ref_addr, fb_rvalue);
8235 if (ret == GS_ERROR)
8236 break;
8238 recalculate_side_effects (*expr_p);
8239 ret = GS_ALL_DONE;
8240 break;
8242 /* Constants need not be gimplified. */
8243 case INTEGER_CST:
8244 case REAL_CST:
8245 case FIXED_CST:
8246 case STRING_CST:
8247 case COMPLEX_CST:
8248 case VECTOR_CST:
8249 /* Drop the overflow flag on constants, we do not want
8250 that in the GIMPLE IL. */
8251 if (TREE_OVERFLOW_P (*expr_p))
8252 *expr_p = drop_tree_overflow (*expr_p);
8253 ret = GS_ALL_DONE;
8254 break;
8256 case CONST_DECL:
8257 /* If we require an lvalue, such as for ADDR_EXPR, retain the
8258 CONST_DECL node. Otherwise the decl is replaceable by its
8259 value. */
8260 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
8261 if (fallback & fb_lvalue)
8262 ret = GS_ALL_DONE;
8263 else
8265 *expr_p = DECL_INITIAL (*expr_p);
8266 ret = GS_OK;
8268 break;
8270 case DECL_EXPR:
8271 ret = gimplify_decl_expr (expr_p, pre_p);
8272 break;
8274 case BIND_EXPR:
8275 ret = gimplify_bind_expr (expr_p, pre_p);
8276 break;
8278 case LOOP_EXPR:
8279 ret = gimplify_loop_expr (expr_p, pre_p);
8280 break;
8282 case SWITCH_EXPR:
8283 ret = gimplify_switch_expr (expr_p, pre_p);
8284 break;
8286 case EXIT_EXPR:
8287 ret = gimplify_exit_expr (expr_p);
8288 break;
8290 case GOTO_EXPR:
8291 /* If the target is not LABEL, then it is a computed jump
8292 and the target needs to be gimplified. */
8293 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8295 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
8296 NULL, is_gimple_val, fb_rvalue);
8297 if (ret == GS_ERROR)
8298 break;
8300 gimplify_seq_add_stmt (pre_p,
8301 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
8302 ret = GS_ALL_DONE;
8303 break;
8305 case PREDICT_EXPR:
8306 gimplify_seq_add_stmt (pre_p,
8307 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
8308 PREDICT_EXPR_OUTCOME (*expr_p)));
8309 ret = GS_ALL_DONE;
8310 break;
8312 case LABEL_EXPR:
8313 ret = GS_ALL_DONE;
8314 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
8315 == current_function_decl);
8316 gimplify_seq_add_stmt (pre_p,
8317 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
8318 break;
8320 case CASE_LABEL_EXPR:
8321 ret = gimplify_case_label_expr (expr_p, pre_p);
8322 break;
8324 case RETURN_EXPR:
8325 ret = gimplify_return_expr (*expr_p, pre_p);
8326 break;
8328 case CONSTRUCTOR:
8329 /* Don't reduce this in place; let gimplify_init_constructor work its
8330 magic. Buf if we're just elaborating this for side effects, just
8331 gimplify any element that has side-effects. */
8332 if (fallback == fb_none)
8334 unsigned HOST_WIDE_INT ix;
8335 tree val;
8336 tree temp = NULL_TREE;
8337 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
8338 if (TREE_SIDE_EFFECTS (val))
8339 append_to_statement_list (val, &temp);
8341 *expr_p = temp;
8342 ret = temp ? GS_OK : GS_ALL_DONE;
8344 /* C99 code may assign to an array in a constructed
8345 structure or union, and this has undefined behavior only
8346 on execution, so create a temporary if an lvalue is
8347 required. */
8348 else if (fallback == fb_lvalue)
8350 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8351 mark_addressable (*expr_p);
8352 ret = GS_OK;
8354 else
8355 ret = GS_ALL_DONE;
8356 break;
8358 /* The following are special cases that are not handled by the
8359 original GIMPLE grammar. */
8361 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
8362 eliminated. */
8363 case SAVE_EXPR:
8364 ret = gimplify_save_expr (expr_p, pre_p, post_p);
8365 break;
8367 case BIT_FIELD_REF:
8368 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8369 post_p, is_gimple_lvalue, fb_either);
8370 recalculate_side_effects (*expr_p);
8371 break;
8373 case TARGET_MEM_REF:
8375 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
8377 if (TMR_BASE (*expr_p))
8378 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
8379 post_p, is_gimple_mem_ref_addr, fb_either);
8380 if (TMR_INDEX (*expr_p))
8381 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
8382 post_p, is_gimple_val, fb_rvalue);
8383 if (TMR_INDEX2 (*expr_p))
8384 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
8385 post_p, is_gimple_val, fb_rvalue);
8386 /* TMR_STEP and TMR_OFFSET are always integer constants. */
8387 ret = MIN (r0, r1);
8389 break;
8391 case NON_LVALUE_EXPR:
8392 /* This should have been stripped above. */
8393 gcc_unreachable ();
8395 case ASM_EXPR:
8396 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
8397 break;
8399 case TRY_FINALLY_EXPR:
8400 case TRY_CATCH_EXPR:
8402 gimple_seq eval, cleanup;
8403 gtry *try_;
8405 /* Calls to destructors are generated automatically in FINALLY/CATCH
8406 block. They should have location as UNKNOWN_LOCATION. However,
8407 gimplify_call_expr will reset these call stmts to input_location
8408 if it finds stmt's location is unknown. To prevent resetting for
8409 destructors, we set the input_location to unknown.
8410 Note that this only affects the destructor calls in FINALLY/CATCH
8411 block, and will automatically reset to its original value by the
8412 end of gimplify_expr. */
8413 input_location = UNKNOWN_LOCATION;
8414 eval = cleanup = NULL;
8415 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8416 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
8417 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
8418 if (gimple_seq_empty_p (cleanup))
8420 gimple_seq_add_seq (pre_p, eval);
8421 ret = GS_ALL_DONE;
8422 break;
8424 try_ = gimple_build_try (eval, cleanup,
8425 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8426 ? GIMPLE_TRY_FINALLY
8427 : GIMPLE_TRY_CATCH);
8428 if (EXPR_HAS_LOCATION (save_expr))
8429 gimple_set_location (try_, EXPR_LOCATION (save_expr));
8430 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8431 gimple_set_location (try_, saved_location);
8432 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8433 gimple_try_set_catch_is_cleanup (try_,
8434 TRY_CATCH_IS_CLEANUP (*expr_p));
8435 gimplify_seq_add_stmt (pre_p, try_);
8436 ret = GS_ALL_DONE;
8437 break;
8440 case CLEANUP_POINT_EXPR:
8441 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8442 break;
8444 case TARGET_EXPR:
8445 ret = gimplify_target_expr (expr_p, pre_p, post_p);
8446 break;
8448 case CATCH_EXPR:
8450 gimple c;
8451 gimple_seq handler = NULL;
8452 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8453 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8454 gimplify_seq_add_stmt (pre_p, c);
8455 ret = GS_ALL_DONE;
8456 break;
8459 case EH_FILTER_EXPR:
8461 gimple ehf;
8462 gimple_seq failure = NULL;
8464 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8465 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8466 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8467 gimplify_seq_add_stmt (pre_p, ehf);
8468 ret = GS_ALL_DONE;
8469 break;
8472 case OBJ_TYPE_REF:
8474 enum gimplify_status r0, r1;
8475 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8476 post_p, is_gimple_val, fb_rvalue);
8477 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8478 post_p, is_gimple_val, fb_rvalue);
8479 TREE_SIDE_EFFECTS (*expr_p) = 0;
8480 ret = MIN (r0, r1);
8482 break;
8484 case LABEL_DECL:
8485 /* We get here when taking the address of a label. We mark
8486 the label as "forced"; meaning it can never be removed and
8487 it is a potential target for any computed goto. */
8488 FORCED_LABEL (*expr_p) = 1;
8489 ret = GS_ALL_DONE;
8490 break;
8492 case STATEMENT_LIST:
8493 ret = gimplify_statement_list (expr_p, pre_p);
8494 break;
8496 case WITH_SIZE_EXPR:
8498 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8499 post_p == &internal_post ? NULL : post_p,
8500 gimple_test_f, fallback);
8501 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8502 is_gimple_val, fb_rvalue);
8503 ret = GS_ALL_DONE;
8505 break;
8507 case VAR_DECL:
8508 case PARM_DECL:
8509 ret = gimplify_var_or_parm_decl (expr_p);
8510 break;
8512 case RESULT_DECL:
8513 /* When within an OMP context, notice uses of variables. */
8514 if (gimplify_omp_ctxp)
8515 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8516 ret = GS_ALL_DONE;
8517 break;
8519 case SSA_NAME:
8520 /* Allow callbacks into the gimplifier during optimization. */
8521 ret = GS_ALL_DONE;
8522 break;
8524 case OMP_PARALLEL:
8525 gimplify_omp_parallel (expr_p, pre_p);
8526 ret = GS_ALL_DONE;
8527 break;
8529 case OMP_TASK:
8530 gimplify_omp_task (expr_p, pre_p);
8531 ret = GS_ALL_DONE;
8532 break;
8534 case OMP_FOR:
8535 case OMP_SIMD:
8536 case CILK_SIMD:
8537 case CILK_FOR:
8538 case OMP_DISTRIBUTE:
8539 case OACC_LOOP:
8540 ret = gimplify_omp_for (expr_p, pre_p);
8541 break;
8543 case OACC_CACHE:
8544 gimplify_oacc_cache (expr_p, pre_p);
8545 ret = GS_ALL_DONE;
8546 break;
8548 case OACC_HOST_DATA:
8549 case OACC_DECLARE:
8550 sorry ("directive not yet implemented");
8551 ret = GS_ALL_DONE;
8552 break;
8554 case OACC_KERNELS:
8555 if (OACC_KERNELS_COMBINED (*expr_p))
8556 sorry ("directive not yet implemented");
8557 else
8558 gimplify_omp_workshare (expr_p, pre_p);
8559 ret = GS_ALL_DONE;
8560 break;
8562 case OACC_PARALLEL:
8563 if (OACC_PARALLEL_COMBINED (*expr_p))
8564 sorry ("directive not yet implemented");
8565 else
8566 gimplify_omp_workshare (expr_p, pre_p);
8567 ret = GS_ALL_DONE;
8568 break;
8570 case OACC_DATA:
8571 case OMP_SECTIONS:
8572 case OMP_SINGLE:
8573 case OMP_TARGET:
8574 case OMP_TARGET_DATA:
8575 case OMP_TEAMS:
8576 gimplify_omp_workshare (expr_p, pre_p);
8577 ret = GS_ALL_DONE;
8578 break;
8580 case OACC_ENTER_DATA:
8581 case OACC_EXIT_DATA:
8582 case OACC_UPDATE:
8583 case OMP_TARGET_UPDATE:
8584 gimplify_omp_target_update (expr_p, pre_p);
8585 ret = GS_ALL_DONE;
8586 break;
8588 case OMP_SECTION:
8589 case OMP_MASTER:
8590 case OMP_TASKGROUP:
8591 case OMP_ORDERED:
8592 case OMP_CRITICAL:
8594 gimple_seq body = NULL;
8595 gimple g;
8597 gimplify_and_add (OMP_BODY (*expr_p), &body);
8598 switch (TREE_CODE (*expr_p))
8600 case OMP_SECTION:
8601 g = gimple_build_omp_section (body);
8602 break;
8603 case OMP_MASTER:
8604 g = gimple_build_omp_master (body);
8605 break;
8606 case OMP_TASKGROUP:
8608 gimple_seq cleanup = NULL;
8609 tree fn
8610 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8611 g = gimple_build_call (fn, 0);
8612 gimple_seq_add_stmt (&cleanup, g);
8613 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8614 body = NULL;
8615 gimple_seq_add_stmt (&body, g);
8616 g = gimple_build_omp_taskgroup (body);
8618 break;
8619 case OMP_ORDERED:
8620 g = gimple_build_omp_ordered (body);
8621 break;
8622 case OMP_CRITICAL:
8623 g = gimple_build_omp_critical (body,
8624 OMP_CRITICAL_NAME (*expr_p));
8625 break;
8626 default:
8627 gcc_unreachable ();
8629 gimplify_seq_add_stmt (pre_p, g);
8630 ret = GS_ALL_DONE;
8631 break;
8634 case OMP_ATOMIC:
8635 case OMP_ATOMIC_READ:
8636 case OMP_ATOMIC_CAPTURE_OLD:
8637 case OMP_ATOMIC_CAPTURE_NEW:
8638 ret = gimplify_omp_atomic (expr_p, pre_p);
8639 break;
8641 case TRANSACTION_EXPR:
8642 ret = gimplify_transaction (expr_p, pre_p);
8643 break;
8645 case TRUTH_AND_EXPR:
8646 case TRUTH_OR_EXPR:
8647 case TRUTH_XOR_EXPR:
8649 tree orig_type = TREE_TYPE (*expr_p);
8650 tree new_type, xop0, xop1;
8651 *expr_p = gimple_boolify (*expr_p);
8652 new_type = TREE_TYPE (*expr_p);
8653 if (!useless_type_conversion_p (orig_type, new_type))
8655 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8656 ret = GS_OK;
8657 break;
8660 /* Boolified binary truth expressions are semantically equivalent
8661 to bitwise binary expressions. Canonicalize them to the
8662 bitwise variant. */
8663 switch (TREE_CODE (*expr_p))
8665 case TRUTH_AND_EXPR:
8666 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8667 break;
8668 case TRUTH_OR_EXPR:
8669 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8670 break;
8671 case TRUTH_XOR_EXPR:
8672 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8673 break;
8674 default:
8675 break;
8677 /* Now make sure that operands have compatible type to
8678 expression's new_type. */
8679 xop0 = TREE_OPERAND (*expr_p, 0);
8680 xop1 = TREE_OPERAND (*expr_p, 1);
8681 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8682 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8683 new_type,
8684 xop0);
8685 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8686 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8687 new_type,
8688 xop1);
8689 /* Continue classified as tcc_binary. */
8690 goto expr_2;
8693 case FMA_EXPR:
8694 case VEC_COND_EXPR:
8695 case VEC_PERM_EXPR:
8696 /* Classified as tcc_expression. */
8697 goto expr_3;
8699 case POINTER_PLUS_EXPR:
8701 enum gimplify_status r0, r1;
8702 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8703 post_p, is_gimple_val, fb_rvalue);
8704 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8705 post_p, is_gimple_val, fb_rvalue);
8706 recalculate_side_effects (*expr_p);
8707 ret = MIN (r0, r1);
8708 break;
8711 case CILK_SYNC_STMT:
8713 if (!fn_contains_cilk_spawn_p (cfun))
8715 error_at (EXPR_LOCATION (*expr_p),
8716 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8717 ret = GS_ERROR;
8719 else
8721 gimplify_cilk_sync (expr_p, pre_p);
8722 ret = GS_ALL_DONE;
8724 break;
8727 default:
8728 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8730 case tcc_comparison:
8731 /* Handle comparison of objects of non scalar mode aggregates
8732 with a call to memcmp. It would be nice to only have to do
8733 this for variable-sized objects, but then we'd have to allow
8734 the same nest of reference nodes we allow for MODIFY_EXPR and
8735 that's too complex.
8737 Compare scalar mode aggregates as scalar mode values. Using
8738 memcmp for them would be very inefficient at best, and is
8739 plain wrong if bitfields are involved. */
8741 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8743 /* Vector comparisons need no boolification. */
8744 if (TREE_CODE (type) == VECTOR_TYPE)
8745 goto expr_2;
8746 else if (!AGGREGATE_TYPE_P (type))
8748 tree org_type = TREE_TYPE (*expr_p);
8749 *expr_p = gimple_boolify (*expr_p);
8750 if (!useless_type_conversion_p (org_type,
8751 TREE_TYPE (*expr_p)))
8753 *expr_p = fold_convert_loc (input_location,
8754 org_type, *expr_p);
8755 ret = GS_OK;
8757 else
8758 goto expr_2;
8760 else if (TYPE_MODE (type) != BLKmode)
8761 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8762 else
8763 ret = gimplify_variable_sized_compare (expr_p);
8765 break;
8768 /* If *EXPR_P does not need to be special-cased, handle it
8769 according to its class. */
8770 case tcc_unary:
8771 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8772 post_p, is_gimple_val, fb_rvalue);
8773 break;
8775 case tcc_binary:
8776 expr_2:
8778 enum gimplify_status r0, r1;
8780 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8781 post_p, is_gimple_val, fb_rvalue);
8782 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8783 post_p, is_gimple_val, fb_rvalue);
8785 ret = MIN (r0, r1);
8786 break;
8789 expr_3:
8791 enum gimplify_status r0, r1, r2;
8793 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8794 post_p, is_gimple_val, fb_rvalue);
8795 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8796 post_p, is_gimple_val, fb_rvalue);
8797 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8798 post_p, is_gimple_val, fb_rvalue);
8800 ret = MIN (MIN (r0, r1), r2);
8801 break;
8804 case tcc_declaration:
8805 case tcc_constant:
8806 ret = GS_ALL_DONE;
8807 goto dont_recalculate;
8809 default:
8810 gcc_unreachable ();
8813 recalculate_side_effects (*expr_p);
8815 dont_recalculate:
8816 break;
8819 gcc_assert (*expr_p || ret != GS_OK);
8821 while (ret == GS_OK);
8823 /* If we encountered an error_mark somewhere nested inside, either
8824 stub out the statement or propagate the error back out. */
8825 if (ret == GS_ERROR)
8827 if (is_statement)
8828 *expr_p = NULL;
8829 goto out;
8832 /* This was only valid as a return value from the langhook, which
8833 we handled. Make sure it doesn't escape from any other context. */
8834 gcc_assert (ret != GS_UNHANDLED);
8836 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8838 /* We aren't looking for a value, and we don't have a valid
8839 statement. If it doesn't have side-effects, throw it away. */
8840 if (!TREE_SIDE_EFFECTS (*expr_p))
8841 *expr_p = NULL;
8842 else if (!TREE_THIS_VOLATILE (*expr_p))
8844 /* This is probably a _REF that contains something nested that
8845 has side effects. Recurse through the operands to find it. */
8846 enum tree_code code = TREE_CODE (*expr_p);
8848 switch (code)
8850 case COMPONENT_REF:
8851 case REALPART_EXPR:
8852 case IMAGPART_EXPR:
8853 case VIEW_CONVERT_EXPR:
8854 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8855 gimple_test_f, fallback);
8856 break;
8858 case ARRAY_REF:
8859 case ARRAY_RANGE_REF:
8860 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8861 gimple_test_f, fallback);
8862 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8863 gimple_test_f, fallback);
8864 break;
8866 default:
8867 /* Anything else with side-effects must be converted to
8868 a valid statement before we get here. */
8869 gcc_unreachable ();
8872 *expr_p = NULL;
8874 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8875 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8877 /* Historically, the compiler has treated a bare reference
8878 to a non-BLKmode volatile lvalue as forcing a load. */
8879 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8881 /* Normally, we do not want to create a temporary for a
8882 TREE_ADDRESSABLE type because such a type should not be
8883 copied by bitwise-assignment. However, we make an
8884 exception here, as all we are doing here is ensuring that
8885 we read the bytes that make up the type. We use
8886 create_tmp_var_raw because create_tmp_var will abort when
8887 given a TREE_ADDRESSABLE type. */
8888 tree tmp = create_tmp_var_raw (type, "vol");
8889 gimple_add_tmp_var (tmp);
8890 gimplify_assign (tmp, *expr_p, pre_p);
8891 *expr_p = NULL;
8893 else
8894 /* We can't do anything useful with a volatile reference to
8895 an incomplete type, so just throw it away. Likewise for
8896 a BLKmode type, since any implicit inner load should
8897 already have been turned into an explicit one by the
8898 gimplification process. */
8899 *expr_p = NULL;
8902 /* If we are gimplifying at the statement level, we're done. Tack
8903 everything together and return. */
8904 if (fallback == fb_none || is_statement)
8906 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8907 it out for GC to reclaim it. */
8908 *expr_p = NULL_TREE;
8910 if (!gimple_seq_empty_p (internal_pre)
8911 || !gimple_seq_empty_p (internal_post))
8913 gimplify_seq_add_seq (&internal_pre, internal_post);
8914 gimplify_seq_add_seq (pre_p, internal_pre);
8917 /* The result of gimplifying *EXPR_P is going to be the last few
8918 statements in *PRE_P and *POST_P. Add location information
8919 to all the statements that were added by the gimplification
8920 helpers. */
8921 if (!gimple_seq_empty_p (*pre_p))
8922 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8924 if (!gimple_seq_empty_p (*post_p))
8925 annotate_all_with_location_after (*post_p, post_last_gsi,
8926 input_location);
8928 goto out;
8931 #ifdef ENABLE_GIMPLE_CHECKING
8932 if (*expr_p)
8934 enum tree_code code = TREE_CODE (*expr_p);
8935 /* These expressions should already be in gimple IR form. */
8936 gcc_assert (code != MODIFY_EXPR
8937 && code != ASM_EXPR
8938 && code != BIND_EXPR
8939 && code != CATCH_EXPR
8940 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8941 && code != EH_FILTER_EXPR
8942 && code != GOTO_EXPR
8943 && code != LABEL_EXPR
8944 && code != LOOP_EXPR
8945 && code != SWITCH_EXPR
8946 && code != TRY_FINALLY_EXPR
8947 && code != OACC_PARALLEL
8948 && code != OACC_KERNELS
8949 && code != OACC_DATA
8950 && code != OACC_HOST_DATA
8951 && code != OACC_DECLARE
8952 && code != OACC_UPDATE
8953 && code != OACC_ENTER_DATA
8954 && code != OACC_EXIT_DATA
8955 && code != OACC_CACHE
8956 && code != OMP_CRITICAL
8957 && code != OMP_FOR
8958 && code != OACC_LOOP
8959 && code != OMP_MASTER
8960 && code != OMP_TASKGROUP
8961 && code != OMP_ORDERED
8962 && code != OMP_PARALLEL
8963 && code != OMP_SECTIONS
8964 && code != OMP_SECTION
8965 && code != OMP_SINGLE);
8967 #endif
8969 /* Otherwise we're gimplifying a subexpression, so the resulting
8970 value is interesting. If it's a valid operand that matches
8971 GIMPLE_TEST_F, we're done. Unless we are handling some
8972 post-effects internally; if that's the case, we need to copy into
8973 a temporary before adding the post-effects to POST_P. */
8974 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8975 goto out;
8977 /* Otherwise, we need to create a new temporary for the gimplified
8978 expression. */
8980 /* We can't return an lvalue if we have an internal postqueue. The
8981 object the lvalue refers to would (probably) be modified by the
8982 postqueue; we need to copy the value out first, which means an
8983 rvalue. */
8984 if ((fallback & fb_lvalue)
8985 && gimple_seq_empty_p (internal_post)
8986 && is_gimple_addressable (*expr_p))
8988 /* An lvalue will do. Take the address of the expression, store it
8989 in a temporary, and replace the expression with an INDIRECT_REF of
8990 that temporary. */
8991 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8992 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8993 *expr_p = build_simple_mem_ref (tmp);
8995 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8997 /* An rvalue will do. Assign the gimplified expression into a
8998 new temporary TMP and replace the original expression with
8999 TMP. First, make sure that the expression has a type so that
9000 it can be assigned into a temporary. */
9001 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
9002 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
9004 else
9006 #ifdef ENABLE_GIMPLE_CHECKING
9007 if (!(fallback & fb_mayfail))
9009 fprintf (stderr, "gimplification failed:\n");
9010 print_generic_expr (stderr, *expr_p, 0);
9011 debug_tree (*expr_p);
9012 internal_error ("gimplification failed");
9014 #endif
9015 gcc_assert (fallback & fb_mayfail);
9017 /* If this is an asm statement, and the user asked for the
9018 impossible, don't die. Fail and let gimplify_asm_expr
9019 issue an error. */
9020 ret = GS_ERROR;
9021 goto out;
9024 /* Make sure the temporary matches our predicate. */
9025 gcc_assert ((*gimple_test_f) (*expr_p));
9027 if (!gimple_seq_empty_p (internal_post))
9029 annotate_all_with_location (internal_post, input_location);
9030 gimplify_seq_add_seq (pre_p, internal_post);
9033 out:
9034 input_location = saved_location;
9035 return ret;
9038 /* Look through TYPE for variable-sized objects and gimplify each such
9039 size that we find. Add to LIST_P any statements generated. */
9041 void
9042 gimplify_type_sizes (tree type, gimple_seq *list_p)
9044 tree field, t;
9046 if (type == NULL || type == error_mark_node)
9047 return;
9049 /* We first do the main variant, then copy into any other variants. */
9050 type = TYPE_MAIN_VARIANT (type);
9052 /* Avoid infinite recursion. */
9053 if (TYPE_SIZES_GIMPLIFIED (type))
9054 return;
9056 TYPE_SIZES_GIMPLIFIED (type) = 1;
9058 switch (TREE_CODE (type))
9060 case INTEGER_TYPE:
9061 case ENUMERAL_TYPE:
9062 case BOOLEAN_TYPE:
9063 case REAL_TYPE:
9064 case FIXED_POINT_TYPE:
9065 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
9066 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
9068 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
9070 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
9071 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
9073 break;
9075 case ARRAY_TYPE:
9076 /* These types may not have declarations, so handle them here. */
9077 gimplify_type_sizes (TREE_TYPE (type), list_p);
9078 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
9079 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
9080 with assigned stack slots, for -O1+ -g they should be tracked
9081 by VTA. */
9082 if (!(TYPE_NAME (type)
9083 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
9084 && DECL_IGNORED_P (TYPE_NAME (type)))
9085 && TYPE_DOMAIN (type)
9086 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
9088 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
9089 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
9090 DECL_IGNORED_P (t) = 0;
9091 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9092 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
9093 DECL_IGNORED_P (t) = 0;
9095 break;
9097 case RECORD_TYPE:
9098 case UNION_TYPE:
9099 case QUAL_UNION_TYPE:
9100 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9101 if (TREE_CODE (field) == FIELD_DECL)
9103 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9104 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
9105 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
9106 gimplify_type_sizes (TREE_TYPE (field), list_p);
9108 break;
9110 case POINTER_TYPE:
9111 case REFERENCE_TYPE:
9112 /* We used to recurse on the pointed-to type here, which turned out to
9113 be incorrect because its definition might refer to variables not
9114 yet initialized at this point if a forward declaration is involved.
9116 It was actually useful for anonymous pointed-to types to ensure
9117 that the sizes evaluation dominates every possible later use of the
9118 values. Restricting to such types here would be safe since there
9119 is no possible forward declaration around, but would introduce an
9120 undesirable middle-end semantic to anonymity. We then defer to
9121 front-ends the responsibility of ensuring that the sizes are
9122 evaluated both early and late enough, e.g. by attaching artificial
9123 type declarations to the tree. */
9124 break;
9126 default:
9127 break;
9130 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
9131 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
9133 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
9135 TYPE_SIZE (t) = TYPE_SIZE (type);
9136 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
9137 TYPE_SIZES_GIMPLIFIED (t) = 1;
9141 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
9142 a size or position, has had all of its SAVE_EXPRs evaluated.
9143 We add any required statements to *STMT_P. */
9145 void
9146 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
9148 tree expr = *expr_p;
9150 /* We don't do anything if the value isn't there, is constant, or contains
9151 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
9152 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
9153 will want to replace it with a new variable, but that will cause problems
9154 if this type is from outside the function. It's OK to have that here. */
9155 if (is_gimple_sizepos (expr))
9156 return;
9158 *expr_p = unshare_expr (expr);
9160 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
9163 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
9164 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
9165 is true, also gimplify the parameters. */
9167 gbind *
9168 gimplify_body (tree fndecl, bool do_parms)
9170 location_t saved_location = input_location;
9171 gimple_seq parm_stmts, seq;
9172 gimple outer_stmt;
9173 gbind *outer_bind;
9174 struct cgraph_node *cgn;
9176 timevar_push (TV_TREE_GIMPLIFY);
9178 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
9179 gimplification. */
9180 default_rtl_profile ();
9182 gcc_assert (gimplify_ctxp == NULL);
9183 push_gimplify_context ();
9185 if (flag_openacc || flag_openmp)
9187 gcc_assert (gimplify_omp_ctxp == NULL);
9188 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
9189 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
9192 /* Unshare most shared trees in the body and in that of any nested functions.
9193 It would seem we don't have to do this for nested functions because
9194 they are supposed to be output and then the outer function gimplified
9195 first, but the g++ front end doesn't always do it that way. */
9196 unshare_body (fndecl);
9197 unvisit_body (fndecl);
9199 cgn = cgraph_node::get (fndecl);
9200 if (cgn && cgn->origin)
9201 nonlocal_vlas = new hash_set<tree>;
9203 /* Make sure input_location isn't set to something weird. */
9204 input_location = DECL_SOURCE_LOCATION (fndecl);
9206 /* Resolve callee-copies. This has to be done before processing
9207 the body so that DECL_VALUE_EXPR gets processed correctly. */
9208 parm_stmts = do_parms ? gimplify_parameters () : NULL;
9210 /* Gimplify the function's body. */
9211 seq = NULL;
9212 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
9213 outer_stmt = gimple_seq_first_stmt (seq);
9214 if (!outer_stmt)
9216 outer_stmt = gimple_build_nop ();
9217 gimplify_seq_add_stmt (&seq, outer_stmt);
9220 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
9221 not the case, wrap everything in a GIMPLE_BIND to make it so. */
9222 if (gimple_code (outer_stmt) == GIMPLE_BIND
9223 && gimple_seq_first (seq) == gimple_seq_last (seq))
9224 outer_bind = as_a <gbind *> (outer_stmt);
9225 else
9226 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
9228 DECL_SAVED_TREE (fndecl) = NULL_TREE;
9230 /* If we had callee-copies statements, insert them at the beginning
9231 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
9232 if (!gimple_seq_empty_p (parm_stmts))
9234 tree parm;
9236 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
9237 gimple_bind_set_body (outer_bind, parm_stmts);
9239 for (parm = DECL_ARGUMENTS (current_function_decl);
9240 parm; parm = DECL_CHAIN (parm))
9241 if (DECL_HAS_VALUE_EXPR_P (parm))
9243 DECL_HAS_VALUE_EXPR_P (parm) = 0;
9244 DECL_IGNORED_P (parm) = 0;
9248 if (nonlocal_vlas)
9250 if (nonlocal_vla_vars)
9252 /* tree-nested.c may later on call declare_vars (..., true);
9253 which relies on BLOCK_VARS chain to be the tail of the
9254 gimple_bind_vars chain. Ensure we don't violate that
9255 assumption. */
9256 if (gimple_bind_block (outer_bind)
9257 == DECL_INITIAL (current_function_decl))
9258 declare_vars (nonlocal_vla_vars, outer_bind, true);
9259 else
9260 BLOCK_VARS (DECL_INITIAL (current_function_decl))
9261 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
9262 nonlocal_vla_vars);
9263 nonlocal_vla_vars = NULL_TREE;
9265 delete nonlocal_vlas;
9266 nonlocal_vlas = NULL;
9269 if ((flag_openacc || flag_openmp || flag_openmp_simd)
9270 && gimplify_omp_ctxp)
9272 delete_omp_context (gimplify_omp_ctxp);
9273 gimplify_omp_ctxp = NULL;
9276 pop_gimplify_context (outer_bind);
9277 gcc_assert (gimplify_ctxp == NULL);
9279 #ifdef ENABLE_CHECKING
9280 if (!seen_error ())
9281 verify_gimple_in_seq (gimple_bind_body (outer_bind));
9282 #endif
9284 timevar_pop (TV_TREE_GIMPLIFY);
9285 input_location = saved_location;
9287 return outer_bind;
9290 typedef char *char_p; /* For DEF_VEC_P. */
9292 /* Return whether we should exclude FNDECL from instrumentation. */
9294 static bool
9295 flag_instrument_functions_exclude_p (tree fndecl)
9297 vec<char_p> *v;
9299 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
9300 if (v && v->length () > 0)
9302 const char *name;
9303 int i;
9304 char *s;
9306 name = lang_hooks.decl_printable_name (fndecl, 0);
9307 FOR_EACH_VEC_ELT (*v, i, s)
9308 if (strstr (name, s) != NULL)
9309 return true;
9312 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
9313 if (v && v->length () > 0)
9315 const char *name;
9316 int i;
9317 char *s;
9319 name = DECL_SOURCE_FILE (fndecl);
9320 FOR_EACH_VEC_ELT (*v, i, s)
9321 if (strstr (name, s) != NULL)
9322 return true;
9325 return false;
9328 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
9329 node for the function we want to gimplify.
9331 Return the sequence of GIMPLE statements corresponding to the body
9332 of FNDECL. */
9334 void
9335 gimplify_function_tree (tree fndecl)
9337 tree parm, ret;
9338 gimple_seq seq;
9339 gbind *bind;
9341 gcc_assert (!gimple_body (fndecl));
9343 if (DECL_STRUCT_FUNCTION (fndecl))
9344 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
9345 else
9346 push_struct_function (fndecl);
9348 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
9349 if necessary. */
9350 cfun->curr_properties |= PROP_gimple_lva;
9352 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
9354 /* Preliminarily mark non-addressed complex variables as eligible
9355 for promotion to gimple registers. We'll transform their uses
9356 as we find them. */
9357 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
9358 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
9359 && !TREE_THIS_VOLATILE (parm)
9360 && !needs_to_live_in_memory (parm))
9361 DECL_GIMPLE_REG_P (parm) = 1;
9364 ret = DECL_RESULT (fndecl);
9365 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
9366 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
9367 && !needs_to_live_in_memory (ret))
9368 DECL_GIMPLE_REG_P (ret) = 1;
9370 bind = gimplify_body (fndecl, true);
9372 /* The tree body of the function is no longer needed, replace it
9373 with the new GIMPLE body. */
9374 seq = NULL;
9375 gimple_seq_add_stmt (&seq, bind);
9376 gimple_set_body (fndecl, seq);
9378 /* If we're instrumenting function entry/exit, then prepend the call to
9379 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
9380 catch the exit hook. */
9381 /* ??? Add some way to ignore exceptions for this TFE. */
9382 if (flag_instrument_function_entry_exit
9383 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
9384 && !flag_instrument_functions_exclude_p (fndecl))
9386 tree x;
9387 gbind *new_bind;
9388 gimple tf;
9389 gimple_seq cleanup = NULL, body = NULL;
9390 tree tmp_var;
9391 gcall *call;
9393 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
9394 call = gimple_build_call (x, 1, integer_zero_node);
9395 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
9396 gimple_call_set_lhs (call, tmp_var);
9397 gimplify_seq_add_stmt (&cleanup, call);
9398 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
9399 call = gimple_build_call (x, 2,
9400 build_fold_addr_expr (current_function_decl),
9401 tmp_var);
9402 gimplify_seq_add_stmt (&cleanup, call);
9403 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
9405 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
9406 call = gimple_build_call (x, 1, integer_zero_node);
9407 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
9408 gimple_call_set_lhs (call, tmp_var);
9409 gimplify_seq_add_stmt (&body, call);
9410 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
9411 call = gimple_build_call (x, 2,
9412 build_fold_addr_expr (current_function_decl),
9413 tmp_var);
9414 gimplify_seq_add_stmt (&body, call);
9415 gimplify_seq_add_stmt (&body, tf);
9416 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
9417 /* Clear the block for BIND, since it is no longer directly inside
9418 the function, but within a try block. */
9419 gimple_bind_set_block (bind, NULL);
9421 /* Replace the current function body with the body
9422 wrapped in the try/finally TF. */
9423 seq = NULL;
9424 gimple_seq_add_stmt (&seq, new_bind);
9425 gimple_set_body (fndecl, seq);
9426 bind = new_bind;
9429 if ((flag_sanitize & SANITIZE_THREAD) != 0
9430 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
9432 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
9433 gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
9434 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
9435 /* Clear the block for BIND, since it is no longer directly inside
9436 the function, but within a try block. */
9437 gimple_bind_set_block (bind, NULL);
9438 /* Replace the current function body with the body
9439 wrapped in the try/finally TF. */
9440 seq = NULL;
9441 gimple_seq_add_stmt (&seq, new_bind);
9442 gimple_set_body (fndecl, seq);
9445 DECL_SAVED_TREE (fndecl) = NULL_TREE;
9446 cfun->curr_properties |= PROP_gimple_any;
9448 pop_cfun ();
9451 /* Return a dummy expression of type TYPE in order to keep going after an
9452 error. */
9454 static tree
9455 dummy_object (tree type)
9457 tree t = build_int_cst (build_pointer_type (type), 0);
9458 return build2 (MEM_REF, type, t, t);
9461 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
9462 builtin function, but a very special sort of operator. */
9464 enum gimplify_status
9465 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
9466 gimple_seq *post_p ATTRIBUTE_UNUSED)
9468 tree promoted_type, have_va_type;
9469 tree valist = TREE_OPERAND (*expr_p, 0);
9470 tree type = TREE_TYPE (*expr_p);
9471 tree t, tag;
9472 location_t loc = EXPR_LOCATION (*expr_p);
9474 /* Verify that valist is of the proper type. */
9475 have_va_type = TREE_TYPE (valist);
9476 if (have_va_type == error_mark_node)
9477 return GS_ERROR;
9478 have_va_type = targetm.canonical_va_list_type (have_va_type);
9480 if (have_va_type == NULL_TREE)
9482 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
9483 return GS_ERROR;
9486 /* Generate a diagnostic for requesting data of a type that cannot
9487 be passed through `...' due to type promotion at the call site. */
9488 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
9489 != type)
9491 static bool gave_help;
9492 bool warned;
9494 /* Unfortunately, this is merely undefined, rather than a constraint
9495 violation, so we cannot make this an error. If this call is never
9496 executed, the program is still strictly conforming. */
9497 warned = warning_at (loc, 0,
9498 "%qT is promoted to %qT when passed through %<...%>",
9499 type, promoted_type);
9500 if (!gave_help && warned)
9502 gave_help = true;
9503 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
9504 promoted_type, type);
9507 /* We can, however, treat "undefined" any way we please.
9508 Call abort to encourage the user to fix the program. */
9509 if (warned)
9510 inform (loc, "if this code is reached, the program will abort");
9511 /* Before the abort, allow the evaluation of the va_list
9512 expression to exit or longjmp. */
9513 gimplify_and_add (valist, pre_p);
9514 t = build_call_expr_loc (loc,
9515 builtin_decl_implicit (BUILT_IN_TRAP), 0);
9516 gimplify_and_add (t, pre_p);
9518 /* This is dead code, but go ahead and finish so that the
9519 mode of the result comes out right. */
9520 *expr_p = dummy_object (type);
9521 return GS_ALL_DONE;
9524 tag = build_int_cst (build_pointer_type (type), 0);
9525 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 2, valist, tag);
9527 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
9528 needs to be expanded. */
9529 cfun->curr_properties &= ~PROP_gimple_lva;
9531 return GS_OK;
9534 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9536 DST/SRC are the destination and source respectively. You can pass
9537 ungimplified trees in DST or SRC, in which case they will be
9538 converted to a gimple operand if necessary.
9540 This function returns the newly created GIMPLE_ASSIGN tuple. */
9542 gimple
9543 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9545 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9546 gimplify_and_add (t, seq_p);
9547 ggc_free (t);
9548 return gimple_seq_last_stmt (*seq_p);
9551 inline hashval_t
9552 gimplify_hasher::hash (const elt_t *p)
9554 tree t = p->val;
9555 return iterative_hash_expr (t, 0);
9558 inline bool
9559 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
9561 tree t1 = p1->val;
9562 tree t2 = p2->val;
9563 enum tree_code code = TREE_CODE (t1);
9565 if (TREE_CODE (t2) != code
9566 || TREE_TYPE (t1) != TREE_TYPE (t2))
9567 return false;
9569 if (!operand_equal_p (t1, t2, 0))
9570 return false;
9572 #ifdef ENABLE_CHECKING
9573 /* Only allow them to compare equal if they also hash equal; otherwise
9574 results are nondeterminate, and we fail bootstrap comparison. */
9575 gcc_assert (hash (p1) == hash (p2));
9576 #endif
9578 return true;