Don't treat zero-sized ranges as overlapping
[official-gcc.git] / gcc / gimplify.c
blobc4fd5038d922441b27cca50202abe758219fe5e3
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "cilk.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "dbgcnt.h"
70 /* Hash set of poisoned variables in a bind expr. */
71 static hash_set<tree> *asan_poisoned_variables = NULL;
73 enum gimplify_omp_var_data
75 GOVD_SEEN = 1,
76 GOVD_EXPLICIT = 2,
77 GOVD_SHARED = 4,
78 GOVD_PRIVATE = 8,
79 GOVD_FIRSTPRIVATE = 16,
80 GOVD_LASTPRIVATE = 32,
81 GOVD_REDUCTION = 64,
82 GOVD_LOCAL = 128,
83 GOVD_MAP = 256,
84 GOVD_DEBUG_PRIVATE = 512,
85 GOVD_PRIVATE_OUTER_REF = 1024,
86 GOVD_LINEAR = 2048,
87 GOVD_ALIGNED = 4096,
89 /* Flag for GOVD_MAP: don't copy back. */
90 GOVD_MAP_TO_ONLY = 8192,
92 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
93 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
95 GOVD_MAP_0LEN_ARRAY = 32768,
97 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
98 GOVD_MAP_ALWAYS_TO = 65536,
100 /* Flag for shared vars that are or might be stored to in the region. */
101 GOVD_WRITTEN = 131072,
103 /* Flag for GOVD_MAP, if it is a forced mapping. */
104 GOVD_MAP_FORCE = 262144,
106 /* Flag for GOVD_MAP: must be present already. */
107 GOVD_MAP_FORCE_PRESENT = 524288,
109 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
110 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
111 | GOVD_LOCAL)
115 enum omp_region_type
117 ORT_WORKSHARE = 0x00,
118 ORT_SIMD = 0x01,
120 ORT_PARALLEL = 0x02,
121 ORT_COMBINED_PARALLEL = 0x03,
123 ORT_TASK = 0x04,
124 ORT_UNTIED_TASK = 0x05,
126 ORT_TEAMS = 0x08,
127 ORT_COMBINED_TEAMS = 0x09,
129 /* Data region. */
130 ORT_TARGET_DATA = 0x10,
132 /* Data region with offloading. */
133 ORT_TARGET = 0x20,
134 ORT_COMBINED_TARGET = 0x21,
136 /* OpenACC variants. */
137 ORT_ACC = 0x40, /* A generic OpenACC region. */
138 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
139 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
140 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
141 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
143 /* Dummy OpenMP region, used to disable expansion of
144 DECL_VALUE_EXPRs in taskloop pre body. */
145 ORT_NONE = 0x100
148 /* Gimplify hashtable helper. */
150 struct gimplify_hasher : free_ptr_hash <elt_t>
152 static inline hashval_t hash (const elt_t *);
153 static inline bool equal (const elt_t *, const elt_t *);
156 struct gimplify_ctx
158 struct gimplify_ctx *prev_context;
160 vec<gbind *> bind_expr_stack;
161 tree temps;
162 gimple_seq conditional_cleanups;
163 tree exit_label;
164 tree return_temp;
166 vec<tree> case_labels;
167 hash_set<tree> *live_switch_vars;
168 /* The formal temporary table. Should this be persistent? */
169 hash_table<gimplify_hasher> *temp_htab;
171 int conditions;
172 unsigned into_ssa : 1;
173 unsigned allow_rhs_cond_expr : 1;
174 unsigned in_cleanup_point_expr : 1;
175 unsigned keep_stack : 1;
176 unsigned save_stack : 1;
177 unsigned in_switch_expr : 1;
180 struct gimplify_omp_ctx
182 struct gimplify_omp_ctx *outer_context;
183 splay_tree variables;
184 hash_set<tree> *privatized_types;
185 /* Iteration variables in an OMP_FOR. */
186 vec<tree> loop_iter_var;
187 location_t location;
188 enum omp_clause_default_kind default_kind;
189 enum omp_region_type region_type;
190 bool combined_loop;
191 bool distribute;
192 bool target_map_scalars_firstprivate;
193 bool target_map_pointers_as_0len_arrays;
194 bool target_firstprivatize_array_bases;
197 static struct gimplify_ctx *gimplify_ctxp;
198 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
200 /* Forward declaration. */
201 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
202 static hash_map<tree, tree> *oacc_declare_returns;
203 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
204 bool (*) (tree), fallback_t, bool);
206 /* Shorter alias name for the above function for use in gimplify.c
207 only. */
209 static inline void
210 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
212 gimple_seq_add_stmt_without_update (seq_p, gs);
215 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
216 NULL, a new sequence is allocated. This function is
217 similar to gimple_seq_add_seq, but does not scan the operands.
218 During gimplification, we need to manipulate statement sequences
219 before the def/use vectors have been constructed. */
221 static void
222 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
224 gimple_stmt_iterator si;
226 if (src == NULL)
227 return;
229 si = gsi_last (*dst_p);
230 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
234 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
235 and popping gimplify contexts. */
237 static struct gimplify_ctx *ctx_pool = NULL;
239 /* Return a gimplify context struct from the pool. */
241 static inline struct gimplify_ctx *
242 ctx_alloc (void)
244 struct gimplify_ctx * c = ctx_pool;
246 if (c)
247 ctx_pool = c->prev_context;
248 else
249 c = XNEW (struct gimplify_ctx);
251 memset (c, '\0', sizeof (*c));
252 return c;
255 /* Put gimplify context C back into the pool. */
257 static inline void
258 ctx_free (struct gimplify_ctx *c)
260 c->prev_context = ctx_pool;
261 ctx_pool = c;
264 /* Free allocated ctx stack memory. */
266 void
267 free_gimplify_stack (void)
269 struct gimplify_ctx *c;
271 while ((c = ctx_pool))
273 ctx_pool = c->prev_context;
274 free (c);
279 /* Set up a context for the gimplifier. */
281 void
282 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
284 struct gimplify_ctx *c = ctx_alloc ();
286 c->prev_context = gimplify_ctxp;
287 gimplify_ctxp = c;
288 gimplify_ctxp->into_ssa = in_ssa;
289 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
292 /* Tear down a context for the gimplifier. If BODY is non-null, then
293 put the temporaries into the outer BIND_EXPR. Otherwise, put them
294 in the local_decls.
296 BODY is not a sequence, but the first tuple in a sequence. */
298 void
299 pop_gimplify_context (gimple *body)
301 struct gimplify_ctx *c = gimplify_ctxp;
303 gcc_assert (c
304 && (!c->bind_expr_stack.exists ()
305 || c->bind_expr_stack.is_empty ()));
306 c->bind_expr_stack.release ();
307 gimplify_ctxp = c->prev_context;
309 if (body)
310 declare_vars (c->temps, body, false);
311 else
312 record_vars (c->temps);
314 delete c->temp_htab;
315 c->temp_htab = NULL;
316 ctx_free (c);
319 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
321 static void
322 gimple_push_bind_expr (gbind *bind_stmt)
324 gimplify_ctxp->bind_expr_stack.reserve (8);
325 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
328 /* Pop the first element off the stack of bindings. */
330 static void
331 gimple_pop_bind_expr (void)
333 gimplify_ctxp->bind_expr_stack.pop ();
336 /* Return the first element of the stack of bindings. */
338 gbind *
339 gimple_current_bind_expr (void)
341 return gimplify_ctxp->bind_expr_stack.last ();
344 /* Return the stack of bindings created during gimplification. */
346 vec<gbind *>
347 gimple_bind_expr_stack (void)
349 return gimplify_ctxp->bind_expr_stack;
352 /* Return true iff there is a COND_EXPR between us and the innermost
353 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
355 static bool
356 gimple_conditional_context (void)
358 return gimplify_ctxp->conditions > 0;
361 /* Note that we've entered a COND_EXPR. */
363 static void
364 gimple_push_condition (void)
366 #ifdef ENABLE_GIMPLE_CHECKING
367 if (gimplify_ctxp->conditions == 0)
368 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
369 #endif
370 ++(gimplify_ctxp->conditions);
373 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
374 now, add any conditional cleanups we've seen to the prequeue. */
376 static void
377 gimple_pop_condition (gimple_seq *pre_p)
379 int conds = --(gimplify_ctxp->conditions);
381 gcc_assert (conds >= 0);
382 if (conds == 0)
384 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
385 gimplify_ctxp->conditional_cleanups = NULL;
389 /* A stable comparison routine for use with splay trees and DECLs. */
391 static int
392 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
394 tree a = (tree) xa;
395 tree b = (tree) xb;
397 return DECL_UID (a) - DECL_UID (b);
400 /* Create a new omp construct that deals with variable remapping. */
402 static struct gimplify_omp_ctx *
403 new_omp_context (enum omp_region_type region_type)
405 struct gimplify_omp_ctx *c;
407 c = XCNEW (struct gimplify_omp_ctx);
408 c->outer_context = gimplify_omp_ctxp;
409 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
410 c->privatized_types = new hash_set<tree>;
411 c->location = input_location;
412 c->region_type = region_type;
413 if ((region_type & ORT_TASK) == 0)
414 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
415 else
416 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
418 return c;
421 /* Destroy an omp construct that deals with variable remapping. */
423 static void
424 delete_omp_context (struct gimplify_omp_ctx *c)
426 splay_tree_delete (c->variables);
427 delete c->privatized_types;
428 c->loop_iter_var.release ();
429 XDELETE (c);
432 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
433 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
435 /* Both gimplify the statement T and append it to *SEQ_P. This function
436 behaves exactly as gimplify_stmt, but you don't have to pass T as a
437 reference. */
439 void
440 gimplify_and_add (tree t, gimple_seq *seq_p)
442 gimplify_stmt (&t, seq_p);
445 /* Gimplify statement T into sequence *SEQ_P, and return the first
446 tuple in the sequence of generated tuples for this statement.
447 Return NULL if gimplifying T produced no tuples. */
449 static gimple *
450 gimplify_and_return_first (tree t, gimple_seq *seq_p)
452 gimple_stmt_iterator last = gsi_last (*seq_p);
454 gimplify_and_add (t, seq_p);
456 if (!gsi_end_p (last))
458 gsi_next (&last);
459 return gsi_stmt (last);
461 else
462 return gimple_seq_first_stmt (*seq_p);
465 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
466 LHS, or for a call argument. */
468 static bool
469 is_gimple_mem_rhs (tree t)
471 /* If we're dealing with a renamable type, either source or dest must be
472 a renamed variable. */
473 if (is_gimple_reg_type (TREE_TYPE (t)))
474 return is_gimple_val (t);
475 else
476 return is_gimple_val (t) || is_gimple_lvalue (t);
479 /* Return true if T is a CALL_EXPR or an expression that can be
480 assigned to a temporary. Note that this predicate should only be
481 used during gimplification. See the rationale for this in
482 gimplify_modify_expr. */
484 static bool
485 is_gimple_reg_rhs_or_call (tree t)
487 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
488 || TREE_CODE (t) == CALL_EXPR);
491 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
492 this predicate should only be used during gimplification. See the
493 rationale for this in gimplify_modify_expr. */
495 static bool
496 is_gimple_mem_rhs_or_call (tree t)
498 /* If we're dealing with a renamable type, either source or dest must be
499 a renamed variable. */
500 if (is_gimple_reg_type (TREE_TYPE (t)))
501 return is_gimple_val (t);
502 else
503 return (is_gimple_val (t)
504 || is_gimple_lvalue (t)
505 || TREE_CLOBBER_P (t)
506 || TREE_CODE (t) == CALL_EXPR);
509 /* Create a temporary with a name derived from VAL. Subroutine of
510 lookup_tmp_var; nobody else should call this function. */
512 static inline tree
513 create_tmp_from_val (tree val)
515 /* Drop all qualifiers and address-space information from the value type. */
516 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
517 tree var = create_tmp_var (type, get_name (val));
518 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
519 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
520 DECL_GIMPLE_REG_P (var) = 1;
521 return var;
524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
525 an existing expression temporary. */
527 static tree
528 lookup_tmp_var (tree val, bool is_formal)
530 tree ret;
532 /* If not optimizing, never really reuse a temporary. local-alloc
533 won't allocate any variable that is used in more than one basic
534 block, which means it will go into memory, causing much extra
535 work in reload and final and poorer code generation, outweighing
536 the extra memory allocation here. */
537 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
538 ret = create_tmp_from_val (val);
539 else
541 elt_t elt, *elt_p;
542 elt_t **slot;
544 elt.val = val;
545 if (!gimplify_ctxp->temp_htab)
546 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
547 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
548 if (*slot == NULL)
550 elt_p = XNEW (elt_t);
551 elt_p->val = val;
552 elt_p->temp = ret = create_tmp_from_val (val);
553 *slot = elt_p;
555 else
557 elt_p = *slot;
558 ret = elt_p->temp;
562 return ret;
565 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
567 static tree
568 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
569 bool is_formal, bool allow_ssa)
571 tree t, mod;
573 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
574 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
575 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
576 fb_rvalue);
578 if (allow_ssa
579 && gimplify_ctxp->into_ssa
580 && is_gimple_reg_type (TREE_TYPE (val)))
582 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
583 if (! gimple_in_ssa_p (cfun))
585 const char *name = get_name (val);
586 if (name)
587 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
590 else
591 t = lookup_tmp_var (val, is_formal);
593 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
595 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
597 /* gimplify_modify_expr might want to reduce this further. */
598 gimplify_and_add (mod, pre_p);
599 ggc_free (mod);
601 return t;
604 /* Return a formal temporary variable initialized with VAL. PRE_P is as
605 in gimplify_expr. Only use this function if:
607 1) The value of the unfactored expression represented by VAL will not
608 change between the initialization and use of the temporary, and
609 2) The temporary will not be otherwise modified.
611 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
612 and #2 means it is inappropriate for && temps.
614 For other cases, use get_initialized_tmp_var instead. */
616 tree
617 get_formal_tmp_var (tree val, gimple_seq *pre_p)
619 return internal_get_tmp_var (val, pre_p, NULL, true, true);
622 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
623 are as in gimplify_expr. */
625 tree
626 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
627 bool allow_ssa)
629 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
632 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
633 generate debug info for them; otherwise don't. */
635 void
636 declare_vars (tree vars, gimple *gs, bool debug_info)
638 tree last = vars;
639 if (last)
641 tree temps, block;
643 gbind *scope = as_a <gbind *> (gs);
645 temps = nreverse (last);
647 block = gimple_bind_block (scope);
648 gcc_assert (!block || TREE_CODE (block) == BLOCK);
649 if (!block || !debug_info)
651 DECL_CHAIN (last) = gimple_bind_vars (scope);
652 gimple_bind_set_vars (scope, temps);
654 else
656 /* We need to attach the nodes both to the BIND_EXPR and to its
657 associated BLOCK for debugging purposes. The key point here
658 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
659 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
660 if (BLOCK_VARS (block))
661 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
662 else
664 gimple_bind_set_vars (scope,
665 chainon (gimple_bind_vars (scope), temps));
666 BLOCK_VARS (block) = temps;
672 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
673 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
674 no such upper bound can be obtained. */
676 static void
677 force_constant_size (tree var)
679 /* The only attempt we make is by querying the maximum size of objects
680 of the variable's type. */
682 HOST_WIDE_INT max_size;
684 gcc_assert (VAR_P (var));
686 max_size = max_int_size_in_bytes (TREE_TYPE (var));
688 gcc_assert (max_size >= 0);
690 DECL_SIZE_UNIT (var)
691 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
692 DECL_SIZE (var)
693 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
696 /* Push the temporary variable TMP into the current binding. */
698 void
699 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
701 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
703 /* Later processing assumes that the object size is constant, which might
704 not be true at this point. Force the use of a constant upper bound in
705 this case. */
706 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
707 force_constant_size (tmp);
709 DECL_CONTEXT (tmp) = fn->decl;
710 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
712 record_vars_into (tmp, fn->decl);
715 /* Push the temporary variable TMP into the current binding. */
717 void
718 gimple_add_tmp_var (tree tmp)
720 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
726 force_constant_size (tmp);
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
731 if (gimplify_ctxp)
733 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx
741 && (ctx->region_type == ORT_WORKSHARE
742 || ctx->region_type == ORT_SIMD
743 || ctx->region_type == ORT_ACC))
744 ctx = ctx->outer_context;
745 if (ctx)
746 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
749 else if (cfun)
750 record_vars (tmp);
751 else
753 gimple_seq body_seq;
755 /* This case is for nested functions. We need to expose the locals
756 they create. */
757 body_seq = gimple_body (current_function_decl);
758 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
764 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
765 nodes that are referenced more than once in GENERIC functions. This is
766 necessary because gimplification (translation into GIMPLE) is performed
767 by modifying tree nodes in-place, so gimplication of a shared node in a
768 first context could generate an invalid GIMPLE form in a second context.
770 This is achieved with a simple mark/copy/unmark algorithm that walks the
771 GENERIC representation top-down, marks nodes with TREE_VISITED the first
772 time it encounters them, duplicates them if they already have TREE_VISITED
773 set, and finally removes the TREE_VISITED marks it has set.
775 The algorithm works only at the function level, i.e. it generates a GENERIC
776 representation of a function with no nodes shared within the function when
777 passed a GENERIC function (except for nodes that are allowed to be shared).
779 At the global level, it is also necessary to unshare tree nodes that are
780 referenced in more than one function, for the same aforementioned reason.
781 This requires some cooperation from the front-end. There are 2 strategies:
783 1. Manual unsharing. The front-end needs to call unshare_expr on every
784 expression that might end up being shared across functions.
786 2. Deep unsharing. This is an extension of regular unsharing. Instead
787 of calling unshare_expr on expressions that might be shared across
788 functions, the front-end pre-marks them with TREE_VISITED. This will
789 ensure that they are unshared on the first reference within functions
790 when the regular unsharing algorithm runs. The counterpart is that
791 this algorithm must look deeper than for manual unsharing, which is
792 specified by LANG_HOOKS_DEEP_UNSHARING.
794 If there are only few specific cases of node sharing across functions, it is
795 probably easier for a front-end to unshare the expressions manually. On the
796 contrary, if the expressions generated at the global level are as widespread
797 as expressions generated within functions, deep unsharing is very likely the
798 way to go. */
800 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
801 These nodes model computations that must be done once. If we were to
802 unshare something like SAVE_EXPR(i++), the gimplification process would
803 create wrong code. However, if DATA is non-null, it must hold a pointer
804 set that is used to unshare the subtrees of these nodes. */
806 static tree
807 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
809 tree t = *tp;
810 enum tree_code code = TREE_CODE (t);
812 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
813 copy their subtrees if we can make sure to do it only once. */
814 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
816 if (data && !((hash_set<tree> *)data)->add (t))
818 else
819 *walk_subtrees = 0;
822 /* Stop at types, decls, constants like copy_tree_r. */
823 else if (TREE_CODE_CLASS (code) == tcc_type
824 || TREE_CODE_CLASS (code) == tcc_declaration
825 || TREE_CODE_CLASS (code) == tcc_constant)
826 *walk_subtrees = 0;
828 /* Cope with the statement expression extension. */
829 else if (code == STATEMENT_LIST)
832 /* Leave the bulk of the work to copy_tree_r itself. */
833 else
834 copy_tree_r (tp, walk_subtrees, NULL);
836 return NULL_TREE;
839 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
840 If *TP has been visited already, then *TP is deeply copied by calling
841 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
843 static tree
844 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
846 tree t = *tp;
847 enum tree_code code = TREE_CODE (t);
849 /* Skip types, decls, and constants. But we do want to look at their
850 types and the bounds of types. Mark them as visited so we properly
851 unmark their subtrees on the unmark pass. If we've already seen them,
852 don't look down further. */
853 if (TREE_CODE_CLASS (code) == tcc_type
854 || TREE_CODE_CLASS (code) == tcc_declaration
855 || TREE_CODE_CLASS (code) == tcc_constant)
857 if (TREE_VISITED (t))
858 *walk_subtrees = 0;
859 else
860 TREE_VISITED (t) = 1;
863 /* If this node has been visited already, unshare it and don't look
864 any deeper. */
865 else if (TREE_VISITED (t))
867 walk_tree (tp, mostly_copy_tree_r, data, NULL);
868 *walk_subtrees = 0;
871 /* Otherwise, mark the node as visited and keep looking. */
872 else
873 TREE_VISITED (t) = 1;
875 return NULL_TREE;
878 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
879 copy_if_shared_r callback unmodified. */
881 static inline void
882 copy_if_shared (tree *tp, void *data)
884 walk_tree (tp, copy_if_shared_r, data, NULL);
887 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
888 any nested functions. */
890 static void
891 unshare_body (tree fndecl)
893 struct cgraph_node *cgn = cgraph_node::get (fndecl);
894 /* If the language requires deep unsharing, we need a pointer set to make
895 sure we don't repeatedly unshare subtrees of unshareable nodes. */
896 hash_set<tree> *visited
897 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
899 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
900 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
901 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
903 delete visited;
905 if (cgn)
906 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
907 unshare_body (cgn->decl);
910 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
911 Subtrees are walked until the first unvisited node is encountered. */
913 static tree
914 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
916 tree t = *tp;
918 /* If this node has been visited, unmark it and keep looking. */
919 if (TREE_VISITED (t))
920 TREE_VISITED (t) = 0;
922 /* Otherwise, don't look any deeper. */
923 else
924 *walk_subtrees = 0;
926 return NULL_TREE;
929 /* Unmark the visited trees rooted at *TP. */
931 static inline void
932 unmark_visited (tree *tp)
934 walk_tree (tp, unmark_visited_r, NULL, NULL);
937 /* Likewise, but mark all trees as not visited. */
939 static void
940 unvisit_body (tree fndecl)
942 struct cgraph_node *cgn = cgraph_node::get (fndecl);
944 unmark_visited (&DECL_SAVED_TREE (fndecl));
945 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
946 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
948 if (cgn)
949 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
950 unvisit_body (cgn->decl);
953 /* Unconditionally make an unshared copy of EXPR. This is used when using
954 stored expressions which span multiple functions, such as BINFO_VTABLE,
955 as the normal unsharing process can't tell that they're shared. */
957 tree
958 unshare_expr (tree expr)
960 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
961 return expr;
964 /* Worker for unshare_expr_without_location. */
966 static tree
967 prune_expr_location (tree *tp, int *walk_subtrees, void *)
969 if (EXPR_P (*tp))
970 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
971 else
972 *walk_subtrees = 0;
973 return NULL_TREE;
976 /* Similar to unshare_expr but also prune all expression locations
977 from EXPR. */
979 tree
980 unshare_expr_without_location (tree expr)
982 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
983 if (EXPR_P (expr))
984 walk_tree (&expr, prune_expr_location, NULL, NULL);
985 return expr;
988 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
989 contain statements and have a value. Assign its value to a temporary
990 and give it void_type_node. Return the temporary, or NULL_TREE if
991 WRAPPER was already void. */
993 tree
994 voidify_wrapper_expr (tree wrapper, tree temp)
996 tree type = TREE_TYPE (wrapper);
997 if (type && !VOID_TYPE_P (type))
999 tree *p;
1001 /* Set p to point to the body of the wrapper. Loop until we find
1002 something that isn't a wrapper. */
1003 for (p = &wrapper; p && *p; )
1005 switch (TREE_CODE (*p))
1007 case BIND_EXPR:
1008 TREE_SIDE_EFFECTS (*p) = 1;
1009 TREE_TYPE (*p) = void_type_node;
1010 /* For a BIND_EXPR, the body is operand 1. */
1011 p = &BIND_EXPR_BODY (*p);
1012 break;
1014 case CLEANUP_POINT_EXPR:
1015 case TRY_FINALLY_EXPR:
1016 case TRY_CATCH_EXPR:
1017 TREE_SIDE_EFFECTS (*p) = 1;
1018 TREE_TYPE (*p) = void_type_node;
1019 p = &TREE_OPERAND (*p, 0);
1020 break;
1022 case STATEMENT_LIST:
1024 tree_stmt_iterator i = tsi_last (*p);
1025 TREE_SIDE_EFFECTS (*p) = 1;
1026 TREE_TYPE (*p) = void_type_node;
1027 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1029 break;
1031 case COMPOUND_EXPR:
1032 /* Advance to the last statement. Set all container types to
1033 void. */
1034 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1036 TREE_SIDE_EFFECTS (*p) = 1;
1037 TREE_TYPE (*p) = void_type_node;
1039 break;
1041 case TRANSACTION_EXPR:
1042 TREE_SIDE_EFFECTS (*p) = 1;
1043 TREE_TYPE (*p) = void_type_node;
1044 p = &TRANSACTION_EXPR_BODY (*p);
1045 break;
1047 default:
1048 /* Assume that any tree upon which voidify_wrapper_expr is
1049 directly called is a wrapper, and that its body is op0. */
1050 if (p == &wrapper)
1052 TREE_SIDE_EFFECTS (*p) = 1;
1053 TREE_TYPE (*p) = void_type_node;
1054 p = &TREE_OPERAND (*p, 0);
1055 break;
1057 goto out;
1061 out:
1062 if (p == NULL || IS_EMPTY_STMT (*p))
1063 temp = NULL_TREE;
1064 else if (temp)
1066 /* The wrapper is on the RHS of an assignment that we're pushing
1067 down. */
1068 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1069 || TREE_CODE (temp) == MODIFY_EXPR);
1070 TREE_OPERAND (temp, 1) = *p;
1071 *p = temp;
1073 else
1075 temp = create_tmp_var (type, "retval");
1076 *p = build2 (INIT_EXPR, type, temp, *p);
1079 return temp;
1082 return NULL_TREE;
1085 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1086 a temporary through which they communicate. */
1088 static void
1089 build_stack_save_restore (gcall **save, gcall **restore)
1091 tree tmp_var;
1093 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1094 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1095 gimple_call_set_lhs (*save, tmp_var);
1097 *restore
1098 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1099 1, tmp_var);
1102 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1104 static tree
1105 build_asan_poison_call_expr (tree decl)
1107 /* Do not poison variables that have size equal to zero. */
1108 tree unit_size = DECL_SIZE_UNIT (decl);
1109 if (zerop (unit_size))
1110 return NULL_TREE;
1112 tree base = build_fold_addr_expr (decl);
1114 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1115 void_type_node, 3,
1116 build_int_cst (integer_type_node,
1117 ASAN_MARK_POISON),
1118 base, unit_size);
1121 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1122 on POISON flag, shadow memory of a DECL variable. The call will be
1123 put on location identified by IT iterator, where BEFORE flag drives
1124 position where the stmt will be put. */
1126 static void
1127 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1128 bool before)
1130 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1131 if (gimplify_omp_ctxp)
1132 return;
1134 tree unit_size = DECL_SIZE_UNIT (decl);
1135 tree base = build_fold_addr_expr (decl);
1137 /* Do not poison variables that have size equal to zero. */
1138 if (zerop (unit_size))
1139 return;
1141 /* It's necessary to have all stack variables aligned to ASAN granularity
1142 bytes. */
1143 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1144 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1146 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1148 gimple *g
1149 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1150 build_int_cst (integer_type_node, flags),
1151 base, unit_size);
1153 if (before)
1154 gsi_insert_before (it, g, GSI_NEW_STMT);
1155 else
1156 gsi_insert_after (it, g, GSI_NEW_STMT);
1159 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1160 either poisons or unpoisons a DECL. Created statement is appended
1161 to SEQ_P gimple sequence. */
1163 static void
1164 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1166 gimple_stmt_iterator it = gsi_last (*seq_p);
1167 bool before = false;
1169 if (gsi_end_p (it))
1170 before = true;
1172 asan_poison_variable (decl, poison, &it, before);
1175 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1177 static int
1178 sort_by_decl_uid (const void *a, const void *b)
1180 const tree *t1 = (const tree *)a;
1181 const tree *t2 = (const tree *)b;
1183 int uid1 = DECL_UID (*t1);
1184 int uid2 = DECL_UID (*t2);
1186 if (uid1 < uid2)
1187 return -1;
1188 else if (uid1 > uid2)
1189 return 1;
1190 else
1191 return 0;
1194 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1195 depending on POISON flag. Created statement is appended
1196 to SEQ_P gimple sequence. */
1198 static void
1199 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1201 unsigned c = variables->elements ();
1202 if (c == 0)
1203 return;
1205 auto_vec<tree> sorted_variables (c);
1207 for (hash_set<tree>::iterator it = variables->begin ();
1208 it != variables->end (); ++it)
1209 sorted_variables.safe_push (*it);
1211 sorted_variables.qsort (sort_by_decl_uid);
1213 unsigned i;
1214 tree var;
1215 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1217 asan_poison_variable (var, poison, seq_p);
1219 /* Add use_after_scope_memory attribute for the variable in order
1220 to prevent re-written into SSA. */
1221 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1222 DECL_ATTRIBUTES (var)))
1223 DECL_ATTRIBUTES (var)
1224 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1225 integer_one_node,
1226 DECL_ATTRIBUTES (var));
1230 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1232 static enum gimplify_status
1233 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1235 tree bind_expr = *expr_p;
1236 bool old_keep_stack = gimplify_ctxp->keep_stack;
1237 bool old_save_stack = gimplify_ctxp->save_stack;
1238 tree t;
1239 gbind *bind_stmt;
1240 gimple_seq body, cleanup;
1241 gcall *stack_save;
1242 location_t start_locus = 0, end_locus = 0;
1243 tree ret_clauses = NULL;
1245 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1247 /* Mark variables seen in this bind expr. */
1248 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1250 if (VAR_P (t))
1252 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1254 /* Mark variable as local. */
1255 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1256 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1257 || splay_tree_lookup (ctx->variables,
1258 (splay_tree_key) t) == NULL))
1260 if (ctx->region_type == ORT_SIMD
1261 && TREE_ADDRESSABLE (t)
1262 && !TREE_STATIC (t))
1263 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1264 else
1265 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1268 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1270 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1271 cfun->has_local_explicit_reg_vars = true;
1274 /* Preliminarily mark non-addressed complex variables as eligible
1275 for promotion to gimple registers. We'll transform their uses
1276 as we find them. */
1277 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1278 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1279 && !TREE_THIS_VOLATILE (t)
1280 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1281 && !needs_to_live_in_memory (t))
1282 DECL_GIMPLE_REG_P (t) = 1;
1285 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1286 BIND_EXPR_BLOCK (bind_expr));
1287 gimple_push_bind_expr (bind_stmt);
1289 gimplify_ctxp->keep_stack = false;
1290 gimplify_ctxp->save_stack = false;
1292 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1293 body = NULL;
1294 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1295 gimple_bind_set_body (bind_stmt, body);
1297 /* Source location wise, the cleanup code (stack_restore and clobbers)
1298 belongs to the end of the block, so propagate what we have. The
1299 stack_save operation belongs to the beginning of block, which we can
1300 infer from the bind_expr directly if the block has no explicit
1301 assignment. */
1302 if (BIND_EXPR_BLOCK (bind_expr))
1304 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1305 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1307 if (start_locus == 0)
1308 start_locus = EXPR_LOCATION (bind_expr);
1310 cleanup = NULL;
1311 stack_save = NULL;
1313 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1314 the stack space allocated to the VLAs. */
1315 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1317 gcall *stack_restore;
1319 /* Save stack on entry and restore it on exit. Add a try_finally
1320 block to achieve this. */
1321 build_stack_save_restore (&stack_save, &stack_restore);
1323 gimple_set_location (stack_save, start_locus);
1324 gimple_set_location (stack_restore, end_locus);
1326 gimplify_seq_add_stmt (&cleanup, stack_restore);
1329 /* Add clobbers for all variables that go out of scope. */
1330 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1332 if (VAR_P (t)
1333 && !is_global_var (t)
1334 && DECL_CONTEXT (t) == current_function_decl)
1336 if (!DECL_HARD_REGISTER (t)
1337 && !TREE_THIS_VOLATILE (t)
1338 && !DECL_HAS_VALUE_EXPR_P (t)
1339 /* Only care for variables that have to be in memory. Others
1340 will be rewritten into SSA names, hence moved to the
1341 top-level. */
1342 && !is_gimple_reg (t)
1343 && flag_stack_reuse != SR_NONE)
1345 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1346 gimple *clobber_stmt;
1347 TREE_THIS_VOLATILE (clobber) = 1;
1348 clobber_stmt = gimple_build_assign (t, clobber);
1349 gimple_set_location (clobber_stmt, end_locus);
1350 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1353 if (flag_openacc && oacc_declare_returns != NULL)
1355 tree *c = oacc_declare_returns->get (t);
1356 if (c != NULL)
1358 if (ret_clauses)
1359 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1361 ret_clauses = *c;
1363 oacc_declare_returns->remove (t);
1365 if (oacc_declare_returns->elements () == 0)
1367 delete oacc_declare_returns;
1368 oacc_declare_returns = NULL;
1374 if (asan_poisoned_variables != NULL
1375 && asan_poisoned_variables->contains (t))
1377 asan_poisoned_variables->remove (t);
1378 asan_poison_variable (t, true, &cleanup);
1381 if (gimplify_ctxp->live_switch_vars != NULL
1382 && gimplify_ctxp->live_switch_vars->contains (t))
1383 gimplify_ctxp->live_switch_vars->remove (t);
1386 if (ret_clauses)
1388 gomp_target *stmt;
1389 gimple_stmt_iterator si = gsi_start (cleanup);
1391 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1392 ret_clauses);
1393 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1396 if (cleanup)
1398 gtry *gs;
1399 gimple_seq new_body;
1401 new_body = NULL;
1402 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1403 GIMPLE_TRY_FINALLY);
1405 if (stack_save)
1406 gimplify_seq_add_stmt (&new_body, stack_save);
1407 gimplify_seq_add_stmt (&new_body, gs);
1408 gimple_bind_set_body (bind_stmt, new_body);
1411 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1412 if (!gimplify_ctxp->keep_stack)
1413 gimplify_ctxp->keep_stack = old_keep_stack;
1414 gimplify_ctxp->save_stack = old_save_stack;
1416 gimple_pop_bind_expr ();
1418 gimplify_seq_add_stmt (pre_p, bind_stmt);
1420 if (temp)
1422 *expr_p = temp;
1423 return GS_OK;
1426 *expr_p = NULL_TREE;
1427 return GS_ALL_DONE;
1430 /* Maybe add early return predict statement to PRE_P sequence. */
1432 static void
1433 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1435 /* If we are not in a conditional context, add PREDICT statement. */
1436 if (gimple_conditional_context ())
1438 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1439 NOT_TAKEN);
1440 gimplify_seq_add_stmt (pre_p, predict);
1444 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1445 GIMPLE value, it is assigned to a new temporary and the statement is
1446 re-written to return the temporary.
1448 PRE_P points to the sequence where side effects that must happen before
1449 STMT should be stored. */
1451 static enum gimplify_status
1452 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1454 greturn *ret;
1455 tree ret_expr = TREE_OPERAND (stmt, 0);
1456 tree result_decl, result;
1458 if (ret_expr == error_mark_node)
1459 return GS_ERROR;
1461 /* Implicit _Cilk_sync must be inserted right before any return statement
1462 if there is a _Cilk_spawn in the function. If the user has provided a
1463 _Cilk_sync, the optimizer should remove this duplicate one. */
1464 if (fn_contains_cilk_spawn_p (cfun))
1466 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1467 gimplify_and_add (impl_sync, pre_p);
1470 if (!ret_expr
1471 || TREE_CODE (ret_expr) == RESULT_DECL
1472 || ret_expr == error_mark_node)
1474 maybe_add_early_return_predict_stmt (pre_p);
1475 greturn *ret = gimple_build_return (ret_expr);
1476 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1477 gimplify_seq_add_stmt (pre_p, ret);
1478 return GS_ALL_DONE;
1481 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1482 result_decl = NULL_TREE;
1483 else
1485 result_decl = TREE_OPERAND (ret_expr, 0);
1487 /* See through a return by reference. */
1488 if (TREE_CODE (result_decl) == INDIRECT_REF)
1489 result_decl = TREE_OPERAND (result_decl, 0);
1491 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1492 || TREE_CODE (ret_expr) == INIT_EXPR)
1493 && TREE_CODE (result_decl) == RESULT_DECL);
1496 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1497 Recall that aggregate_value_p is FALSE for any aggregate type that is
1498 returned in registers. If we're returning values in registers, then
1499 we don't want to extend the lifetime of the RESULT_DECL, particularly
1500 across another call. In addition, for those aggregates for which
1501 hard_function_value generates a PARALLEL, we'll die during normal
1502 expansion of structure assignments; there's special code in expand_return
1503 to handle this case that does not exist in expand_expr. */
1504 if (!result_decl)
1505 result = NULL_TREE;
1506 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1508 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1510 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1511 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1512 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1513 should be effectively allocated by the caller, i.e. all calls to
1514 this function must be subject to the Return Slot Optimization. */
1515 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1516 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1518 result = result_decl;
1520 else if (gimplify_ctxp->return_temp)
1521 result = gimplify_ctxp->return_temp;
1522 else
1524 result = create_tmp_reg (TREE_TYPE (result_decl));
1526 /* ??? With complex control flow (usually involving abnormal edges),
1527 we can wind up warning about an uninitialized value for this. Due
1528 to how this variable is constructed and initialized, this is never
1529 true. Give up and never warn. */
1530 TREE_NO_WARNING (result) = 1;
1532 gimplify_ctxp->return_temp = result;
1535 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1536 Then gimplify the whole thing. */
1537 if (result != result_decl)
1538 TREE_OPERAND (ret_expr, 0) = result;
1540 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1542 maybe_add_early_return_predict_stmt (pre_p);
1543 ret = gimple_build_return (result);
1544 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1545 gimplify_seq_add_stmt (pre_p, ret);
1547 return GS_ALL_DONE;
1550 /* Gimplify a variable-length array DECL. */
1552 static void
1553 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1555 /* This is a variable-sized decl. Simplify its size and mark it
1556 for deferred expansion. */
1557 tree t, addr, ptr_type;
1559 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1560 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1562 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1563 if (DECL_HAS_VALUE_EXPR_P (decl))
1564 return;
1566 /* All occurrences of this decl in final gimplified code will be
1567 replaced by indirection. Setting DECL_VALUE_EXPR does two
1568 things: First, it lets the rest of the gimplifier know what
1569 replacement to use. Second, it lets the debug info know
1570 where to find the value. */
1571 ptr_type = build_pointer_type (TREE_TYPE (decl));
1572 addr = create_tmp_var (ptr_type, get_name (decl));
1573 DECL_IGNORED_P (addr) = 0;
1574 t = build_fold_indirect_ref (addr);
1575 TREE_THIS_NOTRAP (t) = 1;
1576 SET_DECL_VALUE_EXPR (decl, t);
1577 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1579 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1580 max_int_size_in_bytes (TREE_TYPE (decl)));
1581 /* The call has been built for a variable-sized object. */
1582 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1583 t = fold_convert (ptr_type, t);
1584 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1586 gimplify_and_add (t, seq_p);
1589 /* A helper function to be called via walk_tree. Mark all labels under *TP
1590 as being forced. To be called for DECL_INITIAL of static variables. */
1592 static tree
1593 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1595 if (TYPE_P (*tp))
1596 *walk_subtrees = 0;
1597 if (TREE_CODE (*tp) == LABEL_DECL)
1599 FORCED_LABEL (*tp) = 1;
1600 cfun->has_forced_label_in_static = 1;
1603 return NULL_TREE;
1606 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1607 and initialization explicit. */
1609 static enum gimplify_status
1610 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1612 tree stmt = *stmt_p;
1613 tree decl = DECL_EXPR_DECL (stmt);
1615 *stmt_p = NULL_TREE;
1617 if (TREE_TYPE (decl) == error_mark_node)
1618 return GS_ERROR;
1620 if ((TREE_CODE (decl) == TYPE_DECL
1621 || VAR_P (decl))
1622 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1624 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1625 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1626 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1629 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1630 in case its size expressions contain problematic nodes like CALL_EXPR. */
1631 if (TREE_CODE (decl) == TYPE_DECL
1632 && DECL_ORIGINAL_TYPE (decl)
1633 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1635 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1636 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1637 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1640 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1642 tree init = DECL_INITIAL (decl);
1643 bool is_vla = false;
1645 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1646 || (!TREE_STATIC (decl)
1647 && flag_stack_check == GENERIC_STACK_CHECK
1648 && compare_tree_int (DECL_SIZE_UNIT (decl),
1649 STACK_CHECK_MAX_VAR_SIZE) > 0))
1651 gimplify_vla_decl (decl, seq_p);
1652 is_vla = true;
1655 if (asan_poisoned_variables
1656 && !is_vla
1657 && TREE_ADDRESSABLE (decl)
1658 && !TREE_STATIC (decl)
1659 && !DECL_HAS_VALUE_EXPR_P (decl)
1660 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1661 && dbg_cnt (asan_use_after_scope))
1663 asan_poisoned_variables->add (decl);
1664 asan_poison_variable (decl, false, seq_p);
1665 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1666 gimplify_ctxp->live_switch_vars->add (decl);
1669 /* Some front ends do not explicitly declare all anonymous
1670 artificial variables. We compensate here by declaring the
1671 variables, though it would be better if the front ends would
1672 explicitly declare them. */
1673 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1674 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1675 gimple_add_tmp_var (decl);
1677 if (init && init != error_mark_node)
1679 if (!TREE_STATIC (decl))
1681 DECL_INITIAL (decl) = NULL_TREE;
1682 init = build2 (INIT_EXPR, void_type_node, decl, init);
1683 gimplify_and_add (init, seq_p);
1684 ggc_free (init);
1686 else
1687 /* We must still examine initializers for static variables
1688 as they may contain a label address. */
1689 walk_tree (&init, force_labels_r, NULL, NULL);
1693 return GS_ALL_DONE;
1696 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1697 and replacing the LOOP_EXPR with goto, but if the loop contains an
1698 EXIT_EXPR, we need to append a label for it to jump to. */
1700 static enum gimplify_status
1701 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1703 tree saved_label = gimplify_ctxp->exit_label;
1704 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1706 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1708 gimplify_ctxp->exit_label = NULL_TREE;
1710 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1712 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1714 if (gimplify_ctxp->exit_label)
1715 gimplify_seq_add_stmt (pre_p,
1716 gimple_build_label (gimplify_ctxp->exit_label));
1718 gimplify_ctxp->exit_label = saved_label;
1720 *expr_p = NULL;
1721 return GS_ALL_DONE;
1724 /* Gimplify a statement list onto a sequence. These may be created either
1725 by an enlightened front-end, or by shortcut_cond_expr. */
1727 static enum gimplify_status
1728 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1730 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1732 tree_stmt_iterator i = tsi_start (*expr_p);
1734 while (!tsi_end_p (i))
1736 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1737 tsi_delink (&i);
1740 if (temp)
1742 *expr_p = temp;
1743 return GS_OK;
1746 return GS_ALL_DONE;
1749 /* Callback for walk_gimple_seq. */
1751 static tree
1752 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1753 struct walk_stmt_info *wi)
1755 gimple *stmt = gsi_stmt (*gsi_p);
1757 *handled_ops_p = true;
1758 switch (gimple_code (stmt))
1760 case GIMPLE_TRY:
1761 /* A compiler-generated cleanup or a user-written try block.
1762 If it's empty, don't dive into it--that would result in
1763 worse location info. */
1764 if (gimple_try_eval (stmt) == NULL)
1766 wi->info = stmt;
1767 return integer_zero_node;
1769 /* Fall through. */
1770 case GIMPLE_BIND:
1771 case GIMPLE_CATCH:
1772 case GIMPLE_EH_FILTER:
1773 case GIMPLE_TRANSACTION:
1774 /* Walk the sub-statements. */
1775 *handled_ops_p = false;
1776 break;
1777 case GIMPLE_CALL:
1778 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1780 *handled_ops_p = false;
1781 break;
1783 /* Fall through. */
1784 default:
1785 /* Save the first "real" statement (not a decl/lexical scope/...). */
1786 wi->info = stmt;
1787 return integer_zero_node;
1789 return NULL_TREE;
1792 /* Possibly warn about unreachable statements between switch's controlling
1793 expression and the first case. SEQ is the body of a switch expression. */
1795 static void
1796 maybe_warn_switch_unreachable (gimple_seq seq)
1798 if (!warn_switch_unreachable
1799 /* This warning doesn't play well with Fortran when optimizations
1800 are on. */
1801 || lang_GNU_Fortran ()
1802 || seq == NULL)
1803 return;
1805 struct walk_stmt_info wi;
1806 memset (&wi, 0, sizeof (wi));
1807 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1808 gimple *stmt = (gimple *) wi.info;
1810 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1812 if (gimple_code (stmt) == GIMPLE_GOTO
1813 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1814 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1815 /* Don't warn for compiler-generated gotos. These occur
1816 in Duff's devices, for example. */;
1817 else
1818 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1819 "statement will never be executed");
1824 /* A label entry that pairs label and a location. */
1825 struct label_entry
1827 tree label;
1828 location_t loc;
1831 /* Find LABEL in vector of label entries VEC. */
1833 static struct label_entry *
1834 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1836 unsigned int i;
1837 struct label_entry *l;
1839 FOR_EACH_VEC_ELT (*vec, i, l)
1840 if (l->label == label)
1841 return l;
1842 return NULL;
1845 /* Return true if LABEL, a LABEL_DECL, represents a case label
1846 in a vector of labels CASES. */
1848 static bool
1849 case_label_p (const vec<tree> *cases, tree label)
1851 unsigned int i;
1852 tree l;
1854 FOR_EACH_VEC_ELT (*cases, i, l)
1855 if (CASE_LABEL (l) == label)
1856 return true;
1857 return false;
1860 /* Find the last statement in a scope STMT. */
1862 static gimple *
1863 last_stmt_in_scope (gimple *stmt)
1865 if (!stmt)
1866 return NULL;
1868 switch (gimple_code (stmt))
1870 case GIMPLE_BIND:
1872 gbind *bind = as_a <gbind *> (stmt);
1873 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1874 return last_stmt_in_scope (stmt);
1877 case GIMPLE_TRY:
1879 gtry *try_stmt = as_a <gtry *> (stmt);
1880 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1881 gimple *last_eval = last_stmt_in_scope (stmt);
1882 if (gimple_stmt_may_fallthru (last_eval)
1883 && (last_eval == NULL
1884 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1885 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1887 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1888 return last_stmt_in_scope (stmt);
1890 else
1891 return last_eval;
1894 default:
1895 return stmt;
1899 /* Collect interesting labels in LABELS and return the statement preceding
1900 another case label, or a user-defined label. */
1902 static gimple *
1903 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1904 auto_vec <struct label_entry> *labels)
1906 gimple *prev = NULL;
1910 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1911 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1913 /* Nested scope. Only look at the last statement of
1914 the innermost scope. */
1915 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1916 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1917 if (last)
1919 prev = last;
1920 /* It might be a label without a location. Use the
1921 location of the scope then. */
1922 if (!gimple_has_location (prev))
1923 gimple_set_location (prev, bind_loc);
1925 gsi_next (gsi_p);
1926 continue;
1929 /* Ifs are tricky. */
1930 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1932 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1933 tree false_lab = gimple_cond_false_label (cond_stmt);
1934 location_t if_loc = gimple_location (cond_stmt);
1936 /* If we have e.g.
1937 if (i > 1) goto <D.2259>; else goto D;
1938 we can't do much with the else-branch. */
1939 if (!DECL_ARTIFICIAL (false_lab))
1940 break;
1942 /* Go on until the false label, then one step back. */
1943 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1945 gimple *stmt = gsi_stmt (*gsi_p);
1946 if (gimple_code (stmt) == GIMPLE_LABEL
1947 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1948 break;
1951 /* Not found? Oops. */
1952 if (gsi_end_p (*gsi_p))
1953 break;
1955 struct label_entry l = { false_lab, if_loc };
1956 labels->safe_push (l);
1958 /* Go to the last statement of the then branch. */
1959 gsi_prev (gsi_p);
1961 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1962 <D.1759>:
1963 <stmt>;
1964 goto <D.1761>;
1965 <D.1760>:
1967 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1968 && !gimple_has_location (gsi_stmt (*gsi_p)))
1970 /* Look at the statement before, it might be
1971 attribute fallthrough, in which case don't warn. */
1972 gsi_prev (gsi_p);
1973 bool fallthru_before_dest
1974 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1975 gsi_next (gsi_p);
1976 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1977 if (!fallthru_before_dest)
1979 struct label_entry l = { goto_dest, if_loc };
1980 labels->safe_push (l);
1983 /* And move back. */
1984 gsi_next (gsi_p);
1987 /* Remember the last statement. Skip labels that are of no interest
1988 to us. */
1989 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1991 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1992 if (find_label_entry (labels, label))
1993 prev = gsi_stmt (*gsi_p);
1995 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1997 else
1998 prev = gsi_stmt (*gsi_p);
1999 gsi_next (gsi_p);
2001 while (!gsi_end_p (*gsi_p)
2002 /* Stop if we find a case or a user-defined label. */
2003 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2004 || !gimple_has_location (gsi_stmt (*gsi_p))));
2006 return prev;
2009 /* Return true if the switch fallthough warning should occur. LABEL is
2010 the label statement that we're falling through to. */
2012 static bool
2013 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2015 gimple_stmt_iterator gsi = *gsi_p;
2017 /* Don't warn if the label is marked with a "falls through" comment. */
2018 if (FALLTHROUGH_LABEL_P (label))
2019 return false;
2021 /* Don't warn for non-case labels followed by a statement:
2022 case 0:
2023 foo ();
2024 label:
2025 bar ();
2026 as these are likely intentional. */
2027 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2029 tree l;
2030 while (!gsi_end_p (gsi)
2031 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2032 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2033 && !case_label_p (&gimplify_ctxp->case_labels, l))
2034 gsi_next (&gsi);
2035 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2036 return false;
2039 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2040 immediately breaks. */
2041 gsi = *gsi_p;
2043 /* Skip all immediately following labels. */
2044 while (!gsi_end_p (gsi)
2045 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2046 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2047 gsi_next (&gsi);
2049 /* { ... something; default:; } */
2050 if (gsi_end_p (gsi)
2051 /* { ... something; default: break; } or
2052 { ... something; default: goto L; } */
2053 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2054 /* { ... something; default: return; } */
2055 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2056 return false;
2058 return true;
2061 /* Callback for walk_gimple_seq. */
2063 static tree
2064 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2065 struct walk_stmt_info *)
2067 gimple *stmt = gsi_stmt (*gsi_p);
2069 *handled_ops_p = true;
2070 switch (gimple_code (stmt))
2072 case GIMPLE_TRY:
2073 case GIMPLE_BIND:
2074 case GIMPLE_CATCH:
2075 case GIMPLE_EH_FILTER:
2076 case GIMPLE_TRANSACTION:
2077 /* Walk the sub-statements. */
2078 *handled_ops_p = false;
2079 break;
2081 /* Find a sequence of form:
2083 GIMPLE_LABEL
2084 [...]
2085 <may fallthru stmt>
2086 GIMPLE_LABEL
2088 and possibly warn. */
2089 case GIMPLE_LABEL:
2091 /* Found a label. Skip all immediately following labels. */
2092 while (!gsi_end_p (*gsi_p)
2093 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2094 gsi_next (gsi_p);
2096 /* There might be no more statements. */
2097 if (gsi_end_p (*gsi_p))
2098 return integer_zero_node;
2100 /* Vector of labels that fall through. */
2101 auto_vec <struct label_entry> labels;
2102 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2104 /* There might be no more statements. */
2105 if (gsi_end_p (*gsi_p))
2106 return integer_zero_node;
2108 gimple *next = gsi_stmt (*gsi_p);
2109 tree label;
2110 /* If what follows is a label, then we may have a fallthrough. */
2111 if (gimple_code (next) == GIMPLE_LABEL
2112 && gimple_has_location (next)
2113 && (label = gimple_label_label (as_a <glabel *> (next)))
2114 && prev != NULL)
2116 struct label_entry *l;
2117 bool warned_p = false;
2118 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2119 /* Quiet. */;
2120 else if (gimple_code (prev) == GIMPLE_LABEL
2121 && (label = gimple_label_label (as_a <glabel *> (prev)))
2122 && (l = find_label_entry (&labels, label)))
2123 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2124 "this statement may fall through");
2125 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2126 /* Try to be clever and don't warn when the statement
2127 can't actually fall through. */
2128 && gimple_stmt_may_fallthru (prev)
2129 && gimple_has_location (prev))
2130 warned_p = warning_at (gimple_location (prev),
2131 OPT_Wimplicit_fallthrough_,
2132 "this statement may fall through");
2133 if (warned_p)
2134 inform (gimple_location (next), "here");
2136 /* Mark this label as processed so as to prevent multiple
2137 warnings in nested switches. */
2138 FALLTHROUGH_LABEL_P (label) = true;
2140 /* So that next warn_implicit_fallthrough_r will start looking for
2141 a new sequence starting with this label. */
2142 gsi_prev (gsi_p);
2145 break;
2146 default:
2147 break;
2149 return NULL_TREE;
2152 /* Warn when a switch case falls through. */
2154 static void
2155 maybe_warn_implicit_fallthrough (gimple_seq seq)
2157 if (!warn_implicit_fallthrough)
2158 return;
2160 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2161 if (!(lang_GNU_C ()
2162 || lang_GNU_CXX ()
2163 || lang_GNU_OBJC ()))
2164 return;
2166 struct walk_stmt_info wi;
2167 memset (&wi, 0, sizeof (wi));
2168 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2171 /* Callback for walk_gimple_seq. */
2173 static tree
2174 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2175 struct walk_stmt_info *)
2177 gimple *stmt = gsi_stmt (*gsi_p);
2179 *handled_ops_p = true;
2180 switch (gimple_code (stmt))
2182 case GIMPLE_TRY:
2183 case GIMPLE_BIND:
2184 case GIMPLE_CATCH:
2185 case GIMPLE_EH_FILTER:
2186 case GIMPLE_TRANSACTION:
2187 /* Walk the sub-statements. */
2188 *handled_ops_p = false;
2189 break;
2190 case GIMPLE_CALL:
2191 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2193 gsi_remove (gsi_p, true);
2194 if (gsi_end_p (*gsi_p))
2195 return integer_zero_node;
2197 bool found = false;
2198 location_t loc = gimple_location (stmt);
2200 gimple_stmt_iterator gsi2 = *gsi_p;
2201 stmt = gsi_stmt (gsi2);
2202 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2204 /* Go on until the artificial label. */
2205 tree goto_dest = gimple_goto_dest (stmt);
2206 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2208 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2209 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2210 == goto_dest)
2211 break;
2214 /* Not found? Stop. */
2215 if (gsi_end_p (gsi2))
2216 break;
2218 /* Look one past it. */
2219 gsi_next (&gsi2);
2222 /* We're looking for a case label or default label here. */
2223 while (!gsi_end_p (gsi2))
2225 stmt = gsi_stmt (gsi2);
2226 if (gimple_code (stmt) == GIMPLE_LABEL)
2228 tree label = gimple_label_label (as_a <glabel *> (stmt));
2229 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2231 found = true;
2232 break;
2235 else
2236 /* Something other than a label. That's not expected. */
2237 break;
2238 gsi_next (&gsi2);
2240 if (!found)
2241 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2242 "a case label or default label");
2244 break;
2245 default:
2246 break;
2248 return NULL_TREE;
2251 /* Expand all FALLTHROUGH () calls in SEQ. */
2253 static void
2254 expand_FALLTHROUGH (gimple_seq *seq_p)
2256 struct walk_stmt_info wi;
2257 memset (&wi, 0, sizeof (wi));
2258 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2262 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2263 branch to. */
2265 static enum gimplify_status
2266 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2268 tree switch_expr = *expr_p;
2269 gimple_seq switch_body_seq = NULL;
2270 enum gimplify_status ret;
2271 tree index_type = TREE_TYPE (switch_expr);
2272 if (index_type == NULL_TREE)
2273 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2275 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2276 fb_rvalue);
2277 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2278 return ret;
2280 if (SWITCH_BODY (switch_expr))
2282 vec<tree> labels;
2283 vec<tree> saved_labels;
2284 hash_set<tree> *saved_live_switch_vars = NULL;
2285 tree default_case = NULL_TREE;
2286 gswitch *switch_stmt;
2288 /* If someone can be bothered to fill in the labels, they can
2289 be bothered to null out the body too. */
2290 gcc_assert (!SWITCH_LABELS (switch_expr));
2292 /* Save old labels, get new ones from body, then restore the old
2293 labels. Save all the things from the switch body to append after. */
2294 saved_labels = gimplify_ctxp->case_labels;
2295 gimplify_ctxp->case_labels.create (8);
2297 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2298 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2299 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2300 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2301 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2302 else
2303 gimplify_ctxp->live_switch_vars = NULL;
2305 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2306 gimplify_ctxp->in_switch_expr = true;
2308 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2310 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2311 maybe_warn_switch_unreachable (switch_body_seq);
2312 maybe_warn_implicit_fallthrough (switch_body_seq);
2313 /* Only do this for the outermost GIMPLE_SWITCH. */
2314 if (!gimplify_ctxp->in_switch_expr)
2315 expand_FALLTHROUGH (&switch_body_seq);
2317 labels = gimplify_ctxp->case_labels;
2318 gimplify_ctxp->case_labels = saved_labels;
2320 if (gimplify_ctxp->live_switch_vars)
2322 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2323 delete gimplify_ctxp->live_switch_vars;
2325 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2327 preprocess_case_label_vec_for_gimple (labels, index_type,
2328 &default_case);
2330 if (!default_case)
2332 glabel *new_default;
2334 default_case
2335 = build_case_label (NULL_TREE, NULL_TREE,
2336 create_artificial_label (UNKNOWN_LOCATION));
2337 new_default = gimple_build_label (CASE_LABEL (default_case));
2338 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2341 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2342 default_case, labels);
2343 gimplify_seq_add_stmt (pre_p, switch_stmt);
2344 gimplify_seq_add_seq (pre_p, switch_body_seq);
2345 labels.release ();
2347 else
2348 gcc_assert (SWITCH_LABELS (switch_expr));
2350 return GS_ALL_DONE;
2353 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2355 static enum gimplify_status
2356 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2358 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2359 == current_function_decl);
2361 tree label = LABEL_EXPR_LABEL (*expr_p);
2362 glabel *label_stmt = gimple_build_label (label);
2363 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2364 gimplify_seq_add_stmt (pre_p, label_stmt);
2366 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2367 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2368 NOT_TAKEN));
2369 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2370 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2371 TAKEN));
2373 return GS_ALL_DONE;
2376 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2378 static enum gimplify_status
2379 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2381 struct gimplify_ctx *ctxp;
2382 glabel *label_stmt;
2384 /* Invalid programs can play Duff's Device type games with, for example,
2385 #pragma omp parallel. At least in the C front end, we don't
2386 detect such invalid branches until after gimplification, in the
2387 diagnose_omp_blocks pass. */
2388 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2389 if (ctxp->case_labels.exists ())
2390 break;
2392 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2393 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2394 ctxp->case_labels.safe_push (*expr_p);
2395 gimplify_seq_add_stmt (pre_p, label_stmt);
2397 return GS_ALL_DONE;
2400 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2401 if necessary. */
2403 tree
2404 build_and_jump (tree *label_p)
2406 if (label_p == NULL)
2407 /* If there's nowhere to jump, just fall through. */
2408 return NULL_TREE;
2410 if (*label_p == NULL_TREE)
2412 tree label = create_artificial_label (UNKNOWN_LOCATION);
2413 *label_p = label;
2416 return build1 (GOTO_EXPR, void_type_node, *label_p);
2419 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2420 This also involves building a label to jump to and communicating it to
2421 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2423 static enum gimplify_status
2424 gimplify_exit_expr (tree *expr_p)
2426 tree cond = TREE_OPERAND (*expr_p, 0);
2427 tree expr;
2429 expr = build_and_jump (&gimplify_ctxp->exit_label);
2430 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2431 *expr_p = expr;
2433 return GS_OK;
2436 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2437 different from its canonical type, wrap the whole thing inside a
2438 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2439 type.
2441 The canonical type of a COMPONENT_REF is the type of the field being
2442 referenced--unless the field is a bit-field which can be read directly
2443 in a smaller mode, in which case the canonical type is the
2444 sign-appropriate type corresponding to that mode. */
2446 static void
2447 canonicalize_component_ref (tree *expr_p)
2449 tree expr = *expr_p;
2450 tree type;
2452 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2454 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2455 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2456 else
2457 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2459 /* One could argue that all the stuff below is not necessary for
2460 the non-bitfield case and declare it a FE error if type
2461 adjustment would be needed. */
2462 if (TREE_TYPE (expr) != type)
2464 #ifdef ENABLE_TYPES_CHECKING
2465 tree old_type = TREE_TYPE (expr);
2466 #endif
2467 int type_quals;
2469 /* We need to preserve qualifiers and propagate them from
2470 operand 0. */
2471 type_quals = TYPE_QUALS (type)
2472 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2473 if (TYPE_QUALS (type) != type_quals)
2474 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2476 /* Set the type of the COMPONENT_REF to the underlying type. */
2477 TREE_TYPE (expr) = type;
2479 #ifdef ENABLE_TYPES_CHECKING
2480 /* It is now a FE error, if the conversion from the canonical
2481 type to the original expression type is not useless. */
2482 gcc_assert (useless_type_conversion_p (old_type, type));
2483 #endif
2487 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2488 to foo, embed that change in the ADDR_EXPR by converting
2489 T array[U];
2490 (T *)&array
2492 &array[L]
2493 where L is the lower bound. For simplicity, only do this for constant
2494 lower bound.
2495 The constraint is that the type of &array[L] is trivially convertible
2496 to T *. */
2498 static void
2499 canonicalize_addr_expr (tree *expr_p)
2501 tree expr = *expr_p;
2502 tree addr_expr = TREE_OPERAND (expr, 0);
2503 tree datype, ddatype, pddatype;
2505 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2506 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2507 || TREE_CODE (addr_expr) != ADDR_EXPR)
2508 return;
2510 /* The addr_expr type should be a pointer to an array. */
2511 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2512 if (TREE_CODE (datype) != ARRAY_TYPE)
2513 return;
2515 /* The pointer to element type shall be trivially convertible to
2516 the expression pointer type. */
2517 ddatype = TREE_TYPE (datype);
2518 pddatype = build_pointer_type (ddatype);
2519 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2520 pddatype))
2521 return;
2523 /* The lower bound and element sizes must be constant. */
2524 if (!TYPE_SIZE_UNIT (ddatype)
2525 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2526 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2527 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2528 return;
2530 /* All checks succeeded. Build a new node to merge the cast. */
2531 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2532 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2533 NULL_TREE, NULL_TREE);
2534 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2536 /* We can have stripped a required restrict qualifier above. */
2537 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2538 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2541 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2542 underneath as appropriate. */
2544 static enum gimplify_status
2545 gimplify_conversion (tree *expr_p)
2547 location_t loc = EXPR_LOCATION (*expr_p);
2548 gcc_assert (CONVERT_EXPR_P (*expr_p));
2550 /* Then strip away all but the outermost conversion. */
2551 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2553 /* And remove the outermost conversion if it's useless. */
2554 if (tree_ssa_useless_type_conversion (*expr_p))
2555 *expr_p = TREE_OPERAND (*expr_p, 0);
2557 /* If we still have a conversion at the toplevel,
2558 then canonicalize some constructs. */
2559 if (CONVERT_EXPR_P (*expr_p))
2561 tree sub = TREE_OPERAND (*expr_p, 0);
2563 /* If a NOP conversion is changing the type of a COMPONENT_REF
2564 expression, then canonicalize its type now in order to expose more
2565 redundant conversions. */
2566 if (TREE_CODE (sub) == COMPONENT_REF)
2567 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2569 /* If a NOP conversion is changing a pointer to array of foo
2570 to a pointer to foo, embed that change in the ADDR_EXPR. */
2571 else if (TREE_CODE (sub) == ADDR_EXPR)
2572 canonicalize_addr_expr (expr_p);
2575 /* If we have a conversion to a non-register type force the
2576 use of a VIEW_CONVERT_EXPR instead. */
2577 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2578 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2579 TREE_OPERAND (*expr_p, 0));
2581 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2582 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2583 TREE_SET_CODE (*expr_p, NOP_EXPR);
2585 return GS_OK;
2588 /* Nonlocal VLAs seen in the current function. */
2589 static hash_set<tree> *nonlocal_vlas;
2591 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2592 static tree nonlocal_vla_vars;
2594 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2595 DECL_VALUE_EXPR, and it's worth re-examining things. */
2597 static enum gimplify_status
2598 gimplify_var_or_parm_decl (tree *expr_p)
2600 tree decl = *expr_p;
2602 /* ??? If this is a local variable, and it has not been seen in any
2603 outer BIND_EXPR, then it's probably the result of a duplicate
2604 declaration, for which we've already issued an error. It would
2605 be really nice if the front end wouldn't leak these at all.
2606 Currently the only known culprit is C++ destructors, as seen
2607 in g++.old-deja/g++.jason/binding.C. */
2608 if (VAR_P (decl)
2609 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2610 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2611 && decl_function_context (decl) == current_function_decl)
2613 gcc_assert (seen_error ());
2614 return GS_ERROR;
2617 /* When within an OMP context, notice uses of variables. */
2618 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2619 return GS_ALL_DONE;
2621 /* If the decl is an alias for another expression, substitute it now. */
2622 if (DECL_HAS_VALUE_EXPR_P (decl))
2624 tree value_expr = DECL_VALUE_EXPR (decl);
2626 /* For referenced nonlocal VLAs add a decl for debugging purposes
2627 to the current function. */
2628 if (VAR_P (decl)
2629 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2630 && nonlocal_vlas != NULL
2631 && TREE_CODE (value_expr) == INDIRECT_REF
2632 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2633 && decl_function_context (decl) != current_function_decl)
2635 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2636 while (ctx
2637 && (ctx->region_type == ORT_WORKSHARE
2638 || ctx->region_type == ORT_SIMD
2639 || ctx->region_type == ORT_ACC))
2640 ctx = ctx->outer_context;
2641 if (!ctx && !nonlocal_vlas->add (decl))
2643 tree copy = copy_node (decl);
2645 lang_hooks.dup_lang_specific_decl (copy);
2646 SET_DECL_RTL (copy, 0);
2647 TREE_USED (copy) = 1;
2648 DECL_CHAIN (copy) = nonlocal_vla_vars;
2649 nonlocal_vla_vars = copy;
2650 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2651 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2655 *expr_p = unshare_expr (value_expr);
2656 return GS_OK;
2659 return GS_ALL_DONE;
2662 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2664 static void
2665 recalculate_side_effects (tree t)
2667 enum tree_code code = TREE_CODE (t);
2668 int len = TREE_OPERAND_LENGTH (t);
2669 int i;
2671 switch (TREE_CODE_CLASS (code))
2673 case tcc_expression:
2674 switch (code)
2676 case INIT_EXPR:
2677 case MODIFY_EXPR:
2678 case VA_ARG_EXPR:
2679 case PREDECREMENT_EXPR:
2680 case PREINCREMENT_EXPR:
2681 case POSTDECREMENT_EXPR:
2682 case POSTINCREMENT_EXPR:
2683 /* All of these have side-effects, no matter what their
2684 operands are. */
2685 return;
2687 default:
2688 break;
2690 /* Fall through. */
2692 case tcc_comparison: /* a comparison expression */
2693 case tcc_unary: /* a unary arithmetic expression */
2694 case tcc_binary: /* a binary arithmetic expression */
2695 case tcc_reference: /* a reference */
2696 case tcc_vl_exp: /* a function call */
2697 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2698 for (i = 0; i < len; ++i)
2700 tree op = TREE_OPERAND (t, i);
2701 if (op && TREE_SIDE_EFFECTS (op))
2702 TREE_SIDE_EFFECTS (t) = 1;
2704 break;
2706 case tcc_constant:
2707 /* No side-effects. */
2708 return;
2710 default:
2711 gcc_unreachable ();
2715 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2716 node *EXPR_P.
2718 compound_lval
2719 : min_lval '[' val ']'
2720 | min_lval '.' ID
2721 | compound_lval '[' val ']'
2722 | compound_lval '.' ID
2724 This is not part of the original SIMPLE definition, which separates
2725 array and member references, but it seems reasonable to handle them
2726 together. Also, this way we don't run into problems with union
2727 aliasing; gcc requires that for accesses through a union to alias, the
2728 union reference must be explicit, which was not always the case when we
2729 were splitting up array and member refs.
2731 PRE_P points to the sequence where side effects that must happen before
2732 *EXPR_P should be stored.
2734 POST_P points to the sequence where side effects that must happen after
2735 *EXPR_P should be stored. */
2737 static enum gimplify_status
2738 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2739 fallback_t fallback)
2741 tree *p;
2742 enum gimplify_status ret = GS_ALL_DONE, tret;
2743 int i;
2744 location_t loc = EXPR_LOCATION (*expr_p);
2745 tree expr = *expr_p;
2747 /* Create a stack of the subexpressions so later we can walk them in
2748 order from inner to outer. */
2749 auto_vec<tree, 10> expr_stack;
2751 /* We can handle anything that get_inner_reference can deal with. */
2752 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2754 restart:
2755 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2756 if (TREE_CODE (*p) == INDIRECT_REF)
2757 *p = fold_indirect_ref_loc (loc, *p);
2759 if (handled_component_p (*p))
2761 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2762 additional COMPONENT_REFs. */
2763 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2764 && gimplify_var_or_parm_decl (p) == GS_OK)
2765 goto restart;
2766 else
2767 break;
2769 expr_stack.safe_push (*p);
2772 gcc_assert (expr_stack.length ());
2774 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2775 walked through and P points to the innermost expression.
2777 Java requires that we elaborated nodes in source order. That
2778 means we must gimplify the inner expression followed by each of
2779 the indices, in order. But we can't gimplify the inner
2780 expression until we deal with any variable bounds, sizes, or
2781 positions in order to deal with PLACEHOLDER_EXPRs.
2783 So we do this in three steps. First we deal with the annotations
2784 for any variables in the components, then we gimplify the base,
2785 then we gimplify any indices, from left to right. */
2786 for (i = expr_stack.length () - 1; i >= 0; i--)
2788 tree t = expr_stack[i];
2790 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2792 /* Gimplify the low bound and element type size and put them into
2793 the ARRAY_REF. If these values are set, they have already been
2794 gimplified. */
2795 if (TREE_OPERAND (t, 2) == NULL_TREE)
2797 tree low = unshare_expr (array_ref_low_bound (t));
2798 if (!is_gimple_min_invariant (low))
2800 TREE_OPERAND (t, 2) = low;
2801 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2802 post_p, is_gimple_reg,
2803 fb_rvalue);
2804 ret = MIN (ret, tret);
2807 else
2809 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2810 is_gimple_reg, fb_rvalue);
2811 ret = MIN (ret, tret);
2814 if (TREE_OPERAND (t, 3) == NULL_TREE)
2816 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2817 tree elmt_size = unshare_expr (array_ref_element_size (t));
2818 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2820 /* Divide the element size by the alignment of the element
2821 type (above). */
2822 elmt_size
2823 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2825 if (!is_gimple_min_invariant (elmt_size))
2827 TREE_OPERAND (t, 3) = elmt_size;
2828 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2829 post_p, is_gimple_reg,
2830 fb_rvalue);
2831 ret = MIN (ret, tret);
2834 else
2836 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2837 is_gimple_reg, fb_rvalue);
2838 ret = MIN (ret, tret);
2841 else if (TREE_CODE (t) == COMPONENT_REF)
2843 /* Set the field offset into T and gimplify it. */
2844 if (TREE_OPERAND (t, 2) == NULL_TREE)
2846 tree offset = unshare_expr (component_ref_field_offset (t));
2847 tree field = TREE_OPERAND (t, 1);
2848 tree factor
2849 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2851 /* Divide the offset by its alignment. */
2852 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2854 if (!is_gimple_min_invariant (offset))
2856 TREE_OPERAND (t, 2) = offset;
2857 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2858 post_p, is_gimple_reg,
2859 fb_rvalue);
2860 ret = MIN (ret, tret);
2863 else
2865 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2866 is_gimple_reg, fb_rvalue);
2867 ret = MIN (ret, tret);
2872 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2873 so as to match the min_lval predicate. Failure to do so may result
2874 in the creation of large aggregate temporaries. */
2875 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2876 fallback | fb_lvalue);
2877 ret = MIN (ret, tret);
2879 /* And finally, the indices and operands of ARRAY_REF. During this
2880 loop we also remove any useless conversions. */
2881 for (; expr_stack.length () > 0; )
2883 tree t = expr_stack.pop ();
2885 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2887 /* Gimplify the dimension. */
2888 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2890 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2891 is_gimple_val, fb_rvalue);
2892 ret = MIN (ret, tret);
2896 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2898 /* The innermost expression P may have originally had
2899 TREE_SIDE_EFFECTS set which would have caused all the outer
2900 expressions in *EXPR_P leading to P to also have had
2901 TREE_SIDE_EFFECTS set. */
2902 recalculate_side_effects (t);
2905 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2906 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2908 canonicalize_component_ref (expr_p);
2911 expr_stack.release ();
2913 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2915 return ret;
2918 /* Gimplify the self modifying expression pointed to by EXPR_P
2919 (++, --, +=, -=).
2921 PRE_P points to the list where side effects that must happen before
2922 *EXPR_P should be stored.
2924 POST_P points to the list where side effects that must happen after
2925 *EXPR_P should be stored.
2927 WANT_VALUE is nonzero iff we want to use the value of this expression
2928 in another expression.
2930 ARITH_TYPE is the type the computation should be performed in. */
2932 enum gimplify_status
2933 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2934 bool want_value, tree arith_type)
2936 enum tree_code code;
2937 tree lhs, lvalue, rhs, t1;
2938 gimple_seq post = NULL, *orig_post_p = post_p;
2939 bool postfix;
2940 enum tree_code arith_code;
2941 enum gimplify_status ret;
2942 location_t loc = EXPR_LOCATION (*expr_p);
2944 code = TREE_CODE (*expr_p);
2946 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2947 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2949 /* Prefix or postfix? */
2950 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2951 /* Faster to treat as prefix if result is not used. */
2952 postfix = want_value;
2953 else
2954 postfix = false;
2956 /* For postfix, make sure the inner expression's post side effects
2957 are executed after side effects from this expression. */
2958 if (postfix)
2959 post_p = &post;
2961 /* Add or subtract? */
2962 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2963 arith_code = PLUS_EXPR;
2964 else
2965 arith_code = MINUS_EXPR;
2967 /* Gimplify the LHS into a GIMPLE lvalue. */
2968 lvalue = TREE_OPERAND (*expr_p, 0);
2969 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2970 if (ret == GS_ERROR)
2971 return ret;
2973 /* Extract the operands to the arithmetic operation. */
2974 lhs = lvalue;
2975 rhs = TREE_OPERAND (*expr_p, 1);
2977 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2978 that as the result value and in the postqueue operation. */
2979 if (postfix)
2981 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2982 if (ret == GS_ERROR)
2983 return ret;
2985 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2988 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2989 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2991 rhs = convert_to_ptrofftype_loc (loc, rhs);
2992 if (arith_code == MINUS_EXPR)
2993 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2994 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2996 else
2997 t1 = fold_convert (TREE_TYPE (*expr_p),
2998 fold_build2 (arith_code, arith_type,
2999 fold_convert (arith_type, lhs),
3000 fold_convert (arith_type, rhs)));
3002 if (postfix)
3004 gimplify_assign (lvalue, t1, pre_p);
3005 gimplify_seq_add_seq (orig_post_p, post);
3006 *expr_p = lhs;
3007 return GS_ALL_DONE;
3009 else
3011 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3012 return GS_OK;
3016 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3018 static void
3019 maybe_with_size_expr (tree *expr_p)
3021 tree expr = *expr_p;
3022 tree type = TREE_TYPE (expr);
3023 tree size;
3025 /* If we've already wrapped this or the type is error_mark_node, we can't do
3026 anything. */
3027 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3028 || type == error_mark_node)
3029 return;
3031 /* If the size isn't known or is a constant, we have nothing to do. */
3032 size = TYPE_SIZE_UNIT (type);
3033 if (!size || TREE_CODE (size) == INTEGER_CST)
3034 return;
3036 /* Otherwise, make a WITH_SIZE_EXPR. */
3037 size = unshare_expr (size);
3038 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3039 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3042 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3043 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3044 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3045 gimplified to an SSA name. */
3047 enum gimplify_status
3048 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3049 bool allow_ssa)
3051 bool (*test) (tree);
3052 fallback_t fb;
3054 /* In general, we allow lvalues for function arguments to avoid
3055 extra overhead of copying large aggregates out of even larger
3056 aggregates into temporaries only to copy the temporaries to
3057 the argument list. Make optimizers happy by pulling out to
3058 temporaries those types that fit in registers. */
3059 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3060 test = is_gimple_val, fb = fb_rvalue;
3061 else
3063 test = is_gimple_lvalue, fb = fb_either;
3064 /* Also strip a TARGET_EXPR that would force an extra copy. */
3065 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3067 tree init = TARGET_EXPR_INITIAL (*arg_p);
3068 if (init
3069 && !VOID_TYPE_P (TREE_TYPE (init)))
3070 *arg_p = init;
3074 /* If this is a variable sized type, we must remember the size. */
3075 maybe_with_size_expr (arg_p);
3077 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3078 /* Make sure arguments have the same location as the function call
3079 itself. */
3080 protected_set_expr_location (*arg_p, call_location);
3082 /* There is a sequence point before a function call. Side effects in
3083 the argument list must occur before the actual call. So, when
3084 gimplifying arguments, force gimplify_expr to use an internal
3085 post queue which is then appended to the end of PRE_P. */
3086 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3089 /* Don't fold inside offloading or taskreg regions: it can break code by
3090 adding decl references that weren't in the source. We'll do it during
3091 omplower pass instead. */
3093 static bool
3094 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3096 struct gimplify_omp_ctx *ctx;
3097 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3098 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3099 return false;
3100 return fold_stmt (gsi);
3103 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3104 with the pointer to the proper cilk frame. */
3105 static void
3106 gimplify_cilk_detach (gimple_seq *pre_p)
3108 tree frame = cfun->cilk_frame_decl;
3109 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3110 frame);
3111 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3112 ptrf);
3113 gimplify_seq_add_stmt(pre_p, detach);
3116 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3117 WANT_VALUE is true if the result of the call is desired. */
3119 static enum gimplify_status
3120 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3122 tree fndecl, parms, p, fnptrtype;
3123 enum gimplify_status ret;
3124 int i, nargs;
3125 gcall *call;
3126 bool builtin_va_start_p = false;
3127 location_t loc = EXPR_LOCATION (*expr_p);
3129 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3131 /* For reliable diagnostics during inlining, it is necessary that
3132 every call_expr be annotated with file and line. */
3133 if (! EXPR_HAS_LOCATION (*expr_p))
3134 SET_EXPR_LOCATION (*expr_p, input_location);
3136 /* Gimplify internal functions created in the FEs. */
3137 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3139 if (want_value)
3140 return GS_ALL_DONE;
3142 nargs = call_expr_nargs (*expr_p);
3143 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3144 auto_vec<tree> vargs (nargs);
3146 for (i = 0; i < nargs; i++)
3148 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3149 EXPR_LOCATION (*expr_p));
3150 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3153 if (EXPR_CILK_SPAWN (*expr_p))
3154 gimplify_cilk_detach (pre_p);
3155 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3156 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3157 gimplify_seq_add_stmt (pre_p, call);
3158 return GS_ALL_DONE;
3161 /* This may be a call to a builtin function.
3163 Builtin function calls may be transformed into different
3164 (and more efficient) builtin function calls under certain
3165 circumstances. Unfortunately, gimplification can muck things
3166 up enough that the builtin expanders are not aware that certain
3167 transformations are still valid.
3169 So we attempt transformation/gimplification of the call before
3170 we gimplify the CALL_EXPR. At this time we do not manage to
3171 transform all calls in the same manner as the expanders do, but
3172 we do transform most of them. */
3173 fndecl = get_callee_fndecl (*expr_p);
3174 if (fndecl
3175 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3176 switch (DECL_FUNCTION_CODE (fndecl))
3178 CASE_BUILT_IN_ALLOCA:
3179 /* If the call has been built for a variable-sized object, then we
3180 want to restore the stack level when the enclosing BIND_EXPR is
3181 exited to reclaim the allocated space; otherwise, we precisely
3182 need to do the opposite and preserve the latest stack level. */
3183 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3184 gimplify_ctxp->save_stack = true;
3185 else
3186 gimplify_ctxp->keep_stack = true;
3187 break;
3189 case BUILT_IN_VA_START:
3191 builtin_va_start_p = TRUE;
3192 if (call_expr_nargs (*expr_p) < 2)
3194 error ("too few arguments to function %<va_start%>");
3195 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3196 return GS_OK;
3199 if (fold_builtin_next_arg (*expr_p, true))
3201 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3202 return GS_OK;
3204 break;
3207 default:
3210 if (fndecl && DECL_BUILT_IN (fndecl))
3212 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3213 if (new_tree && new_tree != *expr_p)
3215 /* There was a transformation of this call which computes the
3216 same value, but in a more efficient way. Return and try
3217 again. */
3218 *expr_p = new_tree;
3219 return GS_OK;
3223 /* Remember the original function pointer type. */
3224 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3226 /* There is a sequence point before the call, so any side effects in
3227 the calling expression must occur before the actual call. Force
3228 gimplify_expr to use an internal post queue. */
3229 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3230 is_gimple_call_addr, fb_rvalue);
3232 nargs = call_expr_nargs (*expr_p);
3234 /* Get argument types for verification. */
3235 fndecl = get_callee_fndecl (*expr_p);
3236 parms = NULL_TREE;
3237 if (fndecl)
3238 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3239 else
3240 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3242 if (fndecl && DECL_ARGUMENTS (fndecl))
3243 p = DECL_ARGUMENTS (fndecl);
3244 else if (parms)
3245 p = parms;
3246 else
3247 p = NULL_TREE;
3248 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3251 /* If the last argument is __builtin_va_arg_pack () and it is not
3252 passed as a named argument, decrease the number of CALL_EXPR
3253 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3254 if (!p
3255 && i < nargs
3256 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3258 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3259 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3261 if (last_arg_fndecl
3262 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3263 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3264 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3266 tree call = *expr_p;
3268 --nargs;
3269 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3270 CALL_EXPR_FN (call),
3271 nargs, CALL_EXPR_ARGP (call));
3273 /* Copy all CALL_EXPR flags, location and block, except
3274 CALL_EXPR_VA_ARG_PACK flag. */
3275 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3276 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3277 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3278 = CALL_EXPR_RETURN_SLOT_OPT (call);
3279 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3280 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3282 /* Set CALL_EXPR_VA_ARG_PACK. */
3283 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3287 /* If the call returns twice then after building the CFG the call
3288 argument computations will no longer dominate the call because
3289 we add an abnormal incoming edge to the call. So do not use SSA
3290 vars there. */
3291 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3293 /* Gimplify the function arguments. */
3294 if (nargs > 0)
3296 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3297 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3298 PUSH_ARGS_REVERSED ? i-- : i++)
3300 enum gimplify_status t;
3302 /* Avoid gimplifying the second argument to va_start, which needs to
3303 be the plain PARM_DECL. */
3304 if ((i != 1) || !builtin_va_start_p)
3306 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3307 EXPR_LOCATION (*expr_p), ! returns_twice);
3309 if (t == GS_ERROR)
3310 ret = GS_ERROR;
3315 /* Gimplify the static chain. */
3316 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3318 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3319 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3320 else
3322 enum gimplify_status t;
3323 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3324 EXPR_LOCATION (*expr_p), ! returns_twice);
3325 if (t == GS_ERROR)
3326 ret = GS_ERROR;
3330 /* Verify the function result. */
3331 if (want_value && fndecl
3332 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3334 error_at (loc, "using result of function returning %<void%>");
3335 ret = GS_ERROR;
3338 /* Try this again in case gimplification exposed something. */
3339 if (ret != GS_ERROR)
3341 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3343 if (new_tree && new_tree != *expr_p)
3345 /* There was a transformation of this call which computes the
3346 same value, but in a more efficient way. Return and try
3347 again. */
3348 *expr_p = new_tree;
3349 return GS_OK;
3352 else
3354 *expr_p = error_mark_node;
3355 return GS_ERROR;
3358 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3359 decl. This allows us to eliminate redundant or useless
3360 calls to "const" functions. */
3361 if (TREE_CODE (*expr_p) == CALL_EXPR)
3363 int flags = call_expr_flags (*expr_p);
3364 if (flags & (ECF_CONST | ECF_PURE)
3365 /* An infinite loop is considered a side effect. */
3366 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3367 TREE_SIDE_EFFECTS (*expr_p) = 0;
3370 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3371 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3372 form and delegate the creation of a GIMPLE_CALL to
3373 gimplify_modify_expr. This is always possible because when
3374 WANT_VALUE is true, the caller wants the result of this call into
3375 a temporary, which means that we will emit an INIT_EXPR in
3376 internal_get_tmp_var which will then be handled by
3377 gimplify_modify_expr. */
3378 if (!want_value)
3380 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3381 have to do is replicate it as a GIMPLE_CALL tuple. */
3382 gimple_stmt_iterator gsi;
3383 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3384 notice_special_calls (call);
3385 if (EXPR_CILK_SPAWN (*expr_p))
3386 gimplify_cilk_detach (pre_p);
3387 gimplify_seq_add_stmt (pre_p, call);
3388 gsi = gsi_last (*pre_p);
3389 maybe_fold_stmt (&gsi);
3390 *expr_p = NULL_TREE;
3392 else
3393 /* Remember the original function type. */
3394 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3395 CALL_EXPR_FN (*expr_p));
3397 return ret;
3400 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3401 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3403 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3404 condition is true or false, respectively. If null, we should generate
3405 our own to skip over the evaluation of this specific expression.
3407 LOCUS is the source location of the COND_EXPR.
3409 This function is the tree equivalent of do_jump.
3411 shortcut_cond_r should only be called by shortcut_cond_expr. */
3413 static tree
3414 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3415 location_t locus)
3417 tree local_label = NULL_TREE;
3418 tree t, expr = NULL;
3420 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3421 retain the shortcut semantics. Just insert the gotos here;
3422 shortcut_cond_expr will append the real blocks later. */
3423 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3425 location_t new_locus;
3427 /* Turn if (a && b) into
3429 if (a); else goto no;
3430 if (b) goto yes; else goto no;
3431 (no:) */
3433 if (false_label_p == NULL)
3434 false_label_p = &local_label;
3436 /* Keep the original source location on the first 'if'. */
3437 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3438 append_to_statement_list (t, &expr);
3440 /* Set the source location of the && on the second 'if'. */
3441 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3442 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3443 new_locus);
3444 append_to_statement_list (t, &expr);
3446 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3448 location_t new_locus;
3450 /* Turn if (a || b) into
3452 if (a) goto yes;
3453 if (b) goto yes; else goto no;
3454 (yes:) */
3456 if (true_label_p == NULL)
3457 true_label_p = &local_label;
3459 /* Keep the original source location on the first 'if'. */
3460 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3461 append_to_statement_list (t, &expr);
3463 /* Set the source location of the || on the second 'if'. */
3464 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3465 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3466 new_locus);
3467 append_to_statement_list (t, &expr);
3469 else if (TREE_CODE (pred) == COND_EXPR
3470 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3471 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3473 location_t new_locus;
3475 /* As long as we're messing with gotos, turn if (a ? b : c) into
3476 if (a)
3477 if (b) goto yes; else goto no;
3478 else
3479 if (c) goto yes; else goto no;
3481 Don't do this if one of the arms has void type, which can happen
3482 in C++ when the arm is throw. */
3484 /* Keep the original source location on the first 'if'. Set the source
3485 location of the ? on the second 'if'. */
3486 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3487 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3488 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3489 false_label_p, locus),
3490 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3491 false_label_p, new_locus));
3493 else
3495 expr = build3 (COND_EXPR, void_type_node, pred,
3496 build_and_jump (true_label_p),
3497 build_and_jump (false_label_p));
3498 SET_EXPR_LOCATION (expr, locus);
3501 if (local_label)
3503 t = build1 (LABEL_EXPR, void_type_node, local_label);
3504 append_to_statement_list (t, &expr);
3507 return expr;
3510 /* Given a conditional expression EXPR with short-circuit boolean
3511 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3512 predicate apart into the equivalent sequence of conditionals. */
3514 static tree
3515 shortcut_cond_expr (tree expr)
3517 tree pred = TREE_OPERAND (expr, 0);
3518 tree then_ = TREE_OPERAND (expr, 1);
3519 tree else_ = TREE_OPERAND (expr, 2);
3520 tree true_label, false_label, end_label, t;
3521 tree *true_label_p;
3522 tree *false_label_p;
3523 bool emit_end, emit_false, jump_over_else;
3524 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3525 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3527 /* First do simple transformations. */
3528 if (!else_se)
3530 /* If there is no 'else', turn
3531 if (a && b) then c
3532 into
3533 if (a) if (b) then c. */
3534 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3536 /* Keep the original source location on the first 'if'. */
3537 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3538 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3539 /* Set the source location of the && on the second 'if'. */
3540 if (EXPR_HAS_LOCATION (pred))
3541 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3542 then_ = shortcut_cond_expr (expr);
3543 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3544 pred = TREE_OPERAND (pred, 0);
3545 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3546 SET_EXPR_LOCATION (expr, locus);
3550 if (!then_se)
3552 /* If there is no 'then', turn
3553 if (a || b); else d
3554 into
3555 if (a); else if (b); else d. */
3556 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3558 /* Keep the original source location on the first 'if'. */
3559 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3560 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3561 /* Set the source location of the || on the second 'if'. */
3562 if (EXPR_HAS_LOCATION (pred))
3563 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3564 else_ = shortcut_cond_expr (expr);
3565 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3566 pred = TREE_OPERAND (pred, 0);
3567 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3568 SET_EXPR_LOCATION (expr, locus);
3572 /* If we're done, great. */
3573 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3574 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3575 return expr;
3577 /* Otherwise we need to mess with gotos. Change
3578 if (a) c; else d;
3580 if (a); else goto no;
3581 c; goto end;
3582 no: d; end:
3583 and recursively gimplify the condition. */
3585 true_label = false_label = end_label = NULL_TREE;
3587 /* If our arms just jump somewhere, hijack those labels so we don't
3588 generate jumps to jumps. */
3590 if (then_
3591 && TREE_CODE (then_) == GOTO_EXPR
3592 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3594 true_label = GOTO_DESTINATION (then_);
3595 then_ = NULL;
3596 then_se = false;
3599 if (else_
3600 && TREE_CODE (else_) == GOTO_EXPR
3601 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3603 false_label = GOTO_DESTINATION (else_);
3604 else_ = NULL;
3605 else_se = false;
3608 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3609 if (true_label)
3610 true_label_p = &true_label;
3611 else
3612 true_label_p = NULL;
3614 /* The 'else' branch also needs a label if it contains interesting code. */
3615 if (false_label || else_se)
3616 false_label_p = &false_label;
3617 else
3618 false_label_p = NULL;
3620 /* If there was nothing else in our arms, just forward the label(s). */
3621 if (!then_se && !else_se)
3622 return shortcut_cond_r (pred, true_label_p, false_label_p,
3623 EXPR_LOC_OR_LOC (expr, input_location));
3625 /* If our last subexpression already has a terminal label, reuse it. */
3626 if (else_se)
3627 t = expr_last (else_);
3628 else if (then_se)
3629 t = expr_last (then_);
3630 else
3631 t = NULL;
3632 if (t && TREE_CODE (t) == LABEL_EXPR)
3633 end_label = LABEL_EXPR_LABEL (t);
3635 /* If we don't care about jumping to the 'else' branch, jump to the end
3636 if the condition is false. */
3637 if (!false_label_p)
3638 false_label_p = &end_label;
3640 /* We only want to emit these labels if we aren't hijacking them. */
3641 emit_end = (end_label == NULL_TREE);
3642 emit_false = (false_label == NULL_TREE);
3644 /* We only emit the jump over the else clause if we have to--if the
3645 then clause may fall through. Otherwise we can wind up with a
3646 useless jump and a useless label at the end of gimplified code,
3647 which will cause us to think that this conditional as a whole
3648 falls through even if it doesn't. If we then inline a function
3649 which ends with such a condition, that can cause us to issue an
3650 inappropriate warning about control reaching the end of a
3651 non-void function. */
3652 jump_over_else = block_may_fallthru (then_);
3654 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3655 EXPR_LOC_OR_LOC (expr, input_location));
3657 expr = NULL;
3658 append_to_statement_list (pred, &expr);
3660 append_to_statement_list (then_, &expr);
3661 if (else_se)
3663 if (jump_over_else)
3665 tree last = expr_last (expr);
3666 t = build_and_jump (&end_label);
3667 if (EXPR_HAS_LOCATION (last))
3668 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3669 append_to_statement_list (t, &expr);
3671 if (emit_false)
3673 t = build1 (LABEL_EXPR, void_type_node, false_label);
3674 append_to_statement_list (t, &expr);
3676 append_to_statement_list (else_, &expr);
3678 if (emit_end && end_label)
3680 t = build1 (LABEL_EXPR, void_type_node, end_label);
3681 append_to_statement_list (t, &expr);
3684 return expr;
3687 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3689 tree
3690 gimple_boolify (tree expr)
3692 tree type = TREE_TYPE (expr);
3693 location_t loc = EXPR_LOCATION (expr);
3695 if (TREE_CODE (expr) == NE_EXPR
3696 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3697 && integer_zerop (TREE_OPERAND (expr, 1)))
3699 tree call = TREE_OPERAND (expr, 0);
3700 tree fn = get_callee_fndecl (call);
3702 /* For __builtin_expect ((long) (x), y) recurse into x as well
3703 if x is truth_value_p. */
3704 if (fn
3705 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3706 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3707 && call_expr_nargs (call) == 2)
3709 tree arg = CALL_EXPR_ARG (call, 0);
3710 if (arg)
3712 if (TREE_CODE (arg) == NOP_EXPR
3713 && TREE_TYPE (arg) == TREE_TYPE (call))
3714 arg = TREE_OPERAND (arg, 0);
3715 if (truth_value_p (TREE_CODE (arg)))
3717 arg = gimple_boolify (arg);
3718 CALL_EXPR_ARG (call, 0)
3719 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3725 switch (TREE_CODE (expr))
3727 case TRUTH_AND_EXPR:
3728 case TRUTH_OR_EXPR:
3729 case TRUTH_XOR_EXPR:
3730 case TRUTH_ANDIF_EXPR:
3731 case TRUTH_ORIF_EXPR:
3732 /* Also boolify the arguments of truth exprs. */
3733 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3734 /* FALLTHRU */
3736 case TRUTH_NOT_EXPR:
3737 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3739 /* These expressions always produce boolean results. */
3740 if (TREE_CODE (type) != BOOLEAN_TYPE)
3741 TREE_TYPE (expr) = boolean_type_node;
3742 return expr;
3744 case ANNOTATE_EXPR:
3745 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3747 case annot_expr_ivdep_kind:
3748 case annot_expr_no_vector_kind:
3749 case annot_expr_vector_kind:
3750 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3751 if (TREE_CODE (type) != BOOLEAN_TYPE)
3752 TREE_TYPE (expr) = boolean_type_node;
3753 return expr;
3754 default:
3755 gcc_unreachable ();
3758 default:
3759 if (COMPARISON_CLASS_P (expr))
3761 /* There expressions always prduce boolean results. */
3762 if (TREE_CODE (type) != BOOLEAN_TYPE)
3763 TREE_TYPE (expr) = boolean_type_node;
3764 return expr;
3766 /* Other expressions that get here must have boolean values, but
3767 might need to be converted to the appropriate mode. */
3768 if (TREE_CODE (type) == BOOLEAN_TYPE)
3769 return expr;
3770 return fold_convert_loc (loc, boolean_type_node, expr);
3774 /* Given a conditional expression *EXPR_P without side effects, gimplify
3775 its operands. New statements are inserted to PRE_P. */
3777 static enum gimplify_status
3778 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3780 tree expr = *expr_p, cond;
3781 enum gimplify_status ret, tret;
3782 enum tree_code code;
3784 cond = gimple_boolify (COND_EXPR_COND (expr));
3786 /* We need to handle && and || specially, as their gimplification
3787 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3788 code = TREE_CODE (cond);
3789 if (code == TRUTH_ANDIF_EXPR)
3790 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3791 else if (code == TRUTH_ORIF_EXPR)
3792 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3793 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3794 COND_EXPR_COND (*expr_p) = cond;
3796 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3797 is_gimple_val, fb_rvalue);
3798 ret = MIN (ret, tret);
3799 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3800 is_gimple_val, fb_rvalue);
3802 return MIN (ret, tret);
3805 /* Return true if evaluating EXPR could trap.
3806 EXPR is GENERIC, while tree_could_trap_p can be called
3807 only on GIMPLE. */
3809 static bool
3810 generic_expr_could_trap_p (tree expr)
3812 unsigned i, n;
3814 if (!expr || is_gimple_val (expr))
3815 return false;
3817 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3818 return true;
3820 n = TREE_OPERAND_LENGTH (expr);
3821 for (i = 0; i < n; i++)
3822 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3823 return true;
3825 return false;
3828 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3829 into
3831 if (p) if (p)
3832 t1 = a; a;
3833 else or else
3834 t1 = b; b;
3837 The second form is used when *EXPR_P is of type void.
3839 PRE_P points to the list where side effects that must happen before
3840 *EXPR_P should be stored. */
3842 static enum gimplify_status
3843 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3845 tree expr = *expr_p;
3846 tree type = TREE_TYPE (expr);
3847 location_t loc = EXPR_LOCATION (expr);
3848 tree tmp, arm1, arm2;
3849 enum gimplify_status ret;
3850 tree label_true, label_false, label_cont;
3851 bool have_then_clause_p, have_else_clause_p;
3852 gcond *cond_stmt;
3853 enum tree_code pred_code;
3854 gimple_seq seq = NULL;
3856 /* If this COND_EXPR has a value, copy the values into a temporary within
3857 the arms. */
3858 if (!VOID_TYPE_P (type))
3860 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3861 tree result;
3863 /* If either an rvalue is ok or we do not require an lvalue, create the
3864 temporary. But we cannot do that if the type is addressable. */
3865 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3866 && !TREE_ADDRESSABLE (type))
3868 if (gimplify_ctxp->allow_rhs_cond_expr
3869 /* If either branch has side effects or could trap, it can't be
3870 evaluated unconditionally. */
3871 && !TREE_SIDE_EFFECTS (then_)
3872 && !generic_expr_could_trap_p (then_)
3873 && !TREE_SIDE_EFFECTS (else_)
3874 && !generic_expr_could_trap_p (else_))
3875 return gimplify_pure_cond_expr (expr_p, pre_p);
3877 tmp = create_tmp_var (type, "iftmp");
3878 result = tmp;
3881 /* Otherwise, only create and copy references to the values. */
3882 else
3884 type = build_pointer_type (type);
3886 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3887 then_ = build_fold_addr_expr_loc (loc, then_);
3889 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3890 else_ = build_fold_addr_expr_loc (loc, else_);
3892 expr
3893 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3895 tmp = create_tmp_var (type, "iftmp");
3896 result = build_simple_mem_ref_loc (loc, tmp);
3899 /* Build the new then clause, `tmp = then_;'. But don't build the
3900 assignment if the value is void; in C++ it can be if it's a throw. */
3901 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3902 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3904 /* Similarly, build the new else clause, `tmp = else_;'. */
3905 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3906 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3908 TREE_TYPE (expr) = void_type_node;
3909 recalculate_side_effects (expr);
3911 /* Move the COND_EXPR to the prequeue. */
3912 gimplify_stmt (&expr, pre_p);
3914 *expr_p = result;
3915 return GS_ALL_DONE;
3918 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3919 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3920 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3921 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3923 /* Make sure the condition has BOOLEAN_TYPE. */
3924 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3926 /* Break apart && and || conditions. */
3927 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3928 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3930 expr = shortcut_cond_expr (expr);
3932 if (expr != *expr_p)
3934 *expr_p = expr;
3936 /* We can't rely on gimplify_expr to re-gimplify the expanded
3937 form properly, as cleanups might cause the target labels to be
3938 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3939 set up a conditional context. */
3940 gimple_push_condition ();
3941 gimplify_stmt (expr_p, &seq);
3942 gimple_pop_condition (pre_p);
3943 gimple_seq_add_seq (pre_p, seq);
3945 return GS_ALL_DONE;
3949 /* Now do the normal gimplification. */
3951 /* Gimplify condition. */
3952 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3953 fb_rvalue);
3954 if (ret == GS_ERROR)
3955 return GS_ERROR;
3956 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3958 gimple_push_condition ();
3960 have_then_clause_p = have_else_clause_p = false;
3961 if (TREE_OPERAND (expr, 1) != NULL
3962 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3963 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3964 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3965 == current_function_decl)
3966 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3967 have different locations, otherwise we end up with incorrect
3968 location information on the branches. */
3969 && (optimize
3970 || !EXPR_HAS_LOCATION (expr)
3971 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3972 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3974 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3975 have_then_clause_p = true;
3977 else
3978 label_true = create_artificial_label (UNKNOWN_LOCATION);
3979 if (TREE_OPERAND (expr, 2) != NULL
3980 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3981 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3982 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3983 == current_function_decl)
3984 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3985 have different locations, otherwise we end up with incorrect
3986 location information on the branches. */
3987 && (optimize
3988 || !EXPR_HAS_LOCATION (expr)
3989 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3990 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3992 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3993 have_else_clause_p = true;
3995 else
3996 label_false = create_artificial_label (UNKNOWN_LOCATION);
3998 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3999 &arm2);
4000 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4001 label_false);
4002 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4003 gimplify_seq_add_stmt (&seq, cond_stmt);
4004 gimple_stmt_iterator gsi = gsi_last (seq);
4005 maybe_fold_stmt (&gsi);
4007 label_cont = NULL_TREE;
4008 if (!have_then_clause_p)
4010 /* For if (...) {} else { code; } put label_true after
4011 the else block. */
4012 if (TREE_OPERAND (expr, 1) == NULL_TREE
4013 && !have_else_clause_p
4014 && TREE_OPERAND (expr, 2) != NULL_TREE)
4015 label_cont = label_true;
4016 else
4018 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4019 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4020 /* For if (...) { code; } else {} or
4021 if (...) { code; } else goto label; or
4022 if (...) { code; return; } else { ... }
4023 label_cont isn't needed. */
4024 if (!have_else_clause_p
4025 && TREE_OPERAND (expr, 2) != NULL_TREE
4026 && gimple_seq_may_fallthru (seq))
4028 gimple *g;
4029 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4031 g = gimple_build_goto (label_cont);
4033 /* GIMPLE_COND's are very low level; they have embedded
4034 gotos. This particular embedded goto should not be marked
4035 with the location of the original COND_EXPR, as it would
4036 correspond to the COND_EXPR's condition, not the ELSE or the
4037 THEN arms. To avoid marking it with the wrong location, flag
4038 it as "no location". */
4039 gimple_set_do_not_emit_location (g);
4041 gimplify_seq_add_stmt (&seq, g);
4045 if (!have_else_clause_p)
4047 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4048 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4050 if (label_cont)
4051 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4053 gimple_pop_condition (pre_p);
4054 gimple_seq_add_seq (pre_p, seq);
4056 if (ret == GS_ERROR)
4057 ; /* Do nothing. */
4058 else if (have_then_clause_p || have_else_clause_p)
4059 ret = GS_ALL_DONE;
4060 else
4062 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4063 expr = TREE_OPERAND (expr, 0);
4064 gimplify_stmt (&expr, pre_p);
4067 *expr_p = NULL;
4068 return ret;
4071 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4072 to be marked addressable.
4074 We cannot rely on such an expression being directly markable if a temporary
4075 has been created by the gimplification. In this case, we create another
4076 temporary and initialize it with a copy, which will become a store after we
4077 mark it addressable. This can happen if the front-end passed us something
4078 that it could not mark addressable yet, like a Fortran pass-by-reference
4079 parameter (int) floatvar. */
4081 static void
4082 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4084 while (handled_component_p (*expr_p))
4085 expr_p = &TREE_OPERAND (*expr_p, 0);
4086 if (is_gimple_reg (*expr_p))
4088 /* Do not allow an SSA name as the temporary. */
4089 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4090 DECL_GIMPLE_REG_P (var) = 0;
4091 *expr_p = var;
4095 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4096 a call to __builtin_memcpy. */
4098 static enum gimplify_status
4099 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4100 gimple_seq *seq_p)
4102 tree t, to, to_ptr, from, from_ptr;
4103 gcall *gs;
4104 location_t loc = EXPR_LOCATION (*expr_p);
4106 to = TREE_OPERAND (*expr_p, 0);
4107 from = TREE_OPERAND (*expr_p, 1);
4109 /* Mark the RHS addressable. Beware that it may not be possible to do so
4110 directly if a temporary has been created by the gimplification. */
4111 prepare_gimple_addressable (&from, seq_p);
4113 mark_addressable (from);
4114 from_ptr = build_fold_addr_expr_loc (loc, from);
4115 gimplify_arg (&from_ptr, seq_p, loc);
4117 mark_addressable (to);
4118 to_ptr = build_fold_addr_expr_loc (loc, to);
4119 gimplify_arg (&to_ptr, seq_p, loc);
4121 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4123 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4125 if (want_value)
4127 /* tmp = memcpy() */
4128 t = create_tmp_var (TREE_TYPE (to_ptr));
4129 gimple_call_set_lhs (gs, t);
4130 gimplify_seq_add_stmt (seq_p, gs);
4132 *expr_p = build_simple_mem_ref (t);
4133 return GS_ALL_DONE;
4136 gimplify_seq_add_stmt (seq_p, gs);
4137 *expr_p = NULL;
4138 return GS_ALL_DONE;
4141 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4142 a call to __builtin_memset. In this case we know that the RHS is
4143 a CONSTRUCTOR with an empty element list. */
4145 static enum gimplify_status
4146 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4147 gimple_seq *seq_p)
4149 tree t, from, to, to_ptr;
4150 gcall *gs;
4151 location_t loc = EXPR_LOCATION (*expr_p);
4153 /* Assert our assumptions, to abort instead of producing wrong code
4154 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4155 not be immediately exposed. */
4156 from = TREE_OPERAND (*expr_p, 1);
4157 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4158 from = TREE_OPERAND (from, 0);
4160 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4161 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4163 /* Now proceed. */
4164 to = TREE_OPERAND (*expr_p, 0);
4166 to_ptr = build_fold_addr_expr_loc (loc, to);
4167 gimplify_arg (&to_ptr, seq_p, loc);
4168 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4170 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4172 if (want_value)
4174 /* tmp = memset() */
4175 t = create_tmp_var (TREE_TYPE (to_ptr));
4176 gimple_call_set_lhs (gs, t);
4177 gimplify_seq_add_stmt (seq_p, gs);
4179 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4180 return GS_ALL_DONE;
4183 gimplify_seq_add_stmt (seq_p, gs);
4184 *expr_p = NULL;
4185 return GS_ALL_DONE;
4188 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4189 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4190 assignment. Return non-null if we detect a potential overlap. */
4192 struct gimplify_init_ctor_preeval_data
4194 /* The base decl of the lhs object. May be NULL, in which case we
4195 have to assume the lhs is indirect. */
4196 tree lhs_base_decl;
4198 /* The alias set of the lhs object. */
4199 alias_set_type lhs_alias_set;
4202 static tree
4203 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4205 struct gimplify_init_ctor_preeval_data *data
4206 = (struct gimplify_init_ctor_preeval_data *) xdata;
4207 tree t = *tp;
4209 /* If we find the base object, obviously we have overlap. */
4210 if (data->lhs_base_decl == t)
4211 return t;
4213 /* If the constructor component is indirect, determine if we have a
4214 potential overlap with the lhs. The only bits of information we
4215 have to go on at this point are addressability and alias sets. */
4216 if ((INDIRECT_REF_P (t)
4217 || TREE_CODE (t) == MEM_REF)
4218 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4219 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4220 return t;
4222 /* If the constructor component is a call, determine if it can hide a
4223 potential overlap with the lhs through an INDIRECT_REF like above.
4224 ??? Ugh - this is completely broken. In fact this whole analysis
4225 doesn't look conservative. */
4226 if (TREE_CODE (t) == CALL_EXPR)
4228 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4230 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4231 if (POINTER_TYPE_P (TREE_VALUE (type))
4232 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4233 && alias_sets_conflict_p (data->lhs_alias_set,
4234 get_alias_set
4235 (TREE_TYPE (TREE_VALUE (type)))))
4236 return t;
4239 if (IS_TYPE_OR_DECL_P (t))
4240 *walk_subtrees = 0;
4241 return NULL;
4244 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4245 force values that overlap with the lhs (as described by *DATA)
4246 into temporaries. */
4248 static void
4249 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4250 struct gimplify_init_ctor_preeval_data *data)
4252 enum gimplify_status one;
4254 /* If the value is constant, then there's nothing to pre-evaluate. */
4255 if (TREE_CONSTANT (*expr_p))
4257 /* Ensure it does not have side effects, it might contain a reference to
4258 the object we're initializing. */
4259 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4260 return;
4263 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4264 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4265 return;
4267 /* Recurse for nested constructors. */
4268 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4270 unsigned HOST_WIDE_INT ix;
4271 constructor_elt *ce;
4272 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4274 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4275 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4277 return;
4280 /* If this is a variable sized type, we must remember the size. */
4281 maybe_with_size_expr (expr_p);
4283 /* Gimplify the constructor element to something appropriate for the rhs
4284 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4285 the gimplifier will consider this a store to memory. Doing this
4286 gimplification now means that we won't have to deal with complicated
4287 language-specific trees, nor trees like SAVE_EXPR that can induce
4288 exponential search behavior. */
4289 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4290 if (one == GS_ERROR)
4292 *expr_p = NULL;
4293 return;
4296 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4297 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4298 always be true for all scalars, since is_gimple_mem_rhs insists on a
4299 temporary variable for them. */
4300 if (DECL_P (*expr_p))
4301 return;
4303 /* If this is of variable size, we have no choice but to assume it doesn't
4304 overlap since we can't make a temporary for it. */
4305 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4306 return;
4308 /* Otherwise, we must search for overlap ... */
4309 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4310 return;
4312 /* ... and if found, force the value into a temporary. */
4313 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4316 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4317 a RANGE_EXPR in a CONSTRUCTOR for an array.
4319 var = lower;
4320 loop_entry:
4321 object[var] = value;
4322 if (var == upper)
4323 goto loop_exit;
4324 var = var + 1;
4325 goto loop_entry;
4326 loop_exit:
4328 We increment var _after_ the loop exit check because we might otherwise
4329 fail if upper == TYPE_MAX_VALUE (type for upper).
4331 Note that we never have to deal with SAVE_EXPRs here, because this has
4332 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4334 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4335 gimple_seq *, bool);
4337 static void
4338 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4339 tree value, tree array_elt_type,
4340 gimple_seq *pre_p, bool cleared)
4342 tree loop_entry_label, loop_exit_label, fall_thru_label;
4343 tree var, var_type, cref, tmp;
4345 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4346 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4347 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4349 /* Create and initialize the index variable. */
4350 var_type = TREE_TYPE (upper);
4351 var = create_tmp_var (var_type);
4352 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4354 /* Add the loop entry label. */
4355 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4357 /* Build the reference. */
4358 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4359 var, NULL_TREE, NULL_TREE);
4361 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4362 the store. Otherwise just assign value to the reference. */
4364 if (TREE_CODE (value) == CONSTRUCTOR)
4365 /* NB we might have to call ourself recursively through
4366 gimplify_init_ctor_eval if the value is a constructor. */
4367 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4368 pre_p, cleared);
4369 else
4370 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4372 /* We exit the loop when the index var is equal to the upper bound. */
4373 gimplify_seq_add_stmt (pre_p,
4374 gimple_build_cond (EQ_EXPR, var, upper,
4375 loop_exit_label, fall_thru_label));
4377 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4379 /* Otherwise, increment the index var... */
4380 tmp = build2 (PLUS_EXPR, var_type, var,
4381 fold_convert (var_type, integer_one_node));
4382 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4384 /* ...and jump back to the loop entry. */
4385 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4387 /* Add the loop exit label. */
4388 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4391 /* Return true if FDECL is accessing a field that is zero sized. */
4393 static bool
4394 zero_sized_field_decl (const_tree fdecl)
4396 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4397 && integer_zerop (DECL_SIZE (fdecl)))
4398 return true;
4399 return false;
4402 /* Return true if TYPE is zero sized. */
4404 static bool
4405 zero_sized_type (const_tree type)
4407 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4408 && integer_zerop (TYPE_SIZE (type)))
4409 return true;
4410 return false;
4413 /* A subroutine of gimplify_init_constructor. Generate individual
4414 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4415 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4416 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4417 zeroed first. */
4419 static void
4420 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4421 gimple_seq *pre_p, bool cleared)
4423 tree array_elt_type = NULL;
4424 unsigned HOST_WIDE_INT ix;
4425 tree purpose, value;
4427 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4428 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4430 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4432 tree cref;
4434 /* NULL values are created above for gimplification errors. */
4435 if (value == NULL)
4436 continue;
4438 if (cleared && initializer_zerop (value))
4439 continue;
4441 /* ??? Here's to hoping the front end fills in all of the indices,
4442 so we don't have to figure out what's missing ourselves. */
4443 gcc_assert (purpose);
4445 /* Skip zero-sized fields, unless value has side-effects. This can
4446 happen with calls to functions returning a zero-sized type, which
4447 we shouldn't discard. As a number of downstream passes don't
4448 expect sets of zero-sized fields, we rely on the gimplification of
4449 the MODIFY_EXPR we make below to drop the assignment statement. */
4450 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4451 continue;
4453 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4454 whole range. */
4455 if (TREE_CODE (purpose) == RANGE_EXPR)
4457 tree lower = TREE_OPERAND (purpose, 0);
4458 tree upper = TREE_OPERAND (purpose, 1);
4460 /* If the lower bound is equal to upper, just treat it as if
4461 upper was the index. */
4462 if (simple_cst_equal (lower, upper))
4463 purpose = upper;
4464 else
4466 gimplify_init_ctor_eval_range (object, lower, upper, value,
4467 array_elt_type, pre_p, cleared);
4468 continue;
4472 if (array_elt_type)
4474 /* Do not use bitsizetype for ARRAY_REF indices. */
4475 if (TYPE_DOMAIN (TREE_TYPE (object)))
4476 purpose
4477 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4478 purpose);
4479 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4480 purpose, NULL_TREE, NULL_TREE);
4482 else
4484 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4485 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4486 unshare_expr (object), purpose, NULL_TREE);
4489 if (TREE_CODE (value) == CONSTRUCTOR
4490 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4491 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4492 pre_p, cleared);
4493 else
4495 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4496 gimplify_and_add (init, pre_p);
4497 ggc_free (init);
4502 /* Return the appropriate RHS predicate for this LHS. */
4504 gimple_predicate
4505 rhs_predicate_for (tree lhs)
4507 if (is_gimple_reg (lhs))
4508 return is_gimple_reg_rhs_or_call;
4509 else
4510 return is_gimple_mem_rhs_or_call;
4513 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4514 before the LHS has been gimplified. */
4516 static gimple_predicate
4517 initial_rhs_predicate_for (tree lhs)
4519 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4520 return is_gimple_reg_rhs_or_call;
4521 else
4522 return is_gimple_mem_rhs_or_call;
4525 /* Gimplify a C99 compound literal expression. This just means adding
4526 the DECL_EXPR before the current statement and using its anonymous
4527 decl instead. */
4529 static enum gimplify_status
4530 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4531 bool (*gimple_test_f) (tree),
4532 fallback_t fallback)
4534 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4535 tree decl = DECL_EXPR_DECL (decl_s);
4536 tree init = DECL_INITIAL (decl);
4537 /* Mark the decl as addressable if the compound literal
4538 expression is addressable now, otherwise it is marked too late
4539 after we gimplify the initialization expression. */
4540 if (TREE_ADDRESSABLE (*expr_p))
4541 TREE_ADDRESSABLE (decl) = 1;
4542 /* Otherwise, if we don't need an lvalue and have a literal directly
4543 substitute it. Check if it matches the gimple predicate, as
4544 otherwise we'd generate a new temporary, and we can as well just
4545 use the decl we already have. */
4546 else if (!TREE_ADDRESSABLE (decl)
4547 && init
4548 && (fallback & fb_lvalue) == 0
4549 && gimple_test_f (init))
4551 *expr_p = init;
4552 return GS_OK;
4555 /* Preliminarily mark non-addressed complex variables as eligible
4556 for promotion to gimple registers. We'll transform their uses
4557 as we find them. */
4558 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4559 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4560 && !TREE_THIS_VOLATILE (decl)
4561 && !needs_to_live_in_memory (decl))
4562 DECL_GIMPLE_REG_P (decl) = 1;
4564 /* If the decl is not addressable, then it is being used in some
4565 expression or on the right hand side of a statement, and it can
4566 be put into a readonly data section. */
4567 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4568 TREE_READONLY (decl) = 1;
4570 /* This decl isn't mentioned in the enclosing block, so add it to the
4571 list of temps. FIXME it seems a bit of a kludge to say that
4572 anonymous artificial vars aren't pushed, but everything else is. */
4573 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4574 gimple_add_tmp_var (decl);
4576 gimplify_and_add (decl_s, pre_p);
4577 *expr_p = decl;
4578 return GS_OK;
4581 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4582 return a new CONSTRUCTOR if something changed. */
4584 static tree
4585 optimize_compound_literals_in_ctor (tree orig_ctor)
4587 tree ctor = orig_ctor;
4588 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4589 unsigned int idx, num = vec_safe_length (elts);
4591 for (idx = 0; idx < num; idx++)
4593 tree value = (*elts)[idx].value;
4594 tree newval = value;
4595 if (TREE_CODE (value) == CONSTRUCTOR)
4596 newval = optimize_compound_literals_in_ctor (value);
4597 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4599 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4600 tree decl = DECL_EXPR_DECL (decl_s);
4601 tree init = DECL_INITIAL (decl);
4603 if (!TREE_ADDRESSABLE (value)
4604 && !TREE_ADDRESSABLE (decl)
4605 && init
4606 && TREE_CODE (init) == CONSTRUCTOR)
4607 newval = optimize_compound_literals_in_ctor (init);
4609 if (newval == value)
4610 continue;
4612 if (ctor == orig_ctor)
4614 ctor = copy_node (orig_ctor);
4615 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4616 elts = CONSTRUCTOR_ELTS (ctor);
4618 (*elts)[idx].value = newval;
4620 return ctor;
4623 /* A subroutine of gimplify_modify_expr. Break out elements of a
4624 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4626 Note that we still need to clear any elements that don't have explicit
4627 initializers, so if not all elements are initialized we keep the
4628 original MODIFY_EXPR, we just remove all of the constructor elements.
4630 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4631 GS_ERROR if we would have to create a temporary when gimplifying
4632 this constructor. Otherwise, return GS_OK.
4634 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4636 static enum gimplify_status
4637 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4638 bool want_value, bool notify_temp_creation)
4640 tree object, ctor, type;
4641 enum gimplify_status ret;
4642 vec<constructor_elt, va_gc> *elts;
4644 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4646 if (!notify_temp_creation)
4648 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4649 is_gimple_lvalue, fb_lvalue);
4650 if (ret == GS_ERROR)
4651 return ret;
4654 object = TREE_OPERAND (*expr_p, 0);
4655 ctor = TREE_OPERAND (*expr_p, 1)
4656 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4657 type = TREE_TYPE (ctor);
4658 elts = CONSTRUCTOR_ELTS (ctor);
4659 ret = GS_ALL_DONE;
4661 switch (TREE_CODE (type))
4663 case RECORD_TYPE:
4664 case UNION_TYPE:
4665 case QUAL_UNION_TYPE:
4666 case ARRAY_TYPE:
4668 struct gimplify_init_ctor_preeval_data preeval_data;
4669 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4670 bool cleared, complete_p, valid_const_initializer;
4672 /* Aggregate types must lower constructors to initialization of
4673 individual elements. The exception is that a CONSTRUCTOR node
4674 with no elements indicates zero-initialization of the whole. */
4675 if (vec_safe_is_empty (elts))
4677 if (notify_temp_creation)
4678 return GS_OK;
4679 break;
4682 /* Fetch information about the constructor to direct later processing.
4683 We might want to make static versions of it in various cases, and
4684 can only do so if it known to be a valid constant initializer. */
4685 valid_const_initializer
4686 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4687 &num_ctor_elements, &complete_p);
4689 /* If a const aggregate variable is being initialized, then it
4690 should never be a lose to promote the variable to be static. */
4691 if (valid_const_initializer
4692 && num_nonzero_elements > 1
4693 && TREE_READONLY (object)
4694 && VAR_P (object)
4695 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4697 if (notify_temp_creation)
4698 return GS_ERROR;
4699 DECL_INITIAL (object) = ctor;
4700 TREE_STATIC (object) = 1;
4701 if (!DECL_NAME (object))
4702 DECL_NAME (object) = create_tmp_var_name ("C");
4703 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4705 /* ??? C++ doesn't automatically append a .<number> to the
4706 assembler name, and even when it does, it looks at FE private
4707 data structures to figure out what that number should be,
4708 which are not set for this variable. I suppose this is
4709 important for local statics for inline functions, which aren't
4710 "local" in the object file sense. So in order to get a unique
4711 TU-local symbol, we must invoke the lhd version now. */
4712 lhd_set_decl_assembler_name (object);
4714 *expr_p = NULL_TREE;
4715 break;
4718 /* If there are "lots" of initialized elements, even discounting
4719 those that are not address constants (and thus *must* be
4720 computed at runtime), then partition the constructor into
4721 constant and non-constant parts. Block copy the constant
4722 parts in, then generate code for the non-constant parts. */
4723 /* TODO. There's code in cp/typeck.c to do this. */
4725 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4726 /* store_constructor will ignore the clearing of variable-sized
4727 objects. Initializers for such objects must explicitly set
4728 every field that needs to be set. */
4729 cleared = false;
4730 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4731 /* If the constructor isn't complete, clear the whole object
4732 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4734 ??? This ought not to be needed. For any element not present
4735 in the initializer, we should simply set them to zero. Except
4736 we'd need to *find* the elements that are not present, and that
4737 requires trickery to avoid quadratic compile-time behavior in
4738 large cases or excessive memory use in small cases. */
4739 cleared = true;
4740 else if (num_ctor_elements - num_nonzero_elements
4741 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4742 && num_nonzero_elements < num_ctor_elements / 4)
4743 /* If there are "lots" of zeros, it's more efficient to clear
4744 the memory and then set the nonzero elements. */
4745 cleared = true;
4746 else
4747 cleared = false;
4749 /* If there are "lots" of initialized elements, and all of them
4750 are valid address constants, then the entire initializer can
4751 be dropped to memory, and then memcpy'd out. Don't do this
4752 for sparse arrays, though, as it's more efficient to follow
4753 the standard CONSTRUCTOR behavior of memset followed by
4754 individual element initialization. Also don't do this for small
4755 all-zero initializers (which aren't big enough to merit
4756 clearing), and don't try to make bitwise copies of
4757 TREE_ADDRESSABLE types.
4759 We cannot apply such transformation when compiling chkp static
4760 initializer because creation of initializer image in the memory
4761 will require static initialization of bounds for it. It should
4762 result in another gimplification of similar initializer and we
4763 may fall into infinite loop. */
4764 if (valid_const_initializer
4765 && !(cleared || num_nonzero_elements == 0)
4766 && !TREE_ADDRESSABLE (type)
4767 && (!current_function_decl
4768 || !lookup_attribute ("chkp ctor",
4769 DECL_ATTRIBUTES (current_function_decl))))
4771 HOST_WIDE_INT size = int_size_in_bytes (type);
4772 unsigned int align;
4774 /* ??? We can still get unbounded array types, at least
4775 from the C++ front end. This seems wrong, but attempt
4776 to work around it for now. */
4777 if (size < 0)
4779 size = int_size_in_bytes (TREE_TYPE (object));
4780 if (size >= 0)
4781 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4784 /* Find the maximum alignment we can assume for the object. */
4785 /* ??? Make use of DECL_OFFSET_ALIGN. */
4786 if (DECL_P (object))
4787 align = DECL_ALIGN (object);
4788 else
4789 align = TYPE_ALIGN (type);
4791 /* Do a block move either if the size is so small as to make
4792 each individual move a sub-unit move on average, or if it
4793 is so large as to make individual moves inefficient. */
4794 if (size > 0
4795 && num_nonzero_elements > 1
4796 && (size < num_nonzero_elements
4797 || !can_move_by_pieces (size, align)))
4799 if (notify_temp_creation)
4800 return GS_ERROR;
4802 walk_tree (&ctor, force_labels_r, NULL, NULL);
4803 ctor = tree_output_constant_def (ctor);
4804 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4805 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4806 TREE_OPERAND (*expr_p, 1) = ctor;
4808 /* This is no longer an assignment of a CONSTRUCTOR, but
4809 we still may have processing to do on the LHS. So
4810 pretend we didn't do anything here to let that happen. */
4811 return GS_UNHANDLED;
4815 /* If the target is volatile, we have non-zero elements and more than
4816 one field to assign, initialize the target from a temporary. */
4817 if (TREE_THIS_VOLATILE (object)
4818 && !TREE_ADDRESSABLE (type)
4819 && num_nonzero_elements > 0
4820 && vec_safe_length (elts) > 1)
4822 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4823 TREE_OPERAND (*expr_p, 0) = temp;
4824 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4825 *expr_p,
4826 build2 (MODIFY_EXPR, void_type_node,
4827 object, temp));
4828 return GS_OK;
4831 if (notify_temp_creation)
4832 return GS_OK;
4834 /* If there are nonzero elements and if needed, pre-evaluate to capture
4835 elements overlapping with the lhs into temporaries. We must do this
4836 before clearing to fetch the values before they are zeroed-out. */
4837 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4839 preeval_data.lhs_base_decl = get_base_address (object);
4840 if (!DECL_P (preeval_data.lhs_base_decl))
4841 preeval_data.lhs_base_decl = NULL;
4842 preeval_data.lhs_alias_set = get_alias_set (object);
4844 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4845 pre_p, post_p, &preeval_data);
4848 bool ctor_has_side_effects_p
4849 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4851 if (cleared)
4853 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4854 Note that we still have to gimplify, in order to handle the
4855 case of variable sized types. Avoid shared tree structures. */
4856 CONSTRUCTOR_ELTS (ctor) = NULL;
4857 TREE_SIDE_EFFECTS (ctor) = 0;
4858 object = unshare_expr (object);
4859 gimplify_stmt (expr_p, pre_p);
4862 /* If we have not block cleared the object, or if there are nonzero
4863 elements in the constructor, or if the constructor has side effects,
4864 add assignments to the individual scalar fields of the object. */
4865 if (!cleared
4866 || num_nonzero_elements > 0
4867 || ctor_has_side_effects_p)
4868 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4870 *expr_p = NULL_TREE;
4872 break;
4874 case COMPLEX_TYPE:
4876 tree r, i;
4878 if (notify_temp_creation)
4879 return GS_OK;
4881 /* Extract the real and imaginary parts out of the ctor. */
4882 gcc_assert (elts->length () == 2);
4883 r = (*elts)[0].value;
4884 i = (*elts)[1].value;
4885 if (r == NULL || i == NULL)
4887 tree zero = build_zero_cst (TREE_TYPE (type));
4888 if (r == NULL)
4889 r = zero;
4890 if (i == NULL)
4891 i = zero;
4894 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4895 represent creation of a complex value. */
4896 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4898 ctor = build_complex (type, r, i);
4899 TREE_OPERAND (*expr_p, 1) = ctor;
4901 else
4903 ctor = build2 (COMPLEX_EXPR, type, r, i);
4904 TREE_OPERAND (*expr_p, 1) = ctor;
4905 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4906 pre_p,
4907 post_p,
4908 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4909 fb_rvalue);
4912 break;
4914 case VECTOR_TYPE:
4916 unsigned HOST_WIDE_INT ix;
4917 constructor_elt *ce;
4919 if (notify_temp_creation)
4920 return GS_OK;
4922 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4923 if (TREE_CONSTANT (ctor))
4925 bool constant_p = true;
4926 tree value;
4928 /* Even when ctor is constant, it might contain non-*_CST
4929 elements, such as addresses or trapping values like
4930 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4931 in VECTOR_CST nodes. */
4932 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4933 if (!CONSTANT_CLASS_P (value))
4935 constant_p = false;
4936 break;
4939 if (constant_p)
4941 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4942 break;
4945 TREE_CONSTANT (ctor) = 0;
4948 /* Vector types use CONSTRUCTOR all the way through gimple
4949 compilation as a general initializer. */
4950 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4952 enum gimplify_status tret;
4953 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4954 fb_rvalue);
4955 if (tret == GS_ERROR)
4956 ret = GS_ERROR;
4957 else if (TREE_STATIC (ctor)
4958 && !initializer_constant_valid_p (ce->value,
4959 TREE_TYPE (ce->value)))
4960 TREE_STATIC (ctor) = 0;
4962 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4963 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4965 break;
4967 default:
4968 /* So how did we get a CONSTRUCTOR for a scalar type? */
4969 gcc_unreachable ();
4972 if (ret == GS_ERROR)
4973 return GS_ERROR;
4974 /* If we have gimplified both sides of the initializer but have
4975 not emitted an assignment, do so now. */
4976 if (*expr_p)
4978 tree lhs = TREE_OPERAND (*expr_p, 0);
4979 tree rhs = TREE_OPERAND (*expr_p, 1);
4980 if (want_value && object == lhs)
4981 lhs = unshare_expr (lhs);
4982 gassign *init = gimple_build_assign (lhs, rhs);
4983 gimplify_seq_add_stmt (pre_p, init);
4985 if (want_value)
4987 *expr_p = object;
4988 return GS_OK;
4990 else
4992 *expr_p = NULL;
4993 return GS_ALL_DONE;
4997 /* Given a pointer value OP0, return a simplified version of an
4998 indirection through OP0, or NULL_TREE if no simplification is
4999 possible. This may only be applied to a rhs of an expression.
5000 Note that the resulting type may be different from the type pointed
5001 to in the sense that it is still compatible from the langhooks
5002 point of view. */
5004 static tree
5005 gimple_fold_indirect_ref_rhs (tree t)
5007 return gimple_fold_indirect_ref (t);
5010 /* Subroutine of gimplify_modify_expr to do simplifications of
5011 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5012 something changes. */
5014 static enum gimplify_status
5015 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5016 gimple_seq *pre_p, gimple_seq *post_p,
5017 bool want_value)
5019 enum gimplify_status ret = GS_UNHANDLED;
5020 bool changed;
5024 changed = false;
5025 switch (TREE_CODE (*from_p))
5027 case VAR_DECL:
5028 /* If we're assigning from a read-only variable initialized with
5029 a constructor, do the direct assignment from the constructor,
5030 but only if neither source nor target are volatile since this
5031 latter assignment might end up being done on a per-field basis. */
5032 if (DECL_INITIAL (*from_p)
5033 && TREE_READONLY (*from_p)
5034 && !TREE_THIS_VOLATILE (*from_p)
5035 && !TREE_THIS_VOLATILE (*to_p)
5036 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5038 tree old_from = *from_p;
5039 enum gimplify_status subret;
5041 /* Move the constructor into the RHS. */
5042 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5044 /* Let's see if gimplify_init_constructor will need to put
5045 it in memory. */
5046 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5047 false, true);
5048 if (subret == GS_ERROR)
5050 /* If so, revert the change. */
5051 *from_p = old_from;
5053 else
5055 ret = GS_OK;
5056 changed = true;
5059 break;
5060 case INDIRECT_REF:
5062 /* If we have code like
5064 *(const A*)(A*)&x
5066 where the type of "x" is a (possibly cv-qualified variant
5067 of "A"), treat the entire expression as identical to "x".
5068 This kind of code arises in C++ when an object is bound
5069 to a const reference, and if "x" is a TARGET_EXPR we want
5070 to take advantage of the optimization below. */
5071 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5072 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5073 if (t)
5075 if (TREE_THIS_VOLATILE (t) != volatile_p)
5077 if (DECL_P (t))
5078 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5079 build_fold_addr_expr (t));
5080 if (REFERENCE_CLASS_P (t))
5081 TREE_THIS_VOLATILE (t) = volatile_p;
5083 *from_p = t;
5084 ret = GS_OK;
5085 changed = true;
5087 break;
5090 case TARGET_EXPR:
5092 /* If we are initializing something from a TARGET_EXPR, strip the
5093 TARGET_EXPR and initialize it directly, if possible. This can't
5094 be done if the initializer is void, since that implies that the
5095 temporary is set in some non-trivial way.
5097 ??? What about code that pulls out the temp and uses it
5098 elsewhere? I think that such code never uses the TARGET_EXPR as
5099 an initializer. If I'm wrong, we'll die because the temp won't
5100 have any RTL. In that case, I guess we'll need to replace
5101 references somehow. */
5102 tree init = TARGET_EXPR_INITIAL (*from_p);
5104 if (init
5105 && !VOID_TYPE_P (TREE_TYPE (init)))
5107 *from_p = init;
5108 ret = GS_OK;
5109 changed = true;
5112 break;
5114 case COMPOUND_EXPR:
5115 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5116 caught. */
5117 gimplify_compound_expr (from_p, pre_p, true);
5118 ret = GS_OK;
5119 changed = true;
5120 break;
5122 case CONSTRUCTOR:
5123 /* If we already made some changes, let the front end have a
5124 crack at this before we break it down. */
5125 if (ret != GS_UNHANDLED)
5126 break;
5127 /* If we're initializing from a CONSTRUCTOR, break this into
5128 individual MODIFY_EXPRs. */
5129 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5130 false);
5132 case COND_EXPR:
5133 /* If we're assigning to a non-register type, push the assignment
5134 down into the branches. This is mandatory for ADDRESSABLE types,
5135 since we cannot generate temporaries for such, but it saves a
5136 copy in other cases as well. */
5137 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5139 /* This code should mirror the code in gimplify_cond_expr. */
5140 enum tree_code code = TREE_CODE (*expr_p);
5141 tree cond = *from_p;
5142 tree result = *to_p;
5144 ret = gimplify_expr (&result, pre_p, post_p,
5145 is_gimple_lvalue, fb_lvalue);
5146 if (ret != GS_ERROR)
5147 ret = GS_OK;
5149 /* If we are going to write RESULT more than once, clear
5150 TREE_READONLY flag, otherwise we might incorrectly promote
5151 the variable to static const and initialize it at compile
5152 time in one of the branches. */
5153 if (VAR_P (result)
5154 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5155 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5156 TREE_READONLY (result) = 0;
5157 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5158 TREE_OPERAND (cond, 1)
5159 = build2 (code, void_type_node, result,
5160 TREE_OPERAND (cond, 1));
5161 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5162 TREE_OPERAND (cond, 2)
5163 = build2 (code, void_type_node, unshare_expr (result),
5164 TREE_OPERAND (cond, 2));
5166 TREE_TYPE (cond) = void_type_node;
5167 recalculate_side_effects (cond);
5169 if (want_value)
5171 gimplify_and_add (cond, pre_p);
5172 *expr_p = unshare_expr (result);
5174 else
5175 *expr_p = cond;
5176 return ret;
5178 break;
5180 case CALL_EXPR:
5181 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5182 return slot so that we don't generate a temporary. */
5183 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5184 && aggregate_value_p (*from_p, *from_p))
5186 bool use_target;
5188 if (!(rhs_predicate_for (*to_p))(*from_p))
5189 /* If we need a temporary, *to_p isn't accurate. */
5190 use_target = false;
5191 /* It's OK to use the return slot directly unless it's an NRV. */
5192 else if (TREE_CODE (*to_p) == RESULT_DECL
5193 && DECL_NAME (*to_p) == NULL_TREE
5194 && needs_to_live_in_memory (*to_p))
5195 use_target = true;
5196 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5197 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5198 /* Don't force regs into memory. */
5199 use_target = false;
5200 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5201 /* It's OK to use the target directly if it's being
5202 initialized. */
5203 use_target = true;
5204 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5205 != INTEGER_CST)
5206 /* Always use the target and thus RSO for variable-sized types.
5207 GIMPLE cannot deal with a variable-sized assignment
5208 embedded in a call statement. */
5209 use_target = true;
5210 else if (TREE_CODE (*to_p) != SSA_NAME
5211 && (!is_gimple_variable (*to_p)
5212 || needs_to_live_in_memory (*to_p)))
5213 /* Don't use the original target if it's already addressable;
5214 if its address escapes, and the called function uses the
5215 NRV optimization, a conforming program could see *to_p
5216 change before the called function returns; see c++/19317.
5217 When optimizing, the return_slot pass marks more functions
5218 as safe after we have escape info. */
5219 use_target = false;
5220 else
5221 use_target = true;
5223 if (use_target)
5225 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5226 mark_addressable (*to_p);
5229 break;
5231 case WITH_SIZE_EXPR:
5232 /* Likewise for calls that return an aggregate of non-constant size,
5233 since we would not be able to generate a temporary at all. */
5234 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5236 *from_p = TREE_OPERAND (*from_p, 0);
5237 /* We don't change ret in this case because the
5238 WITH_SIZE_EXPR might have been added in
5239 gimplify_modify_expr, so returning GS_OK would lead to an
5240 infinite loop. */
5241 changed = true;
5243 break;
5245 /* If we're initializing from a container, push the initialization
5246 inside it. */
5247 case CLEANUP_POINT_EXPR:
5248 case BIND_EXPR:
5249 case STATEMENT_LIST:
5251 tree wrap = *from_p;
5252 tree t;
5254 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5255 fb_lvalue);
5256 if (ret != GS_ERROR)
5257 ret = GS_OK;
5259 t = voidify_wrapper_expr (wrap, *expr_p);
5260 gcc_assert (t == *expr_p);
5262 if (want_value)
5264 gimplify_and_add (wrap, pre_p);
5265 *expr_p = unshare_expr (*to_p);
5267 else
5268 *expr_p = wrap;
5269 return GS_OK;
5272 case COMPOUND_LITERAL_EXPR:
5274 tree complit = TREE_OPERAND (*expr_p, 1);
5275 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5276 tree decl = DECL_EXPR_DECL (decl_s);
5277 tree init = DECL_INITIAL (decl);
5279 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5280 into struct T x = { 0, 1, 2 } if the address of the
5281 compound literal has never been taken. */
5282 if (!TREE_ADDRESSABLE (complit)
5283 && !TREE_ADDRESSABLE (decl)
5284 && init)
5286 *expr_p = copy_node (*expr_p);
5287 TREE_OPERAND (*expr_p, 1) = init;
5288 return GS_OK;
5292 default:
5293 break;
5296 while (changed);
5298 return ret;
5302 /* Return true if T looks like a valid GIMPLE statement. */
5304 static bool
5305 is_gimple_stmt (tree t)
5307 const enum tree_code code = TREE_CODE (t);
5309 switch (code)
5311 case NOP_EXPR:
5312 /* The only valid NOP_EXPR is the empty statement. */
5313 return IS_EMPTY_STMT (t);
5315 case BIND_EXPR:
5316 case COND_EXPR:
5317 /* These are only valid if they're void. */
5318 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5320 case SWITCH_EXPR:
5321 case GOTO_EXPR:
5322 case RETURN_EXPR:
5323 case LABEL_EXPR:
5324 case CASE_LABEL_EXPR:
5325 case TRY_CATCH_EXPR:
5326 case TRY_FINALLY_EXPR:
5327 case EH_FILTER_EXPR:
5328 case CATCH_EXPR:
5329 case ASM_EXPR:
5330 case STATEMENT_LIST:
5331 case OACC_PARALLEL:
5332 case OACC_KERNELS:
5333 case OACC_DATA:
5334 case OACC_HOST_DATA:
5335 case OACC_DECLARE:
5336 case OACC_UPDATE:
5337 case OACC_ENTER_DATA:
5338 case OACC_EXIT_DATA:
5339 case OACC_CACHE:
5340 case OMP_PARALLEL:
5341 case OMP_FOR:
5342 case OMP_SIMD:
5343 case CILK_SIMD:
5344 case OMP_DISTRIBUTE:
5345 case OACC_LOOP:
5346 case OMP_SECTIONS:
5347 case OMP_SECTION:
5348 case OMP_SINGLE:
5349 case OMP_MASTER:
5350 case OMP_TASKGROUP:
5351 case OMP_ORDERED:
5352 case OMP_CRITICAL:
5353 case OMP_TASK:
5354 case OMP_TARGET:
5355 case OMP_TARGET_DATA:
5356 case OMP_TARGET_UPDATE:
5357 case OMP_TARGET_ENTER_DATA:
5358 case OMP_TARGET_EXIT_DATA:
5359 case OMP_TASKLOOP:
5360 case OMP_TEAMS:
5361 /* These are always void. */
5362 return true;
5364 case CALL_EXPR:
5365 case MODIFY_EXPR:
5366 case PREDICT_EXPR:
5367 /* These are valid regardless of their type. */
5368 return true;
5370 default:
5371 return false;
5376 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5377 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5378 DECL_GIMPLE_REG_P set.
5380 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5381 other, unmodified part of the complex object just before the total store.
5382 As a consequence, if the object is still uninitialized, an undefined value
5383 will be loaded into a register, which may result in a spurious exception
5384 if the register is floating-point and the value happens to be a signaling
5385 NaN for example. Then the fully-fledged complex operations lowering pass
5386 followed by a DCE pass are necessary in order to fix things up. */
5388 static enum gimplify_status
5389 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5390 bool want_value)
5392 enum tree_code code, ocode;
5393 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5395 lhs = TREE_OPERAND (*expr_p, 0);
5396 rhs = TREE_OPERAND (*expr_p, 1);
5397 code = TREE_CODE (lhs);
5398 lhs = TREE_OPERAND (lhs, 0);
5400 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5401 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5402 TREE_NO_WARNING (other) = 1;
5403 other = get_formal_tmp_var (other, pre_p);
5405 realpart = code == REALPART_EXPR ? rhs : other;
5406 imagpart = code == REALPART_EXPR ? other : rhs;
5408 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5409 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5410 else
5411 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5413 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5414 *expr_p = (want_value) ? rhs : NULL_TREE;
5416 return GS_ALL_DONE;
5419 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5421 modify_expr
5422 : varname '=' rhs
5423 | '*' ID '=' rhs
5425 PRE_P points to the list where side effects that must happen before
5426 *EXPR_P should be stored.
5428 POST_P points to the list where side effects that must happen after
5429 *EXPR_P should be stored.
5431 WANT_VALUE is nonzero iff we want to use the value of this expression
5432 in another expression. */
5434 static enum gimplify_status
5435 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5436 bool want_value)
5438 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5439 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5440 enum gimplify_status ret = GS_UNHANDLED;
5441 gimple *assign;
5442 location_t loc = EXPR_LOCATION (*expr_p);
5443 gimple_stmt_iterator gsi;
5445 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5446 || TREE_CODE (*expr_p) == INIT_EXPR);
5448 /* Trying to simplify a clobber using normal logic doesn't work,
5449 so handle it here. */
5450 if (TREE_CLOBBER_P (*from_p))
5452 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5453 if (ret == GS_ERROR)
5454 return ret;
5455 gcc_assert (!want_value
5456 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5457 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5458 *expr_p = NULL;
5459 return GS_ALL_DONE;
5462 /* Insert pointer conversions required by the middle-end that are not
5463 required by the frontend. This fixes middle-end type checking for
5464 for example gcc.dg/redecl-6.c. */
5465 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5467 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5468 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5469 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5472 /* See if any simplifications can be done based on what the RHS is. */
5473 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5474 want_value);
5475 if (ret != GS_UNHANDLED)
5476 return ret;
5478 /* For zero sized types only gimplify the left hand side and right hand
5479 side as statements and throw away the assignment. Do this after
5480 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5481 types properly. */
5482 if (zero_sized_type (TREE_TYPE (*from_p))
5483 && !want_value
5484 /* Don't do this for calls that return addressable types, expand_call
5485 relies on those having a lhs. */
5486 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5487 && TREE_CODE (*from_p) == CALL_EXPR))
5489 gimplify_stmt (from_p, pre_p);
5490 gimplify_stmt (to_p, pre_p);
5491 *expr_p = NULL_TREE;
5492 return GS_ALL_DONE;
5495 /* If the value being copied is of variable width, compute the length
5496 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5497 before gimplifying any of the operands so that we can resolve any
5498 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5499 the size of the expression to be copied, not of the destination, so
5500 that is what we must do here. */
5501 maybe_with_size_expr (from_p);
5503 /* As a special case, we have to temporarily allow for assignments
5504 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5505 a toplevel statement, when gimplifying the GENERIC expression
5506 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5507 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5509 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5510 prevent gimplify_expr from trying to create a new temporary for
5511 foo's LHS, we tell it that it should only gimplify until it
5512 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5513 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5514 and all we need to do here is set 'a' to be its LHS. */
5516 /* Gimplify the RHS first for C++17 and bug 71104. */
5517 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5518 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5519 if (ret == GS_ERROR)
5520 return ret;
5522 /* Then gimplify the LHS. */
5523 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5524 twice we have to make sure to gimplify into non-SSA as otherwise
5525 the abnormal edge added later will make those defs not dominate
5526 their uses.
5527 ??? Technically this applies only to the registers used in the
5528 resulting non-register *TO_P. */
5529 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5530 if (saved_into_ssa
5531 && TREE_CODE (*from_p) == CALL_EXPR
5532 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5533 gimplify_ctxp->into_ssa = false;
5534 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5535 gimplify_ctxp->into_ssa = saved_into_ssa;
5536 if (ret == GS_ERROR)
5537 return ret;
5539 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5540 guess for the predicate was wrong. */
5541 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5542 if (final_pred != initial_pred)
5544 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5545 if (ret == GS_ERROR)
5546 return ret;
5549 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5550 size as argument to the call. */
5551 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5553 tree call = TREE_OPERAND (*from_p, 0);
5554 tree vlasize = TREE_OPERAND (*from_p, 1);
5556 if (TREE_CODE (call) == CALL_EXPR
5557 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5559 int nargs = call_expr_nargs (call);
5560 tree type = TREE_TYPE (call);
5561 tree ap = CALL_EXPR_ARG (call, 0);
5562 tree tag = CALL_EXPR_ARG (call, 1);
5563 tree aptag = CALL_EXPR_ARG (call, 2);
5564 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5565 IFN_VA_ARG, type,
5566 nargs + 1, ap, tag,
5567 aptag, vlasize);
5568 TREE_OPERAND (*from_p, 0) = newcall;
5572 /* Now see if the above changed *from_p to something we handle specially. */
5573 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5574 want_value);
5575 if (ret != GS_UNHANDLED)
5576 return ret;
5578 /* If we've got a variable sized assignment between two lvalues (i.e. does
5579 not involve a call), then we can make things a bit more straightforward
5580 by converting the assignment to memcpy or memset. */
5581 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5583 tree from = TREE_OPERAND (*from_p, 0);
5584 tree size = TREE_OPERAND (*from_p, 1);
5586 if (TREE_CODE (from) == CONSTRUCTOR)
5587 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5589 if (is_gimple_addressable (from))
5591 *from_p = from;
5592 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5593 pre_p);
5597 /* Transform partial stores to non-addressable complex variables into
5598 total stores. This allows us to use real instead of virtual operands
5599 for these variables, which improves optimization. */
5600 if ((TREE_CODE (*to_p) == REALPART_EXPR
5601 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5602 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5603 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5605 /* Try to alleviate the effects of the gimplification creating artificial
5606 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5607 make sure not to create DECL_DEBUG_EXPR links across functions. */
5608 if (!gimplify_ctxp->into_ssa
5609 && VAR_P (*from_p)
5610 && DECL_IGNORED_P (*from_p)
5611 && DECL_P (*to_p)
5612 && !DECL_IGNORED_P (*to_p)
5613 && decl_function_context (*to_p) == current_function_decl
5614 && decl_function_context (*from_p) == current_function_decl)
5616 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5617 DECL_NAME (*from_p)
5618 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5619 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5620 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5623 if (want_value && TREE_THIS_VOLATILE (*to_p))
5624 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5626 if (TREE_CODE (*from_p) == CALL_EXPR)
5628 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5629 instead of a GIMPLE_ASSIGN. */
5630 gcall *call_stmt;
5631 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5633 /* Gimplify internal functions created in the FEs. */
5634 int nargs = call_expr_nargs (*from_p), i;
5635 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5636 auto_vec<tree> vargs (nargs);
5638 for (i = 0; i < nargs; i++)
5640 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5641 EXPR_LOCATION (*from_p));
5642 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5644 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5645 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5646 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5648 else
5650 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5651 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5652 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5653 tree fndecl = get_callee_fndecl (*from_p);
5654 if (fndecl
5655 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5656 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5657 && call_expr_nargs (*from_p) == 3)
5658 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5659 CALL_EXPR_ARG (*from_p, 0),
5660 CALL_EXPR_ARG (*from_p, 1),
5661 CALL_EXPR_ARG (*from_p, 2));
5662 else
5664 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5667 notice_special_calls (call_stmt);
5668 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5669 gimple_call_set_lhs (call_stmt, *to_p);
5670 else if (TREE_CODE (*to_p) == SSA_NAME)
5671 /* The above is somewhat premature, avoid ICEing later for a
5672 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5673 ??? This doesn't make it a default-def. */
5674 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5676 if (EXPR_CILK_SPAWN (*from_p))
5677 gimplify_cilk_detach (pre_p);
5678 assign = call_stmt;
5680 else
5682 assign = gimple_build_assign (*to_p, *from_p);
5683 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5684 if (COMPARISON_CLASS_P (*from_p))
5685 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5688 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5690 /* We should have got an SSA name from the start. */
5691 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5692 || ! gimple_in_ssa_p (cfun));
5695 gimplify_seq_add_stmt (pre_p, assign);
5696 gsi = gsi_last (*pre_p);
5697 maybe_fold_stmt (&gsi);
5699 if (want_value)
5701 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5702 return GS_OK;
5704 else
5705 *expr_p = NULL;
5707 return GS_ALL_DONE;
5710 /* Gimplify a comparison between two variable-sized objects. Do this
5711 with a call to BUILT_IN_MEMCMP. */
5713 static enum gimplify_status
5714 gimplify_variable_sized_compare (tree *expr_p)
5716 location_t loc = EXPR_LOCATION (*expr_p);
5717 tree op0 = TREE_OPERAND (*expr_p, 0);
5718 tree op1 = TREE_OPERAND (*expr_p, 1);
5719 tree t, arg, dest, src, expr;
5721 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5722 arg = unshare_expr (arg);
5723 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5724 src = build_fold_addr_expr_loc (loc, op1);
5725 dest = build_fold_addr_expr_loc (loc, op0);
5726 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5727 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5729 expr
5730 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5731 SET_EXPR_LOCATION (expr, loc);
5732 *expr_p = expr;
5734 return GS_OK;
5737 /* Gimplify a comparison between two aggregate objects of integral scalar
5738 mode as a comparison between the bitwise equivalent scalar values. */
5740 static enum gimplify_status
5741 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5743 location_t loc = EXPR_LOCATION (*expr_p);
5744 tree op0 = TREE_OPERAND (*expr_p, 0);
5745 tree op1 = TREE_OPERAND (*expr_p, 1);
5747 tree type = TREE_TYPE (op0);
5748 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5750 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5751 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5753 *expr_p
5754 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5756 return GS_OK;
5759 /* Gimplify an expression sequence. This function gimplifies each
5760 expression and rewrites the original expression with the last
5761 expression of the sequence in GIMPLE form.
5763 PRE_P points to the list where the side effects for all the
5764 expressions in the sequence will be emitted.
5766 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5768 static enum gimplify_status
5769 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5771 tree t = *expr_p;
5775 tree *sub_p = &TREE_OPERAND (t, 0);
5777 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5778 gimplify_compound_expr (sub_p, pre_p, false);
5779 else
5780 gimplify_stmt (sub_p, pre_p);
5782 t = TREE_OPERAND (t, 1);
5784 while (TREE_CODE (t) == COMPOUND_EXPR);
5786 *expr_p = t;
5787 if (want_value)
5788 return GS_OK;
5789 else
5791 gimplify_stmt (expr_p, pre_p);
5792 return GS_ALL_DONE;
5796 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5797 gimplify. After gimplification, EXPR_P will point to a new temporary
5798 that holds the original value of the SAVE_EXPR node.
5800 PRE_P points to the list where side effects that must happen before
5801 *EXPR_P should be stored. */
5803 static enum gimplify_status
5804 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5806 enum gimplify_status ret = GS_ALL_DONE;
5807 tree val;
5809 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5810 val = TREE_OPERAND (*expr_p, 0);
5812 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5813 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5815 /* The operand may be a void-valued expression. It is
5816 being executed only for its side-effects. */
5817 if (TREE_TYPE (val) == void_type_node)
5819 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5820 is_gimple_stmt, fb_none);
5821 val = NULL;
5823 else
5824 /* The temporary may not be an SSA name as later abnormal and EH
5825 control flow may invalidate use/def domination. */
5826 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5828 TREE_OPERAND (*expr_p, 0) = val;
5829 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5832 *expr_p = val;
5834 return ret;
5837 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5839 unary_expr
5840 : ...
5841 | '&' varname
5844 PRE_P points to the list where side effects that must happen before
5845 *EXPR_P should be stored.
5847 POST_P points to the list where side effects that must happen after
5848 *EXPR_P should be stored. */
5850 static enum gimplify_status
5851 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5853 tree expr = *expr_p;
5854 tree op0 = TREE_OPERAND (expr, 0);
5855 enum gimplify_status ret;
5856 location_t loc = EXPR_LOCATION (*expr_p);
5858 switch (TREE_CODE (op0))
5860 case INDIRECT_REF:
5861 do_indirect_ref:
5862 /* Check if we are dealing with an expression of the form '&*ptr'.
5863 While the front end folds away '&*ptr' into 'ptr', these
5864 expressions may be generated internally by the compiler (e.g.,
5865 builtins like __builtin_va_end). */
5866 /* Caution: the silent array decomposition semantics we allow for
5867 ADDR_EXPR means we can't always discard the pair. */
5868 /* Gimplification of the ADDR_EXPR operand may drop
5869 cv-qualification conversions, so make sure we add them if
5870 needed. */
5872 tree op00 = TREE_OPERAND (op0, 0);
5873 tree t_expr = TREE_TYPE (expr);
5874 tree t_op00 = TREE_TYPE (op00);
5876 if (!useless_type_conversion_p (t_expr, t_op00))
5877 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5878 *expr_p = op00;
5879 ret = GS_OK;
5881 break;
5883 case VIEW_CONVERT_EXPR:
5884 /* Take the address of our operand and then convert it to the type of
5885 this ADDR_EXPR.
5887 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5888 all clear. The impact of this transformation is even less clear. */
5890 /* If the operand is a useless conversion, look through it. Doing so
5891 guarantees that the ADDR_EXPR and its operand will remain of the
5892 same type. */
5893 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5894 op0 = TREE_OPERAND (op0, 0);
5896 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5897 build_fold_addr_expr_loc (loc,
5898 TREE_OPERAND (op0, 0)));
5899 ret = GS_OK;
5900 break;
5902 case MEM_REF:
5903 if (integer_zerop (TREE_OPERAND (op0, 1)))
5904 goto do_indirect_ref;
5906 /* fall through */
5908 default:
5909 /* If we see a call to a declared builtin or see its address
5910 being taken (we can unify those cases here) then we can mark
5911 the builtin for implicit generation by GCC. */
5912 if (TREE_CODE (op0) == FUNCTION_DECL
5913 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5914 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5915 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5917 /* We use fb_either here because the C frontend sometimes takes
5918 the address of a call that returns a struct; see
5919 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5920 the implied temporary explicit. */
5922 /* Make the operand addressable. */
5923 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5924 is_gimple_addressable, fb_either);
5925 if (ret == GS_ERROR)
5926 break;
5928 /* Then mark it. Beware that it may not be possible to do so directly
5929 if a temporary has been created by the gimplification. */
5930 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5932 op0 = TREE_OPERAND (expr, 0);
5934 /* For various reasons, the gimplification of the expression
5935 may have made a new INDIRECT_REF. */
5936 if (TREE_CODE (op0) == INDIRECT_REF)
5937 goto do_indirect_ref;
5939 mark_addressable (TREE_OPERAND (expr, 0));
5941 /* The FEs may end up building ADDR_EXPRs early on a decl with
5942 an incomplete type. Re-build ADDR_EXPRs in canonical form
5943 here. */
5944 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5945 *expr_p = build_fold_addr_expr (op0);
5947 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5948 recompute_tree_invariant_for_addr_expr (*expr_p);
5950 /* If we re-built the ADDR_EXPR add a conversion to the original type
5951 if required. */
5952 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5953 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5955 break;
5958 return ret;
5961 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5962 value; output operands should be a gimple lvalue. */
5964 static enum gimplify_status
5965 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5967 tree expr;
5968 int noutputs;
5969 const char **oconstraints;
5970 int i;
5971 tree link;
5972 const char *constraint;
5973 bool allows_mem, allows_reg, is_inout;
5974 enum gimplify_status ret, tret;
5975 gasm *stmt;
5976 vec<tree, va_gc> *inputs;
5977 vec<tree, va_gc> *outputs;
5978 vec<tree, va_gc> *clobbers;
5979 vec<tree, va_gc> *labels;
5980 tree link_next;
5982 expr = *expr_p;
5983 noutputs = list_length (ASM_OUTPUTS (expr));
5984 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5986 inputs = NULL;
5987 outputs = NULL;
5988 clobbers = NULL;
5989 labels = NULL;
5991 ret = GS_ALL_DONE;
5992 link_next = NULL_TREE;
5993 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5995 bool ok;
5996 size_t constraint_len;
5998 link_next = TREE_CHAIN (link);
6000 oconstraints[i]
6001 = constraint
6002 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6003 constraint_len = strlen (constraint);
6004 if (constraint_len == 0)
6005 continue;
6007 ok = parse_output_constraint (&constraint, i, 0, 0,
6008 &allows_mem, &allows_reg, &is_inout);
6009 if (!ok)
6011 ret = GS_ERROR;
6012 is_inout = false;
6015 if (!allows_reg && allows_mem)
6016 mark_addressable (TREE_VALUE (link));
6018 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6019 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6020 fb_lvalue | fb_mayfail);
6021 if (tret == GS_ERROR)
6023 error ("invalid lvalue in asm output %d", i);
6024 ret = tret;
6027 /* If the constraint does not allow memory make sure we gimplify
6028 it to a register if it is not already but its base is. This
6029 happens for complex and vector components. */
6030 if (!allows_mem)
6032 tree op = TREE_VALUE (link);
6033 if (! is_gimple_val (op)
6034 && is_gimple_reg_type (TREE_TYPE (op))
6035 && is_gimple_reg (get_base_address (op)))
6037 tree tem = create_tmp_reg (TREE_TYPE (op));
6038 tree ass;
6039 if (is_inout)
6041 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6042 tem, unshare_expr (op));
6043 gimplify_and_add (ass, pre_p);
6045 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6046 gimplify_and_add (ass, post_p);
6048 TREE_VALUE (link) = tem;
6049 tret = GS_OK;
6053 vec_safe_push (outputs, link);
6054 TREE_CHAIN (link) = NULL_TREE;
6056 if (is_inout)
6058 /* An input/output operand. To give the optimizers more
6059 flexibility, split it into separate input and output
6060 operands. */
6061 tree input;
6062 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6063 char buf[11];
6065 /* Turn the in/out constraint into an output constraint. */
6066 char *p = xstrdup (constraint);
6067 p[0] = '=';
6068 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6070 /* And add a matching input constraint. */
6071 if (allows_reg)
6073 sprintf (buf, "%u", i);
6075 /* If there are multiple alternatives in the constraint,
6076 handle each of them individually. Those that allow register
6077 will be replaced with operand number, the others will stay
6078 unchanged. */
6079 if (strchr (p, ',') != NULL)
6081 size_t len = 0, buflen = strlen (buf);
6082 char *beg, *end, *str, *dst;
6084 for (beg = p + 1;;)
6086 end = strchr (beg, ',');
6087 if (end == NULL)
6088 end = strchr (beg, '\0');
6089 if ((size_t) (end - beg) < buflen)
6090 len += buflen + 1;
6091 else
6092 len += end - beg + 1;
6093 if (*end)
6094 beg = end + 1;
6095 else
6096 break;
6099 str = (char *) alloca (len);
6100 for (beg = p + 1, dst = str;;)
6102 const char *tem;
6103 bool mem_p, reg_p, inout_p;
6105 end = strchr (beg, ',');
6106 if (end)
6107 *end = '\0';
6108 beg[-1] = '=';
6109 tem = beg - 1;
6110 parse_output_constraint (&tem, i, 0, 0,
6111 &mem_p, &reg_p, &inout_p);
6112 if (dst != str)
6113 *dst++ = ',';
6114 if (reg_p)
6116 memcpy (dst, buf, buflen);
6117 dst += buflen;
6119 else
6121 if (end)
6122 len = end - beg;
6123 else
6124 len = strlen (beg);
6125 memcpy (dst, beg, len);
6126 dst += len;
6128 if (end)
6129 beg = end + 1;
6130 else
6131 break;
6133 *dst = '\0';
6134 input = build_string (dst - str, str);
6136 else
6137 input = build_string (strlen (buf), buf);
6139 else
6140 input = build_string (constraint_len - 1, constraint + 1);
6142 free (p);
6144 input = build_tree_list (build_tree_list (NULL_TREE, input),
6145 unshare_expr (TREE_VALUE (link)));
6146 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6150 link_next = NULL_TREE;
6151 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6153 link_next = TREE_CHAIN (link);
6154 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6155 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6156 oconstraints, &allows_mem, &allows_reg);
6158 /* If we can't make copies, we can only accept memory. */
6159 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6161 if (allows_mem)
6162 allows_reg = 0;
6163 else
6165 error ("impossible constraint in %<asm%>");
6166 error ("non-memory input %d must stay in memory", i);
6167 return GS_ERROR;
6171 /* If the operand is a memory input, it should be an lvalue. */
6172 if (!allows_reg && allows_mem)
6174 tree inputv = TREE_VALUE (link);
6175 STRIP_NOPS (inputv);
6176 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6177 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6178 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6179 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6180 || TREE_CODE (inputv) == MODIFY_EXPR)
6181 TREE_VALUE (link) = error_mark_node;
6182 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6183 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6184 if (tret != GS_ERROR)
6186 /* Unlike output operands, memory inputs are not guaranteed
6187 to be lvalues by the FE, and while the expressions are
6188 marked addressable there, if it is e.g. a statement
6189 expression, temporaries in it might not end up being
6190 addressable. They might be already used in the IL and thus
6191 it is too late to make them addressable now though. */
6192 tree x = TREE_VALUE (link);
6193 while (handled_component_p (x))
6194 x = TREE_OPERAND (x, 0);
6195 if (TREE_CODE (x) == MEM_REF
6196 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6197 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6198 if ((VAR_P (x)
6199 || TREE_CODE (x) == PARM_DECL
6200 || TREE_CODE (x) == RESULT_DECL)
6201 && !TREE_ADDRESSABLE (x)
6202 && is_gimple_reg (x))
6204 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6205 input_location), 0,
6206 "memory input %d is not directly addressable",
6208 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6211 mark_addressable (TREE_VALUE (link));
6212 if (tret == GS_ERROR)
6214 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6215 "memory input %d is not directly addressable", i);
6216 ret = tret;
6219 else
6221 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6222 is_gimple_asm_val, fb_rvalue);
6223 if (tret == GS_ERROR)
6224 ret = tret;
6227 TREE_CHAIN (link) = NULL_TREE;
6228 vec_safe_push (inputs, link);
6231 link_next = NULL_TREE;
6232 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6234 link_next = TREE_CHAIN (link);
6235 TREE_CHAIN (link) = NULL_TREE;
6236 vec_safe_push (clobbers, link);
6239 link_next = NULL_TREE;
6240 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6242 link_next = TREE_CHAIN (link);
6243 TREE_CHAIN (link) = NULL_TREE;
6244 vec_safe_push (labels, link);
6247 /* Do not add ASMs with errors to the gimple IL stream. */
6248 if (ret != GS_ERROR)
6250 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6251 inputs, outputs, clobbers, labels);
6253 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6254 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6256 gimplify_seq_add_stmt (pre_p, stmt);
6259 return ret;
6262 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6263 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6264 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6265 return to this function.
6267 FIXME should we complexify the prequeue handling instead? Or use flags
6268 for all the cleanups and let the optimizer tighten them up? The current
6269 code seems pretty fragile; it will break on a cleanup within any
6270 non-conditional nesting. But any such nesting would be broken, anyway;
6271 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6272 and continues out of it. We can do that at the RTL level, though, so
6273 having an optimizer to tighten up try/finally regions would be a Good
6274 Thing. */
6276 static enum gimplify_status
6277 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6279 gimple_stmt_iterator iter;
6280 gimple_seq body_sequence = NULL;
6282 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6284 /* We only care about the number of conditions between the innermost
6285 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6286 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6287 int old_conds = gimplify_ctxp->conditions;
6288 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6289 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6290 gimplify_ctxp->conditions = 0;
6291 gimplify_ctxp->conditional_cleanups = NULL;
6292 gimplify_ctxp->in_cleanup_point_expr = true;
6294 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6296 gimplify_ctxp->conditions = old_conds;
6297 gimplify_ctxp->conditional_cleanups = old_cleanups;
6298 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6300 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6302 gimple *wce = gsi_stmt (iter);
6304 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6306 if (gsi_one_before_end_p (iter))
6308 /* Note that gsi_insert_seq_before and gsi_remove do not
6309 scan operands, unlike some other sequence mutators. */
6310 if (!gimple_wce_cleanup_eh_only (wce))
6311 gsi_insert_seq_before_without_update (&iter,
6312 gimple_wce_cleanup (wce),
6313 GSI_SAME_STMT);
6314 gsi_remove (&iter, true);
6315 break;
6317 else
6319 gtry *gtry;
6320 gimple_seq seq;
6321 enum gimple_try_flags kind;
6323 if (gimple_wce_cleanup_eh_only (wce))
6324 kind = GIMPLE_TRY_CATCH;
6325 else
6326 kind = GIMPLE_TRY_FINALLY;
6327 seq = gsi_split_seq_after (iter);
6329 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6330 /* Do not use gsi_replace here, as it may scan operands.
6331 We want to do a simple structural modification only. */
6332 gsi_set_stmt (&iter, gtry);
6333 iter = gsi_start (gtry->eval);
6336 else
6337 gsi_next (&iter);
6340 gimplify_seq_add_seq (pre_p, body_sequence);
6341 if (temp)
6343 *expr_p = temp;
6344 return GS_OK;
6346 else
6348 *expr_p = NULL;
6349 return GS_ALL_DONE;
6353 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6354 is the cleanup action required. EH_ONLY is true if the cleanup should
6355 only be executed if an exception is thrown, not on normal exit.
6356 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6357 only valid for clobbers. */
6359 static void
6360 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6361 bool force_uncond = false)
6363 gimple *wce;
6364 gimple_seq cleanup_stmts = NULL;
6366 /* Errors can result in improperly nested cleanups. Which results in
6367 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6368 if (seen_error ())
6369 return;
6371 if (gimple_conditional_context ())
6373 /* If we're in a conditional context, this is more complex. We only
6374 want to run the cleanup if we actually ran the initialization that
6375 necessitates it, but we want to run it after the end of the
6376 conditional context. So we wrap the try/finally around the
6377 condition and use a flag to determine whether or not to actually
6378 run the destructor. Thus
6380 test ? f(A()) : 0
6382 becomes (approximately)
6384 flag = 0;
6385 try {
6386 if (test) { A::A(temp); flag = 1; val = f(temp); }
6387 else { val = 0; }
6388 } finally {
6389 if (flag) A::~A(temp);
6393 if (force_uncond)
6395 gimplify_stmt (&cleanup, &cleanup_stmts);
6396 wce = gimple_build_wce (cleanup_stmts);
6397 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6399 else
6401 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6402 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6403 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6405 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6406 gimplify_stmt (&cleanup, &cleanup_stmts);
6407 wce = gimple_build_wce (cleanup_stmts);
6409 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6410 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6411 gimplify_seq_add_stmt (pre_p, ftrue);
6413 /* Because of this manipulation, and the EH edges that jump
6414 threading cannot redirect, the temporary (VAR) will appear
6415 to be used uninitialized. Don't warn. */
6416 TREE_NO_WARNING (var) = 1;
6419 else
6421 gimplify_stmt (&cleanup, &cleanup_stmts);
6422 wce = gimple_build_wce (cleanup_stmts);
6423 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6424 gimplify_seq_add_stmt (pre_p, wce);
6428 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6430 static enum gimplify_status
6431 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6433 tree targ = *expr_p;
6434 tree temp = TARGET_EXPR_SLOT (targ);
6435 tree init = TARGET_EXPR_INITIAL (targ);
6436 enum gimplify_status ret;
6438 bool unpoison_empty_seq = false;
6439 gimple_stmt_iterator unpoison_it;
6441 if (init)
6443 tree cleanup = NULL_TREE;
6445 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6446 to the temps list. Handle also variable length TARGET_EXPRs. */
6447 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6449 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6450 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6451 gimplify_vla_decl (temp, pre_p);
6453 else
6455 /* Save location where we need to place unpoisoning. It's possible
6456 that a variable will be converted to needs_to_live_in_memory. */
6457 unpoison_it = gsi_last (*pre_p);
6458 unpoison_empty_seq = gsi_end_p (unpoison_it);
6460 gimple_add_tmp_var (temp);
6463 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6464 expression is supposed to initialize the slot. */
6465 if (VOID_TYPE_P (TREE_TYPE (init)))
6466 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6467 else
6469 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6470 init = init_expr;
6471 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6472 init = NULL;
6473 ggc_free (init_expr);
6475 if (ret == GS_ERROR)
6477 /* PR c++/28266 Make sure this is expanded only once. */
6478 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6479 return GS_ERROR;
6481 if (init)
6482 gimplify_and_add (init, pre_p);
6484 /* If needed, push the cleanup for the temp. */
6485 if (TARGET_EXPR_CLEANUP (targ))
6487 if (CLEANUP_EH_ONLY (targ))
6488 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6489 CLEANUP_EH_ONLY (targ), pre_p);
6490 else
6491 cleanup = TARGET_EXPR_CLEANUP (targ);
6494 /* Add a clobber for the temporary going out of scope, like
6495 gimplify_bind_expr. */
6496 if (gimplify_ctxp->in_cleanup_point_expr
6497 && needs_to_live_in_memory (temp))
6499 if (flag_stack_reuse == SR_ALL)
6501 tree clobber = build_constructor (TREE_TYPE (temp),
6502 NULL);
6503 TREE_THIS_VOLATILE (clobber) = true;
6504 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6505 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6507 if (asan_poisoned_variables
6508 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6509 && dbg_cnt (asan_use_after_scope))
6511 tree asan_cleanup = build_asan_poison_call_expr (temp);
6512 if (asan_cleanup)
6514 if (unpoison_empty_seq)
6515 unpoison_it = gsi_start (*pre_p);
6517 asan_poison_variable (temp, false, &unpoison_it,
6518 unpoison_empty_seq);
6519 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6523 if (cleanup)
6524 gimple_push_cleanup (temp, cleanup, false, pre_p);
6526 /* Only expand this once. */
6527 TREE_OPERAND (targ, 3) = init;
6528 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6530 else
6531 /* We should have expanded this before. */
6532 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6534 *expr_p = temp;
6535 return GS_OK;
6538 /* Gimplification of expression trees. */
6540 /* Gimplify an expression which appears at statement context. The
6541 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6542 NULL, a new sequence is allocated.
6544 Return true if we actually added a statement to the queue. */
6546 bool
6547 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6549 gimple_seq_node last;
6551 last = gimple_seq_last (*seq_p);
6552 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6553 return last != gimple_seq_last (*seq_p);
6556 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6557 to CTX. If entries already exist, force them to be some flavor of private.
6558 If there is no enclosing parallel, do nothing. */
6560 void
6561 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6563 splay_tree_node n;
6565 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6566 return;
6570 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6571 if (n != NULL)
6573 if (n->value & GOVD_SHARED)
6574 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6575 else if (n->value & GOVD_MAP)
6576 n->value |= GOVD_MAP_TO_ONLY;
6577 else
6578 return;
6580 else if ((ctx->region_type & ORT_TARGET) != 0)
6582 if (ctx->target_map_scalars_firstprivate)
6583 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6584 else
6585 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6587 else if (ctx->region_type != ORT_WORKSHARE
6588 && ctx->region_type != ORT_SIMD
6589 && ctx->region_type != ORT_ACC
6590 && !(ctx->region_type & ORT_TARGET_DATA))
6591 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6593 ctx = ctx->outer_context;
6595 while (ctx);
6598 /* Similarly for each of the type sizes of TYPE. */
6600 static void
6601 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6603 if (type == NULL || type == error_mark_node)
6604 return;
6605 type = TYPE_MAIN_VARIANT (type);
6607 if (ctx->privatized_types->add (type))
6608 return;
6610 switch (TREE_CODE (type))
6612 case INTEGER_TYPE:
6613 case ENUMERAL_TYPE:
6614 case BOOLEAN_TYPE:
6615 case REAL_TYPE:
6616 case FIXED_POINT_TYPE:
6617 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6618 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6619 break;
6621 case ARRAY_TYPE:
6622 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6623 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6624 break;
6626 case RECORD_TYPE:
6627 case UNION_TYPE:
6628 case QUAL_UNION_TYPE:
6630 tree field;
6631 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6632 if (TREE_CODE (field) == FIELD_DECL)
6634 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6635 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6638 break;
6640 case POINTER_TYPE:
6641 case REFERENCE_TYPE:
6642 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6643 break;
6645 default:
6646 break;
6649 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6650 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6651 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6654 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6656 static void
6657 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6659 splay_tree_node n;
6660 unsigned int nflags;
6661 tree t;
6663 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6664 return;
6666 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6667 there are constructors involved somewhere. Exception is a shared clause,
6668 there is nothing privatized in that case. */
6669 if ((flags & GOVD_SHARED) == 0
6670 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6671 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6672 flags |= GOVD_SEEN;
6674 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6675 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6677 /* We shouldn't be re-adding the decl with the same data
6678 sharing class. */
6679 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6680 nflags = n->value | flags;
6681 /* The only combination of data sharing classes we should see is
6682 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6683 reduction variables to be used in data sharing clauses. */
6684 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6685 || ((nflags & GOVD_DATA_SHARE_CLASS)
6686 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6687 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6688 n->value = nflags;
6689 return;
6692 /* When adding a variable-sized variable, we have to handle all sorts
6693 of additional bits of data: the pointer replacement variable, and
6694 the parameters of the type. */
6695 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6697 /* Add the pointer replacement variable as PRIVATE if the variable
6698 replacement is private, else FIRSTPRIVATE since we'll need the
6699 address of the original variable either for SHARED, or for the
6700 copy into or out of the context. */
6701 if (!(flags & GOVD_LOCAL))
6703 if (flags & GOVD_MAP)
6704 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6705 else if (flags & GOVD_PRIVATE)
6706 nflags = GOVD_PRIVATE;
6707 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6708 && (flags & GOVD_FIRSTPRIVATE))
6709 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6710 else
6711 nflags = GOVD_FIRSTPRIVATE;
6712 nflags |= flags & GOVD_SEEN;
6713 t = DECL_VALUE_EXPR (decl);
6714 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6715 t = TREE_OPERAND (t, 0);
6716 gcc_assert (DECL_P (t));
6717 omp_add_variable (ctx, t, nflags);
6720 /* Add all of the variable and type parameters (which should have
6721 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6722 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6723 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6724 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6726 /* The variable-sized variable itself is never SHARED, only some form
6727 of PRIVATE. The sharing would take place via the pointer variable
6728 which we remapped above. */
6729 if (flags & GOVD_SHARED)
6730 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6731 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6733 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6734 alloca statement we generate for the variable, so make sure it
6735 is available. This isn't automatically needed for the SHARED
6736 case, since we won't be allocating local storage then.
6737 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6738 in this case omp_notice_variable will be called later
6739 on when it is gimplified. */
6740 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6741 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6742 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6744 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6745 && lang_hooks.decls.omp_privatize_by_reference (decl))
6747 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6749 /* Similar to the direct variable sized case above, we'll need the
6750 size of references being privatized. */
6751 if ((flags & GOVD_SHARED) == 0)
6753 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6754 if (DECL_P (t))
6755 omp_notice_variable (ctx, t, true);
6759 if (n != NULL)
6760 n->value |= flags;
6761 else
6762 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6764 /* For reductions clauses in OpenACC loop directives, by default create a
6765 copy clause on the enclosing parallel construct for carrying back the
6766 results. */
6767 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6769 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6770 while (outer_ctx)
6772 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6773 if (n != NULL)
6775 /* Ignore local variables and explicitly declared clauses. */
6776 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6777 break;
6778 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6780 /* According to the OpenACC spec, such a reduction variable
6781 should already have a copy map on a kernels construct,
6782 verify that here. */
6783 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6784 && (n->value & GOVD_MAP));
6786 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6788 /* Remove firstprivate and make it a copy map. */
6789 n->value &= ~GOVD_FIRSTPRIVATE;
6790 n->value |= GOVD_MAP;
6793 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6795 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6796 GOVD_MAP | GOVD_SEEN);
6797 break;
6799 outer_ctx = outer_ctx->outer_context;
6804 /* Notice a threadprivate variable DECL used in OMP context CTX.
6805 This just prints out diagnostics about threadprivate variable uses
6806 in untied tasks. If DECL2 is non-NULL, prevent this warning
6807 on that variable. */
6809 static bool
6810 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6811 tree decl2)
6813 splay_tree_node n;
6814 struct gimplify_omp_ctx *octx;
6816 for (octx = ctx; octx; octx = octx->outer_context)
6817 if ((octx->region_type & ORT_TARGET) != 0)
6819 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6820 if (n == NULL)
6822 error ("threadprivate variable %qE used in target region",
6823 DECL_NAME (decl));
6824 error_at (octx->location, "enclosing target region");
6825 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6827 if (decl2)
6828 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6831 if (ctx->region_type != ORT_UNTIED_TASK)
6832 return false;
6833 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6834 if (n == NULL)
6836 error ("threadprivate variable %qE used in untied task",
6837 DECL_NAME (decl));
6838 error_at (ctx->location, "enclosing task");
6839 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6841 if (decl2)
6842 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6843 return false;
6846 /* Return true if global var DECL is device resident. */
6848 static bool
6849 device_resident_p (tree decl)
6851 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6853 if (!attr)
6854 return false;
6856 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6858 tree c = TREE_VALUE (t);
6859 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6860 return true;
6863 return false;
6866 /* Return true if DECL has an ACC DECLARE attribute. */
6868 static bool
6869 is_oacc_declared (tree decl)
6871 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6872 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6873 return declared != NULL_TREE;
6876 /* Determine outer default flags for DECL mentioned in an OMP region
6877 but not declared in an enclosing clause.
6879 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6880 remapped firstprivate instead of shared. To some extent this is
6881 addressed in omp_firstprivatize_type_sizes, but not
6882 effectively. */
6884 static unsigned
6885 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6886 bool in_code, unsigned flags)
6888 enum omp_clause_default_kind default_kind = ctx->default_kind;
6889 enum omp_clause_default_kind kind;
6891 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6892 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6893 default_kind = kind;
6895 switch (default_kind)
6897 case OMP_CLAUSE_DEFAULT_NONE:
6899 const char *rtype;
6901 if (ctx->region_type & ORT_PARALLEL)
6902 rtype = "parallel";
6903 else if (ctx->region_type & ORT_TASK)
6904 rtype = "task";
6905 else if (ctx->region_type & ORT_TEAMS)
6906 rtype = "teams";
6907 else
6908 gcc_unreachable ();
6910 error ("%qE not specified in enclosing %qs",
6911 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6912 error_at (ctx->location, "enclosing %qs", rtype);
6914 /* FALLTHRU */
6915 case OMP_CLAUSE_DEFAULT_SHARED:
6916 flags |= GOVD_SHARED;
6917 break;
6918 case OMP_CLAUSE_DEFAULT_PRIVATE:
6919 flags |= GOVD_PRIVATE;
6920 break;
6921 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6922 flags |= GOVD_FIRSTPRIVATE;
6923 break;
6924 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6925 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6926 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6927 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6929 omp_notice_variable (octx, decl, in_code);
6930 for (; octx; octx = octx->outer_context)
6932 splay_tree_node n2;
6934 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6935 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6936 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6937 continue;
6938 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6940 flags |= GOVD_FIRSTPRIVATE;
6941 goto found_outer;
6943 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6945 flags |= GOVD_SHARED;
6946 goto found_outer;
6951 if (TREE_CODE (decl) == PARM_DECL
6952 || (!is_global_var (decl)
6953 && DECL_CONTEXT (decl) == current_function_decl))
6954 flags |= GOVD_FIRSTPRIVATE;
6955 else
6956 flags |= GOVD_SHARED;
6957 found_outer:
6958 break;
6960 default:
6961 gcc_unreachable ();
6964 return flags;
6968 /* Determine outer default flags for DECL mentioned in an OACC region
6969 but not declared in an enclosing clause. */
6971 static unsigned
6972 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6974 const char *rkind;
6975 bool on_device = false;
6976 bool declared = is_oacc_declared (decl);
6977 tree type = TREE_TYPE (decl);
6979 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6980 type = TREE_TYPE (type);
6982 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6983 && is_global_var (decl)
6984 && device_resident_p (decl))
6986 on_device = true;
6987 flags |= GOVD_MAP_TO_ONLY;
6990 switch (ctx->region_type)
6992 case ORT_ACC_KERNELS:
6993 rkind = "kernels";
6995 if (AGGREGATE_TYPE_P (type))
6997 /* Aggregates default to 'present_or_copy', or 'present'. */
6998 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6999 flags |= GOVD_MAP;
7000 else
7001 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7003 else
7004 /* Scalars default to 'copy'. */
7005 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7007 break;
7009 case ORT_ACC_PARALLEL:
7010 rkind = "parallel";
7012 if (on_device || declared)
7013 flags |= GOVD_MAP;
7014 else if (AGGREGATE_TYPE_P (type))
7016 /* Aggregates default to 'present_or_copy', or 'present'. */
7017 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7018 flags |= GOVD_MAP;
7019 else
7020 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7022 else
7023 /* Scalars default to 'firstprivate'. */
7024 flags |= GOVD_FIRSTPRIVATE;
7026 break;
7028 default:
7029 gcc_unreachable ();
7032 if (DECL_ARTIFICIAL (decl))
7033 ; /* We can get compiler-generated decls, and should not complain
7034 about them. */
7035 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7037 error ("%qE not specified in enclosing OpenACC %qs construct",
7038 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7039 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7041 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7042 ; /* Handled above. */
7043 else
7044 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7046 return flags;
7049 /* Record the fact that DECL was used within the OMP context CTX.
7050 IN_CODE is true when real code uses DECL, and false when we should
7051 merely emit default(none) errors. Return true if DECL is going to
7052 be remapped and thus DECL shouldn't be gimplified into its
7053 DECL_VALUE_EXPR (if any). */
7055 static bool
7056 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7058 splay_tree_node n;
7059 unsigned flags = in_code ? GOVD_SEEN : 0;
7060 bool ret = false, shared;
7062 if (error_operand_p (decl))
7063 return false;
7065 if (ctx->region_type == ORT_NONE)
7066 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7068 if (is_global_var (decl))
7070 /* Threadprivate variables are predetermined. */
7071 if (DECL_THREAD_LOCAL_P (decl))
7072 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7074 if (DECL_HAS_VALUE_EXPR_P (decl))
7076 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7078 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7079 return omp_notice_threadprivate_variable (ctx, decl, value);
7082 if (gimplify_omp_ctxp->outer_context == NULL
7083 && VAR_P (decl)
7084 && oacc_get_fn_attrib (current_function_decl))
7086 location_t loc = DECL_SOURCE_LOCATION (decl);
7088 if (lookup_attribute ("omp declare target link",
7089 DECL_ATTRIBUTES (decl)))
7091 error_at (loc,
7092 "%qE with %<link%> clause used in %<routine%> function",
7093 DECL_NAME (decl));
7094 return false;
7096 else if (!lookup_attribute ("omp declare target",
7097 DECL_ATTRIBUTES (decl)))
7099 error_at (loc,
7100 "%qE requires a %<declare%> directive for use "
7101 "in a %<routine%> function", DECL_NAME (decl));
7102 return false;
7107 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7108 if ((ctx->region_type & ORT_TARGET) != 0)
7110 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7111 if (n == NULL)
7113 unsigned nflags = flags;
7114 if (ctx->target_map_pointers_as_0len_arrays
7115 || ctx->target_map_scalars_firstprivate)
7117 bool is_declare_target = false;
7118 bool is_scalar = false;
7119 if (is_global_var (decl)
7120 && varpool_node::get_create (decl)->offloadable)
7122 struct gimplify_omp_ctx *octx;
7123 for (octx = ctx->outer_context;
7124 octx; octx = octx->outer_context)
7126 n = splay_tree_lookup (octx->variables,
7127 (splay_tree_key)decl);
7128 if (n
7129 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7130 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7131 break;
7133 is_declare_target = octx == NULL;
7135 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7136 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7137 if (is_declare_target)
7139 else if (ctx->target_map_pointers_as_0len_arrays
7140 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7141 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7142 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7143 == POINTER_TYPE)))
7144 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7145 else if (is_scalar)
7146 nflags |= GOVD_FIRSTPRIVATE;
7149 struct gimplify_omp_ctx *octx = ctx->outer_context;
7150 if ((ctx->region_type & ORT_ACC) && octx)
7152 /* Look in outer OpenACC contexts, to see if there's a
7153 data attribute for this variable. */
7154 omp_notice_variable (octx, decl, in_code);
7156 for (; octx; octx = octx->outer_context)
7158 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7159 break;
7160 splay_tree_node n2
7161 = splay_tree_lookup (octx->variables,
7162 (splay_tree_key) decl);
7163 if (n2)
7165 if (octx->region_type == ORT_ACC_HOST_DATA)
7166 error ("variable %qE declared in enclosing "
7167 "%<host_data%> region", DECL_NAME (decl));
7168 nflags |= GOVD_MAP;
7169 if (octx->region_type == ORT_ACC_DATA
7170 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7171 nflags |= GOVD_MAP_0LEN_ARRAY;
7172 goto found_outer;
7178 tree type = TREE_TYPE (decl);
7180 if (nflags == flags
7181 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7182 && lang_hooks.decls.omp_privatize_by_reference (decl))
7183 type = TREE_TYPE (type);
7184 if (nflags == flags
7185 && !lang_hooks.types.omp_mappable_type (type))
7187 error ("%qD referenced in target region does not have "
7188 "a mappable type", decl);
7189 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7191 else if (nflags == flags)
7193 if ((ctx->region_type & ORT_ACC) != 0)
7194 nflags = oacc_default_clause (ctx, decl, flags);
7195 else
7196 nflags |= GOVD_MAP;
7199 found_outer:
7200 omp_add_variable (ctx, decl, nflags);
7202 else
7204 /* If nothing changed, there's nothing left to do. */
7205 if ((n->value & flags) == flags)
7206 return ret;
7207 flags |= n->value;
7208 n->value = flags;
7210 goto do_outer;
7213 if (n == NULL)
7215 if (ctx->region_type == ORT_WORKSHARE
7216 || ctx->region_type == ORT_SIMD
7217 || ctx->region_type == ORT_ACC
7218 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7219 goto do_outer;
7221 flags = omp_default_clause (ctx, decl, in_code, flags);
7223 if ((flags & GOVD_PRIVATE)
7224 && lang_hooks.decls.omp_private_outer_ref (decl))
7225 flags |= GOVD_PRIVATE_OUTER_REF;
7227 omp_add_variable (ctx, decl, flags);
7229 shared = (flags & GOVD_SHARED) != 0;
7230 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7231 goto do_outer;
7234 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7235 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7236 && DECL_SIZE (decl))
7238 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7240 splay_tree_node n2;
7241 tree t = DECL_VALUE_EXPR (decl);
7242 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7243 t = TREE_OPERAND (t, 0);
7244 gcc_assert (DECL_P (t));
7245 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7246 n2->value |= GOVD_SEEN;
7248 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7249 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7250 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7251 != INTEGER_CST))
7253 splay_tree_node n2;
7254 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7255 gcc_assert (DECL_P (t));
7256 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7257 if (n2)
7258 omp_notice_variable (ctx, t, true);
7262 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7263 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7265 /* If nothing changed, there's nothing left to do. */
7266 if ((n->value & flags) == flags)
7267 return ret;
7268 flags |= n->value;
7269 n->value = flags;
7271 do_outer:
7272 /* If the variable is private in the current context, then we don't
7273 need to propagate anything to an outer context. */
7274 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7275 return ret;
7276 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7277 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7278 return ret;
7279 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7280 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7281 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7282 return ret;
7283 if (ctx->outer_context
7284 && omp_notice_variable (ctx->outer_context, decl, in_code))
7285 return true;
7286 return ret;
7289 /* Verify that DECL is private within CTX. If there's specific information
7290 to the contrary in the innermost scope, generate an error. */
7292 static bool
7293 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7295 splay_tree_node n;
7297 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7298 if (n != NULL)
7300 if (n->value & GOVD_SHARED)
7302 if (ctx == gimplify_omp_ctxp)
7304 if (simd)
7305 error ("iteration variable %qE is predetermined linear",
7306 DECL_NAME (decl));
7307 else
7308 error ("iteration variable %qE should be private",
7309 DECL_NAME (decl));
7310 n->value = GOVD_PRIVATE;
7311 return true;
7313 else
7314 return false;
7316 else if ((n->value & GOVD_EXPLICIT) != 0
7317 && (ctx == gimplify_omp_ctxp
7318 || (ctx->region_type == ORT_COMBINED_PARALLEL
7319 && gimplify_omp_ctxp->outer_context == ctx)))
7321 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7322 error ("iteration variable %qE should not be firstprivate",
7323 DECL_NAME (decl));
7324 else if ((n->value & GOVD_REDUCTION) != 0)
7325 error ("iteration variable %qE should not be reduction",
7326 DECL_NAME (decl));
7327 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7328 error ("iteration variable %qE should not be linear",
7329 DECL_NAME (decl));
7330 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7331 error ("iteration variable %qE should not be lastprivate",
7332 DECL_NAME (decl));
7333 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7334 error ("iteration variable %qE should not be private",
7335 DECL_NAME (decl));
7336 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7337 error ("iteration variable %qE is predetermined linear",
7338 DECL_NAME (decl));
7340 return (ctx == gimplify_omp_ctxp
7341 || (ctx->region_type == ORT_COMBINED_PARALLEL
7342 && gimplify_omp_ctxp->outer_context == ctx));
7345 if (ctx->region_type != ORT_WORKSHARE
7346 && ctx->region_type != ORT_SIMD
7347 && ctx->region_type != ORT_ACC)
7348 return false;
7349 else if (ctx->outer_context)
7350 return omp_is_private (ctx->outer_context, decl, simd);
7351 return false;
7354 /* Return true if DECL is private within a parallel region
7355 that binds to the current construct's context or in parallel
7356 region's REDUCTION clause. */
7358 static bool
7359 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7361 splay_tree_node n;
7365 ctx = ctx->outer_context;
7366 if (ctx == NULL)
7368 if (is_global_var (decl))
7369 return false;
7371 /* References might be private, but might be shared too,
7372 when checking for copyprivate, assume they might be
7373 private, otherwise assume they might be shared. */
7374 if (copyprivate)
7375 return true;
7377 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7378 return false;
7380 /* Treat C++ privatized non-static data members outside
7381 of the privatization the same. */
7382 if (omp_member_access_dummy_var (decl))
7383 return false;
7385 return true;
7388 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7390 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7391 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7392 continue;
7394 if (n != NULL)
7396 if ((n->value & GOVD_LOCAL) != 0
7397 && omp_member_access_dummy_var (decl))
7398 return false;
7399 return (n->value & GOVD_SHARED) == 0;
7402 while (ctx->region_type == ORT_WORKSHARE
7403 || ctx->region_type == ORT_SIMD
7404 || ctx->region_type == ORT_ACC);
7405 return false;
7408 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7410 static tree
7411 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7413 tree t = *tp;
7415 /* If this node has been visited, unmark it and keep looking. */
7416 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7417 return t;
7419 if (IS_TYPE_OR_DECL_P (t))
7420 *walk_subtrees = 0;
7421 return NULL_TREE;
7424 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7425 and previous omp contexts. */
7427 static void
7428 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7429 enum omp_region_type region_type,
7430 enum tree_code code)
7432 struct gimplify_omp_ctx *ctx, *outer_ctx;
7433 tree c;
7434 hash_map<tree, tree> *struct_map_to_clause = NULL;
7435 tree *prev_list_p = NULL;
7437 ctx = new_omp_context (region_type);
7438 outer_ctx = ctx->outer_context;
7439 if (code == OMP_TARGET)
7441 if (!lang_GNU_Fortran ())
7442 ctx->target_map_pointers_as_0len_arrays = true;
7443 ctx->target_map_scalars_firstprivate = true;
7445 if (!lang_GNU_Fortran ())
7446 switch (code)
7448 case OMP_TARGET:
7449 case OMP_TARGET_DATA:
7450 case OMP_TARGET_ENTER_DATA:
7451 case OMP_TARGET_EXIT_DATA:
7452 case OACC_DECLARE:
7453 case OACC_HOST_DATA:
7454 ctx->target_firstprivatize_array_bases = true;
7455 default:
7456 break;
7459 while ((c = *list_p) != NULL)
7461 bool remove = false;
7462 bool notice_outer = true;
7463 const char *check_non_private = NULL;
7464 unsigned int flags;
7465 tree decl;
7467 switch (OMP_CLAUSE_CODE (c))
7469 case OMP_CLAUSE_PRIVATE:
7470 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7471 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7473 flags |= GOVD_PRIVATE_OUTER_REF;
7474 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7476 else
7477 notice_outer = false;
7478 goto do_add;
7479 case OMP_CLAUSE_SHARED:
7480 flags = GOVD_SHARED | GOVD_EXPLICIT;
7481 goto do_add;
7482 case OMP_CLAUSE_FIRSTPRIVATE:
7483 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7484 check_non_private = "firstprivate";
7485 goto do_add;
7486 case OMP_CLAUSE_LASTPRIVATE:
7487 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7488 check_non_private = "lastprivate";
7489 decl = OMP_CLAUSE_DECL (c);
7490 if (error_operand_p (decl))
7491 goto do_add;
7492 else if (outer_ctx
7493 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7494 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7495 && splay_tree_lookup (outer_ctx->variables,
7496 (splay_tree_key) decl) == NULL)
7498 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7499 if (outer_ctx->outer_context)
7500 omp_notice_variable (outer_ctx->outer_context, decl, true);
7502 else if (outer_ctx
7503 && (outer_ctx->region_type & ORT_TASK) != 0
7504 && outer_ctx->combined_loop
7505 && splay_tree_lookup (outer_ctx->variables,
7506 (splay_tree_key) decl) == NULL)
7508 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7509 if (outer_ctx->outer_context)
7510 omp_notice_variable (outer_ctx->outer_context, decl, true);
7512 else if (outer_ctx
7513 && (outer_ctx->region_type == ORT_WORKSHARE
7514 || outer_ctx->region_type == ORT_ACC)
7515 && outer_ctx->combined_loop
7516 && splay_tree_lookup (outer_ctx->variables,
7517 (splay_tree_key) decl) == NULL
7518 && !omp_check_private (outer_ctx, decl, false))
7520 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7521 if (outer_ctx->outer_context
7522 && (outer_ctx->outer_context->region_type
7523 == ORT_COMBINED_PARALLEL)
7524 && splay_tree_lookup (outer_ctx->outer_context->variables,
7525 (splay_tree_key) decl) == NULL)
7527 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7528 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7529 if (octx->outer_context)
7531 octx = octx->outer_context;
7532 if (octx->region_type == ORT_WORKSHARE
7533 && octx->combined_loop
7534 && splay_tree_lookup (octx->variables,
7535 (splay_tree_key) decl) == NULL
7536 && !omp_check_private (octx, decl, false))
7538 omp_add_variable (octx, decl,
7539 GOVD_LASTPRIVATE | GOVD_SEEN);
7540 octx = octx->outer_context;
7541 if (octx
7542 && octx->region_type == ORT_COMBINED_TEAMS
7543 && (splay_tree_lookup (octx->variables,
7544 (splay_tree_key) decl)
7545 == NULL))
7547 omp_add_variable (octx, decl,
7548 GOVD_SHARED | GOVD_SEEN);
7549 octx = octx->outer_context;
7552 if (octx)
7553 omp_notice_variable (octx, decl, true);
7556 else if (outer_ctx->outer_context)
7557 omp_notice_variable (outer_ctx->outer_context, decl, true);
7559 goto do_add;
7560 case OMP_CLAUSE_REDUCTION:
7561 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7562 /* OpenACC permits reductions on private variables. */
7563 if (!(region_type & ORT_ACC))
7564 check_non_private = "reduction";
7565 decl = OMP_CLAUSE_DECL (c);
7566 if (TREE_CODE (decl) == MEM_REF)
7568 tree type = TREE_TYPE (decl);
7569 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7570 NULL, is_gimple_val, fb_rvalue, false)
7571 == GS_ERROR)
7573 remove = true;
7574 break;
7576 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7577 if (DECL_P (v))
7579 omp_firstprivatize_variable (ctx, v);
7580 omp_notice_variable (ctx, v, true);
7582 decl = TREE_OPERAND (decl, 0);
7583 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7585 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7586 NULL, is_gimple_val, fb_rvalue, false)
7587 == GS_ERROR)
7589 remove = true;
7590 break;
7592 v = TREE_OPERAND (decl, 1);
7593 if (DECL_P (v))
7595 omp_firstprivatize_variable (ctx, v);
7596 omp_notice_variable (ctx, v, true);
7598 decl = TREE_OPERAND (decl, 0);
7600 if (TREE_CODE (decl) == ADDR_EXPR
7601 || TREE_CODE (decl) == INDIRECT_REF)
7602 decl = TREE_OPERAND (decl, 0);
7604 goto do_add_decl;
7605 case OMP_CLAUSE_LINEAR:
7606 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7607 is_gimple_val, fb_rvalue) == GS_ERROR)
7609 remove = true;
7610 break;
7612 else
7614 if (code == OMP_SIMD
7615 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7617 struct gimplify_omp_ctx *octx = outer_ctx;
7618 if (octx
7619 && octx->region_type == ORT_WORKSHARE
7620 && octx->combined_loop
7621 && !octx->distribute)
7623 if (octx->outer_context
7624 && (octx->outer_context->region_type
7625 == ORT_COMBINED_PARALLEL))
7626 octx = octx->outer_context->outer_context;
7627 else
7628 octx = octx->outer_context;
7630 if (octx
7631 && octx->region_type == ORT_WORKSHARE
7632 && octx->combined_loop
7633 && octx->distribute)
7635 error_at (OMP_CLAUSE_LOCATION (c),
7636 "%<linear%> clause for variable other than "
7637 "loop iterator specified on construct "
7638 "combined with %<distribute%>");
7639 remove = true;
7640 break;
7643 /* For combined #pragma omp parallel for simd, need to put
7644 lastprivate and perhaps firstprivate too on the
7645 parallel. Similarly for #pragma omp for simd. */
7646 struct gimplify_omp_ctx *octx = outer_ctx;
7647 decl = NULL_TREE;
7650 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7651 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7652 break;
7653 decl = OMP_CLAUSE_DECL (c);
7654 if (error_operand_p (decl))
7656 decl = NULL_TREE;
7657 break;
7659 flags = GOVD_SEEN;
7660 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7661 flags |= GOVD_FIRSTPRIVATE;
7662 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7663 flags |= GOVD_LASTPRIVATE;
7664 if (octx
7665 && octx->region_type == ORT_WORKSHARE
7666 && octx->combined_loop)
7668 if (octx->outer_context
7669 && (octx->outer_context->region_type
7670 == ORT_COMBINED_PARALLEL))
7671 octx = octx->outer_context;
7672 else if (omp_check_private (octx, decl, false))
7673 break;
7675 else if (octx
7676 && (octx->region_type & ORT_TASK) != 0
7677 && octx->combined_loop)
7679 else if (octx
7680 && octx->region_type == ORT_COMBINED_PARALLEL
7681 && ctx->region_type == ORT_WORKSHARE
7682 && octx == outer_ctx)
7683 flags = GOVD_SEEN | GOVD_SHARED;
7684 else if (octx
7685 && octx->region_type == ORT_COMBINED_TEAMS)
7686 flags = GOVD_SEEN | GOVD_SHARED;
7687 else if (octx
7688 && octx->region_type == ORT_COMBINED_TARGET)
7690 flags &= ~GOVD_LASTPRIVATE;
7691 if (flags == GOVD_SEEN)
7692 break;
7694 else
7695 break;
7696 splay_tree_node on
7697 = splay_tree_lookup (octx->variables,
7698 (splay_tree_key) decl);
7699 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7701 octx = NULL;
7702 break;
7704 omp_add_variable (octx, decl, flags);
7705 if (octx->outer_context == NULL)
7706 break;
7707 octx = octx->outer_context;
7709 while (1);
7710 if (octx
7711 && decl
7712 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7713 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7714 omp_notice_variable (octx, decl, true);
7716 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7717 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7718 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7720 notice_outer = false;
7721 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7723 goto do_add;
7725 case OMP_CLAUSE_MAP:
7726 decl = OMP_CLAUSE_DECL (c);
7727 if (error_operand_p (decl))
7728 remove = true;
7729 switch (code)
7731 case OMP_TARGET:
7732 break;
7733 case OACC_DATA:
7734 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7735 break;
7736 /* FALLTHRU */
7737 case OMP_TARGET_DATA:
7738 case OMP_TARGET_ENTER_DATA:
7739 case OMP_TARGET_EXIT_DATA:
7740 case OACC_ENTER_DATA:
7741 case OACC_EXIT_DATA:
7742 case OACC_HOST_DATA:
7743 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7744 || (OMP_CLAUSE_MAP_KIND (c)
7745 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7746 /* For target {,enter ,exit }data only the array slice is
7747 mapped, but not the pointer to it. */
7748 remove = true;
7749 break;
7750 default:
7751 break;
7753 if (remove)
7754 break;
7755 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7757 struct gimplify_omp_ctx *octx;
7758 for (octx = outer_ctx; octx; octx = octx->outer_context)
7760 if (octx->region_type != ORT_ACC_HOST_DATA)
7761 break;
7762 splay_tree_node n2
7763 = splay_tree_lookup (octx->variables,
7764 (splay_tree_key) decl);
7765 if (n2)
7766 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7767 "declared in enclosing %<host_data%> region",
7768 DECL_NAME (decl));
7771 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7772 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7773 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7774 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7775 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7777 remove = true;
7778 break;
7780 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7781 || (OMP_CLAUSE_MAP_KIND (c)
7782 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7783 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7785 OMP_CLAUSE_SIZE (c)
7786 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7787 false);
7788 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7789 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7791 if (!DECL_P (decl))
7793 tree d = decl, *pd;
7794 if (TREE_CODE (d) == ARRAY_REF)
7796 while (TREE_CODE (d) == ARRAY_REF)
7797 d = TREE_OPERAND (d, 0);
7798 if (TREE_CODE (d) == COMPONENT_REF
7799 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7800 decl = d;
7802 pd = &OMP_CLAUSE_DECL (c);
7803 if (d == decl
7804 && TREE_CODE (decl) == INDIRECT_REF
7805 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7806 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7807 == REFERENCE_TYPE))
7809 pd = &TREE_OPERAND (decl, 0);
7810 decl = TREE_OPERAND (decl, 0);
7812 if (TREE_CODE (decl) == COMPONENT_REF)
7814 while (TREE_CODE (decl) == COMPONENT_REF)
7815 decl = TREE_OPERAND (decl, 0);
7816 if (TREE_CODE (decl) == INDIRECT_REF
7817 && DECL_P (TREE_OPERAND (decl, 0))
7818 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7819 == REFERENCE_TYPE))
7820 decl = TREE_OPERAND (decl, 0);
7822 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7823 == GS_ERROR)
7825 remove = true;
7826 break;
7828 if (DECL_P (decl))
7830 if (error_operand_p (decl))
7832 remove = true;
7833 break;
7836 tree stype = TREE_TYPE (decl);
7837 if (TREE_CODE (stype) == REFERENCE_TYPE)
7838 stype = TREE_TYPE (stype);
7839 if (TYPE_SIZE_UNIT (stype) == NULL
7840 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7842 error_at (OMP_CLAUSE_LOCATION (c),
7843 "mapping field %qE of variable length "
7844 "structure", OMP_CLAUSE_DECL (c));
7845 remove = true;
7846 break;
7849 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7851 /* Error recovery. */
7852 if (prev_list_p == NULL)
7854 remove = true;
7855 break;
7857 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7859 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7860 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7862 remove = true;
7863 break;
7868 tree offset;
7869 HOST_WIDE_INT bitsize, bitpos;
7870 machine_mode mode;
7871 int unsignedp, reversep, volatilep = 0;
7872 tree base = OMP_CLAUSE_DECL (c);
7873 while (TREE_CODE (base) == ARRAY_REF)
7874 base = TREE_OPERAND (base, 0);
7875 if (TREE_CODE (base) == INDIRECT_REF)
7876 base = TREE_OPERAND (base, 0);
7877 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7878 &mode, &unsignedp, &reversep,
7879 &volatilep);
7880 tree orig_base = base;
7881 if ((TREE_CODE (base) == INDIRECT_REF
7882 || (TREE_CODE (base) == MEM_REF
7883 && integer_zerop (TREE_OPERAND (base, 1))))
7884 && DECL_P (TREE_OPERAND (base, 0))
7885 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7886 == REFERENCE_TYPE))
7887 base = TREE_OPERAND (base, 0);
7888 gcc_assert (base == decl
7889 && (offset == NULL_TREE
7890 || TREE_CODE (offset) == INTEGER_CST));
7892 splay_tree_node n
7893 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7894 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7895 == GOMP_MAP_ALWAYS_POINTER);
7896 if (n == NULL || (n->value & GOVD_MAP) == 0)
7898 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7899 OMP_CLAUSE_MAP);
7900 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7901 if (orig_base != base)
7902 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7903 else
7904 OMP_CLAUSE_DECL (l) = decl;
7905 OMP_CLAUSE_SIZE (l) = size_int (1);
7906 if (struct_map_to_clause == NULL)
7907 struct_map_to_clause = new hash_map<tree, tree>;
7908 struct_map_to_clause->put (decl, l);
7909 if (ptr)
7911 enum gomp_map_kind mkind
7912 = code == OMP_TARGET_EXIT_DATA
7913 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7914 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7915 OMP_CLAUSE_MAP);
7916 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7917 OMP_CLAUSE_DECL (c2)
7918 = unshare_expr (OMP_CLAUSE_DECL (c));
7919 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7920 OMP_CLAUSE_SIZE (c2)
7921 = TYPE_SIZE_UNIT (ptr_type_node);
7922 OMP_CLAUSE_CHAIN (l) = c2;
7923 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7925 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7926 tree c3
7927 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7928 OMP_CLAUSE_MAP);
7929 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7930 OMP_CLAUSE_DECL (c3)
7931 = unshare_expr (OMP_CLAUSE_DECL (c4));
7932 OMP_CLAUSE_SIZE (c3)
7933 = TYPE_SIZE_UNIT (ptr_type_node);
7934 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7935 OMP_CLAUSE_CHAIN (c2) = c3;
7937 *prev_list_p = l;
7938 prev_list_p = NULL;
7940 else
7942 OMP_CLAUSE_CHAIN (l) = c;
7943 *list_p = l;
7944 list_p = &OMP_CLAUSE_CHAIN (l);
7946 if (orig_base != base && code == OMP_TARGET)
7948 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7949 OMP_CLAUSE_MAP);
7950 enum gomp_map_kind mkind
7951 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7952 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7953 OMP_CLAUSE_DECL (c2) = decl;
7954 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7955 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7956 OMP_CLAUSE_CHAIN (l) = c2;
7958 flags = GOVD_MAP | GOVD_EXPLICIT;
7959 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7960 flags |= GOVD_SEEN;
7961 goto do_add_decl;
7963 else
7965 tree *osc = struct_map_to_clause->get (decl);
7966 tree *sc = NULL, *scp = NULL;
7967 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7968 n->value |= GOVD_SEEN;
7969 offset_int o1, o2;
7970 if (offset)
7971 o1 = wi::to_offset (offset);
7972 else
7973 o1 = 0;
7974 if (bitpos)
7975 o1 = o1 + bitpos / BITS_PER_UNIT;
7976 sc = &OMP_CLAUSE_CHAIN (*osc);
7977 if (*sc != c
7978 && (OMP_CLAUSE_MAP_KIND (*sc)
7979 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7980 sc = &OMP_CLAUSE_CHAIN (*sc);
7981 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7982 if (ptr && sc == prev_list_p)
7983 break;
7984 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7985 != COMPONENT_REF
7986 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7987 != INDIRECT_REF)
7988 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7989 != ARRAY_REF))
7990 break;
7991 else
7993 tree offset2;
7994 HOST_WIDE_INT bitsize2, bitpos2;
7995 base = OMP_CLAUSE_DECL (*sc);
7996 if (TREE_CODE (base) == ARRAY_REF)
7998 while (TREE_CODE (base) == ARRAY_REF)
7999 base = TREE_OPERAND (base, 0);
8000 if (TREE_CODE (base) != COMPONENT_REF
8001 || (TREE_CODE (TREE_TYPE (base))
8002 != ARRAY_TYPE))
8003 break;
8005 else if (TREE_CODE (base) == INDIRECT_REF
8006 && (TREE_CODE (TREE_OPERAND (base, 0))
8007 == COMPONENT_REF)
8008 && (TREE_CODE (TREE_TYPE
8009 (TREE_OPERAND (base, 0)))
8010 == REFERENCE_TYPE))
8011 base = TREE_OPERAND (base, 0);
8012 base = get_inner_reference (base, &bitsize2,
8013 &bitpos2, &offset2,
8014 &mode, &unsignedp,
8015 &reversep, &volatilep);
8016 if ((TREE_CODE (base) == INDIRECT_REF
8017 || (TREE_CODE (base) == MEM_REF
8018 && integer_zerop (TREE_OPERAND (base,
8019 1))))
8020 && DECL_P (TREE_OPERAND (base, 0))
8021 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8022 0)))
8023 == REFERENCE_TYPE))
8024 base = TREE_OPERAND (base, 0);
8025 if (base != decl)
8026 break;
8027 if (scp)
8028 continue;
8029 gcc_assert (offset == NULL_TREE
8030 || TREE_CODE (offset) == INTEGER_CST);
8031 tree d1 = OMP_CLAUSE_DECL (*sc);
8032 tree d2 = OMP_CLAUSE_DECL (c);
8033 while (TREE_CODE (d1) == ARRAY_REF)
8034 d1 = TREE_OPERAND (d1, 0);
8035 while (TREE_CODE (d2) == ARRAY_REF)
8036 d2 = TREE_OPERAND (d2, 0);
8037 if (TREE_CODE (d1) == INDIRECT_REF)
8038 d1 = TREE_OPERAND (d1, 0);
8039 if (TREE_CODE (d2) == INDIRECT_REF)
8040 d2 = TREE_OPERAND (d2, 0);
8041 while (TREE_CODE (d1) == COMPONENT_REF)
8042 if (TREE_CODE (d2) == COMPONENT_REF
8043 && TREE_OPERAND (d1, 1)
8044 == TREE_OPERAND (d2, 1))
8046 d1 = TREE_OPERAND (d1, 0);
8047 d2 = TREE_OPERAND (d2, 0);
8049 else
8050 break;
8051 if (d1 == d2)
8053 error_at (OMP_CLAUSE_LOCATION (c),
8054 "%qE appears more than once in map "
8055 "clauses", OMP_CLAUSE_DECL (c));
8056 remove = true;
8057 break;
8059 if (offset2)
8060 o2 = wi::to_offset (offset2);
8061 else
8062 o2 = 0;
8063 if (bitpos2)
8064 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8065 if (wi::ltu_p (o1, o2)
8066 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8068 if (ptr)
8069 scp = sc;
8070 else
8071 break;
8074 if (remove)
8075 break;
8076 OMP_CLAUSE_SIZE (*osc)
8077 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8078 size_one_node);
8079 if (ptr)
8081 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8082 OMP_CLAUSE_MAP);
8083 tree cl = NULL_TREE;
8084 enum gomp_map_kind mkind
8085 = code == OMP_TARGET_EXIT_DATA
8086 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8087 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8088 OMP_CLAUSE_DECL (c2)
8089 = unshare_expr (OMP_CLAUSE_DECL (c));
8090 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8091 OMP_CLAUSE_SIZE (c2)
8092 = TYPE_SIZE_UNIT (ptr_type_node);
8093 cl = scp ? *prev_list_p : c2;
8094 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8096 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8097 tree c3
8098 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8099 OMP_CLAUSE_MAP);
8100 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8101 OMP_CLAUSE_DECL (c3)
8102 = unshare_expr (OMP_CLAUSE_DECL (c4));
8103 OMP_CLAUSE_SIZE (c3)
8104 = TYPE_SIZE_UNIT (ptr_type_node);
8105 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8106 if (!scp)
8107 OMP_CLAUSE_CHAIN (c2) = c3;
8108 else
8109 cl = c3;
8111 if (scp)
8112 *scp = c2;
8113 if (sc == prev_list_p)
8115 *sc = cl;
8116 prev_list_p = NULL;
8118 else
8120 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8121 list_p = prev_list_p;
8122 prev_list_p = NULL;
8123 OMP_CLAUSE_CHAIN (c) = *sc;
8124 *sc = cl;
8125 continue;
8128 else if (*sc != c)
8130 *list_p = OMP_CLAUSE_CHAIN (c);
8131 OMP_CLAUSE_CHAIN (c) = *sc;
8132 *sc = c;
8133 continue;
8137 if (!remove
8138 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8139 && OMP_CLAUSE_CHAIN (c)
8140 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8141 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8142 == GOMP_MAP_ALWAYS_POINTER))
8143 prev_list_p = list_p;
8144 break;
8146 flags = GOVD_MAP | GOVD_EXPLICIT;
8147 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8148 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8149 flags |= GOVD_MAP_ALWAYS_TO;
8150 goto do_add;
8152 case OMP_CLAUSE_DEPEND:
8153 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8155 tree deps = OMP_CLAUSE_DECL (c);
8156 while (deps && TREE_CODE (deps) == TREE_LIST)
8158 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8159 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8160 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8161 pre_p, NULL, is_gimple_val, fb_rvalue);
8162 deps = TREE_CHAIN (deps);
8164 break;
8166 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8167 break;
8168 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8170 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8171 NULL, is_gimple_val, fb_rvalue);
8172 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8174 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8176 remove = true;
8177 break;
8179 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8180 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8181 is_gimple_val, fb_rvalue) == GS_ERROR)
8183 remove = true;
8184 break;
8186 break;
8188 case OMP_CLAUSE_TO:
8189 case OMP_CLAUSE_FROM:
8190 case OMP_CLAUSE__CACHE_:
8191 decl = OMP_CLAUSE_DECL (c);
8192 if (error_operand_p (decl))
8194 remove = true;
8195 break;
8197 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8198 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8199 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8200 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8201 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8203 remove = true;
8204 break;
8206 if (!DECL_P (decl))
8208 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8209 NULL, is_gimple_lvalue, fb_lvalue)
8210 == GS_ERROR)
8212 remove = true;
8213 break;
8215 break;
8217 goto do_notice;
8219 case OMP_CLAUSE_USE_DEVICE_PTR:
8220 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8221 goto do_add;
8222 case OMP_CLAUSE_IS_DEVICE_PTR:
8223 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8224 goto do_add;
8226 do_add:
8227 decl = OMP_CLAUSE_DECL (c);
8228 do_add_decl:
8229 if (error_operand_p (decl))
8231 remove = true;
8232 break;
8234 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8236 tree t = omp_member_access_dummy_var (decl);
8237 if (t)
8239 tree v = DECL_VALUE_EXPR (decl);
8240 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8241 if (outer_ctx)
8242 omp_notice_variable (outer_ctx, t, true);
8245 if (code == OACC_DATA
8246 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8247 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8248 flags |= GOVD_MAP_0LEN_ARRAY;
8249 omp_add_variable (ctx, decl, flags);
8250 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8251 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8253 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8254 GOVD_LOCAL | GOVD_SEEN);
8255 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8256 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8257 find_decl_expr,
8258 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8259 NULL) == NULL_TREE)
8260 omp_add_variable (ctx,
8261 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8262 GOVD_LOCAL | GOVD_SEEN);
8263 gimplify_omp_ctxp = ctx;
8264 push_gimplify_context ();
8266 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8267 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8269 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8270 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8271 pop_gimplify_context
8272 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8273 push_gimplify_context ();
8274 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8275 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8276 pop_gimplify_context
8277 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8278 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8279 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8281 gimplify_omp_ctxp = outer_ctx;
8283 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8284 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8286 gimplify_omp_ctxp = ctx;
8287 push_gimplify_context ();
8288 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8290 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8291 NULL, NULL);
8292 TREE_SIDE_EFFECTS (bind) = 1;
8293 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8294 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8296 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8297 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8298 pop_gimplify_context
8299 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8300 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8302 gimplify_omp_ctxp = outer_ctx;
8304 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8305 && OMP_CLAUSE_LINEAR_STMT (c))
8307 gimplify_omp_ctxp = ctx;
8308 push_gimplify_context ();
8309 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8311 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8312 NULL, NULL);
8313 TREE_SIDE_EFFECTS (bind) = 1;
8314 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8315 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8317 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8318 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8319 pop_gimplify_context
8320 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8321 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8323 gimplify_omp_ctxp = outer_ctx;
8325 if (notice_outer)
8326 goto do_notice;
8327 break;
8329 case OMP_CLAUSE_COPYIN:
8330 case OMP_CLAUSE_COPYPRIVATE:
8331 decl = OMP_CLAUSE_DECL (c);
8332 if (error_operand_p (decl))
8334 remove = true;
8335 break;
8337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8338 && !remove
8339 && !omp_check_private (ctx, decl, true))
8341 remove = true;
8342 if (is_global_var (decl))
8344 if (DECL_THREAD_LOCAL_P (decl))
8345 remove = false;
8346 else if (DECL_HAS_VALUE_EXPR_P (decl))
8348 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8350 if (value
8351 && DECL_P (value)
8352 && DECL_THREAD_LOCAL_P (value))
8353 remove = false;
8356 if (remove)
8357 error_at (OMP_CLAUSE_LOCATION (c),
8358 "copyprivate variable %qE is not threadprivate"
8359 " or private in outer context", DECL_NAME (decl));
8361 do_notice:
8362 if (outer_ctx)
8363 omp_notice_variable (outer_ctx, decl, true);
8364 if (check_non_private
8365 && region_type == ORT_WORKSHARE
8366 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8367 || decl == OMP_CLAUSE_DECL (c)
8368 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8369 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8370 == ADDR_EXPR
8371 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8372 == POINTER_PLUS_EXPR
8373 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8374 (OMP_CLAUSE_DECL (c), 0), 0))
8375 == ADDR_EXPR)))))
8376 && omp_check_private (ctx, decl, false))
8378 error ("%s variable %qE is private in outer context",
8379 check_non_private, DECL_NAME (decl));
8380 remove = true;
8382 break;
8384 case OMP_CLAUSE_IF:
8385 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8386 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8388 const char *p[2];
8389 for (int i = 0; i < 2; i++)
8390 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8392 case OMP_PARALLEL: p[i] = "parallel"; break;
8393 case OMP_TASK: p[i] = "task"; break;
8394 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8395 case OMP_TARGET_DATA: p[i] = "target data"; break;
8396 case OMP_TARGET: p[i] = "target"; break;
8397 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8398 case OMP_TARGET_ENTER_DATA:
8399 p[i] = "target enter data"; break;
8400 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8401 default: gcc_unreachable ();
8403 error_at (OMP_CLAUSE_LOCATION (c),
8404 "expected %qs %<if%> clause modifier rather than %qs",
8405 p[0], p[1]);
8406 remove = true;
8408 /* Fall through. */
8410 case OMP_CLAUSE_FINAL:
8411 OMP_CLAUSE_OPERAND (c, 0)
8412 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8413 /* Fall through. */
8415 case OMP_CLAUSE_SCHEDULE:
8416 case OMP_CLAUSE_NUM_THREADS:
8417 case OMP_CLAUSE_NUM_TEAMS:
8418 case OMP_CLAUSE_THREAD_LIMIT:
8419 case OMP_CLAUSE_DIST_SCHEDULE:
8420 case OMP_CLAUSE_DEVICE:
8421 case OMP_CLAUSE_PRIORITY:
8422 case OMP_CLAUSE_GRAINSIZE:
8423 case OMP_CLAUSE_NUM_TASKS:
8424 case OMP_CLAUSE_HINT:
8425 case OMP_CLAUSE__CILK_FOR_COUNT_:
8426 case OMP_CLAUSE_ASYNC:
8427 case OMP_CLAUSE_WAIT:
8428 case OMP_CLAUSE_NUM_GANGS:
8429 case OMP_CLAUSE_NUM_WORKERS:
8430 case OMP_CLAUSE_VECTOR_LENGTH:
8431 case OMP_CLAUSE_WORKER:
8432 case OMP_CLAUSE_VECTOR:
8433 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8434 is_gimple_val, fb_rvalue) == GS_ERROR)
8435 remove = true;
8436 break;
8438 case OMP_CLAUSE_GANG:
8439 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8440 is_gimple_val, fb_rvalue) == GS_ERROR)
8441 remove = true;
8442 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8443 is_gimple_val, fb_rvalue) == GS_ERROR)
8444 remove = true;
8445 break;
8447 case OMP_CLAUSE_NOWAIT:
8448 case OMP_CLAUSE_ORDERED:
8449 case OMP_CLAUSE_UNTIED:
8450 case OMP_CLAUSE_COLLAPSE:
8451 case OMP_CLAUSE_TILE:
8452 case OMP_CLAUSE_AUTO:
8453 case OMP_CLAUSE_SEQ:
8454 case OMP_CLAUSE_INDEPENDENT:
8455 case OMP_CLAUSE_MERGEABLE:
8456 case OMP_CLAUSE_PROC_BIND:
8457 case OMP_CLAUSE_SAFELEN:
8458 case OMP_CLAUSE_SIMDLEN:
8459 case OMP_CLAUSE_NOGROUP:
8460 case OMP_CLAUSE_THREADS:
8461 case OMP_CLAUSE_SIMD:
8462 break;
8464 case OMP_CLAUSE_DEFAULTMAP:
8465 ctx->target_map_scalars_firstprivate = false;
8466 break;
8468 case OMP_CLAUSE_ALIGNED:
8469 decl = OMP_CLAUSE_DECL (c);
8470 if (error_operand_p (decl))
8472 remove = true;
8473 break;
8475 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8476 is_gimple_val, fb_rvalue) == GS_ERROR)
8478 remove = true;
8479 break;
8481 if (!is_global_var (decl)
8482 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8483 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8484 break;
8486 case OMP_CLAUSE_DEFAULT:
8487 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8488 break;
8490 default:
8491 gcc_unreachable ();
8494 if (code == OACC_DATA
8495 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8496 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8497 remove = true;
8498 if (remove)
8499 *list_p = OMP_CLAUSE_CHAIN (c);
8500 else
8501 list_p = &OMP_CLAUSE_CHAIN (c);
8504 gimplify_omp_ctxp = ctx;
8505 if (struct_map_to_clause)
8506 delete struct_map_to_clause;
8509 /* Return true if DECL is a candidate for shared to firstprivate
8510 optimization. We only consider non-addressable scalars, not
8511 too big, and not references. */
8513 static bool
8514 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8516 if (TREE_ADDRESSABLE (decl))
8517 return false;
8518 tree type = TREE_TYPE (decl);
8519 if (!is_gimple_reg_type (type)
8520 || TREE_CODE (type) == REFERENCE_TYPE
8521 || TREE_ADDRESSABLE (type))
8522 return false;
8523 /* Don't optimize too large decls, as each thread/task will have
8524 its own. */
8525 HOST_WIDE_INT len = int_size_in_bytes (type);
8526 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8527 return false;
8528 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8529 return false;
8530 return true;
8533 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8534 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8535 GOVD_WRITTEN in outer contexts. */
8537 static void
8538 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8540 for (; ctx; ctx = ctx->outer_context)
8542 splay_tree_node n = splay_tree_lookup (ctx->variables,
8543 (splay_tree_key) decl);
8544 if (n == NULL)
8545 continue;
8546 else if (n->value & GOVD_SHARED)
8548 n->value |= GOVD_WRITTEN;
8549 return;
8551 else if (n->value & GOVD_DATA_SHARE_CLASS)
8552 return;
8556 /* Helper callback for walk_gimple_seq to discover possible stores
8557 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8558 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8559 for those. */
8561 static tree
8562 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8564 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8566 *walk_subtrees = 0;
8567 if (!wi->is_lhs)
8568 return NULL_TREE;
8570 tree op = *tp;
8573 if (handled_component_p (op))
8574 op = TREE_OPERAND (op, 0);
8575 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8576 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8577 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8578 else
8579 break;
8581 while (1);
8582 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8583 return NULL_TREE;
8585 omp_mark_stores (gimplify_omp_ctxp, op);
8586 return NULL_TREE;
8589 /* Helper callback for walk_gimple_seq to discover possible stores
8590 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8591 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8592 for those. */
8594 static tree
8595 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8596 bool *handled_ops_p,
8597 struct walk_stmt_info *wi)
8599 gimple *stmt = gsi_stmt (*gsi_p);
8600 switch (gimple_code (stmt))
8602 /* Don't recurse on OpenMP constructs for which
8603 gimplify_adjust_omp_clauses already handled the bodies,
8604 except handle gimple_omp_for_pre_body. */
8605 case GIMPLE_OMP_FOR:
8606 *handled_ops_p = true;
8607 if (gimple_omp_for_pre_body (stmt))
8608 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8609 omp_find_stores_stmt, omp_find_stores_op, wi);
8610 break;
8611 case GIMPLE_OMP_PARALLEL:
8612 case GIMPLE_OMP_TASK:
8613 case GIMPLE_OMP_SECTIONS:
8614 case GIMPLE_OMP_SINGLE:
8615 case GIMPLE_OMP_TARGET:
8616 case GIMPLE_OMP_TEAMS:
8617 case GIMPLE_OMP_CRITICAL:
8618 *handled_ops_p = true;
8619 break;
8620 default:
8621 break;
8623 return NULL_TREE;
8626 struct gimplify_adjust_omp_clauses_data
8628 tree *list_p;
8629 gimple_seq *pre_p;
8632 /* For all variables that were not actually used within the context,
8633 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8635 static int
8636 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8638 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8639 gimple_seq *pre_p
8640 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8641 tree decl = (tree) n->key;
8642 unsigned flags = n->value;
8643 enum omp_clause_code code;
8644 tree clause;
8645 bool private_debug;
8647 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8648 return 0;
8649 if ((flags & GOVD_SEEN) == 0)
8650 return 0;
8651 if (flags & GOVD_DEBUG_PRIVATE)
8653 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8654 private_debug = true;
8656 else if (flags & GOVD_MAP)
8657 private_debug = false;
8658 else
8659 private_debug
8660 = lang_hooks.decls.omp_private_debug_clause (decl,
8661 !!(flags & GOVD_SHARED));
8662 if (private_debug)
8663 code = OMP_CLAUSE_PRIVATE;
8664 else if (flags & GOVD_MAP)
8666 code = OMP_CLAUSE_MAP;
8667 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8668 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8670 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8671 return 0;
8674 else if (flags & GOVD_SHARED)
8676 if (is_global_var (decl))
8678 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8679 while (ctx != NULL)
8681 splay_tree_node on
8682 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8683 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8684 | GOVD_PRIVATE | GOVD_REDUCTION
8685 | GOVD_LINEAR | GOVD_MAP)) != 0)
8686 break;
8687 ctx = ctx->outer_context;
8689 if (ctx == NULL)
8690 return 0;
8692 code = OMP_CLAUSE_SHARED;
8694 else if (flags & GOVD_PRIVATE)
8695 code = OMP_CLAUSE_PRIVATE;
8696 else if (flags & GOVD_FIRSTPRIVATE)
8698 code = OMP_CLAUSE_FIRSTPRIVATE;
8699 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8700 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8701 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8703 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8704 "%<target%> construct", decl);
8705 return 0;
8708 else if (flags & GOVD_LASTPRIVATE)
8709 code = OMP_CLAUSE_LASTPRIVATE;
8710 else if (flags & GOVD_ALIGNED)
8711 return 0;
8712 else
8713 gcc_unreachable ();
8715 if (((flags & GOVD_LASTPRIVATE)
8716 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8717 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8718 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8720 tree chain = *list_p;
8721 clause = build_omp_clause (input_location, code);
8722 OMP_CLAUSE_DECL (clause) = decl;
8723 OMP_CLAUSE_CHAIN (clause) = chain;
8724 if (private_debug)
8725 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8726 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8727 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8728 else if (code == OMP_CLAUSE_SHARED
8729 && (flags & GOVD_WRITTEN) == 0
8730 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8731 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8732 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8733 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8734 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8736 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8737 OMP_CLAUSE_DECL (nc) = decl;
8738 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8739 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8740 OMP_CLAUSE_DECL (clause)
8741 = build_simple_mem_ref_loc (input_location, decl);
8742 OMP_CLAUSE_DECL (clause)
8743 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8744 build_int_cst (build_pointer_type (char_type_node), 0));
8745 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8746 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8747 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8748 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8749 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8750 OMP_CLAUSE_CHAIN (nc) = chain;
8751 OMP_CLAUSE_CHAIN (clause) = nc;
8752 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8753 gimplify_omp_ctxp = ctx->outer_context;
8754 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8755 pre_p, NULL, is_gimple_val, fb_rvalue);
8756 gimplify_omp_ctxp = ctx;
8758 else if (code == OMP_CLAUSE_MAP)
8760 int kind;
8761 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8762 switch (flags & (GOVD_MAP_TO_ONLY
8763 | GOVD_MAP_FORCE
8764 | GOVD_MAP_FORCE_PRESENT))
8766 case 0:
8767 kind = GOMP_MAP_TOFROM;
8768 break;
8769 case GOVD_MAP_FORCE:
8770 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8771 break;
8772 case GOVD_MAP_TO_ONLY:
8773 kind = GOMP_MAP_TO;
8774 break;
8775 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8776 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8777 break;
8778 case GOVD_MAP_FORCE_PRESENT:
8779 kind = GOMP_MAP_FORCE_PRESENT;
8780 break;
8781 default:
8782 gcc_unreachable ();
8784 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8785 if (DECL_SIZE (decl)
8786 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8788 tree decl2 = DECL_VALUE_EXPR (decl);
8789 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8790 decl2 = TREE_OPERAND (decl2, 0);
8791 gcc_assert (DECL_P (decl2));
8792 tree mem = build_simple_mem_ref (decl2);
8793 OMP_CLAUSE_DECL (clause) = mem;
8794 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8795 if (gimplify_omp_ctxp->outer_context)
8797 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8798 omp_notice_variable (ctx, decl2, true);
8799 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8801 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8802 OMP_CLAUSE_MAP);
8803 OMP_CLAUSE_DECL (nc) = decl;
8804 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8805 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8806 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8807 else
8808 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8809 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8810 OMP_CLAUSE_CHAIN (clause) = nc;
8812 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8813 && lang_hooks.decls.omp_privatize_by_reference (decl))
8815 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8816 OMP_CLAUSE_SIZE (clause)
8817 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8818 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8819 gimplify_omp_ctxp = ctx->outer_context;
8820 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8821 pre_p, NULL, is_gimple_val, fb_rvalue);
8822 gimplify_omp_ctxp = ctx;
8823 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8824 OMP_CLAUSE_MAP);
8825 OMP_CLAUSE_DECL (nc) = decl;
8826 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8827 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8828 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8829 OMP_CLAUSE_CHAIN (clause) = nc;
8831 else
8832 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8834 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8836 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8837 OMP_CLAUSE_DECL (nc) = decl;
8838 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8839 OMP_CLAUSE_CHAIN (nc) = chain;
8840 OMP_CLAUSE_CHAIN (clause) = nc;
8841 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8842 gimplify_omp_ctxp = ctx->outer_context;
8843 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8844 gimplify_omp_ctxp = ctx;
8846 *list_p = clause;
8847 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8848 gimplify_omp_ctxp = ctx->outer_context;
8849 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8850 if (gimplify_omp_ctxp)
8851 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8852 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8853 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8854 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8855 true);
8856 gimplify_omp_ctxp = ctx;
8857 return 0;
8860 static void
8861 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8862 enum tree_code code)
8864 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8865 tree c, decl;
8867 if (body)
8869 struct gimplify_omp_ctx *octx;
8870 for (octx = ctx; octx; octx = octx->outer_context)
8871 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8872 break;
8873 if (octx)
8875 struct walk_stmt_info wi;
8876 memset (&wi, 0, sizeof (wi));
8877 walk_gimple_seq (body, omp_find_stores_stmt,
8878 omp_find_stores_op, &wi);
8881 while ((c = *list_p) != NULL)
8883 splay_tree_node n;
8884 bool remove = false;
8886 switch (OMP_CLAUSE_CODE (c))
8888 case OMP_CLAUSE_FIRSTPRIVATE:
8889 if ((ctx->region_type & ORT_TARGET)
8890 && (ctx->region_type & ORT_ACC) == 0
8891 && TYPE_ATOMIC (strip_array_types
8892 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8894 error_at (OMP_CLAUSE_LOCATION (c),
8895 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8896 "%<target%> construct", OMP_CLAUSE_DECL (c));
8897 remove = true;
8898 break;
8900 /* FALLTHRU */
8901 case OMP_CLAUSE_PRIVATE:
8902 case OMP_CLAUSE_SHARED:
8903 case OMP_CLAUSE_LINEAR:
8904 decl = OMP_CLAUSE_DECL (c);
8905 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8906 remove = !(n->value & GOVD_SEEN);
8907 if (! remove)
8909 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8910 if ((n->value & GOVD_DEBUG_PRIVATE)
8911 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8913 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8914 || ((n->value & GOVD_DATA_SHARE_CLASS)
8915 == GOVD_SHARED));
8916 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8917 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8920 && (n->value & GOVD_WRITTEN) == 0
8921 && DECL_P (decl)
8922 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8923 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8924 else if (DECL_P (decl)
8925 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8926 && (n->value & GOVD_WRITTEN) != 0)
8927 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8928 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8929 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8930 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8932 break;
8934 case OMP_CLAUSE_LASTPRIVATE:
8935 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8936 accurately reflect the presence of a FIRSTPRIVATE clause. */
8937 decl = OMP_CLAUSE_DECL (c);
8938 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8939 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8940 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8941 if (code == OMP_DISTRIBUTE
8942 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8944 remove = true;
8945 error_at (OMP_CLAUSE_LOCATION (c),
8946 "same variable used in %<firstprivate%> and "
8947 "%<lastprivate%> clauses on %<distribute%> "
8948 "construct");
8950 if (!remove
8951 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8952 && DECL_P (decl)
8953 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8954 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8955 break;
8957 case OMP_CLAUSE_ALIGNED:
8958 decl = OMP_CLAUSE_DECL (c);
8959 if (!is_global_var (decl))
8961 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8962 remove = n == NULL || !(n->value & GOVD_SEEN);
8963 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8965 struct gimplify_omp_ctx *octx;
8966 if (n != NULL
8967 && (n->value & (GOVD_DATA_SHARE_CLASS
8968 & ~GOVD_FIRSTPRIVATE)))
8969 remove = true;
8970 else
8971 for (octx = ctx->outer_context; octx;
8972 octx = octx->outer_context)
8974 n = splay_tree_lookup (octx->variables,
8975 (splay_tree_key) decl);
8976 if (n == NULL)
8977 continue;
8978 if (n->value & GOVD_LOCAL)
8979 break;
8980 /* We have to avoid assigning a shared variable
8981 to itself when trying to add
8982 __builtin_assume_aligned. */
8983 if (n->value & GOVD_SHARED)
8985 remove = true;
8986 break;
8991 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8993 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8994 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8995 remove = true;
8997 break;
8999 case OMP_CLAUSE_MAP:
9000 if (code == OMP_TARGET_EXIT_DATA
9001 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9003 remove = true;
9004 break;
9006 decl = OMP_CLAUSE_DECL (c);
9007 /* Data clauses associated with acc parallel reductions must be
9008 compatible with present_or_copy. Warn and adjust the clause
9009 if that is not the case. */
9010 if (ctx->region_type == ORT_ACC_PARALLEL)
9012 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9013 n = NULL;
9015 if (DECL_P (t))
9016 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9018 if (n && (n->value & GOVD_REDUCTION))
9020 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9022 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9023 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9024 && kind != GOMP_MAP_FORCE_PRESENT
9025 && kind != GOMP_MAP_POINTER)
9027 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9028 "incompatible data clause with reduction "
9029 "on %qE; promoting to present_or_copy",
9030 DECL_NAME (t));
9031 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9035 if (!DECL_P (decl))
9037 if ((ctx->region_type & ORT_TARGET) != 0
9038 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9040 if (TREE_CODE (decl) == INDIRECT_REF
9041 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9042 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9043 == REFERENCE_TYPE))
9044 decl = TREE_OPERAND (decl, 0);
9045 if (TREE_CODE (decl) == COMPONENT_REF)
9047 while (TREE_CODE (decl) == COMPONENT_REF)
9048 decl = TREE_OPERAND (decl, 0);
9049 if (DECL_P (decl))
9051 n = splay_tree_lookup (ctx->variables,
9052 (splay_tree_key) decl);
9053 if (!(n->value & GOVD_SEEN))
9054 remove = true;
9058 break;
9060 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9061 if ((ctx->region_type & ORT_TARGET) != 0
9062 && !(n->value & GOVD_SEEN)
9063 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9064 && (!is_global_var (decl)
9065 || !lookup_attribute ("omp declare target link",
9066 DECL_ATTRIBUTES (decl))))
9068 remove = true;
9069 /* For struct element mapping, if struct is never referenced
9070 in target block and none of the mapping has always modifier,
9071 remove all the struct element mappings, which immediately
9072 follow the GOMP_MAP_STRUCT map clause. */
9073 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9075 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9076 while (cnt--)
9077 OMP_CLAUSE_CHAIN (c)
9078 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9081 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9082 && code == OMP_TARGET_EXIT_DATA)
9083 remove = true;
9084 else if (DECL_SIZE (decl)
9085 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9086 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9087 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9088 && (OMP_CLAUSE_MAP_KIND (c)
9089 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9091 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9092 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9093 INTEGER_CST. */
9094 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9096 tree decl2 = DECL_VALUE_EXPR (decl);
9097 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9098 decl2 = TREE_OPERAND (decl2, 0);
9099 gcc_assert (DECL_P (decl2));
9100 tree mem = build_simple_mem_ref (decl2);
9101 OMP_CLAUSE_DECL (c) = mem;
9102 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9103 if (ctx->outer_context)
9105 omp_notice_variable (ctx->outer_context, decl2, true);
9106 omp_notice_variable (ctx->outer_context,
9107 OMP_CLAUSE_SIZE (c), true);
9109 if (((ctx->region_type & ORT_TARGET) != 0
9110 || !ctx->target_firstprivatize_array_bases)
9111 && ((n->value & GOVD_SEEN) == 0
9112 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9114 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9115 OMP_CLAUSE_MAP);
9116 OMP_CLAUSE_DECL (nc) = decl;
9117 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9118 if (ctx->target_firstprivatize_array_bases)
9119 OMP_CLAUSE_SET_MAP_KIND (nc,
9120 GOMP_MAP_FIRSTPRIVATE_POINTER);
9121 else
9122 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9123 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9124 OMP_CLAUSE_CHAIN (c) = nc;
9125 c = nc;
9128 else
9130 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9131 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9132 gcc_assert ((n->value & GOVD_SEEN) == 0
9133 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9134 == 0));
9136 break;
9138 case OMP_CLAUSE_TO:
9139 case OMP_CLAUSE_FROM:
9140 case OMP_CLAUSE__CACHE_:
9141 decl = OMP_CLAUSE_DECL (c);
9142 if (!DECL_P (decl))
9143 break;
9144 if (DECL_SIZE (decl)
9145 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9147 tree decl2 = DECL_VALUE_EXPR (decl);
9148 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9149 decl2 = TREE_OPERAND (decl2, 0);
9150 gcc_assert (DECL_P (decl2));
9151 tree mem = build_simple_mem_ref (decl2);
9152 OMP_CLAUSE_DECL (c) = mem;
9153 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9154 if (ctx->outer_context)
9156 omp_notice_variable (ctx->outer_context, decl2, true);
9157 omp_notice_variable (ctx->outer_context,
9158 OMP_CLAUSE_SIZE (c), true);
9161 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9162 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9163 break;
9165 case OMP_CLAUSE_REDUCTION:
9166 decl = OMP_CLAUSE_DECL (c);
9167 /* OpenACC reductions need a present_or_copy data clause.
9168 Add one if necessary. Error is the reduction is private. */
9169 if (ctx->region_type == ORT_ACC_PARALLEL)
9171 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9172 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9173 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9174 "reduction on %qE", DECL_NAME (decl));
9175 else if ((n->value & GOVD_MAP) == 0)
9177 tree next = OMP_CLAUSE_CHAIN (c);
9178 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9179 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9180 OMP_CLAUSE_DECL (nc) = decl;
9181 OMP_CLAUSE_CHAIN (c) = nc;
9182 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9183 while (1)
9185 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9186 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9187 break;
9188 nc = OMP_CLAUSE_CHAIN (nc);
9190 OMP_CLAUSE_CHAIN (nc) = next;
9191 n->value |= GOVD_MAP;
9194 if (DECL_P (decl)
9195 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9196 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9197 break;
9198 case OMP_CLAUSE_COPYIN:
9199 case OMP_CLAUSE_COPYPRIVATE:
9200 case OMP_CLAUSE_IF:
9201 case OMP_CLAUSE_NUM_THREADS:
9202 case OMP_CLAUSE_NUM_TEAMS:
9203 case OMP_CLAUSE_THREAD_LIMIT:
9204 case OMP_CLAUSE_DIST_SCHEDULE:
9205 case OMP_CLAUSE_DEVICE:
9206 case OMP_CLAUSE_SCHEDULE:
9207 case OMP_CLAUSE_NOWAIT:
9208 case OMP_CLAUSE_ORDERED:
9209 case OMP_CLAUSE_DEFAULT:
9210 case OMP_CLAUSE_UNTIED:
9211 case OMP_CLAUSE_COLLAPSE:
9212 case OMP_CLAUSE_FINAL:
9213 case OMP_CLAUSE_MERGEABLE:
9214 case OMP_CLAUSE_PROC_BIND:
9215 case OMP_CLAUSE_SAFELEN:
9216 case OMP_CLAUSE_SIMDLEN:
9217 case OMP_CLAUSE_DEPEND:
9218 case OMP_CLAUSE_PRIORITY:
9219 case OMP_CLAUSE_GRAINSIZE:
9220 case OMP_CLAUSE_NUM_TASKS:
9221 case OMP_CLAUSE_NOGROUP:
9222 case OMP_CLAUSE_THREADS:
9223 case OMP_CLAUSE_SIMD:
9224 case OMP_CLAUSE_HINT:
9225 case OMP_CLAUSE_DEFAULTMAP:
9226 case OMP_CLAUSE_USE_DEVICE_PTR:
9227 case OMP_CLAUSE_IS_DEVICE_PTR:
9228 case OMP_CLAUSE__CILK_FOR_COUNT_:
9229 case OMP_CLAUSE_ASYNC:
9230 case OMP_CLAUSE_WAIT:
9231 case OMP_CLAUSE_INDEPENDENT:
9232 case OMP_CLAUSE_NUM_GANGS:
9233 case OMP_CLAUSE_NUM_WORKERS:
9234 case OMP_CLAUSE_VECTOR_LENGTH:
9235 case OMP_CLAUSE_GANG:
9236 case OMP_CLAUSE_WORKER:
9237 case OMP_CLAUSE_VECTOR:
9238 case OMP_CLAUSE_AUTO:
9239 case OMP_CLAUSE_SEQ:
9240 case OMP_CLAUSE_TILE:
9241 break;
9243 default:
9244 gcc_unreachable ();
9247 if (remove)
9248 *list_p = OMP_CLAUSE_CHAIN (c);
9249 else
9250 list_p = &OMP_CLAUSE_CHAIN (c);
9253 /* Add in any implicit data sharing. */
9254 struct gimplify_adjust_omp_clauses_data data;
9255 data.list_p = list_p;
9256 data.pre_p = pre_p;
9257 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9259 gimplify_omp_ctxp = ctx->outer_context;
9260 delete_omp_context (ctx);
9263 /* Gimplify OACC_CACHE. */
9265 static void
9266 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9268 tree expr = *expr_p;
9270 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9271 OACC_CACHE);
9272 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9273 OACC_CACHE);
9275 /* TODO: Do something sensible with this information. */
9277 *expr_p = NULL_TREE;
9280 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9281 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9282 kind. The entry kind will replace the one in CLAUSE, while the exit
9283 kind will be used in a new omp_clause and returned to the caller. */
9285 static tree
9286 gimplify_oacc_declare_1 (tree clause)
9288 HOST_WIDE_INT kind, new_op;
9289 bool ret = false;
9290 tree c = NULL;
9292 kind = OMP_CLAUSE_MAP_KIND (clause);
9294 switch (kind)
9296 case GOMP_MAP_ALLOC:
9297 case GOMP_MAP_FORCE_ALLOC:
9298 case GOMP_MAP_FORCE_TO:
9299 new_op = GOMP_MAP_DELETE;
9300 ret = true;
9301 break;
9303 case GOMP_MAP_FORCE_FROM:
9304 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9305 new_op = GOMP_MAP_FORCE_FROM;
9306 ret = true;
9307 break;
9309 case GOMP_MAP_FORCE_TOFROM:
9310 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9311 new_op = GOMP_MAP_FORCE_FROM;
9312 ret = true;
9313 break;
9315 case GOMP_MAP_FROM:
9316 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9317 new_op = GOMP_MAP_FROM;
9318 ret = true;
9319 break;
9321 case GOMP_MAP_TOFROM:
9322 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9323 new_op = GOMP_MAP_FROM;
9324 ret = true;
9325 break;
9327 case GOMP_MAP_DEVICE_RESIDENT:
9328 case GOMP_MAP_FORCE_DEVICEPTR:
9329 case GOMP_MAP_FORCE_PRESENT:
9330 case GOMP_MAP_LINK:
9331 case GOMP_MAP_POINTER:
9332 case GOMP_MAP_TO:
9333 break;
9335 default:
9336 gcc_unreachable ();
9337 break;
9340 if (ret)
9342 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9343 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9344 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9347 return c;
9350 /* Gimplify OACC_DECLARE. */
9352 static void
9353 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9355 tree expr = *expr_p;
9356 gomp_target *stmt;
9357 tree clauses, t, decl;
9359 clauses = OACC_DECLARE_CLAUSES (expr);
9361 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9362 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9364 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9366 decl = OMP_CLAUSE_DECL (t);
9368 if (TREE_CODE (decl) == MEM_REF)
9369 decl = TREE_OPERAND (decl, 0);
9371 if (VAR_P (decl) && !is_oacc_declared (decl))
9373 tree attr = get_identifier ("oacc declare target");
9374 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9375 DECL_ATTRIBUTES (decl));
9378 if (VAR_P (decl)
9379 && !is_global_var (decl)
9380 && DECL_CONTEXT (decl) == current_function_decl)
9382 tree c = gimplify_oacc_declare_1 (t);
9383 if (c)
9385 if (oacc_declare_returns == NULL)
9386 oacc_declare_returns = new hash_map<tree, tree>;
9388 oacc_declare_returns->put (decl, c);
9392 if (gimplify_omp_ctxp)
9393 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9396 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9397 clauses);
9399 gimplify_seq_add_stmt (pre_p, stmt);
9401 *expr_p = NULL_TREE;
9404 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9405 gimplification of the body, as well as scanning the body for used
9406 variables. We need to do this scan now, because variable-sized
9407 decls will be decomposed during gimplification. */
9409 static void
9410 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9412 tree expr = *expr_p;
9413 gimple *g;
9414 gimple_seq body = NULL;
9416 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9417 OMP_PARALLEL_COMBINED (expr)
9418 ? ORT_COMBINED_PARALLEL
9419 : ORT_PARALLEL, OMP_PARALLEL);
9421 push_gimplify_context ();
9423 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9424 if (gimple_code (g) == GIMPLE_BIND)
9425 pop_gimplify_context (g);
9426 else
9427 pop_gimplify_context (NULL);
9429 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9430 OMP_PARALLEL);
9432 g = gimple_build_omp_parallel (body,
9433 OMP_PARALLEL_CLAUSES (expr),
9434 NULL_TREE, NULL_TREE);
9435 if (OMP_PARALLEL_COMBINED (expr))
9436 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9437 gimplify_seq_add_stmt (pre_p, g);
9438 *expr_p = NULL_TREE;
9441 /* Gimplify the contents of an OMP_TASK statement. This involves
9442 gimplification of the body, as well as scanning the body for used
9443 variables. We need to do this scan now, because variable-sized
9444 decls will be decomposed during gimplification. */
9446 static void
9447 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9449 tree expr = *expr_p;
9450 gimple *g;
9451 gimple_seq body = NULL;
9453 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9454 omp_find_clause (OMP_TASK_CLAUSES (expr),
9455 OMP_CLAUSE_UNTIED)
9456 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9458 push_gimplify_context ();
9460 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9461 if (gimple_code (g) == GIMPLE_BIND)
9462 pop_gimplify_context (g);
9463 else
9464 pop_gimplify_context (NULL);
9466 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9467 OMP_TASK);
9469 g = gimple_build_omp_task (body,
9470 OMP_TASK_CLAUSES (expr),
9471 NULL_TREE, NULL_TREE,
9472 NULL_TREE, NULL_TREE, NULL_TREE);
9473 gimplify_seq_add_stmt (pre_p, g);
9474 *expr_p = NULL_TREE;
9477 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9478 with non-NULL OMP_FOR_INIT. */
9480 static tree
9481 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9483 *walk_subtrees = 0;
9484 switch (TREE_CODE (*tp))
9486 case OMP_FOR:
9487 *walk_subtrees = 1;
9488 /* FALLTHRU */
9489 case OMP_SIMD:
9490 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9491 return *tp;
9492 break;
9493 case BIND_EXPR:
9494 case STATEMENT_LIST:
9495 case OMP_PARALLEL:
9496 *walk_subtrees = 1;
9497 break;
9498 default:
9499 break;
9501 return NULL_TREE;
9504 /* Gimplify the gross structure of an OMP_FOR statement. */
9506 static enum gimplify_status
9507 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9509 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9510 enum gimplify_status ret = GS_ALL_DONE;
9511 enum gimplify_status tret;
9512 gomp_for *gfor;
9513 gimple_seq for_body, for_pre_body;
9514 int i;
9515 bitmap has_decl_expr = NULL;
9516 enum omp_region_type ort = ORT_WORKSHARE;
9518 orig_for_stmt = for_stmt = *expr_p;
9520 switch (TREE_CODE (for_stmt))
9522 case OMP_FOR:
9523 case CILK_FOR:
9524 case OMP_DISTRIBUTE:
9525 break;
9526 case OACC_LOOP:
9527 ort = ORT_ACC;
9528 break;
9529 case OMP_TASKLOOP:
9530 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9531 ort = ORT_UNTIED_TASK;
9532 else
9533 ort = ORT_TASK;
9534 break;
9535 case OMP_SIMD:
9536 case CILK_SIMD:
9537 ort = ORT_SIMD;
9538 break;
9539 default:
9540 gcc_unreachable ();
9543 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9544 clause for the IV. */
9545 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9547 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9548 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9549 decl = TREE_OPERAND (t, 0);
9550 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9551 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9552 && OMP_CLAUSE_DECL (c) == decl)
9554 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9555 break;
9559 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9561 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9562 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9563 find_combined_omp_for, NULL, NULL);
9564 if (inner_for_stmt == NULL_TREE)
9566 gcc_assert (seen_error ());
9567 *expr_p = NULL_TREE;
9568 return GS_ERROR;
9572 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9573 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9574 TREE_CODE (for_stmt));
9576 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9577 gimplify_omp_ctxp->distribute = true;
9579 /* Handle OMP_FOR_INIT. */
9580 for_pre_body = NULL;
9581 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9583 has_decl_expr = BITMAP_ALLOC (NULL);
9584 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9585 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9586 == VAR_DECL)
9588 t = OMP_FOR_PRE_BODY (for_stmt);
9589 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9591 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9593 tree_stmt_iterator si;
9594 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9595 tsi_next (&si))
9597 t = tsi_stmt (si);
9598 if (TREE_CODE (t) == DECL_EXPR
9599 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9600 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9604 if (OMP_FOR_PRE_BODY (for_stmt))
9606 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9607 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9608 else
9610 struct gimplify_omp_ctx ctx;
9611 memset (&ctx, 0, sizeof (ctx));
9612 ctx.region_type = ORT_NONE;
9613 gimplify_omp_ctxp = &ctx;
9614 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9615 gimplify_omp_ctxp = NULL;
9618 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9620 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9621 for_stmt = inner_for_stmt;
9623 /* For taskloop, need to gimplify the start, end and step before the
9624 taskloop, outside of the taskloop omp context. */
9625 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9627 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9629 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9630 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9632 TREE_OPERAND (t, 1)
9633 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9634 pre_p, NULL, false);
9635 tree c = build_omp_clause (input_location,
9636 OMP_CLAUSE_FIRSTPRIVATE);
9637 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9638 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9639 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9642 /* Handle OMP_FOR_COND. */
9643 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9644 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9646 TREE_OPERAND (t, 1)
9647 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9648 gimple_seq_empty_p (for_pre_body)
9649 ? pre_p : &for_pre_body, NULL,
9650 false);
9651 tree c = build_omp_clause (input_location,
9652 OMP_CLAUSE_FIRSTPRIVATE);
9653 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9654 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9655 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9658 /* Handle OMP_FOR_INCR. */
9659 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9660 if (TREE_CODE (t) == MODIFY_EXPR)
9662 decl = TREE_OPERAND (t, 0);
9663 t = TREE_OPERAND (t, 1);
9664 tree *tp = &TREE_OPERAND (t, 1);
9665 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9666 tp = &TREE_OPERAND (t, 0);
9668 if (!is_gimple_constant (*tp))
9670 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9671 ? pre_p : &for_pre_body;
9672 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9673 tree c = build_omp_clause (input_location,
9674 OMP_CLAUSE_FIRSTPRIVATE);
9675 OMP_CLAUSE_DECL (c) = *tp;
9676 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9677 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9682 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9683 OMP_TASKLOOP);
9686 if (orig_for_stmt != for_stmt)
9687 gimplify_omp_ctxp->combined_loop = true;
9689 for_body = NULL;
9690 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9691 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9692 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9693 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9695 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9696 bool is_doacross = false;
9697 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9699 is_doacross = true;
9700 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9701 (OMP_FOR_INIT (for_stmt))
9702 * 2);
9704 int collapse = 1, tile = 0;
9705 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9706 if (c)
9707 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9708 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9709 if (c)
9710 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9711 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9713 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9714 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9715 decl = TREE_OPERAND (t, 0);
9716 gcc_assert (DECL_P (decl));
9717 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9718 || POINTER_TYPE_P (TREE_TYPE (decl)));
9719 if (is_doacross)
9721 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9722 gimplify_omp_ctxp->loop_iter_var.quick_push
9723 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9724 else
9725 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9726 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9729 /* Make sure the iteration variable is private. */
9730 tree c = NULL_TREE;
9731 tree c2 = NULL_TREE;
9732 if (orig_for_stmt != for_stmt)
9733 /* Do this only on innermost construct for combined ones. */;
9734 else if (ort == ORT_SIMD)
9736 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9737 (splay_tree_key) decl);
9738 omp_is_private (gimplify_omp_ctxp, decl,
9739 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9740 != 1));
9741 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9742 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9743 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9745 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9746 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9747 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9748 if (has_decl_expr
9749 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9751 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9752 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9754 struct gimplify_omp_ctx *outer
9755 = gimplify_omp_ctxp->outer_context;
9756 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9758 if (outer->region_type == ORT_WORKSHARE
9759 && outer->combined_loop)
9761 n = splay_tree_lookup (outer->variables,
9762 (splay_tree_key)decl);
9763 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9765 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9766 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9768 else
9770 struct gimplify_omp_ctx *octx = outer->outer_context;
9771 if (octx
9772 && octx->region_type == ORT_COMBINED_PARALLEL
9773 && octx->outer_context
9774 && (octx->outer_context->region_type
9775 == ORT_WORKSHARE)
9776 && octx->outer_context->combined_loop)
9778 octx = octx->outer_context;
9779 n = splay_tree_lookup (octx->variables,
9780 (splay_tree_key)decl);
9781 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9783 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9784 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9791 OMP_CLAUSE_DECL (c) = decl;
9792 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9793 OMP_FOR_CLAUSES (for_stmt) = c;
9794 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9795 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9797 if (outer->region_type == ORT_WORKSHARE
9798 && outer->combined_loop)
9800 if (outer->outer_context
9801 && (outer->outer_context->region_type
9802 == ORT_COMBINED_PARALLEL))
9803 outer = outer->outer_context;
9804 else if (omp_check_private (outer, decl, false))
9805 outer = NULL;
9807 else if (((outer->region_type & ORT_TASK) != 0)
9808 && outer->combined_loop
9809 && !omp_check_private (gimplify_omp_ctxp,
9810 decl, false))
9812 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9814 omp_notice_variable (outer, decl, true);
9815 outer = NULL;
9817 if (outer)
9819 n = splay_tree_lookup (outer->variables,
9820 (splay_tree_key)decl);
9821 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9823 omp_add_variable (outer, decl,
9824 GOVD_LASTPRIVATE | GOVD_SEEN);
9825 if (outer->region_type == ORT_COMBINED_PARALLEL
9826 && outer->outer_context
9827 && (outer->outer_context->region_type
9828 == ORT_WORKSHARE)
9829 && outer->outer_context->combined_loop)
9831 outer = outer->outer_context;
9832 n = splay_tree_lookup (outer->variables,
9833 (splay_tree_key)decl);
9834 if (omp_check_private (outer, decl, false))
9835 outer = NULL;
9836 else if (n == NULL
9837 || ((n->value & GOVD_DATA_SHARE_CLASS)
9838 == 0))
9839 omp_add_variable (outer, decl,
9840 GOVD_LASTPRIVATE
9841 | GOVD_SEEN);
9842 else
9843 outer = NULL;
9845 if (outer && outer->outer_context
9846 && (outer->outer_context->region_type
9847 == ORT_COMBINED_TEAMS))
9849 outer = outer->outer_context;
9850 n = splay_tree_lookup (outer->variables,
9851 (splay_tree_key)decl);
9852 if (n == NULL
9853 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9854 omp_add_variable (outer, decl,
9855 GOVD_SHARED | GOVD_SEEN);
9856 else
9857 outer = NULL;
9859 if (outer && outer->outer_context)
9860 omp_notice_variable (outer->outer_context, decl,
9861 true);
9866 else
9868 bool lastprivate
9869 = (!has_decl_expr
9870 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9871 struct gimplify_omp_ctx *outer
9872 = gimplify_omp_ctxp->outer_context;
9873 if (outer && lastprivate)
9875 if (outer->region_type == ORT_WORKSHARE
9876 && outer->combined_loop)
9878 n = splay_tree_lookup (outer->variables,
9879 (splay_tree_key)decl);
9880 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9882 lastprivate = false;
9883 outer = NULL;
9885 else if (outer->outer_context
9886 && (outer->outer_context->region_type
9887 == ORT_COMBINED_PARALLEL))
9888 outer = outer->outer_context;
9889 else if (omp_check_private (outer, decl, false))
9890 outer = NULL;
9892 else if (((outer->region_type & ORT_TASK) != 0)
9893 && outer->combined_loop
9894 && !omp_check_private (gimplify_omp_ctxp,
9895 decl, false))
9897 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9899 omp_notice_variable (outer, decl, true);
9900 outer = NULL;
9902 if (outer)
9904 n = splay_tree_lookup (outer->variables,
9905 (splay_tree_key)decl);
9906 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9908 omp_add_variable (outer, decl,
9909 GOVD_LASTPRIVATE | GOVD_SEEN);
9910 if (outer->region_type == ORT_COMBINED_PARALLEL
9911 && outer->outer_context
9912 && (outer->outer_context->region_type
9913 == ORT_WORKSHARE)
9914 && outer->outer_context->combined_loop)
9916 outer = outer->outer_context;
9917 n = splay_tree_lookup (outer->variables,
9918 (splay_tree_key)decl);
9919 if (omp_check_private (outer, decl, false))
9920 outer = NULL;
9921 else if (n == NULL
9922 || ((n->value & GOVD_DATA_SHARE_CLASS)
9923 == 0))
9924 omp_add_variable (outer, decl,
9925 GOVD_LASTPRIVATE
9926 | GOVD_SEEN);
9927 else
9928 outer = NULL;
9930 if (outer && outer->outer_context
9931 && (outer->outer_context->region_type
9932 == ORT_COMBINED_TEAMS))
9934 outer = outer->outer_context;
9935 n = splay_tree_lookup (outer->variables,
9936 (splay_tree_key)decl);
9937 if (n == NULL
9938 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9939 omp_add_variable (outer, decl,
9940 GOVD_SHARED | GOVD_SEEN);
9941 else
9942 outer = NULL;
9944 if (outer && outer->outer_context)
9945 omp_notice_variable (outer->outer_context, decl,
9946 true);
9951 c = build_omp_clause (input_location,
9952 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9953 : OMP_CLAUSE_PRIVATE);
9954 OMP_CLAUSE_DECL (c) = decl;
9955 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9956 OMP_FOR_CLAUSES (for_stmt) = c;
9957 omp_add_variable (gimplify_omp_ctxp, decl,
9958 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9959 | GOVD_EXPLICIT | GOVD_SEEN);
9960 c = NULL_TREE;
9963 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9964 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9965 else
9966 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9968 /* If DECL is not a gimple register, create a temporary variable to act
9969 as an iteration counter. This is valid, since DECL cannot be
9970 modified in the body of the loop. Similarly for any iteration vars
9971 in simd with collapse > 1 where the iterator vars must be
9972 lastprivate. */
9973 if (orig_for_stmt != for_stmt)
9974 var = decl;
9975 else if (!is_gimple_reg (decl)
9976 || (ort == ORT_SIMD
9977 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9979 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9980 /* Make sure omp_add_variable is not called on it prematurely.
9981 We call it ourselves a few lines later. */
9982 gimplify_omp_ctxp = NULL;
9983 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9984 gimplify_omp_ctxp = ctx;
9985 TREE_OPERAND (t, 0) = var;
9987 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9989 if (ort == ORT_SIMD
9990 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9992 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9993 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9994 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9995 OMP_CLAUSE_DECL (c2) = var;
9996 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9997 OMP_FOR_CLAUSES (for_stmt) = c2;
9998 omp_add_variable (gimplify_omp_ctxp, var,
9999 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10000 if (c == NULL_TREE)
10002 c = c2;
10003 c2 = NULL_TREE;
10006 else
10007 omp_add_variable (gimplify_omp_ctxp, var,
10008 GOVD_PRIVATE | GOVD_SEEN);
10010 else
10011 var = decl;
10013 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10014 is_gimple_val, fb_rvalue, false);
10015 ret = MIN (ret, tret);
10016 if (ret == GS_ERROR)
10017 return ret;
10019 /* Handle OMP_FOR_COND. */
10020 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10021 gcc_assert (COMPARISON_CLASS_P (t));
10022 gcc_assert (TREE_OPERAND (t, 0) == decl);
10024 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10025 is_gimple_val, fb_rvalue, false);
10026 ret = MIN (ret, tret);
10028 /* Handle OMP_FOR_INCR. */
10029 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10030 switch (TREE_CODE (t))
10032 case PREINCREMENT_EXPR:
10033 case POSTINCREMENT_EXPR:
10035 tree decl = TREE_OPERAND (t, 0);
10036 /* c_omp_for_incr_canonicalize_ptr() should have been
10037 called to massage things appropriately. */
10038 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10040 if (orig_for_stmt != for_stmt)
10041 break;
10042 t = build_int_cst (TREE_TYPE (decl), 1);
10043 if (c)
10044 OMP_CLAUSE_LINEAR_STEP (c) = t;
10045 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10046 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10047 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10048 break;
10051 case PREDECREMENT_EXPR:
10052 case POSTDECREMENT_EXPR:
10053 /* c_omp_for_incr_canonicalize_ptr() should have been
10054 called to massage things appropriately. */
10055 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10056 if (orig_for_stmt != for_stmt)
10057 break;
10058 t = build_int_cst (TREE_TYPE (decl), -1);
10059 if (c)
10060 OMP_CLAUSE_LINEAR_STEP (c) = t;
10061 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10062 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10063 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10064 break;
10066 case MODIFY_EXPR:
10067 gcc_assert (TREE_OPERAND (t, 0) == decl);
10068 TREE_OPERAND (t, 0) = var;
10070 t = TREE_OPERAND (t, 1);
10071 switch (TREE_CODE (t))
10073 case PLUS_EXPR:
10074 if (TREE_OPERAND (t, 1) == decl)
10076 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10077 TREE_OPERAND (t, 0) = var;
10078 break;
10081 /* Fallthru. */
10082 case MINUS_EXPR:
10083 case POINTER_PLUS_EXPR:
10084 gcc_assert (TREE_OPERAND (t, 0) == decl);
10085 TREE_OPERAND (t, 0) = var;
10086 break;
10087 default:
10088 gcc_unreachable ();
10091 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10092 is_gimple_val, fb_rvalue, false);
10093 ret = MIN (ret, tret);
10094 if (c)
10096 tree step = TREE_OPERAND (t, 1);
10097 tree stept = TREE_TYPE (decl);
10098 if (POINTER_TYPE_P (stept))
10099 stept = sizetype;
10100 step = fold_convert (stept, step);
10101 if (TREE_CODE (t) == MINUS_EXPR)
10102 step = fold_build1 (NEGATE_EXPR, stept, step);
10103 OMP_CLAUSE_LINEAR_STEP (c) = step;
10104 if (step != TREE_OPERAND (t, 1))
10106 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10107 &for_pre_body, NULL,
10108 is_gimple_val, fb_rvalue, false);
10109 ret = MIN (ret, tret);
10112 break;
10114 default:
10115 gcc_unreachable ();
10118 if (c2)
10120 gcc_assert (c);
10121 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10124 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10126 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10127 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10128 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10129 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10130 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10131 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10132 && OMP_CLAUSE_DECL (c) == decl)
10134 if (is_doacross && (collapse == 1 || i >= collapse))
10135 t = var;
10136 else
10138 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10139 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10140 gcc_assert (TREE_OPERAND (t, 0) == var);
10141 t = TREE_OPERAND (t, 1);
10142 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10143 || TREE_CODE (t) == MINUS_EXPR
10144 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10145 gcc_assert (TREE_OPERAND (t, 0) == var);
10146 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10147 is_doacross ? var : decl,
10148 TREE_OPERAND (t, 1));
10150 gimple_seq *seq;
10151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10152 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10153 else
10154 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10155 gimplify_assign (decl, t, seq);
10160 BITMAP_FREE (has_decl_expr);
10162 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10164 push_gimplify_context ();
10165 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10167 OMP_FOR_BODY (orig_for_stmt)
10168 = build3 (BIND_EXPR, void_type_node, NULL,
10169 OMP_FOR_BODY (orig_for_stmt), NULL);
10170 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10174 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10175 &for_body);
10177 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10179 if (gimple_code (g) == GIMPLE_BIND)
10180 pop_gimplify_context (g);
10181 else
10182 pop_gimplify_context (NULL);
10185 if (orig_for_stmt != for_stmt)
10186 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10188 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10189 decl = TREE_OPERAND (t, 0);
10190 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10191 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10192 gimplify_omp_ctxp = ctx->outer_context;
10193 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10194 gimplify_omp_ctxp = ctx;
10195 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10196 TREE_OPERAND (t, 0) = var;
10197 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10198 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10199 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10202 gimplify_adjust_omp_clauses (pre_p, for_body,
10203 &OMP_FOR_CLAUSES (orig_for_stmt),
10204 TREE_CODE (orig_for_stmt));
10206 int kind;
10207 switch (TREE_CODE (orig_for_stmt))
10209 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10210 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10211 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10212 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10213 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10214 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10215 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10216 default:
10217 gcc_unreachable ();
10219 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10220 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10221 for_pre_body);
10222 if (orig_for_stmt != for_stmt)
10223 gimple_omp_for_set_combined_p (gfor, true);
10224 if (gimplify_omp_ctxp
10225 && (gimplify_omp_ctxp->combined_loop
10226 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10227 && gimplify_omp_ctxp->outer_context
10228 && gimplify_omp_ctxp->outer_context->combined_loop)))
10230 gimple_omp_for_set_combined_into_p (gfor, true);
10231 if (gimplify_omp_ctxp->combined_loop)
10232 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10233 else
10234 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10237 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10239 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10240 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10241 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10242 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10243 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10244 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10245 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10246 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10249 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10250 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10251 The outer taskloop stands for computing the number of iterations,
10252 counts for collapsed loops and holding taskloop specific clauses.
10253 The task construct stands for the effect of data sharing on the
10254 explicit task it creates and the inner taskloop stands for expansion
10255 of the static loop inside of the explicit task construct. */
10256 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10258 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10259 tree task_clauses = NULL_TREE;
10260 tree c = *gfor_clauses_ptr;
10261 tree *gtask_clauses_ptr = &task_clauses;
10262 tree outer_for_clauses = NULL_TREE;
10263 tree *gforo_clauses_ptr = &outer_for_clauses;
10264 for (; c; c = OMP_CLAUSE_CHAIN (c))
10265 switch (OMP_CLAUSE_CODE (c))
10267 /* These clauses are allowed on task, move them there. */
10268 case OMP_CLAUSE_SHARED:
10269 case OMP_CLAUSE_FIRSTPRIVATE:
10270 case OMP_CLAUSE_DEFAULT:
10271 case OMP_CLAUSE_IF:
10272 case OMP_CLAUSE_UNTIED:
10273 case OMP_CLAUSE_FINAL:
10274 case OMP_CLAUSE_MERGEABLE:
10275 case OMP_CLAUSE_PRIORITY:
10276 *gtask_clauses_ptr = c;
10277 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10278 break;
10279 case OMP_CLAUSE_PRIVATE:
10280 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10282 /* We want private on outer for and firstprivate
10283 on task. */
10284 *gtask_clauses_ptr
10285 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10286 OMP_CLAUSE_FIRSTPRIVATE);
10287 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10288 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10289 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10290 *gforo_clauses_ptr = c;
10291 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10293 else
10295 *gtask_clauses_ptr = c;
10296 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10298 break;
10299 /* These clauses go into outer taskloop clauses. */
10300 case OMP_CLAUSE_GRAINSIZE:
10301 case OMP_CLAUSE_NUM_TASKS:
10302 case OMP_CLAUSE_NOGROUP:
10303 *gforo_clauses_ptr = c;
10304 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10305 break;
10306 /* Taskloop clause we duplicate on both taskloops. */
10307 case OMP_CLAUSE_COLLAPSE:
10308 *gfor_clauses_ptr = c;
10309 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10310 *gforo_clauses_ptr = copy_node (c);
10311 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10312 break;
10313 /* For lastprivate, keep the clause on inner taskloop, and add
10314 a shared clause on task. If the same decl is also firstprivate,
10315 add also firstprivate clause on the inner taskloop. */
10316 case OMP_CLAUSE_LASTPRIVATE:
10317 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10319 /* For taskloop C++ lastprivate IVs, we want:
10320 1) private on outer taskloop
10321 2) firstprivate and shared on task
10322 3) lastprivate on inner taskloop */
10323 *gtask_clauses_ptr
10324 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10325 OMP_CLAUSE_FIRSTPRIVATE);
10326 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10327 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10328 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10329 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10330 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10331 OMP_CLAUSE_PRIVATE);
10332 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10333 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10334 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10335 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10337 *gfor_clauses_ptr = c;
10338 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10339 *gtask_clauses_ptr
10340 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10341 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10342 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10343 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10344 gtask_clauses_ptr
10345 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10346 break;
10347 default:
10348 gcc_unreachable ();
10350 *gfor_clauses_ptr = NULL_TREE;
10351 *gtask_clauses_ptr = NULL_TREE;
10352 *gforo_clauses_ptr = NULL_TREE;
10353 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10354 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10355 NULL_TREE, NULL_TREE, NULL_TREE);
10356 gimple_omp_task_set_taskloop_p (g, true);
10357 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10358 gomp_for *gforo
10359 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10360 gimple_omp_for_collapse (gfor),
10361 gimple_omp_for_pre_body (gfor));
10362 gimple_omp_for_set_pre_body (gfor, NULL);
10363 gimple_omp_for_set_combined_p (gforo, true);
10364 gimple_omp_for_set_combined_into_p (gfor, true);
10365 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10367 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10368 tree v = create_tmp_var (type);
10369 gimple_omp_for_set_index (gforo, i, v);
10370 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10371 gimple_omp_for_set_initial (gforo, i, t);
10372 gimple_omp_for_set_cond (gforo, i,
10373 gimple_omp_for_cond (gfor, i));
10374 t = unshare_expr (gimple_omp_for_final (gfor, i));
10375 gimple_omp_for_set_final (gforo, i, t);
10376 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10377 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10378 TREE_OPERAND (t, 0) = v;
10379 gimple_omp_for_set_incr (gforo, i, t);
10380 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10381 OMP_CLAUSE_DECL (t) = v;
10382 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10383 gimple_omp_for_set_clauses (gforo, t);
10385 gimplify_seq_add_stmt (pre_p, gforo);
10387 else
10388 gimplify_seq_add_stmt (pre_p, gfor);
10389 if (ret != GS_ALL_DONE)
10390 return GS_ERROR;
10391 *expr_p = NULL_TREE;
10392 return GS_ALL_DONE;
10395 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10396 of OMP_TARGET's body. */
10398 static tree
10399 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10401 *walk_subtrees = 0;
10402 switch (TREE_CODE (*tp))
10404 case OMP_TEAMS:
10405 return *tp;
10406 case BIND_EXPR:
10407 case STATEMENT_LIST:
10408 *walk_subtrees = 1;
10409 break;
10410 default:
10411 break;
10413 return NULL_TREE;
10416 /* Helper function of optimize_target_teams, determine if the expression
10417 can be computed safely before the target construct on the host. */
10419 static tree
10420 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10422 splay_tree_node n;
10424 if (TYPE_P (*tp))
10426 *walk_subtrees = 0;
10427 return NULL_TREE;
10429 switch (TREE_CODE (*tp))
10431 case VAR_DECL:
10432 case PARM_DECL:
10433 case RESULT_DECL:
10434 *walk_subtrees = 0;
10435 if (error_operand_p (*tp)
10436 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10437 || DECL_HAS_VALUE_EXPR_P (*tp)
10438 || DECL_THREAD_LOCAL_P (*tp)
10439 || TREE_SIDE_EFFECTS (*tp)
10440 || TREE_THIS_VOLATILE (*tp))
10441 return *tp;
10442 if (is_global_var (*tp)
10443 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10444 || lookup_attribute ("omp declare target link",
10445 DECL_ATTRIBUTES (*tp))))
10446 return *tp;
10447 if (VAR_P (*tp)
10448 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10449 && !is_global_var (*tp)
10450 && decl_function_context (*tp) == current_function_decl)
10451 return *tp;
10452 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10453 (splay_tree_key) *tp);
10454 if (n == NULL)
10456 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10457 return NULL_TREE;
10458 return *tp;
10460 else if (n->value & GOVD_LOCAL)
10461 return *tp;
10462 else if (n->value & GOVD_FIRSTPRIVATE)
10463 return NULL_TREE;
10464 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10465 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10466 return NULL_TREE;
10467 return *tp;
10468 case INTEGER_CST:
10469 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10470 return *tp;
10471 return NULL_TREE;
10472 case TARGET_EXPR:
10473 if (TARGET_EXPR_INITIAL (*tp)
10474 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10475 return *tp;
10476 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10477 walk_subtrees, NULL);
10478 /* Allow some reasonable subset of integral arithmetics. */
10479 case PLUS_EXPR:
10480 case MINUS_EXPR:
10481 case MULT_EXPR:
10482 case TRUNC_DIV_EXPR:
10483 case CEIL_DIV_EXPR:
10484 case FLOOR_DIV_EXPR:
10485 case ROUND_DIV_EXPR:
10486 case TRUNC_MOD_EXPR:
10487 case CEIL_MOD_EXPR:
10488 case FLOOR_MOD_EXPR:
10489 case ROUND_MOD_EXPR:
10490 case RDIV_EXPR:
10491 case EXACT_DIV_EXPR:
10492 case MIN_EXPR:
10493 case MAX_EXPR:
10494 case LSHIFT_EXPR:
10495 case RSHIFT_EXPR:
10496 case BIT_IOR_EXPR:
10497 case BIT_XOR_EXPR:
10498 case BIT_AND_EXPR:
10499 case NEGATE_EXPR:
10500 case ABS_EXPR:
10501 case BIT_NOT_EXPR:
10502 case NON_LVALUE_EXPR:
10503 CASE_CONVERT:
10504 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10505 return *tp;
10506 return NULL_TREE;
10507 /* And disallow anything else, except for comparisons. */
10508 default:
10509 if (COMPARISON_CLASS_P (*tp))
10510 return NULL_TREE;
10511 return *tp;
10515 /* Try to determine if the num_teams and/or thread_limit expressions
10516 can have their values determined already before entering the
10517 target construct.
10518 INTEGER_CSTs trivially are,
10519 integral decls that are firstprivate (explicitly or implicitly)
10520 or explicitly map(always, to:) or map(always, tofrom:) on the target
10521 region too, and expressions involving simple arithmetics on those
10522 too, function calls are not ok, dereferencing something neither etc.
10523 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10524 EXPR based on what we find:
10525 0 stands for clause not specified at all, use implementation default
10526 -1 stands for value that can't be determined easily before entering
10527 the target construct.
10528 If teams construct is not present at all, use 1 for num_teams
10529 and 0 for thread_limit (only one team is involved, and the thread
10530 limit is implementation defined. */
10532 static void
10533 optimize_target_teams (tree target, gimple_seq *pre_p)
10535 tree body = OMP_BODY (target);
10536 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10537 tree num_teams = integer_zero_node;
10538 tree thread_limit = integer_zero_node;
10539 location_t num_teams_loc = EXPR_LOCATION (target);
10540 location_t thread_limit_loc = EXPR_LOCATION (target);
10541 tree c, *p, expr;
10542 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10544 if (teams == NULL_TREE)
10545 num_teams = integer_one_node;
10546 else
10547 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10551 p = &num_teams;
10552 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10554 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10556 p = &thread_limit;
10557 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10559 else
10560 continue;
10561 expr = OMP_CLAUSE_OPERAND (c, 0);
10562 if (TREE_CODE (expr) == INTEGER_CST)
10564 *p = expr;
10565 continue;
10567 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10569 *p = integer_minus_one_node;
10570 continue;
10572 *p = expr;
10573 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10574 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10575 == GS_ERROR)
10577 gimplify_omp_ctxp = target_ctx;
10578 *p = integer_minus_one_node;
10579 continue;
10581 gimplify_omp_ctxp = target_ctx;
10582 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10583 OMP_CLAUSE_OPERAND (c, 0) = *p;
10585 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10586 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10587 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10588 OMP_TARGET_CLAUSES (target) = c;
10589 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10590 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10591 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10592 OMP_TARGET_CLAUSES (target) = c;
10595 /* Gimplify the gross structure of several OMP constructs. */
10597 static void
10598 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10600 tree expr = *expr_p;
10601 gimple *stmt;
10602 gimple_seq body = NULL;
10603 enum omp_region_type ort;
10605 switch (TREE_CODE (expr))
10607 case OMP_SECTIONS:
10608 case OMP_SINGLE:
10609 ort = ORT_WORKSHARE;
10610 break;
10611 case OMP_TARGET:
10612 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10613 break;
10614 case OACC_KERNELS:
10615 ort = ORT_ACC_KERNELS;
10616 break;
10617 case OACC_PARALLEL:
10618 ort = ORT_ACC_PARALLEL;
10619 break;
10620 case OACC_DATA:
10621 ort = ORT_ACC_DATA;
10622 break;
10623 case OMP_TARGET_DATA:
10624 ort = ORT_TARGET_DATA;
10625 break;
10626 case OMP_TEAMS:
10627 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10628 break;
10629 case OACC_HOST_DATA:
10630 ort = ORT_ACC_HOST_DATA;
10631 break;
10632 default:
10633 gcc_unreachable ();
10635 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10636 TREE_CODE (expr));
10637 if (TREE_CODE (expr) == OMP_TARGET)
10638 optimize_target_teams (expr, pre_p);
10639 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10641 push_gimplify_context ();
10642 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10643 if (gimple_code (g) == GIMPLE_BIND)
10644 pop_gimplify_context (g);
10645 else
10646 pop_gimplify_context (NULL);
10647 if ((ort & ORT_TARGET_DATA) != 0)
10649 enum built_in_function end_ix;
10650 switch (TREE_CODE (expr))
10652 case OACC_DATA:
10653 case OACC_HOST_DATA:
10654 end_ix = BUILT_IN_GOACC_DATA_END;
10655 break;
10656 case OMP_TARGET_DATA:
10657 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10658 break;
10659 default:
10660 gcc_unreachable ();
10662 tree fn = builtin_decl_explicit (end_ix);
10663 g = gimple_build_call (fn, 0);
10664 gimple_seq cleanup = NULL;
10665 gimple_seq_add_stmt (&cleanup, g);
10666 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10667 body = NULL;
10668 gimple_seq_add_stmt (&body, g);
10671 else
10672 gimplify_and_add (OMP_BODY (expr), &body);
10673 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10674 TREE_CODE (expr));
10676 switch (TREE_CODE (expr))
10678 case OACC_DATA:
10679 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10680 OMP_CLAUSES (expr));
10681 break;
10682 case OACC_KERNELS:
10683 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10684 OMP_CLAUSES (expr));
10685 break;
10686 case OACC_HOST_DATA:
10687 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10688 OMP_CLAUSES (expr));
10689 break;
10690 case OACC_PARALLEL:
10691 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10692 OMP_CLAUSES (expr));
10693 break;
10694 case OMP_SECTIONS:
10695 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10696 break;
10697 case OMP_SINGLE:
10698 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10699 break;
10700 case OMP_TARGET:
10701 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10702 OMP_CLAUSES (expr));
10703 break;
10704 case OMP_TARGET_DATA:
10705 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10706 OMP_CLAUSES (expr));
10707 break;
10708 case OMP_TEAMS:
10709 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10710 break;
10711 default:
10712 gcc_unreachable ();
10715 gimplify_seq_add_stmt (pre_p, stmt);
10716 *expr_p = NULL_TREE;
10719 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10720 target update constructs. */
10722 static void
10723 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10725 tree expr = *expr_p;
10726 int kind;
10727 gomp_target *stmt;
10728 enum omp_region_type ort = ORT_WORKSHARE;
10730 switch (TREE_CODE (expr))
10732 case OACC_ENTER_DATA:
10733 case OACC_EXIT_DATA:
10734 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10735 ort = ORT_ACC;
10736 break;
10737 case OACC_UPDATE:
10738 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10739 ort = ORT_ACC;
10740 break;
10741 case OMP_TARGET_UPDATE:
10742 kind = GF_OMP_TARGET_KIND_UPDATE;
10743 break;
10744 case OMP_TARGET_ENTER_DATA:
10745 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10746 break;
10747 case OMP_TARGET_EXIT_DATA:
10748 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10749 break;
10750 default:
10751 gcc_unreachable ();
10753 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10754 ort, TREE_CODE (expr));
10755 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10756 TREE_CODE (expr));
10757 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10759 gimplify_seq_add_stmt (pre_p, stmt);
10760 *expr_p = NULL_TREE;
10763 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10764 stabilized the lhs of the atomic operation as *ADDR. Return true if
10765 EXPR is this stabilized form. */
10767 static bool
10768 goa_lhs_expr_p (tree expr, tree addr)
10770 /* Also include casts to other type variants. The C front end is fond
10771 of adding these for e.g. volatile variables. This is like
10772 STRIP_TYPE_NOPS but includes the main variant lookup. */
10773 STRIP_USELESS_TYPE_CONVERSION (expr);
10775 if (TREE_CODE (expr) == INDIRECT_REF)
10777 expr = TREE_OPERAND (expr, 0);
10778 while (expr != addr
10779 && (CONVERT_EXPR_P (expr)
10780 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10781 && TREE_CODE (expr) == TREE_CODE (addr)
10782 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10784 expr = TREE_OPERAND (expr, 0);
10785 addr = TREE_OPERAND (addr, 0);
10787 if (expr == addr)
10788 return true;
10789 return (TREE_CODE (addr) == ADDR_EXPR
10790 && TREE_CODE (expr) == ADDR_EXPR
10791 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10793 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10794 return true;
10795 return false;
10798 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10799 expression does not involve the lhs, evaluate it into a temporary.
10800 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10801 or -1 if an error was encountered. */
10803 static int
10804 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10805 tree lhs_var)
10807 tree expr = *expr_p;
10808 int saw_lhs;
10810 if (goa_lhs_expr_p (expr, lhs_addr))
10812 *expr_p = lhs_var;
10813 return 1;
10815 if (is_gimple_val (expr))
10816 return 0;
10818 saw_lhs = 0;
10819 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10821 case tcc_binary:
10822 case tcc_comparison:
10823 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10824 lhs_var);
10825 /* FALLTHRU */
10826 case tcc_unary:
10827 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10828 lhs_var);
10829 break;
10830 case tcc_expression:
10831 switch (TREE_CODE (expr))
10833 case TRUTH_ANDIF_EXPR:
10834 case TRUTH_ORIF_EXPR:
10835 case TRUTH_AND_EXPR:
10836 case TRUTH_OR_EXPR:
10837 case TRUTH_XOR_EXPR:
10838 case BIT_INSERT_EXPR:
10839 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10840 lhs_addr, lhs_var);
10841 /* FALLTHRU */
10842 case TRUTH_NOT_EXPR:
10843 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10844 lhs_addr, lhs_var);
10845 break;
10846 case COMPOUND_EXPR:
10847 /* Break out any preevaluations from cp_build_modify_expr. */
10848 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10849 expr = TREE_OPERAND (expr, 1))
10850 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10851 *expr_p = expr;
10852 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10853 default:
10854 break;
10856 break;
10857 case tcc_reference:
10858 if (TREE_CODE (expr) == BIT_FIELD_REF)
10859 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10860 lhs_addr, lhs_var);
10861 break;
10862 default:
10863 break;
10866 if (saw_lhs == 0)
10868 enum gimplify_status gs;
10869 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10870 if (gs != GS_ALL_DONE)
10871 saw_lhs = -1;
10874 return saw_lhs;
10877 /* Gimplify an OMP_ATOMIC statement. */
10879 static enum gimplify_status
10880 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10882 tree addr = TREE_OPERAND (*expr_p, 0);
10883 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10884 ? NULL : TREE_OPERAND (*expr_p, 1);
10885 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10886 tree tmp_load;
10887 gomp_atomic_load *loadstmt;
10888 gomp_atomic_store *storestmt;
10890 tmp_load = create_tmp_reg (type);
10891 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10892 return GS_ERROR;
10894 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10895 != GS_ALL_DONE)
10896 return GS_ERROR;
10898 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10899 gimplify_seq_add_stmt (pre_p, loadstmt);
10900 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10901 != GS_ALL_DONE)
10902 return GS_ERROR;
10904 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10905 rhs = tmp_load;
10906 storestmt = gimple_build_omp_atomic_store (rhs);
10907 gimplify_seq_add_stmt (pre_p, storestmt);
10908 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10910 gimple_omp_atomic_set_seq_cst (loadstmt);
10911 gimple_omp_atomic_set_seq_cst (storestmt);
10913 switch (TREE_CODE (*expr_p))
10915 case OMP_ATOMIC_READ:
10916 case OMP_ATOMIC_CAPTURE_OLD:
10917 *expr_p = tmp_load;
10918 gimple_omp_atomic_set_need_value (loadstmt);
10919 break;
10920 case OMP_ATOMIC_CAPTURE_NEW:
10921 *expr_p = rhs;
10922 gimple_omp_atomic_set_need_value (storestmt);
10923 break;
10924 default:
10925 *expr_p = NULL;
10926 break;
10929 return GS_ALL_DONE;
10932 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10933 body, and adding some EH bits. */
10935 static enum gimplify_status
10936 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10938 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10939 gimple *body_stmt;
10940 gtransaction *trans_stmt;
10941 gimple_seq body = NULL;
10942 int subcode = 0;
10944 /* Wrap the transaction body in a BIND_EXPR so we have a context
10945 where to put decls for OMP. */
10946 if (TREE_CODE (tbody) != BIND_EXPR)
10948 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10949 TREE_SIDE_EFFECTS (bind) = 1;
10950 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10951 TRANSACTION_EXPR_BODY (expr) = bind;
10954 push_gimplify_context ();
10955 temp = voidify_wrapper_expr (*expr_p, NULL);
10957 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10958 pop_gimplify_context (body_stmt);
10960 trans_stmt = gimple_build_transaction (body);
10961 if (TRANSACTION_EXPR_OUTER (expr))
10962 subcode = GTMA_IS_OUTER;
10963 else if (TRANSACTION_EXPR_RELAXED (expr))
10964 subcode = GTMA_IS_RELAXED;
10965 gimple_transaction_set_subcode (trans_stmt, subcode);
10967 gimplify_seq_add_stmt (pre_p, trans_stmt);
10969 if (temp)
10971 *expr_p = temp;
10972 return GS_OK;
10975 *expr_p = NULL_TREE;
10976 return GS_ALL_DONE;
10979 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10980 is the OMP_BODY of the original EXPR (which has already been
10981 gimplified so it's not present in the EXPR).
10983 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10985 static gimple *
10986 gimplify_omp_ordered (tree expr, gimple_seq body)
10988 tree c, decls;
10989 int failures = 0;
10990 unsigned int i;
10991 tree source_c = NULL_TREE;
10992 tree sink_c = NULL_TREE;
10994 if (gimplify_omp_ctxp)
10996 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10997 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10998 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10999 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
11000 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
11002 error_at (OMP_CLAUSE_LOCATION (c),
11003 "%<ordered%> construct with %<depend%> clause must be "
11004 "closely nested inside a loop with %<ordered%> clause "
11005 "with a parameter");
11006 failures++;
11008 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11009 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
11011 bool fail = false;
11012 for (decls = OMP_CLAUSE_DECL (c), i = 0;
11013 decls && TREE_CODE (decls) == TREE_LIST;
11014 decls = TREE_CHAIN (decls), ++i)
11015 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
11016 continue;
11017 else if (TREE_VALUE (decls)
11018 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11020 error_at (OMP_CLAUSE_LOCATION (c),
11021 "variable %qE is not an iteration "
11022 "of outermost loop %d, expected %qE",
11023 TREE_VALUE (decls), i + 1,
11024 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11025 fail = true;
11026 failures++;
11028 else
11029 TREE_VALUE (decls)
11030 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
11031 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11033 error_at (OMP_CLAUSE_LOCATION (c),
11034 "number of variables in %<depend(sink)%> "
11035 "clause does not match number of "
11036 "iteration variables");
11037 failures++;
11039 sink_c = c;
11041 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11042 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11044 if (source_c)
11046 error_at (OMP_CLAUSE_LOCATION (c),
11047 "more than one %<depend(source)%> clause on an "
11048 "%<ordered%> construct");
11049 failures++;
11051 else
11052 source_c = c;
11055 if (source_c && sink_c)
11057 error_at (OMP_CLAUSE_LOCATION (source_c),
11058 "%<depend(source)%> clause specified together with "
11059 "%<depend(sink:)%> clauses on the same construct");
11060 failures++;
11063 if (failures)
11064 return gimple_build_nop ();
11065 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11068 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11069 expression produces a value to be used as an operand inside a GIMPLE
11070 statement, the value will be stored back in *EXPR_P. This value will
11071 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11072 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11073 emitted in PRE_P and POST_P.
11075 Additionally, this process may overwrite parts of the input
11076 expression during gimplification. Ideally, it should be
11077 possible to do non-destructive gimplification.
11079 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11080 the expression needs to evaluate to a value to be used as
11081 an operand in a GIMPLE statement, this value will be stored in
11082 *EXPR_P on exit. This happens when the caller specifies one
11083 of fb_lvalue or fb_rvalue fallback flags.
11085 PRE_P will contain the sequence of GIMPLE statements corresponding
11086 to the evaluation of EXPR and all the side-effects that must
11087 be executed before the main expression. On exit, the last
11088 statement of PRE_P is the core statement being gimplified. For
11089 instance, when gimplifying 'if (++a)' the last statement in
11090 PRE_P will be 'if (t.1)' where t.1 is the result of
11091 pre-incrementing 'a'.
11093 POST_P will contain the sequence of GIMPLE statements corresponding
11094 to the evaluation of all the side-effects that must be executed
11095 after the main expression. If this is NULL, the post
11096 side-effects are stored at the end of PRE_P.
11098 The reason why the output is split in two is to handle post
11099 side-effects explicitly. In some cases, an expression may have
11100 inner and outer post side-effects which need to be emitted in
11101 an order different from the one given by the recursive
11102 traversal. For instance, for the expression (*p--)++ the post
11103 side-effects of '--' must actually occur *after* the post
11104 side-effects of '++'. However, gimplification will first visit
11105 the inner expression, so if a separate POST sequence was not
11106 used, the resulting sequence would be:
11108 1 t.1 = *p
11109 2 p = p - 1
11110 3 t.2 = t.1 + 1
11111 4 *p = t.2
11113 However, the post-decrement operation in line #2 must not be
11114 evaluated until after the store to *p at line #4, so the
11115 correct sequence should be:
11117 1 t.1 = *p
11118 2 t.2 = t.1 + 1
11119 3 *p = t.2
11120 4 p = p - 1
11122 So, by specifying a separate post queue, it is possible
11123 to emit the post side-effects in the correct order.
11124 If POST_P is NULL, an internal queue will be used. Before
11125 returning to the caller, the sequence POST_P is appended to
11126 the main output sequence PRE_P.
11128 GIMPLE_TEST_F points to a function that takes a tree T and
11129 returns nonzero if T is in the GIMPLE form requested by the
11130 caller. The GIMPLE predicates are in gimple.c.
11132 FALLBACK tells the function what sort of a temporary we want if
11133 gimplification cannot produce an expression that complies with
11134 GIMPLE_TEST_F.
11136 fb_none means that no temporary should be generated
11137 fb_rvalue means that an rvalue is OK to generate
11138 fb_lvalue means that an lvalue is OK to generate
11139 fb_either means that either is OK, but an lvalue is preferable.
11140 fb_mayfail means that gimplification may fail (in which case
11141 GS_ERROR will be returned)
11143 The return value is either GS_ERROR or GS_ALL_DONE, since this
11144 function iterates until EXPR is completely gimplified or an error
11145 occurs. */
11147 enum gimplify_status
11148 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11149 bool (*gimple_test_f) (tree), fallback_t fallback)
11151 tree tmp;
11152 gimple_seq internal_pre = NULL;
11153 gimple_seq internal_post = NULL;
11154 tree save_expr;
11155 bool is_statement;
11156 location_t saved_location;
11157 enum gimplify_status ret;
11158 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11159 tree label;
11161 save_expr = *expr_p;
11162 if (save_expr == NULL_TREE)
11163 return GS_ALL_DONE;
11165 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11166 is_statement = gimple_test_f == is_gimple_stmt;
11167 if (is_statement)
11168 gcc_assert (pre_p);
11170 /* Consistency checks. */
11171 if (gimple_test_f == is_gimple_reg)
11172 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11173 else if (gimple_test_f == is_gimple_val
11174 || gimple_test_f == is_gimple_call_addr
11175 || gimple_test_f == is_gimple_condexpr
11176 || gimple_test_f == is_gimple_mem_rhs
11177 || gimple_test_f == is_gimple_mem_rhs_or_call
11178 || gimple_test_f == is_gimple_reg_rhs
11179 || gimple_test_f == is_gimple_reg_rhs_or_call
11180 || gimple_test_f == is_gimple_asm_val
11181 || gimple_test_f == is_gimple_mem_ref_addr)
11182 gcc_assert (fallback & fb_rvalue);
11183 else if (gimple_test_f == is_gimple_min_lval
11184 || gimple_test_f == is_gimple_lvalue)
11185 gcc_assert (fallback & fb_lvalue);
11186 else if (gimple_test_f == is_gimple_addressable)
11187 gcc_assert (fallback & fb_either);
11188 else if (gimple_test_f == is_gimple_stmt)
11189 gcc_assert (fallback == fb_none);
11190 else
11192 /* We should have recognized the GIMPLE_TEST_F predicate to
11193 know what kind of fallback to use in case a temporary is
11194 needed to hold the value or address of *EXPR_P. */
11195 gcc_unreachable ();
11198 /* We used to check the predicate here and return immediately if it
11199 succeeds. This is wrong; the design is for gimplification to be
11200 idempotent, and for the predicates to only test for valid forms, not
11201 whether they are fully simplified. */
11202 if (pre_p == NULL)
11203 pre_p = &internal_pre;
11205 if (post_p == NULL)
11206 post_p = &internal_post;
11208 /* Remember the last statements added to PRE_P and POST_P. Every
11209 new statement added by the gimplification helpers needs to be
11210 annotated with location information. To centralize the
11211 responsibility, we remember the last statement that had been
11212 added to both queues before gimplifying *EXPR_P. If
11213 gimplification produces new statements in PRE_P and POST_P, those
11214 statements will be annotated with the same location information
11215 as *EXPR_P. */
11216 pre_last_gsi = gsi_last (*pre_p);
11217 post_last_gsi = gsi_last (*post_p);
11219 saved_location = input_location;
11220 if (save_expr != error_mark_node
11221 && EXPR_HAS_LOCATION (*expr_p))
11222 input_location = EXPR_LOCATION (*expr_p);
11224 /* Loop over the specific gimplifiers until the toplevel node
11225 remains the same. */
11228 /* Strip away as many useless type conversions as possible
11229 at the toplevel. */
11230 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11232 /* Remember the expr. */
11233 save_expr = *expr_p;
11235 /* Die, die, die, my darling. */
11236 if (save_expr == error_mark_node
11237 || (TREE_TYPE (save_expr)
11238 && TREE_TYPE (save_expr) == error_mark_node))
11240 ret = GS_ERROR;
11241 break;
11244 /* Do any language-specific gimplification. */
11245 ret = ((enum gimplify_status)
11246 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11247 if (ret == GS_OK)
11249 if (*expr_p == NULL_TREE)
11250 break;
11251 if (*expr_p != save_expr)
11252 continue;
11254 else if (ret != GS_UNHANDLED)
11255 break;
11257 /* Make sure that all the cases set 'ret' appropriately. */
11258 ret = GS_UNHANDLED;
11259 switch (TREE_CODE (*expr_p))
11261 /* First deal with the special cases. */
11263 case POSTINCREMENT_EXPR:
11264 case POSTDECREMENT_EXPR:
11265 case PREINCREMENT_EXPR:
11266 case PREDECREMENT_EXPR:
11267 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11268 fallback != fb_none,
11269 TREE_TYPE (*expr_p));
11270 break;
11272 case VIEW_CONVERT_EXPR:
11273 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11274 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11276 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11277 post_p, is_gimple_val, fb_rvalue);
11278 recalculate_side_effects (*expr_p);
11279 break;
11281 /* Fallthru. */
11283 case ARRAY_REF:
11284 case ARRAY_RANGE_REF:
11285 case REALPART_EXPR:
11286 case IMAGPART_EXPR:
11287 case COMPONENT_REF:
11288 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11289 fallback ? fallback : fb_rvalue);
11290 break;
11292 case COND_EXPR:
11293 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11295 /* C99 code may assign to an array in a structure value of a
11296 conditional expression, and this has undefined behavior
11297 only on execution, so create a temporary if an lvalue is
11298 required. */
11299 if (fallback == fb_lvalue)
11301 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11302 mark_addressable (*expr_p);
11303 ret = GS_OK;
11305 break;
11307 case CALL_EXPR:
11308 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11310 /* C99 code may assign to an array in a structure returned
11311 from a function, and this has undefined behavior only on
11312 execution, so create a temporary if an lvalue is
11313 required. */
11314 if (fallback == fb_lvalue)
11316 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11317 mark_addressable (*expr_p);
11318 ret = GS_OK;
11320 break;
11322 case TREE_LIST:
11323 gcc_unreachable ();
11325 case COMPOUND_EXPR:
11326 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11327 break;
11329 case COMPOUND_LITERAL_EXPR:
11330 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11331 gimple_test_f, fallback);
11332 break;
11334 case MODIFY_EXPR:
11335 case INIT_EXPR:
11336 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11337 fallback != fb_none);
11338 break;
11340 case TRUTH_ANDIF_EXPR:
11341 case TRUTH_ORIF_EXPR:
11343 /* Preserve the original type of the expression and the
11344 source location of the outer expression. */
11345 tree org_type = TREE_TYPE (*expr_p);
11346 *expr_p = gimple_boolify (*expr_p);
11347 *expr_p = build3_loc (input_location, COND_EXPR,
11348 org_type, *expr_p,
11349 fold_convert_loc
11350 (input_location,
11351 org_type, boolean_true_node),
11352 fold_convert_loc
11353 (input_location,
11354 org_type, boolean_false_node));
11355 ret = GS_OK;
11356 break;
11359 case TRUTH_NOT_EXPR:
11361 tree type = TREE_TYPE (*expr_p);
11362 /* The parsers are careful to generate TRUTH_NOT_EXPR
11363 only with operands that are always zero or one.
11364 We do not fold here but handle the only interesting case
11365 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11366 *expr_p = gimple_boolify (*expr_p);
11367 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11368 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11369 TREE_TYPE (*expr_p),
11370 TREE_OPERAND (*expr_p, 0));
11371 else
11372 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11373 TREE_TYPE (*expr_p),
11374 TREE_OPERAND (*expr_p, 0),
11375 build_int_cst (TREE_TYPE (*expr_p), 1));
11376 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11377 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11378 ret = GS_OK;
11379 break;
11382 case ADDR_EXPR:
11383 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11384 break;
11386 case ANNOTATE_EXPR:
11388 tree cond = TREE_OPERAND (*expr_p, 0);
11389 tree kind = TREE_OPERAND (*expr_p, 1);
11390 tree type = TREE_TYPE (cond);
11391 if (!INTEGRAL_TYPE_P (type))
11393 *expr_p = cond;
11394 ret = GS_OK;
11395 break;
11397 tree tmp = create_tmp_var (type);
11398 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11399 gcall *call
11400 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11401 gimple_call_set_lhs (call, tmp);
11402 gimplify_seq_add_stmt (pre_p, call);
11403 *expr_p = tmp;
11404 ret = GS_ALL_DONE;
11405 break;
11408 case VA_ARG_EXPR:
11409 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11410 break;
11412 CASE_CONVERT:
11413 if (IS_EMPTY_STMT (*expr_p))
11415 ret = GS_ALL_DONE;
11416 break;
11419 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11420 || fallback == fb_none)
11422 /* Just strip a conversion to void (or in void context) and
11423 try again. */
11424 *expr_p = TREE_OPERAND (*expr_p, 0);
11425 ret = GS_OK;
11426 break;
11429 ret = gimplify_conversion (expr_p);
11430 if (ret == GS_ERROR)
11431 break;
11432 if (*expr_p != save_expr)
11433 break;
11434 /* FALLTHRU */
11436 case FIX_TRUNC_EXPR:
11437 /* unary_expr: ... | '(' cast ')' val | ... */
11438 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11439 is_gimple_val, fb_rvalue);
11440 recalculate_side_effects (*expr_p);
11441 break;
11443 case INDIRECT_REF:
11445 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11446 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11447 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11449 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11450 if (*expr_p != save_expr)
11452 ret = GS_OK;
11453 break;
11456 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11457 is_gimple_reg, fb_rvalue);
11458 if (ret == GS_ERROR)
11459 break;
11461 recalculate_side_effects (*expr_p);
11462 *expr_p = fold_build2_loc (input_location, MEM_REF,
11463 TREE_TYPE (*expr_p),
11464 TREE_OPERAND (*expr_p, 0),
11465 build_int_cst (saved_ptr_type, 0));
11466 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11467 TREE_THIS_NOTRAP (*expr_p) = notrap;
11468 ret = GS_OK;
11469 break;
11472 /* We arrive here through the various re-gimplifcation paths. */
11473 case MEM_REF:
11474 /* First try re-folding the whole thing. */
11475 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11476 TREE_OPERAND (*expr_p, 0),
11477 TREE_OPERAND (*expr_p, 1));
11478 if (tmp)
11480 REF_REVERSE_STORAGE_ORDER (tmp)
11481 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11482 *expr_p = tmp;
11483 recalculate_side_effects (*expr_p);
11484 ret = GS_OK;
11485 break;
11487 /* Avoid re-gimplifying the address operand if it is already
11488 in suitable form. Re-gimplifying would mark the address
11489 operand addressable. Always gimplify when not in SSA form
11490 as we still may have to gimplify decls with value-exprs. */
11491 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11492 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11494 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11495 is_gimple_mem_ref_addr, fb_rvalue);
11496 if (ret == GS_ERROR)
11497 break;
11499 recalculate_side_effects (*expr_p);
11500 ret = GS_ALL_DONE;
11501 break;
11503 /* Constants need not be gimplified. */
11504 case INTEGER_CST:
11505 case REAL_CST:
11506 case FIXED_CST:
11507 case STRING_CST:
11508 case COMPLEX_CST:
11509 case VECTOR_CST:
11510 /* Drop the overflow flag on constants, we do not want
11511 that in the GIMPLE IL. */
11512 if (TREE_OVERFLOW_P (*expr_p))
11513 *expr_p = drop_tree_overflow (*expr_p);
11514 ret = GS_ALL_DONE;
11515 break;
11517 case CONST_DECL:
11518 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11519 CONST_DECL node. Otherwise the decl is replaceable by its
11520 value. */
11521 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11522 if (fallback & fb_lvalue)
11523 ret = GS_ALL_DONE;
11524 else
11526 *expr_p = DECL_INITIAL (*expr_p);
11527 ret = GS_OK;
11529 break;
11531 case DECL_EXPR:
11532 ret = gimplify_decl_expr (expr_p, pre_p);
11533 break;
11535 case BIND_EXPR:
11536 ret = gimplify_bind_expr (expr_p, pre_p);
11537 break;
11539 case LOOP_EXPR:
11540 ret = gimplify_loop_expr (expr_p, pre_p);
11541 break;
11543 case SWITCH_EXPR:
11544 ret = gimplify_switch_expr (expr_p, pre_p);
11545 break;
11547 case EXIT_EXPR:
11548 ret = gimplify_exit_expr (expr_p);
11549 break;
11551 case GOTO_EXPR:
11552 /* If the target is not LABEL, then it is a computed jump
11553 and the target needs to be gimplified. */
11554 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11556 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11557 NULL, is_gimple_val, fb_rvalue);
11558 if (ret == GS_ERROR)
11559 break;
11561 gimplify_seq_add_stmt (pre_p,
11562 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11563 ret = GS_ALL_DONE;
11564 break;
11566 case PREDICT_EXPR:
11567 gimplify_seq_add_stmt (pre_p,
11568 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11569 PREDICT_EXPR_OUTCOME (*expr_p)));
11570 ret = GS_ALL_DONE;
11571 break;
11573 case LABEL_EXPR:
11574 ret = gimplify_label_expr (expr_p, pre_p);
11575 label = LABEL_EXPR_LABEL (*expr_p);
11576 gcc_assert (decl_function_context (label) == current_function_decl);
11578 /* If the label is used in a goto statement, or address of the label
11579 is taken, we need to unpoison all variables that were seen so far.
11580 Doing so would prevent us from reporting a false positives. */
11581 if (asan_poisoned_variables
11582 && asan_used_labels != NULL
11583 && asan_used_labels->contains (label))
11584 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11585 break;
11587 case CASE_LABEL_EXPR:
11588 ret = gimplify_case_label_expr (expr_p, pre_p);
11590 if (gimplify_ctxp->live_switch_vars)
11591 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11592 pre_p);
11593 break;
11595 case RETURN_EXPR:
11596 ret = gimplify_return_expr (*expr_p, pre_p);
11597 break;
11599 case CONSTRUCTOR:
11600 /* Don't reduce this in place; let gimplify_init_constructor work its
11601 magic. Buf if we're just elaborating this for side effects, just
11602 gimplify any element that has side-effects. */
11603 if (fallback == fb_none)
11605 unsigned HOST_WIDE_INT ix;
11606 tree val;
11607 tree temp = NULL_TREE;
11608 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11609 if (TREE_SIDE_EFFECTS (val))
11610 append_to_statement_list (val, &temp);
11612 *expr_p = temp;
11613 ret = temp ? GS_OK : GS_ALL_DONE;
11615 /* C99 code may assign to an array in a constructed
11616 structure or union, and this has undefined behavior only
11617 on execution, so create a temporary if an lvalue is
11618 required. */
11619 else if (fallback == fb_lvalue)
11621 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11622 mark_addressable (*expr_p);
11623 ret = GS_OK;
11625 else
11626 ret = GS_ALL_DONE;
11627 break;
11629 /* The following are special cases that are not handled by the
11630 original GIMPLE grammar. */
11632 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11633 eliminated. */
11634 case SAVE_EXPR:
11635 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11636 break;
11638 case BIT_FIELD_REF:
11639 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11640 post_p, is_gimple_lvalue, fb_either);
11641 recalculate_side_effects (*expr_p);
11642 break;
11644 case TARGET_MEM_REF:
11646 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11648 if (TMR_BASE (*expr_p))
11649 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11650 post_p, is_gimple_mem_ref_addr, fb_either);
11651 if (TMR_INDEX (*expr_p))
11652 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11653 post_p, is_gimple_val, fb_rvalue);
11654 if (TMR_INDEX2 (*expr_p))
11655 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11656 post_p, is_gimple_val, fb_rvalue);
11657 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11658 ret = MIN (r0, r1);
11660 break;
11662 case NON_LVALUE_EXPR:
11663 /* This should have been stripped above. */
11664 gcc_unreachable ();
11666 case ASM_EXPR:
11667 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11668 break;
11670 case TRY_FINALLY_EXPR:
11671 case TRY_CATCH_EXPR:
11673 gimple_seq eval, cleanup;
11674 gtry *try_;
11676 /* Calls to destructors are generated automatically in FINALLY/CATCH
11677 block. They should have location as UNKNOWN_LOCATION. However,
11678 gimplify_call_expr will reset these call stmts to input_location
11679 if it finds stmt's location is unknown. To prevent resetting for
11680 destructors, we set the input_location to unknown.
11681 Note that this only affects the destructor calls in FINALLY/CATCH
11682 block, and will automatically reset to its original value by the
11683 end of gimplify_expr. */
11684 input_location = UNKNOWN_LOCATION;
11685 eval = cleanup = NULL;
11686 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11687 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11688 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11689 if (gimple_seq_empty_p (cleanup))
11691 gimple_seq_add_seq (pre_p, eval);
11692 ret = GS_ALL_DONE;
11693 break;
11695 try_ = gimple_build_try (eval, cleanup,
11696 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11697 ? GIMPLE_TRY_FINALLY
11698 : GIMPLE_TRY_CATCH);
11699 if (EXPR_HAS_LOCATION (save_expr))
11700 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11701 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11702 gimple_set_location (try_, saved_location);
11703 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11704 gimple_try_set_catch_is_cleanup (try_,
11705 TRY_CATCH_IS_CLEANUP (*expr_p));
11706 gimplify_seq_add_stmt (pre_p, try_);
11707 ret = GS_ALL_DONE;
11708 break;
11711 case CLEANUP_POINT_EXPR:
11712 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11713 break;
11715 case TARGET_EXPR:
11716 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11717 break;
11719 case CATCH_EXPR:
11721 gimple *c;
11722 gimple_seq handler = NULL;
11723 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11724 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11725 gimplify_seq_add_stmt (pre_p, c);
11726 ret = GS_ALL_DONE;
11727 break;
11730 case EH_FILTER_EXPR:
11732 gimple *ehf;
11733 gimple_seq failure = NULL;
11735 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11736 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11737 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11738 gimplify_seq_add_stmt (pre_p, ehf);
11739 ret = GS_ALL_DONE;
11740 break;
11743 case OBJ_TYPE_REF:
11745 enum gimplify_status r0, r1;
11746 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11747 post_p, is_gimple_val, fb_rvalue);
11748 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11749 post_p, is_gimple_val, fb_rvalue);
11750 TREE_SIDE_EFFECTS (*expr_p) = 0;
11751 ret = MIN (r0, r1);
11753 break;
11755 case LABEL_DECL:
11756 /* We get here when taking the address of a label. We mark
11757 the label as "forced"; meaning it can never be removed and
11758 it is a potential target for any computed goto. */
11759 FORCED_LABEL (*expr_p) = 1;
11760 ret = GS_ALL_DONE;
11761 break;
11763 case STATEMENT_LIST:
11764 ret = gimplify_statement_list (expr_p, pre_p);
11765 break;
11767 case WITH_SIZE_EXPR:
11769 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11770 post_p == &internal_post ? NULL : post_p,
11771 gimple_test_f, fallback);
11772 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11773 is_gimple_val, fb_rvalue);
11774 ret = GS_ALL_DONE;
11776 break;
11778 case VAR_DECL:
11779 case PARM_DECL:
11780 ret = gimplify_var_or_parm_decl (expr_p);
11781 break;
11783 case RESULT_DECL:
11784 /* When within an OMP context, notice uses of variables. */
11785 if (gimplify_omp_ctxp)
11786 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11787 ret = GS_ALL_DONE;
11788 break;
11790 case SSA_NAME:
11791 /* Allow callbacks into the gimplifier during optimization. */
11792 ret = GS_ALL_DONE;
11793 break;
11795 case OMP_PARALLEL:
11796 gimplify_omp_parallel (expr_p, pre_p);
11797 ret = GS_ALL_DONE;
11798 break;
11800 case OMP_TASK:
11801 gimplify_omp_task (expr_p, pre_p);
11802 ret = GS_ALL_DONE;
11803 break;
11805 case OMP_FOR:
11806 case OMP_SIMD:
11807 case CILK_SIMD:
11808 case CILK_FOR:
11809 case OMP_DISTRIBUTE:
11810 case OMP_TASKLOOP:
11811 case OACC_LOOP:
11812 ret = gimplify_omp_for (expr_p, pre_p);
11813 break;
11815 case OACC_CACHE:
11816 gimplify_oacc_cache (expr_p, pre_p);
11817 ret = GS_ALL_DONE;
11818 break;
11820 case OACC_DECLARE:
11821 gimplify_oacc_declare (expr_p, pre_p);
11822 ret = GS_ALL_DONE;
11823 break;
11825 case OACC_HOST_DATA:
11826 case OACC_DATA:
11827 case OACC_KERNELS:
11828 case OACC_PARALLEL:
11829 case OMP_SECTIONS:
11830 case OMP_SINGLE:
11831 case OMP_TARGET:
11832 case OMP_TARGET_DATA:
11833 case OMP_TEAMS:
11834 gimplify_omp_workshare (expr_p, pre_p);
11835 ret = GS_ALL_DONE;
11836 break;
11838 case OACC_ENTER_DATA:
11839 case OACC_EXIT_DATA:
11840 case OACC_UPDATE:
11841 case OMP_TARGET_UPDATE:
11842 case OMP_TARGET_ENTER_DATA:
11843 case OMP_TARGET_EXIT_DATA:
11844 gimplify_omp_target_update (expr_p, pre_p);
11845 ret = GS_ALL_DONE;
11846 break;
11848 case OMP_SECTION:
11849 case OMP_MASTER:
11850 case OMP_TASKGROUP:
11851 case OMP_ORDERED:
11852 case OMP_CRITICAL:
11854 gimple_seq body = NULL;
11855 gimple *g;
11857 gimplify_and_add (OMP_BODY (*expr_p), &body);
11858 switch (TREE_CODE (*expr_p))
11860 case OMP_SECTION:
11861 g = gimple_build_omp_section (body);
11862 break;
11863 case OMP_MASTER:
11864 g = gimple_build_omp_master (body);
11865 break;
11866 case OMP_TASKGROUP:
11868 gimple_seq cleanup = NULL;
11869 tree fn
11870 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11871 g = gimple_build_call (fn, 0);
11872 gimple_seq_add_stmt (&cleanup, g);
11873 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11874 body = NULL;
11875 gimple_seq_add_stmt (&body, g);
11876 g = gimple_build_omp_taskgroup (body);
11878 break;
11879 case OMP_ORDERED:
11880 g = gimplify_omp_ordered (*expr_p, body);
11881 break;
11882 case OMP_CRITICAL:
11883 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11884 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11885 gimplify_adjust_omp_clauses (pre_p, body,
11886 &OMP_CRITICAL_CLAUSES (*expr_p),
11887 OMP_CRITICAL);
11888 g = gimple_build_omp_critical (body,
11889 OMP_CRITICAL_NAME (*expr_p),
11890 OMP_CRITICAL_CLAUSES (*expr_p));
11891 break;
11892 default:
11893 gcc_unreachable ();
11895 gimplify_seq_add_stmt (pre_p, g);
11896 ret = GS_ALL_DONE;
11897 break;
11900 case OMP_ATOMIC:
11901 case OMP_ATOMIC_READ:
11902 case OMP_ATOMIC_CAPTURE_OLD:
11903 case OMP_ATOMIC_CAPTURE_NEW:
11904 ret = gimplify_omp_atomic (expr_p, pre_p);
11905 break;
11907 case TRANSACTION_EXPR:
11908 ret = gimplify_transaction (expr_p, pre_p);
11909 break;
11911 case TRUTH_AND_EXPR:
11912 case TRUTH_OR_EXPR:
11913 case TRUTH_XOR_EXPR:
11915 tree orig_type = TREE_TYPE (*expr_p);
11916 tree new_type, xop0, xop1;
11917 *expr_p = gimple_boolify (*expr_p);
11918 new_type = TREE_TYPE (*expr_p);
11919 if (!useless_type_conversion_p (orig_type, new_type))
11921 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11922 ret = GS_OK;
11923 break;
11926 /* Boolified binary truth expressions are semantically equivalent
11927 to bitwise binary expressions. Canonicalize them to the
11928 bitwise variant. */
11929 switch (TREE_CODE (*expr_p))
11931 case TRUTH_AND_EXPR:
11932 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11933 break;
11934 case TRUTH_OR_EXPR:
11935 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11936 break;
11937 case TRUTH_XOR_EXPR:
11938 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11939 break;
11940 default:
11941 break;
11943 /* Now make sure that operands have compatible type to
11944 expression's new_type. */
11945 xop0 = TREE_OPERAND (*expr_p, 0);
11946 xop1 = TREE_OPERAND (*expr_p, 1);
11947 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11948 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11949 new_type,
11950 xop0);
11951 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11952 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11953 new_type,
11954 xop1);
11955 /* Continue classified as tcc_binary. */
11956 goto expr_2;
11959 case VEC_COND_EXPR:
11961 enum gimplify_status r0, r1, r2;
11963 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11964 post_p, is_gimple_condexpr, fb_rvalue);
11965 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11966 post_p, is_gimple_val, fb_rvalue);
11967 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11968 post_p, is_gimple_val, fb_rvalue);
11970 ret = MIN (MIN (r0, r1), r2);
11971 recalculate_side_effects (*expr_p);
11973 break;
11975 case FMA_EXPR:
11976 case VEC_PERM_EXPR:
11977 /* Classified as tcc_expression. */
11978 goto expr_3;
11980 case BIT_INSERT_EXPR:
11981 /* Argument 3 is a constant. */
11982 goto expr_2;
11984 case POINTER_PLUS_EXPR:
11986 enum gimplify_status r0, r1;
11987 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11988 post_p, is_gimple_val, fb_rvalue);
11989 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11990 post_p, is_gimple_val, fb_rvalue);
11991 recalculate_side_effects (*expr_p);
11992 ret = MIN (r0, r1);
11993 break;
11996 case CILK_SYNC_STMT:
11998 if (!fn_contains_cilk_spawn_p (cfun))
12000 error_at (EXPR_LOCATION (*expr_p),
12001 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
12002 ret = GS_ERROR;
12004 else
12006 gimplify_cilk_sync (expr_p, pre_p);
12007 ret = GS_ALL_DONE;
12009 break;
12012 default:
12013 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12015 case tcc_comparison:
12016 /* Handle comparison of objects of non scalar mode aggregates
12017 with a call to memcmp. It would be nice to only have to do
12018 this for variable-sized objects, but then we'd have to allow
12019 the same nest of reference nodes we allow for MODIFY_EXPR and
12020 that's too complex.
12022 Compare scalar mode aggregates as scalar mode values. Using
12023 memcmp for them would be very inefficient at best, and is
12024 plain wrong if bitfields are involved. */
12026 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12028 /* Vector comparisons need no boolification. */
12029 if (TREE_CODE (type) == VECTOR_TYPE)
12030 goto expr_2;
12031 else if (!AGGREGATE_TYPE_P (type))
12033 tree org_type = TREE_TYPE (*expr_p);
12034 *expr_p = gimple_boolify (*expr_p);
12035 if (!useless_type_conversion_p (org_type,
12036 TREE_TYPE (*expr_p)))
12038 *expr_p = fold_convert_loc (input_location,
12039 org_type, *expr_p);
12040 ret = GS_OK;
12042 else
12043 goto expr_2;
12045 else if (TYPE_MODE (type) != BLKmode)
12046 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12047 else
12048 ret = gimplify_variable_sized_compare (expr_p);
12050 break;
12053 /* If *EXPR_P does not need to be special-cased, handle it
12054 according to its class. */
12055 case tcc_unary:
12056 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12057 post_p, is_gimple_val, fb_rvalue);
12058 break;
12060 case tcc_binary:
12061 expr_2:
12063 enum gimplify_status r0, r1;
12065 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12066 post_p, is_gimple_val, fb_rvalue);
12067 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12068 post_p, is_gimple_val, fb_rvalue);
12070 ret = MIN (r0, r1);
12071 break;
12074 expr_3:
12076 enum gimplify_status r0, r1, r2;
12078 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12079 post_p, is_gimple_val, fb_rvalue);
12080 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12081 post_p, is_gimple_val, fb_rvalue);
12082 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12083 post_p, is_gimple_val, fb_rvalue);
12085 ret = MIN (MIN (r0, r1), r2);
12086 break;
12089 case tcc_declaration:
12090 case tcc_constant:
12091 ret = GS_ALL_DONE;
12092 goto dont_recalculate;
12094 default:
12095 gcc_unreachable ();
12098 recalculate_side_effects (*expr_p);
12100 dont_recalculate:
12101 break;
12104 gcc_assert (*expr_p || ret != GS_OK);
12106 while (ret == GS_OK);
12108 /* If we encountered an error_mark somewhere nested inside, either
12109 stub out the statement or propagate the error back out. */
12110 if (ret == GS_ERROR)
12112 if (is_statement)
12113 *expr_p = NULL;
12114 goto out;
12117 /* This was only valid as a return value from the langhook, which
12118 we handled. Make sure it doesn't escape from any other context. */
12119 gcc_assert (ret != GS_UNHANDLED);
12121 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12123 /* We aren't looking for a value, and we don't have a valid
12124 statement. If it doesn't have side-effects, throw it away.
12125 We can also get here with code such as "*&&L;", where L is
12126 a LABEL_DECL that is marked as FORCED_LABEL. */
12127 if (TREE_CODE (*expr_p) == LABEL_DECL
12128 || !TREE_SIDE_EFFECTS (*expr_p))
12129 *expr_p = NULL;
12130 else if (!TREE_THIS_VOLATILE (*expr_p))
12132 /* This is probably a _REF that contains something nested that
12133 has side effects. Recurse through the operands to find it. */
12134 enum tree_code code = TREE_CODE (*expr_p);
12136 switch (code)
12138 case COMPONENT_REF:
12139 case REALPART_EXPR:
12140 case IMAGPART_EXPR:
12141 case VIEW_CONVERT_EXPR:
12142 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12143 gimple_test_f, fallback);
12144 break;
12146 case ARRAY_REF:
12147 case ARRAY_RANGE_REF:
12148 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12149 gimple_test_f, fallback);
12150 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12151 gimple_test_f, fallback);
12152 break;
12154 default:
12155 /* Anything else with side-effects must be converted to
12156 a valid statement before we get here. */
12157 gcc_unreachable ();
12160 *expr_p = NULL;
12162 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12163 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12165 /* Historically, the compiler has treated a bare reference
12166 to a non-BLKmode volatile lvalue as forcing a load. */
12167 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12169 /* Normally, we do not want to create a temporary for a
12170 TREE_ADDRESSABLE type because such a type should not be
12171 copied by bitwise-assignment. However, we make an
12172 exception here, as all we are doing here is ensuring that
12173 we read the bytes that make up the type. We use
12174 create_tmp_var_raw because create_tmp_var will abort when
12175 given a TREE_ADDRESSABLE type. */
12176 tree tmp = create_tmp_var_raw (type, "vol");
12177 gimple_add_tmp_var (tmp);
12178 gimplify_assign (tmp, *expr_p, pre_p);
12179 *expr_p = NULL;
12181 else
12182 /* We can't do anything useful with a volatile reference to
12183 an incomplete type, so just throw it away. Likewise for
12184 a BLKmode type, since any implicit inner load should
12185 already have been turned into an explicit one by the
12186 gimplification process. */
12187 *expr_p = NULL;
12190 /* If we are gimplifying at the statement level, we're done. Tack
12191 everything together and return. */
12192 if (fallback == fb_none || is_statement)
12194 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12195 it out for GC to reclaim it. */
12196 *expr_p = NULL_TREE;
12198 if (!gimple_seq_empty_p (internal_pre)
12199 || !gimple_seq_empty_p (internal_post))
12201 gimplify_seq_add_seq (&internal_pre, internal_post);
12202 gimplify_seq_add_seq (pre_p, internal_pre);
12205 /* The result of gimplifying *EXPR_P is going to be the last few
12206 statements in *PRE_P and *POST_P. Add location information
12207 to all the statements that were added by the gimplification
12208 helpers. */
12209 if (!gimple_seq_empty_p (*pre_p))
12210 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12212 if (!gimple_seq_empty_p (*post_p))
12213 annotate_all_with_location_after (*post_p, post_last_gsi,
12214 input_location);
12216 goto out;
12219 #ifdef ENABLE_GIMPLE_CHECKING
12220 if (*expr_p)
12222 enum tree_code code = TREE_CODE (*expr_p);
12223 /* These expressions should already be in gimple IR form. */
12224 gcc_assert (code != MODIFY_EXPR
12225 && code != ASM_EXPR
12226 && code != BIND_EXPR
12227 && code != CATCH_EXPR
12228 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12229 && code != EH_FILTER_EXPR
12230 && code != GOTO_EXPR
12231 && code != LABEL_EXPR
12232 && code != LOOP_EXPR
12233 && code != SWITCH_EXPR
12234 && code != TRY_FINALLY_EXPR
12235 && code != OACC_PARALLEL
12236 && code != OACC_KERNELS
12237 && code != OACC_DATA
12238 && code != OACC_HOST_DATA
12239 && code != OACC_DECLARE
12240 && code != OACC_UPDATE
12241 && code != OACC_ENTER_DATA
12242 && code != OACC_EXIT_DATA
12243 && code != OACC_CACHE
12244 && code != OMP_CRITICAL
12245 && code != OMP_FOR
12246 && code != OACC_LOOP
12247 && code != OMP_MASTER
12248 && code != OMP_TASKGROUP
12249 && code != OMP_ORDERED
12250 && code != OMP_PARALLEL
12251 && code != OMP_SECTIONS
12252 && code != OMP_SECTION
12253 && code != OMP_SINGLE);
12255 #endif
12257 /* Otherwise we're gimplifying a subexpression, so the resulting
12258 value is interesting. If it's a valid operand that matches
12259 GIMPLE_TEST_F, we're done. Unless we are handling some
12260 post-effects internally; if that's the case, we need to copy into
12261 a temporary before adding the post-effects to POST_P. */
12262 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12263 goto out;
12265 /* Otherwise, we need to create a new temporary for the gimplified
12266 expression. */
12268 /* We can't return an lvalue if we have an internal postqueue. The
12269 object the lvalue refers to would (probably) be modified by the
12270 postqueue; we need to copy the value out first, which means an
12271 rvalue. */
12272 if ((fallback & fb_lvalue)
12273 && gimple_seq_empty_p (internal_post)
12274 && is_gimple_addressable (*expr_p))
12276 /* An lvalue will do. Take the address of the expression, store it
12277 in a temporary, and replace the expression with an INDIRECT_REF of
12278 that temporary. */
12279 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12280 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12281 *expr_p = build_simple_mem_ref (tmp);
12283 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12285 /* An rvalue will do. Assign the gimplified expression into a
12286 new temporary TMP and replace the original expression with
12287 TMP. First, make sure that the expression has a type so that
12288 it can be assigned into a temporary. */
12289 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12290 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12292 else
12294 #ifdef ENABLE_GIMPLE_CHECKING
12295 if (!(fallback & fb_mayfail))
12297 fprintf (stderr, "gimplification failed:\n");
12298 print_generic_expr (stderr, *expr_p);
12299 debug_tree (*expr_p);
12300 internal_error ("gimplification failed");
12302 #endif
12303 gcc_assert (fallback & fb_mayfail);
12305 /* If this is an asm statement, and the user asked for the
12306 impossible, don't die. Fail and let gimplify_asm_expr
12307 issue an error. */
12308 ret = GS_ERROR;
12309 goto out;
12312 /* Make sure the temporary matches our predicate. */
12313 gcc_assert ((*gimple_test_f) (*expr_p));
12315 if (!gimple_seq_empty_p (internal_post))
12317 annotate_all_with_location (internal_post, input_location);
12318 gimplify_seq_add_seq (pre_p, internal_post);
12321 out:
12322 input_location = saved_location;
12323 return ret;
12326 /* Like gimplify_expr but make sure the gimplified result is not itself
12327 a SSA name (but a decl if it were). Temporaries required by
12328 evaluating *EXPR_P may be still SSA names. */
12330 static enum gimplify_status
12331 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12332 bool (*gimple_test_f) (tree), fallback_t fallback,
12333 bool allow_ssa)
12335 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12336 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12337 gimple_test_f, fallback);
12338 if (! allow_ssa
12339 && TREE_CODE (*expr_p) == SSA_NAME)
12341 tree name = *expr_p;
12342 if (was_ssa_name_p)
12343 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12344 else
12346 /* Avoid the extra copy if possible. */
12347 *expr_p = create_tmp_reg (TREE_TYPE (name));
12348 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12349 release_ssa_name (name);
12352 return ret;
12355 /* Look through TYPE for variable-sized objects and gimplify each such
12356 size that we find. Add to LIST_P any statements generated. */
12358 void
12359 gimplify_type_sizes (tree type, gimple_seq *list_p)
12361 tree field, t;
12363 if (type == NULL || type == error_mark_node)
12364 return;
12366 /* We first do the main variant, then copy into any other variants. */
12367 type = TYPE_MAIN_VARIANT (type);
12369 /* Avoid infinite recursion. */
12370 if (TYPE_SIZES_GIMPLIFIED (type))
12371 return;
12373 TYPE_SIZES_GIMPLIFIED (type) = 1;
12375 switch (TREE_CODE (type))
12377 case INTEGER_TYPE:
12378 case ENUMERAL_TYPE:
12379 case BOOLEAN_TYPE:
12380 case REAL_TYPE:
12381 case FIXED_POINT_TYPE:
12382 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12383 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12385 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12387 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12388 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12390 break;
12392 case ARRAY_TYPE:
12393 /* These types may not have declarations, so handle them here. */
12394 gimplify_type_sizes (TREE_TYPE (type), list_p);
12395 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12396 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12397 with assigned stack slots, for -O1+ -g they should be tracked
12398 by VTA. */
12399 if (!(TYPE_NAME (type)
12400 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12401 && DECL_IGNORED_P (TYPE_NAME (type)))
12402 && TYPE_DOMAIN (type)
12403 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12405 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12406 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12407 DECL_IGNORED_P (t) = 0;
12408 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12409 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12410 DECL_IGNORED_P (t) = 0;
12412 break;
12414 case RECORD_TYPE:
12415 case UNION_TYPE:
12416 case QUAL_UNION_TYPE:
12417 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12418 if (TREE_CODE (field) == FIELD_DECL)
12420 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12421 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12422 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12423 gimplify_type_sizes (TREE_TYPE (field), list_p);
12425 break;
12427 case POINTER_TYPE:
12428 case REFERENCE_TYPE:
12429 /* We used to recurse on the pointed-to type here, which turned out to
12430 be incorrect because its definition might refer to variables not
12431 yet initialized at this point if a forward declaration is involved.
12433 It was actually useful for anonymous pointed-to types to ensure
12434 that the sizes evaluation dominates every possible later use of the
12435 values. Restricting to such types here would be safe since there
12436 is no possible forward declaration around, but would introduce an
12437 undesirable middle-end semantic to anonymity. We then defer to
12438 front-ends the responsibility of ensuring that the sizes are
12439 evaluated both early and late enough, e.g. by attaching artificial
12440 type declarations to the tree. */
12441 break;
12443 default:
12444 break;
12447 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12448 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12450 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12452 TYPE_SIZE (t) = TYPE_SIZE (type);
12453 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12454 TYPE_SIZES_GIMPLIFIED (t) = 1;
12458 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12459 a size or position, has had all of its SAVE_EXPRs evaluated.
12460 We add any required statements to *STMT_P. */
12462 void
12463 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12465 tree expr = *expr_p;
12467 /* We don't do anything if the value isn't there, is constant, or contains
12468 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12469 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12470 will want to replace it with a new variable, but that will cause problems
12471 if this type is from outside the function. It's OK to have that here. */
12472 if (is_gimple_sizepos (expr))
12473 return;
12475 *expr_p = unshare_expr (expr);
12477 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12478 if the def vanishes. */
12479 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12482 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12483 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12484 is true, also gimplify the parameters. */
12486 gbind *
12487 gimplify_body (tree fndecl, bool do_parms)
12489 location_t saved_location = input_location;
12490 gimple_seq parm_stmts, seq;
12491 gimple *outer_stmt;
12492 gbind *outer_bind;
12493 struct cgraph_node *cgn;
12495 timevar_push (TV_TREE_GIMPLIFY);
12497 init_tree_ssa (cfun);
12499 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12500 gimplification. */
12501 default_rtl_profile ();
12503 gcc_assert (gimplify_ctxp == NULL);
12504 push_gimplify_context (true);
12506 if (flag_openacc || flag_openmp)
12508 gcc_assert (gimplify_omp_ctxp == NULL);
12509 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12510 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12513 /* Unshare most shared trees in the body and in that of any nested functions.
12514 It would seem we don't have to do this for nested functions because
12515 they are supposed to be output and then the outer function gimplified
12516 first, but the g++ front end doesn't always do it that way. */
12517 unshare_body (fndecl);
12518 unvisit_body (fndecl);
12520 cgn = cgraph_node::get (fndecl);
12521 if (cgn && cgn->origin)
12522 nonlocal_vlas = new hash_set<tree>;
12524 /* Make sure input_location isn't set to something weird. */
12525 input_location = DECL_SOURCE_LOCATION (fndecl);
12527 /* Resolve callee-copies. This has to be done before processing
12528 the body so that DECL_VALUE_EXPR gets processed correctly. */
12529 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12531 /* Gimplify the function's body. */
12532 seq = NULL;
12533 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12534 outer_stmt = gimple_seq_first_stmt (seq);
12535 if (!outer_stmt)
12537 outer_stmt = gimple_build_nop ();
12538 gimplify_seq_add_stmt (&seq, outer_stmt);
12541 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12542 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12543 if (gimple_code (outer_stmt) == GIMPLE_BIND
12544 && gimple_seq_first (seq) == gimple_seq_last (seq))
12545 outer_bind = as_a <gbind *> (outer_stmt);
12546 else
12547 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12549 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12551 /* If we had callee-copies statements, insert them at the beginning
12552 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12553 if (!gimple_seq_empty_p (parm_stmts))
12555 tree parm;
12557 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12558 gimple_bind_set_body (outer_bind, parm_stmts);
12560 for (parm = DECL_ARGUMENTS (current_function_decl);
12561 parm; parm = DECL_CHAIN (parm))
12562 if (DECL_HAS_VALUE_EXPR_P (parm))
12564 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12565 DECL_IGNORED_P (parm) = 0;
12569 if (nonlocal_vlas)
12571 if (nonlocal_vla_vars)
12573 /* tree-nested.c may later on call declare_vars (..., true);
12574 which relies on BLOCK_VARS chain to be the tail of the
12575 gimple_bind_vars chain. Ensure we don't violate that
12576 assumption. */
12577 if (gimple_bind_block (outer_bind)
12578 == DECL_INITIAL (current_function_decl))
12579 declare_vars (nonlocal_vla_vars, outer_bind, true);
12580 else
12581 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12582 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12583 nonlocal_vla_vars);
12584 nonlocal_vla_vars = NULL_TREE;
12586 delete nonlocal_vlas;
12587 nonlocal_vlas = NULL;
12590 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12591 && gimplify_omp_ctxp)
12593 delete_omp_context (gimplify_omp_ctxp);
12594 gimplify_omp_ctxp = NULL;
12597 pop_gimplify_context (outer_bind);
12598 gcc_assert (gimplify_ctxp == NULL);
12600 if (flag_checking && !seen_error ())
12601 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12603 timevar_pop (TV_TREE_GIMPLIFY);
12604 input_location = saved_location;
12606 return outer_bind;
12609 typedef char *char_p; /* For DEF_VEC_P. */
12611 /* Return whether we should exclude FNDECL from instrumentation. */
12613 static bool
12614 flag_instrument_functions_exclude_p (tree fndecl)
12616 vec<char_p> *v;
12618 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12619 if (v && v->length () > 0)
12621 const char *name;
12622 int i;
12623 char *s;
12625 name = lang_hooks.decl_printable_name (fndecl, 0);
12626 FOR_EACH_VEC_ELT (*v, i, s)
12627 if (strstr (name, s) != NULL)
12628 return true;
12631 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12632 if (v && v->length () > 0)
12634 const char *name;
12635 int i;
12636 char *s;
12638 name = DECL_SOURCE_FILE (fndecl);
12639 FOR_EACH_VEC_ELT (*v, i, s)
12640 if (strstr (name, s) != NULL)
12641 return true;
12644 return false;
12647 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12648 node for the function we want to gimplify.
12650 Return the sequence of GIMPLE statements corresponding to the body
12651 of FNDECL. */
12653 void
12654 gimplify_function_tree (tree fndecl)
12656 tree parm, ret;
12657 gimple_seq seq;
12658 gbind *bind;
12660 gcc_assert (!gimple_body (fndecl));
12662 if (DECL_STRUCT_FUNCTION (fndecl))
12663 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12664 else
12665 push_struct_function (fndecl);
12667 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12668 if necessary. */
12669 cfun->curr_properties |= PROP_gimple_lva;
12671 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12673 /* Preliminarily mark non-addressed complex variables as eligible
12674 for promotion to gimple registers. We'll transform their uses
12675 as we find them. */
12676 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12677 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12678 && !TREE_THIS_VOLATILE (parm)
12679 && !needs_to_live_in_memory (parm))
12680 DECL_GIMPLE_REG_P (parm) = 1;
12683 ret = DECL_RESULT (fndecl);
12684 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12685 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12686 && !needs_to_live_in_memory (ret))
12687 DECL_GIMPLE_REG_P (ret) = 1;
12689 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
12690 asan_poisoned_variables = new hash_set<tree> ();
12691 bind = gimplify_body (fndecl, true);
12692 if (asan_poisoned_variables)
12694 delete asan_poisoned_variables;
12695 asan_poisoned_variables = NULL;
12698 /* The tree body of the function is no longer needed, replace it
12699 with the new GIMPLE body. */
12700 seq = NULL;
12701 gimple_seq_add_stmt (&seq, bind);
12702 gimple_set_body (fndecl, seq);
12704 /* If we're instrumenting function entry/exit, then prepend the call to
12705 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12706 catch the exit hook. */
12707 /* ??? Add some way to ignore exceptions for this TFE. */
12708 if (flag_instrument_function_entry_exit
12709 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12710 /* Do not instrument extern inline functions. */
12711 && !(DECL_DECLARED_INLINE_P (fndecl)
12712 && DECL_EXTERNAL (fndecl)
12713 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12714 && !flag_instrument_functions_exclude_p (fndecl))
12716 tree x;
12717 gbind *new_bind;
12718 gimple *tf;
12719 gimple_seq cleanup = NULL, body = NULL;
12720 tree tmp_var;
12721 gcall *call;
12723 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12724 call = gimple_build_call (x, 1, integer_zero_node);
12725 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12726 gimple_call_set_lhs (call, tmp_var);
12727 gimplify_seq_add_stmt (&cleanup, call);
12728 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12729 call = gimple_build_call (x, 2,
12730 build_fold_addr_expr (current_function_decl),
12731 tmp_var);
12732 gimplify_seq_add_stmt (&cleanup, call);
12733 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12735 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12736 call = gimple_build_call (x, 1, integer_zero_node);
12737 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12738 gimple_call_set_lhs (call, tmp_var);
12739 gimplify_seq_add_stmt (&body, call);
12740 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12741 call = gimple_build_call (x, 2,
12742 build_fold_addr_expr (current_function_decl),
12743 tmp_var);
12744 gimplify_seq_add_stmt (&body, call);
12745 gimplify_seq_add_stmt (&body, tf);
12746 new_bind = gimple_build_bind (NULL, body, NULL);
12748 /* Replace the current function body with the body
12749 wrapped in the try/finally TF. */
12750 seq = NULL;
12751 gimple_seq_add_stmt (&seq, new_bind);
12752 gimple_set_body (fndecl, seq);
12753 bind = new_bind;
12756 if (sanitize_flags_p (SANITIZE_THREAD))
12758 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12759 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12760 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12761 /* Replace the current function body with the body
12762 wrapped in the try/finally TF. */
12763 seq = NULL;
12764 gimple_seq_add_stmt (&seq, new_bind);
12765 gimple_set_body (fndecl, seq);
12768 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12769 cfun->curr_properties |= PROP_gimple_any;
12771 pop_cfun ();
12773 dump_function (TDI_gimple, fndecl);
12776 /* Return a dummy expression of type TYPE in order to keep going after an
12777 error. */
12779 static tree
12780 dummy_object (tree type)
12782 tree t = build_int_cst (build_pointer_type (type), 0);
12783 return build2 (MEM_REF, type, t, t);
12786 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12787 builtin function, but a very special sort of operator. */
12789 enum gimplify_status
12790 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12791 gimple_seq *post_p ATTRIBUTE_UNUSED)
12793 tree promoted_type, have_va_type;
12794 tree valist = TREE_OPERAND (*expr_p, 0);
12795 tree type = TREE_TYPE (*expr_p);
12796 tree t, tag, aptag;
12797 location_t loc = EXPR_LOCATION (*expr_p);
12799 /* Verify that valist is of the proper type. */
12800 have_va_type = TREE_TYPE (valist);
12801 if (have_va_type == error_mark_node)
12802 return GS_ERROR;
12803 have_va_type = targetm.canonical_va_list_type (have_va_type);
12804 if (have_va_type == NULL_TREE
12805 && POINTER_TYPE_P (TREE_TYPE (valist)))
12806 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12807 have_va_type
12808 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12809 gcc_assert (have_va_type != NULL_TREE);
12811 /* Generate a diagnostic for requesting data of a type that cannot
12812 be passed through `...' due to type promotion at the call site. */
12813 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12814 != type)
12816 static bool gave_help;
12817 bool warned;
12818 /* Use the expansion point to handle cases such as passing bool (defined
12819 in a system header) through `...'. */
12820 source_location xloc
12821 = expansion_point_location_if_in_system_header (loc);
12823 /* Unfortunately, this is merely undefined, rather than a constraint
12824 violation, so we cannot make this an error. If this call is never
12825 executed, the program is still strictly conforming. */
12826 warned = warning_at (xloc, 0,
12827 "%qT is promoted to %qT when passed through %<...%>",
12828 type, promoted_type);
12829 if (!gave_help && warned)
12831 gave_help = true;
12832 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12833 promoted_type, type);
12836 /* We can, however, treat "undefined" any way we please.
12837 Call abort to encourage the user to fix the program. */
12838 if (warned)
12839 inform (xloc, "if this code is reached, the program will abort");
12840 /* Before the abort, allow the evaluation of the va_list
12841 expression to exit or longjmp. */
12842 gimplify_and_add (valist, pre_p);
12843 t = build_call_expr_loc (loc,
12844 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12845 gimplify_and_add (t, pre_p);
12847 /* This is dead code, but go ahead and finish so that the
12848 mode of the result comes out right. */
12849 *expr_p = dummy_object (type);
12850 return GS_ALL_DONE;
12853 tag = build_int_cst (build_pointer_type (type), 0);
12854 aptag = build_int_cst (TREE_TYPE (valist), 0);
12856 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12857 valist, tag, aptag);
12859 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12860 needs to be expanded. */
12861 cfun->curr_properties &= ~PROP_gimple_lva;
12863 return GS_OK;
12866 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12868 DST/SRC are the destination and source respectively. You can pass
12869 ungimplified trees in DST or SRC, in which case they will be
12870 converted to a gimple operand if necessary.
12872 This function returns the newly created GIMPLE_ASSIGN tuple. */
12874 gimple *
12875 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12877 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12878 gimplify_and_add (t, seq_p);
12879 ggc_free (t);
12880 return gimple_seq_last_stmt (*seq_p);
12883 inline hashval_t
12884 gimplify_hasher::hash (const elt_t *p)
12886 tree t = p->val;
12887 return iterative_hash_expr (t, 0);
12890 inline bool
12891 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12893 tree t1 = p1->val;
12894 tree t2 = p2->val;
12895 enum tree_code code = TREE_CODE (t1);
12897 if (TREE_CODE (t2) != code
12898 || TREE_TYPE (t1) != TREE_TYPE (t2))
12899 return false;
12901 if (!operand_equal_p (t1, t2, 0))
12902 return false;
12904 /* Only allow them to compare equal if they also hash equal; otherwise
12905 results are nondeterminate, and we fail bootstrap comparison. */
12906 gcc_checking_assert (hash (p1) == hash (p2));
12908 return true;