* doc/generic.texi (ANNOTATE_EXPR): Document 3rd operand.
[official-gcc.git] / gcc / gimplify.c
blob13dbbe0538d2faa9ce51f43e04930038eb9b11ca
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "cilk.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "dbgcnt.h"
70 /* Hash set of poisoned variables in a bind expr. */
71 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Per-variable data-sharing flags recorded while gimplifying OpenMP /
   OpenACC constructs.  The low-order values are data-sharing classes;
   higher bits are modifier flags on those classes.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Mask selecting the pure data-sharing classes.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OMP/ACC region currently being gimplified.  Values are bit
   flags so combined constructs can be expressed as ORs.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
148 /* Gimplify hashtable helper. */
150 struct gimplify_hasher : free_ptr_hash <elt_t>
152 static inline hashval_t hash (const elt_t *);
153 static inline bool equal (const elt_t *, const elt_t *);
156 struct gimplify_ctx
158 struct gimplify_ctx *prev_context;
160 vec<gbind *> bind_expr_stack;
161 tree temps;
162 gimple_seq conditional_cleanups;
163 tree exit_label;
164 tree return_temp;
166 vec<tree> case_labels;
167 hash_set<tree> *live_switch_vars;
168 /* The formal temporary table. Should this be persistent? */
169 hash_table<gimplify_hasher> *temp_htab;
171 int conditions;
172 unsigned into_ssa : 1;
173 unsigned allow_rhs_cond_expr : 1;
174 unsigned in_cleanup_point_expr : 1;
175 unsigned keep_stack : 1;
176 unsigned save_stack : 1;
177 unsigned in_switch_expr : 1;
180 struct gimplify_omp_ctx
182 struct gimplify_omp_ctx *outer_context;
183 splay_tree variables;
184 hash_set<tree> *privatized_types;
185 /* Iteration variables in an OMP_FOR. */
186 vec<tree> loop_iter_var;
187 location_t location;
188 enum omp_clause_default_kind default_kind;
189 enum omp_region_type region_type;
190 bool combined_loop;
191 bool distribute;
192 bool target_map_scalars_firstprivate;
193 bool target_map_pointers_as_0len_arrays;
194 bool target_firstprivatize_array_bases;
197 static struct gimplify_ctx *gimplify_ctxp;
198 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
200 /* Forward declaration. */
201 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
202 static hash_map<tree, tree> *oacc_declare_returns;
203 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
204 bool (*) (tree), fallback_t, bool);
206 /* Shorter alias name for the above function for use in gimplify.c
207 only. */
209 static inline void
210 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
212 gimple_seq_add_stmt_without_update (seq_p, gs);
215 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
216 NULL, a new sequence is allocated. This function is
217 similar to gimple_seq_add_seq, but does not scan the operands.
218 During gimplification, we need to manipulate statement sequences
219 before the def/use vectors have been constructed. */
221 static void
222 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
224 gimple_stmt_iterator si;
226 if (src == NULL)
227 return;
229 si = gsi_last (*dst_p);
230 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
234 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
235 and popping gimplify contexts. */
237 static struct gimplify_ctx *ctx_pool = NULL;
239 /* Return a gimplify context struct from the pool. */
241 static inline struct gimplify_ctx *
242 ctx_alloc (void)
244 struct gimplify_ctx * c = ctx_pool;
246 if (c)
247 ctx_pool = c->prev_context;
248 else
249 c = XNEW (struct gimplify_ctx);
251 memset (c, '\0', sizeof (*c));
252 return c;
255 /* Put gimplify context C back into the pool. */
257 static inline void
258 ctx_free (struct gimplify_ctx *c)
260 c->prev_context = ctx_pool;
261 ctx_pool = c;
264 /* Free allocated ctx stack memory. */
266 void
267 free_gimplify_stack (void)
269 struct gimplify_ctx *c;
271 while ((c = ctx_pool))
273 ctx_pool = c->prev_context;
274 free (c);
279 /* Set up a context for the gimplifier. */
281 void
282 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
284 struct gimplify_ctx *c = ctx_alloc ();
286 c->prev_context = gimplify_ctxp;
287 gimplify_ctxp = c;
288 gimplify_ctxp->into_ssa = in_ssa;
289 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
292 /* Tear down a context for the gimplifier. If BODY is non-null, then
293 put the temporaries into the outer BIND_EXPR. Otherwise, put them
294 in the local_decls.
296 BODY is not a sequence, but the first tuple in a sequence. */
298 void
299 pop_gimplify_context (gimple *body)
301 struct gimplify_ctx *c = gimplify_ctxp;
303 gcc_assert (c
304 && (!c->bind_expr_stack.exists ()
305 || c->bind_expr_stack.is_empty ()));
306 c->bind_expr_stack.release ();
307 gimplify_ctxp = c->prev_context;
309 if (body)
310 declare_vars (c->temps, body, false);
311 else
312 record_vars (c->temps);
314 delete c->temp_htab;
315 c->temp_htab = NULL;
316 ctx_free (c);
319 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
321 static void
322 gimple_push_bind_expr (gbind *bind_stmt)
324 gimplify_ctxp->bind_expr_stack.reserve (8);
325 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
328 /* Pop the first element off the stack of bindings. */
330 static void
331 gimple_pop_bind_expr (void)
333 gimplify_ctxp->bind_expr_stack.pop ();
336 /* Return the first element of the stack of bindings. */
338 gbind *
339 gimple_current_bind_expr (void)
341 return gimplify_ctxp->bind_expr_stack.last ();
344 /* Return the stack of bindings created during gimplification. */
346 vec<gbind *>
347 gimple_bind_expr_stack (void)
349 return gimplify_ctxp->bind_expr_stack;
352 /* Return true iff there is a COND_EXPR between us and the innermost
353 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
355 static bool
356 gimple_conditional_context (void)
358 return gimplify_ctxp->conditions > 0;
361 /* Note that we've entered a COND_EXPR. */
363 static void
364 gimple_push_condition (void)
366 #ifdef ENABLE_GIMPLE_CHECKING
367 if (gimplify_ctxp->conditions == 0)
368 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
369 #endif
370 ++(gimplify_ctxp->conditions);
373 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
374 now, add any conditional cleanups we've seen to the prequeue. */
376 static void
377 gimple_pop_condition (gimple_seq *pre_p)
379 int conds = --(gimplify_ctxp->conditions);
381 gcc_assert (conds >= 0);
382 if (conds == 0)
384 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
385 gimplify_ctxp->conditional_cleanups = NULL;
389 /* A stable comparison routine for use with splay trees and DECLs. */
391 static int
392 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
394 tree a = (tree) xa;
395 tree b = (tree) xb;
397 return DECL_UID (a) - DECL_UID (b);
400 /* Create a new omp construct that deals with variable remapping. */
402 static struct gimplify_omp_ctx *
403 new_omp_context (enum omp_region_type region_type)
405 struct gimplify_omp_ctx *c;
407 c = XCNEW (struct gimplify_omp_ctx);
408 c->outer_context = gimplify_omp_ctxp;
409 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
410 c->privatized_types = new hash_set<tree>;
411 c->location = input_location;
412 c->region_type = region_type;
413 if ((region_type & ORT_TASK) == 0)
414 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
415 else
416 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
418 return c;
421 /* Destroy an omp construct that deals with variable remapping. */
423 static void
424 delete_omp_context (struct gimplify_omp_ctx *c)
426 splay_tree_delete (c->variables);
427 delete c->privatized_types;
428 c->loop_iter_var.release ();
429 XDELETE (c);
432 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
433 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
435 /* Both gimplify the statement T and append it to *SEQ_P. This function
436 behaves exactly as gimplify_stmt, but you don't have to pass T as a
437 reference. */
439 void
440 gimplify_and_add (tree t, gimple_seq *seq_p)
442 gimplify_stmt (&t, seq_p);
445 /* Gimplify statement T into sequence *SEQ_P, and return the first
446 tuple in the sequence of generated tuples for this statement.
447 Return NULL if gimplifying T produced no tuples. */
449 static gimple *
450 gimplify_and_return_first (tree t, gimple_seq *seq_p)
452 gimple_stmt_iterator last = gsi_last (*seq_p);
454 gimplify_and_add (t, seq_p);
456 if (!gsi_end_p (last))
458 gsi_next (&last);
459 return gsi_stmt (last);
461 else
462 return gimple_seq_first_stmt (*seq_p);
465 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
466 LHS, or for a call argument. */
468 static bool
469 is_gimple_mem_rhs (tree t)
471 /* If we're dealing with a renamable type, either source or dest must be
472 a renamed variable. */
473 if (is_gimple_reg_type (TREE_TYPE (t)))
474 return is_gimple_val (t);
475 else
476 return is_gimple_val (t) || is_gimple_lvalue (t);
479 /* Return true if T is a CALL_EXPR or an expression that can be
480 assigned to a temporary. Note that this predicate should only be
481 used during gimplification. See the rationale for this in
482 gimplify_modify_expr. */
484 static bool
485 is_gimple_reg_rhs_or_call (tree t)
487 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
488 || TREE_CODE (t) == CALL_EXPR);
491 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
492 this predicate should only be used during gimplification. See the
493 rationale for this in gimplify_modify_expr. */
495 static bool
496 is_gimple_mem_rhs_or_call (tree t)
498 /* If we're dealing with a renamable type, either source or dest must be
499 a renamed variable. */
500 if (is_gimple_reg_type (TREE_TYPE (t)))
501 return is_gimple_val (t);
502 else
503 return (is_gimple_val (t)
504 || is_gimple_lvalue (t)
505 || TREE_CLOBBER_P (t)
506 || TREE_CODE (t) == CALL_EXPR);
509 /* Create a temporary with a name derived from VAL. Subroutine of
510 lookup_tmp_var; nobody else should call this function. */
512 static inline tree
513 create_tmp_from_val (tree val)
515 /* Drop all qualifiers and address-space information from the value type. */
516 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
517 tree var = create_tmp_var (type, get_name (val));
518 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
519 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
520 DECL_GIMPLE_REG_P (var) = 1;
521 return var;
524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
525 an existing expression temporary. */
527 static tree
528 lookup_tmp_var (tree val, bool is_formal)
530 tree ret;
532 /* If not optimizing, never really reuse a temporary. local-alloc
533 won't allocate any variable that is used in more than one basic
534 block, which means it will go into memory, causing much extra
535 work in reload and final and poorer code generation, outweighing
536 the extra memory allocation here. */
537 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
538 ret = create_tmp_from_val (val);
539 else
541 elt_t elt, *elt_p;
542 elt_t **slot;
544 elt.val = val;
545 if (!gimplify_ctxp->temp_htab)
546 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
547 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
548 if (*slot == NULL)
550 elt_p = XNEW (elt_t);
551 elt_p->val = val;
552 elt_p->temp = ret = create_tmp_from_val (val);
553 *slot = elt_p;
555 else
557 elt_p = *slot;
558 ret = elt_p->temp;
562 return ret;
565 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
567 static tree
568 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
569 bool is_formal, bool allow_ssa)
571 tree t, mod;
573 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
574 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
575 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
576 fb_rvalue);
578 if (allow_ssa
579 && gimplify_ctxp->into_ssa
580 && is_gimple_reg_type (TREE_TYPE (val)))
582 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
583 if (! gimple_in_ssa_p (cfun))
585 const char *name = get_name (val);
586 if (name)
587 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
590 else
591 t = lookup_tmp_var (val, is_formal);
593 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
595 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
597 /* gimplify_modify_expr might want to reduce this further. */
598 gimplify_and_add (mod, pre_p);
599 ggc_free (mod);
601 return t;
604 /* Return a formal temporary variable initialized with VAL. PRE_P is as
605 in gimplify_expr. Only use this function if:
607 1) The value of the unfactored expression represented by VAL will not
608 change between the initialization and use of the temporary, and
609 2) The temporary will not be otherwise modified.
611 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
612 and #2 means it is inappropriate for && temps.
614 For other cases, use get_initialized_tmp_var instead. */
616 tree
617 get_formal_tmp_var (tree val, gimple_seq *pre_p)
619 return internal_get_tmp_var (val, pre_p, NULL, true, true);
622 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
623 are as in gimplify_expr. */
625 tree
626 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
627 bool allow_ssa)
629 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
632 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
633 generate debug info for them; otherwise don't. */
635 void
636 declare_vars (tree vars, gimple *gs, bool debug_info)
638 tree last = vars;
639 if (last)
641 tree temps, block;
643 gbind *scope = as_a <gbind *> (gs);
645 temps = nreverse (last);
647 block = gimple_bind_block (scope);
648 gcc_assert (!block || TREE_CODE (block) == BLOCK);
649 if (!block || !debug_info)
651 DECL_CHAIN (last) = gimple_bind_vars (scope);
652 gimple_bind_set_vars (scope, temps);
654 else
656 /* We need to attach the nodes both to the BIND_EXPR and to its
657 associated BLOCK for debugging purposes. The key point here
658 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
659 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
660 if (BLOCK_VARS (block))
661 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
662 else
664 gimple_bind_set_vars (scope,
665 chainon (gimple_bind_vars (scope), temps));
666 BLOCK_VARS (block) = temps;
672 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
673 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
674 no such upper bound can be obtained. */
676 static void
677 force_constant_size (tree var)
679 /* The only attempt we make is by querying the maximum size of objects
680 of the variable's type. */
682 HOST_WIDE_INT max_size;
684 gcc_assert (VAR_P (var));
686 max_size = max_int_size_in_bytes (TREE_TYPE (var));
688 gcc_assert (max_size >= 0);
690 DECL_SIZE_UNIT (var)
691 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
692 DECL_SIZE (var)
693 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
696 /* Push the temporary variable TMP into the current binding. */
698 void
699 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
701 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
703 /* Later processing assumes that the object size is constant, which might
704 not be true at this point. Force the use of a constant upper bound in
705 this case. */
706 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
707 force_constant_size (tmp);
709 DECL_CONTEXT (tmp) = fn->decl;
710 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
712 record_vars_into (tmp, fn->decl);
715 /* Push the temporary variable TMP into the current binding. */
717 void
718 gimple_add_tmp_var (tree tmp)
720 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
726 force_constant_size (tmp);
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
731 if (gimplify_ctxp)
733 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx
741 && (ctx->region_type == ORT_WORKSHARE
742 || ctx->region_type == ORT_SIMD
743 || ctx->region_type == ORT_ACC))
744 ctx = ctx->outer_context;
745 if (ctx)
746 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
749 else if (cfun)
750 record_vars (tmp);
751 else
753 gimple_seq body_seq;
755 /* This case is for nested functions. We need to expose the locals
756 they create. */
757 body_seq = gimple_body (current_function_decl);
758 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
764 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
765 nodes that are referenced more than once in GENERIC functions. This is
766 necessary because gimplification (translation into GIMPLE) is performed
767 by modifying tree nodes in-place, so gimplication of a shared node in a
768 first context could generate an invalid GIMPLE form in a second context.
770 This is achieved with a simple mark/copy/unmark algorithm that walks the
771 GENERIC representation top-down, marks nodes with TREE_VISITED the first
772 time it encounters them, duplicates them if they already have TREE_VISITED
773 set, and finally removes the TREE_VISITED marks it has set.
775 The algorithm works only at the function level, i.e. it generates a GENERIC
776 representation of a function with no nodes shared within the function when
777 passed a GENERIC function (except for nodes that are allowed to be shared).
779 At the global level, it is also necessary to unshare tree nodes that are
780 referenced in more than one function, for the same aforementioned reason.
781 This requires some cooperation from the front-end. There are 2 strategies:
783 1. Manual unsharing. The front-end needs to call unshare_expr on every
784 expression that might end up being shared across functions.
786 2. Deep unsharing. This is an extension of regular unsharing. Instead
787 of calling unshare_expr on expressions that might be shared across
788 functions, the front-end pre-marks them with TREE_VISITED. This will
789 ensure that they are unshared on the first reference within functions
790 when the regular unsharing algorithm runs. The counterpart is that
791 this algorithm must look deeper than for manual unsharing, which is
792 specified by LANG_HOOKS_DEEP_UNSHARING.
794 If there are only few specific cases of node sharing across functions, it is
795 probably easier for a front-end to unshare the expressions manually. On the
796 contrary, if the expressions generated at the global level are as widespread
797 as expressions generated within functions, deep unsharing is very likely the
798 way to go. */
800 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
801 These nodes model computations that must be done once. If we were to
802 unshare something like SAVE_EXPR(i++), the gimplification process would
803 create wrong code. However, if DATA is non-null, it must hold a pointer
804 set that is used to unshare the subtrees of these nodes. */
806 static tree
807 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
809 tree t = *tp;
810 enum tree_code code = TREE_CODE (t);
812 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
813 copy their subtrees if we can make sure to do it only once. */
814 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
816 if (data && !((hash_set<tree> *)data)->add (t))
818 else
819 *walk_subtrees = 0;
822 /* Stop at types, decls, constants like copy_tree_r. */
823 else if (TREE_CODE_CLASS (code) == tcc_type
824 || TREE_CODE_CLASS (code) == tcc_declaration
825 || TREE_CODE_CLASS (code) == tcc_constant)
826 *walk_subtrees = 0;
828 /* Cope with the statement expression extension. */
829 else if (code == STATEMENT_LIST)
832 /* Leave the bulk of the work to copy_tree_r itself. */
833 else
834 copy_tree_r (tp, walk_subtrees, NULL);
836 return NULL_TREE;
839 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
840 If *TP has been visited already, then *TP is deeply copied by calling
841 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
843 static tree
844 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
846 tree t = *tp;
847 enum tree_code code = TREE_CODE (t);
849 /* Skip types, decls, and constants. But we do want to look at their
850 types and the bounds of types. Mark them as visited so we properly
851 unmark their subtrees on the unmark pass. If we've already seen them,
852 don't look down further. */
853 if (TREE_CODE_CLASS (code) == tcc_type
854 || TREE_CODE_CLASS (code) == tcc_declaration
855 || TREE_CODE_CLASS (code) == tcc_constant)
857 if (TREE_VISITED (t))
858 *walk_subtrees = 0;
859 else
860 TREE_VISITED (t) = 1;
863 /* If this node has been visited already, unshare it and don't look
864 any deeper. */
865 else if (TREE_VISITED (t))
867 walk_tree (tp, mostly_copy_tree_r, data, NULL);
868 *walk_subtrees = 0;
871 /* Otherwise, mark the node as visited and keep looking. */
872 else
873 TREE_VISITED (t) = 1;
875 return NULL_TREE;
878 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
879 copy_if_shared_r callback unmodified. */
881 static inline void
882 copy_if_shared (tree *tp, void *data)
884 walk_tree (tp, copy_if_shared_r, data, NULL);
887 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
888 any nested functions. */
890 static void
891 unshare_body (tree fndecl)
893 struct cgraph_node *cgn = cgraph_node::get (fndecl);
894 /* If the language requires deep unsharing, we need a pointer set to make
895 sure we don't repeatedly unshare subtrees of unshareable nodes. */
896 hash_set<tree> *visited
897 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
899 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
900 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
901 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
903 delete visited;
905 if (cgn)
906 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
907 unshare_body (cgn->decl);
910 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
911 Subtrees are walked until the first unvisited node is encountered. */
913 static tree
914 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
916 tree t = *tp;
918 /* If this node has been visited, unmark it and keep looking. */
919 if (TREE_VISITED (t))
920 TREE_VISITED (t) = 0;
922 /* Otherwise, don't look any deeper. */
923 else
924 *walk_subtrees = 0;
926 return NULL_TREE;
929 /* Unmark the visited trees rooted at *TP. */
931 static inline void
932 unmark_visited (tree *tp)
934 walk_tree (tp, unmark_visited_r, NULL, NULL);
937 /* Likewise, but mark all trees as not visited. */
939 static void
940 unvisit_body (tree fndecl)
942 struct cgraph_node *cgn = cgraph_node::get (fndecl);
944 unmark_visited (&DECL_SAVED_TREE (fndecl));
945 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
946 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
948 if (cgn)
949 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
950 unvisit_body (cgn->decl);
953 /* Unconditionally make an unshared copy of EXPR. This is used when using
954 stored expressions which span multiple functions, such as BINFO_VTABLE,
955 as the normal unsharing process can't tell that they're shared. */
957 tree
958 unshare_expr (tree expr)
960 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
961 return expr;
964 /* Worker for unshare_expr_without_location. */
966 static tree
967 prune_expr_location (tree *tp, int *walk_subtrees, void *)
969 if (EXPR_P (*tp))
970 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
971 else
972 *walk_subtrees = 0;
973 return NULL_TREE;
976 /* Similar to unshare_expr but also prune all expression locations
977 from EXPR. */
979 tree
980 unshare_expr_without_location (tree expr)
982 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
983 if (EXPR_P (expr))
984 walk_tree (&expr, prune_expr_location, NULL, NULL);
985 return expr;
988 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
989 contain statements and have a value. Assign its value to a temporary
990 and give it void_type_node. Return the temporary, or NULL_TREE if
991 WRAPPER was already void. */
993 tree
994 voidify_wrapper_expr (tree wrapper, tree temp)
996 tree type = TREE_TYPE (wrapper);
997 if (type && !VOID_TYPE_P (type))
999 tree *p;
1001 /* Set p to point to the body of the wrapper. Loop until we find
1002 something that isn't a wrapper. */
1003 for (p = &wrapper; p && *p; )
1005 switch (TREE_CODE (*p))
1007 case BIND_EXPR:
1008 TREE_SIDE_EFFECTS (*p) = 1;
1009 TREE_TYPE (*p) = void_type_node;
1010 /* For a BIND_EXPR, the body is operand 1. */
1011 p = &BIND_EXPR_BODY (*p);
1012 break;
1014 case CLEANUP_POINT_EXPR:
1015 case TRY_FINALLY_EXPR:
1016 case TRY_CATCH_EXPR:
1017 TREE_SIDE_EFFECTS (*p) = 1;
1018 TREE_TYPE (*p) = void_type_node;
1019 p = &TREE_OPERAND (*p, 0);
1020 break;
1022 case STATEMENT_LIST:
1024 tree_stmt_iterator i = tsi_last (*p);
1025 TREE_SIDE_EFFECTS (*p) = 1;
1026 TREE_TYPE (*p) = void_type_node;
1027 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1029 break;
1031 case COMPOUND_EXPR:
1032 /* Advance to the last statement. Set all container types to
1033 void. */
1034 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1036 TREE_SIDE_EFFECTS (*p) = 1;
1037 TREE_TYPE (*p) = void_type_node;
1039 break;
1041 case TRANSACTION_EXPR:
1042 TREE_SIDE_EFFECTS (*p) = 1;
1043 TREE_TYPE (*p) = void_type_node;
1044 p = &TRANSACTION_EXPR_BODY (*p);
1045 break;
1047 default:
1048 /* Assume that any tree upon which voidify_wrapper_expr is
1049 directly called is a wrapper, and that its body is op0. */
1050 if (p == &wrapper)
1052 TREE_SIDE_EFFECTS (*p) = 1;
1053 TREE_TYPE (*p) = void_type_node;
1054 p = &TREE_OPERAND (*p, 0);
1055 break;
1057 goto out;
1061 out:
1062 if (p == NULL || IS_EMPTY_STMT (*p))
1063 temp = NULL_TREE;
1064 else if (temp)
1066 /* The wrapper is on the RHS of an assignment that we're pushing
1067 down. */
1068 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1069 || TREE_CODE (temp) == MODIFY_EXPR);
1070 TREE_OPERAND (temp, 1) = *p;
1071 *p = temp;
1073 else
1075 temp = create_tmp_var (type, "retval");
1076 *p = build2 (INIT_EXPR, type, temp, *p);
1079 return temp;
1082 return NULL_TREE;
1085 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1086 a temporary through which they communicate. */
1088 static void
1089 build_stack_save_restore (gcall **save, gcall **restore)
1091 tree tmp_var;
1093 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1094 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1095 gimple_call_set_lhs (*save, tmp_var);
1097 *restore
1098 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1099 1, tmp_var);
1102 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1104 static tree
1105 build_asan_poison_call_expr (tree decl)
1107 /* Do not poison variables that have size equal to zero. */
1108 tree unit_size = DECL_SIZE_UNIT (decl);
1109 if (zerop (unit_size))
1110 return NULL_TREE;
1112 tree base = build_fold_addr_expr (decl);
1114 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1115 void_type_node, 3,
1116 build_int_cst (integer_type_node,
1117 ASAN_MARK_POISON),
1118 base, unit_size);
1121 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1122 on POISON flag, shadow memory of a DECL variable. The call will be
1123 put on location identified by IT iterator, where BEFORE flag drives
1124 position where the stmt will be put. */
1126 static void
1127 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1128 bool before)
1130 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1131 if (gimplify_omp_ctxp)
1132 return;
1134 tree unit_size = DECL_SIZE_UNIT (decl);
1135 tree base = build_fold_addr_expr (decl);
1137 /* Do not poison variables that have size equal to zero. */
1138 if (zerop (unit_size))
1139 return;
1141 /* It's necessary to have all stack variables aligned to ASAN granularity
1142 bytes. */
1143 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1144 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1146 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1148 gimple *g
1149 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1150 build_int_cst (integer_type_node, flags),
1151 base, unit_size);
1153 if (before)
1154 gsi_insert_before (it, g, GSI_NEW_STMT);
1155 else
1156 gsi_insert_after (it, g, GSI_NEW_STMT);
1159 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1160 either poisons or unpoisons a DECL. Created statement is appended
1161 to SEQ_P gimple sequence. */
1163 static void
1164 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1166 gimple_stmt_iterator it = gsi_last (*seq_p);
1167 bool before = false;
1169 if (gsi_end_p (it))
1170 before = true;
1172 asan_poison_variable (decl, poison, &it, before);
1175 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1177 static int
1178 sort_by_decl_uid (const void *a, const void *b)
1180 const tree *t1 = (const tree *)a;
1181 const tree *t2 = (const tree *)b;
1183 int uid1 = DECL_UID (*t1);
1184 int uid2 = DECL_UID (*t2);
1186 if (uid1 < uid2)
1187 return -1;
1188 else if (uid1 > uid2)
1189 return 1;
1190 else
1191 return 0;
1194 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1195 depending on POISON flag. Created statement is appended
1196 to SEQ_P gimple sequence. */
1198 static void
1199 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1201 unsigned c = variables->elements ();
1202 if (c == 0)
1203 return;
1205 auto_vec<tree> sorted_variables (c);
1207 for (hash_set<tree>::iterator it = variables->begin ();
1208 it != variables->end (); ++it)
1209 sorted_variables.safe_push (*it);
1211 sorted_variables.qsort (sort_by_decl_uid);
1213 unsigned i;
1214 tree var;
1215 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1217 asan_poison_variable (var, poison, seq_p);
1219 /* Add use_after_scope_memory attribute for the variable in order
1220 to prevent re-written into SSA. */
1221 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1222 DECL_ATTRIBUTES (var)))
1223 DECL_ATTRIBUTES (var)
1224 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1225 integer_one_node,
1226 DECL_ATTRIBUTES (var));
1230 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
     /* Converts a GENERIC BIND_EXPR into a GIMPLE_BIND: marks its local
	variables, gimplifies the body, and wraps the body in a
	TRY_FINALLY with stack-restore / clobber / ASAN-unpoison cleanups
	when needed.  If the BIND_EXPR computed a value, *EXPR_P is
	replaced with the temporary carrying it (GS_OK), else with
	NULL_TREE (GS_ALL_DONE).  */
1232 static enum gimplify_status
1233 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1235   tree bind_expr = *expr_p;
     /* Save the enclosing context's stack-management flags; restored
	near the end so nested binds do not leak their state.  */
1236   bool old_keep_stack = gimplify_ctxp->keep_stack;
1237   bool old_save_stack = gimplify_ctxp->save_stack;
1238   tree t;
1239   gbind *bind_stmt;
1240   gimple_seq body, cleanup;
1241   gcall *stack_save;
1242   location_t start_locus = 0, end_locus = 0;
1243   tree ret_clauses = NULL;
     /* If the bind produces a value, voidify it and remember the
	temporary that will carry the value.  */
1245   tree temp = voidify_wrapper_expr (bind_expr, NULL);
1247   /* Mark variables seen in this bind expr. */
1248   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1250       if (VAR_P (t))
1252 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1254 	  /* Mark variable as local. */
1255 	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1256 	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1257 		  || splay_tree_lookup (ctx->variables,
1258 					(splay_tree_key) t) == NULL))
     /* Addressable automatics inside a SIMD region become private so
	each lane gets its own copy; everything else is just local.  */
1260 	      if (ctx->region_type == ORT_SIMD
1261 		  && TREE_ADDRESSABLE (t)
1262 		  && !TREE_STATIC (t))
1263 		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1264 	      else
1265 		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1268 	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1270 	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1271 	    cfun->has_local_explicit_reg_vars = true;
1274       /* Preliminarily mark non-addressed complex variables as eligible
1275 	 for promotion to gimple registers. We'll transform their uses
1276 	 as we find them. */
1277       if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1278 	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1279 	  && !TREE_THIS_VOLATILE (t)
1280 	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1281 	  && !needs_to_live_in_memory (t))
1282 	DECL_GIMPLE_REG_P (t) = 1;
1285   bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1286 				 BIND_EXPR_BLOCK (bind_expr));
1287   gimple_push_bind_expr (bind_stmt);
     /* Reset for this bind; the body's gimplification sets these when it
	contains VLAs (save_stack) or alloca calls (keep_stack).  */
1289   gimplify_ctxp->keep_stack = false;
1290   gimplify_ctxp->save_stack = false;
1292   /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1293   body = NULL;
1294   gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1295   gimple_bind_set_body (bind_stmt, body);
1297   /* Source location wise, the cleanup code (stack_restore and clobbers)
1298      belongs to the end of the block, so propagate what we have. The
1299      stack_save operation belongs to the beginning of block, which we can
1300      infer from the bind_expr directly if the block has no explicit
1301      assignment. */
1302   if (BIND_EXPR_BLOCK (bind_expr))
1304       end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1305       start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1307   if (start_locus == 0)
1308     start_locus = EXPR_LOCATION (bind_expr);
1310   cleanup = NULL;
1311   stack_save = NULL;
1313   /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1314      the stack space allocated to the VLAs. */
1315   if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1317       gcall *stack_restore;
1319       /* Save stack on entry and restore it on exit. Add a try_finally
1320 	 block to achieve this. */
1321       build_stack_save_restore (&stack_save, &stack_restore);
1323       gimple_set_location (stack_save, start_locus);
1324       gimple_set_location (stack_restore, end_locus);
1326       gimplify_seq_add_stmt (&cleanup, stack_restore);
1329   /* Add clobbers for all variables that go out of scope. */
1330   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1332       if (VAR_P (t)
1333 	  && !is_global_var (t)
1334 	  && DECL_CONTEXT (t) == current_function_decl)
1336 	  if (!DECL_HARD_REGISTER (t)
1337 	      && !TREE_THIS_VOLATILE (t)
1338 	      && !DECL_HAS_VALUE_EXPR_P (t)
1339 	      /* Only care for variables that have to be in memory. Others
1340 		 will be rewritten into SSA names, hence moved to the
1341 		 top-level. */
1342 	      && !is_gimple_reg (t)
1343 	      && flag_stack_reuse != SR_NONE)
     /* An empty-CONSTRUCTOR assignment with TREE_THIS_VOLATILE set is
	GIMPLE's representation of a clobber: it tells later passes the
	variable is dead past this point so its stack slot can be
	reused.  */
1345 	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
1346 	      gimple *clobber_stmt;
1347 	      TREE_THIS_VOLATILE (clobber) = 1;
1348 	      clobber_stmt = gimple_build_assign (t, clobber);
1349 	      gimple_set_location (clobber_stmt, end_locus);
1350 	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1353 	  if (flag_openacc && oacc_declare_returns != NULL)
1355 	      tree *c = oacc_declare_returns->get (t);
1356 	      if (c != NULL)
     /* Chain this variable's OpenACC declare-return clause onto the
	list emitted as an OACC_DECLARE target below.  */
1358 		  if (ret_clauses)
1359 		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1361 		  ret_clauses = *c;
1363 		  oacc_declare_returns->remove (t);
1365 		  if (oacc_declare_returns->elements () == 0)
1367 		      delete oacc_declare_returns;
1368 		      oacc_declare_returns = NULL;
     /* Variables poisoned at declaration get unpoisoned-at-scope-exit
	flipped to a poison here: past this point any access is a
	use-after-scope.  */
1374       if (asan_poisoned_variables != NULL
1375 	  && asan_poisoned_variables->contains (t))
1377 	  asan_poisoned_variables->remove (t);
1378 	  asan_poison_variable (t, true, &cleanup);
1381       if (gimplify_ctxp->live_switch_vars != NULL
1382 	  && gimplify_ctxp->live_switch_vars->contains (t))
1383 	gimplify_ctxp->live_switch_vars->remove (t);
1386   if (ret_clauses)
1388       gomp_target *stmt;
1389       gimple_stmt_iterator si = gsi_start (cleanup);
1391       stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1392 				      ret_clauses);
1393       gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1396   if (cleanup)
     /* Wrap body and cleanups in a TRY_FINALLY so the cleanups run on
	every exit path; stack_save (if any) precedes the try.  */
1398       gtry *gs;
1399       gimple_seq new_body;
1401       new_body = NULL;
1402       gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1403 			     GIMPLE_TRY_FINALLY);
1405       if (stack_save)
1406 	gimplify_seq_add_stmt (&new_body, stack_save);
1407       gimplify_seq_add_stmt (&new_body, gs);
1408       gimple_bind_set_body (bind_stmt, new_body);
1411   /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1412   if (!gimplify_ctxp->keep_stack)
1413     gimplify_ctxp->keep_stack = old_keep_stack;
1414   gimplify_ctxp->save_stack = old_save_stack;
1416   gimple_pop_bind_expr ();
1418   gimplify_seq_add_stmt (pre_p, bind_stmt);
1420   if (temp)
1422       *expr_p = temp;
1423       return GS_OK;
1426   *expr_p = NULL_TREE;
1427   return GS_ALL_DONE;
1430 /* Maybe add early return predict statement to PRE_P sequence. */
1432 static void
1433 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1435 /* If we are not in a conditional context, add PREDICT statement. */
1436 if (gimple_conditional_context ())
1438 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1439 NOT_TAKEN);
1440 gimplify_seq_add_stmt (pre_p, predict);
1444 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1445    GIMPLE value, it is assigned to a new temporary and the statement is
1446    re-written to return the temporary.
1448    PRE_P points to the sequence where side effects that must happen before
1449    STMT should be stored. */
1451 static enum gimplify_status
1452 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
     /* NOTE(review): this outer RET is shadowed by the inner declaration
	at line 1475 below; only the final-path return uses it.  */
1454   greturn *ret;
1455   tree ret_expr = TREE_OPERAND (stmt, 0);
1456   tree result_decl, result;
1458   if (ret_expr == error_mark_node)
1459     return GS_ERROR;
1461   /* Implicit _Cilk_sync must be inserted right before any return statement
1462      if there is a _Cilk_spawn in the function. If the user has provided a
1463      _Cilk_sync, the optimizer should remove this duplicate one. */
1464   if (fn_contains_cilk_spawn_p (cfun))
1466       tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1467       gimplify_and_add (impl_sync, pre_p);
     /* Degenerate returns — no value, or a bare RESULT_DECL — need no
	temporary; emit the GIMPLE_RETURN directly.  (The error_mark_node
	test here is redundant: that case already returned at 1459.)  */
1470   if (!ret_expr
1471       || TREE_CODE (ret_expr) == RESULT_DECL
1472       || ret_expr == error_mark_node)
1474       maybe_add_early_return_predict_stmt (pre_p);
1475       greturn *ret = gimple_build_return (ret_expr);
1476       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1477       gimplify_seq_add_stmt (pre_p, ret);
1478       return GS_ALL_DONE;
1481   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1482     result_decl = NULL_TREE;
1483   else
     /* ret_expr is a MODIFY/INIT_EXPR assigning into the RESULT_DECL;
	pull out that decl (possibly behind an INDIRECT_REF).  */
1485       result_decl = TREE_OPERAND (ret_expr, 0);
1487       /* See through a return by reference. */
1488       if (TREE_CODE (result_decl) == INDIRECT_REF)
1489 	result_decl = TREE_OPERAND (result_decl, 0);
1491       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1492 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1493 		  && TREE_CODE (result_decl) == RESULT_DECL);
1496   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1497      Recall that aggregate_value_p is FALSE for any aggregate type that is
1498      returned in registers. If we're returning values in registers, then
1499      we don't want to extend the lifetime of the RESULT_DECL, particularly
1500      across another call. In addition, for those aggregates for which
1501      hard_function_value generates a PARALLEL, we'll die during normal
1502      expansion of structure assignments; there's special code in expand_return
1503      to handle this case that does not exist in expand_expr. */
1504   if (!result_decl)
1505     result = NULL_TREE;
1506   else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1508       if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1510 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1511 	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1512 	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1513 	     should be effectively allocated by the caller, i.e. all calls to
1514 	     this function must be subject to the Return Slot Optimization. */
1515 	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1516 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1518       result = result_decl;
     /* Returning in registers: reuse one temporary for all returns in
	the function, creating it on first use.  */
1520   else if (gimplify_ctxp->return_temp)
1521     result = gimplify_ctxp->return_temp;
1522   else
1524       result = create_tmp_reg (TREE_TYPE (result_decl));
1526       /* ??? With complex control flow (usually involving abnormal edges),
1527 	 we can wind up warning about an uninitialized value for this. Due
1528 	 to how this variable is constructed and initialized, this is never
1529 	 true. Give up and never warn. */
1530       TREE_NO_WARNING (result) = 1;
1532       gimplify_ctxp->return_temp = result;
1535   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1536      Then gimplify the whole thing. */
1537   if (result != result_decl)
1538     TREE_OPERAND (ret_expr, 0) = result;
1540   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1542   maybe_add_early_return_predict_stmt (pre_p);
1543   ret = gimple_build_return (result);
1544   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1545   gimplify_seq_add_stmt (pre_p, ret);
1547   return GS_ALL_DONE;
1550 /* Gimplify a variable-length array DECL. */
1552 static void
1553 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1555 /* This is a variable-sized decl. Simplify its size and mark it
1556 for deferred expansion. */
1557 tree t, addr, ptr_type;
1559 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1560 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1562 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1563 if (DECL_HAS_VALUE_EXPR_P (decl))
1564 return;
1566 /* All occurrences of this decl in final gimplified code will be
1567 replaced by indirection. Setting DECL_VALUE_EXPR does two
1568 things: First, it lets the rest of the gimplifier know what
1569 replacement to use. Second, it lets the debug info know
1570 where to find the value. */
1571 ptr_type = build_pointer_type (TREE_TYPE (decl));
1572 addr = create_tmp_var (ptr_type, get_name (decl));
1573 DECL_IGNORED_P (addr) = 0;
1574 t = build_fold_indirect_ref (addr);
1575 TREE_THIS_NOTRAP (t) = 1;
1576 SET_DECL_VALUE_EXPR (decl, t);
1577 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1579 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1580 max_int_size_in_bytes (TREE_TYPE (decl)));
1581 /* The call has been built for a variable-sized object. */
1582 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1583 t = fold_convert (ptr_type, t);
1584 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1586 gimplify_and_add (t, seq_p);
1589 /* A helper function to be called via walk_tree. Mark all labels under *TP
1590 as being forced. To be called for DECL_INITIAL of static variables. */
1592 static tree
1593 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1595 if (TYPE_P (*tp))
1596 *walk_subtrees = 0;
1597 if (TREE_CODE (*tp) == LABEL_DECL)
1599 FORCED_LABEL (*tp) = 1;
1600 cfun->has_forced_label_in_static = 1;
1603 return NULL_TREE;
1606 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1607    and initialization explicit. */
1609 static enum gimplify_status
1610 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1612   tree stmt = *stmt_p;
1613   tree decl = DECL_EXPR_DECL (stmt);
     /* The DECL_EXPR itself is consumed here; it never survives into
	GIMPLE.  */
1615   *stmt_p = NULL_TREE;
1617   if (TREE_TYPE (decl) == error_mark_node)
1618     return GS_ERROR;
     /* Gimplify the type's size expressions first, so size computations
	are emitted as statements before any use of the variable.  */
1620   if ((TREE_CODE (decl) == TYPE_DECL
1621        || VAR_P (decl))
1622       && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1624       gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1625       if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1626 	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1629   /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1630      in case its size expressions contain problematic nodes like CALL_EXPR. */
1631   if (TREE_CODE (decl) == TYPE_DECL
1632       && DECL_ORIGINAL_TYPE (decl)
1633       && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1635       gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1636       if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1637 	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1640   if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1642       tree init = DECL_INITIAL (decl);
1643       bool is_vla = false;
     /* Treat as a VLA both genuinely variable-sized decls and — under
	-fstack-check=generic — fixed-size decls too large for a plain
	stack slot.  */
1645       if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1646 	  || (!TREE_STATIC (decl)
1647 	      && flag_stack_check == GENERIC_STACK_CHECK
1648 	      && compare_tree_int (DECL_SIZE_UNIT (decl),
1649 				   STACK_CHECK_MAX_VAR_SIZE) > 0))
1651 	  gimplify_vla_decl (decl, seq_p);
1652 	  is_vla = true;
     /* Use-after-scope instrumentation: unpoison the variable at its
	declaration point and record it so gimplify_bind_expr poisons it
	again at scope exit.  */
1655       if (asan_poisoned_variables
1656 	  && !is_vla
1657 	  && TREE_ADDRESSABLE (decl)
1658 	  && !TREE_STATIC (decl)
1659 	  && !DECL_HAS_VALUE_EXPR_P (decl)
1660 	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1661 	  && dbg_cnt (asan_use_after_scope))
1663 	  asan_poisoned_variables->add (decl);
1664 	  asan_poison_variable (decl, false, seq_p);
1665 	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1666 	    gimplify_ctxp->live_switch_vars->add (decl);
1669       /* Some front ends do not explicitly declare all anonymous
1670 	 artificial variables. We compensate here by declaring the
1671 	 variables, though it would be better if the front ends would
1672 	 explicitly declare them. */
1673       if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1674 	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1675 	gimple_add_tmp_var (decl);
1677       if (init && init != error_mark_node)
1679 	  if (!TREE_STATIC (decl))
     /* Non-static: turn the initializer into an explicit INIT_EXPR
	statement; the tree is dead afterwards, so free it.  */
1681 	      DECL_INITIAL (decl) = NULL_TREE;
1682 	      init = build2 (INIT_EXPR, void_type_node, decl, init);
1683 	      gimplify_and_add (init, seq_p);
1684 	      ggc_free (init);
1686 	  else
1687 	    /* We must still examine initializers for static variables
1688 	       as they may contain a label address. */
1689 	    walk_tree (&init, force_labels_r, NULL, NULL);
1693   return GS_ALL_DONE;
1696 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1697 and replacing the LOOP_EXPR with goto, but if the loop contains an
1698 EXIT_EXPR, we need to append a label for it to jump to. */
1700 static enum gimplify_status
1701 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1703 tree saved_label = gimplify_ctxp->exit_label;
1704 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1706 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1708 gimplify_ctxp->exit_label = NULL_TREE;
1710 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1712 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1714 if (gimplify_ctxp->exit_label)
1715 gimplify_seq_add_stmt (pre_p,
1716 gimple_build_label (gimplify_ctxp->exit_label));
1718 gimplify_ctxp->exit_label = saved_label;
1720 *expr_p = NULL;
1721 return GS_ALL_DONE;
1724 /* Gimplify a statement list onto a sequence. These may be created either
1725 by an enlightened front-end, or by shortcut_cond_expr. */
1727 static enum gimplify_status
1728 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1730 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1732 tree_stmt_iterator i = tsi_start (*expr_p);
1734 while (!tsi_end_p (i))
1736 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1737 tsi_delink (&i);
1740 if (temp)
1742 *expr_p = temp;
1743 return GS_OK;
1746 return GS_ALL_DONE;
1749 /* Callback for walk_gimple_seq. */
     /* Finds the first "real" (executable) statement of a switch body,
	diving through scopes but stopping at it: the statement is stored
	in WI->info and integer_zero_node is returned to end the walk.
	Used by maybe_warn_switch_unreachable below.  */
1751 static tree
1752 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1753 			   struct walk_stmt_info *wi)
1755   gimple *stmt = gsi_stmt (*gsi_p);
1757   *handled_ops_p = true;
1758   switch (gimple_code (stmt))
1760     case GIMPLE_TRY:
1761       /* A compiler-generated cleanup or a user-written try block.
1762 	 If it's empty, don't dive into it--that would result in
1763 	 worse location info. */
1764       if (gimple_try_eval (stmt) == NULL)
1766 	  wi->info = stmt;
1767 	  return integer_zero_node;
1769       /* Fall through. */
1770     case GIMPLE_BIND:
1771     case GIMPLE_CATCH:
1772     case GIMPLE_EH_FILTER:
1773     case GIMPLE_TRANSACTION:
1774       /* Walk the sub-statements. */
1775       *handled_ops_p = false;
1776       break;
1777     case GIMPLE_CALL:
     /* ASAN_MARK instrumentation calls are not user statements; keep
	walking past them rather than reporting them as "first".  */
1778       if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1780 	  *handled_ops_p = false;
1781 	  break;
1783       /* Fall through. */
1784     default:
1785       /* Save the first "real" statement (not a decl/lexical scope/...). */
1786       wi->info = stmt;
1787       return integer_zero_node;
1789   return NULL_TREE;
1792 /* Possibly warn about unreachable statements between switch's controlling
1793 expression and the first case. SEQ is the body of a switch expression. */
1795 static void
1796 maybe_warn_switch_unreachable (gimple_seq seq)
1798 if (!warn_switch_unreachable
1799 /* This warning doesn't play well with Fortran when optimizations
1800 are on. */
1801 || lang_GNU_Fortran ()
1802 || seq == NULL)
1803 return;
1805 struct walk_stmt_info wi;
1806 memset (&wi, 0, sizeof (wi));
1807 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1808 gimple *stmt = (gimple *) wi.info;
1810 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1812 if (gimple_code (stmt) == GIMPLE_GOTO
1813 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1814 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1815 /* Don't warn for compiler-generated gotos. These occur
1816 in Duff's devices, for example. */;
1817 else
1818 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1819 "statement will never be executed");
1824 /* A label entry that pairs label and a location.  */
1825 struct label_entry
     /* The LABEL_DECL this entry describes.  */
1827   tree label;
     /* The source location reported when warning about falling through
	to this label.  */
1828   location_t loc;
1831 /* Find LABEL in vector of label entries VEC. */
1833 static struct label_entry *
1834 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1836 unsigned int i;
1837 struct label_entry *l;
1839 FOR_EACH_VEC_ELT (*vec, i, l)
1840 if (l->label == label)
1841 return l;
1842 return NULL;
1845 /* Return true if LABEL, a LABEL_DECL, represents a case label
1846 in a vector of labels CASES. */
1848 static bool
1849 case_label_p (const vec<tree> *cases, tree label)
1851 unsigned int i;
1852 tree l;
1854 FOR_EACH_VEC_ELT (*cases, i, l)
1855 if (CASE_LABEL (l) == label)
1856 return true;
1857 return false;
1860 /* Find the last statement in a scope STMT. */
     /* Recursively digs into GIMPLE_BIND / GIMPLE_TRY wrappers to find
	the last statement that would actually execute in the scope STMT.
	Returns NULL for an empty scope.  */
1862 static gimple *
1863 last_stmt_in_scope (gimple *stmt)
1865   if (!stmt)
1866     return NULL;
1868   switch (gimple_code (stmt))
1870     case GIMPLE_BIND:
1872 	gbind *bind = as_a <gbind *> (stmt);
1873 	stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1874 	return last_stmt_in_scope (stmt);
1876     case GIMPLE_TRY:
     /* For a TRY_FINALLY whose eval part can fall through (and is not a
	FALLTHROUGH () marker), the last statement executed is the last
	one of the cleanup; otherwise it is the last of the eval part.
	NOTE(review): gimple_stmt_may_fallthru (last_eval) is evaluated
	before the last_eval == NULL test — confirm that the callee
	tolerates a NULL argument.  */
1879 	gtry *try_stmt = as_a <gtry *> (stmt);
1880 	stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1881 	gimple *last_eval = last_stmt_in_scope (stmt);
1882 	if (gimple_stmt_may_fallthru (last_eval)
1883 	    && (last_eval == NULL
1884 		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1885 	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1887 	    stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1888 	    return last_stmt_in_scope (stmt);
1890 	else
1891 	  return last_eval;
1894     default:
1895       return stmt;
1899 /* Collect interesting labels in LABELS and return the statement preceding
1900    another case label, or a user-defined label. */
     /* Advances *GSI_P through a switch body, collecting in LABELS the
	artificial labels (with the location to blame) that control flow
	can fall through, and stops at the next case/user label.  Returns
	the last "real" statement seen before that label, or NULL.
	NOTE(review): the body below is a do { ... } while loop; the
	lines holding only "do"/braces were lost in this extraction.  */
1902 static gimple *
1903 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1904 			    auto_vec <struct label_entry> *labels)
1906   gimple *prev = NULL;
1910       if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1911 	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1913 	  /* Nested scope. Only look at the last statement of
1914 	     the innermost scope. */
1915 	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1916 	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1917 	  if (last)
1919 	      prev = last;
1920 	      /* It might be a label without a location. Use the
1921 		 location of the scope then. */
1922 	      if (!gimple_has_location (prev))
1923 		gimple_set_location (prev, bind_loc);
1925 	  gsi_next (gsi_p);
1926 	  continue;
1929       /* Ifs are tricky. */
1930       if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1932 	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1933 	  tree false_lab = gimple_cond_false_label (cond_stmt);
1934 	  location_t if_loc = gimple_location (cond_stmt);
1936 	  /* If we have e.g.
1937 	     if (i > 1) goto <D.2259>; else goto D;
1938 	     we can't do much with the else-branch. */
1939 	  if (!DECL_ARTIFICIAL (false_lab))
1940 	    break;
1942 	  /* Go on until the false label, then one step back. */
1943 	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1945 	      gimple *stmt = gsi_stmt (*gsi_p);
1946 	      if (gimple_code (stmt) == GIMPLE_LABEL
1947 		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1948 		break;
1951 	  /* Not found? Oops. */
1952 	  if (gsi_end_p (*gsi_p))
1953 	    break;
     /* The false label is reachable by falling through the condition,
	so it is a fallthrough candidate; blame the if's location.  */
1955 	  struct label_entry l = { false_lab, if_loc };
1956 	  labels->safe_push (l);
1958 	  /* Go to the last statement of the then branch. */
1959 	  gsi_prev (gsi_p);
1961 	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1962 	     <D.1759>:
1963 	     <stmt>;
1964 	     goto <D.1761>;
1965 	     <D.1760>:
1967 	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1968 	      && !gimple_has_location (gsi_stmt (*gsi_p)))
1970 	      /* Look at the statement before, it might be
1971 		 attribute fallthrough, in which case don't warn. */
1972 	      gsi_prev (gsi_p);
1973 	      bool fallthru_before_dest
1974 		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1975 	      gsi_next (gsi_p);
1976 	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1977 	      if (!fallthru_before_dest)
1979 		  struct label_entry l = { goto_dest, if_loc };
1980 		  labels->safe_push (l);
1983 	  /* And move back. */
1984 	  gsi_next (gsi_p);
1987       /* Remember the last statement. Skip labels that are of no interest
1988 	 to us. */
1989       if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1991 	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1992 	  if (find_label_entry (labels, label))
1993 	    prev = gsi_stmt (*gsi_p);
     /* ASAN_MARK instrumentation calls are skipped: they never count
	as the statement preceding a label.  */
1995       else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1997       else
1998 	prev = gsi_stmt (*gsi_p);
1999       gsi_next (gsi_p);
2001   while (!gsi_end_p (*gsi_p)
2002 	 /* Stop if we find a case or a user-defined label. */
2003 	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2004 	     || !gimple_has_location (gsi_stmt (*gsi_p))));
2006   return prev;
2009 /* Return true if the switch fallthough warning should occur. LABEL is
2010    the label statement that we're falling through to. */
     /* GSI_P points at LABEL within the gimplified switch body; the
	iterator is copied, never advanced for the caller.  */
2012 static bool
2013 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2015   gimple_stmt_iterator gsi = *gsi_p;
2017   /* Don't warn if the label is marked with a "falls through" comment. */
2018   if (FALLTHROUGH_LABEL_P (label))
2019     return false;
2021   /* Don't warn for non-case labels followed by a statement:
2022      case 0:
2023        foo ();
2024      label:
2025        bar ();
2026      as these are likely intentional. */
2027   if (!case_label_p (&gimplify_ctxp->case_labels, label))
     /* Skip the run of consecutive non-case labels; warn only if a case
	label follows them directly.  */
2029       tree l;
2030       while (!gsi_end_p (gsi)
2031 	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2032 	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2033 	     && !case_label_p (&gimplify_ctxp->case_labels, l))
2034 	gsi_next (&gsi);
2035       if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2036 	return false;
2039   /* Don't warn for terminated branches, i.e. when the subsequent case labels
2040      immediately breaks. */
2041   gsi = *gsi_p;
2043   /* Skip all immediately following labels. */
2044   while (!gsi_end_p (gsi)
2045 	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2046 	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2047     gsi_next (&gsi);
2049   /* { ... something; default:; } */
2050   if (gsi_end_p (gsi)
2051       /* { ... something; default: break; } or
2052 	 { ... something; default: goto L; } */
2053       || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2054       /* { ... something; default: return; } */
2055       || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2056     return false;
2058   return true;
2061 /* Callback for walk_gimple_seq. */
     /* Emits -Wimplicit-fallthrough diagnostics: starting at a label, it
	collects the statements up to the next case/user label and warns
	when the preceding statement can fall through into it without a
	FALLTHROUGH () marker.  */
2063 static tree
2064 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2065 			     struct walk_stmt_info *)
2067   gimple *stmt = gsi_stmt (*gsi_p);
2069   *handled_ops_p = true;
2070   switch (gimple_code (stmt))
2072     case GIMPLE_TRY:
2073     case GIMPLE_BIND:
2074     case GIMPLE_CATCH:
2075     case GIMPLE_EH_FILTER:
2076     case GIMPLE_TRANSACTION:
2077       /* Walk the sub-statements. */
2078       *handled_ops_p = false;
2079       break;
2081     /* Find a sequence of form:
2083        GIMPLE_LABEL
2084        [...]
2085        <may fallthru stmt>
2086        GIMPLE_LABEL
2088        and possibly warn. */
2089     case GIMPLE_LABEL:
2091 	/* Found a label. Skip all immediately following labels. */
2092 	while (!gsi_end_p (*gsi_p)
2093 	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2094 	  gsi_next (gsi_p);
2096 	/* There might be no more statements. */
2097 	if (gsi_end_p (*gsi_p))
2098 	  return integer_zero_node;
2100 	/* Vector of labels that fall through. */
2101 	auto_vec <struct label_entry> labels;
2102 	gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2104 	/* There might be no more statements. */
2105 	if (gsi_end_p (*gsi_p))
2106 	  return integer_zero_node;
2108 	gimple *next = gsi_stmt (*gsi_p);
2109 	tree label;
2110 	/* If what follows is a label, then we may have a fallthrough. */
2111 	if (gimple_code (next) == GIMPLE_LABEL
2112 	    && gimple_has_location (next)
2113 	    && (label = gimple_label_label (as_a <glabel *> (next)))
2114 	    && prev != NULL)
2116 	    struct label_entry *l;
2117 	    bool warned_p = false;
2118 	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2119 	      /* Quiet. */;
     /* When the last "statement" is itself a label we collected, warn
	at the location recorded for that label (e.g. the controlling
	if); otherwise warn at the statement that falls through.  */
2120 	    else if (gimple_code (prev) == GIMPLE_LABEL
2121 		     && (label = gimple_label_label (as_a <glabel *> (prev)))
2122 		     && (l = find_label_entry (&labels, label)))
2123 	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2124 				     "this statement may fall through");
2125 	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2126 		     /* Try to be clever and don't warn when the statement
2127 			can't actually fall through. */
2128 		     && gimple_stmt_may_fallthru (prev)
2129 		     && gimple_has_location (prev))
2130 	      warned_p = warning_at (gimple_location (prev),
2131 				     OPT_Wimplicit_fallthrough_,
2132 				     "this statement may fall through");
2133 	    if (warned_p)
2134 	      inform (gimple_location (next), "here");
2136 	    /* Mark this label as processed so as to prevent multiple
2137 	       warnings in nested switches. */
2138 	    FALLTHROUGH_LABEL_P (label) = true;
2140 	    /* So that next warn_implicit_fallthrough_r will start looking for
2141 	       a new sequence starting with this label. */
2142 	    gsi_prev (gsi_p);
2145       break;
2146     default:
2147       break;
2149   return NULL_TREE;
2152 /* Warn when a switch case falls through. */
2154 static void
2155 maybe_warn_implicit_fallthrough (gimple_seq seq)
2157 if (!warn_implicit_fallthrough)
2158 return;
2160 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2161 if (!(lang_GNU_C ()
2162 || lang_GNU_CXX ()
2163 || lang_GNU_OBJC ()))
2164 return;
2166 struct walk_stmt_info wi;
2167 memset (&wi, 0, sizeof (wi));
2168 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2171 /* Callback for walk_gimple_seq. */
     /* Removes an IFN_FALLTHROUGH marker call and verifies that what
	follows it (possibly through a location-less goto) is a case or
	default label; warns otherwise.  */
2173 static tree
2174 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2175 		      struct walk_stmt_info *)
2177   gimple *stmt = gsi_stmt (*gsi_p);
2179   *handled_ops_p = true;
2180   switch (gimple_code (stmt))
2182     case GIMPLE_TRY:
2183     case GIMPLE_BIND:
2184     case GIMPLE_CATCH:
2185     case GIMPLE_EH_FILTER:
2186     case GIMPLE_TRANSACTION:
2187       /* Walk the sub-statements. */
2188       *handled_ops_p = false;
2189       break;
2190     case GIMPLE_CALL:
2191       if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
     /* Drop the marker; *GSI_P now points at the statement after it.  */
2193 	  gsi_remove (gsi_p, true);
2194 	  if (gsi_end_p (*gsi_p))
2195 	    return integer_zero_node;
2197 	  bool found = false;
2198 	  location_t loc = gimple_location (stmt);
2200 	  gimple_stmt_iterator gsi2 = *gsi_p;
2201 	  stmt = gsi_stmt (gsi2);
2202 	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2204 	      /* Go on until the artificial label. */
2205 	      tree goto_dest = gimple_goto_dest (stmt);
2206 	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2208 		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2209 		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2210 			 == goto_dest)
2211 		    break;
2214 	      /* Not found? Stop. */
2215 	      if (gsi_end_p (gsi2))
2216 		break;
2218 	      /* Look one past it. */
2219 	      gsi_next (&gsi2);
2222 	  /* We're looking for a case label or default label here. */
2223 	  while (!gsi_end_p (gsi2))
2225 	      stmt = gsi_stmt (gsi2);
2226 	      if (gimple_code (stmt) == GIMPLE_LABEL)
2228 		  tree label = gimple_label_label (as_a <glabel *> (stmt));
2229 		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2231 		      found = true;
2232 		      break;
     /* ASAN_MARK instrumentation between the marker and the label is
	tolerated; anything else ends the search.  */
2235 	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2237 	      else
2238 		/* Something other is not expected. */
2239 		break;
2240 	      gsi_next (&gsi2);
2242 	  if (!found)
2243 	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2244 			"a case label or default label");
2246       break;
2247     default:
2248       break;
2250   return NULL_TREE;
2253 /* Expand all FALLTHROUGH () calls in SEQ. */
2255 static void
2256 expand_FALLTHROUGH (gimple_seq *seq_p)
2258 struct walk_stmt_info wi;
2259 memset (&wi, 0, sizeof (wi));
2260 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  The condition is gimplified to a value first; the body, if
   any, is gimplified into its own sequence while case labels are collected
   in a fresh gimplify_ctxp->case_labels vector, and the resulting
   GIMPLE_SWITCH followed by the body is appended to PRE_P.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      /* Remember whether we are nested inside another switch so that
	 FALLTHROUGH expansion below only runs at the outermost level.  */
      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* All switch-local vars must have been clobbered by now.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* A GIMPLE_SWITCH always needs a default label; synthesize an
	 empty one at the end of the body if the source had none.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
/* Gimplify the LABEL_EXPR pointed to by EXPR_P: emit a GIMPLE_LABEL into
   PRE_P, followed by a branch-prediction hint when the label carries a
   "cold" or "hot" attribute.  */

static enum gimplify_status
gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  /* Labels must belong to the function being gimplified.  */
  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
	      == current_function_decl);

  tree label = LABEL_EXPR_LABEL (*expr_p);
  glabel *label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  gimplify_seq_add_stmt (pre_p, label_stmt);

  /* Attribute-driven prediction: "cold" marks the label as unlikely to be
     reached, "hot" as likely.  */
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
						      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
						      TAKEN));

  return GS_ALL_DONE;
}
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P: emit a GIMPLE_LABEL
   into PRE_P and record the CASE_LABEL_EXPR in the nearest enclosing
   gimplify context that is collecting case labels.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  Hence the walk below is deliberately
     unguarded: some enclosing context must have case_labels.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  return GS_ALL_DONE;
}
2402 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2403 if necessary. */
2405 tree
2406 build_and_jump (tree *label_p)
2408 if (label_p == NULL)
2409 /* If there's nowhere to jump, just fall through. */
2410 return NULL_TREE;
2412 if (*label_p == NULL_TREE)
2414 tree label = create_artificial_label (UNKNOWN_LOCATION);
2415 *label_p = label;
2418 return build1 (GOTO_EXPR, void_type_node, *label_p);
2421 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2422 This also involves building a label to jump to and communicating it to
2423 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2425 static enum gimplify_status
2426 gimplify_exit_expr (tree *expr_p)
2428 tree cond = TREE_OPERAND (*expr_p, 0);
2429 tree expr;
2431 expr = build_and_jump (&gimplify_ctxp->exit_label);
2432 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2433 *expr_p = expr;
2435 return GS_OK;
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral types, get_unwidened yields the narrowest type the
     field can be read in; otherwise the field's own type is canonical.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  If any of the checks below fails, *EXPR_P is left unchanged.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK; on return *EXPR_P is
   either the stripped operand, a canonicalized conversion, or a
   VIEW_CONVERT_EXPR for conversions to non-register types.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
/* Nonlocal VLAs seen in the current function.  Used by
   gimplify_var_or_parm_decl to avoid creating more than one debug decl
   per nonlocal VLA.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes,
   chained through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (VAR_P (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip enclosing worksharing/SIMD/ACC regions; only add the
	     debug decl when not inside any other OMP region kind.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns true if the entry already existed, so
	     the copy is made only on first sight of DECL.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T from T's own
   volatility and its operands' flags.  Self-modifying expressions and
   assignments keep the flag unconditionally; constants never have it.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Start from T's own volatility, then OR in each operand's flag.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-increment value into a temporary; that snapshot
	 is what the enclosing expression sees.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart: negate the offset
	 instead for decrements.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR
   carrying the (non-constant) size in bytes; otherwise leave *EXPR_P
   unchanged.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || TREE_CODE (size) == INTEGER_CST)
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  The size expression may contain
     PLACEHOLDER_EXPRs referring to the object itself; resolve them
     against EXPR before wrapping.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
	      bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
3091 /* Don't fold inside offloading or taskreg regions: it can break code by
3092 adding decl references that weren't in the source. We'll do it during
3093 omplower pass instead. */
3095 static bool
3096 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3098 struct gimplify_omp_ctx *ctx;
3099 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3100 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3101 return false;
3102 return fold_stmt (gsi);
3105 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3106 with the pointer to the proper cilk frame. */
3107 static void
3108 gimplify_cilk_detach (gimple_seq *pre_p)
3110 tree frame = cfun->cilk_frame_decl;
3111 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3112 frame);
3113 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3114 ptrf);
3115 gimplify_seq_add_stmt(pre_p, detach);
3118 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3119 WANT_VALUE is true if the result of the call is desired. */
3121 static enum gimplify_status
3122 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3124 tree fndecl, parms, p, fnptrtype;
3125 enum gimplify_status ret;
3126 int i, nargs;
3127 gcall *call;
3128 bool builtin_va_start_p = false;
3129 location_t loc = EXPR_LOCATION (*expr_p);
3131 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3133 /* For reliable diagnostics during inlining, it is necessary that
3134 every call_expr be annotated with file and line. */
3135 if (! EXPR_HAS_LOCATION (*expr_p))
3136 SET_EXPR_LOCATION (*expr_p, input_location);
3138 /* Gimplify internal functions created in the FEs. */
3139 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3141 if (want_value)
3142 return GS_ALL_DONE;
3144 nargs = call_expr_nargs (*expr_p);
3145 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3146 auto_vec<tree> vargs (nargs);
3148 for (i = 0; i < nargs; i++)
3150 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3151 EXPR_LOCATION (*expr_p));
3152 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3155 if (EXPR_CILK_SPAWN (*expr_p))
3156 gimplify_cilk_detach (pre_p);
3157 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3158 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3159 gimplify_seq_add_stmt (pre_p, call);
3160 return GS_ALL_DONE;
3163 /* This may be a call to a builtin function.
3165 Builtin function calls may be transformed into different
3166 (and more efficient) builtin function calls under certain
3167 circumstances. Unfortunately, gimplification can muck things
3168 up enough that the builtin expanders are not aware that certain
3169 transformations are still valid.
3171 So we attempt transformation/gimplification of the call before
3172 we gimplify the CALL_EXPR. At this time we do not manage to
3173 transform all calls in the same manner as the expanders do, but
3174 we do transform most of them. */
3175 fndecl = get_callee_fndecl (*expr_p);
3176 if (fndecl
3177 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3178 switch (DECL_FUNCTION_CODE (fndecl))
3180 CASE_BUILT_IN_ALLOCA:
3181 /* If the call has been built for a variable-sized object, then we
3182 want to restore the stack level when the enclosing BIND_EXPR is
3183 exited to reclaim the allocated space; otherwise, we precisely
3184 need to do the opposite and preserve the latest stack level. */
3185 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3186 gimplify_ctxp->save_stack = true;
3187 else
3188 gimplify_ctxp->keep_stack = true;
3189 break;
3191 case BUILT_IN_VA_START:
3193 builtin_va_start_p = TRUE;
3194 if (call_expr_nargs (*expr_p) < 2)
3196 error ("too few arguments to function %<va_start%>");
3197 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3198 return GS_OK;
3201 if (fold_builtin_next_arg (*expr_p, true))
3203 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3204 return GS_OK;
3206 break;
3209 default:
3212 if (fndecl && DECL_BUILT_IN (fndecl))
3214 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3215 if (new_tree && new_tree != *expr_p)
3217 /* There was a transformation of this call which computes the
3218 same value, but in a more efficient way. Return and try
3219 again. */
3220 *expr_p = new_tree;
3221 return GS_OK;
3225 /* Remember the original function pointer type. */
3226 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3228 /* There is a sequence point before the call, so any side effects in
3229 the calling expression must occur before the actual call. Force
3230 gimplify_expr to use an internal post queue. */
3231 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3232 is_gimple_call_addr, fb_rvalue);
3234 nargs = call_expr_nargs (*expr_p);
3236 /* Get argument types for verification. */
3237 fndecl = get_callee_fndecl (*expr_p);
3238 parms = NULL_TREE;
3239 if (fndecl)
3240 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3241 else
3242 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3244 if (fndecl && DECL_ARGUMENTS (fndecl))
3245 p = DECL_ARGUMENTS (fndecl);
3246 else if (parms)
3247 p = parms;
3248 else
3249 p = NULL_TREE;
3250 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3253 /* If the last argument is __builtin_va_arg_pack () and it is not
3254 passed as a named argument, decrease the number of CALL_EXPR
3255 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3256 if (!p
3257 && i < nargs
3258 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3260 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3261 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3263 if (last_arg_fndecl
3264 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3265 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3266 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3268 tree call = *expr_p;
3270 --nargs;
3271 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3272 CALL_EXPR_FN (call),
3273 nargs, CALL_EXPR_ARGP (call));
3275 /* Copy all CALL_EXPR flags, location and block, except
3276 CALL_EXPR_VA_ARG_PACK flag. */
3277 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3278 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3279 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3280 = CALL_EXPR_RETURN_SLOT_OPT (call);
3281 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3282 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3284 /* Set CALL_EXPR_VA_ARG_PACK. */
3285 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3289 /* If the call returns twice then after building the CFG the call
3290 argument computations will no longer dominate the call because
3291 we add an abnormal incoming edge to the call. So do not use SSA
3292 vars there. */
3293 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3295 /* Gimplify the function arguments. */
3296 if (nargs > 0)
3298 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3299 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3300 PUSH_ARGS_REVERSED ? i-- : i++)
3302 enum gimplify_status t;
3304 /* Avoid gimplifying the second argument to va_start, which needs to
3305 be the plain PARM_DECL. */
3306 if ((i != 1) || !builtin_va_start_p)
3308 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3309 EXPR_LOCATION (*expr_p), ! returns_twice);
3311 if (t == GS_ERROR)
3312 ret = GS_ERROR;
3317 /* Gimplify the static chain. */
3318 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3320 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3321 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3322 else
3324 enum gimplify_status t;
3325 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3326 EXPR_LOCATION (*expr_p), ! returns_twice);
3327 if (t == GS_ERROR)
3328 ret = GS_ERROR;
3332 /* Verify the function result. */
3333 if (want_value && fndecl
3334 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3336 error_at (loc, "using result of function returning %<void%>");
3337 ret = GS_ERROR;
3340 /* Try this again in case gimplification exposed something. */
3341 if (ret != GS_ERROR)
3343 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3345 if (new_tree && new_tree != *expr_p)
3347 /* There was a transformation of this call which computes the
3348 same value, but in a more efficient way. Return and try
3349 again. */
3350 *expr_p = new_tree;
3351 return GS_OK;
3354 else
3356 *expr_p = error_mark_node;
3357 return GS_ERROR;
3360 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3361 decl. This allows us to eliminate redundant or useless
3362 calls to "const" functions. */
3363 if (TREE_CODE (*expr_p) == CALL_EXPR)
3365 int flags = call_expr_flags (*expr_p);
3366 if (flags & (ECF_CONST | ECF_PURE)
3367 /* An infinite loop is considered a side effect. */
3368 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3369 TREE_SIDE_EFFECTS (*expr_p) = 0;
3372 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3373 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3374 form and delegate the creation of a GIMPLE_CALL to
3375 gimplify_modify_expr. This is always possible because when
3376 WANT_VALUE is true, the caller wants the result of this call into
3377 a temporary, which means that we will emit an INIT_EXPR in
3378 internal_get_tmp_var which will then be handled by
3379 gimplify_modify_expr. */
3380 if (!want_value)
3382 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3383 have to do is replicate it as a GIMPLE_CALL tuple. */
3384 gimple_stmt_iterator gsi;
3385 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3386 notice_special_calls (call);
3387 if (EXPR_CILK_SPAWN (*expr_p))
3388 gimplify_cilk_detach (pre_p);
3389 gimplify_seq_add_stmt (pre_p, call);
3390 gsi = gsi_last (*pre_p);
3391 maybe_fold_stmt (&gsi);
3392 *expr_p = NULL_TREE;
3394 else
3395 /* Remember the original function type. */
3396 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3397 CALL_EXPR_FN (*expr_p));
3399 return ret;
3402 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3403 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3405 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3406 condition is true or false, respectively. If null, we should generate
3407 our own to skip over the evaluation of this specific expression.
3409 LOCUS is the source location of the COND_EXPR.
3411 This function is the tree equivalent of do_jump.
3413 shortcut_cond_r should only be called by shortcut_cond_expr. */
3415 static tree
3416 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3417 location_t locus)
/* LOCAL_LABEL is lazily created when a subexpression needs a private
   fall-through target (the "(no:)" / "(yes:)" label below) and the caller
   did not supply one.  */
3419 tree local_label = NULL_TREE;
3420 tree t, expr = NULL;
3422 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3423 retain the shortcut semantics. Just insert the gotos here;
3424 shortcut_cond_expr will append the real blocks later. */
3425 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3427 location_t new_locus;
3429 /* Turn if (a && b) into
3431 if (a); else goto no;
3432 if (b) goto yes; else goto no;
3433 (no:) */
3435 if (false_label_p == NULL)
3436 false_label_p = &local_label;
3438 /* Keep the original source location on the first 'if'. */
3439 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3440 append_to_statement_list (t, &expr);
3442 /* Set the source location of the && on the second 'if'. */
3443 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3444 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3445 new_locus);
3446 append_to_statement_list (t, &expr);
3448 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3450 location_t new_locus;
3452 /* Turn if (a || b) into
3454 if (a) goto yes;
3455 if (b) goto yes; else goto no;
3456 (yes:) */
3458 if (true_label_p == NULL)
3459 true_label_p = &local_label;
3461 /* Keep the original source location on the first 'if'. */
3462 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3463 append_to_statement_list (t, &expr);
3465 /* Set the source location of the || on the second 'if'. */
3466 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3467 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3468 new_locus);
3469 append_to_statement_list (t, &expr);
3471 else if (TREE_CODE (pred) == COND_EXPR
3472 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3473 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3475 location_t new_locus;
3477 /* As long as we're messing with gotos, turn if (a ? b : c) into
3478 if (a)
3479 if (b) goto yes; else goto no;
3480 else
3481 if (c) goto yes; else goto no;
3483 Don't do this if one of the arms has void type, which can happen
3484 in C++ when the arm is throw. */
3486 /* Keep the original source location on the first 'if'. Set the source
3487 location of the ? on the second 'if'. */
3488 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3489 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3490 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3491 false_label_p, locus),
3492 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3493 false_label_p, new_locus));
3495 else
/* Base case: PRED is not a short-circuit form, so emit a plain
   conditional jump to the true/false labels (build_and_jump creates a
   label on demand when given a pointer to a NULL label).  */
3497 expr = build3 (COND_EXPR, void_type_node, pred,
3498 build_and_jump (true_label_p),
3499 build_and_jump (false_label_p));
3500 SET_EXPR_LOCATION (expr, locus);
/* Emit the private fall-through label, if one was created above, so
   control joins back here.  */
3503 if (local_label)
3505 t = build1 (LABEL_EXPR, void_type_node, local_label);
3506 append_to_statement_list (t, &expr);
3509 return expr;
3512 /* Given a conditional expression EXPR with short-circuit boolean
3513 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3514 predicate apart into the equivalent sequence of conditionals. */
3516 static tree
3517 shortcut_cond_expr (tree expr)
3519 tree pred = TREE_OPERAND (expr, 0);
3520 tree then_ = TREE_OPERAND (expr, 1);
3521 tree else_ = TREE_OPERAND (expr, 2);
3522 tree true_label, false_label, end_label, t;
3523 tree *true_label_p;
3524 tree *false_label_p;
3525 bool emit_end, emit_false, jump_over_else;
/* then_se / else_se record whether the corresponding arm contains code
   with side effects; an arm without side effects can be dropped.  */
3526 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3527 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3529 /* First do simple transformations. */
3530 if (!else_se)
3532 /* If there is no 'else', turn
3533 if (a && b) then c
3534 into
3535 if (a) if (b) then c. */
3536 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3538 /* Keep the original source location on the first 'if'. */
3539 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3540 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3541 /* Set the source location of the && on the second 'if'. */
3542 if (EXPR_HAS_LOCATION (pred))
3543 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3544 then_ = shortcut_cond_expr (expr);
3545 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3546 pred = TREE_OPERAND (pred, 0);
3547 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3548 SET_EXPR_LOCATION (expr, locus);
3552 if (!then_se)
3554 /* If there is no 'then', turn
3555 if (a || b); else d
3556 into
3557 if (a); else if (b); else d. */
3558 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3560 /* Keep the original source location on the first 'if'. */
3561 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3562 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3563 /* Set the source location of the || on the second 'if'. */
3564 if (EXPR_HAS_LOCATION (pred))
3565 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3566 else_ = shortcut_cond_expr (expr);
3567 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3568 pred = TREE_OPERAND (pred, 0);
3569 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3570 SET_EXPR_LOCATION (expr, locus);
3574 /* If we're done, great. */
3575 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3576 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3577 return expr;
3579 /* Otherwise we need to mess with gotos. Change
3580 if (a) c; else d;
3582 if (a); else goto no;
3583 c; goto end;
3584 no: d; end:
3585 and recursively gimplify the condition. */
3587 true_label = false_label = end_label = NULL_TREE;
3589 /* If our arms just jump somewhere, hijack those labels so we don't
3590 generate jumps to jumps. */
3592 if (then_
3593 && TREE_CODE (then_) == GOTO_EXPR
3594 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3596 true_label = GOTO_DESTINATION (then_);
3597 then_ = NULL;
3598 then_se = false;
3601 if (else_
3602 && TREE_CODE (else_) == GOTO_EXPR
3603 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3605 false_label = GOTO_DESTINATION (else_);
3606 else_ = NULL;
3607 else_se = false;
3610 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3611 if (true_label)
3612 true_label_p = &true_label;
3613 else
3614 true_label_p = NULL;
3616 /* The 'else' branch also needs a label if it contains interesting code. */
3617 if (false_label || else_se)
3618 false_label_p = &false_label;
3619 else
3620 false_label_p = NULL;
3622 /* If there was nothing else in our arms, just forward the label(s). */
3623 if (!then_se && !else_se)
3624 return shortcut_cond_r (pred, true_label_p, false_label_p,
3625 EXPR_LOC_OR_LOC (expr, input_location));
3627 /* If our last subexpression already has a terminal label, reuse it. */
3628 if (else_se)
3629 t = expr_last (else_);
3630 else if (then_se)
3631 t = expr_last (then_);
3632 else
3633 t = NULL;
3634 if (t && TREE_CODE (t) == LABEL_EXPR)
3635 end_label = LABEL_EXPR_LABEL (t);
3637 /* If we don't care about jumping to the 'else' branch, jump to the end
3638 if the condition is false. */
3639 if (!false_label_p)
3640 false_label_p = &end_label;
3642 /* We only want to emit these labels if we aren't hijacking them. */
3643 emit_end = (end_label == NULL_TREE);
3644 emit_false = (false_label == NULL_TREE);
3646 /* We only emit the jump over the else clause if we have to--if the
3647 then clause may fall through. Otherwise we can wind up with a
3648 useless jump and a useless label at the end of gimplified code,
3649 which will cause us to think that this conditional as a whole
3650 falls through even if it doesn't. If we then inline a function
3651 which ends with such a condition, that can cause us to issue an
3652 inappropriate warning about control reaching the end of a
3653 non-void function. */
3654 jump_over_else = block_may_fallthru (then_);
/* Lower the predicate to a chain of conditional gotos targeting the
   labels chosen above, then assemble: predicate, then-arm, jump over
   else, false label, else-arm, end label.  */
3656 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3657 EXPR_LOC_OR_LOC (expr, input_location));
3659 expr = NULL;
3660 append_to_statement_list (pred, &expr);
3662 append_to_statement_list (then_, &expr);
3663 if (else_se)
3665 if (jump_over_else)
3667 tree last = expr_last (expr);
3668 t = build_and_jump (&end_label);
3669 if (EXPR_HAS_LOCATION (last))
3670 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3671 append_to_statement_list (t, &expr);
3673 if (emit_false)
3675 t = build1 (LABEL_EXPR, void_type_node, false_label);
3676 append_to_statement_list (t, &expr);
3678 append_to_statement_list (else_, &expr);
3680 if (emit_end && end_label)
3682 t = build1 (LABEL_EXPR, void_type_node, end_label);
3683 append_to_statement_list (t, &expr);
3686 return expr;
3689 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3691 tree
3692 gimple_boolify (tree expr)
3694 tree type = TREE_TYPE (expr);
3695 location_t loc = EXPR_LOCATION (expr);
/* Special-case `__builtin_expect (x, y) != 0': boolify X itself when it
   is a truth value, so the expectation survives boolification.  */
3697 if (TREE_CODE (expr) == NE_EXPR
3698 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3699 && integer_zerop (TREE_OPERAND (expr, 1)))
3701 tree call = TREE_OPERAND (expr, 0);
3702 tree fn = get_callee_fndecl (call);
3704 /* For __builtin_expect ((long) (x), y) recurse into x as well
3705 if x is truth_value_p. */
3706 if (fn
3707 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3708 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3709 && call_expr_nargs (call) == 2)
3711 tree arg = CALL_EXPR_ARG (call, 0)
3712 if (arg)
3714 if (TREE_CODE (arg) == NOP_EXPR
3715 && TREE_TYPE (arg) == TREE_TYPE (call))
3716 arg = TREE_OPERAND (arg, 0);
3717 if (truth_value_p (TREE_CODE (arg)))
3719 arg = gimple_boolify (arg);
3720 CALL_EXPR_ARG (call, 0)
3721 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3727 switch (TREE_CODE (expr))
3729 case TRUTH_AND_EXPR:
3730 case TRUTH_OR_EXPR:
3731 case TRUTH_XOR_EXPR:
3732 case TRUTH_ANDIF_EXPR:
3733 case TRUTH_ORIF_EXPR:
3734 /* Also boolify the arguments of truth exprs. */
3735 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3736 /* FALLTHRU */
3738 case TRUTH_NOT_EXPR:
3739 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3741 /* These expressions always produce boolean results. */
3742 if (TREE_CODE (type) != BOOLEAN_TYPE)
3743 TREE_TYPE (expr) = boolean_type_node;
3744 return expr;
3746 case ANNOTATE_EXPR:
3747 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3749 case annot_expr_ivdep_kind:
3750 case annot_expr_unroll_kind:
3751 case annot_expr_no_vector_kind:
3752 case annot_expr_vector_kind:
3753 case annot_expr_parallel_kind:
/* Loop annotations wrap the condition; boolify the wrapped
   expression and give the annotation itself boolean type.  */
3754 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3755 if (TREE_CODE (type) != BOOLEAN_TYPE)
3756 TREE_TYPE (expr) = boolean_type_node;
3757 return expr;
3758 default:
3759 gcc_unreachable ();
3762 default:
3763 if (COMPARISON_CLASS_P (expr))
3765 /* These expressions always produce boolean results. */
3766 if (TREE_CODE (type) != BOOLEAN_TYPE)
3767 TREE_TYPE (expr) = boolean_type_node;
3768 return expr;
3770 /* Other expressions that get here must have boolean values, but
3771 might need to be converted to the appropriate mode. */
3772 if (TREE_CODE (type) == BOOLEAN_TYPE)
3773 return expr;
3774 return fold_convert_loc (loc, boolean_type_node, expr);
3778 /* Given a conditional expression *EXPR_P without side effects, gimplify
3779 its operands. New statements are inserted to PRE_P. */
3781 static enum gimplify_status
3782 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3784 tree expr = *expr_p, cond;
3785 enum gimplify_status ret, tret;
3786 enum tree_code code;
3788 cond = gimple_boolify (COND_EXPR_COND (expr));
3790 /* We need to handle && and || specially, as their gimplification
3791 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3792 code = TREE_CODE (cond);
3793 if (code == TRUTH_ANDIF_EXPR)
3794 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3795 else if (code == TRUTH_ORIF_EXPR)
3796 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3797 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3798 COND_EXPR_COND (*expr_p) = cond;
3800 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3801 is_gimple_val, fb_rvalue);
3802 ret = MIN (ret, tret);
3803 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3804 is_gimple_val, fb_rvalue);
3806 return MIN (ret, tret);
3809 /* Return true if evaluating EXPR could trap.
3810 EXPR is GENERIC, while tree_could_trap_p can be called
3811 only on GIMPLE. */
3813 static bool
3814 generic_expr_could_trap_p (tree expr)
3816 unsigned i, n;
3818 if (!expr || is_gimple_val (expr))
3819 return false;
3821 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3822 return true;
3824 n = TREE_OPERAND_LENGTH (expr);
3825 for (i = 0; i < n; i++)
3826 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3827 return true;
3829 return false;
3832 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3833 into
3835 if (p) if (p)
3836 t1 = a; a;
3837 else or else
3838 t1 = b; b;
3841 The second form is used when *EXPR_P is of type void.
3843 PRE_P points to the list where side effects that must happen before
3844 *EXPR_P should be stored. */
3846 static enum gimplify_status
3847 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3849 tree expr = *expr_p;
3850 tree type = TREE_TYPE (expr);
3851 location_t loc = EXPR_LOCATION (expr);
3852 tree tmp, arm1, arm2;
3853 enum gimplify_status ret;
3854 tree label_true, label_false, label_cont;
3855 bool have_then_clause_p, have_else_clause_p;
3856 gcond *cond_stmt;
3857 enum tree_code pred_code;
/* SEQ collects the gimplified conditional; it is appended to PRE_P at
   the end, inside a conditional context.  */
3858 gimple_seq seq = NULL;
3860 /* If this COND_EXPR has a value, copy the values into a temporary within
3861 the arms. */
3862 if (!VOID_TYPE_P (type))
3864 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3865 tree result;
3867 /* If either an rvalue is ok or we do not require an lvalue, create the
3868 temporary. But we cannot do that if the type is addressable. */
3869 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3870 && !TREE_ADDRESSABLE (type))
3872 if (gimplify_ctxp->allow_rhs_cond_expr
3873 /* If either branch has side effects or could trap, it can't be
3874 evaluated unconditionally. */
3875 && !TREE_SIDE_EFFECTS (then_)
3876 && !generic_expr_could_trap_p (then_)
3877 && !TREE_SIDE_EFFECTS (else_)
3878 && !generic_expr_could_trap_p (else_))
3879 return gimplify_pure_cond_expr (expr_p, pre_p)
3881 tmp = create_tmp_var (type, "iftmp");
3882 result = tmp;
3885 /* Otherwise, only create and copy references to the values. */
3886 else
3888 type = build_pointer_type (type);
3890 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3891 then_ = build_fold_addr_expr_loc (loc, then_);
3893 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3894 else_ = build_fold_addr_expr_loc (loc, else_);
3896 expr
3897 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3899 tmp = create_tmp_var (type, "iftmp");
3900 result = build_simple_mem_ref_loc (loc, tmp);
3903 /* Build the new then clause, `tmp = then_;'. But don't build the
3904 assignment if the value is void; in C++ it can be if it's a throw. */
3905 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3906 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3908 /* Similarly, build the new else clause, `tmp = else_;'. */
3909 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3910 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3912 TREE_TYPE (expr) = void_type_node;
3913 recalculate_side_effects (expr);
3915 /* Move the COND_EXPR to the prequeue. */
3916 gimplify_stmt (&expr, pre_p);
3918 *expr_p = result;
3919 return GS_ALL_DONE;
3922 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3923 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3924 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3925 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3927 /* Make sure the condition has BOOLEAN_TYPE. */
3928 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3930 /* Break apart && and || conditions. */
3931 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3932 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3934 expr = shortcut_cond_expr (expr);
3936 if (expr != *expr_p)
3938 *expr_p = expr;
3940 /* We can't rely on gimplify_expr to re-gimplify the expanded
3941 form properly, as cleanups might cause the target labels to be
3942 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3943 set up a conditional context. */
3944 gimple_push_condition ();
3945 gimplify_stmt (expr_p, &seq);
3946 gimple_pop_condition (pre_p);
3947 gimple_seq_add_seq (pre_p, seq);
3949 return GS_ALL_DONE;
3953 /* Now do the normal gimplification. */
3955 /* Gimplify condition. */
3956 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3957 fb_rvalue);
3958 if (ret == GS_ERROR)
3959 return GS_ERROR;
3960 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3962 gimple_push_condition ();
/* When an arm is literally `goto label', reuse that label as the
   corresponding edge of the GIMPLE_COND instead of creating an
   artificial label plus a separate goto.  */
3964 have_then_clause_p = have_else_clause_p = false;
3965 if (TREE_OPERAND (expr, 1) != NULL
3966 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3967 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3968 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3969 == current_function_decl)
3970 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3971 have different locations, otherwise we end up with incorrect
3972 location information on the branches. */
3973 && (optimize
3974 || !EXPR_HAS_LOCATION (expr)
3975 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3976 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3978 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3979 have_then_clause_p = true;
3981 else
3982 label_true = create_artificial_label (UNKNOWN_LOCATION);
3983 if (TREE_OPERAND (expr, 2) != NULL
3984 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3985 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3986 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3987 == current_function_decl)
3988 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3989 have different locations, otherwise we end up with incorrect
3990 location information on the branches. */
3991 && (optimize
3992 || !EXPR_HAS_LOCATION (expr)
3993 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3994 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3996 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3997 have_else_clause_p = true;
3999 else
4000 label_false = create_artificial_label (UNKNOWN_LOCATION);
/* Emit the GIMPLE_COND itself and try to fold it immediately.  */
4002 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4003 &arm2);
4004 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4005 label_false);
4006 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4007 gimplify_seq_add_stmt (&seq, cond_stmt);
4008 gimple_stmt_iterator gsi = gsi_last (seq);
4009 maybe_fold_stmt (&gsi);
4011 label_cont = NULL_TREE;
4012 if (!have_then_clause_p)
4014 /* For if (...) {} else { code; } put label_true after
4015 the else block. */
4016 if (TREE_OPERAND (expr, 1) == NULL_TREE
4017 && !have_else_clause_p
4018 && TREE_OPERAND (expr, 2) != NULL_TREE)
4019 label_cont = label_true;
4020 else
4022 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4023 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4024 /* For if (...) { code; } else {} or
4025 if (...) { code; } else goto label; or
4026 if (...) { code; return; } else { ... }
4027 label_cont isn't needed. */
4028 if (!have_else_clause_p
4029 && TREE_OPERAND (expr, 2) != NULL_TREE
4030 && gimple_seq_may_fallthru (seq))
4032 gimple *g;
4033 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4035 g = gimple_build_goto (label_cont);
4037 /* GIMPLE_COND's are very low level; they have embedded
4038 gotos. This particular embedded goto should not be marked
4039 with the location of the original COND_EXPR, as it would
4040 correspond to the COND_EXPR's condition, not the ELSE or the
4041 THEN arms. To avoid marking it with the wrong location, flag
4042 it as "no location". */
4043 gimple_set_do_not_emit_location (g);
4045 gimplify_seq_add_stmt (&seq, g);
4049 if (!have_else_clause_p)
4051 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4052 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4054 if (label_cont)
4055 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4057 gimple_pop_condition (pre_p);
4058 gimple_seq_add_seq (pre_p, seq);
4060 if (ret == GS_ERROR)
4061 ; /* Do nothing. */
4062 else if (have_then_clause_p || have_else_clause_p)
4063 ret = GS_ALL_DONE;
4064 else
4066 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4067 expr = TREE_OPERAND (expr, 0);
4068 gimplify_stmt (&expr, pre_p);
4071 *expr_p = NULL;
4072 return ret;
4075 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4076 to be marked addressable.
4078 We cannot rely on such an expression being directly markable if a temporary
4079 has been created by the gimplification. In this case, we create another
4080 temporary and initialize it with a copy, which will become a store after we
4081 mark it addressable. This can happen if the front-end passed us something
4082 that it could not mark addressable yet, like a Fortran pass-by-reference
4083 parameter (int) floatvar. */
4085 static void
4086 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4088 while (handled_component_p (*expr_p))
4089 expr_p = &TREE_OPERAND (*expr_p, 0);
4090 if (is_gimple_reg (*expr_p))
4092 /* Do not allow an SSA name as the temporary. */
4093 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4094 DECL_GIMPLE_REG_P (var) = 0;
4095 *expr_p = var;
4099 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4100 a call to __builtin_memcpy. */
4102 static enum gimplify_status
4103 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4104 gimple_seq *seq_p)
4106 tree t, to, to_ptr, from, from_ptr;
4107 gcall *gs;
4108 location_t loc = EXPR_LOCATION (*expr_p);
4110 to = TREE_OPERAND (*expr_p, 0);
4111 from = TREE_OPERAND (*expr_p, 1);
4113 /* Mark the RHS addressable. Beware that it may not be possible to do so
4114 directly if a temporary has been created by the gimplification. */
4115 prepare_gimple_addressable (&from, seq_p);
4117 mark_addressable (from);
4118 from_ptr = build_fold_addr_expr_loc (loc, from);
4119 gimplify_arg (&from_ptr, seq_p, loc);
4121 mark_addressable (to);
4122 to_ptr = build_fold_addr_expr_loc (loc, to);
4123 gimplify_arg (&to_ptr, seq_p, loc);
4125 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4127 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4129 if (want_value)
4131 /* tmp = memcpy() */
4132 t = create_tmp_var (TREE_TYPE (to_ptr));
4133 gimple_call_set_lhs (gs, t);
4134 gimplify_seq_add_stmt (seq_p, gs);
4136 *expr_p = build_simple_mem_ref (t);
4137 return GS_ALL_DONE;
4140 gimplify_seq_add_stmt (seq_p, gs);
4141 *expr_p = NULL;
4142 return GS_ALL_DONE;
4145 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4146 a call to __builtin_memset. In this case we know that the RHS is
4147 a CONSTRUCTOR with an empty element list. */
4149 static enum gimplify_status
4150 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4151 gimple_seq *seq_p)
4153 tree t, from, to, to_ptr;
4154 gcall *gs;
4155 location_t loc = EXPR_LOCATION (*expr_p);
4157 /* Assert our assumptions, to abort instead of producing wrong code
4158 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4159 not be immediately exposed. */
4160 from = TREE_OPERAND (*expr_p, 1);
4161 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4162 from = TREE_OPERAND (from, 0);
4164 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4165 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4167 /* Now proceed. */
4168 to = TREE_OPERAND (*expr_p, 0);
4170 to_ptr = build_fold_addr_expr_loc (loc, to);
4171 gimplify_arg (&to_ptr, seq_p, loc);
4172 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4174 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4176 if (want_value)
4178 /* tmp = memset() */
4179 t = create_tmp_var (TREE_TYPE (to_ptr));
4180 gimple_call_set_lhs (gs, t);
4181 gimplify_seq_add_stmt (seq_p, gs);
4183 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4184 return GS_ALL_DONE;
4187 gimplify_seq_add_stmt (seq_p, gs);
4188 *expr_p = NULL;
4189 return GS_ALL_DONE;
4192 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4193 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4194 assignment. Return non-null if we detect a potential overlap. */
/* Context passed through walk_tree to gimplify_init_ctor_preeval_1,
   describing the lhs of the assignment being analyzed for overlap.  */
4196 struct gimplify_init_ctor_preeval_data
4198 /* The base decl of the lhs object. May be NULL, in which case we
4199 have to assume the lhs is indirect. */
4200 tree lhs_base_decl;
4202 /* The alias set of the lhs object. */
4203 alias_set_type lhs_alias_set;
4206 static tree
4207 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4209 struct gimplify_init_ctor_preeval_data *data
4210 = (struct gimplify_init_ctor_preeval_data *) xdata;
4211 tree t = *tp;
4213 /* If we find the base object, obviously we have overlap. */
4214 if (data->lhs_base_decl == t)
4215 return t;
4217 /* If the constructor component is indirect, determine if we have a
4218 potential overlap with the lhs. The only bits of information we
4219 have to go on at this point are addressability and alias sets. */
4220 if ((INDIRECT_REF_P (t)
4221 || TREE_CODE (t) == MEM_REF)
4222 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4223 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4224 return t;
4226 /* If the constructor component is a call, determine if it can hide a
4227 potential overlap with the lhs through an INDIRECT_REF like above.
4228 ??? Ugh - this is completely broken. In fact this whole analysis
4229 doesn't look conservative. */
4230 if (TREE_CODE (t) == CALL_EXPR)
4232 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4234 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4235 if (POINTER_TYPE_P (TREE_VALUE (type))
4236 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4237 && alias_sets_conflict_p (data->lhs_alias_set,
4238 get_alias_set
4239 (TREE_TYPE (TREE_VALUE (type)))))
4240 return t;
4243 if (IS_TYPE_OR_DECL_P (t))
4244 *walk_subtrees = 0;
4245 return NULL;
4248 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4249 force values that overlap with the lhs (as described by *DATA)
4250 into temporaries. */
4252 static void
4253 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4254 struct gimplify_init_ctor_preeval_data *data)
4256 enum gimplify_status one;
4258 /* If the value is constant, then there's nothing to pre-evaluate. */
4259 if (TREE_CONSTANT (*expr_p))
4261 /* Ensure it does not have side effects, it might contain a reference to
4262 the object we're initializing. */
4263 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4264 return;
4267 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4268 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4269 return;
4271 /* Recurse for nested constructors. */
4272 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4274 unsigned HOST_WIDE_INT ix;
4275 constructor_elt *ce;
4276 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4278 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4279 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4281 return;
4284 /* If this is a variable sized type, we must remember the size. */
4285 maybe_with_size_expr (expr_p);
4287 /* Gimplify the constructor element to something appropriate for the rhs
4288 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4289 the gimplifier will consider this a store to memory. Doing this
4290 gimplification now means that we won't have to deal with complicated
4291 language-specific trees, nor trees like SAVE_EXPR that can induce
4292 exponential search behavior. */
4293 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4294 if (one == GS_ERROR)
4296 *expr_p = NULL;
4297 return;
4300 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4301 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4302 always be true for all scalars, since is_gimple_mem_rhs insists on a
4303 temporary variable for them. */
4304 if (DECL_P (*expr_p))
4305 return;
4307 /* If this is of variable size, we have no choice but to assume it doesn't
4308 overlap since we can't make a temporary for it. */
4309 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4310 return;
4312 /* Otherwise, we must search for overlap ... */
4313 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4314 return;
4316 /* ... and if found, force the value into a temporary. */
4317 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4320 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4321 a RANGE_EXPR in a CONSTRUCTOR for an array.
4323 var = lower;
4324 loop_entry:
4325 object[var] = value;
4326 if (var == upper)
4327 goto loop_exit;
4328 var = var + 1;
4329 goto loop_entry;
4330 loop_exit:
4332 We increment var _after_ the loop exit check because we might otherwise
4333 fail if upper == TYPE_MAX_VALUE (type for upper).
4335 Note that we never have to deal with SAVE_EXPRs here, because this has
4336 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4338 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4339 gimple_seq *, bool);
4341 static void
4342 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4343 tree value, tree array_elt_type,
4344 gimple_seq *pre_p, bool cleared)
4346 tree loop_entry_label, loop_exit_label, fall_thru_label;
4347 tree var, var_type, cref, tmp;
4349 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4350 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4351 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4353 /* Create and initialize the index variable. */
4354 var_type = TREE_TYPE (upper);
4355 var = create_tmp_var (var_type);
4356 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4358 /* Add the loop entry label. */
4359 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4361 /* Build the reference. */
4362 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4363 var, NULL_TREE, NULL_TREE);
4365 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4366 the store. Otherwise just assign value to the reference. */
4368 if (TREE_CODE (value) == CONSTRUCTOR)
4369 /* NB we might have to call ourself recursively through
4370 gimplify_init_ctor_eval if the value is a constructor. */
4371 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4372 pre_p, cleared);
4373 else
4374 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4376 /* We exit the loop when the index var is equal to the upper bound. */
4377 gimplify_seq_add_stmt (pre_p,
4378 gimple_build_cond (EQ_EXPR, var, upper,
4379 loop_exit_label, fall_thru_label));
4381 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4383 /* Otherwise, increment the index var... */
4384 tmp = build2 (PLUS_EXPR, var_type, var,
4385 fold_convert (var_type, integer_one_node));
4386 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4388 /* ...and jump back to the loop entry. */
4389 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4391 /* Add the loop exit label. */
4392 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4395 /* Return true if FDECL is accessing a field that is zero sized. */
4397 static bool
4398 zero_sized_field_decl (const_tree fdecl)
4400 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4401 && integer_zerop (DECL_SIZE (fdecl)))
4402 return true;
4403 return false;
4406 /* Return true if TYPE is zero sized. */
4408 static bool
4409 zero_sized_type (const_tree type)
4411 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4412 && integer_zerop (TYPE_SIZE (type)))
4413 return true;
4414 return false;
4417 /* A subroutine of gimplify_init_constructor. Generate individual
4418 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4419 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4420 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4421 zeroed first. */
4423 static void
4424 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4425 gimple_seq *pre_p, bool cleared)
4427 tree array_elt_type = NULL;
4428 unsigned HOST_WIDE_INT ix;
4429 tree purpose, value;
4431 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4432 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4434 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4436 tree cref;
4438 /* NULL values are created above for gimplification errors. */
4439 if (value == NULL)
4440 continue;
4442 if (cleared && initializer_zerop (value))
4443 continue;
4445 /* ??? Here's to hoping the front end fills in all of the indices,
4446 so we don't have to figure out what's missing ourselves. */
4447 gcc_assert (purpose);
4449 /* Skip zero-sized fields, unless value has side-effects. This can
4450 happen with calls to functions returning a zero-sized type, which
4451 we shouldn't discard. As a number of downstream passes don't
4452 expect sets of zero-sized fields, we rely on the gimplification of
4453 the MODIFY_EXPR we make below to drop the assignment statement. */
4454 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4455 continue;
4457 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4458 whole range. */
4459 if (TREE_CODE (purpose) == RANGE_EXPR)
4461 tree lower = TREE_OPERAND (purpose, 0);
4462 tree upper = TREE_OPERAND (purpose, 1);
4464 /* If the lower bound is equal to upper, just treat it as if
4465 upper was the index. */
4466 if (simple_cst_equal (lower, upper))
4467 purpose = upper;
4468 else
4470 gimplify_init_ctor_eval_range (object, lower, upper, value,
4471 array_elt_type, pre_p, cleared);
4472 continue;
4476 if (array_elt_type)
4478 /* Do not use bitsizetype for ARRAY_REF indices. */
4479 if (TYPE_DOMAIN (TREE_TYPE (object)))
4480 purpose
4481 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4482 purpose);
4483 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4484 purpose, NULL_TREE, NULL_TREE);
4486 else
4488 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4489 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4490 unshare_expr (object), purpose, NULL_TREE);
4493 if (TREE_CODE (value) == CONSTRUCTOR
4494 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4495 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4496 pre_p, cleared);
4497 else
4499 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4500 gimplify_and_add (init, pre_p);
4501 ggc_free (init);
4506 /* Return the appropriate RHS predicate for this LHS. */
4508 gimple_predicate
4509 rhs_predicate_for (tree lhs)
4511 if (is_gimple_reg (lhs))
4512 return is_gimple_reg_rhs_or_call;
4513 else
4514 return is_gimple_mem_rhs_or_call;
4517 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4518 before the LHS has been gimplified. */
4520 static gimple_predicate
4521 initial_rhs_predicate_for (tree lhs)
4523 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4524 return is_gimple_reg_rhs_or_call;
4525 else
4526 return is_gimple_mem_rhs_or_call;
4529 /* Gimplify a C99 compound literal expression. This just means adding
4530 the DECL_EXPR before the current statement and using its anonymous
4531 decl instead. */
4533 static enum gimplify_status
4534 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4535 bool (*gimple_test_f) (tree),
4536 fallback_t fallback)
4538 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4539 tree decl = DECL_EXPR_DECL (decl_s);
4540 tree init = DECL_INITIAL (decl);
4541 /* Mark the decl as addressable if the compound literal
4542 expression is addressable now, otherwise it is marked too late
4543 after we gimplify the initialization expression. */
4544 if (TREE_ADDRESSABLE (*expr_p))
4545 TREE_ADDRESSABLE (decl) = 1;
4546 /* Otherwise, if we don't need an lvalue and have a literal directly
4547 substitute it. Check if it matches the gimple predicate, as
4548 otherwise we'd generate a new temporary, and we can as well just
4549 use the decl we already have. */
4550 else if (!TREE_ADDRESSABLE (decl)
4551 && init
4552 && (fallback & fb_lvalue) == 0
4553 && gimple_test_f (init))
4555 *expr_p = init;
4556 return GS_OK;
4559 /* Preliminarily mark non-addressed complex variables as eligible
4560 for promotion to gimple registers. We'll transform their uses
4561 as we find them. */
4562 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4563 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4564 && !TREE_THIS_VOLATILE (decl)
4565 && !needs_to_live_in_memory (decl))
4566 DECL_GIMPLE_REG_P (decl) = 1;
4568 /* If the decl is not addressable, then it is being used in some
4569 expression or on the right hand side of a statement, and it can
4570 be put into a readonly data section. */
4571 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4572 TREE_READONLY (decl) = 1;
4574 /* This decl isn't mentioned in the enclosing block, so add it to the
4575 list of temps. FIXME it seems a bit of a kludge to say that
4576 anonymous artificial vars aren't pushed, but everything else is. */
4577 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4578 gimple_add_tmp_var (decl);
4580 gimplify_and_add (decl_s, pre_p);
4581 *expr_p = decl;
4582 return GS_OK;
4585 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4586 return a new CONSTRUCTOR if something changed. */
4588 static tree
4589 optimize_compound_literals_in_ctor (tree orig_ctor)
4591 tree ctor = orig_ctor;
4592 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4593 unsigned int idx, num = vec_safe_length (elts);
4595 for (idx = 0; idx < num; idx++)
4597 tree value = (*elts)[idx].value;
4598 tree newval = value;
4599 if (TREE_CODE (value) == CONSTRUCTOR)
4600 newval = optimize_compound_literals_in_ctor (value);
4601 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4603 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4604 tree decl = DECL_EXPR_DECL (decl_s);
4605 tree init = DECL_INITIAL (decl);
4607 if (!TREE_ADDRESSABLE (value)
4608 && !TREE_ADDRESSABLE (decl)
4609 && init
4610 && TREE_CODE (init) == CONSTRUCTOR)
4611 newval = optimize_compound_literals_in_ctor (init);
4613 if (newval == value)
4614 continue;
4616 if (ctor == orig_ctor)
4618 ctor = copy_node (orig_ctor);
4619 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4620 elts = CONSTRUCTOR_ELTS (ctor);
4622 (*elts)[idx].value = newval;
4624 return ctor;
4627 /* A subroutine of gimplify_modify_expr. Break out elements of a
4628 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4630 Note that we still need to clear any elements that don't have explicit
4631 initializers, so if not all elements are initialized we keep the
4632 original MODIFY_EXPR, we just remove all of the constructor elements.
4634 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4635 GS_ERROR if we would have to create a temporary when gimplifying
4636 this constructor. Otherwise, return GS_OK.
4638 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4640 static enum gimplify_status
4641 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4642 bool want_value, bool notify_temp_creation)
4644 tree object, ctor, type;
4645 enum gimplify_status ret;
4646 vec<constructor_elt, va_gc> *elts;
4648 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4650 if (!notify_temp_creation)
4652 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4653 is_gimple_lvalue, fb_lvalue);
4654 if (ret == GS_ERROR)
4655 return ret;
4658 object = TREE_OPERAND (*expr_p, 0);
4659 ctor = TREE_OPERAND (*expr_p, 1)
4660 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4661 type = TREE_TYPE (ctor);
4662 elts = CONSTRUCTOR_ELTS (ctor);
4663 ret = GS_ALL_DONE;
4665 switch (TREE_CODE (type))
4667 case RECORD_TYPE:
4668 case UNION_TYPE:
4669 case QUAL_UNION_TYPE:
4670 case ARRAY_TYPE:
4672 struct gimplify_init_ctor_preeval_data preeval_data;
4673 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4674 bool cleared, complete_p, valid_const_initializer;
4676 /* Aggregate types must lower constructors to initialization of
4677 individual elements. The exception is that a CONSTRUCTOR node
4678 with no elements indicates zero-initialization of the whole. */
4679 if (vec_safe_is_empty (elts))
4681 if (notify_temp_creation)
4682 return GS_OK;
4683 break;
4686 /* Fetch information about the constructor to direct later processing.
4687 We might want to make static versions of it in various cases, and
4688 can only do so if it known to be a valid constant initializer. */
4689 valid_const_initializer
4690 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4691 &num_ctor_elements, &complete_p);
4693 /* If a const aggregate variable is being initialized, then it
4694 should never be a lose to promote the variable to be static. */
4695 if (valid_const_initializer
4696 && num_nonzero_elements > 1
4697 && TREE_READONLY (object)
4698 && VAR_P (object)
4699 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4701 if (notify_temp_creation)
4702 return GS_ERROR;
4703 DECL_INITIAL (object) = ctor;
4704 TREE_STATIC (object) = 1;
4705 if (!DECL_NAME (object))
4706 DECL_NAME (object) = create_tmp_var_name ("C");
4707 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4709 /* ??? C++ doesn't automatically append a .<number> to the
4710 assembler name, and even when it does, it looks at FE private
4711 data structures to figure out what that number should be,
4712 which are not set for this variable. I suppose this is
4713 important for local statics for inline functions, which aren't
4714 "local" in the object file sense. So in order to get a unique
4715 TU-local symbol, we must invoke the lhd version now. */
4716 lhd_set_decl_assembler_name (object);
4718 *expr_p = NULL_TREE;
4719 break;
4722 /* If there are "lots" of initialized elements, even discounting
4723 those that are not address constants (and thus *must* be
4724 computed at runtime), then partition the constructor into
4725 constant and non-constant parts. Block copy the constant
4726 parts in, then generate code for the non-constant parts. */
4727 /* TODO. There's code in cp/typeck.c to do this. */
4729 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4730 /* store_constructor will ignore the clearing of variable-sized
4731 objects. Initializers for such objects must explicitly set
4732 every field that needs to be set. */
4733 cleared = false;
4734 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4735 /* If the constructor isn't complete, clear the whole object
4736 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4738 ??? This ought not to be needed. For any element not present
4739 in the initializer, we should simply set them to zero. Except
4740 we'd need to *find* the elements that are not present, and that
4741 requires trickery to avoid quadratic compile-time behavior in
4742 large cases or excessive memory use in small cases. */
4743 cleared = true;
4744 else if (num_ctor_elements - num_nonzero_elements
4745 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4746 && num_nonzero_elements < num_ctor_elements / 4)
4747 /* If there are "lots" of zeros, it's more efficient to clear
4748 the memory and then set the nonzero elements. */
4749 cleared = true;
4750 else
4751 cleared = false;
4753 /* If there are "lots" of initialized elements, and all of them
4754 are valid address constants, then the entire initializer can
4755 be dropped to memory, and then memcpy'd out. Don't do this
4756 for sparse arrays, though, as it's more efficient to follow
4757 the standard CONSTRUCTOR behavior of memset followed by
4758 individual element initialization. Also don't do this for small
4759 all-zero initializers (which aren't big enough to merit
4760 clearing), and don't try to make bitwise copies of
4761 TREE_ADDRESSABLE types.
4763 We cannot apply such transformation when compiling chkp static
4764 initializer because creation of initializer image in the memory
4765 will require static initialization of bounds for it. It should
4766 result in another gimplification of similar initializer and we
4767 may fall into infinite loop. */
4768 if (valid_const_initializer
4769 && !(cleared || num_nonzero_elements == 0)
4770 && !TREE_ADDRESSABLE (type)
4771 && (!current_function_decl
4772 || !lookup_attribute ("chkp ctor",
4773 DECL_ATTRIBUTES (current_function_decl))))
4775 HOST_WIDE_INT size = int_size_in_bytes (type);
4776 unsigned int align;
4778 /* ??? We can still get unbounded array types, at least
4779 from the C++ front end. This seems wrong, but attempt
4780 to work around it for now. */
4781 if (size < 0)
4783 size = int_size_in_bytes (TREE_TYPE (object));
4784 if (size >= 0)
4785 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4788 /* Find the maximum alignment we can assume for the object. */
4789 /* ??? Make use of DECL_OFFSET_ALIGN. */
4790 if (DECL_P (object))
4791 align = DECL_ALIGN (object);
4792 else
4793 align = TYPE_ALIGN (type);
4795 /* Do a block move either if the size is so small as to make
4796 each individual move a sub-unit move on average, or if it
4797 is so large as to make individual moves inefficient. */
4798 if (size > 0
4799 && num_nonzero_elements > 1
4800 && (size < num_nonzero_elements
4801 || !can_move_by_pieces (size, align)))
4803 if (notify_temp_creation)
4804 return GS_ERROR;
4806 walk_tree (&ctor, force_labels_r, NULL, NULL);
4807 ctor = tree_output_constant_def (ctor);
4808 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4809 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4810 TREE_OPERAND (*expr_p, 1) = ctor;
4812 /* This is no longer an assignment of a CONSTRUCTOR, but
4813 we still may have processing to do on the LHS. So
4814 pretend we didn't do anything here to let that happen. */
4815 return GS_UNHANDLED;
4819 /* If the target is volatile, we have non-zero elements and more than
4820 one field to assign, initialize the target from a temporary. */
4821 if (TREE_THIS_VOLATILE (object)
4822 && !TREE_ADDRESSABLE (type)
4823 && num_nonzero_elements > 0
4824 && vec_safe_length (elts) > 1)
4826 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4827 TREE_OPERAND (*expr_p, 0) = temp;
4828 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4829 *expr_p,
4830 build2 (MODIFY_EXPR, void_type_node,
4831 object, temp));
4832 return GS_OK;
4835 if (notify_temp_creation)
4836 return GS_OK;
4838 /* If there are nonzero elements and if needed, pre-evaluate to capture
4839 elements overlapping with the lhs into temporaries. We must do this
4840 before clearing to fetch the values before they are zeroed-out. */
4841 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4843 preeval_data.lhs_base_decl = get_base_address (object);
4844 if (!DECL_P (preeval_data.lhs_base_decl))
4845 preeval_data.lhs_base_decl = NULL;
4846 preeval_data.lhs_alias_set = get_alias_set (object);
4848 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4849 pre_p, post_p, &preeval_data);
4852 bool ctor_has_side_effects_p
4853 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4855 if (cleared)
4857 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4858 Note that we still have to gimplify, in order to handle the
4859 case of variable sized types. Avoid shared tree structures. */
4860 CONSTRUCTOR_ELTS (ctor) = NULL;
4861 TREE_SIDE_EFFECTS (ctor) = 0;
4862 object = unshare_expr (object);
4863 gimplify_stmt (expr_p, pre_p);
4866 /* If we have not block cleared the object, or if there are nonzero
4867 elements in the constructor, or if the constructor has side effects,
4868 add assignments to the individual scalar fields of the object. */
4869 if (!cleared
4870 || num_nonzero_elements > 0
4871 || ctor_has_side_effects_p)
4872 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4874 *expr_p = NULL_TREE;
4876 break;
4878 case COMPLEX_TYPE:
4880 tree r, i;
4882 if (notify_temp_creation)
4883 return GS_OK;
4885 /* Extract the real and imaginary parts out of the ctor. */
4886 gcc_assert (elts->length () == 2);
4887 r = (*elts)[0].value;
4888 i = (*elts)[1].value;
4889 if (r == NULL || i == NULL)
4891 tree zero = build_zero_cst (TREE_TYPE (type));
4892 if (r == NULL)
4893 r = zero;
4894 if (i == NULL)
4895 i = zero;
4898 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4899 represent creation of a complex value. */
4900 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4902 ctor = build_complex (type, r, i);
4903 TREE_OPERAND (*expr_p, 1) = ctor;
4905 else
4907 ctor = build2 (COMPLEX_EXPR, type, r, i);
4908 TREE_OPERAND (*expr_p, 1) = ctor;
4909 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4910 pre_p,
4911 post_p,
4912 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4913 fb_rvalue);
4916 break;
4918 case VECTOR_TYPE:
4920 unsigned HOST_WIDE_INT ix;
4921 constructor_elt *ce;
4923 if (notify_temp_creation)
4924 return GS_OK;
4926 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4927 if (TREE_CONSTANT (ctor))
4929 bool constant_p = true;
4930 tree value;
4932 /* Even when ctor is constant, it might contain non-*_CST
4933 elements, such as addresses or trapping values like
4934 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4935 in VECTOR_CST nodes. */
4936 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4937 if (!CONSTANT_CLASS_P (value))
4939 constant_p = false;
4940 break;
4943 if (constant_p)
4945 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4946 break;
4949 TREE_CONSTANT (ctor) = 0;
4952 /* Vector types use CONSTRUCTOR all the way through gimple
4953 compilation as a general initializer. */
4954 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4956 enum gimplify_status tret;
4957 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4958 fb_rvalue);
4959 if (tret == GS_ERROR)
4960 ret = GS_ERROR;
4961 else if (TREE_STATIC (ctor)
4962 && !initializer_constant_valid_p (ce->value,
4963 TREE_TYPE (ce->value)))
4964 TREE_STATIC (ctor) = 0;
4966 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4967 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4969 break;
4971 default:
4972 /* So how did we get a CONSTRUCTOR for a scalar type? */
4973 gcc_unreachable ();
4976 if (ret == GS_ERROR)
4977 return GS_ERROR;
4978 /* If we have gimplified both sides of the initializer but have
4979 not emitted an assignment, do so now. */
4980 if (*expr_p)
4982 tree lhs = TREE_OPERAND (*expr_p, 0);
4983 tree rhs = TREE_OPERAND (*expr_p, 1);
4984 if (want_value && object == lhs)
4985 lhs = unshare_expr (lhs);
4986 gassign *init = gimple_build_assign (lhs, rhs);
4987 gimplify_seq_add_stmt (pre_p, init);
4989 if (want_value)
4991 *expr_p = object;
4992 return GS_OK;
4994 else
4996 *expr_p = NULL;
4997 return GS_ALL_DONE;
5001 /* Given a pointer value OP0, return a simplified version of an
5002 indirection through OP0, or NULL_TREE if no simplification is
5003 possible. This may only be applied to a rhs of an expression.
5004 Note that the resulting type may be different from the type pointed
5005 to in the sense that it is still compatible from the langhooks
5006 point of view. */
5008 static tree
5009 gimple_fold_indirect_ref_rhs (tree t)
5011 return gimple_fold_indirect_ref (t);
5014 /* Subroutine of gimplify_modify_expr to do simplifications of
5015 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5016 something changes. */
5018 static enum gimplify_status
5019 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5020 gimple_seq *pre_p, gimple_seq *post_p,
5021 bool want_value)
5023 enum gimplify_status ret = GS_UNHANDLED;
5024 bool changed;
5028 changed = false;
5029 switch (TREE_CODE (*from_p))
5031 case VAR_DECL:
5032 /* If we're assigning from a read-only variable initialized with
5033 a constructor, do the direct assignment from the constructor,
5034 but only if neither source nor target are volatile since this
5035 latter assignment might end up being done on a per-field basis. */
5036 if (DECL_INITIAL (*from_p)
5037 && TREE_READONLY (*from_p)
5038 && !TREE_THIS_VOLATILE (*from_p)
5039 && !TREE_THIS_VOLATILE (*to_p)
5040 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5042 tree old_from = *from_p;
5043 enum gimplify_status subret;
5045 /* Move the constructor into the RHS. */
5046 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5048 /* Let's see if gimplify_init_constructor will need to put
5049 it in memory. */
5050 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5051 false, true);
5052 if (subret == GS_ERROR)
5054 /* If so, revert the change. */
5055 *from_p = old_from;
5057 else
5059 ret = GS_OK;
5060 changed = true;
5063 break;
5064 case INDIRECT_REF:
5066 /* If we have code like
5068 *(const A*)(A*)&x
5070 where the type of "x" is a (possibly cv-qualified variant
5071 of "A"), treat the entire expression as identical to "x".
5072 This kind of code arises in C++ when an object is bound
5073 to a const reference, and if "x" is a TARGET_EXPR we want
5074 to take advantage of the optimization below. */
5075 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5076 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5077 if (t)
5079 if (TREE_THIS_VOLATILE (t) != volatile_p)
5081 if (DECL_P (t))
5082 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5083 build_fold_addr_expr (t));
5084 if (REFERENCE_CLASS_P (t))
5085 TREE_THIS_VOLATILE (t) = volatile_p;
5087 *from_p = t;
5088 ret = GS_OK;
5089 changed = true;
5091 break;
5094 case TARGET_EXPR:
5096 /* If we are initializing something from a TARGET_EXPR, strip the
5097 TARGET_EXPR and initialize it directly, if possible. This can't
5098 be done if the initializer is void, since that implies that the
5099 temporary is set in some non-trivial way.
5101 ??? What about code that pulls out the temp and uses it
5102 elsewhere? I think that such code never uses the TARGET_EXPR as
5103 an initializer. If I'm wrong, we'll die because the temp won't
5104 have any RTL. In that case, I guess we'll need to replace
5105 references somehow. */
5106 tree init = TARGET_EXPR_INITIAL (*from_p);
5108 if (init
5109 && !VOID_TYPE_P (TREE_TYPE (init)))
5111 *from_p = init;
5112 ret = GS_OK;
5113 changed = true;
5116 break;
5118 case COMPOUND_EXPR:
5119 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5120 caught. */
5121 gimplify_compound_expr (from_p, pre_p, true);
5122 ret = GS_OK;
5123 changed = true;
5124 break;
5126 case CONSTRUCTOR:
5127 /* If we already made some changes, let the front end have a
5128 crack at this before we break it down. */
5129 if (ret != GS_UNHANDLED)
5130 break;
5131 /* If we're initializing from a CONSTRUCTOR, break this into
5132 individual MODIFY_EXPRs. */
5133 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5134 false);
5136 case COND_EXPR:
5137 /* If we're assigning to a non-register type, push the assignment
5138 down into the branches. This is mandatory for ADDRESSABLE types,
5139 since we cannot generate temporaries for such, but it saves a
5140 copy in other cases as well. */
5141 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5143 /* This code should mirror the code in gimplify_cond_expr. */
5144 enum tree_code code = TREE_CODE (*expr_p);
5145 tree cond = *from_p;
5146 tree result = *to_p;
5148 ret = gimplify_expr (&result, pre_p, post_p,
5149 is_gimple_lvalue, fb_lvalue);
5150 if (ret != GS_ERROR)
5151 ret = GS_OK;
5153 /* If we are going to write RESULT more than once, clear
5154 TREE_READONLY flag, otherwise we might incorrectly promote
5155 the variable to static const and initialize it at compile
5156 time in one of the branches. */
5157 if (VAR_P (result)
5158 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5159 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5160 TREE_READONLY (result) = 0;
5161 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5162 TREE_OPERAND (cond, 1)
5163 = build2 (code, void_type_node, result,
5164 TREE_OPERAND (cond, 1));
5165 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5166 TREE_OPERAND (cond, 2)
5167 = build2 (code, void_type_node, unshare_expr (result),
5168 TREE_OPERAND (cond, 2));
5170 TREE_TYPE (cond) = void_type_node;
5171 recalculate_side_effects (cond);
5173 if (want_value)
5175 gimplify_and_add (cond, pre_p);
5176 *expr_p = unshare_expr (result);
5178 else
5179 *expr_p = cond;
5180 return ret;
5182 break;
5184 case CALL_EXPR:
5185 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5186 return slot so that we don't generate a temporary. */
5187 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5188 && aggregate_value_p (*from_p, *from_p))
5190 bool use_target;
5192 if (!(rhs_predicate_for (*to_p))(*from_p))
5193 /* If we need a temporary, *to_p isn't accurate. */
5194 use_target = false;
5195 /* It's OK to use the return slot directly unless it's an NRV. */
5196 else if (TREE_CODE (*to_p) == RESULT_DECL
5197 && DECL_NAME (*to_p) == NULL_TREE
5198 && needs_to_live_in_memory (*to_p))
5199 use_target = true;
5200 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5201 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5202 /* Don't force regs into memory. */
5203 use_target = false;
5204 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5205 /* It's OK to use the target directly if it's being
5206 initialized. */
5207 use_target = true;
5208 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5209 != INTEGER_CST)
5210 /* Always use the target and thus RSO for variable-sized types.
5211 GIMPLE cannot deal with a variable-sized assignment
5212 embedded in a call statement. */
5213 use_target = true;
5214 else if (TREE_CODE (*to_p) != SSA_NAME
5215 && (!is_gimple_variable (*to_p)
5216 || needs_to_live_in_memory (*to_p)))
5217 /* Don't use the original target if it's already addressable;
5218 if its address escapes, and the called function uses the
5219 NRV optimization, a conforming program could see *to_p
5220 change before the called function returns; see c++/19317.
5221 When optimizing, the return_slot pass marks more functions
5222 as safe after we have escape info. */
5223 use_target = false;
5224 else
5225 use_target = true;
5227 if (use_target)
5229 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5230 mark_addressable (*to_p);
5233 break;
5235 case WITH_SIZE_EXPR:
5236 /* Likewise for calls that return an aggregate of non-constant size,
5237 since we would not be able to generate a temporary at all. */
5238 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5240 *from_p = TREE_OPERAND (*from_p, 0);
5241 /* We don't change ret in this case because the
5242 WITH_SIZE_EXPR might have been added in
5243 gimplify_modify_expr, so returning GS_OK would lead to an
5244 infinite loop. */
5245 changed = true;
5247 break;
5249 /* If we're initializing from a container, push the initialization
5250 inside it. */
5251 case CLEANUP_POINT_EXPR:
5252 case BIND_EXPR:
5253 case STATEMENT_LIST:
5255 tree wrap = *from_p;
5256 tree t;
5258 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5259 fb_lvalue);
5260 if (ret != GS_ERROR)
5261 ret = GS_OK;
5263 t = voidify_wrapper_expr (wrap, *expr_p);
5264 gcc_assert (t == *expr_p);
5266 if (want_value)
5268 gimplify_and_add (wrap, pre_p);
5269 *expr_p = unshare_expr (*to_p);
5271 else
5272 *expr_p = wrap;
5273 return GS_OK;
5276 case COMPOUND_LITERAL_EXPR:
5278 tree complit = TREE_OPERAND (*expr_p, 1);
5279 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5280 tree decl = DECL_EXPR_DECL (decl_s);
5281 tree init = DECL_INITIAL (decl);
5283 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5284 into struct T x = { 0, 1, 2 } if the address of the
5285 compound literal has never been taken. */
5286 if (!TREE_ADDRESSABLE (complit)
5287 && !TREE_ADDRESSABLE (decl)
5288 && init)
5290 *expr_p = copy_node (*expr_p);
5291 TREE_OPERAND (*expr_p, 1) = init;
5292 return GS_OK;
5296 default:
5297 break;
5300 while (changed);
5302 return ret;
5306 /* Return true if T looks like a valid GIMPLE statement. */
5308 static bool
5309 is_gimple_stmt (tree t)
5311 const enum tree_code code = TREE_CODE (t);
5313 switch (code)
5315 case NOP_EXPR:
5316 /* The only valid NOP_EXPR is the empty statement. */
5317 return IS_EMPTY_STMT (t);
5319 case BIND_EXPR:
5320 case COND_EXPR:
5321 /* These are only valid if they're void. */
5322 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5324 case SWITCH_EXPR:
5325 case GOTO_EXPR:
5326 case RETURN_EXPR:
5327 case LABEL_EXPR:
5328 case CASE_LABEL_EXPR:
5329 case TRY_CATCH_EXPR:
5330 case TRY_FINALLY_EXPR:
5331 case EH_FILTER_EXPR:
5332 case CATCH_EXPR:
5333 case ASM_EXPR:
5334 case STATEMENT_LIST:
5335 case OACC_PARALLEL:
5336 case OACC_KERNELS:
5337 case OACC_DATA:
5338 case OACC_HOST_DATA:
5339 case OACC_DECLARE:
5340 case OACC_UPDATE:
5341 case OACC_ENTER_DATA:
5342 case OACC_EXIT_DATA:
5343 case OACC_CACHE:
5344 case OMP_PARALLEL:
5345 case OMP_FOR:
5346 case OMP_SIMD:
5347 case CILK_SIMD:
5348 case OMP_DISTRIBUTE:
5349 case OACC_LOOP:
5350 case OMP_SECTIONS:
5351 case OMP_SECTION:
5352 case OMP_SINGLE:
5353 case OMP_MASTER:
5354 case OMP_TASKGROUP:
5355 case OMP_ORDERED:
5356 case OMP_CRITICAL:
5357 case OMP_TASK:
5358 case OMP_TARGET:
5359 case OMP_TARGET_DATA:
5360 case OMP_TARGET_UPDATE:
5361 case OMP_TARGET_ENTER_DATA:
5362 case OMP_TARGET_EXIT_DATA:
5363 case OMP_TASKLOOP:
5364 case OMP_TEAMS:
5365 /* These are always void. */
5366 return true;
5368 case CALL_EXPR:
5369 case MODIFY_EXPR:
5370 case PREDICT_EXPR:
5371 /* These are valid regardless of their type. */
5372 return true;
5374 default:
5375 return false;
5380 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5381 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5382 DECL_GIMPLE_REG_P set.
5384 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5385 other, unmodified part of the complex object just before the total store.
5386 As a consequence, if the object is still uninitialized, an undefined value
5387 will be loaded into a register, which may result in a spurious exception
5388 if the register is floating-point and the value happens to be a signaling
5389 NaN for example. Then the fully-fledged complex operations lowering pass
5390 followed by a DCE pass are necessary in order to fix things up. */
5392 static enum gimplify_status
5393 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5394 bool want_value)
5396 enum tree_code code, ocode;
5397 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5399 lhs = TREE_OPERAND (*expr_p, 0);
5400 rhs = TREE_OPERAND (*expr_p, 1);
5401 code = TREE_CODE (lhs);
5402 lhs = TREE_OPERAND (lhs, 0);
5404 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5405 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5406 TREE_NO_WARNING (other) = 1;
5407 other = get_formal_tmp_var (other, pre_p);
5409 realpart = code == REALPART_EXPR ? rhs : other;
5410 imagpart = code == REALPART_EXPR ? other : rhs;
5412 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5413 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5414 else
5415 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5417 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5418 *expr_p = (want_value) ? rhs : NULL_TREE;
5420 return GS_ALL_DONE;
5423 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5425 modify_expr
5426 : varname '=' rhs
5427 | '*' ID '=' rhs
5429 PRE_P points to the list where side effects that must happen before
5430 *EXPR_P should be stored.
5432 POST_P points to the list where side effects that must happen after
5433 *EXPR_P should be stored.
5435 WANT_VALUE is nonzero iff we want to use the value of this expression
5436 in another expression.
   Returns GS_ALL_DONE (or GS_OK when a value is produced), or GS_ERROR
   on failure; on success the assignment has been emitted into *PRE_P.  */
5438 static enum gimplify_status
5439 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5440 bool want_value)
5442 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5443 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5444 enum gimplify_status ret = GS_UNHANDLED;
5445 gimple *assign;
5446 location_t loc = EXPR_LOCATION (*expr_p);
5447 gimple_stmt_iterator gsi;
5449 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5450 || TREE_CODE (*expr_p) == INIT_EXPR);
5452 /* Trying to simplify a clobber using normal logic doesn't work,
5453 so handle it here. */
5454 if (TREE_CLOBBER_P (*from_p))
5456 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5457 if (ret == GS_ERROR)
5458 return ret;
5459 gcc_assert (!want_value
5460 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5461 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5462 *expr_p = NULL;
5463 return GS_ALL_DONE;
5466 /* Insert pointer conversions required by the middle-end that are not
5467 required by the frontend. This fixes middle-end type checking for
5468 for example gcc.dg/redecl-6.c. */
5469 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5471 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5472 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5473 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5476 /* See if any simplifications can be done based on what the RHS is. */
5477 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5478 want_value);
5479 if (ret != GS_UNHANDLED)
5480 return ret;
5482 /* For zero sized types only gimplify the left hand side and right hand
5483 side as statements and throw away the assignment. Do this after
5484 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5485 types properly. */
5486 if (zero_sized_type (TREE_TYPE (*from_p))
5487 && !want_value
5488 /* Don't do this for calls that return addressable types, expand_call
5489 relies on those having a lhs. */
5490 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5491 && TREE_CODE (*from_p) == CALL_EXPR))
5493 gimplify_stmt (from_p, pre_p);
5494 gimplify_stmt (to_p, pre_p);
5495 *expr_p = NULL_TREE;
5496 return GS_ALL_DONE;
5499 /* If the value being copied is of variable width, compute the length
5500 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5501 before gimplifying any of the operands so that we can resolve any
5502 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5503 the size of the expression to be copied, not of the destination, so
5504 that is what we must do here. */
5505 maybe_with_size_expr (from_p);
5507 /* As a special case, we have to temporarily allow for assignments
5508 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5509 a toplevel statement, when gimplifying the GENERIC expression
5510 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5511 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5513 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5514 prevent gimplify_expr from trying to create a new temporary for
5515 foo's LHS, we tell it that it should only gimplify until it
5516 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5517 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5518 and all we need to do here is set 'a' to be its LHS. */
5520 /* Gimplify the RHS first for C++17 and bug 71104. */
5521 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5522 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5523 if (ret == GS_ERROR)
5524 return ret;
5526 /* Then gimplify the LHS. */
5527 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5528 twice we have to make sure to gimplify into non-SSA as otherwise
5529 the abnormal edge added later will make those defs not dominate
5530 their uses.
5531 ??? Technically this applies only to the registers used in the
5532 resulting non-register *TO_P. */
5533 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5534 if (saved_into_ssa
5535 && TREE_CODE (*from_p) == CALL_EXPR
5536 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5537 gimplify_ctxp->into_ssa = false;
5538 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5539 gimplify_ctxp->into_ssa = saved_into_ssa;
5540 if (ret == GS_ERROR)
5541 return ret;
5543 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5544 guess for the predicate was wrong. */
5545 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5546 if (final_pred != initial_pred)
5548 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5549 if (ret == GS_ERROR)
5550 return ret;
5553 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5554 size as argument to the call. */
5555 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5557 tree call = TREE_OPERAND (*from_p, 0);
5558 tree vlasize = TREE_OPERAND (*from_p, 1);
5560 if (TREE_CODE (call) == CALL_EXPR
5561 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5563 int nargs = call_expr_nargs (call);
5564 tree type = TREE_TYPE (call);
5565 tree ap = CALL_EXPR_ARG (call, 0);
5566 tree tag = CALL_EXPR_ARG (call, 1);
5567 tree aptag = CALL_EXPR_ARG (call, 2);
5568 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5569 IFN_VA_ARG, type,
5570 nargs + 1, ap, tag,
5571 aptag, vlasize);
5572 TREE_OPERAND (*from_p, 0) = newcall;
5576 /* Now see if the above changed *from_p to something we handle specially. */
5577 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5578 want_value);
5579 if (ret != GS_UNHANDLED)
5580 return ret;
5582 /* If we've got a variable sized assignment between two lvalues (i.e. does
5583 not involve a call), then we can make things a bit more straightforward
5584 by converting the assignment to memcpy or memset. */
5585 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5587 tree from = TREE_OPERAND (*from_p, 0);
5588 tree size = TREE_OPERAND (*from_p, 1);
5590 if (TREE_CODE (from) == CONSTRUCTOR)
5591 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5593 if (is_gimple_addressable (from))
5595 *from_p = from;
5596 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5597 pre_p);
5601 /* Transform partial stores to non-addressable complex variables into
5602 total stores. This allows us to use real instead of virtual operands
5603 for these variables, which improves optimization. */
5604 if ((TREE_CODE (*to_p) == REALPART_EXPR
5605 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5606 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5607 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5609 /* Try to alleviate the effects of the gimplification creating artificial
5610 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5611 make sure not to create DECL_DEBUG_EXPR links across functions. */
5612 if (!gimplify_ctxp->into_ssa
5613 && VAR_P (*from_p)
5614 && DECL_IGNORED_P (*from_p)
5615 && DECL_P (*to_p)
5616 && !DECL_IGNORED_P (*to_p)
5617 && decl_function_context (*to_p) == current_function_decl
5618 && decl_function_context (*from_p) == current_function_decl)
5620 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5621 DECL_NAME (*from_p)
5622 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5623 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5624 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   /* If the caller wants the value and the LHS is volatile, evaluate the
      RHS into a temporary first so the value can be returned below
      without re-reading the volatile location.  */
5627 if (want_value && TREE_THIS_VOLATILE (*to_p))
5628 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5630 if (TREE_CODE (*from_p) == CALL_EXPR)
5632 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5633 instead of a GIMPLE_ASSIGN. */
5634 gcall *call_stmt;
5635 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5637 /* Gimplify internal functions created in the FEs. */
5638 int nargs = call_expr_nargs (*from_p), i;
5639 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5640 auto_vec<tree> vargs (nargs);
5642 for (i = 0; i < nargs; i++)
5644 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5645 EXPR_LOCATION (*from_p));
5646 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5648 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5649 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5650 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5652 else
5654 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5655 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5656 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5657 tree fndecl = get_callee_fndecl (*from_p);
5658 if (fndecl
5659 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5660 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5661 && call_expr_nargs (*from_p) == 3)
5662 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5663 CALL_EXPR_ARG (*from_p, 0),
5664 CALL_EXPR_ARG (*from_p, 1),
5665 CALL_EXPR_ARG (*from_p, 2));
5666 else
5668 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5671 notice_special_calls (call_stmt);
5672 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5673 gimple_call_set_lhs (call_stmt, *to_p);
5674 else if (TREE_CODE (*to_p) == SSA_NAME)
5675 /* The above is somewhat premature, avoid ICEing later for a
5676 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5677 ??? This doesn't make it a default-def. */
5678 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5680 if (EXPR_CILK_SPAWN (*from_p))
5681 gimplify_cilk_detach (pre_p);
5682 assign = call_stmt;
5684 else
5686 assign = gimple_build_assign (*to_p, *from_p);
5687 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5688 if (COMPARISON_CLASS_P (*from_p))
5689 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5692 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5694 /* We should have got an SSA name from the start. */
5695 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5696 || ! gimple_in_ssa_p (cfun));
5699 gimplify_seq_add_stmt (pre_p, assign);
   /* Try to fold the statement we just emitted, in place.  */
5700 gsi = gsi_last (*pre_p);
5701 maybe_fold_stmt (&gsi);
   /* When the value is wanted, return the stored value; for a volatile
      LHS the temporary made above is used so that no extra volatile
      load is generated.  */
5703 if (want_value)
5705 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5706 return GS_OK;
5708 else
5709 *expr_p = NULL;
5711 return GS_ALL_DONE;
5714 /* Gimplify a comparison between two variable-sized objects. Do this
5715 with a call to BUILT_IN_MEMCMP. */
5717 static enum gimplify_status
5718 gimplify_variable_sized_compare (tree *expr_p)
5720 location_t loc = EXPR_LOCATION (*expr_p);
5721 tree op0 = TREE_OPERAND (*expr_p, 0);
5722 tree op1 = TREE_OPERAND (*expr_p, 1);
5723 tree t, arg, dest, src, expr;
5725 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5726 arg = unshare_expr (arg);
5727 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5728 src = build_fold_addr_expr_loc (loc, op1);
5729 dest = build_fold_addr_expr_loc (loc, op0);
5730 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5731 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5733 expr
5734 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5735 SET_EXPR_LOCATION (expr, loc);
5736 *expr_p = expr;
5738 return GS_OK;
5741 /* Gimplify a comparison between two aggregate objects of integral scalar
5742 mode as a comparison between the bitwise equivalent scalar values. */
5744 static enum gimplify_status
5745 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5747 location_t loc = EXPR_LOCATION (*expr_p);
5748 tree op0 = TREE_OPERAND (*expr_p, 0);
5749 tree op1 = TREE_OPERAND (*expr_p, 1);
5751 tree type = TREE_TYPE (op0);
5752 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5754 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5755 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5757 *expr_p
5758 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5760 return GS_OK;
5763 /* Gimplify an expression sequence. This function gimplifies each
5764 expression and rewrites the original expression with the last
5765 expression of the sequence in GIMPLE form.
5767 PRE_P points to the list where the side effects for all the
5768 expressions in the sequence will be emitted.
5770 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5772 static enum gimplify_status
5773 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5775 tree t = *expr_p;
5779 tree *sub_p = &TREE_OPERAND (t, 0);
5781 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5782 gimplify_compound_expr (sub_p, pre_p, false);
5783 else
5784 gimplify_stmt (sub_p, pre_p);
5786 t = TREE_OPERAND (t, 1);
5788 while (TREE_CODE (t) == COMPOUND_EXPR);
5790 *expr_p = t;
5791 if (want_value)
5792 return GS_OK;
5793 else
5795 gimplify_stmt (expr_p, pre_p);
5796 return GS_ALL_DONE;
5800 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5801 gimplify. After gimplification, EXPR_P will point to a new temporary
5802 that holds the original value of the SAVE_EXPR node.
5804 PRE_P points to the list where side effects that must happen before
5805 *EXPR_P should be stored. */
5807 static enum gimplify_status
5808 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5810 enum gimplify_status ret = GS_ALL_DONE;
5811 tree val;
5813 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5814 val = TREE_OPERAND (*expr_p, 0);
5816 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5817 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5819 /* The operand may be a void-valued expression. It is
5820 being executed only for its side-effects. */
5821 if (TREE_TYPE (val) == void_type_node)
5823 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5824 is_gimple_stmt, fb_none);
5825 val = NULL;
5827 else
5828 /* The temporary may not be an SSA name as later abnormal and EH
5829 control flow may invalidate use/def domination. */
5830 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5832 TREE_OPERAND (*expr_p, 0) = val;
5833 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5836 *expr_p = val;
5838 return ret;
5841 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5843 unary_expr
5844 : ...
5845 | '&' varname
5848 PRE_P points to the list where side effects that must happen before
5849 *EXPR_P should be stored.
5851 POST_P points to the list where side effects that must happen after
5852 *EXPR_P should be stored.
   Returns GS_OK when the expression was rewritten, GS_ERROR when the
   operand could not be made addressable.  */
5854 static enum gimplify_status
5855 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5857 tree expr = *expr_p;
5858 tree op0 = TREE_OPERAND (expr, 0);
5859 enum gimplify_status ret;
5860 location_t loc = EXPR_LOCATION (*expr_p);
5862 switch (TREE_CODE (op0))
5864 case INDIRECT_REF:
5865 do_indirect_ref:
5866 /* Check if we are dealing with an expression of the form '&*ptr'.
5867 While the front end folds away '&*ptr' into 'ptr', these
5868 expressions may be generated internally by the compiler (e.g.,
5869 builtins like __builtin_va_end). */
5870 /* Caution: the silent array decomposition semantics we allow for
5871 ADDR_EXPR means we can't always discard the pair. */
5872 /* Gimplification of the ADDR_EXPR operand may drop
5873 cv-qualification conversions, so make sure we add them if
5874 needed. */
5876 tree op00 = TREE_OPERAND (op0, 0);
5877 tree t_expr = TREE_TYPE (expr);
5878 tree t_op00 = TREE_TYPE (op00);
5880 if (!useless_type_conversion_p (t_expr, t_op00))
5881 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5882 *expr_p = op00;
5883 ret = GS_OK;
5885 break;
5887 case VIEW_CONVERT_EXPR:
5888 /* Take the address of our operand and then convert it to the type of
5889 this ADDR_EXPR.
5891 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5892 all clear. The impact of this transformation is even less clear. */
5894 /* If the operand is a useless conversion, look through it. Doing so
5895 guarantees that the ADDR_EXPR and its operand will remain of the
5896 same type. */
5897 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5898 op0 = TREE_OPERAND (op0, 0);
5900 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5901 build_fold_addr_expr_loc (loc,
5902 TREE_OPERAND (op0, 0)));
5903 ret = GS_OK;
5904 break;
   /* '&MEM[ptr, 0]' is equivalent to '&*ptr'; handle it the same way.  */
5906 case MEM_REF:
5907 if (integer_zerop (TREE_OPERAND (op0, 1)))
5908 goto do_indirect_ref;
5910 /* fall through */
5912 default:
5913 /* If we see a call to a declared builtin or see its address
5914 being taken (we can unify those cases here) then we can mark
5915 the builtin for implicit generation by GCC. */
5916 if (TREE_CODE (op0) == FUNCTION_DECL
5917 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5918 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5919 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5921 /* We use fb_either here because the C frontend sometimes takes
5922 the address of a call that returns a struct; see
5923 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5924 the implied temporary explicit. */
5926 /* Make the operand addressable. */
5927 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5928 is_gimple_addressable, fb_either);
5929 if (ret == GS_ERROR)
5930 break;
5932 /* Then mark it. Beware that it may not be possible to do so directly
5933 if a temporary has been created by the gimplification. */
5934 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5936 op0 = TREE_OPERAND (expr, 0);
5938 /* For various reasons, the gimplification of the expression
5939 may have made a new INDIRECT_REF. */
5940 if (TREE_CODE (op0) == INDIRECT_REF)
5941 goto do_indirect_ref;
5943 mark_addressable (TREE_OPERAND (expr, 0));
5945 /* The FEs may end up building ADDR_EXPRs early on a decl with
5946 an incomplete type. Re-build ADDR_EXPRs in canonical form
5947 here. */
5948 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5949 *expr_p = build_fold_addr_expr (op0);
5951 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5952 recompute_tree_invariant_for_addr_expr (*expr_p);
5954 /* If we re-built the ADDR_EXPR add a conversion to the original type
5955 if required. */
5956 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5957 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5959 break;
5962 return ret;
5965 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5966 value; output operands should be a gimple lvalue.
   On success the GIMPLE_ASM is appended to *PRE_P; asms with erroneous
   operands are not added to the IL.  Returns GS_ALL_DONE or GS_ERROR.  */
5968 static enum gimplify_status
5969 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5971 tree expr;
5972 int noutputs;
5973 const char **oconstraints;
5974 int i;
5975 tree link;
5976 const char *constraint;
5977 bool allows_mem, allows_reg, is_inout;
5978 enum gimplify_status ret, tret;
5979 gasm *stmt;
5980 vec<tree, va_gc> *inputs;
5981 vec<tree, va_gc> *outputs;
5982 vec<tree, va_gc> *clobbers;
5983 vec<tree, va_gc> *labels;
5984 tree link_next;
5986 expr = *expr_p;
5987 noutputs = list_length (ASM_OUTPUTS (expr));
5988 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5990 inputs = NULL;
5991 outputs = NULL;
5992 clobbers = NULL;
5993 labels = NULL;
5995 ret = GS_ALL_DONE;
5996 link_next = NULL_TREE;
   /* First pass: gimplify each output operand as an lvalue, recording
      its constraint in OCONSTRAINTS; "+" (in/out) operands are split
      into a pure output plus a matching numbered input below.  */
5997 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5999 bool ok;
6000 size_t constraint_len;
6002 link_next = TREE_CHAIN (link);
6004 oconstraints[i]
6005 = constraint
6006 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6007 constraint_len = strlen (constraint);
6008 if (constraint_len == 0)
6009 continue;
6011 ok = parse_output_constraint (&constraint, i, 0, 0,
6012 &allows_mem, &allows_reg, &is_inout);
6013 if (!ok)
6015 ret = GS_ERROR;
6016 is_inout = false;
6019 if (!allows_reg && allows_mem)
6020 mark_addressable (TREE_VALUE (link));
6022 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6023 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6024 fb_lvalue | fb_mayfail);
6025 if (tret == GS_ERROR)
6027 error ("invalid lvalue in asm output %d", i);
6028 ret = tret;
6031 /* If the constraint does not allow memory make sure we gimplify
6032 it to a register if it is not already but its base is. This
6033 happens for complex and vector components. */
6034 if (!allows_mem)
6036 tree op = TREE_VALUE (link);
6037 if (! is_gimple_val (op)
6038 && is_gimple_reg_type (TREE_TYPE (op))
6039 && is_gimple_reg (get_base_address (op)))
6041 tree tem = create_tmp_reg (TREE_TYPE (op));
6042 tree ass;
6043 if (is_inout)
6045 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6046 tem, unshare_expr (op));
6047 gimplify_and_add (ass, pre_p);
6049 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6050 gimplify_and_add (ass, post_p);
6052 TREE_VALUE (link) = tem;
6053 tret = GS_OK;
6057 vec_safe_push (outputs, link);
6058 TREE_CHAIN (link) = NULL_TREE;
6060 if (is_inout)
6062 /* An input/output operand. To give the optimizers more
6063 flexibility, split it into separate input and output
6064 operands. */
6065 tree input;
6066 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6067 char buf[11];
6069 /* Turn the in/out constraint into an output constraint. */
6070 char *p = xstrdup (constraint);
6071 p[0] = '=';
6072 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6074 /* And add a matching input constraint. */
6075 if (allows_reg)
6077 sprintf (buf, "%u", i);
6079 /* If there are multiple alternatives in the constraint,
6080 handle each of them individually. Those that allow register
6081 will be replaced with operand number, the others will stay
6082 unchanged. */
6083 if (strchr (p, ',') != NULL)
6085 size_t len = 0, buflen = strlen (buf);
6086 char *beg, *end, *str, *dst;
   /* First measure the length needed for the rewritten
      constraint string.  */
6088 for (beg = p + 1;;)
6090 end = strchr (beg, ',');
6091 if (end == NULL)
6092 end = strchr (beg, '\0');
6093 if ((size_t) (end - beg) < buflen)
6094 len += buflen + 1;
6095 else
6096 len += end - beg + 1;
6097 if (*end)
6098 beg = end + 1;
6099 else
6100 break;
6103 str = (char *) alloca (len);
   /* Then build it, substituting the operand number for every
      alternative that allows a register.  */
6104 for (beg = p + 1, dst = str;;)
6106 const char *tem;
6107 bool mem_p, reg_p, inout_p;
6109 end = strchr (beg, ',');
6110 if (end)
6111 *end = '\0';
6112 beg[-1] = '=';
6113 tem = beg - 1;
6114 parse_output_constraint (&tem, i, 0, 0,
6115 &mem_p, &reg_p, &inout_p);
6116 if (dst != str)
6117 *dst++ = ',';
6118 if (reg_p)
6120 memcpy (dst, buf, buflen);
6121 dst += buflen;
6123 else
6125 if (end)
6126 len = end - beg;
6127 else
6128 len = strlen (beg);
6129 memcpy (dst, beg, len);
6130 dst += len;
6132 if (end)
6133 beg = end + 1;
6134 else
6135 break;
6137 *dst = '\0';
6138 input = build_string (dst - str, str);
6140 else
6141 input = build_string (strlen (buf), buf);
6143 else
6144 input = build_string (constraint_len - 1, constraint + 1);
6146 free (p);
6148 input = build_tree_list (build_tree_list (NULL_TREE, input),
6149 unshare_expr (TREE_VALUE (link)));
6150 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
   /* Second pass: gimplify the input operands — memory inputs as
      lvalues, everything else as asm rvalues.  */
6154 link_next = NULL_TREE;
6155 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6157 link_next = TREE_CHAIN (link);
6158 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6159 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6160 oconstraints, &allows_mem, &allows_reg);
6162 /* If we can't make copies, we can only accept memory. */
6163 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6165 if (allows_mem)
6166 allows_reg = 0;
6167 else
6169 error ("impossible constraint in %<asm%>");
6170 error ("non-memory input %d must stay in memory", i);
6171 return GS_ERROR;
6175 /* If the operand is a memory input, it should be an lvalue. */
6176 if (!allows_reg && allows_mem)
6178 tree inputv = TREE_VALUE (link);
6179 STRIP_NOPS (inputv);
6180 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6181 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6182 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6183 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6184 || TREE_CODE (inputv) == MODIFY_EXPR)
6185 TREE_VALUE (link) = error_mark_node;
6186 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6187 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6188 if (tret != GS_ERROR)
6190 /* Unlike output operands, memory inputs are not guaranteed
6191 to be lvalues by the FE, and while the expressions are
6192 marked addressable there, if it is e.g. a statement
6193 expression, temporaries in it might not end up being
6194 addressable. They might be already used in the IL and thus
6195 it is too late to make them addressable now though. */
6196 tree x = TREE_VALUE (link);
6197 while (handled_component_p (x))
6198 x = TREE_OPERAND (x, 0);
6199 if (TREE_CODE (x) == MEM_REF
6200 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6201 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6202 if ((VAR_P (x)
6203 || TREE_CODE (x) == PARM_DECL
6204 || TREE_CODE (x) == RESULT_DECL)
6205 && !TREE_ADDRESSABLE (x)
6206 && is_gimple_reg (x))
6208 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6209 input_location), 0,
6210 "memory input %d is not directly addressable",
6212 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6215 mark_addressable (TREE_VALUE (link));
6216 if (tret == GS_ERROR)
6218 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6219 "memory input %d is not directly addressable", i);
6220 ret = tret;
6223 else
6225 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6226 is_gimple_asm_val, fb_rvalue);
6227 if (tret == GS_ERROR)
6228 ret = tret;
6231 TREE_CHAIN (link) = NULL_TREE;
6232 vec_safe_push (inputs, link);
   /* Clobbers and goto-labels need no gimplification; just detach the
      chain links and collect them.  */
6235 link_next = NULL_TREE;
6236 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6238 link_next = TREE_CHAIN (link);
6239 TREE_CHAIN (link) = NULL_TREE;
6240 vec_safe_push (clobbers, link);
6243 link_next = NULL_TREE;
6244 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6246 link_next = TREE_CHAIN (link);
6247 TREE_CHAIN (link) = NULL_TREE;
6248 vec_safe_push (labels, link);
6251 /* Do not add ASMs with errors to the gimple IL stream. */
6252 if (ret != GS_ERROR)
6254 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6255 inputs, outputs, clobbers, labels);
6257 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6258 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6260 gimplify_seq_add_stmt (pre_p, stmt);
6263 return ret;
6266 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
6267    GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6268    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6269    return to this function.
6271    FIXME should we complexify the prequeue handling instead?  Or use flags
6272    for all the cleanups and let the optimizer tighten them up?  The current
6273    code seems pretty fragile; it will break on a cleanup within any
6274    non-conditional nesting.  But any such nesting would be broken, anyway;
6275    we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6276    and continues out of it.  We can do that at the RTL level, though, so
6277    having an optimizer to tighten up try/finally regions would be a Good
6278    Thing.  */
6280 static enum gimplify_status
6281 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6283   gimple_stmt_iterator iter;
6284   gimple_seq body_sequence = NULL;
     /* If the wrapped expression produces a value, TEMP is the temporary
	that will hold it; otherwise NULL.  */
6286   tree temp = voidify_wrapper_expr (*expr_p, NULL);
6288   /* We only care about the number of conditions between the innermost
6289      CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
6290      any cleanups collected outside the CLEANUP_POINT_EXPR.  */
6291   int old_conds = gimplify_ctxp->conditions;
6292   gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6293   bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6294   gimplify_ctxp->conditions = 0;
6295   gimplify_ctxp->conditional_cleanups = NULL;
6296   gimplify_ctxp->in_cleanup_point_expr = true;
6298   gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
     /* Restore the saved gimplification-context state now that the body
	has been gimplified.  */
6300   gimplify_ctxp->conditions = old_conds;
6301   gimplify_ctxp->conditional_cleanups = old_cleanups;
6302   gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
     /* Walk the gimplified body; each GIMPLE_WITH_CLEANUP_EXPR marker is
	replaced by a GIMPLE_TRY that protects all statements following it.  */
6304   for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6306       gimple *wce = gsi_stmt (iter);
6308       if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	   /* A cleanup marker that is the last statement needs no TRY
	      wrapper: emit the (non-EH-only) cleanup inline and drop the
	      marker.  */
6310 	  if (gsi_one_before_end_p (iter))
6312 	      /* Note that gsi_insert_seq_before and gsi_remove do not
6313 		 scan operands, unlike some other sequence mutators.  */
6314 	      if (!gimple_wce_cleanup_eh_only (wce))
6315 		gsi_insert_seq_before_without_update (&iter,
6316 						      gimple_wce_cleanup (wce),
6317 						      GSI_SAME_STMT);
6318 	      gsi_remove (&iter, true);
6319 	      break;
6321 	  else
6323 	      gtry *gtry;
6324 	      gimple_seq seq;
6325 	      enum gimple_try_flags kind;
	      /* EH-only cleanups become TRY/CATCH; ordinary cleanups become
		 TRY/FINALLY so they run on normal exit too.  */
6327 	      if (gimple_wce_cleanup_eh_only (wce))
6328 		kind = GIMPLE_TRY_CATCH;
6329 	      else
6330 		kind = GIMPLE_TRY_FINALLY;
6331 	      seq = gsi_split_seq_after (iter);
6333 	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6334 	      /* Do not use gsi_replace here, as it may scan operands.
6335 		 We want to do a simple structural modification only.  */
6336 	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the newly built TRY body, since it
		 may contain further cleanup markers.  */
6337 	      iter = gsi_start (gtry->eval);
6340       else
6341 	gsi_next (&iter);
6344   gimplify_seq_add_seq (pre_p, body_sequence);
6345   if (temp)
6347       *expr_p = temp;
6348       return GS_OK;
6350   else
6352       *expr_p = NULL;
6353       return GS_ALL_DONE;
6357 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
6358    is the cleanup action required.  EH_ONLY is true if the cleanup should
6359    only be executed if an exception is thrown, not on normal exit.
6360    If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6361    only valid for clobbers.
     VAR is the variable the cleanup protects; it is only used to suppress
     an uninitialized-use warning in the conditional case.  */
6363 static void
6364 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6365 		     bool force_uncond = false)
6367   gimple *wce;
6368   gimple_seq cleanup_stmts = NULL;
6370   /* Errors can result in improperly nested cleanups.  Which results in
6371      confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
6372   if (seen_error ())
6373     return;
6375   if (gimple_conditional_context ())
6377       /* If we're in a conditional context, this is more complex.  We only
6378 	 want to run the cleanup if we actually ran the initialization that
6379 	 necessitates it, but we want to run it after the end of the
6380 	 conditional context.  So we wrap the try/finally around the
6381 	 condition and use a flag to determine whether or not to actually
6382 	 run the destructor.  Thus
6384 	   test ? f(A()) : 0
6386 	 becomes (approximately)
6388 	   flag = 0;
6389 	   try {
6390 	     if (test) { A::A(temp); flag = 1; val = f(temp); }
6391 	     else { val = 0; }
6392 	   } finally {
6393 	     if (flag) A::~A(temp);
      /* Clobbers are executed unconditionally, so no flag is needed even
	 inside a conditional context.  */
6397       if (force_uncond)
6399 	  gimplify_stmt (&cleanup, &cleanup_stmts);
6400 	  wce = gimple_build_wce (cleanup_stmts);
6401 	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6403       else
	  /* FLAG starts false, is set true just before the protected code
	     runs, and gates the cleanup in the finally block.  */
6405 	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
6406 	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6407 	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6409 	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6410 	  gimplify_stmt (&cleanup, &cleanup_stmts);
6411 	  wce = gimple_build_wce (cleanup_stmts);
6413 	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6414 	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6415 	  gimplify_seq_add_stmt (pre_p, ftrue);
6417 	  /* Because of this manipulation, and the EH edges that jump
6418 	     threading cannot redirect, the temporary (VAR) will appear
6419 	     to be used uninitialized.  Don't warn.  */
6420 	  TREE_NO_WARNING (var) = 1;
6423   else
      /* Unconditional context: just emit the marker, tagging it EH-only
	 if requested.  */
6425       gimplify_stmt (&cleanup, &cleanup_stmts);
6426       wce = gimple_build_wce (cleanup_stmts);
6427       gimple_wce_set_cleanup_eh_only (wce, eh_only);
6428       gimplify_seq_add_stmt (pre_p, wce);
6432 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
     Evaluates the initializer into the TARGET_EXPR's slot TEMP, registers
     any required cleanups, and replaces *EXPR_P with TEMP.  */
6434 static enum gimplify_status
6435 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6437   tree targ = *expr_p;
6438   tree temp = TARGET_EXPR_SLOT (targ);
6439   tree init = TARGET_EXPR_INITIAL (targ);
6440   enum gimplify_status ret;
     /* Where to place the ASAN unpoison call, saved before TEMP is added
	so the unpoison precedes any use.  */
6442   bool unpoison_empty_seq = false;
6443   gimple_stmt_iterator unpoison_it;
6445   if (init)
6447       tree cleanup = NULL_TREE;
6449       /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6450 	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
6451       if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6453 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6454 	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6455 	  gimplify_vla_decl (temp, pre_p);
6457       else
6459 	  /* Save location where we need to place unpoisoning.  It's possible
6460 	     that a variable will be converted to needs_to_live_in_memory.  */
6461 	  unpoison_it = gsi_last (*pre_p);
6462 	  unpoison_empty_seq = gsi_end_p (unpoison_it);
6464 	  gimple_add_tmp_var (temp);
6467       /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6468 	 expression is supposed to initialize the slot.  */
6469       if (VOID_TYPE_P (TREE_TYPE (init)))
6470 	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6471       else
6473 	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6474 	  init = init_expr;
6475 	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6476 	  init = NULL;
6477 	  ggc_free (init_expr);
6479       if (ret == GS_ERROR)
6481 	  /* PR c++/28266 Make sure this is expanded only once. */
6482 	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6483 	  return GS_ERROR;
6485       if (init)
6486 	gimplify_and_add (init, pre_p);
6488       /* If needed, push the cleanup for the temp.  */
6489       if (TARGET_EXPR_CLEANUP (targ))
	  /* EH-only cleanups must be pushed before the clobber/ASAN
	     cleanups below; others are pushed last (see CLEANUP at end).  */
6491 	  if (CLEANUP_EH_ONLY (targ))
6492 	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6493 				 CLEANUP_EH_ONLY (targ), pre_p);
6494 	  else
6495 	    cleanup = TARGET_EXPR_CLEANUP (targ);
6498       /* Add a clobber for the temporary going out of scope, like
6499 	 gimplify_bind_expr.  */
6500       if (gimplify_ctxp->in_cleanup_point_expr
6501 	  && needs_to_live_in_memory (temp))
6503 	  if (flag_stack_reuse == SR_ALL)
	      /* An empty-CONSTRUCTOR clobber marks the end of TEMP's
		 lifetime for stack-slot sharing; it runs unconditionally
		 (force_uncond).  */
6505 	      tree clobber = build_constructor (TREE_TYPE (temp),
6506 						NULL);
6507 	      TREE_THIS_VOLATILE (clobber) = true;
6508 	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6509 	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
6511 	  if (asan_poisoned_variables
6512 	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6513 	      && dbg_cnt (asan_use_after_scope))
6515 	      tree asan_cleanup = build_asan_poison_call_expr (temp);
6516 	      if (asan_cleanup)
		  /* Unpoison at the saved position (before first use),
		     repoison via a cleanup when TEMP dies.  */
6518 		  if (unpoison_empty_seq)
6519 		    unpoison_it = gsi_start (*pre_p);
6521 		  asan_poison_variable (temp, false, &unpoison_it,
6522 					unpoison_empty_seq);
6523 		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6527       if (cleanup)
6528 	gimple_push_cleanup (temp, cleanup, false, pre_p);
6530       /* Only expand this once.  */
6531       TREE_OPERAND (targ, 3) = init;
6532       TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6534   else
6535     /* We should have expanded this before.  */
6536     gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6538   *expr_p = temp;
6539   return GS_OK;
6542 /* Gimplification of expression trees. */
6544 /* Gimplify an expression which appears at statement context. The
6545 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6546 NULL, a new sequence is allocated.
6548 Return true if we actually added a statement to the queue. */
6550 bool
6551 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6553 gimple_seq_node last;
6555 last = gimple_seq_last (*seq_p);
6556 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6557 return last != gimple_seq_last (*seq_p);
6560 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6561    to CTX.  If entries already exist, force them to be some flavor of private.
6562    If there is no enclosing parallel, do nothing.  */
6564 void
6565 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6567   splay_tree_node n;
6569   if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6570     return;
     /* Walk outward through the enclosing OMP contexts (do-while loop
	over ctx->outer_context).  */
6574       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6575       if (n != NULL)
	  /* An existing SHARED entry is downgraded to FIRSTPRIVATE; an
	     existing MAP entry is restricted to "to"-only.  Any other
	     existing entry stops the walk.  */
6577 	  if (n->value & GOVD_SHARED)
6578 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6579 	  else if (n->value & GOVD_MAP)
6580 	    n->value |= GOVD_MAP_TO_ONLY;
6581 	  else
6582 	    return;
6584       else if ((ctx->region_type & ORT_TARGET) != 0)
6586 	  if (ctx->target_map_scalars_firstprivate)
6587 	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6588 	  else
6589 	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6591       else if (ctx->region_type != ORT_WORKSHARE
6592 	       && ctx->region_type != ORT_SIMD
6593 	       && ctx->region_type != ORT_ACC
6594 	       && !(ctx->region_type & ORT_TARGET_DATA))
6595 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6597       ctx = ctx->outer_context;
6599   while (ctx);
6602 /* Similarly for each of the type sizes of TYPE.
     Recursively firstprivatizes any variable-sized bounds, offsets and
     element sizes reachable from TYPE, so they are available inside the
     OMP region.  A hash set in CTX prevents revisiting a type.  */
6604 static void
6605 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6607   if (type == NULL || type == error_mark_node)
6608     return;
6609   type = TYPE_MAIN_VARIANT (type);
     /* Already processed this type in CTX?  add returns true if present.  */
6611   if (ctx->privatized_types->add (type))
6612     return;
6614   switch (TREE_CODE (type))
6616     case INTEGER_TYPE:
6617     case ENUMERAL_TYPE:
6618     case BOOLEAN_TYPE:
6619     case REAL_TYPE:
6620     case FIXED_POINT_TYPE:
6621       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6622       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6623       break;
6625     case ARRAY_TYPE:
6626       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6627       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6628       break;
6630     case RECORD_TYPE:
6631     case UNION_TYPE:
6632     case QUAL_UNION_TYPE:
6634 	tree field;
6635 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6636 	  if (TREE_CODE (field) == FIELD_DECL)
6638 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6639 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6642       break;
6644     case POINTER_TYPE:
6645     case REFERENCE_TYPE:
6646       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6647       break;
6649     default:
6650       break;
     /* Finally handle the size of TYPE itself, and give the front end a
	chance to handle language-specific size trees.  */
6653   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6654   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6655   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6658 /* Add an entry for DECL in the OMP context CTX with FLAGS.
     Handles the extra bookkeeping for variable-sized decls (the pointer
     replacement variable and the type's size trees), reference-typed
     privatized decls, and OpenACC reduction copy-back.  */
6660 static void
6661 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6663   splay_tree_node n;
6664   unsigned int nflags;
6665   tree t;
6667   if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6668     return;
6670   /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
6671      there are constructors involved somewhere.  Exception is a shared clause,
6672      there is nothing privatized in that case.  */
6673   if ((flags & GOVD_SHARED) == 0
6674       && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6675 	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6676     flags |= GOVD_SEEN;
6678   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6679   if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6681       /* We shouldn't be re-adding the decl with the same data
6682 	 sharing class.  */
6683       gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6684       nflags = n->value | flags;
6685       /* The only combination of data sharing classes we should see is
6686 	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
6687 	 reduction variables to be used in data sharing clauses.  */
6688       gcc_assert ((ctx->region_type & ORT_ACC) != 0
6689 		  || ((nflags & GOVD_DATA_SHARE_CLASS)
6690 		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6691 		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6692       n->value = nflags;
6693       return;
6696   /* When adding a variable-sized variable, we have to handle all sorts
6697      of additional bits of data: the pointer replacement variable, and
6698      the parameters of the type.  */
6699   if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6701       /* Add the pointer replacement variable as PRIVATE if the variable
6702 	 replacement is private, else FIRSTPRIVATE since we'll need the
6703 	 address of the original variable either for SHARED, or for the
6704 	 copy into or out of the context.  */
6705       if (!(flags & GOVD_LOCAL))
6707 	  if (flags & GOVD_MAP)
6708 	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6709 	  else if (flags & GOVD_PRIVATE)
6710 	    nflags = GOVD_PRIVATE;
6711 	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6712 		   && (flags & GOVD_FIRSTPRIVATE))
6713 	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6714 	  else
6715 	    nflags = GOVD_FIRSTPRIVATE;
6716 	  nflags |= flags & GOVD_SEEN;
	  /* The DECL_VALUE_EXPR of a VLA is *ptr_var; recurse on ptr_var.  */
6717 	  t = DECL_VALUE_EXPR (decl);
6718 	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6719 	  t = TREE_OPERAND (t, 0);
6720 	  gcc_assert (DECL_P (t));
6721 	  omp_add_variable (ctx, t, nflags);
6724       /* Add all of the variable and type parameters (which should have
6725 	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
6726       omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6727       omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6728       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6730       /* The variable-sized variable itself is never SHARED, only some form
6731 	 of PRIVATE.  The sharing would take place via the pointer variable
6732 	 which we remapped above.  */
6733       if (flags & GOVD_SHARED)
6734 	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6735 		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6737       /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6738 	 alloca statement we generate for the variable, so make sure it
6739 	 is available.  This isn't automatically needed for the SHARED
6740 	 case, since we won't be allocating local storage then.
6741 	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6742 	 in this case omp_notice_variable will be called later
6743 	 on when it is gimplified.  */
6744       else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6745 	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6746 	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6748   else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6749 	   && lang_hooks.decls.omp_privatize_by_reference (decl))
6751       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6753       /* Similar to the direct variable sized case above, we'll need the
6754 	 size of references being privatized.  */
6755       if ((flags & GOVD_SHARED) == 0)
6757 	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6758 	  if (DECL_P (t))
6759 	    omp_notice_variable (ctx, t, true);
     /* Record (or augment) the entry for DECL itself.  */
6763   if (n != NULL)
6764     n->value |= flags;
6765   else
6766     splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6768   /* For reductions clauses in OpenACC loop directives, by default create a
6769      copy clause on the enclosing parallel construct for carrying back the
6770      results.  */
6771   if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6773       struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6774       while (outer_ctx)
6776 	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6777 	  if (n != NULL)
6779 	      /* Ignore local variables and explicitly declared clauses.  */
6780 	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6781 		break;
6782 	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6784 		  /* According to the OpenACC spec, such a reduction variable
6785 		     should already have a copy map on a kernels construct,
6786 		     verify that here.  */
6787 		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6788 			      && (n->value & GOVD_MAP));
6790 	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6792 		  /* Remove firstprivate and make it a copy map.  */
6793 		  n->value &= ~GOVD_FIRSTPRIVATE;
6794 		  n->value |= GOVD_MAP;
6797 	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6799 	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6800 				 GOVD_MAP | GOVD_SEEN);
6801 	      break;
6803 	  outer_ctx = outer_ctx->outer_context;
6808 /* Notice a threadprivate variable DECL used in OMP context CTX.
6809    This just prints out diagnostics about threadprivate variable uses
6810    in untied tasks.  If DECL2 is non-NULL, prevent this warning
6811    on that variable.
     Always returns false (a threadprivate decl is never remapped).
     A zero entry is inserted in the context's variable table after the
     first diagnostic so each variable is reported only once.  */
6813 static bool
6814 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6815 				   tree decl2)
6817   splay_tree_node n;
6818   struct gimplify_omp_ctx *octx;
     /* Using a threadprivate variable inside any enclosing target region
	is an error.  */
6820   for (octx = ctx; octx; octx = octx->outer_context)
6821     if ((octx->region_type & ORT_TARGET) != 0)
6823 	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6824 	if (n == NULL)
6826 	    error ("threadprivate variable %qE used in target region",
6827 		   DECL_NAME (decl));
6828 	    error_at (octx->location, "enclosing target region");
6829 	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6831 	if (decl2)
6832 	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
     /* Outside untied tasks there is nothing further to diagnose.  */
6835   if (ctx->region_type != ORT_UNTIED_TASK)
6836     return false;
6837   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6838   if (n == NULL)
6840       error ("threadprivate variable %qE used in untied task",
6841 	     DECL_NAME (decl));
6842       error_at (ctx->location, "enclosing task");
6843       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6845   if (decl2)
6846     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6847   return false;
6850 /* Return true if global var DECL is device resident. */
6852 static bool
6853 device_resident_p (tree decl)
6855 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6857 if (!attr)
6858 return false;
6860 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6862 tree c = TREE_VALUE (t);
6863 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6864 return true;
6867 return false;
6870 /* Return true if DECL has an ACC DECLARE attribute. */
6872 static bool
6873 is_oacc_declared (tree decl)
6875 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6876 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6877 return declared != NULL_TREE;
6880 /* Determine outer default flags for DECL mentioned in an OMP region
6881    but not declared in an enclosing clause.
6883    ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6884    remapped firstprivate instead of shared.  To some extent this is
6885    addressed in omp_firstprivatize_type_sizes, but not
6886    effectively.
     Returns FLAGS augmented with the GOVD_* sharing class implied by the
     region's default() clause (or the language's predetermined sharing).  */
6888 static unsigned
6889 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6890 		    bool in_code, unsigned flags)
6892   enum omp_clause_default_kind default_kind = ctx->default_kind;
6893   enum omp_clause_default_kind kind;
     /* A language-predetermined sharing overrides the region's default.  */
6895   kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6896   if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6897     default_kind = kind;
6899   switch (default_kind)
6901     case OMP_CLAUSE_DEFAULT_NONE:
6903 	const char *rtype;
6905 	if (ctx->region_type & ORT_PARALLEL)
6906 	  rtype = "parallel";
6907 	else if (ctx->region_type & ORT_TASK)
6908 	  rtype = "task";
6909 	else if (ctx->region_type & ORT_TEAMS)
6910 	  rtype = "teams";
6911 	else
6912 	  gcc_unreachable ();
6914 	error ("%qE not specified in enclosing %qs",
6915 	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6916 	error_at (ctx->location, "enclosing %qs", rtype);
      /* After diagnosing default(none), recover by treating it as shared.  */
6918       /* FALLTHRU */
6919     case OMP_CLAUSE_DEFAULT_SHARED:
6920       flags |= GOVD_SHARED;
6921       break;
6922     case OMP_CLAUSE_DEFAULT_PRIVATE:
6923       flags |= GOVD_PRIVATE;
6924       break;
6925     case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6926       flags |= GOVD_FIRSTPRIVATE;
6927       break;
6928     case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6929       /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
6930       gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6931       if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	  /* Implicit task rules: look outward for the decl's sharing in
	     enclosing contexts to decide between firstprivate/shared.  */
6933 	  omp_notice_variable (octx, decl, in_code);
6934 	  for (; octx; octx = octx->outer_context)
6936 	      splay_tree_node n2;
6938 	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6939 	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6940 		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6941 		continue;
6942 	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6944 		  flags |= GOVD_FIRSTPRIVATE;
6945 		  goto found_outer;
6947 	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6949 		  flags |= GOVD_SHARED;
6950 		  goto found_outer;
      /* No enclosing context decided it: locals and parameters default to
	 firstprivate, globals to shared.  */
6955       if (TREE_CODE (decl) == PARM_DECL
6956 	  || (!is_global_var (decl)
6957 	      && DECL_CONTEXT (decl) == current_function_decl))
6958 	flags |= GOVD_FIRSTPRIVATE;
6959       else
6960 	flags |= GOVD_SHARED;
6961     found_outer:
6962       break;
6964     default:
6965       gcc_unreachable ();
6968   return flags;
6972 /* Determine outer default flags for DECL mentioned in an OACC region
6973    but not declared in an enclosing clause.
     Returns FLAGS augmented per the OpenACC implicit data-attribute
     rules: aggregates are mapped, scalars are copied (kernels) or
     firstprivate (parallel), device-resident/declared decls are mapped.  */
6975 static unsigned
6976 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6978   const char *rkind;
6979   bool on_device = false;
6980   bool declared = is_oacc_declared (decl);
6981   tree type = TREE_TYPE (decl);
     /* For by-reference privatization, classify by the referenced type.  */
6983   if (lang_hooks.decls.omp_privatize_by_reference (decl))
6984     type = TREE_TYPE (type);
6986   if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6987       && is_global_var (decl)
6988       && device_resident_p (decl))
6990       on_device = true;
6991       flags |= GOVD_MAP_TO_ONLY;
6994   switch (ctx->region_type)
6996     case ORT_ACC_KERNELS:
6997       rkind = "kernels";
6999       if (AGGREGATE_TYPE_P (type))
7001 	  /* Aggregates default to 'present_or_copy', or 'present'.  */
7002 	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7003 	    flags |= GOVD_MAP;
7004 	  else
7005 	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7007       else
7008 	/* Scalars default to 'copy'.  */
7009 	flags |= GOVD_MAP | GOVD_MAP_FORCE;
7011       break;
7013     case ORT_ACC_PARALLEL:
7014       rkind = "parallel";
7016       if (on_device || declared)
7017 	flags |= GOVD_MAP;
7018       else if (AGGREGATE_TYPE_P (type))
7020 	  /* Aggregates default to 'present_or_copy', or 'present'.  */
7021 	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7022 	    flags |= GOVD_MAP;
7023 	  else
7024 	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7026       else
7027 	/* Scalars default to 'firstprivate'.  */
7028 	flags |= GOVD_FIRSTPRIVATE;
7030       break;
7032     default:
7033       gcc_unreachable ();
     /* Under default(none), an implicitly determined user variable is an
	error; RKIND names the construct in the diagnostic.  */
7036   if (DECL_ARTIFICIAL (decl))
7037     ; /* We can get compiler-generated decls, and should not complain
7038 	 about them.  */
7039   else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7041       error ("%qE not specified in enclosing OpenACC %qs construct",
7042 	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7043       inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7045   else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7046     ; /* Handled above.  */
7047   else
7048     gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7050   return flags;
7053 /* Record the fact that DECL was used within the OMP context CTX.
7054    IN_CODE is true when real code uses DECL, and false when we should
7055    merely emit default(none) errors.  Return true if DECL is going to
7056    be remapped and thus DECL shouldn't be gimplified into its
7057    DECL_VALUE_EXPR (if any).  */
7059 static bool
7060 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7062   splay_tree_node n;
7063   unsigned flags = in_code ? GOVD_SEEN : 0;
7064   bool ret = false, shared;
7066   if (error_operand_p (decl))
7067     return false;
7069   if (ctx->region_type == ORT_NONE)
7070     return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7072   if (is_global_var (decl))
7074       /* Threadprivate variables are predetermined.  */
7075       if (DECL_THREAD_LOCAL_P (decl))
7076 	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7078       if (DECL_HAS_VALUE_EXPR_P (decl))
	  /* A value-expr based on thread-local storage is threadprivate
	     too (e.g. emulated TLS).  */
7080 	  tree value = get_base_address (DECL_VALUE_EXPR (decl));
7082 	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7083 	    return omp_notice_threadprivate_variable (ctx, decl, value);
      /* Inside an OpenACC 'routine' function, global variables must be
	 declared for offloading; 'link' variables are not usable.  */
7086       if (gimplify_omp_ctxp->outer_context == NULL
7087 	  && VAR_P (decl)
7088 	  && oacc_get_fn_attrib (current_function_decl))
7090 	  location_t loc = DECL_SOURCE_LOCATION (decl);
7092 	  if (lookup_attribute ("omp declare target link",
7093 				DECL_ATTRIBUTES (decl)))
7095 	      error_at (loc,
7096 			"%qE with %<link%> clause used in %<routine%> function",
7097 			DECL_NAME (decl));
7098 	      return false;
7100 	  else if (!lookup_attribute ("omp declare target",
7101 				      DECL_ATTRIBUTES (decl)))
7103 	      error_at (loc,
7104 			"%qE requires a %<declare%> directive for use "
7105 			"in a %<routine%> function", DECL_NAME (decl));
7106 	      return false;
7111   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7112   if ((ctx->region_type & ORT_TARGET) != 0)
7114       ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7115       if (n == NULL)
	  /* First use of DECL in this target region: compute its implicit
	     map/firstprivate flags.  */
7117 	  unsigned nflags = flags;
7118 	  if (ctx->target_map_pointers_as_0len_arrays
7119 	      || ctx->target_map_scalars_firstprivate)
7121 	      bool is_declare_target = false;
7122 	      bool is_scalar = false;
7123 	      if (is_global_var (decl)
7124 		  && varpool_node::get_create (decl)->offloadable)
		  /* Check whether any enclosing context gives DECL a real
		     data-sharing class; if none does, DECL behaves as a
		     'declare target' variable and needs no mapping.  */
7126 		  struct gimplify_omp_ctx *octx;
7127 		  for (octx = ctx->outer_context;
7128 		       octx; octx = octx->outer_context)
7130 		      n = splay_tree_lookup (octx->variables,
7131 					     (splay_tree_key)decl);
7132 		      if (n
7133 			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7134 			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7135 			break;
7137 		  is_declare_target = octx == NULL;
7139 	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7140 		is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7141 	      if (is_declare_target)
7143 	      else if (ctx->target_map_pointers_as_0len_arrays
7144 		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7145 			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7146 			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7147 				  == POINTER_TYPE)))
7148 		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7149 	      else if (is_scalar)
7150 		nflags |= GOVD_FIRSTPRIVATE;
	  /* For OpenACC, inherit a mapping from enclosing data regions
	     if one exists.  */
7153 	  struct gimplify_omp_ctx *octx = ctx->outer_context;
7154 	  if ((ctx->region_type & ORT_ACC) && octx)
7156 	      /* Look in outer OpenACC contexts, to see if there's a
7157 		 data attribute for this variable.  */
7158 	      omp_notice_variable (octx, decl, in_code);
7160 	      for (; octx; octx = octx->outer_context)
7162 		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7163 		    break;
7164 		  splay_tree_node n2
7165 		    = splay_tree_lookup (octx->variables,
7166 					 (splay_tree_key) decl);
7167 		  if (n2)
7169 		      if (octx->region_type == ORT_ACC_HOST_DATA)
7170 		        error ("variable %qE declared in enclosing "
7171 			       "%<host_data%> region", DECL_NAME (decl));
7172 		      nflags |= GOVD_MAP;
7173 		      if (octx->region_type == ORT_ACC_DATA
7174 			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
7175 			nflags |= GOVD_MAP_0LEN_ARRAY;
7176 		      goto found_outer;
	  /* NFLAGS == FLAGS means nothing above applied; fall back to the
	     mappability check and the region's default clause.  */
7182 	    tree type = TREE_TYPE (decl);
7184 	    if (nflags == flags
7185 		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
7186 		&& lang_hooks.decls.omp_privatize_by_reference (decl))
7187 	      type = TREE_TYPE (type);
7188 	    if (nflags == flags
7189 		&& !lang_hooks.types.omp_mappable_type (type))
7191 		error ("%qD referenced in target region does not have "
7192 		       "a mappable type", decl);
7193 		nflags |= GOVD_MAP | GOVD_EXPLICIT;
7195 	    else if (nflags == flags)
7197 		if ((ctx->region_type & ORT_ACC) != 0)
7198 		  nflags = oacc_default_clause (ctx, decl, flags);
7199 		else
7200 		  nflags |= GOVD_MAP;
7203 	  found_outer:
7204 	  omp_add_variable (ctx, decl, nflags);
7206       else
7208 	  /* If nothing changed, there's nothing left to do.  */
7209 	  if ((n->value & flags) == flags)
7210 	    return ret;
7211 	  flags |= n->value;
7212 	  n->value = flags;
7214       goto do_outer;
     /* Non-target region.  First use of DECL here: apply the default
	clause unless the region kind doesn't privatize.  */
7217   if (n == NULL)
7219       if (ctx->region_type == ORT_WORKSHARE
7220 	  || ctx->region_type == ORT_SIMD
7221 	  || ctx->region_type == ORT_ACC
7222 	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
7223 	goto do_outer;
7225       flags = omp_default_clause (ctx, decl, in_code, flags);
7227       if ((flags & GOVD_PRIVATE)
7228 	  && lang_hooks.decls.omp_private_outer_ref (decl))
7229 	flags |= GOVD_PRIVATE_OUTER_REF;
7231       omp_add_variable (ctx, decl, flags);
7233       shared = (flags & GOVD_SHARED) != 0;
7234       ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7235       goto do_outer;
     /* DECL was already recorded; on its first real use mark the size
	trees it depends on as seen as well.  */
7238   if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7239       && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7240       && DECL_SIZE (decl))
7242       if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7244 	  splay_tree_node n2;
7245 	  tree t = DECL_VALUE_EXPR (decl);
7246 	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7247 	  t = TREE_OPERAND (t, 0);
7248 	  gcc_assert (DECL_P (t));
7249 	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7250 	  n2->value |= GOVD_SEEN;
7252       else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7253 	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7254 	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7255 		   != INTEGER_CST))
7257 	  splay_tree_node n2;
7258 	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7259 	  gcc_assert (DECL_P (t));
7260 	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7261 	  if (n2)
7262 	    omp_notice_variable (ctx, t, true);
7266   shared = ((flags | n->value) & GOVD_SHARED) != 0;
7267   ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7269   /* If nothing changed, there's nothing left to do.  */
7270   if ((n->value & flags) == flags)
7271     return ret;
7272   flags |= n->value;
7273   n->value = flags;
7275  do_outer:
7276   /* If the variable is private in the current context, then we don't
7277      need to propagate anything to an outer context.  */
7278   if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7279     return ret;
7280   if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7281       == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7282     return ret;
7283   if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7284 		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7285       == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7286     return ret;
     /* Otherwise recurse into the enclosing context.  */
7287   if (ctx->outer_context
7288       && omp_notice_variable (ctx->outer_context, decl, in_code))
7289     return true;
7290   return ret;
7293 /* Verify that DECL is private within CTX.  If there's specific information
7294    to the contrary in the innermost scope, generate an error.
     SIMD encodes the loop kind: 0 = not simd, 1 = combined simd,
     2 = plain simd (only compared against those literal values below).
     Returns true if DECL is (or has been forced) private in CTX.  */
7296 static bool
7297 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7299   splay_tree_node n;
7301   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7302   if (n != NULL)
7304       if (n->value & GOVD_SHARED)
	  /* A shared iteration variable in the innermost context is an
	     error; recover by forcing it private.  */
7306 	  if (ctx == gimplify_omp_ctxp)
7308 	      if (simd)
7309 		error ("iteration variable %qE is predetermined linear",
7310 		       DECL_NAME (decl));
7311 	      else
7312 		error ("iteration variable %qE should be private",
7313 		       DECL_NAME (decl));
7314 	      n->value = GOVD_PRIVATE;
7315 	      return true;
7317 	  else
7318 	    return false;
7320       else if ((n->value & GOVD_EXPLICIT) != 0
7321 	       && (ctx == gimplify_omp_ctxp
7322 		   || (ctx->region_type == ORT_COMBINED_PARALLEL
7323 		       && gimplify_omp_ctxp->outer_context == ctx)))
	  /* Explicit clauses on the iteration variable that conflict with
	     its predetermined sharing are diagnosed here.  */
7325 	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7326 	    error ("iteration variable %qE should not be firstprivate",
7327 		   DECL_NAME (decl));
7328 	  else if ((n->value & GOVD_REDUCTION) != 0)
7329 	    error ("iteration variable %qE should not be reduction",
7330 		   DECL_NAME (decl));
7331 	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7332 	    error ("iteration variable %qE should not be linear",
7333 		   DECL_NAME (decl));
7334 	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7335 	    error ("iteration variable %qE should not be lastprivate",
7336 		   DECL_NAME (decl));
7337 	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
7338 	    error ("iteration variable %qE should not be private",
7339 		   DECL_NAME (decl));
7340 	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7341 	    error ("iteration variable %qE is predetermined linear",
7342 		   DECL_NAME (decl));
7344       return (ctx == gimplify_omp_ctxp
7345 	      || (ctx->region_type == ORT_COMBINED_PARALLEL
7346 		  && gimplify_omp_ctxp->outer_context == ctx));
     /* No entry in CTX: recurse outward through pass-through regions.  */
7349   if (ctx->region_type != ORT_WORKSHARE
7350       && ctx->region_type != ORT_SIMD
7351       && ctx->region_type != ORT_ACC)
7352     return false;
7353   else if (ctx->outer_context)
7354     return omp_is_private (ctx->outer_context, decl, simd);
7355   return false;
7358 /* Return true if DECL is private within a parallel region
7359 that binds to the current construct's context or in parallel
7360 region's REDUCTION clause. */
/* NOTE(review): the body below is a do/while loop walking outward through
   enclosing contexts; the opening "do" and brace lines were dropped by
   this extraction — see the original file for the exact block structure.
   COPYPRIVATE selects the more conservative answer for by-reference
   variables (see the comment at line 7375 below).  */
7362 static bool
7363 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7365 splay_tree_node n;
/* Step outward one context per iteration.  */
7369 ctx = ctx->outer_context;
7370 if (ctx == NULL)
/* Ran out of enclosing contexts: DECL binds to the surrounding
   function scope.  */
7372 if (is_global_var (decl))
7373 return false;
7375 /* References might be private, but might be shared too,
7376 when checking for copyprivate, assume they might be
7377 private, otherwise assume they might be shared. */
7378 if (copyprivate)
7379 return true;
7381 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7382 return false;
7384 /* Treat C++ privatized non-static data members outside
7385 of the privatization the same. */
7386 if (omp_member_access_dummy_var (decl))
7387 return false;
7389 return true;
7392 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* Target regions without a data-sharing entry for DECL are transparent
   for this question; keep walking outward.  */
7394 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7395 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7396 continue;
7398 if (n != NULL)
7400 if ((n->value & GOVD_LOCAL) != 0
7401 && omp_member_access_dummy_var (decl))
7402 return false;
/* Private iff the entry is anything other than shared.  */
7403 return (n->value & GOVD_SHARED) == 0;
/* Only keep traversing through region types that merely forward the
   data-sharing question to their parent.  */
7406 while (ctx->region_type == ORT_WORKSHARE
7407 || ctx->region_type == ORT_SIMD
7408 || ctx->region_type == ORT_ACC);
7409 return false;
7412 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
/* DATA is the tree (a DECL) whose DECL_EXPR we are searching for.
   Returning non-NULL terminates the walk with that node as the result;
   *WALK_SUBTREES is cleared for types and decls to prune the traversal.  */
7414 static tree
7415 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7417 tree t = *tp;
7419 /* Found the DECL_EXPR for DATA; return it to stop the walk.  */
7420 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7421 return t;
7423 if (IS_TYPE_OR_DECL_P (t))
7424 *walk_subtrees = 0;
7425 return NULL_TREE;
7428 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7429 and previous omp contexts. */
/* NOTE(review): REGION_TYPE describes the kind of OMP/OpenACC region being
   opened (a new gimplify_omp_ctx is pushed for it) and CODE is the tree
   code of the construct owning the clauses.  For each clause the loop
   either installs a data-sharing mapping (do_add / do_add_decl), notifies
   enclosing contexts (do_notice), gimplifies clause operands into PRE_P,
   rewrites MAP clause chains in place, or flags the clause for removal.
   This extraction dropped blank and brace-only lines, so block boundaries
   below must be read from the original file.  */
7431 static void
7432 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7433 enum omp_region_type region_type,
7434 enum tree_code code)
7436 struct gimplify_omp_ctx *ctx, *outer_ctx;
7437 tree c;
/* Maps a struct base decl to its GOMP_MAP_STRUCT clause, so component
   maps of the same struct can be grouped and kept sorted by offset.  */
7438 hash_map<tree, tree> *struct_map_to_clause = NULL;
7439 tree *prev_list_p = NULL;
7441 ctx = new_omp_context (region_type);
7442 outer_ctx = ctx->outer_context;
7443 if (code == OMP_TARGET)
7445 if (!lang_GNU_Fortran ())
7446 ctx->target_map_pointers_as_0len_arrays = true;
7447 ctx->target_map_scalars_firstprivate = true;
7449 if (!lang_GNU_Fortran ())
7450 switch (code)
7452 case OMP_TARGET:
7453 case OMP_TARGET_DATA:
7454 case OMP_TARGET_ENTER_DATA:
7455 case OMP_TARGET_EXIT_DATA:
7456 case OACC_DECLARE:
7457 case OACC_HOST_DATA:
7458 ctx->target_firstprivatize_array_bases = true;
7459 default:
7460 break;
/* Main loop: process each clause C; setting REMOVE unlinks C from the
   list at the bottom of the loop instead of advancing past it.  */
7463 while ((c = *list_p) != NULL)
7465 bool remove = false;
7466 bool notice_outer = true;
7467 const char *check_non_private = NULL;
7468 unsigned int flags;
7469 tree decl;
7471 switch (OMP_CLAUSE_CODE (c))
7473 case OMP_CLAUSE_PRIVATE:
7474 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7475 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7477 flags |= GOVD_PRIVATE_OUTER_REF;
7478 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7480 else
7481 notice_outer = false;
7482 goto do_add;
7483 case OMP_CLAUSE_SHARED:
7484 flags = GOVD_SHARED | GOVD_EXPLICIT;
7485 goto do_add;
7486 case OMP_CLAUSE_FIRSTPRIVATE:
7487 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7488 check_non_private = "firstprivate";
7489 goto do_add;
7490 case OMP_CLAUSE_LASTPRIVATE:
/* Lastprivate on a combined construct: also register DECL in the
   enclosing combined contexts (parallel/teams/task/workshare) so the
   final value can propagate outward.  */
7491 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7492 check_non_private = "lastprivate";
7493 decl = OMP_CLAUSE_DECL (c);
7494 if (error_operand_p (decl))
7495 goto do_add;
7496 else if (outer_ctx
7497 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7498 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7499 && splay_tree_lookup (outer_ctx->variables,
7500 (splay_tree_key) decl) == NULL)
7502 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7503 if (outer_ctx->outer_context)
7504 omp_notice_variable (outer_ctx->outer_context, decl, true);
7506 else if (outer_ctx
7507 && (outer_ctx->region_type & ORT_TASK) != 0
7508 && outer_ctx->combined_loop
7509 && splay_tree_lookup (outer_ctx->variables,
7510 (splay_tree_key) decl) == NULL)
7512 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7513 if (outer_ctx->outer_context)
7514 omp_notice_variable (outer_ctx->outer_context, decl, true);
7516 else if (outer_ctx
7517 && (outer_ctx->region_type == ORT_WORKSHARE
7518 || outer_ctx->region_type == ORT_ACC)
7519 && outer_ctx->combined_loop
7520 && splay_tree_lookup (outer_ctx->variables,
7521 (splay_tree_key) decl) == NULL
7522 && !omp_check_private (outer_ctx, decl, false))
7524 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7525 if (outer_ctx->outer_context
7526 && (outer_ctx->outer_context->region_type
7527 == ORT_COMBINED_PARALLEL)
7528 && splay_tree_lookup (outer_ctx->outer_context->variables,
7529 (splay_tree_key) decl) == NULL)
7531 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7532 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7533 if (octx->outer_context)
7535 octx = octx->outer_context;
7536 if (octx->region_type == ORT_WORKSHARE
7537 && octx->combined_loop
7538 && splay_tree_lookup (octx->variables,
7539 (splay_tree_key) decl) == NULL
7540 && !omp_check_private (octx, decl, false))
7542 omp_add_variable (octx, decl,
7543 GOVD_LASTPRIVATE | GOVD_SEEN);
7544 octx = octx->outer_context;
7545 if (octx
7546 && octx->region_type == ORT_COMBINED_TEAMS
7547 && (splay_tree_lookup (octx->variables,
7548 (splay_tree_key) decl)
7549 == NULL))
7551 omp_add_variable (octx, decl,
7552 GOVD_SHARED | GOVD_SEEN);
7553 octx = octx->outer_context;
7556 if (octx)
7557 omp_notice_variable (octx, decl, true);
7560 else if (outer_ctx->outer_context)
7561 omp_notice_variable (outer_ctx->outer_context, decl, true);
7563 goto do_add;
7564 case OMP_CLAUSE_REDUCTION:
/* A MEM_REF decl here denotes an array-section reduction; gimplify the
   section bounds and firstprivatize any decls they reference, then peel
   back to the underlying base decl for do_add_decl.  */
7565 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7566 /* OpenACC permits reductions on private variables. */
7567 if (!(region_type & ORT_ACC))
7568 check_non_private = "reduction";
7569 decl = OMP_CLAUSE_DECL (c);
7570 if (TREE_CODE (decl) == MEM_REF)
7572 tree type = TREE_TYPE (decl);
7573 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7574 NULL, is_gimple_val, fb_rvalue, false)
7575 == GS_ERROR)
7577 remove = true;
7578 break;
7580 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7581 if (DECL_P (v))
7583 omp_firstprivatize_variable (ctx, v);
7584 omp_notice_variable (ctx, v, true);
7586 decl = TREE_OPERAND (decl, 0);
7587 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7589 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7590 NULL, is_gimple_val, fb_rvalue, false)
7591 == GS_ERROR)
7593 remove = true;
7594 break;
7596 v = TREE_OPERAND (decl, 1);
7597 if (DECL_P (v))
7599 omp_firstprivatize_variable (ctx, v);
7600 omp_notice_variable (ctx, v, true);
7602 decl = TREE_OPERAND (decl, 0);
7604 if (TREE_CODE (decl) == ADDR_EXPR
7605 || TREE_CODE (decl) == INDIRECT_REF)
7606 decl = TREE_OPERAND (decl, 0);
7608 goto do_add_decl;
7609 case OMP_CLAUSE_LINEAR:
7610 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7611 is_gimple_val, fb_rvalue) == GS_ERROR)
7613 remove = true;
7614 break;
7616 else
7618 if (code == OMP_SIMD
7619 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7621 struct gimplify_omp_ctx *octx = outer_ctx;
7622 if (octx
7623 && octx->region_type == ORT_WORKSHARE
7624 && octx->combined_loop
7625 && !octx->distribute)
7627 if (octx->outer_context
7628 && (octx->outer_context->region_type
7629 == ORT_COMBINED_PARALLEL))
7630 octx = octx->outer_context->outer_context;
7631 else
7632 octx = octx->outer_context;
7634 if (octx
7635 && octx->region_type == ORT_WORKSHARE
7636 && octx->combined_loop
7637 && octx->distribute)
7639 error_at (OMP_CLAUSE_LOCATION (c),
7640 "%<linear%> clause for variable other than "
7641 "loop iterator specified on construct "
7642 "combined with %<distribute%>");
7643 remove = true;
7644 break;
7647 /* For combined #pragma omp parallel for simd, need to put
7648 lastprivate and perhaps firstprivate too on the
7649 parallel. Similarly for #pragma omp for simd. */
7650 struct gimplify_omp_ctx *octx = outer_ctx;
7651 decl = NULL_TREE;
/* NOTE(review): the following statements form a do/while (1) loop whose
   opening "do" line is not visible in this extraction.  */
7654 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7655 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7656 break;
7657 decl = OMP_CLAUSE_DECL (c);
7658 if (error_operand_p (decl))
7660 decl = NULL_TREE;
7661 break;
7663 flags = GOVD_SEEN;
7664 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7665 flags |= GOVD_FIRSTPRIVATE;
7666 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7667 flags |= GOVD_LASTPRIVATE;
7668 if (octx
7669 && octx->region_type == ORT_WORKSHARE
7670 && octx->combined_loop)
7672 if (octx->outer_context
7673 && (octx->outer_context->region_type
7674 == ORT_COMBINED_PARALLEL))
7675 octx = octx->outer_context;
7676 else if (omp_check_private (octx, decl, false))
7677 break;
7679 else if (octx
7680 && (octx->region_type & ORT_TASK) != 0
7681 && octx->combined_loop)
7683 else if (octx
7684 && octx->region_type == ORT_COMBINED_PARALLEL
7685 && ctx->region_type == ORT_WORKSHARE
7686 && octx == outer_ctx)
7687 flags = GOVD_SEEN | GOVD_SHARED;
7688 else if (octx
7689 && octx->region_type == ORT_COMBINED_TEAMS)
7690 flags = GOVD_SEEN | GOVD_SHARED;
7691 else if (octx
7692 && octx->region_type == ORT_COMBINED_TARGET)
7694 flags &= ~GOVD_LASTPRIVATE;
7695 if (flags == GOVD_SEEN)
7696 break;
7698 else
7699 break;
7700 splay_tree_node on
7701 = splay_tree_lookup (octx->variables,
7702 (splay_tree_key) decl);
7703 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7705 octx = NULL;
7706 break;
7708 omp_add_variable (octx, decl, flags);
7709 if (octx->outer_context == NULL)
7710 break;
7711 octx = octx->outer_context;
7713 while (1);
7714 if (octx
7715 && decl
7716 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7717 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7718 omp_notice_variable (octx, decl, true);
7720 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7721 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7722 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7724 notice_outer = false;
7725 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7727 goto do_add;
7729 case OMP_CLAUSE_MAP:
/* MAP clauses get the heaviest treatment: validity per construct kind,
   size gimplification, and in-place rewriting of component maps of a
   struct into a GOMP_MAP_STRUCT group kept sorted by member offset
   (tracked via struct_map_to_clause and prev_list_p).  */
7730 decl = OMP_CLAUSE_DECL (c);
7731 if (error_operand_p (decl))
7732 remove = true;
7733 switch (code)
7735 case OMP_TARGET:
7736 break;
7737 case OACC_DATA:
7738 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7739 break;
7740 /* FALLTHRU */
7741 case OMP_TARGET_DATA:
7742 case OMP_TARGET_ENTER_DATA:
7743 case OMP_TARGET_EXIT_DATA:
7744 case OACC_ENTER_DATA:
7745 case OACC_EXIT_DATA:
7746 case OACC_HOST_DATA:
7747 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7748 || (OMP_CLAUSE_MAP_KIND (c)
7749 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7750 /* For target {,enter ,exit }data only the array slice is
7751 mapped, but not the pointer to it. */
7752 remove = true;
7753 break;
7754 default:
7755 break;
7757 if (remove)
7758 break;
7759 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7761 struct gimplify_omp_ctx *octx;
7762 for (octx = outer_ctx; octx; octx = octx->outer_context)
7764 if (octx->region_type != ORT_ACC_HOST_DATA)
7765 break;
7766 splay_tree_node n2
7767 = splay_tree_lookup (octx->variables,
7768 (splay_tree_key) decl);
7769 if (n2)
7770 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7771 "declared in enclosing %<host_data%> region",
7772 DECL_NAME (decl));
7775 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7776 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7777 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7778 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7779 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7781 remove = true;
7782 break;
7784 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7785 || (OMP_CLAUSE_MAP_KIND (c)
7786 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7787 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7789 OMP_CLAUSE_SIZE (c)
7790 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7791 false);
7792 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7793 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7795 if (!DECL_P (decl))
/* Non-decl map operand (array section / component ref): normalize it
   down to the underlying base decl where possible.  */
7797 tree d = decl, *pd;
7798 if (TREE_CODE (d) == ARRAY_REF)
7800 while (TREE_CODE (d) == ARRAY_REF)
7801 d = TREE_OPERAND (d, 0);
7802 if (TREE_CODE (d) == COMPONENT_REF
7803 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7804 decl = d;
7806 pd = &OMP_CLAUSE_DECL (c);
7807 if (d == decl
7808 && TREE_CODE (decl) == INDIRECT_REF
7809 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7810 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7811 == REFERENCE_TYPE))
7813 pd = &TREE_OPERAND (decl, 0);
7814 decl = TREE_OPERAND (decl, 0);
7816 if (TREE_CODE (decl) == COMPONENT_REF)
7818 while (TREE_CODE (decl) == COMPONENT_REF)
7819 decl = TREE_OPERAND (decl, 0);
7820 if (TREE_CODE (decl) == INDIRECT_REF
7821 && DECL_P (TREE_OPERAND (decl, 0))
7822 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7823 == REFERENCE_TYPE))
7824 decl = TREE_OPERAND (decl, 0);
7826 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7827 == GS_ERROR)
7829 remove = true;
7830 break;
7832 if (DECL_P (decl))
7834 if (error_operand_p (decl))
7836 remove = true;
7837 break;
7840 tree stype = TREE_TYPE (decl);
7841 if (TREE_CODE (stype) == REFERENCE_TYPE)
7842 stype = TREE_TYPE (stype);
7843 if (TYPE_SIZE_UNIT (stype) == NULL
7844 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7846 error_at (OMP_CLAUSE_LOCATION (c),
7847 "mapping field %qE of variable length "
7848 "structure", OMP_CLAUSE_DECL (c));
7849 remove = true;
7850 break;
7853 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7855 /* Error recovery. */
7856 if (prev_list_p == NULL)
7858 remove = true;
7859 break;
7861 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7863 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7864 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7866 remove = true;
7867 break;
/* Compute the byte offset of the mapped component within its base
   struct; used below to keep the GOMP_MAP_STRUCT group sorted.  */
7872 tree offset;
7873 HOST_WIDE_INT bitsize, bitpos;
7874 machine_mode mode;
7875 int unsignedp, reversep, volatilep = 0;
7876 tree base = OMP_CLAUSE_DECL (c);
7877 while (TREE_CODE (base) == ARRAY_REF)
7878 base = TREE_OPERAND (base, 0);
7879 if (TREE_CODE (base) == INDIRECT_REF)
7880 base = TREE_OPERAND (base, 0);
7881 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7882 &mode, &unsignedp, &reversep,
7883 &volatilep);
7884 tree orig_base = base;
7885 if ((TREE_CODE (base) == INDIRECT_REF
7886 || (TREE_CODE (base) == MEM_REF
7887 && integer_zerop (TREE_OPERAND (base, 1))))
7888 && DECL_P (TREE_OPERAND (base, 0))
7889 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7890 == REFERENCE_TYPE))
7891 base = TREE_OPERAND (base, 0);
7892 gcc_assert (base == decl
7893 && (offset == NULL_TREE
7894 || TREE_CODE (offset) == INTEGER_CST));
7896 splay_tree_node n
7897 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7898 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7899 == GOMP_MAP_ALWAYS_POINTER);
7900 if (n == NULL || (n->value & GOVD_MAP) == 0)
/* First component map of this struct: synthesize the containing
   GOMP_MAP_STRUCT clause L and record it in struct_map_to_clause.  */
7902 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7903 OMP_CLAUSE_MAP);
7904 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7905 if (orig_base != base)
7906 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7907 else
7908 OMP_CLAUSE_DECL (l) = decl;
7909 OMP_CLAUSE_SIZE (l) = size_int (1);
7910 if (struct_map_to_clause == NULL)
7911 struct_map_to_clause = new hash_map<tree, tree>;
7912 struct_map_to_clause->put (decl, l);
7913 if (ptr)
7915 enum gomp_map_kind mkind
7916 = code == OMP_TARGET_EXIT_DATA
7917 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7918 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7919 OMP_CLAUSE_MAP);
7920 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7921 OMP_CLAUSE_DECL (c2)
7922 = unshare_expr (OMP_CLAUSE_DECL (c));
7923 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7924 OMP_CLAUSE_SIZE (c2)
7925 = TYPE_SIZE_UNIT (ptr_type_node);
7926 OMP_CLAUSE_CHAIN (l) = c2;
7927 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7929 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7930 tree c3
7931 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7932 OMP_CLAUSE_MAP);
7933 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7934 OMP_CLAUSE_DECL (c3)
7935 = unshare_expr (OMP_CLAUSE_DECL (c4));
7936 OMP_CLAUSE_SIZE (c3)
7937 = TYPE_SIZE_UNIT (ptr_type_node);
7938 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7939 OMP_CLAUSE_CHAIN (c2) = c3;
7941 *prev_list_p = l;
7942 prev_list_p = NULL;
7944 else
7946 OMP_CLAUSE_CHAIN (l) = c;
7947 *list_p = l;
7948 list_p = &OMP_CLAUSE_CHAIN (l);
7950 if (orig_base != base && code == OMP_TARGET)
7952 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7953 OMP_CLAUSE_MAP);
7954 enum gomp_map_kind mkind
7955 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7956 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7957 OMP_CLAUSE_DECL (c2) = decl;
7958 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7959 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7960 OMP_CLAUSE_CHAIN (l) = c2;
7962 flags = GOVD_MAP | GOVD_EXPLICIT;
7963 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7964 flags |= GOVD_SEEN;
7965 goto do_add_decl;
7967 else
/* Struct already has a GOMP_MAP_STRUCT clause: splice C into the
   group at the position keeping members sorted by offset, and bump
   the group's member count (its SIZE operand).  */
7969 tree *osc = struct_map_to_clause->get (decl);
7970 tree *sc = NULL, *scp = NULL;
7971 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7972 n->value |= GOVD_SEEN;
7973 offset_int o1, o2;
7974 if (offset)
7975 o1 = wi::to_offset (offset);
7976 else
7977 o1 = 0;
7978 if (bitpos)
7979 o1 = o1 + bitpos / BITS_PER_UNIT;
7980 sc = &OMP_CLAUSE_CHAIN (*osc);
7981 if (*sc != c
7982 && (OMP_CLAUSE_MAP_KIND (*sc)
7983 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7984 sc = &OMP_CLAUSE_CHAIN (*sc);
7985 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7986 if (ptr && sc == prev_list_p)
7987 break;
7988 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7989 != COMPONENT_REF
7990 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7991 != INDIRECT_REF)
7992 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7993 != ARRAY_REF))
7994 break;
7995 else
7997 tree offset2;
7998 HOST_WIDE_INT bitsize2, bitpos2;
7999 base = OMP_CLAUSE_DECL (*sc);
8000 if (TREE_CODE (base) == ARRAY_REF)
8002 while (TREE_CODE (base) == ARRAY_REF)
8003 base = TREE_OPERAND (base, 0);
8004 if (TREE_CODE (base) != COMPONENT_REF
8005 || (TREE_CODE (TREE_TYPE (base))
8006 != ARRAY_TYPE))
8007 break;
8009 else if (TREE_CODE (base) == INDIRECT_REF
8010 && (TREE_CODE (TREE_OPERAND (base, 0))
8011 == COMPONENT_REF)
8012 && (TREE_CODE (TREE_TYPE
8013 (TREE_OPERAND (base, 0)))
8014 == REFERENCE_TYPE))
8015 base = TREE_OPERAND (base, 0);
8016 base = get_inner_reference (base, &bitsize2,
8017 &bitpos2, &offset2,
8018 &mode, &unsignedp,
8019 &reversep, &volatilep);
8020 if ((TREE_CODE (base) == INDIRECT_REF
8021 || (TREE_CODE (base) == MEM_REF
8022 && integer_zerop (TREE_OPERAND (base,
8023 1))))
8024 && DECL_P (TREE_OPERAND (base, 0))
8025 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8026 0)))
8027 == REFERENCE_TYPE))
8028 base = TREE_OPERAND (base, 0);
8029 if (base != decl)
8030 break;
8031 if (scp)
8032 continue;
8033 gcc_assert (offset == NULL_TREE
8034 || TREE_CODE (offset) == INTEGER_CST);
8035 tree d1 = OMP_CLAUSE_DECL (*sc);
8036 tree d2 = OMP_CLAUSE_DECL (c);
8037 while (TREE_CODE (d1) == ARRAY_REF)
8038 d1 = TREE_OPERAND (d1, 0);
8039 while (TREE_CODE (d2) == ARRAY_REF)
8040 d2 = TREE_OPERAND (d2, 0);
8041 if (TREE_CODE (d1) == INDIRECT_REF)
8042 d1 = TREE_OPERAND (d1, 0);
8043 if (TREE_CODE (d2) == INDIRECT_REF)
8044 d2 = TREE_OPERAND (d2, 0);
8045 while (TREE_CODE (d1) == COMPONENT_REF)
8046 if (TREE_CODE (d2) == COMPONENT_REF
8047 && TREE_OPERAND (d1, 1)
8048 == TREE_OPERAND (d2, 1))
8050 d1 = TREE_OPERAND (d1, 0);
8051 d2 = TREE_OPERAND (d2, 0);
8053 else
8054 break;
8055 if (d1 == d2)
8057 error_at (OMP_CLAUSE_LOCATION (c),
8058 "%qE appears more than once in map "
8059 "clauses", OMP_CLAUSE_DECL (c));
8060 remove = true;
8061 break;
8063 if (offset2)
8064 o2 = wi::to_offset (offset2);
8065 else
8066 o2 = 0;
8067 if (bitpos2)
8068 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8069 if (wi::ltu_p (o1, o2)
8070 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8072 if (ptr)
8073 scp = sc;
8074 else
8075 break;
8078 if (remove)
8079 break;
8080 OMP_CLAUSE_SIZE (*osc)
8081 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8082 size_one_node);
8083 if (ptr)
8085 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8086 OMP_CLAUSE_MAP);
8087 tree cl = NULL_TREE;
8088 enum gomp_map_kind mkind
8089 = code == OMP_TARGET_EXIT_DATA
8090 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8091 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8092 OMP_CLAUSE_DECL (c2)
8093 = unshare_expr (OMP_CLAUSE_DECL (c));
8094 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8095 OMP_CLAUSE_SIZE (c2)
8096 = TYPE_SIZE_UNIT (ptr_type_node);
8097 cl = scp ? *prev_list_p : c2;
8098 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8100 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8101 tree c3
8102 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8103 OMP_CLAUSE_MAP);
8104 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8105 OMP_CLAUSE_DECL (c3)
8106 = unshare_expr (OMP_CLAUSE_DECL (c4));
8107 OMP_CLAUSE_SIZE (c3)
8108 = TYPE_SIZE_UNIT (ptr_type_node);
8109 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8110 if (!scp)
8111 OMP_CLAUSE_CHAIN (c2) = c3;
8112 else
8113 cl = c3;
8115 if (scp)
8116 *scp = c2;
8117 if (sc == prev_list_p)
8119 *sc = cl;
8120 prev_list_p = NULL;
8122 else
8124 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8125 list_p = prev_list_p;
8126 prev_list_p = NULL;
8127 OMP_CLAUSE_CHAIN (c) = *sc;
8128 *sc = cl;
8129 continue;
8132 else if (*sc != c)
8134 *list_p = OMP_CLAUSE_CHAIN (c);
8135 OMP_CLAUSE_CHAIN (c) = *sc;
8136 *sc = c;
8137 continue;
8141 if (!remove
8142 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8143 && OMP_CLAUSE_CHAIN (c)
8144 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8145 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8146 == GOMP_MAP_ALWAYS_POINTER))
8147 prev_list_p = list_p;
8148 break;
8150 flags = GOVD_MAP | GOVD_EXPLICIT;
8151 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8152 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8153 flags |= GOVD_MAP_ALWAYS_TO;
8154 goto do_add;
8156 case OMP_CLAUSE_DEPEND:
8157 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8159 tree deps = OMP_CLAUSE_DECL (c);
8160 while (deps && TREE_CODE (deps) == TREE_LIST)
8162 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8163 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8164 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8165 pre_p, NULL, is_gimple_val, fb_rvalue);
8166 deps = TREE_CHAIN (deps);
8168 break;
8170 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8171 break;
8172 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8174 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8175 NULL, is_gimple_val, fb_rvalue);
8176 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8178 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8180 remove = true;
8181 break;
/* Depend operand is passed by address to the runtime.  */
8183 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8184 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8185 is_gimple_val, fb_rvalue) == GS_ERROR)
8187 remove = true;
8188 break;
8190 break;
8192 case OMP_CLAUSE_TO:
8193 case OMP_CLAUSE_FROM:
8194 case OMP_CLAUSE__CACHE_:
8195 decl = OMP_CLAUSE_DECL (c);
8196 if (error_operand_p (decl))
8198 remove = true;
8199 break;
8201 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8202 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8203 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8204 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8205 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8207 remove = true;
8208 break;
8210 if (!DECL_P (decl))
8212 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8213 NULL, is_gimple_lvalue, fb_lvalue)
8214 == GS_ERROR)
8216 remove = true;
8217 break;
8219 break;
8221 goto do_notice;
8223 case OMP_CLAUSE_USE_DEVICE_PTR:
8224 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8225 goto do_add;
8226 case OMP_CLAUSE_IS_DEVICE_PTR:
8227 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8228 goto do_add;
/* Shared tail for clauses that install a data-sharing mapping: add
   DECL to CTX with FLAGS, then gimplify any reduction/lastprivate/
   linear statement operands inside CTX.  */
8230 do_add:
8231 decl = OMP_CLAUSE_DECL (c);
8232 do_add_decl:
8233 if (error_operand_p (decl))
8235 remove = true;
8236 break;
8238 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8240 tree t = omp_member_access_dummy_var (decl);
8241 if (t)
8243 tree v = DECL_VALUE_EXPR (decl);
8244 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8245 if (outer_ctx)
8246 omp_notice_variable (outer_ctx, t, true);
8249 if (code == OACC_DATA
8250 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8251 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8252 flags |= GOVD_MAP_0LEN_ARRAY;
8253 omp_add_variable (ctx, decl, flags);
8254 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8255 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8257 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8258 GOVD_LOCAL | GOVD_SEEN);
8259 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8260 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8261 find_decl_expr,
8262 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8263 NULL) == NULL_TREE)
8264 omp_add_variable (ctx,
8265 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8266 GOVD_LOCAL | GOVD_SEEN);
8267 gimplify_omp_ctxp = ctx;
8268 push_gimplify_context ();
8270 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8271 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8273 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8274 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8275 pop_gimplify_context
8276 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8277 push_gimplify_context ();
8278 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8279 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8280 pop_gimplify_context
8281 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8282 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8283 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8285 gimplify_omp_ctxp = outer_ctx;
8287 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8288 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8290 gimplify_omp_ctxp = ctx;
8291 push_gimplify_context ();
8292 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8294 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8295 NULL, NULL);
8296 TREE_SIDE_EFFECTS (bind) = 1;
8297 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8298 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8300 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8301 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8302 pop_gimplify_context
8303 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8304 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8306 gimplify_omp_ctxp = outer_ctx;
8308 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8309 && OMP_CLAUSE_LINEAR_STMT (c))
8311 gimplify_omp_ctxp = ctx;
8312 push_gimplify_context ();
8313 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8315 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8316 NULL, NULL);
8317 TREE_SIDE_EFFECTS (bind) = 1;
8318 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8319 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8321 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8322 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8323 pop_gimplify_context
8324 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8325 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8327 gimplify_omp_ctxp = outer_ctx;
8329 if (notice_outer)
8330 goto do_notice;
8331 break;
8333 case OMP_CLAUSE_COPYIN:
8334 case OMP_CLAUSE_COPYPRIVATE:
8335 decl = OMP_CLAUSE_DECL (c);
8336 if (error_operand_p (decl))
8338 remove = true;
8339 break;
8341 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8342 && !remove
8343 && !omp_check_private (ctx, decl, true))
8345 remove = true;
8346 if (is_global_var (decl))
8348 if (DECL_THREAD_LOCAL_P (decl))
8349 remove = false;
8350 else if (DECL_HAS_VALUE_EXPR_P (decl))
8352 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8354 if (value
8355 && DECL_P (value)
8356 && DECL_THREAD_LOCAL_P (value))
8357 remove = false;
8360 if (remove)
8361 error_at (OMP_CLAUSE_LOCATION (c),
8362 "copyprivate variable %qE is not threadprivate"
8363 " or private in outer context", DECL_NAME (decl));
/* Shared tail: propagate DECL's usage to the enclosing context and
   diagnose clauses naming variables private in an outer context.  */
8365 do_notice:
8366 if (outer_ctx)
8367 omp_notice_variable (outer_ctx, decl, true);
8368 if (check_non_private
8369 && region_type == ORT_WORKSHARE
8370 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8371 || decl == OMP_CLAUSE_DECL (c)
8372 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8373 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8374 == ADDR_EXPR
8375 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8376 == POINTER_PLUS_EXPR
8377 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8378 (OMP_CLAUSE_DECL (c), 0), 0))
8379 == ADDR_EXPR)))))
8380 && omp_check_private (ctx, decl, false))
8382 error ("%s variable %qE is private in outer context",
8383 check_non_private, DECL_NAME (decl));
8384 remove = true;
8386 break;
8388 case OMP_CLAUSE_IF:
/* Diagnose an if-clause directive-name modifier that does not match
   the construct the clause appears on.  */
8389 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8390 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8392 const char *p[2];
8393 for (int i = 0; i < 2; i++)
8394 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8396 case OMP_PARALLEL: p[i] = "parallel"; break;
8397 case OMP_TASK: p[i] = "task"; break;
8398 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8399 case OMP_TARGET_DATA: p[i] = "target data"; break;
8400 case OMP_TARGET: p[i] = "target"; break;
8401 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8402 case OMP_TARGET_ENTER_DATA:
8403 p[i] = "target enter data"; break;
8404 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8405 default: gcc_unreachable ();
8407 error_at (OMP_CLAUSE_LOCATION (c),
8408 "expected %qs %<if%> clause modifier rather than %qs",
8409 p[0], p[1]);
8410 remove = true;
8412 /* Fall through. */
8414 case OMP_CLAUSE_FINAL:
8415 OMP_CLAUSE_OPERAND (c, 0)
8416 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8417 /* Fall through. */
/* Clauses with a single expression operand: just gimplify it.  */
8419 case OMP_CLAUSE_SCHEDULE:
8420 case OMP_CLAUSE_NUM_THREADS:
8421 case OMP_CLAUSE_NUM_TEAMS:
8422 case OMP_CLAUSE_THREAD_LIMIT:
8423 case OMP_CLAUSE_DIST_SCHEDULE:
8424 case OMP_CLAUSE_DEVICE:
8425 case OMP_CLAUSE_PRIORITY:
8426 case OMP_CLAUSE_GRAINSIZE:
8427 case OMP_CLAUSE_NUM_TASKS:
8428 case OMP_CLAUSE_HINT:
8429 case OMP_CLAUSE__CILK_FOR_COUNT_:
8430 case OMP_CLAUSE_ASYNC:
8431 case OMP_CLAUSE_WAIT:
8432 case OMP_CLAUSE_NUM_GANGS:
8433 case OMP_CLAUSE_NUM_WORKERS:
8434 case OMP_CLAUSE_VECTOR_LENGTH:
8435 case OMP_CLAUSE_WORKER:
8436 case OMP_CLAUSE_VECTOR:
8437 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8438 is_gimple_val, fb_rvalue) == GS_ERROR)
8439 remove = true;
8440 break;
8442 case OMP_CLAUSE_GANG:
/* GANG has two operands; gimplify both.  */
8443 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8444 is_gimple_val, fb_rvalue) == GS_ERROR)
8445 remove = true;
8446 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8447 is_gimple_val, fb_rvalue) == GS_ERROR)
8448 remove = true;
8449 break;
/* Clauses with nothing to gimplify.  */
8451 case OMP_CLAUSE_NOWAIT:
8452 case OMP_CLAUSE_ORDERED:
8453 case OMP_CLAUSE_UNTIED:
8454 case OMP_CLAUSE_COLLAPSE:
8455 case OMP_CLAUSE_TILE:
8456 case OMP_CLAUSE_AUTO:
8457 case OMP_CLAUSE_SEQ:
8458 case OMP_CLAUSE_INDEPENDENT:
8459 case OMP_CLAUSE_MERGEABLE:
8460 case OMP_CLAUSE_PROC_BIND:
8461 case OMP_CLAUSE_SAFELEN:
8462 case OMP_CLAUSE_SIMDLEN:
8463 case OMP_CLAUSE_NOGROUP:
8464 case OMP_CLAUSE_THREADS:
8465 case OMP_CLAUSE_SIMD:
8466 break;
8468 case OMP_CLAUSE_DEFAULTMAP:
8469 ctx->target_map_scalars_firstprivate = false;
8470 break;
8472 case OMP_CLAUSE_ALIGNED:
8473 decl = OMP_CLAUSE_DECL (c);
8474 if (error_operand_p (decl))
8476 remove = true;
8477 break;
8479 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8480 is_gimple_val, fb_rvalue) == GS_ERROR)
8482 remove = true;
8483 break;
8485 if (!is_global_var (decl)
8486 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8487 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8488 break;
8490 case OMP_CLAUSE_DEFAULT:
8491 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8492 break;
8494 default:
8495 gcc_unreachable ();
/* End of per-clause switch: unlink C if flagged, else advance.  */
8498 if (code == OACC_DATA
8499 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8500 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8501 remove = true;
8502 if (remove)
8503 *list_p = OMP_CLAUSE_CHAIN (c);
8504 else
8505 list_p = &OMP_CLAUSE_CHAIN (c);
/* Leave the new context installed for the construct body's
   gimplification; it is popped later by the caller's machinery.  */
8508 gimplify_omp_ctxp = ctx;
8509 if (struct_map_to_clause)
8510 delete struct_map_to_clause;
8513 /* Return true if DECL is a candidate for shared to firstprivate
8514 optimization. We only consider non-addressable scalars, not
8515 too big, and not references. */
8517 static bool
8518 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8520 if (TREE_ADDRESSABLE (decl))
8521 return false;
8522 tree type = TREE_TYPE (decl);
8523 if (!is_gimple_reg_type (type)
8524 || TREE_CODE (type) == REFERENCE_TYPE
8525 || TREE_ADDRESSABLE (type))
8526 return false;
8527 /* Don't optimize too large decls, as each thread/task will have
8528 its own. */
8529 HOST_WIDE_INT len = int_size_in_bytes (type);
8530 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8531 return false;
8532 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8533 return false;
8534 return true;
8537 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8538 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8539 GOVD_WRITTEN in outer contexts. */
8541 static void
8542 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8544 for (; ctx; ctx = ctx->outer_context)
8546 splay_tree_node n = splay_tree_lookup (ctx->variables,
8547 (splay_tree_key) decl);
8548 if (n == NULL)
8549 continue;
8550 else if (n->value & GOVD_SHARED)
8552 n->value |= GOVD_WRITTEN;
8553 return;
8555 else if (n->value & GOVD_DATA_SHARE_CLASS)
8556 return;
8560 /* Helper callback for walk_gimple_seq to discover possible stores
8561 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8562 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8563 for those. */
8565 static tree
8566 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8568 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8570 *walk_subtrees = 0;
8571 if (!wi->is_lhs)
8572 return NULL_TREE;
8574 tree op = *tp;
8577 if (handled_component_p (op))
8578 op = TREE_OPERAND (op, 0);
8579 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8580 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8581 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8582 else
8583 break;
8585 while (1);
8586 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8587 return NULL_TREE;
8589 omp_mark_stores (gimplify_omp_ctxp, op);
8590 return NULL_TREE;
8593 /* Helper callback for walk_gimple_seq to discover possible stores
8594 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8595 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8596 for those. */
8598 static tree
8599 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8600 bool *handled_ops_p,
8601 struct walk_stmt_info *wi)
8603 gimple *stmt = gsi_stmt (*gsi_p);
8604 switch (gimple_code (stmt))
8606 /* Don't recurse on OpenMP constructs for which
8607 gimplify_adjust_omp_clauses already handled the bodies,
8608 except handle gimple_omp_for_pre_body. */
8609 case GIMPLE_OMP_FOR:
8610 *handled_ops_p = true;
8611 if (gimple_omp_for_pre_body (stmt))
8612 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8613 omp_find_stores_stmt, omp_find_stores_op, wi);
8614 break;
8615 case GIMPLE_OMP_PARALLEL:
8616 case GIMPLE_OMP_TASK:
8617 case GIMPLE_OMP_SECTIONS:
8618 case GIMPLE_OMP_SINGLE:
8619 case GIMPLE_OMP_TARGET:
8620 case GIMPLE_OMP_TEAMS:
8621 case GIMPLE_OMP_CRITICAL:
8622 *handled_ops_p = true;
8623 break;
8624 default:
8625 break;
8627 return NULL_TREE;
/* State handed by gimplify_adjust_omp_clauses to the splay-tree walk
   in gimplify_adjust_omp_clauses_1.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Tail of the clause chain implicit clauses are prepended to.  */
  tree *list_p;
  /* Statement sequence receiving any statements gimplified while
     building those clauses.  */
  gimple_seq *pre_p;
};
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

/* Splay-tree foreach callback used by gimplify_adjust_omp_clauses:
   N->key is a decl, N->value its GOVD_* flags, DATA points to a
   struct gimplify_adjust_omp_clauses_data.  Prepends to *DATA->list_p
   the implicit data-sharing clause(s) the flags call for, if any.
   Always returns 0 so the splay-tree walk continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Decls with an explicit clause, local decls, and decls never seen
     in the region need no implicit clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));

  /* Translate the GOVD_* flags into the clause code to emit.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  /* A global not privatized in any outer context needs no
	     explicit sharing clause.  */
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Zero-length array section: emit a GOMP_MAP_ALLOC of *decl plus
	 a GOMP_MAP_FIRSTPRIVATE_POINTER for the base pointer itself.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the enclosing context.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the storage behind its
	     DECL_VALUE_EXPR and add a pointer clause for the base.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map the referenced object and pass
	     the reference itself via GOMP_MAP_FIRSTPRIVATE_REFERENCE.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Decl is both firstprivate and lastprivate: also emit the
	 lastprivate clause, finished in the enclosing context.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the frontend finish the clause(s) in the enclosing context, and
     notice any DECL-valued map sizes it introduced there.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
/* Post-process the clause chain *LIST_P of the construct with tree code
   CODE after its body (BODY, may be NULL) has been gimplified: scan the
   body for stores to shared-to-firstprivate-optimizable decls, drop
   clauses for variables that were never seen, rewrite variable-sized
   mappings, then append implicit data-sharing clauses via
   gimplify_adjust_omp_clauses_1.  Pops and deletes the current
   gimplify_omp_ctxp context.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  if (body)
    {
      /* Only bother scanning for stores when some enclosing region can
	 actually share variables.  */
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
					(TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop the clause if the variable was never seen in the body.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to present_or_copy",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: map the storage behind its
		 DECL_VALUE_EXPR and add a pointer clause for the base.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-sized-decl rewrite as for OMP_CLAUSE_MAP.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Error if the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			  "reduction on %qE", DECL_NAME (decl));
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
		  /* Mark the whole chain the frontend may have added.  */
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  /* Nothing to adjust for these.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
9267 /* Gimplify OACC_CACHE. */
9269 static void
9270 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9272 tree expr = *expr_p;
9274 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9275 OACC_CACHE);
9276 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9277 OACC_CACHE);
9279 /* TODO: Do something sensible with this information. */
9281 *expr_p = NULL_TREE;
9284 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9285 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9286 kind. The entry kind will replace the one in CLAUSE, while the exit
9287 kind will be used in a new omp_clause and returned to the caller. */
9289 static tree
9290 gimplify_oacc_declare_1 (tree clause)
9292 HOST_WIDE_INT kind, new_op;
9293 bool ret = false;
9294 tree c = NULL;
9296 kind = OMP_CLAUSE_MAP_KIND (clause);
9298 switch (kind)
9300 case GOMP_MAP_ALLOC:
9301 case GOMP_MAP_FORCE_ALLOC:
9302 case GOMP_MAP_FORCE_TO:
9303 new_op = GOMP_MAP_DELETE;
9304 ret = true;
9305 break;
9307 case GOMP_MAP_FORCE_FROM:
9308 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9309 new_op = GOMP_MAP_FORCE_FROM;
9310 ret = true;
9311 break;
9313 case GOMP_MAP_FORCE_TOFROM:
9314 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9315 new_op = GOMP_MAP_FORCE_FROM;
9316 ret = true;
9317 break;
9319 case GOMP_MAP_FROM:
9320 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9321 new_op = GOMP_MAP_FROM;
9322 ret = true;
9323 break;
9325 case GOMP_MAP_TOFROM:
9326 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9327 new_op = GOMP_MAP_FROM;
9328 ret = true;
9329 break;
9331 case GOMP_MAP_DEVICE_RESIDENT:
9332 case GOMP_MAP_FORCE_DEVICEPTR:
9333 case GOMP_MAP_FORCE_PRESENT:
9334 case GOMP_MAP_LINK:
9335 case GOMP_MAP_POINTER:
9336 case GOMP_MAP_TO:
9337 break;
9339 default:
9340 gcc_unreachable ();
9341 break;
9344 if (ret)
9346 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9347 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9348 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9351 return c;
9354 /* Gimplify OACC_DECLARE. */
9356 static void
9357 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9359 tree expr = *expr_p;
9360 gomp_target *stmt;
9361 tree clauses, t, decl;
9363 clauses = OACC_DECLARE_CLAUSES (expr);
9365 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9366 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9368 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9370 decl = OMP_CLAUSE_DECL (t);
9372 if (TREE_CODE (decl) == MEM_REF)
9373 decl = TREE_OPERAND (decl, 0);
9375 if (VAR_P (decl) && !is_oacc_declared (decl))
9377 tree attr = get_identifier ("oacc declare target");
9378 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9379 DECL_ATTRIBUTES (decl));
9382 if (VAR_P (decl)
9383 && !is_global_var (decl)
9384 && DECL_CONTEXT (decl) == current_function_decl)
9386 tree c = gimplify_oacc_declare_1 (t);
9387 if (c)
9389 if (oacc_declare_returns == NULL)
9390 oacc_declare_returns = new hash_map<tree, tree>;
9392 oacc_declare_returns->put (decl, c);
9396 if (gimplify_omp_ctxp)
9397 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9400 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9401 clauses);
9403 gimplify_seq_add_stmt (pre_p, stmt);
9405 *expr_p = NULL_TREE;
9408 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9409 gimplification of the body, as well as scanning the body for used
9410 variables. We need to do this scan now, because variable-sized
9411 decls will be decomposed during gimplification. */
9413 static void
9414 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9416 tree expr = *expr_p;
9417 gimple *g;
9418 gimple_seq body = NULL;
9420 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9421 OMP_PARALLEL_COMBINED (expr)
9422 ? ORT_COMBINED_PARALLEL
9423 : ORT_PARALLEL, OMP_PARALLEL);
9425 push_gimplify_context ();
9427 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9428 if (gimple_code (g) == GIMPLE_BIND)
9429 pop_gimplify_context (g);
9430 else
9431 pop_gimplify_context (NULL);
9433 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9434 OMP_PARALLEL);
9436 g = gimple_build_omp_parallel (body,
9437 OMP_PARALLEL_CLAUSES (expr),
9438 NULL_TREE, NULL_TREE);
9439 if (OMP_PARALLEL_COMBINED (expr))
9440 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9441 gimplify_seq_add_stmt (pre_p, g);
9442 *expr_p = NULL_TREE;
9445 /* Gimplify the contents of an OMP_TASK statement. This involves
9446 gimplification of the body, as well as scanning the body for used
9447 variables. We need to do this scan now, because variable-sized
9448 decls will be decomposed during gimplification. */
9450 static void
9451 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9453 tree expr = *expr_p;
9454 gimple *g;
9455 gimple_seq body = NULL;
9457 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9458 omp_find_clause (OMP_TASK_CLAUSES (expr),
9459 OMP_CLAUSE_UNTIED)
9460 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9462 push_gimplify_context ();
9464 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9465 if (gimple_code (g) == GIMPLE_BIND)
9466 pop_gimplify_context (g);
9467 else
9468 pop_gimplify_context (NULL);
9470 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9471 OMP_TASK);
9473 g = gimple_build_omp_task (body,
9474 OMP_TASK_CLAUSES (expr),
9475 NULL_TREE, NULL_TREE,
9476 NULL_TREE, NULL_TREE, NULL_TREE);
9477 gimplify_seq_add_stmt (pre_p, g);
9478 *expr_p = NULL_TREE;
9481 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9482 with non-NULL OMP_FOR_INIT. */
9484 static tree
9485 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9487 *walk_subtrees = 0;
9488 switch (TREE_CODE (*tp))
9490 case OMP_FOR:
9491 *walk_subtrees = 1;
9492 /* FALLTHRU */
9493 case OMP_SIMD:
9494 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9495 return *tp;
9496 break;
9497 case BIND_EXPR:
9498 case STATEMENT_LIST:
9499 case OMP_PARALLEL:
9500 *walk_subtrees = 1;
9501 break;
9502 default:
9503 break;
9505 return NULL_TREE;
/* Gimplify the gross structure of an OMP_FOR statement (also used for
   OMP_SIMD, OMP_DISTRIBUTE, OMP_TASKLOOP, OACC_LOOP, CILK_FOR and
   CILK_SIMD).  Gimplifies the clauses, the pre-body and the loop
   init/cond/incr expressions, privatizes the iteration variables, builds
   the GIMPLE_OMP_FOR statement(s) and appends them to PRE_P.  Returns
   GS_ALL_DONE on success, GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gomp_for *gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bitmap has_decl_expr = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;

  orig_for_stmt = for_stmt = *expr_p;

  /* Determine the OMP region type from the construct kind.  */
  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
    case CILK_FOR:
    case OMP_DISTRIBUTE:
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
	ort = ORT_UNTIED_TASK;
      else
	ort = ORT_TASK;
      break;
    case OMP_SIMD:
    case CILK_SIMD:
      ort = ORT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }

  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && OMP_CLAUSE_DECL (c) == decl)
	  {
	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	    break;
	  }
    }

  /* A NULL OMP_FOR_INIT marks the outer construct of a combined one
     (e.g. distribute parallel for); the actual loop to gimplify is the
     innermost OMP_FOR/OMP_SIMD found inside the body.  */
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    {
      gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
      inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
				  find_combined_omp_for, NULL, NULL);
      if (inner_for_stmt == NULL_TREE)
	{
	  gcc_assert (seen_error ());
	  *expr_p = NULL_TREE;
	  return GS_ERROR;
	}
    }

  /* Taskloop clauses are scanned later, after start/end/step have been
     pulled out of the taskloop context (see below).  */
  if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
			       TREE_CODE (for_stmt));

  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  /* For simd, record which iteration variables were declared in the
     pre-body; such IVs get linear(no_copyout) instead of lastprivate.  */
  if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
    {
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
	     == VAR_DECL)
	{
	  t = OMP_FOR_PRE_BODY (for_stmt);
	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	}
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
	{
	  tree_stmt_iterator si;
	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
	       tsi_next (&si))
	    {
	      t = tsi_stmt (si);
	      if (TREE_CODE (t) == DECL_EXPR
		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	    }
	}
    }
  if (OMP_FOR_PRE_BODY (for_stmt))
    {
      if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
      else
	{
	  /* Taskloop at the outermost level: gimplify the pre-body in a
	     dummy ORT_NONE context so no data-sharing is recorded.  */
	  struct gimplify_omp_ctx ctx;
	  memset (&ctx, 0, sizeof (ctx));
	  ctx.region_type = ORT_NONE;
	  gimplify_omp_ctxp = &ctx;
	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
	  gimplify_omp_ctxp = NULL;
	}
    }
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    for_stmt = inner_for_stmt;

  /* For taskloop, need to gimplify the start, end and step before the
     taskloop, outside of the taskloop omp context.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	{
	  /* Non-constant loop bounds/steps become firstprivate temporaries
	     evaluated before the construct.  */
	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
	    {
	      TREE_OPERAND (t, 1)
		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
					   pre_p, NULL, false);
	      tree c = build_omp_clause (input_location,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
	    }

	  /* Handle OMP_FOR_COND.  */
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
	    {
	      TREE_OPERAND (t, 1)
		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
					   gimple_seq_empty_p (for_pre_body)
					   ? pre_p : &for_pre_body, NULL,
					   false);
	      tree c = build_omp_clause (input_location,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
	    }

	  /* Handle OMP_FOR_INCR.  */
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  if (TREE_CODE (t) == MODIFY_EXPR)
	    {
	      decl = TREE_OPERAND (t, 0);
	      t = TREE_OPERAND (t, 1);
	      tree *tp = &TREE_OPERAND (t, 1);
	      /* The step may sit in either operand of the PLUS_EXPR.  */
	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
		tp = &TREE_OPERAND (t, 0);

	      if (!is_gimple_constant (*tp))
		{
		  gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
				    ? pre_p : &for_pre_body;
		  *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
		  tree c = build_omp_clause (input_location,
					     OMP_CLAUSE_FIRSTPRIVATE);
		  OMP_CLAUSE_DECL (c) = *tp;
		  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
		  OMP_FOR_CLAUSES (orig_for_stmt) = c;
		}
	    }
	}

      gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
				 OMP_TASKLOOP);
    }

  if (orig_for_stmt != for_stmt)
    gimplify_omp_ctxp->combined_loop = true;

  for_body = NULL;
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));

  /* ordered(n) with an expression means doacross loops; remember the
     iteration variables for later depend(sink/source) checking.  */
  tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
  bool is_doacross = false;
  if (c && OMP_CLAUSE_ORDERED_EXPR (c))
    {
      is_doacross = true;
      gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
						(OMP_FOR_INIT (for_stmt))
					       * 2);
    }
  int collapse = 1, tile = 0;
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
  if (c)
    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
  if (c)
    tile = list_length (OMP_CLAUSE_TILE_LIST (c));
  /* Main per-dimension loop: privatize the IV, gimplify init, cond and
     incr expressions of each collapsed loop dimension.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
	{
	  /* Push both the original user-visible IV and the internal one,
	     paired, for doacross dependence checking.  */
	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
	    gimplify_omp_ctxp->loop_iter_var.quick_push
	      (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
	  else
	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	}

      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	/* Do this only on innermost construct for combined ones.  */;
      else if (ort == ORT_SIMD)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  /* Called for diagnostics only (IV may not appear in certain
	     clauses); result intentionally ignored here.  */
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       != 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      /* Non-collapsed simd: the IV becomes linear.  */
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
	      if (has_decl_expr
		  && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		{
		  /* IV declared in the pre-body: its value is not live
		     after the loop, so no copy-out is needed.  */
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
		}
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
			}
		      else
			{
			  struct gimplify_omp_ctx *octx = outer->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_PARALLEL
			      && octx->outer_context
			      && (octx->outer_context->region_type
				  == ORT_WORKSHARE)
			      && octx->outer_context->combined_loop)
			    {
			      octx = octx->outer_context;
			      n = splay_tree_lookup (octx->variables,
						     (splay_tree_key)decl);
			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
				{
				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
				}
			    }
			}
		    }
		}

	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
	      /* If copy-out is needed, propagate lastprivate/shared to the
		 enclosing combined constructs so the value escapes.  */
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      if (outer->outer_context
			  && (outer->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			outer = outer->outer_context;
		      else if (omp_check_private (outer, decl, false))
			outer = NULL;
		    }
		  else if (((outer->region_type & ORT_TASK) != 0)
			   && outer->combined_loop
			   && !omp_check_private (gimplify_omp_ctxp,
						  decl, false))
		    ;
		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
		    {
		      omp_notice_variable (outer, decl, true);
		      outer = NULL;
		    }
		  if (outer)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  omp_add_variable (outer, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  if (outer->region_type == ORT_COMBINED_PARALLEL
			      && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_WORKSHARE)
			      && outer->outer_context->combined_loop)
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (omp_check_private (outer, decl, false))
				outer = NULL;
			      else if (n == NULL
				       || ((n->value & GOVD_DATA_SHARE_CLASS)
					   == 0))
				omp_add_variable (outer, decl,
						  GOVD_LASTPRIVATE
						  | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_COMBINED_TEAMS))
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (n == NULL
				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
				omp_add_variable (outer, decl,
						  GOVD_SHARED | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context)
			    omp_notice_variable (outer->outer_context, decl,
						 true);
			}
		    }
		}
	    }
	  else
	    {
	      /* Collapsed simd (collapse > 1): IVs are lastprivate unless
		 they were declared in the pre-body, in which case they are
		 private.  */
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && lastprivate)
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  lastprivate = false;
			  outer = NULL;
			}
		      else if (outer->outer_context
			       && (outer->outer_context->region_type
				   == ORT_COMBINED_PARALLEL))
			outer = outer->outer_context;
		      else if (omp_check_private (outer, decl, false))
			outer = NULL;
		    }
		  else if (((outer->region_type & ORT_TASK) != 0)
			   && outer->combined_loop
			   && !omp_check_private (gimplify_omp_ctxp,
						  decl, false))
		    ;
		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
		    {
		      omp_notice_variable (outer, decl, true);
		      outer = NULL;
		    }
		  /* As in the linear case above: make the IV lastprivate
		     (and shared on combined teams) in enclosing combined
		     constructs so its final value is visible outside.  */
		  if (outer)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  omp_add_variable (outer, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  if (outer->region_type == ORT_COMBINED_PARALLEL
			      && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_WORKSHARE)
			      && outer->outer_context->combined_loop)
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (omp_check_private (outer, decl, false))
				outer = NULL;
			      else if (n == NULL
				       || ((n->value & GOVD_DATA_SHARE_CLASS)
					   == 0))
				omp_add_variable (outer, decl,
						  GOVD_LASTPRIVATE
						  | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_COMBINED_TEAMS))
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (n == NULL
				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
				omp_add_variable (outer, decl,
						  GOVD_SHARED | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context)
			    omp_notice_variable (outer->outer_context, decl,
						 true);
			}
		    }
		}

	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	      c = NULL_TREE;
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  Similarly for any iteration vars
	 in simd with collapse > 1 where the iterator vars must be
	 lastprivate.  */
      if (orig_for_stmt != for_stmt)
	var = decl;
      else if (!is_gimple_reg (decl)
	       || (ort == ORT_SIMD
		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Make sure omp_add_variable is not called on it prematurely.
	     We call it ourselves a few lines later.  */
	  gimplify_omp_ctxp = NULL;
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  gimplify_omp_ctxp = ctx;
	  TREE_OPERAND (t, 0) = var;

	  /* The user-visible DECL gets the value of the counter copy at
	     the top of each iteration.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  if (ort == ORT_SIMD
	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
	      OMP_CLAUSE_DECL (c2) = var;
	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c2;
	      omp_add_variable (gimplify_omp_ctxp, var,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	      if (c == NULL_TREE)
		{
		  c = c2;
		  c2 = NULL_TREE;
		}
	    }
	  else
	    omp_add_variable (gimplify_omp_ctxp, var,
			      GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue, false);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue, false);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  Canonicalize ++/-- into VAR = VAR + STEP.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  {
	    tree decl = TREE_OPERAND (t, 0);
	    /* c_omp_for_incr_canonicalize_ptr() should have been
	       called to massage things appropriately.  */
	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

	    if (orig_for_stmt != for_stmt)
	      break;
	    t = build_int_cst (TREE_TYPE (decl), 1);
	    if (c)
	      OMP_CLAUSE_LINEAR_STEP (c) = t;
	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	    break;
	  }

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* c_omp_for_incr_canonicalize_ptr() should have been
	     called to massage things appropriately.  */
	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	  if (orig_for_stmt != for_stmt)
	    break;
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  if (c)
	    OMP_CLAUSE_LINEAR_STEP (c) = t;
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  /* Canonicalize STEP + VAR into VAR + STEP.  */
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  if (c)
	    {
	      /* Record the (sign-adjusted) step on the linear clause.  */
	      tree step = TREE_OPERAND (t, 1);
	      tree stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (t) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      if (step != TREE_OPERAND (t, 1))
		{
		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					&for_pre_body, NULL,
					is_gimple_val, fb_rvalue, false);
		  ret = MIN (ret, tret);
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}

      if (c2)
	{
	  gcc_assert (c);
	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	}

      /* When a temporary counter replaced DECL (or for collapsed/tiled
	 loops), fill in the lastprivate/linear update sequence that copies
	 the final counter value back to DECL.  */
      if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
	{
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		&& OMP_CLAUSE_DECL (c) == decl)
	      {
		if (is_doacross && (collapse == 1 || i >= collapse))
		  t = var;
		else
		  {
		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = TREE_OPERAND (t, 1);
		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
				|| TREE_CODE (t) == MINUS_EXPR
				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
				is_doacross ? var : decl,
				TREE_OPERAND (t, 1));
		  }
		gimple_seq *seq;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		else
		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		gimplify_assign (decl, t, seq);
	      }
	}
    }

  BITMAP_FREE (has_decl_expr);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      push_gimplify_context ();
      /* Ensure the taskloop body is wrapped in a BIND_EXPR so the inner
	 gimplification produces a GIMPLE_BIND we can pop below.  */
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
	{
	  OMP_FOR_BODY (orig_for_stmt)
	    = build3 (BIND_EXPR, void_type_node, NULL,
		      OMP_FOR_BODY (orig_for_stmt), NULL);
	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
	}
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
					 &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  if (orig_for_stmt != for_stmt)
    /* Combined construct: replace the inner loop's IVs with fresh private
       temporaries so the outer and inner loops do not share them.  */
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	decl = TREE_OPERAND (t, 0);
	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	  gimplify_omp_ctxp = ctx->outer_context;
	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	gimplify_omp_ctxp = ctx;
	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	TREE_OPERAND (t, 0) = var;
	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
			       &OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_CODE (orig_for_stmt));

  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
    case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
	      && gimplify_omp_ctxp->outer_context
	      && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  /* Copy the gimplified index/initial/final/incr expressions of each
     dimension into the GIMPLE_OMP_FOR statement.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      /* Distribute each clause onto the inner loop, the task, and/or the
	 outer loop according to where its semantics belong.  */
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  /* These clauses are allowed on task, move them there.  */
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_DEFAULT:
	  case OMP_CLAUSE_IF:
	  case OMP_CLAUSE_UNTIED:
	  case OMP_CLAUSE_FINAL:
	  case OMP_CLAUSE_MERGEABLE:
	  case OMP_CLAUSE_PRIORITY:
	    *gtask_clauses_ptr = c;
	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
	      {
		/* We want private on outer for and firstprivate
		   on task.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		*gforo_clauses_ptr = c;
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  /* These clauses go into outer taskloop clauses.  */
	  case OMP_CLAUSE_GRAINSIZE:
	  case OMP_CLAUSE_NUM_TASKS:
	  case OMP_CLAUSE_NOGROUP:
	    *gforo_clauses_ptr = c;
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  /* Taskloop clause we duplicate on both taskloops.  */
	  case OMP_CLAUSE_COLLAPSE:
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gforo_clauses_ptr = copy_node (c);
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	    break;
	  /* For lastprivate, keep the clause on inner taskloop, and add
	     a shared clause on task.  If the same decl is also firstprivate,
	     add also firstprivate clause on the inner taskloop.  */
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	      {
		/* For taskloop C++ lastprivate IVs, we want:
		   1) private on outer taskloop
		   2) firstprivate and shared on task
		   3) lastprivate on inner taskloop  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						       OMP_CLAUSE_PRIVATE);
		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	      }
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gtask_clauses_ptr
	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
	    gtask_clauses_ptr
	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      /* Build the sandwich: bind (inner gfor) -> task -> bind -> outer
	 taskloop GFORO.  */
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
				 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
				gimple_omp_for_collapse (gfor),
				gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
      /* The outer taskloop iterates over private copies of the IVs with
	 unshared (but otherwise identical) bounds and steps.  */
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
	{
	  tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
	  tree v = create_tmp_var (type);
	  gimple_omp_for_set_index (gforo, i, v);
	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
	  gimple_omp_for_set_initial (gforo, i, t);
	  gimple_omp_for_set_cond (gforo, i,
				   gimple_omp_for_cond (gfor, i));
	  t = unshare_expr (gimple_omp_for_final (gfor, i));
	  gimple_omp_for_set_final (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
	  gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
	  TREE_OPERAND (t, 0) = v;
	  gimple_omp_for_set_incr (gforo, i, t);
	  t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (t) = v;
	  OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
	  gimple_omp_for_set_clauses (gforo, t);
	}
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);
  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
10399 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10400 of OMP_TARGET's body. */
10402 static tree
10403 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10405 *walk_subtrees = 0;
10406 switch (TREE_CODE (*tp))
10408 case OMP_TEAMS:
10409 return *tp;
10410 case BIND_EXPR:
10411 case STATEMENT_LIST:
10412 *walk_subtrees = 1;
10413 break;
10414 default:
10415 break;
10417 return NULL_TREE;
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.
   walk_tree callback: returns the offending subtree if *TP cannot be
   evaluated on the host, NULL_TREE if it is safe.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      /* Types carry no runtime computation; nothing to check inside.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject anything that is not a plain, stable integral decl.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* "omp declare target" variables may have a different value on the
	 device, so the host value cannot be used.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A function-local variable not seen in any BIND_EXPR is an
	 artificial temporary created inside the target region; its value
	 does not exist before the construct.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not yet recorded in the target context: safe only if scalars
	     default to firstprivate on this target.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	/* map(always,to:)/map(always,tofrom:) copies the host value in, so
	   evaluating it on the host beforehand is also fine.  */
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only a bare TARGET_EXPR slot can be checked; recurse on it.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
10519 /* Try to determine if the num_teams and/or thread_limit expressions
10520 can have their values determined already before entering the
10521 target construct.
10522 INTEGER_CSTs trivially are,
10523 integral decls that are firstprivate (explicitly or implicitly)
10524 or explicitly map(always, to:) or map(always, tofrom:) on the target
10525 region too, and expressions involving simple arithmetics on those
10526 too, function calls are not ok, dereferencing something neither etc.
10527 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10528 EXPR based on what we find:
10529 0 stands for clause not specified at all, use implementation default
10530 -1 stands for value that can't be determined easily before entering
10531 the target construct.
10532 If teams construct is not present at all, use 1 for num_teams
10533 and 0 for thread_limit (only one team is involved, and the thread
10534 limit is implementation defined).  */
10536 static void
10537 optimize_target_teams (tree target, gimple_seq *pre_p)
/* Pre-compute the num_teams/thread_limit clause values for TARGET where
   they can be determined before entering the construct, and attach them
   as clauses on TARGET (see the preceding block comment for the 0/-1
   encoding).  */
10539 tree body = OMP_BODY (target);
10540 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
/* integer_zero_node == clause not specified at all; may be replaced
   below.  */
10541 tree num_teams = integer_zero_node;
10542 tree thread_limit = integer_zero_node;
10543 location_t num_teams_loc = EXPR_LOCATION (target);
10544 location_t thread_limit_loc = EXPR_LOCATION (target);
10545 tree c, *p, expr;
10546 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
/* No teams construct inside the target: exactly one team runs.  */
10548 if (teams == NULL_TREE)
10549 num_teams = integer_one_node;
10550 else
10551 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
/* P points at whichever of num_teams/thread_limit this clause sets.  */
10553 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10555 p = &num_teams;
10556 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10558 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10560 p = &thread_limit;
10561 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10563 else
10564 continue;
/* An INTEGER_CST operand is trivially computable: use it as-is.  */
10565 expr = OMP_CLAUSE_OPERAND (c, 0)
10566 if (TREE_CODE (expr) == INTEGER_CST)
10568 *p = expr;
10569 continue;
/* computable_teams_clause returning non-NULL means some subtree is not
   computable before the target region; record -1 (not determinable).  */
10571 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10573 *p = integer_minus_one_node;
10574 continue;
10576 *p = expr;
/* Gimplify the expression in the context enclosing the target, since
   it is evaluated on the host before entering the target region.  */
10577 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10578 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10579 == GS_ERROR)
10581 gimplify_omp_ctxp = target_ctx;
10582 *p = integer_minus_one_node;
10583 continue;
10585 gimplify_omp_ctxp = target_ctx;
/* Write the gimplified value back on the teams clause too, unless the
   operand is a decl or TARGET_EXPR slot that stays valid as-is.  */
10586 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10587 OMP_CLAUSE_OPERAND (c, 0) = *p;
/* Attach the computed values as THREAD_LIMIT and NUM_TEAMS clauses on
   the target construct itself.  */
10589 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10590 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10591 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10592 OMP_TARGET_CLAUSES (target) = c;
10593 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10594 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10595 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10596 OMP_TARGET_CLAUSES (target) = c;
10599 /* Gimplify the gross structure of several OMP constructs.  */
10601 static void
10602 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10604 tree expr = *expr_p;
10605 gimple *stmt;
10606 gimple_seq body = NULL;
10607 enum omp_region_type ort;
/* Pick the region type used while scanning/adjusting the clauses.  */
10609 switch (TREE_CODE (expr))
10611 case OMP_SECTIONS:
10612 case OMP_SINGLE:
10613 ort = ORT_WORKSHARE;
10614 break;
10615 case OMP_TARGET:
10616 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10617 break;
10618 case OACC_KERNELS:
10619 ort = ORT_ACC_KERNELS;
10620 break;
10621 case OACC_PARALLEL:
10622 ort = ORT_ACC_PARALLEL;
10623 break;
10624 case OACC_DATA:
10625 ort = ORT_ACC_DATA;
10626 break;
10627 case OMP_TARGET_DATA:
10628 ort = ORT_TARGET_DATA;
10629 break;
10630 case OMP_TEAMS:
10631 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10632 break;
10633 case OACC_HOST_DATA:
10634 ort = ORT_ACC_HOST_DATA;
10635 break;
10636 default:
10637 gcc_unreachable ();
10639 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10640 TREE_CODE (expr));
10641 if (TREE_CODE (expr) == OMP_TARGET)
10642 optimize_target_teams (expr, pre_p);
/* Target and target-data regions get their own gimplify context for the
   body; popping with the GIMPLE_BIND keeps the body's decls in it.  */
10643 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10645 push_gimplify_context ();
10646 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10647 if (gimple_code (g) == GIMPLE_BIND)
10648 pop_gimplify_context (g);
10649 else
10650 pop_gimplify_context (NULL);
/* Data regions wrap the body in GIMPLE_TRY_FINALLY so the matching
   *_DATA_END builtin runs even on abnormal exit from the region.  */
10651 if ((ort & ORT_TARGET_DATA) != 0)
10653 enum built_in_function end_ix;
10654 switch (TREE_CODE (expr))
10656 case OACC_DATA:
10657 case OACC_HOST_DATA:
10658 end_ix = BUILT_IN_GOACC_DATA_END;
10659 break;
10660 case OMP_TARGET_DATA:
10661 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10662 break;
10663 default:
10664 gcc_unreachable ();
10666 tree fn = builtin_decl_explicit (end_ix);
10667 g = gimple_build_call (fn, 0);
10668 gimple_seq cleanup = NULL;
10669 gimple_seq_add_stmt (&cleanup, g);
10670 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10671 body = NULL;
10672 gimple_seq_add_stmt (&body, g);
10675 else
10676 gimplify_and_add (OMP_BODY (expr), &body);
10677 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10678 TREE_CODE (expr));
/* Build the GIMPLE statement matching the construct kind.  */
10680 switch (TREE_CODE (expr))
10682 case OACC_DATA:
10683 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10684 OMP_CLAUSES (expr));
10685 break;
10686 case OACC_KERNELS:
10687 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10688 OMP_CLAUSES (expr));
10689 break;
10690 case OACC_HOST_DATA:
10691 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10692 OMP_CLAUSES (expr));
10693 break;
10694 case OACC_PARALLEL:
10695 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10696 OMP_CLAUSES (expr));
10697 break;
10698 case OMP_SECTIONS:
10699 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10700 break;
10701 case OMP_SINGLE:
10702 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10703 break;
10704 case OMP_TARGET:
10705 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10706 OMP_CLAUSES (expr));
10707 break;
10708 case OMP_TARGET_DATA:
10709 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10710 OMP_CLAUSES (expr));
10711 break;
10712 case OMP_TEAMS:
10713 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10714 break;
10715 default:
10716 gcc_unreachable ();
/* The construct is fully consumed: emit the statement, clear *EXPR_P.  */
10719 gimplify_seq_add_stmt (pre_p, stmt);
10720 *expr_p = NULL_TREE;
10723 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10724 target update constructs. */
10726 static void
10727 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10729 tree expr = *expr_p;
10730 int kind;
10731 gomp_target *stmt;
10732 enum omp_region_type ort = ORT_WORKSHARE;
10734 switch (TREE_CODE (expr))
10736 case OACC_ENTER_DATA:
10737 case OACC_EXIT_DATA:
10738 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10739 ort = ORT_ACC;
10740 break;
10741 case OACC_UPDATE:
10742 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10743 ort = ORT_ACC;
10744 break;
10745 case OMP_TARGET_UPDATE:
10746 kind = GF_OMP_TARGET_KIND_UPDATE;
10747 break;
10748 case OMP_TARGET_ENTER_DATA:
10749 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10750 break;
10751 case OMP_TARGET_EXIT_DATA:
10752 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10753 break;
10754 default:
10755 gcc_unreachable ();
10757 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10758 ort, TREE_CODE (expr));
10759 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10760 TREE_CODE (expr));
10761 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10763 gimplify_seq_add_stmt (pre_p, stmt);
10764 *expr_p = NULL_TREE;
10767 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10768 stabilized the lhs of the atomic operation as *ADDR. Return true if
10769 EXPR is this stabilized form. */
10771 static bool
10772 goa_lhs_expr_p (tree expr, tree addr)
10774 /* Also include casts to other type variants. The C front end is fond
10775 of adding these for e.g. volatile variables. This is like
10776 STRIP_TYPE_NOPS but includes the main variant lookup. */
10777 STRIP_USELESS_TYPE_CONVERSION (expr);
10779 if (TREE_CODE (expr) == INDIRECT_REF)
10781 expr = TREE_OPERAND (expr, 0);
10782 while (expr != addr
10783 && (CONVERT_EXPR_P (expr)
10784 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10785 && TREE_CODE (expr) == TREE_CODE (addr)
10786 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10788 expr = TREE_OPERAND (expr, 0);
10789 addr = TREE_OPERAND (addr, 0);
10791 if (expr == addr)
10792 return true;
10793 return (TREE_CODE (addr) == ADDR_EXPR
10794 && TREE_CODE (expr) == ADDR_EXPR
10795 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10797 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10798 return true;
10799 return false;
10802 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
10803 expression does not involve the lhs, evaluate it into a temporary.
10804 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10805 or -1 if an error was encountered.  */
10807 static int
10808 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10809 tree lhs_var)
10811 tree expr = *expr_p;
10812 int saw_lhs;
/* The whole expression is the (stabilized) lhs: substitute LHS_VAR.  */
10814 if (goa_lhs_expr_p (expr, lhs_addr))
10816 *expr_p = lhs_var;
10817 return 1;
/* Already a GIMPLE value: nothing to stabilize.  */
10819 if (is_gimple_val (expr))
10820 return 0;
10822 saw_lhs = 0;
10823 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10825 case tcc_binary:
10826 case tcc_comparison:
/* Binary: recurse on operand 1, then fall through for operand 0.  */
10827 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10828 lhs_var);
10829 /* FALLTHRU */
10830 case tcc_unary:
10831 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10832 lhs_var);
10833 break;
10834 case tcc_expression:
10835 switch (TREE_CODE (expr))
10837 case TRUTH_ANDIF_EXPR:
10838 case TRUTH_ORIF_EXPR:
10839 case TRUTH_AND_EXPR:
10840 case TRUTH_OR_EXPR:
10841 case TRUTH_XOR_EXPR:
10842 case BIT_INSERT_EXPR:
10843 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10844 lhs_addr, lhs_var);
10845 /* FALLTHRU */
10846 case TRUTH_NOT_EXPR:
10847 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10848 lhs_addr, lhs_var);
10849 break;
10850 case COMPOUND_EXPR:
10851 /* Break out any preevaluations from cp_build_modify_expr.  */
10852 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10853 expr = TREE_OPERAND (expr, 1))
10854 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
/* Restart on the final operand of the COMPOUND_EXPR chain.  */
10855 *expr_p = expr;
10856 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10857 default:
10858 break;
10860 break;
10861 case tcc_reference:
10862 if (TREE_CODE (expr) == BIT_FIELD_REF)
10863 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10864 lhs_addr, lhs_var);
10865 break;
10866 default:
10867 break;
/* The lhs does not occur in this expression: evaluate it into a
   temporary so only the atomic operation itself touches the lhs.  */
10870 if (saw_lhs == 0)
10872 enum gimplify_status gs;
10873 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10874 if (gs != GS_ALL_DONE)
10875 saw_lhs = -1;
10878 return saw_lhs;
10881 /* Gimplify an OMP_ATOMIC statement.  */
10883 static enum gimplify_status
10884 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
/* Operand 0 is the stabilized address of the atomic lhs; operand 1
   (absent for OMP_ATOMIC_READ) is the value expression.  */
10886 tree addr = TREE_OPERAND (*expr_p, 0);
10887 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10888 ? NULL : TREE_OPERAND (*expr_p, 1);
10889 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10890 tree tmp_load;
10891 gomp_atomic_load *loadstmt;
10892 gomp_atomic_store *storestmt;
10894 tmp_load = create_tmp_reg (type);
/* Replace occurrences of the lhs inside RHS with TMP_LOAD and evaluate
   everything not involving the lhs into PRE_P; < 0 means error.  */
10895 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10896 return GS_ERROR;
10898 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10899 != GS_ALL_DONE)
10900 return GS_ERROR;
/* Emit the atomic load ... */
10902 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10903 gimplify_seq_add_stmt (pre_p, loadstmt);
10904 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10905 != GS_ALL_DONE)
10906 return GS_ERROR;
/* ... and the matching atomic store.  A plain read stores back the
   value just loaded.  */
10908 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10909 rhs = tmp_load;
10910 storestmt = gimple_build_omp_atomic_store (rhs);
10911 gimplify_seq_add_stmt (pre_p, storestmt);
/* Propagate the seq_cst memory-order flag to both halves.  */
10912 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10914 gimple_omp_atomic_set_seq_cst (loadstmt);
10915 gimple_omp_atomic_set_seq_cst (storestmt);
/* For capture forms the statement's value is either the old (loaded)
   or the new (stored) value; otherwise there is no value.  */
10917 switch (TREE_CODE (*expr_p))
10919 case OMP_ATOMIC_READ:
10920 case OMP_ATOMIC_CAPTURE_OLD:
10921 *expr_p = tmp_load;
10922 gimple_omp_atomic_set_need_value (loadstmt);
10923 break;
10924 case OMP_ATOMIC_CAPTURE_NEW:
10925 *expr_p = rhs;
10926 gimple_omp_atomic_set_need_value (storestmt);
10927 break;
10928 default:
10929 *expr_p = NULL;
10930 break;
10933 return GS_ALL_DONE;
10936 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10937 body, and adding some EH bits. */
10939 static enum gimplify_status
10940 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10942 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10943 gimple *body_stmt;
10944 gtransaction *trans_stmt;
10945 gimple_seq body = NULL;
10946 int subcode = 0;
10948 /* Wrap the transaction body in a BIND_EXPR so we have a context
10949 where to put decls for OMP. */
10950 if (TREE_CODE (tbody) != BIND_EXPR)
10952 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10953 TREE_SIDE_EFFECTS (bind) = 1;
10954 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10955 TRANSACTION_EXPR_BODY (expr) = bind;
10958 push_gimplify_context ();
10959 temp = voidify_wrapper_expr (*expr_p, NULL);
10961 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10962 pop_gimplify_context (body_stmt);
10964 trans_stmt = gimple_build_transaction (body);
10965 if (TRANSACTION_EXPR_OUTER (expr))
10966 subcode = GTMA_IS_OUTER;
10967 else if (TRANSACTION_EXPR_RELAXED (expr))
10968 subcode = GTMA_IS_RELAXED;
10969 gimple_transaction_set_subcode (trans_stmt, subcode);
10971 gimplify_seq_add_stmt (pre_p, trans_stmt);
10973 if (temp)
10975 *expr_p = temp;
10976 return GS_OK;
10979 *expr_p = NULL_TREE;
10980 return GS_ALL_DONE;
10983 /* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
10984 is the OMP_BODY of the original EXPR (which has already been
10985 gimplified so it's not present in the EXPR).
10987 Return the gimplified GIMPLE_OMP_ORDERED tuple.  */
10989 static gimple *
10990 gimplify_omp_ordered (tree expr, gimple_seq body)
10992 tree c, decls;
10993 int failures = 0;
10994 unsigned int i;
10995 tree source_c = NULL_TREE;
10996 tree sink_c = NULL_TREE;
/* Diagnose depend(sink:)/depend(source) clauses against the loop
   iteration variables recorded in the enclosing gimplify context.  */
10998 if (gimplify_omp_ctxp)
11000 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
/* loop_iter_var is empty when there is no enclosing ordered(n) loop.  */
11001 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11002 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
11003 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
11004 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
11006 error_at (OMP_CLAUSE_LOCATION (c),
11007 "%<ordered%> construct with %<depend%> clause must be "
11008 "closely nested inside a loop with %<ordered%> clause "
11009 "with a parameter");
11010 failures++;
11012 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11013 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
11015 bool fail = false;
/* loop_iter_var stores pairs: element [2*i] is the user's iteration
   variable for loop I, [2*i+1] the internal variable replacing it.  */
11016 for (decls = OMP_CLAUSE_DECL (c), i = 0;
11017 decls && TREE_CODE (decls) == TREE_LIST;
11018 decls = TREE_CHAIN (decls), ++i)
11019 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
11020 continue;
11021 else if (TREE_VALUE (decls)
11022 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11024 error_at (OMP_CLAUSE_LOCATION (c),
11025 "variable %qE is not an iteration "
11026 "of outermost loop %d, expected %qE",
11027 TREE_VALUE (decls), i + 1,
11028 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11029 fail = true;
11030 failures++;
11032 else
11033 TREE_VALUE (decls)
11034 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
/* I now counts the sink variables; it must match the loop depth.  */
11035 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11037 error_at (OMP_CLAUSE_LOCATION (c),
11038 "number of variables in %<depend(sink)%> "
11039 "clause does not match number of "
11040 "iteration variables");
11041 failures++;
11043 sink_c = c;
11045 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11046 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11048 if (source_c)
11050 error_at (OMP_CLAUSE_LOCATION (c),
11051 "more than one %<depend(source)%> clause on an "
11052 "%<ordered%> construct");
11053 failures++;
11055 else
11056 source_c = c;
/* depend(source) and depend(sink:) are mutually exclusive on one
   construct.  */
11059 if (source_c && sink_c)
11061 error_at (OMP_CLAUSE_LOCATION (source_c),
11062 "%<depend(source)%> clause specified together with "
11063 "%<depend(sink:)%> clauses on the same construct");
11064 failures++;
/* On any diagnosed failure drop the construct entirely.  */
11067 if (failures)
11068 return gimple_build_nop ();
11069 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11072 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11073 expression produces a value to be used as an operand inside a GIMPLE
11074 statement, the value will be stored back in *EXPR_P. This value will
11075 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11076 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11077 emitted in PRE_P and POST_P.
11079 Additionally, this process may overwrite parts of the input
11080 expression during gimplification. Ideally, it should be
11081 possible to do non-destructive gimplification.
11083 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11084 the expression needs to evaluate to a value to be used as
11085 an operand in a GIMPLE statement, this value will be stored in
11086 *EXPR_P on exit. This happens when the caller specifies one
11087 of fb_lvalue or fb_rvalue fallback flags.
11089 PRE_P will contain the sequence of GIMPLE statements corresponding
11090 to the evaluation of EXPR and all the side-effects that must
11091 be executed before the main expression. On exit, the last
11092 statement of PRE_P is the core statement being gimplified. For
11093 instance, when gimplifying 'if (++a)' the last statement in
11094 PRE_P will be 'if (t.1)' where t.1 is the result of
11095 pre-incrementing 'a'.
11097 POST_P will contain the sequence of GIMPLE statements corresponding
11098 to the evaluation of all the side-effects that must be executed
11099 after the main expression. If this is NULL, the post
11100 side-effects are stored at the end of PRE_P.
11102 The reason why the output is split in two is to handle post
11103 side-effects explicitly. In some cases, an expression may have
11104 inner and outer post side-effects which need to be emitted in
11105 an order different from the one given by the recursive
11106 traversal. For instance, for the expression (*p--)++ the post
11107 side-effects of '--' must actually occur *after* the post
11108 side-effects of '++'. However, gimplification will first visit
11109 the inner expression, so if a separate POST sequence was not
11110 used, the resulting sequence would be:
11112 1 t.1 = *p
11113 2 p = p - 1
11114 3 t.2 = t.1 + 1
11115 4 *p = t.2
11117 However, the post-decrement operation in line #2 must not be
11118 evaluated until after the store to *p at line #4, so the
11119 correct sequence should be:
11121 1 t.1 = *p
11122 2 t.2 = t.1 + 1
11123 3 *p = t.2
11124 4 p = p - 1
11126 So, by specifying a separate post queue, it is possible
11127 to emit the post side-effects in the correct order.
11128 If POST_P is NULL, an internal queue will be used. Before
11129 returning to the caller, the sequence POST_P is appended to
11130 the main output sequence PRE_P.
11132 GIMPLE_TEST_F points to a function that takes a tree T and
11133 returns nonzero if T is in the GIMPLE form requested by the
11134 caller. The GIMPLE predicates are in gimple.c.
11136 FALLBACK tells the function what sort of a temporary we want if
11137 gimplification cannot produce an expression that complies with
11138 GIMPLE_TEST_F.
11140 fb_none means that no temporary should be generated
11141 fb_rvalue means that an rvalue is OK to generate
11142 fb_lvalue means that an lvalue is OK to generate
11143 fb_either means that either is OK, but an lvalue is preferable.
11144 fb_mayfail means that gimplification may fail (in which case
11145 GS_ERROR will be returned)
11147 The return value is either GS_ERROR or GS_ALL_DONE, since this
11148 function iterates until EXPR is completely gimplified or an error
11149 occurs. */
11151 enum gimplify_status
11152 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11153 bool (*gimple_test_f) (tree), fallback_t fallback)
11155 tree tmp;
11156 gimple_seq internal_pre = NULL;
11157 gimple_seq internal_post = NULL;
11158 tree save_expr;
11159 bool is_statement;
11160 location_t saved_location;
11161 enum gimplify_status ret;
11162 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11163 tree label;
11165 save_expr = *expr_p;
11166 if (save_expr == NULL_TREE)
11167 return GS_ALL_DONE;
11169 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11170 is_statement = gimple_test_f == is_gimple_stmt;
11171 if (is_statement)
11172 gcc_assert (pre_p);
11174 /* Consistency checks. */
11175 if (gimple_test_f == is_gimple_reg)
11176 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11177 else if (gimple_test_f == is_gimple_val
11178 || gimple_test_f == is_gimple_call_addr
11179 || gimple_test_f == is_gimple_condexpr
11180 || gimple_test_f == is_gimple_mem_rhs
11181 || gimple_test_f == is_gimple_mem_rhs_or_call
11182 || gimple_test_f == is_gimple_reg_rhs
11183 || gimple_test_f == is_gimple_reg_rhs_or_call
11184 || gimple_test_f == is_gimple_asm_val
11185 || gimple_test_f == is_gimple_mem_ref_addr)
11186 gcc_assert (fallback & fb_rvalue);
11187 else if (gimple_test_f == is_gimple_min_lval
11188 || gimple_test_f == is_gimple_lvalue)
11189 gcc_assert (fallback & fb_lvalue);
11190 else if (gimple_test_f == is_gimple_addressable)
11191 gcc_assert (fallback & fb_either);
11192 else if (gimple_test_f == is_gimple_stmt)
11193 gcc_assert (fallback == fb_none);
11194 else
11196 /* We should have recognized the GIMPLE_TEST_F predicate to
11197 know what kind of fallback to use in case a temporary is
11198 needed to hold the value or address of *EXPR_P. */
11199 gcc_unreachable ();
11202 /* We used to check the predicate here and return immediately if it
11203 succeeds. This is wrong; the design is for gimplification to be
11204 idempotent, and for the predicates to only test for valid forms, not
11205 whether they are fully simplified. */
11206 if (pre_p == NULL)
11207 pre_p = &internal_pre;
11209 if (post_p == NULL)
11210 post_p = &internal_post;
11212 /* Remember the last statements added to PRE_P and POST_P. Every
11213 new statement added by the gimplification helpers needs to be
11214 annotated with location information. To centralize the
11215 responsibility, we remember the last statement that had been
11216 added to both queues before gimplifying *EXPR_P. If
11217 gimplification produces new statements in PRE_P and POST_P, those
11218 statements will be annotated with the same location information
11219 as *EXPR_P. */
11220 pre_last_gsi = gsi_last (*pre_p);
11221 post_last_gsi = gsi_last (*post_p);
11223 saved_location = input_location;
11224 if (save_expr != error_mark_node
11225 && EXPR_HAS_LOCATION (*expr_p))
11226 input_location = EXPR_LOCATION (*expr_p);
11228 /* Loop over the specific gimplifiers until the toplevel node
11229 remains the same. */
11232 /* Strip away as many useless type conversions as possible
11233 at the toplevel. */
11234 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11236 /* Remember the expr. */
11237 save_expr = *expr_p;
11239 /* Die, die, die, my darling. */
11240 if (save_expr == error_mark_node
11241 || (TREE_TYPE (save_expr)
11242 && TREE_TYPE (save_expr) == error_mark_node))
11244 ret = GS_ERROR;
11245 break;
11248 /* Do any language-specific gimplification. */
11249 ret = ((enum gimplify_status)
11250 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11251 if (ret == GS_OK)
11253 if (*expr_p == NULL_TREE)
11254 break;
11255 if (*expr_p != save_expr)
11256 continue;
11258 else if (ret != GS_UNHANDLED)
11259 break;
11261 /* Make sure that all the cases set 'ret' appropriately. */
11262 ret = GS_UNHANDLED;
11263 switch (TREE_CODE (*expr_p))
11265 /* First deal with the special cases. */
11267 case POSTINCREMENT_EXPR:
11268 case POSTDECREMENT_EXPR:
11269 case PREINCREMENT_EXPR:
11270 case PREDECREMENT_EXPR:
11271 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11272 fallback != fb_none,
11273 TREE_TYPE (*expr_p));
11274 break;
11276 case VIEW_CONVERT_EXPR:
11277 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11278 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11280 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11281 post_p, is_gimple_val, fb_rvalue);
11282 recalculate_side_effects (*expr_p);
11283 break;
11285 /* Fallthru. */
11287 case ARRAY_REF:
11288 case ARRAY_RANGE_REF:
11289 case REALPART_EXPR:
11290 case IMAGPART_EXPR:
11291 case COMPONENT_REF:
11292 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11293 fallback ? fallback : fb_rvalue);
11294 break;
11296 case COND_EXPR:
11297 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11299 /* C99 code may assign to an array in a structure value of a
11300 conditional expression, and this has undefined behavior
11301 only on execution, so create a temporary if an lvalue is
11302 required. */
11303 if (fallback == fb_lvalue)
11305 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11306 mark_addressable (*expr_p);
11307 ret = GS_OK;
11309 break;
11311 case CALL_EXPR:
11312 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11314 /* C99 code may assign to an array in a structure returned
11315 from a function, and this has undefined behavior only on
11316 execution, so create a temporary if an lvalue is
11317 required. */
11318 if (fallback == fb_lvalue)
11320 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11321 mark_addressable (*expr_p);
11322 ret = GS_OK;
11324 break;
11326 case TREE_LIST:
11327 gcc_unreachable ();
11329 case COMPOUND_EXPR:
11330 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11331 break;
11333 case COMPOUND_LITERAL_EXPR:
11334 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11335 gimple_test_f, fallback);
11336 break;
11338 case MODIFY_EXPR:
11339 case INIT_EXPR:
11340 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11341 fallback != fb_none);
11342 break;
11344 case TRUTH_ANDIF_EXPR:
11345 case TRUTH_ORIF_EXPR:
11347 /* Preserve the original type of the expression and the
11348 source location of the outer expression. */
11349 tree org_type = TREE_TYPE (*expr_p);
11350 *expr_p = gimple_boolify (*expr_p);
11351 *expr_p = build3_loc (input_location, COND_EXPR,
11352 org_type, *expr_p,
11353 fold_convert_loc
11354 (input_location,
11355 org_type, boolean_true_node),
11356 fold_convert_loc
11357 (input_location,
11358 org_type, boolean_false_node));
11359 ret = GS_OK;
11360 break;
11363 case TRUTH_NOT_EXPR:
11365 tree type = TREE_TYPE (*expr_p);
11366 /* The parsers are careful to generate TRUTH_NOT_EXPR
11367 only with operands that are always zero or one.
11368 We do not fold here but handle the only interesting case
11369 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11370 *expr_p = gimple_boolify (*expr_p);
11371 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11372 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11373 TREE_TYPE (*expr_p),
11374 TREE_OPERAND (*expr_p, 0));
11375 else
11376 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11377 TREE_TYPE (*expr_p),
11378 TREE_OPERAND (*expr_p, 0),
11379 build_int_cst (TREE_TYPE (*expr_p), 1));
11380 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11381 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11382 ret = GS_OK;
11383 break;
11386 case ADDR_EXPR:
11387 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11388 break;
11390 case ANNOTATE_EXPR:
11392 tree cond = TREE_OPERAND (*expr_p, 0);
11393 tree kind = TREE_OPERAND (*expr_p, 1);
11394 tree data = TREE_OPERAND (*expr_p, 2);
11395 tree type = TREE_TYPE (cond);
11396 if (!INTEGRAL_TYPE_P (type))
11398 *expr_p = cond;
11399 ret = GS_OK;
11400 break;
11402 tree tmp = create_tmp_var (type);
11403 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11404 gcall *call
11405 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
11406 gimple_call_set_lhs (call, tmp);
11407 gimplify_seq_add_stmt (pre_p, call);
11408 *expr_p = tmp;
11409 ret = GS_ALL_DONE;
11410 break;
11413 case VA_ARG_EXPR:
11414 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11415 break;
11417 CASE_CONVERT:
11418 if (IS_EMPTY_STMT (*expr_p))
11420 ret = GS_ALL_DONE;
11421 break;
11424 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11425 || fallback == fb_none)
11427 /* Just strip a conversion to void (or in void context) and
11428 try again. */
11429 *expr_p = TREE_OPERAND (*expr_p, 0);
11430 ret = GS_OK;
11431 break;
11434 ret = gimplify_conversion (expr_p);
11435 if (ret == GS_ERROR)
11436 break;
11437 if (*expr_p != save_expr)
11438 break;
11439 /* FALLTHRU */
11441 case FIX_TRUNC_EXPR:
11442 /* unary_expr: ... | '(' cast ')' val | ... */
11443 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11444 is_gimple_val, fb_rvalue);
11445 recalculate_side_effects (*expr_p);
11446 break;
11448 case INDIRECT_REF:
11450 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11451 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11452 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11454 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11455 if (*expr_p != save_expr)
11457 ret = GS_OK;
11458 break;
11461 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11462 is_gimple_reg, fb_rvalue);
11463 if (ret == GS_ERROR)
11464 break;
11466 recalculate_side_effects (*expr_p);
11467 *expr_p = fold_build2_loc (input_location, MEM_REF,
11468 TREE_TYPE (*expr_p),
11469 TREE_OPERAND (*expr_p, 0),
11470 build_int_cst (saved_ptr_type, 0));
11471 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11472 TREE_THIS_NOTRAP (*expr_p) = notrap;
11473 ret = GS_OK;
11474 break;
11477 /* We arrive here through the various re-gimplification paths.  */
11478 case MEM_REF:
11479 /* First try re-folding the whole thing. */
11480 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11481 TREE_OPERAND (*expr_p, 0),
11482 TREE_OPERAND (*expr_p, 1));
11483 if (tmp)
11485 REF_REVERSE_STORAGE_ORDER (tmp)
11486 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11487 *expr_p = tmp;
11488 recalculate_side_effects (*expr_p);
11489 ret = GS_OK;
11490 break;
11492 /* Avoid re-gimplifying the address operand if it is already
11493 in suitable form. Re-gimplifying would mark the address
11494 operand addressable. Always gimplify when not in SSA form
11495 as we still may have to gimplify decls with value-exprs. */
11496 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11497 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11499 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11500 is_gimple_mem_ref_addr, fb_rvalue);
11501 if (ret == GS_ERROR)
11502 break;
11504 recalculate_side_effects (*expr_p);
11505 ret = GS_ALL_DONE;
11506 break;
11508 /* Constants need not be gimplified. */
11509 case INTEGER_CST:
11510 case REAL_CST:
11511 case FIXED_CST:
11512 case STRING_CST:
11513 case COMPLEX_CST:
11514 case VECTOR_CST:
11515 /* Drop the overflow flag on constants, we do not want
11516 that in the GIMPLE IL. */
11517 if (TREE_OVERFLOW_P (*expr_p))
11518 *expr_p = drop_tree_overflow (*expr_p);
11519 ret = GS_ALL_DONE;
11520 break;
11522 case CONST_DECL:
11523 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11524 CONST_DECL node. Otherwise the decl is replaceable by its
11525 value. */
11526 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11527 if (fallback & fb_lvalue)
11528 ret = GS_ALL_DONE;
11529 else
11531 *expr_p = DECL_INITIAL (*expr_p);
11532 ret = GS_OK;
11534 break;
11536 case DECL_EXPR:
11537 ret = gimplify_decl_expr (expr_p, pre_p);
11538 break;
11540 case BIND_EXPR:
11541 ret = gimplify_bind_expr (expr_p, pre_p);
11542 break;
11544 case LOOP_EXPR:
11545 ret = gimplify_loop_expr (expr_p, pre_p);
11546 break;
11548 case SWITCH_EXPR:
11549 ret = gimplify_switch_expr (expr_p, pre_p);
11550 break;
11552 case EXIT_EXPR:
11553 ret = gimplify_exit_expr (expr_p);
11554 break;
11556 case GOTO_EXPR:
11557 /* If the target is not LABEL, then it is a computed jump
11558 and the target needs to be gimplified. */
11559 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11561 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11562 NULL, is_gimple_val, fb_rvalue);
11563 if (ret == GS_ERROR)
11564 break;
11566 gimplify_seq_add_stmt (pre_p,
11567 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11568 ret = GS_ALL_DONE;
11569 break;
11571 case PREDICT_EXPR:
11572 gimplify_seq_add_stmt (pre_p,
11573 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11574 PREDICT_EXPR_OUTCOME (*expr_p)));
11575 ret = GS_ALL_DONE;
11576 break;
11578 case LABEL_EXPR:
11579 ret = gimplify_label_expr (expr_p, pre_p);
11580 label = LABEL_EXPR_LABEL (*expr_p);
11581 gcc_assert (decl_function_context (label) == current_function_decl);
11583 /* If the label is used in a goto statement, or address of the label
11584 is taken, we need to unpoison all variables that were seen so far.
11585 Doing so would prevent us from reporting false positives. */
11586 if (asan_poisoned_variables
11587 && asan_used_labels != NULL
11588 && asan_used_labels->contains (label))
11589 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11590 break;
11592 case CASE_LABEL_EXPR:
11593 ret = gimplify_case_label_expr (expr_p, pre_p);
11595 if (gimplify_ctxp->live_switch_vars)
11596 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11597 pre_p);
11598 break;
11600 case RETURN_EXPR:
11601 ret = gimplify_return_expr (*expr_p, pre_p);
11602 break;
11604 case CONSTRUCTOR:
11605 /* Don't reduce this in place; let gimplify_init_constructor work its
11606 magic. But if we're just elaborating this for side effects, just
11607 gimplify any element that has side-effects. */
11608 if (fallback == fb_none)
11610 unsigned HOST_WIDE_INT ix;
11611 tree val;
11612 tree temp = NULL_TREE;
11613 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11614 if (TREE_SIDE_EFFECTS (val))
11615 append_to_statement_list (val, &temp);
11617 *expr_p = temp;
11618 ret = temp ? GS_OK : GS_ALL_DONE;
11620 /* C99 code may assign to an array in a constructed
11621 structure or union, and this has undefined behavior only
11622 on execution, so create a temporary if an lvalue is
11623 required. */
11624 else if (fallback == fb_lvalue)
11626 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11627 mark_addressable (*expr_p);
11628 ret = GS_OK;
11630 else
11631 ret = GS_ALL_DONE;
11632 break;
11634 /* The following are special cases that are not handled by the
11635 original GIMPLE grammar. */
11637 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11638 eliminated. */
11639 case SAVE_EXPR:
11640 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11641 break;
11643 case BIT_FIELD_REF:
11644 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11645 post_p, is_gimple_lvalue, fb_either);
11646 recalculate_side_effects (*expr_p);
11647 break;
11649 case TARGET_MEM_REF:
11651 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11653 if (TMR_BASE (*expr_p))
11654 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11655 post_p, is_gimple_mem_ref_addr, fb_either);
11656 if (TMR_INDEX (*expr_p))
11657 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11658 post_p, is_gimple_val, fb_rvalue);
11659 if (TMR_INDEX2 (*expr_p))
11660 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11661 post_p, is_gimple_val, fb_rvalue);
11662 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11663 ret = MIN (r0, r1);
11665 break;
11667 case NON_LVALUE_EXPR:
11668 /* This should have been stripped above. */
11669 gcc_unreachable ();
11671 case ASM_EXPR:
11672 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11673 break;
11675 case TRY_FINALLY_EXPR:
11676 case TRY_CATCH_EXPR:
11678 gimple_seq eval, cleanup;
11679 gtry *try_;
11681 /* Calls to destructors are generated automatically in FINALLY/CATCH
11682 block. They should have location as UNKNOWN_LOCATION. However,
11683 gimplify_call_expr will reset these call stmts to input_location
11684 if it finds stmt's location is unknown. To prevent resetting for
11685 destructors, we set the input_location to unknown.
11686 Note that this only affects the destructor calls in FINALLY/CATCH
11687 block, and will automatically reset to its original value by the
11688 end of gimplify_expr. */
11689 input_location = UNKNOWN_LOCATION;
11690 eval = cleanup = NULL;
11691 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11692 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11693 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11694 if (gimple_seq_empty_p (cleanup))
11696 gimple_seq_add_seq (pre_p, eval);
11697 ret = GS_ALL_DONE;
11698 break;
11700 try_ = gimple_build_try (eval, cleanup,
11701 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11702 ? GIMPLE_TRY_FINALLY
11703 : GIMPLE_TRY_CATCH);
11704 if (EXPR_HAS_LOCATION (save_expr))
11705 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11706 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11707 gimple_set_location (try_, saved_location);
11708 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11709 gimple_try_set_catch_is_cleanup (try_,
11710 TRY_CATCH_IS_CLEANUP (*expr_p));
11711 gimplify_seq_add_stmt (pre_p, try_);
11712 ret = GS_ALL_DONE;
11713 break;
11716 case CLEANUP_POINT_EXPR:
11717 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11718 break;
11720 case TARGET_EXPR:
11721 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11722 break;
11724 case CATCH_EXPR:
11726 gimple *c;
11727 gimple_seq handler = NULL;
11728 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11729 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11730 gimplify_seq_add_stmt (pre_p, c);
11731 ret = GS_ALL_DONE;
11732 break;
11735 case EH_FILTER_EXPR:
11737 gimple *ehf;
11738 gimple_seq failure = NULL;
11740 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11741 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11742 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11743 gimplify_seq_add_stmt (pre_p, ehf);
11744 ret = GS_ALL_DONE;
11745 break;
11748 case OBJ_TYPE_REF:
11750 enum gimplify_status r0, r1;
11751 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11752 post_p, is_gimple_val, fb_rvalue);
11753 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11754 post_p, is_gimple_val, fb_rvalue);
11755 TREE_SIDE_EFFECTS (*expr_p) = 0;
11756 ret = MIN (r0, r1);
11758 break;
11760 case LABEL_DECL:
11761 /* We get here when taking the address of a label. We mark
11762 the label as "forced"; meaning it can never be removed and
11763 it is a potential target for any computed goto. */
11764 FORCED_LABEL (*expr_p) = 1;
11765 ret = GS_ALL_DONE;
11766 break;
11768 case STATEMENT_LIST:
11769 ret = gimplify_statement_list (expr_p, pre_p);
11770 break;
11772 case WITH_SIZE_EXPR:
11774 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11775 post_p == &internal_post ? NULL : post_p,
11776 gimple_test_f, fallback);
11777 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11778 is_gimple_val, fb_rvalue);
11779 ret = GS_ALL_DONE;
11781 break;
11783 case VAR_DECL:
11784 case PARM_DECL:
11785 ret = gimplify_var_or_parm_decl (expr_p);
11786 break;
11788 case RESULT_DECL:
11789 /* When within an OMP context, notice uses of variables. */
11790 if (gimplify_omp_ctxp)
11791 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11792 ret = GS_ALL_DONE;
11793 break;
11795 case SSA_NAME:
11796 /* Allow callbacks into the gimplifier during optimization. */
11797 ret = GS_ALL_DONE;
11798 break;
11800 case OMP_PARALLEL:
11801 gimplify_omp_parallel (expr_p, pre_p);
11802 ret = GS_ALL_DONE;
11803 break;
11805 case OMP_TASK:
11806 gimplify_omp_task (expr_p, pre_p);
11807 ret = GS_ALL_DONE;
11808 break;
11810 case OMP_FOR:
11811 case OMP_SIMD:
11812 case CILK_SIMD:
11813 case CILK_FOR:
11814 case OMP_DISTRIBUTE:
11815 case OMP_TASKLOOP:
11816 case OACC_LOOP:
11817 ret = gimplify_omp_for (expr_p, pre_p);
11818 break;
11820 case OACC_CACHE:
11821 gimplify_oacc_cache (expr_p, pre_p);
11822 ret = GS_ALL_DONE;
11823 break;
11825 case OACC_DECLARE:
11826 gimplify_oacc_declare (expr_p, pre_p);
11827 ret = GS_ALL_DONE;
11828 break;
11830 case OACC_HOST_DATA:
11831 case OACC_DATA:
11832 case OACC_KERNELS:
11833 case OACC_PARALLEL:
11834 case OMP_SECTIONS:
11835 case OMP_SINGLE:
11836 case OMP_TARGET:
11837 case OMP_TARGET_DATA:
11838 case OMP_TEAMS:
11839 gimplify_omp_workshare (expr_p, pre_p);
11840 ret = GS_ALL_DONE;
11841 break;
11843 case OACC_ENTER_DATA:
11844 case OACC_EXIT_DATA:
11845 case OACC_UPDATE:
11846 case OMP_TARGET_UPDATE:
11847 case OMP_TARGET_ENTER_DATA:
11848 case OMP_TARGET_EXIT_DATA:
11849 gimplify_omp_target_update (expr_p, pre_p);
11850 ret = GS_ALL_DONE;
11851 break;
11853 case OMP_SECTION:
11854 case OMP_MASTER:
11855 case OMP_TASKGROUP:
11856 case OMP_ORDERED:
11857 case OMP_CRITICAL:
11859 gimple_seq body = NULL;
11860 gimple *g;
11862 gimplify_and_add (OMP_BODY (*expr_p), &body);
11863 switch (TREE_CODE (*expr_p))
11865 case OMP_SECTION:
11866 g = gimple_build_omp_section (body);
11867 break;
11868 case OMP_MASTER:
11869 g = gimple_build_omp_master (body);
11870 break;
11871 case OMP_TASKGROUP:
11873 gimple_seq cleanup = NULL;
11874 tree fn
11875 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11876 g = gimple_build_call (fn, 0);
11877 gimple_seq_add_stmt (&cleanup, g);
11878 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11879 body = NULL;
11880 gimple_seq_add_stmt (&body, g);
11881 g = gimple_build_omp_taskgroup (body);
11883 break;
11884 case OMP_ORDERED:
11885 g = gimplify_omp_ordered (*expr_p, body);
11886 break;
11887 case OMP_CRITICAL:
11888 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11889 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11890 gimplify_adjust_omp_clauses (pre_p, body,
11891 &OMP_CRITICAL_CLAUSES (*expr_p),
11892 OMP_CRITICAL);
11893 g = gimple_build_omp_critical (body,
11894 OMP_CRITICAL_NAME (*expr_p),
11895 OMP_CRITICAL_CLAUSES (*expr_p));
11896 break;
11897 default:
11898 gcc_unreachable ();
11900 gimplify_seq_add_stmt (pre_p, g);
11901 ret = GS_ALL_DONE;
11902 break;
11905 case OMP_ATOMIC:
11906 case OMP_ATOMIC_READ:
11907 case OMP_ATOMIC_CAPTURE_OLD:
11908 case OMP_ATOMIC_CAPTURE_NEW:
11909 ret = gimplify_omp_atomic (expr_p, pre_p);
11910 break;
11912 case TRANSACTION_EXPR:
11913 ret = gimplify_transaction (expr_p, pre_p);
11914 break;
11916 case TRUTH_AND_EXPR:
11917 case TRUTH_OR_EXPR:
11918 case TRUTH_XOR_EXPR:
11920 tree orig_type = TREE_TYPE (*expr_p);
11921 tree new_type, xop0, xop1;
11922 *expr_p = gimple_boolify (*expr_p);
11923 new_type = TREE_TYPE (*expr_p);
11924 if (!useless_type_conversion_p (orig_type, new_type))
11926 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11927 ret = GS_OK;
11928 break;
11931 /* Boolified binary truth expressions are semantically equivalent
11932 to bitwise binary expressions. Canonicalize them to the
11933 bitwise variant. */
11934 switch (TREE_CODE (*expr_p))
11936 case TRUTH_AND_EXPR:
11937 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11938 break;
11939 case TRUTH_OR_EXPR:
11940 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11941 break;
11942 case TRUTH_XOR_EXPR:
11943 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11944 break;
11945 default:
11946 break;
11948 /* Now make sure that operands have compatible type to
11949 expression's new_type. */
11950 xop0 = TREE_OPERAND (*expr_p, 0);
11951 xop1 = TREE_OPERAND (*expr_p, 1);
11952 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11953 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11954 new_type,
11955 xop0);
11956 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11957 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11958 new_type,
11959 xop1);
11960 /* Continue classified as tcc_binary. */
11961 goto expr_2;
11964 case VEC_COND_EXPR:
11966 enum gimplify_status r0, r1, r2;
11968 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11969 post_p, is_gimple_condexpr, fb_rvalue);
11970 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11971 post_p, is_gimple_val, fb_rvalue);
11972 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11973 post_p, is_gimple_val, fb_rvalue);
11975 ret = MIN (MIN (r0, r1), r2);
11976 recalculate_side_effects (*expr_p);
11978 break;
11980 case FMA_EXPR:
11981 case VEC_PERM_EXPR:
11982 /* Classified as tcc_expression. */
11983 goto expr_3;
11985 case BIT_INSERT_EXPR:
11986 /* Argument 3 is a constant. */
11987 goto expr_2;
11989 case POINTER_PLUS_EXPR:
11991 enum gimplify_status r0, r1;
11992 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11993 post_p, is_gimple_val, fb_rvalue);
11994 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11995 post_p, is_gimple_val, fb_rvalue);
11996 recalculate_side_effects (*expr_p);
11997 ret = MIN (r0, r1);
11998 break;
12001 case CILK_SYNC_STMT:
12003 if (!fn_contains_cilk_spawn_p (cfun))
12005 error_at (EXPR_LOCATION (*expr_p),
12006 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
12007 ret = GS_ERROR;
12009 else
12011 gimplify_cilk_sync (expr_p, pre_p);
12012 ret = GS_ALL_DONE;
12014 break;
12017 default:
12018 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12020 case tcc_comparison:
12021 /* Handle comparison of objects of non scalar mode aggregates
12022 with a call to memcmp. It would be nice to only have to do
12023 this for variable-sized objects, but then we'd have to allow
12024 the same nest of reference nodes we allow for MODIFY_EXPR and
12025 that's too complex.
12027 Compare scalar mode aggregates as scalar mode values. Using
12028 memcmp for them would be very inefficient at best, and is
12029 plain wrong if bitfields are involved. */
12031 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12033 /* Vector comparisons need no boolification. */
12034 if (TREE_CODE (type) == VECTOR_TYPE)
12035 goto expr_2;
12036 else if (!AGGREGATE_TYPE_P (type))
12038 tree org_type = TREE_TYPE (*expr_p);
12039 *expr_p = gimple_boolify (*expr_p);
12040 if (!useless_type_conversion_p (org_type,
12041 TREE_TYPE (*expr_p)))
12043 *expr_p = fold_convert_loc (input_location,
12044 org_type, *expr_p);
12045 ret = GS_OK;
12047 else
12048 goto expr_2;
12050 else if (TYPE_MODE (type) != BLKmode)
12051 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12052 else
12053 ret = gimplify_variable_sized_compare (expr_p);
12055 break;
12058 /* If *EXPR_P does not need to be special-cased, handle it
12059 according to its class. */
12060 case tcc_unary:
12061 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12062 post_p, is_gimple_val, fb_rvalue);
12063 break;
12065 case tcc_binary:
12066 expr_2:
12068 enum gimplify_status r0, r1;
12070 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12071 post_p, is_gimple_val, fb_rvalue);
12072 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12073 post_p, is_gimple_val, fb_rvalue);
12075 ret = MIN (r0, r1);
12076 break;
12079 expr_3:
12081 enum gimplify_status r0, r1, r2;
12083 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12084 post_p, is_gimple_val, fb_rvalue);
12085 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12086 post_p, is_gimple_val, fb_rvalue);
12087 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12088 post_p, is_gimple_val, fb_rvalue);
12090 ret = MIN (MIN (r0, r1), r2);
12091 break;
12094 case tcc_declaration:
12095 case tcc_constant:
12096 ret = GS_ALL_DONE;
12097 goto dont_recalculate;
12099 default:
12100 gcc_unreachable ();
12103 recalculate_side_effects (*expr_p);
12105 dont_recalculate:
12106 break;
12109 gcc_assert (*expr_p || ret != GS_OK);
12111 while (ret == GS_OK);
12113 /* If we encountered an error_mark somewhere nested inside, either
12114 stub out the statement or propagate the error back out. */
12115 if (ret == GS_ERROR)
12117 if (is_statement)
12118 *expr_p = NULL;
12119 goto out;
12122 /* This was only valid as a return value from the langhook, which
12123 we handled. Make sure it doesn't escape from any other context. */
12124 gcc_assert (ret != GS_UNHANDLED);
12126 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12128 /* We aren't looking for a value, and we don't have a valid
12129 statement. If it doesn't have side-effects, throw it away.
12130 We can also get here with code such as "*&&L;", where L is
12131 a LABEL_DECL that is marked as FORCED_LABEL. */
12132 if (TREE_CODE (*expr_p) == LABEL_DECL
12133 || !TREE_SIDE_EFFECTS (*expr_p))
12134 *expr_p = NULL;
12135 else if (!TREE_THIS_VOLATILE (*expr_p))
12137 /* This is probably a _REF that contains something nested that
12138 has side effects. Recurse through the operands to find it. */
12139 enum tree_code code = TREE_CODE (*expr_p);
12141 switch (code)
12143 case COMPONENT_REF:
12144 case REALPART_EXPR:
12145 case IMAGPART_EXPR:
12146 case VIEW_CONVERT_EXPR:
12147 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12148 gimple_test_f, fallback);
12149 break;
12151 case ARRAY_REF:
12152 case ARRAY_RANGE_REF:
12153 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12154 gimple_test_f, fallback);
12155 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12156 gimple_test_f, fallback);
12157 break;
12159 default:
12160 /* Anything else with side-effects must be converted to
12161 a valid statement before we get here. */
12162 gcc_unreachable ();
12165 *expr_p = NULL;
12167 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12168 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12170 /* Historically, the compiler has treated a bare reference
12171 to a non-BLKmode volatile lvalue as forcing a load. */
12172 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12174 /* Normally, we do not want to create a temporary for a
12175 TREE_ADDRESSABLE type because such a type should not be
12176 copied by bitwise-assignment. However, we make an
12177 exception here, as all we are doing here is ensuring that
12178 we read the bytes that make up the type. We use
12179 create_tmp_var_raw because create_tmp_var will abort when
12180 given a TREE_ADDRESSABLE type. */
12181 tree tmp = create_tmp_var_raw (type, "vol");
12182 gimple_add_tmp_var (tmp);
12183 gimplify_assign (tmp, *expr_p, pre_p);
12184 *expr_p = NULL;
12186 else
12187 /* We can't do anything useful with a volatile reference to
12188 an incomplete type, so just throw it away. Likewise for
12189 a BLKmode type, since any implicit inner load should
12190 already have been turned into an explicit one by the
12191 gimplification process. */
12192 *expr_p = NULL;
12195 /* If we are gimplifying at the statement level, we're done. Tack
12196 everything together and return. */
12197 if (fallback == fb_none || is_statement)
12199 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12200 it out for GC to reclaim it. */
12201 *expr_p = NULL_TREE;
12203 if (!gimple_seq_empty_p (internal_pre)
12204 || !gimple_seq_empty_p (internal_post))
12206 gimplify_seq_add_seq (&internal_pre, internal_post);
12207 gimplify_seq_add_seq (pre_p, internal_pre);
12210 /* The result of gimplifying *EXPR_P is going to be the last few
12211 statements in *PRE_P and *POST_P. Add location information
12212 to all the statements that were added by the gimplification
12213 helpers. */
12214 if (!gimple_seq_empty_p (*pre_p))
12215 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12217 if (!gimple_seq_empty_p (*post_p))
12218 annotate_all_with_location_after (*post_p, post_last_gsi,
12219 input_location);
12221 goto out;
12224 #ifdef ENABLE_GIMPLE_CHECKING
12225 if (*expr_p)
12227 enum tree_code code = TREE_CODE (*expr_p);
12228 /* These expressions should already be in gimple IR form. */
12229 gcc_assert (code != MODIFY_EXPR
12230 && code != ASM_EXPR
12231 && code != BIND_EXPR
12232 && code != CATCH_EXPR
12233 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12234 && code != EH_FILTER_EXPR
12235 && code != GOTO_EXPR
12236 && code != LABEL_EXPR
12237 && code != LOOP_EXPR
12238 && code != SWITCH_EXPR
12239 && code != TRY_FINALLY_EXPR
12240 && code != OACC_PARALLEL
12241 && code != OACC_KERNELS
12242 && code != OACC_DATA
12243 && code != OACC_HOST_DATA
12244 && code != OACC_DECLARE
12245 && code != OACC_UPDATE
12246 && code != OACC_ENTER_DATA
12247 && code != OACC_EXIT_DATA
12248 && code != OACC_CACHE
12249 && code != OMP_CRITICAL
12250 && code != OMP_FOR
12251 && code != OACC_LOOP
12252 && code != OMP_MASTER
12253 && code != OMP_TASKGROUP
12254 && code != OMP_ORDERED
12255 && code != OMP_PARALLEL
12256 && code != OMP_SECTIONS
12257 && code != OMP_SECTION
12258 && code != OMP_SINGLE);
12260 #endif
12262 /* Otherwise we're gimplifying a subexpression, so the resulting
12263 value is interesting. If it's a valid operand that matches
12264 GIMPLE_TEST_F, we're done. Unless we are handling some
12265 post-effects internally; if that's the case, we need to copy into
12266 a temporary before adding the post-effects to POST_P. */
12267 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12268 goto out;
12270 /* Otherwise, we need to create a new temporary for the gimplified
12271 expression. */
12273 /* We can't return an lvalue if we have an internal postqueue. The
12274 object the lvalue refers to would (probably) be modified by the
12275 postqueue; we need to copy the value out first, which means an
12276 rvalue. */
12277 if ((fallback & fb_lvalue)
12278 && gimple_seq_empty_p (internal_post)
12279 && is_gimple_addressable (*expr_p))
12281 /* An lvalue will do. Take the address of the expression, store it
12282 in a temporary, and replace the expression with an INDIRECT_REF of
12283 that temporary. */
12284 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12285 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12286 *expr_p = build_simple_mem_ref (tmp);
12288 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12290 /* An rvalue will do. Assign the gimplified expression into a
12291 new temporary TMP and replace the original expression with
12292 TMP. First, make sure that the expression has a type so that
12293 it can be assigned into a temporary. */
12294 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12295 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12297 else
12299 #ifdef ENABLE_GIMPLE_CHECKING
12300 if (!(fallback & fb_mayfail))
12302 fprintf (stderr, "gimplification failed:\n");
12303 print_generic_expr (stderr, *expr_p);
12304 debug_tree (*expr_p);
12305 internal_error ("gimplification failed");
12307 #endif
12308 gcc_assert (fallback & fb_mayfail);
12310 /* If this is an asm statement, and the user asked for the
12311 impossible, don't die. Fail and let gimplify_asm_expr
12312 issue an error. */
12313 ret = GS_ERROR;
12314 goto out;
12317 /* Make sure the temporary matches our predicate. */
12318 gcc_assert ((*gimple_test_f) (*expr_p));
12320 if (!gimple_seq_empty_p (internal_post))
12322 annotate_all_with_location (internal_post, input_location);
12323 gimplify_seq_add_seq (pre_p, internal_post);
12326 out:
12327 input_location = saved_location;
12328 return ret;
12331 /* Like gimplify_expr but make sure the gimplified result is not itself
12332 a SSA name (but a decl if it were). Temporaries required by
12333 evaluating *EXPR_P may be still SSA names. */
12335 static enum gimplify_status
12336 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12337 bool (*gimple_test_f) (tree), fallback_t fallback,
12338 bool allow_ssa)
12340 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12341 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12342 gimple_test_f, fallback);
12343 if (! allow_ssa
12344 && TREE_CODE (*expr_p) == SSA_NAME)
12346 tree name = *expr_p;
12347 if (was_ssa_name_p)
12348 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12349 else
12351 /* Avoid the extra copy if possible. */
12352 *expr_p = create_tmp_reg (TREE_TYPE (name));
12353 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12354 release_ssa_name (name);
12357 return ret;
12360 /* Look through TYPE for variable-sized objects and gimplify each such
12361 size that we find. Add to LIST_P any statements generated. */
12363 void
12364 gimplify_type_sizes (tree type, gimple_seq *list_p)
12366 tree field, t;
12368 if (type == NULL || type == error_mark_node)
12369 return;
12371 /* We first do the main variant, then copy into any other variants. */
12372 type = TYPE_MAIN_VARIANT (type);
12374 /* Avoid infinite recursion. */
12375 if (TYPE_SIZES_GIMPLIFIED (type))
12376 return;
12378 TYPE_SIZES_GIMPLIFIED (type) = 1;
12380 switch (TREE_CODE (type))
12382 case INTEGER_TYPE:
12383 case ENUMERAL_TYPE:
12384 case BOOLEAN_TYPE:
12385 case REAL_TYPE:
12386 case FIXED_POINT_TYPE:
12387 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12388 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12390 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12392 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12393 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12395 break;
12397 case ARRAY_TYPE:
12398 /* These types may not have declarations, so handle them here. */
12399 gimplify_type_sizes (TREE_TYPE (type), list_p);
12400 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12401 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12402 with assigned stack slots, for -O1+ -g they should be tracked
12403 by VTA. */
12404 if (!(TYPE_NAME (type)
12405 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12406 && DECL_IGNORED_P (TYPE_NAME (type)))
12407 && TYPE_DOMAIN (type)
12408 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12410 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12411 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12412 DECL_IGNORED_P (t) = 0;
12413 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12414 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12415 DECL_IGNORED_P (t) = 0;
12417 break;
12419 case RECORD_TYPE:
12420 case UNION_TYPE:
12421 case QUAL_UNION_TYPE:
12422 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12423 if (TREE_CODE (field) == FIELD_DECL)
12425 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12426 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12427 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12428 gimplify_type_sizes (TREE_TYPE (field), list_p);
12430 break;
12432 case POINTER_TYPE:
12433 case REFERENCE_TYPE:
12434 /* We used to recurse on the pointed-to type here, which turned out to
12435 be incorrect because its definition might refer to variables not
12436 yet initialized at this point if a forward declaration is involved.
12438 It was actually useful for anonymous pointed-to types to ensure
12439 that the sizes evaluation dominates every possible later use of the
12440 values. Restricting to such types here would be safe since there
12441 is no possible forward declaration around, but would introduce an
12442 undesirable middle-end semantic to anonymity. We then defer to
12443 front-ends the responsibility of ensuring that the sizes are
12444 evaluated both early and late enough, e.g. by attaching artificial
12445 type declarations to the tree. */
12446 break;
12448 default:
12449 break;
12452 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12453 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12455 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12457 TYPE_SIZE (t) = TYPE_SIZE (type);
12458 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12459 TYPE_SIZES_GIMPLIFIED (t) = 1;
12463 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12464 a size or position, has had all of its SAVE_EXPRs evaluated.
12465 We add any required statements to *STMT_P. */
12467 void
12468 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12470 tree expr = *expr_p;
12472 /* We don't do anything if the value isn't there, is constant, or contains
12473 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12474 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12475 will want to replace it with a new variable, but that will cause problems
12476 if this type is from outside the function. It's OK to have that here. */
12477 if (is_gimple_sizepos (expr))
12478 return;
12480 *expr_p = unshare_expr (expr);
12482 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12483 if the def vanishes. */
12484 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12487 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12488 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12489 is true, also gimplify the parameters. */
12491 gbind *
12492 gimplify_body (tree fndecl, bool do_parms)
12494 location_t saved_location = input_location;
12495 gimple_seq parm_stmts, seq;
12496 gimple *outer_stmt;
12497 gbind *outer_bind;
12498 struct cgraph_node *cgn;
12500 timevar_push (TV_TREE_GIMPLIFY);
12502 init_tree_ssa (cfun);
12504 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12505 gimplification. */
12506 default_rtl_profile ();
12508 gcc_assert (gimplify_ctxp == NULL);
12509 push_gimplify_context (true);
12511 if (flag_openacc || flag_openmp)
12513 gcc_assert (gimplify_omp_ctxp == NULL);
12514 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12515 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12518 /* Unshare most shared trees in the body and in that of any nested functions.
12519 It would seem we don't have to do this for nested functions because
12520 they are supposed to be output and then the outer function gimplified
12521 first, but the g++ front end doesn't always do it that way. */
12522 unshare_body (fndecl);
12523 unvisit_body (fndecl);
12525 cgn = cgraph_node::get (fndecl);
12526 if (cgn && cgn->origin)
12527 nonlocal_vlas = new hash_set<tree>;
12529 /* Make sure input_location isn't set to something weird. */
12530 input_location = DECL_SOURCE_LOCATION (fndecl);
12532 /* Resolve callee-copies. This has to be done before processing
12533 the body so that DECL_VALUE_EXPR gets processed correctly. */
12534 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12536 /* Gimplify the function's body. */
12537 seq = NULL;
12538 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12539 outer_stmt = gimple_seq_first_stmt (seq);
12540 if (!outer_stmt)
12542 outer_stmt = gimple_build_nop ();
12543 gimplify_seq_add_stmt (&seq, outer_stmt);
12546 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12547 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12548 if (gimple_code (outer_stmt) == GIMPLE_BIND
12549 && gimple_seq_first (seq) == gimple_seq_last (seq))
12550 outer_bind = as_a <gbind *> (outer_stmt);
12551 else
12552 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12554 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12556 /* If we had callee-copies statements, insert them at the beginning
12557 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12558 if (!gimple_seq_empty_p (parm_stmts))
12560 tree parm;
12562 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12563 gimple_bind_set_body (outer_bind, parm_stmts);
12565 for (parm = DECL_ARGUMENTS (current_function_decl);
12566 parm; parm = DECL_CHAIN (parm))
12567 if (DECL_HAS_VALUE_EXPR_P (parm))
12569 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12570 DECL_IGNORED_P (parm) = 0;
12574 if (nonlocal_vlas)
12576 if (nonlocal_vla_vars)
12578 /* tree-nested.c may later on call declare_vars (..., true);
12579 which relies on BLOCK_VARS chain to be the tail of the
12580 gimple_bind_vars chain. Ensure we don't violate that
12581 assumption. */
12582 if (gimple_bind_block (outer_bind)
12583 == DECL_INITIAL (current_function_decl))
12584 declare_vars (nonlocal_vla_vars, outer_bind, true);
12585 else
12586 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12587 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12588 nonlocal_vla_vars);
12589 nonlocal_vla_vars = NULL_TREE;
12591 delete nonlocal_vlas;
12592 nonlocal_vlas = NULL;
12595 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12596 && gimplify_omp_ctxp)
12598 delete_omp_context (gimplify_omp_ctxp);
12599 gimplify_omp_ctxp = NULL;
12602 pop_gimplify_context (outer_bind);
12603 gcc_assert (gimplify_ctxp == NULL);
12605 if (flag_checking && !seen_error ())
12606 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12608 timevar_pop (TV_TREE_GIMPLIFY);
12609 input_location = saved_location;
12611 return outer_bind;
12614 typedef char *char_p; /* For DEF_VEC_P. */
12616 /* Return whether we should exclude FNDECL from instrumentation. */
12618 static bool
12619 flag_instrument_functions_exclude_p (tree fndecl)
12621 vec<char_p> *v;
12623 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12624 if (v && v->length () > 0)
12626 const char *name;
12627 int i;
12628 char *s;
12630 name = lang_hooks.decl_printable_name (fndecl, 0);
12631 FOR_EACH_VEC_ELT (*v, i, s)
12632 if (strstr (name, s) != NULL)
12633 return true;
12636 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12637 if (v && v->length () > 0)
12639 const char *name;
12640 int i;
12641 char *s;
12643 name = DECL_SOURCE_FILE (fndecl);
12644 FOR_EACH_VEC_ELT (*v, i, s)
12645 if (strstr (name, s) != NULL)
12646 return true;
12649 return false;
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Make FNDECL's struct function the current one for the duration of
     gimplification; popped again at the end.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the result decl.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Track variables to poison/unpoison for use-after-scope detection
     while gimplifying the body.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the exit-hook call for the cleanup (finally) sequence:
	 __cyg_profile_func_exit (this_fn, __builtin_return_address (0)).  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook call that precedes the TRY_FINALLY:
	 __cyg_profile_func_enter (this_fn, __builtin_return_address (0)).  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, wrap the body so TSAN_FUNC_EXIT runs on
     every exit path.  */
  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC body is dead now; the GIMPLE body is authoritative.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
12781 /* Return a dummy expression of type TYPE in order to keep going after an
12782 error. */
12784 static tree
12785 dummy_object (tree type)
12787 tree t = build_int_cst (build_pointer_type (type), 0);
12788 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; preamble statements are emitted into
   PRE_P (POST_P is unused).  Returns GS_ERROR on a bad va_list type,
   GS_ALL_DONE for the promoted-type diagnostic path, and GS_OK after
   lowering to an IFN_VA_ARG internal call.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The follow-up hint is emitted only once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* TAG carries the requested type, APTAG the va_list type, both as
     null pointer constants used purely as type tags by the expander.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
12871 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12873 DST/SRC are the destination and source respectively. You can pass
12874 ungimplified trees in DST or SRC, in which case they will be
12875 converted to a gimple operand if necessary.
12877 This function returns the newly created GIMPLE_ASSIGN tuple. */
12879 gimple *
12880 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12882 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12883 gimplify_and_add (t, seq_p);
12884 ggc_free (t);
12885 return gimple_seq_last_stmt (*seq_p);
12888 inline hashval_t
12889 gimplify_hasher::hash (const elt_t *p)
12891 tree t = p->val;
12892 return iterative_hash_expr (t, 0);
/* Equality function for the gimplification temporary table: P1 and P2
   compare equal when their saved expressions are structurally equal
   and of identical type.  */

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  /* Cheap rejects first: tree code and exact type must match.  */
  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterminate, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}