PR rtl-optimization/82913
[official-gcc.git] / gcc / gimplify.c
blobe9168785fc010658f76d0b01f73fe8bb9ddf2a57
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "cilk.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "dbgcnt.h"
70 /* Hash set of poisoned variables in a bind expr. */
71 static hash_set<tree> *asan_poisoned_variables = NULL;
73 enum gimplify_omp_var_data
75 GOVD_SEEN = 1,
76 GOVD_EXPLICIT = 2,
77 GOVD_SHARED = 4,
78 GOVD_PRIVATE = 8,
79 GOVD_FIRSTPRIVATE = 16,
80 GOVD_LASTPRIVATE = 32,
81 GOVD_REDUCTION = 64,
82 GOVD_LOCAL = 128,
83 GOVD_MAP = 256,
84 GOVD_DEBUG_PRIVATE = 512,
85 GOVD_PRIVATE_OUTER_REF = 1024,
86 GOVD_LINEAR = 2048,
87 GOVD_ALIGNED = 4096,
89 /* Flag for GOVD_MAP: don't copy back. */
90 GOVD_MAP_TO_ONLY = 8192,
92 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
93 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
95 GOVD_MAP_0LEN_ARRAY = 32768,
97 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
98 GOVD_MAP_ALWAYS_TO = 65536,
100 /* Flag for shared vars that are or might be stored to in the region. */
101 GOVD_WRITTEN = 131072,
103 /* Flag for GOVD_MAP, if it is a forced mapping. */
104 GOVD_MAP_FORCE = 262144,
106 /* Flag for GOVD_MAP: must be present already. */
107 GOVD_MAP_FORCE_PRESENT = 524288,
109 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
110 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
111 | GOVD_LOCAL)
115 enum omp_region_type
117 ORT_WORKSHARE = 0x00,
118 ORT_SIMD = 0x01,
120 ORT_PARALLEL = 0x02,
121 ORT_COMBINED_PARALLEL = 0x03,
123 ORT_TASK = 0x04,
124 ORT_UNTIED_TASK = 0x05,
126 ORT_TEAMS = 0x08,
127 ORT_COMBINED_TEAMS = 0x09,
129 /* Data region. */
130 ORT_TARGET_DATA = 0x10,
132 /* Data region with offloading. */
133 ORT_TARGET = 0x20,
134 ORT_COMBINED_TARGET = 0x21,
136 /* OpenACC variants. */
137 ORT_ACC = 0x40, /* A generic OpenACC region. */
138 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
139 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
140 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
141 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
143 /* Dummy OpenMP region, used to disable expansion of
144 DECL_VALUE_EXPRs in taskloop pre body. */
145 ORT_NONE = 0x100
148 /* Gimplify hashtable helper. */
150 struct gimplify_hasher : free_ptr_hash <elt_t>
152 static inline hashval_t hash (const elt_t *);
153 static inline bool equal (const elt_t *, const elt_t *);
156 struct gimplify_ctx
158 struct gimplify_ctx *prev_context;
160 vec<gbind *> bind_expr_stack;
161 tree temps;
162 gimple_seq conditional_cleanups;
163 tree exit_label;
164 tree return_temp;
166 vec<tree> case_labels;
167 hash_set<tree> *live_switch_vars;
168 /* The formal temporary table. Should this be persistent? */
169 hash_table<gimplify_hasher> *temp_htab;
171 int conditions;
172 unsigned into_ssa : 1;
173 unsigned allow_rhs_cond_expr : 1;
174 unsigned in_cleanup_point_expr : 1;
175 unsigned keep_stack : 1;
176 unsigned save_stack : 1;
177 unsigned in_switch_expr : 1;
180 struct gimplify_omp_ctx
182 struct gimplify_omp_ctx *outer_context;
183 splay_tree variables;
184 hash_set<tree> *privatized_types;
185 /* Iteration variables in an OMP_FOR. */
186 vec<tree> loop_iter_var;
187 location_t location;
188 enum omp_clause_default_kind default_kind;
189 enum omp_region_type region_type;
190 bool combined_loop;
191 bool distribute;
192 bool target_map_scalars_firstprivate;
193 bool target_map_pointers_as_0len_arrays;
194 bool target_firstprivatize_array_bases;
197 static struct gimplify_ctx *gimplify_ctxp;
198 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
200 /* Forward declaration. */
201 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
202 static hash_map<tree, tree> *oacc_declare_returns;
203 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
204 bool (*) (tree), fallback_t, bool);
206 /* Shorter alias name for the above function for use in gimplify.c
207 only. */
209 static inline void
210 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
212 gimple_seq_add_stmt_without_update (seq_p, gs);
215 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
216 NULL, a new sequence is allocated. This function is
217 similar to gimple_seq_add_seq, but does not scan the operands.
218 During gimplification, we need to manipulate statement sequences
219 before the def/use vectors have been constructed. */
221 static void
222 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
224 gimple_stmt_iterator si;
226 if (src == NULL)
227 return;
229 si = gsi_last (*dst_p);
230 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
234 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
235 and popping gimplify contexts. */
237 static struct gimplify_ctx *ctx_pool = NULL;
239 /* Return a gimplify context struct from the pool. */
241 static inline struct gimplify_ctx *
242 ctx_alloc (void)
244 struct gimplify_ctx * c = ctx_pool;
246 if (c)
247 ctx_pool = c->prev_context;
248 else
249 c = XNEW (struct gimplify_ctx);
251 memset (c, '\0', sizeof (*c));
252 return c;
255 /* Put gimplify context C back into the pool. */
257 static inline void
258 ctx_free (struct gimplify_ctx *c)
260 c->prev_context = ctx_pool;
261 ctx_pool = c;
264 /* Free allocated ctx stack memory. */
266 void
267 free_gimplify_stack (void)
269 struct gimplify_ctx *c;
271 while ((c = ctx_pool))
273 ctx_pool = c->prev_context;
274 free (c);
279 /* Set up a context for the gimplifier. */
281 void
282 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
284 struct gimplify_ctx *c = ctx_alloc ();
286 c->prev_context = gimplify_ctxp;
287 gimplify_ctxp = c;
288 gimplify_ctxp->into_ssa = in_ssa;
289 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
292 /* Tear down a context for the gimplifier. If BODY is non-null, then
293 put the temporaries into the outer BIND_EXPR. Otherwise, put them
294 in the local_decls.
296 BODY is not a sequence, but the first tuple in a sequence. */
298 void
299 pop_gimplify_context (gimple *body)
301 struct gimplify_ctx *c = gimplify_ctxp;
303 gcc_assert (c
304 && (!c->bind_expr_stack.exists ()
305 || c->bind_expr_stack.is_empty ()));
306 c->bind_expr_stack.release ();
307 gimplify_ctxp = c->prev_context;
309 if (body)
310 declare_vars (c->temps, body, false);
311 else
312 record_vars (c->temps);
314 delete c->temp_htab;
315 c->temp_htab = NULL;
316 ctx_free (c);
319 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
321 static void
322 gimple_push_bind_expr (gbind *bind_stmt)
324 gimplify_ctxp->bind_expr_stack.reserve (8);
325 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
328 /* Pop the first element off the stack of bindings. */
330 static void
331 gimple_pop_bind_expr (void)
333 gimplify_ctxp->bind_expr_stack.pop ();
336 /* Return the first element of the stack of bindings. */
338 gbind *
339 gimple_current_bind_expr (void)
341 return gimplify_ctxp->bind_expr_stack.last ();
344 /* Return the stack of bindings created during gimplification. */
346 vec<gbind *>
347 gimple_bind_expr_stack (void)
349 return gimplify_ctxp->bind_expr_stack;
352 /* Return true iff there is a COND_EXPR between us and the innermost
353 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
355 static bool
356 gimple_conditional_context (void)
358 return gimplify_ctxp->conditions > 0;
361 /* Note that we've entered a COND_EXPR. */
363 static void
364 gimple_push_condition (void)
366 #ifdef ENABLE_GIMPLE_CHECKING
367 if (gimplify_ctxp->conditions == 0)
368 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
369 #endif
370 ++(gimplify_ctxp->conditions);
373 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
374 now, add any conditional cleanups we've seen to the prequeue. */
376 static void
377 gimple_pop_condition (gimple_seq *pre_p)
379 int conds = --(gimplify_ctxp->conditions);
381 gcc_assert (conds >= 0);
382 if (conds == 0)
384 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
385 gimplify_ctxp->conditional_cleanups = NULL;
389 /* A stable comparison routine for use with splay trees and DECLs. */
391 static int
392 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
394 tree a = (tree) xa;
395 tree b = (tree) xb;
397 return DECL_UID (a) - DECL_UID (b);
400 /* Create a new omp construct that deals with variable remapping. */
402 static struct gimplify_omp_ctx *
403 new_omp_context (enum omp_region_type region_type)
405 struct gimplify_omp_ctx *c;
407 c = XCNEW (struct gimplify_omp_ctx);
408 c->outer_context = gimplify_omp_ctxp;
409 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
410 c->privatized_types = new hash_set<tree>;
411 c->location = input_location;
412 c->region_type = region_type;
413 if ((region_type & ORT_TASK) == 0)
414 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
415 else
416 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
418 return c;
421 /* Destroy an omp construct that deals with variable remapping. */
423 static void
424 delete_omp_context (struct gimplify_omp_ctx *c)
426 splay_tree_delete (c->variables);
427 delete c->privatized_types;
428 c->loop_iter_var.release ();
429 XDELETE (c);
432 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
433 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
435 /* Both gimplify the statement T and append it to *SEQ_P. This function
436 behaves exactly as gimplify_stmt, but you don't have to pass T as a
437 reference. */
439 void
440 gimplify_and_add (tree t, gimple_seq *seq_p)
442 gimplify_stmt (&t, seq_p);
445 /* Gimplify statement T into sequence *SEQ_P, and return the first
446 tuple in the sequence of generated tuples for this statement.
447 Return NULL if gimplifying T produced no tuples. */
449 static gimple *
450 gimplify_and_return_first (tree t, gimple_seq *seq_p)
452 gimple_stmt_iterator last = gsi_last (*seq_p);
454 gimplify_and_add (t, seq_p);
456 if (!gsi_end_p (last))
458 gsi_next (&last);
459 return gsi_stmt (last);
461 else
462 return gimple_seq_first_stmt (*seq_p);
465 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
466 LHS, or for a call argument. */
468 static bool
469 is_gimple_mem_rhs (tree t)
471 /* If we're dealing with a renamable type, either source or dest must be
472 a renamed variable. */
473 if (is_gimple_reg_type (TREE_TYPE (t)))
474 return is_gimple_val (t);
475 else
476 return is_gimple_val (t) || is_gimple_lvalue (t);
479 /* Return true if T is a CALL_EXPR or an expression that can be
480 assigned to a temporary. Note that this predicate should only be
481 used during gimplification. See the rationale for this in
482 gimplify_modify_expr. */
484 static bool
485 is_gimple_reg_rhs_or_call (tree t)
487 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
488 || TREE_CODE (t) == CALL_EXPR);
491 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
492 this predicate should only be used during gimplification. See the
493 rationale for this in gimplify_modify_expr. */
495 static bool
496 is_gimple_mem_rhs_or_call (tree t)
498 /* If we're dealing with a renamable type, either source or dest must be
499 a renamed variable. */
500 if (is_gimple_reg_type (TREE_TYPE (t)))
501 return is_gimple_val (t);
502 else
503 return (is_gimple_val (t)
504 || is_gimple_lvalue (t)
505 || TREE_CLOBBER_P (t)
506 || TREE_CODE (t) == CALL_EXPR);
509 /* Create a temporary with a name derived from VAL. Subroutine of
510 lookup_tmp_var; nobody else should call this function. */
512 static inline tree
513 create_tmp_from_val (tree val)
515 /* Drop all qualifiers and address-space information from the value type. */
516 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
517 tree var = create_tmp_var (type, get_name (val));
518 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
519 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
520 DECL_GIMPLE_REG_P (var) = 1;
521 return var;
524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
525 an existing expression temporary. */
527 static tree
528 lookup_tmp_var (tree val, bool is_formal)
530 tree ret;
532 /* If not optimizing, never really reuse a temporary. local-alloc
533 won't allocate any variable that is used in more than one basic
534 block, which means it will go into memory, causing much extra
535 work in reload and final and poorer code generation, outweighing
536 the extra memory allocation here. */
537 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
538 ret = create_tmp_from_val (val);
539 else
541 elt_t elt, *elt_p;
542 elt_t **slot;
544 elt.val = val;
545 if (!gimplify_ctxp->temp_htab)
546 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
547 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
548 if (*slot == NULL)
550 elt_p = XNEW (elt_t);
551 elt_p->val = val;
552 elt_p->temp = ret = create_tmp_from_val (val);
553 *slot = elt_p;
555 else
557 elt_p = *slot;
558 ret = elt_p->temp;
562 return ret;
565 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
567 static tree
568 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
569 bool is_formal, bool allow_ssa)
571 tree t, mod;
573 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
574 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
575 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
576 fb_rvalue);
578 if (allow_ssa
579 && gimplify_ctxp->into_ssa
580 && is_gimple_reg_type (TREE_TYPE (val)))
582 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
583 if (! gimple_in_ssa_p (cfun))
585 const char *name = get_name (val);
586 if (name)
587 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
590 else
591 t = lookup_tmp_var (val, is_formal);
593 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
595 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
597 /* gimplify_modify_expr might want to reduce this further. */
598 gimplify_and_add (mod, pre_p);
599 ggc_free (mod);
601 return t;
604 /* Return a formal temporary variable initialized with VAL. PRE_P is as
605 in gimplify_expr. Only use this function if:
607 1) The value of the unfactored expression represented by VAL will not
608 change between the initialization and use of the temporary, and
609 2) The temporary will not be otherwise modified.
611 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
612 and #2 means it is inappropriate for && temps.
614 For other cases, use get_initialized_tmp_var instead. */
616 tree
617 get_formal_tmp_var (tree val, gimple_seq *pre_p)
619 return internal_get_tmp_var (val, pre_p, NULL, true, true);
622 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
623 are as in gimplify_expr. */
625 tree
626 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
627 bool allow_ssa)
629 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
632 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
633 generate debug info for them; otherwise don't. */
635 void
636 declare_vars (tree vars, gimple *gs, bool debug_info)
638 tree last = vars;
639 if (last)
641 tree temps, block;
643 gbind *scope = as_a <gbind *> (gs);
645 temps = nreverse (last);
647 block = gimple_bind_block (scope);
648 gcc_assert (!block || TREE_CODE (block) == BLOCK);
649 if (!block || !debug_info)
651 DECL_CHAIN (last) = gimple_bind_vars (scope);
652 gimple_bind_set_vars (scope, temps);
654 else
656 /* We need to attach the nodes both to the BIND_EXPR and to its
657 associated BLOCK for debugging purposes. The key point here
658 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
659 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
660 if (BLOCK_VARS (block))
661 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
662 else
664 gimple_bind_set_vars (scope,
665 chainon (gimple_bind_vars (scope), temps));
666 BLOCK_VARS (block) = temps;
672 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
673 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
674 no such upper bound can be obtained. */
676 static void
677 force_constant_size (tree var)
679 /* The only attempt we make is by querying the maximum size of objects
680 of the variable's type. */
682 HOST_WIDE_INT max_size;
684 gcc_assert (VAR_P (var));
686 max_size = max_int_size_in_bytes (TREE_TYPE (var));
688 gcc_assert (max_size >= 0);
690 DECL_SIZE_UNIT (var)
691 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
692 DECL_SIZE (var)
693 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
696 /* Push the temporary variable TMP into the current binding. */
698 void
699 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
701 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
703 /* Later processing assumes that the object size is constant, which might
704 not be true at this point. Force the use of a constant upper bound in
705 this case. */
706 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
707 force_constant_size (tmp);
709 DECL_CONTEXT (tmp) = fn->decl;
710 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
712 record_vars_into (tmp, fn->decl);
715 /* Push the temporary variable TMP into the current binding. */
717 void
718 gimple_add_tmp_var (tree tmp)
720 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
726 force_constant_size (tmp);
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
731 if (gimplify_ctxp)
733 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx
741 && (ctx->region_type == ORT_WORKSHARE
742 || ctx->region_type == ORT_SIMD
743 || ctx->region_type == ORT_ACC))
744 ctx = ctx->outer_context;
745 if (ctx)
746 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
749 else if (cfun)
750 record_vars (tmp);
751 else
753 gimple_seq body_seq;
755 /* This case is for nested functions. We need to expose the locals
756 they create. */
757 body_seq = gimple_body (current_function_decl);
758 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
764 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
765 nodes that are referenced more than once in GENERIC functions. This is
766 necessary because gimplification (translation into GIMPLE) is performed
767 by modifying tree nodes in-place, so gimplication of a shared node in a
768 first context could generate an invalid GIMPLE form in a second context.
770 This is achieved with a simple mark/copy/unmark algorithm that walks the
771 GENERIC representation top-down, marks nodes with TREE_VISITED the first
772 time it encounters them, duplicates them if they already have TREE_VISITED
773 set, and finally removes the TREE_VISITED marks it has set.
775 The algorithm works only at the function level, i.e. it generates a GENERIC
776 representation of a function with no nodes shared within the function when
777 passed a GENERIC function (except for nodes that are allowed to be shared).
779 At the global level, it is also necessary to unshare tree nodes that are
780 referenced in more than one function, for the same aforementioned reason.
781 This requires some cooperation from the front-end. There are 2 strategies:
783 1. Manual unsharing. The front-end needs to call unshare_expr on every
784 expression that might end up being shared across functions.
786 2. Deep unsharing. This is an extension of regular unsharing. Instead
787 of calling unshare_expr on expressions that might be shared across
788 functions, the front-end pre-marks them with TREE_VISITED. This will
789 ensure that they are unshared on the first reference within functions
790 when the regular unsharing algorithm runs. The counterpart is that
791 this algorithm must look deeper than for manual unsharing, which is
792 specified by LANG_HOOKS_DEEP_UNSHARING.
794 If there are only few specific cases of node sharing across functions, it is
795 probably easier for a front-end to unshare the expressions manually. On the
796 contrary, if the expressions generated at the global level are as widespread
797 as expressions generated within functions, deep unsharing is very likely the
798 way to go. */
800 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
801 These nodes model computations that must be done once. If we were to
802 unshare something like SAVE_EXPR(i++), the gimplification process would
803 create wrong code. However, if DATA is non-null, it must hold a pointer
804 set that is used to unshare the subtrees of these nodes. */
806 static tree
807 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
809 tree t = *tp;
810 enum tree_code code = TREE_CODE (t);
812 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
813 copy their subtrees if we can make sure to do it only once. */
814 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
816 if (data && !((hash_set<tree> *)data)->add (t))
818 else
819 *walk_subtrees = 0;
822 /* Stop at types, decls, constants like copy_tree_r. */
823 else if (TREE_CODE_CLASS (code) == tcc_type
824 || TREE_CODE_CLASS (code) == tcc_declaration
825 || TREE_CODE_CLASS (code) == tcc_constant)
826 *walk_subtrees = 0;
828 /* Cope with the statement expression extension. */
829 else if (code == STATEMENT_LIST)
832 /* Leave the bulk of the work to copy_tree_r itself. */
833 else
834 copy_tree_r (tp, walk_subtrees, NULL);
836 return NULL_TREE;
839 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
840 If *TP has been visited already, then *TP is deeply copied by calling
841 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
843 static tree
844 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
846 tree t = *tp;
847 enum tree_code code = TREE_CODE (t);
849 /* Skip types, decls, and constants. But we do want to look at their
850 types and the bounds of types. Mark them as visited so we properly
851 unmark their subtrees on the unmark pass. If we've already seen them,
852 don't look down further. */
853 if (TREE_CODE_CLASS (code) == tcc_type
854 || TREE_CODE_CLASS (code) == tcc_declaration
855 || TREE_CODE_CLASS (code) == tcc_constant)
857 if (TREE_VISITED (t))
858 *walk_subtrees = 0;
859 else
860 TREE_VISITED (t) = 1;
863 /* If this node has been visited already, unshare it and don't look
864 any deeper. */
865 else if (TREE_VISITED (t))
867 walk_tree (tp, mostly_copy_tree_r, data, NULL);
868 *walk_subtrees = 0;
871 /* Otherwise, mark the node as visited and keep looking. */
872 else
873 TREE_VISITED (t) = 1;
875 return NULL_TREE;
878 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
879 copy_if_shared_r callback unmodified. */
881 static inline void
882 copy_if_shared (tree *tp, void *data)
884 walk_tree (tp, copy_if_shared_r, data, NULL);
887 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
888 any nested functions. */
890 static void
891 unshare_body (tree fndecl)
893 struct cgraph_node *cgn = cgraph_node::get (fndecl);
894 /* If the language requires deep unsharing, we need a pointer set to make
895 sure we don't repeatedly unshare subtrees of unshareable nodes. */
896 hash_set<tree> *visited
897 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
899 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
900 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
901 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
903 delete visited;
905 if (cgn)
906 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
907 unshare_body (cgn->decl);
910 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
911 Subtrees are walked until the first unvisited node is encountered. */
913 static tree
914 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
916 tree t = *tp;
918 /* If this node has been visited, unmark it and keep looking. */
919 if (TREE_VISITED (t))
920 TREE_VISITED (t) = 0;
922 /* Otherwise, don't look any deeper. */
923 else
924 *walk_subtrees = 0;
926 return NULL_TREE;
929 /* Unmark the visited trees rooted at *TP. */
931 static inline void
932 unmark_visited (tree *tp)
934 walk_tree (tp, unmark_visited_r, NULL, NULL);
937 /* Likewise, but mark all trees as not visited. */
939 static void
940 unvisit_body (tree fndecl)
942 struct cgraph_node *cgn = cgraph_node::get (fndecl);
944 unmark_visited (&DECL_SAVED_TREE (fndecl));
945 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
946 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
948 if (cgn)
949 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
950 unvisit_body (cgn->decl);
953 /* Unconditionally make an unshared copy of EXPR. This is used when using
954 stored expressions which span multiple functions, such as BINFO_VTABLE,
955 as the normal unsharing process can't tell that they're shared. */
957 tree
958 unshare_expr (tree expr)
960 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
961 return expr;
964 /* Worker for unshare_expr_without_location. */
966 static tree
967 prune_expr_location (tree *tp, int *walk_subtrees, void *)
969 if (EXPR_P (*tp))
970 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
971 else
972 *walk_subtrees = 0;
973 return NULL_TREE;
976 /* Similar to unshare_expr but also prune all expression locations
977 from EXPR. */
979 tree
980 unshare_expr_without_location (tree expr)
982 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
983 if (EXPR_P (expr))
984 walk_tree (&expr, prune_expr_location, NULL, NULL);
985 return expr;
988 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
989 contain statements and have a value. Assign its value to a temporary
990 and give it void_type_node. Return the temporary, or NULL_TREE if
991 WRAPPER was already void. */
993 tree
994 voidify_wrapper_expr (tree wrapper, tree temp)
996 tree type = TREE_TYPE (wrapper);
997 if (type && !VOID_TYPE_P (type))
999 tree *p;
1001 /* Set p to point to the body of the wrapper. Loop until we find
1002 something that isn't a wrapper. */
1003 for (p = &wrapper; p && *p; )
1005 switch (TREE_CODE (*p))
1007 case BIND_EXPR:
1008 TREE_SIDE_EFFECTS (*p) = 1;
1009 TREE_TYPE (*p) = void_type_node;
1010 /* For a BIND_EXPR, the body is operand 1. */
1011 p = &BIND_EXPR_BODY (*p);
1012 break;
1014 case CLEANUP_POINT_EXPR:
1015 case TRY_FINALLY_EXPR:
1016 case TRY_CATCH_EXPR:
1017 TREE_SIDE_EFFECTS (*p) = 1;
1018 TREE_TYPE (*p) = void_type_node;
1019 p = &TREE_OPERAND (*p, 0);
1020 break;
1022 case STATEMENT_LIST:
1024 tree_stmt_iterator i = tsi_last (*p);
1025 TREE_SIDE_EFFECTS (*p) = 1;
1026 TREE_TYPE (*p) = void_type_node;
1027 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1029 break;
1031 case COMPOUND_EXPR:
1032 /* Advance to the last statement. Set all container types to
1033 void. */
1034 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1036 TREE_SIDE_EFFECTS (*p) = 1;
1037 TREE_TYPE (*p) = void_type_node;
1039 break;
1041 case TRANSACTION_EXPR:
1042 TREE_SIDE_EFFECTS (*p) = 1;
1043 TREE_TYPE (*p) = void_type_node;
1044 p = &TRANSACTION_EXPR_BODY (*p);
1045 break;
1047 default:
1048 /* Assume that any tree upon which voidify_wrapper_expr is
1049 directly called is a wrapper, and that its body is op0. */
1050 if (p == &wrapper)
1052 TREE_SIDE_EFFECTS (*p) = 1;
1053 TREE_TYPE (*p) = void_type_node;
1054 p = &TREE_OPERAND (*p, 0);
1055 break;
1057 goto out;
1061 out:
1062 if (p == NULL || IS_EMPTY_STMT (*p))
1063 temp = NULL_TREE;
1064 else if (temp)
1066 /* The wrapper is on the RHS of an assignment that we're pushing
1067 down. */
1068 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1069 || TREE_CODE (temp) == MODIFY_EXPR);
1070 TREE_OPERAND (temp, 1) = *p;
1071 *p = temp;
1073 else
1075 temp = create_tmp_var (type, "retval");
1076 *p = build2 (INIT_EXPR, type, temp, *p);
1079 return temp;
1082 return NULL_TREE;
1085 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1086 a temporary through which they communicate. */
1088 static void
1089 build_stack_save_restore (gcall **save, gcall **restore)
1091 tree tmp_var;
1093 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1094 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1095 gimple_call_set_lhs (*save, tmp_var);
1097 *restore
1098 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1099 1, tmp_var);
1102 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1104 static tree
1105 build_asan_poison_call_expr (tree decl)
1107 /* Do not poison variables that have size equal to zero. */
1108 tree unit_size = DECL_SIZE_UNIT (decl);
1109 if (zerop (unit_size))
1110 return NULL_TREE;
1112 tree base = build_fold_addr_expr (decl);
1114 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1115 void_type_node, 3,
1116 build_int_cst (integer_type_node,
1117 ASAN_MARK_POISON),
1118 base, unit_size);
1121 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1122 on POISON flag, shadow memory of a DECL variable. The call will be
1123 put on location identified by IT iterator, where BEFORE flag drives
1124 position where the stmt will be put. */
1126 static void
1127 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1128 bool before)
1130 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1131 if (gimplify_omp_ctxp)
1132 return;
1134 tree unit_size = DECL_SIZE_UNIT (decl);
1135 tree base = build_fold_addr_expr (decl);
1137 /* Do not poison variables that have size equal to zero. */
1138 if (zerop (unit_size))
1139 return;
1141 /* It's necessary to have all stack variables aligned to ASAN granularity
1142 bytes. */
1143 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1144 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1146 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1148 gimple *g
1149 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1150 build_int_cst (integer_type_node, flags),
1151 base, unit_size);
1153 if (before)
1154 gsi_insert_before (it, g, GSI_NEW_STMT);
1155 else
1156 gsi_insert_after (it, g, GSI_NEW_STMT);
1159 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1160 either poisons or unpoisons a DECL. Created statement is appended
1161 to SEQ_P gimple sequence. */
1163 static void
1164 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1166 gimple_stmt_iterator it = gsi_last (*seq_p);
1167 bool before = false;
1169 if (gsi_end_p (it))
1170 before = true;
1172 asan_poison_variable (decl, poison, &it, before);
1175 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1177 static int
1178 sort_by_decl_uid (const void *a, const void *b)
1180 const tree *t1 = (const tree *)a;
1181 const tree *t2 = (const tree *)b;
1183 int uid1 = DECL_UID (*t1);
1184 int uid2 = DECL_UID (*t2);
1186 if (uid1 < uid2)
1187 return -1;
1188 else if (uid1 > uid2)
1189 return 1;
1190 else
1191 return 0;
1194 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1195 depending on POISON flag. Created statement is appended
1196 to SEQ_P gimple sequence. */
1198 static void
1199 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1201 unsigned c = variables->elements ();
1202 if (c == 0)
1203 return;
1205 auto_vec<tree> sorted_variables (c);
1207 for (hash_set<tree>::iterator it = variables->begin ();
1208 it != variables->end (); ++it)
1209 sorted_variables.safe_push (*it);
1211 sorted_variables.qsort (sort_by_decl_uid);
1213 unsigned i;
1214 tree var;
1215 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1217 asan_poison_variable (var, poison, seq_p);
1219 /* Add use_after_scope_memory attribute for the variable in order
1220 to prevent re-written into SSA. */
1221 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1222 DECL_ATTRIBUTES (var)))
1223 DECL_ATTRIBUTES (var)
1224 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1225 integer_one_node,
1226 DECL_ATTRIBUTES (var));
1230 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1232 static enum gimplify_status
1233 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1235 tree bind_expr = *expr_p;
1236 bool old_keep_stack = gimplify_ctxp->keep_stack;
1237 bool old_save_stack = gimplify_ctxp->save_stack;
1238 tree t;
1239 gbind *bind_stmt;
1240 gimple_seq body, cleanup;
1241 gcall *stack_save;
1242 location_t start_locus = 0, end_locus = 0;
1243 tree ret_clauses = NULL;
1245 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1247 /* Mark variables seen in this bind expr. */
1248 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1250 if (VAR_P (t))
1252 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1254 /* Mark variable as local. */
1255 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1256 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1257 || splay_tree_lookup (ctx->variables,
1258 (splay_tree_key) t) == NULL))
1260 if (ctx->region_type == ORT_SIMD
1261 && TREE_ADDRESSABLE (t)
1262 && !TREE_STATIC (t))
1263 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1264 else
1265 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1268 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1270 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1271 cfun->has_local_explicit_reg_vars = true;
1274 /* Preliminarily mark non-addressed complex variables as eligible
1275 for promotion to gimple registers. We'll transform their uses
1276 as we find them. */
1277 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1278 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1279 && !TREE_THIS_VOLATILE (t)
1280 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1281 && !needs_to_live_in_memory (t))
1282 DECL_GIMPLE_REG_P (t) = 1;
1285 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1286 BIND_EXPR_BLOCK (bind_expr));
1287 gimple_push_bind_expr (bind_stmt);
1289 gimplify_ctxp->keep_stack = false;
1290 gimplify_ctxp->save_stack = false;
1292 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1293 body = NULL;
1294 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1295 gimple_bind_set_body (bind_stmt, body);
1297 /* Source location wise, the cleanup code (stack_restore and clobbers)
1298 belongs to the end of the block, so propagate what we have. The
1299 stack_save operation belongs to the beginning of block, which we can
1300 infer from the bind_expr directly if the block has no explicit
1301 assignment. */
1302 if (BIND_EXPR_BLOCK (bind_expr))
1304 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1305 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1307 if (start_locus == 0)
1308 start_locus = EXPR_LOCATION (bind_expr);
1310 cleanup = NULL;
1311 stack_save = NULL;
1313 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1314 the stack space allocated to the VLAs. */
1315 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1317 gcall *stack_restore;
1319 /* Save stack on entry and restore it on exit. Add a try_finally
1320 block to achieve this. */
1321 build_stack_save_restore (&stack_save, &stack_restore);
1323 gimple_set_location (stack_save, start_locus);
1324 gimple_set_location (stack_restore, end_locus);
1326 gimplify_seq_add_stmt (&cleanup, stack_restore);
1329 /* Add clobbers for all variables that go out of scope. */
1330 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1332 if (VAR_P (t)
1333 && !is_global_var (t)
1334 && DECL_CONTEXT (t) == current_function_decl)
1336 if (!DECL_HARD_REGISTER (t)
1337 && !TREE_THIS_VOLATILE (t)
1338 && !DECL_HAS_VALUE_EXPR_P (t)
1339 /* Only care for variables that have to be in memory. Others
1340 will be rewritten into SSA names, hence moved to the
1341 top-level. */
1342 && !is_gimple_reg (t)
1343 && flag_stack_reuse != SR_NONE)
1345 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1346 gimple *clobber_stmt;
1347 TREE_THIS_VOLATILE (clobber) = 1;
1348 clobber_stmt = gimple_build_assign (t, clobber);
1349 gimple_set_location (clobber_stmt, end_locus);
1350 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1353 if (flag_openacc && oacc_declare_returns != NULL)
1355 tree *c = oacc_declare_returns->get (t);
1356 if (c != NULL)
1358 if (ret_clauses)
1359 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1361 ret_clauses = *c;
1363 oacc_declare_returns->remove (t);
1365 if (oacc_declare_returns->elements () == 0)
1367 delete oacc_declare_returns;
1368 oacc_declare_returns = NULL;
1374 if (asan_poisoned_variables != NULL
1375 && asan_poisoned_variables->contains (t))
1377 asan_poisoned_variables->remove (t);
1378 asan_poison_variable (t, true, &cleanup);
1381 if (gimplify_ctxp->live_switch_vars != NULL
1382 && gimplify_ctxp->live_switch_vars->contains (t))
1383 gimplify_ctxp->live_switch_vars->remove (t);
1386 if (ret_clauses)
1388 gomp_target *stmt;
1389 gimple_stmt_iterator si = gsi_start (cleanup);
1391 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1392 ret_clauses);
1393 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1396 if (cleanup)
1398 gtry *gs;
1399 gimple_seq new_body;
1401 new_body = NULL;
1402 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1403 GIMPLE_TRY_FINALLY);
1405 if (stack_save)
1406 gimplify_seq_add_stmt (&new_body, stack_save);
1407 gimplify_seq_add_stmt (&new_body, gs);
1408 gimple_bind_set_body (bind_stmt, new_body);
1411 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1412 if (!gimplify_ctxp->keep_stack)
1413 gimplify_ctxp->keep_stack = old_keep_stack;
1414 gimplify_ctxp->save_stack = old_save_stack;
1416 gimple_pop_bind_expr ();
1418 gimplify_seq_add_stmt (pre_p, bind_stmt);
1420 if (temp)
1422 *expr_p = temp;
1423 return GS_OK;
1426 *expr_p = NULL_TREE;
1427 return GS_ALL_DONE;
1430 /* Maybe add early return predict statement to PRE_P sequence. */
1432 static void
1433 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1435 /* If we are not in a conditional context, add PREDICT statement. */
1436 if (gimple_conditional_context ())
1438 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1439 NOT_TAKEN);
1440 gimplify_seq_add_stmt (pre_p, predict);
1444 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1445 GIMPLE value, it is assigned to a new temporary and the statement is
1446 re-written to return the temporary.
1448 PRE_P points to the sequence where side effects that must happen before
1449 STMT should be stored. */
1451 static enum gimplify_status
1452 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1454 greturn *ret;
1455 tree ret_expr = TREE_OPERAND (stmt, 0);
1456 tree result_decl, result;
1458 if (ret_expr == error_mark_node)
1459 return GS_ERROR;
1461 /* Implicit _Cilk_sync must be inserted right before any return statement
1462 if there is a _Cilk_spawn in the function. If the user has provided a
1463 _Cilk_sync, the optimizer should remove this duplicate one. */
1464 if (fn_contains_cilk_spawn_p (cfun))
1466 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1467 gimplify_and_add (impl_sync, pre_p);
1470 if (!ret_expr
1471 || TREE_CODE (ret_expr) == RESULT_DECL
1472 || ret_expr == error_mark_node)
1474 maybe_add_early_return_predict_stmt (pre_p);
1475 greturn *ret = gimple_build_return (ret_expr);
1476 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1477 gimplify_seq_add_stmt (pre_p, ret);
1478 return GS_ALL_DONE;
1481 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1482 result_decl = NULL_TREE;
1483 else
1485 result_decl = TREE_OPERAND (ret_expr, 0);
1487 /* See through a return by reference. */
1488 if (TREE_CODE (result_decl) == INDIRECT_REF)
1489 result_decl = TREE_OPERAND (result_decl, 0);
1491 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1492 || TREE_CODE (ret_expr) == INIT_EXPR)
1493 && TREE_CODE (result_decl) == RESULT_DECL);
1496 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1497 Recall that aggregate_value_p is FALSE for any aggregate type that is
1498 returned in registers. If we're returning values in registers, then
1499 we don't want to extend the lifetime of the RESULT_DECL, particularly
1500 across another call. In addition, for those aggregates for which
1501 hard_function_value generates a PARALLEL, we'll die during normal
1502 expansion of structure assignments; there's special code in expand_return
1503 to handle this case that does not exist in expand_expr. */
1504 if (!result_decl)
1505 result = NULL_TREE;
1506 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1508 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1510 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1511 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1512 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1513 should be effectively allocated by the caller, i.e. all calls to
1514 this function must be subject to the Return Slot Optimization. */
1515 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1516 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1518 result = result_decl;
1520 else if (gimplify_ctxp->return_temp)
1521 result = gimplify_ctxp->return_temp;
1522 else
1524 result = create_tmp_reg (TREE_TYPE (result_decl));
1526 /* ??? With complex control flow (usually involving abnormal edges),
1527 we can wind up warning about an uninitialized value for this. Due
1528 to how this variable is constructed and initialized, this is never
1529 true. Give up and never warn. */
1530 TREE_NO_WARNING (result) = 1;
1532 gimplify_ctxp->return_temp = result;
1535 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1536 Then gimplify the whole thing. */
1537 if (result != result_decl)
1538 TREE_OPERAND (ret_expr, 0) = result;
1540 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1542 maybe_add_early_return_predict_stmt (pre_p);
1543 ret = gimple_build_return (result);
1544 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1545 gimplify_seq_add_stmt (pre_p, ret);
1547 return GS_ALL_DONE;
1550 /* Gimplify a variable-length array DECL. */
1552 static void
1553 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1555 /* This is a variable-sized decl. Simplify its size and mark it
1556 for deferred expansion. */
1557 tree t, addr, ptr_type;
1559 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1560 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1562 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1563 if (DECL_HAS_VALUE_EXPR_P (decl))
1564 return;
1566 /* All occurrences of this decl in final gimplified code will be
1567 replaced by indirection. Setting DECL_VALUE_EXPR does two
1568 things: First, it lets the rest of the gimplifier know what
1569 replacement to use. Second, it lets the debug info know
1570 where to find the value. */
1571 ptr_type = build_pointer_type (TREE_TYPE (decl));
1572 addr = create_tmp_var (ptr_type, get_name (decl));
1573 DECL_IGNORED_P (addr) = 0;
1574 t = build_fold_indirect_ref (addr);
1575 TREE_THIS_NOTRAP (t) = 1;
1576 SET_DECL_VALUE_EXPR (decl, t);
1577 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1579 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1580 max_int_size_in_bytes (TREE_TYPE (decl)));
1581 /* The call has been built for a variable-sized object. */
1582 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1583 t = fold_convert (ptr_type, t);
1584 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1586 gimplify_and_add (t, seq_p);
1589 /* A helper function to be called via walk_tree. Mark all labels under *TP
1590 as being forced. To be called for DECL_INITIAL of static variables. */
1592 static tree
1593 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1595 if (TYPE_P (*tp))
1596 *walk_subtrees = 0;
1597 if (TREE_CODE (*tp) == LABEL_DECL)
1599 FORCED_LABEL (*tp) = 1;
1600 cfun->has_forced_label_in_static = 1;
1603 return NULL_TREE;
1606 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1607 and initialization explicit. */
1609 static enum gimplify_status
1610 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1612 tree stmt = *stmt_p;
1613 tree decl = DECL_EXPR_DECL (stmt);
1615 *stmt_p = NULL_TREE;
1617 if (TREE_TYPE (decl) == error_mark_node)
1618 return GS_ERROR;
1620 if ((TREE_CODE (decl) == TYPE_DECL
1621 || VAR_P (decl))
1622 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1624 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1625 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1626 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1629 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1630 in case its size expressions contain problematic nodes like CALL_EXPR. */
1631 if (TREE_CODE (decl) == TYPE_DECL
1632 && DECL_ORIGINAL_TYPE (decl)
1633 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1635 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1636 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1637 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1640 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1642 tree init = DECL_INITIAL (decl);
1643 bool is_vla = false;
1645 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1646 || (!TREE_STATIC (decl)
1647 && flag_stack_check == GENERIC_STACK_CHECK
1648 && compare_tree_int (DECL_SIZE_UNIT (decl),
1649 STACK_CHECK_MAX_VAR_SIZE) > 0))
1651 gimplify_vla_decl (decl, seq_p);
1652 is_vla = true;
1655 if (asan_poisoned_variables
1656 && !is_vla
1657 && TREE_ADDRESSABLE (decl)
1658 && !TREE_STATIC (decl)
1659 && !DECL_HAS_VALUE_EXPR_P (decl)
1660 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1661 && dbg_cnt (asan_use_after_scope))
1663 asan_poisoned_variables->add (decl);
1664 asan_poison_variable (decl, false, seq_p);
1665 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1666 gimplify_ctxp->live_switch_vars->add (decl);
1669 /* Some front ends do not explicitly declare all anonymous
1670 artificial variables. We compensate here by declaring the
1671 variables, though it would be better if the front ends would
1672 explicitly declare them. */
1673 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1674 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1675 gimple_add_tmp_var (decl);
1677 if (init && init != error_mark_node)
1679 if (!TREE_STATIC (decl))
1681 DECL_INITIAL (decl) = NULL_TREE;
1682 init = build2 (INIT_EXPR, void_type_node, decl, init);
1683 gimplify_and_add (init, seq_p);
1684 ggc_free (init);
1686 else
1687 /* We must still examine initializers for static variables
1688 as they may contain a label address. */
1689 walk_tree (&init, force_labels_r, NULL, NULL);
1693 return GS_ALL_DONE;
1696 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1697 and replacing the LOOP_EXPR with goto, but if the loop contains an
1698 EXIT_EXPR, we need to append a label for it to jump to. */
1700 static enum gimplify_status
1701 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1703 tree saved_label = gimplify_ctxp->exit_label;
1704 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1706 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1708 gimplify_ctxp->exit_label = NULL_TREE;
1710 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1712 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1714 if (gimplify_ctxp->exit_label)
1715 gimplify_seq_add_stmt (pre_p,
1716 gimple_build_label (gimplify_ctxp->exit_label));
1718 gimplify_ctxp->exit_label = saved_label;
1720 *expr_p = NULL;
1721 return GS_ALL_DONE;
1724 /* Gimplify a statement list onto a sequence. These may be created either
1725 by an enlightened front-end, or by shortcut_cond_expr. */
1727 static enum gimplify_status
1728 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1730 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1732 tree_stmt_iterator i = tsi_start (*expr_p);
1734 while (!tsi_end_p (i))
1736 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1737 tsi_delink (&i);
1740 if (temp)
1742 *expr_p = temp;
1743 return GS_OK;
1746 return GS_ALL_DONE;
1749 /* Callback for walk_gimple_seq. */
1751 static tree
1752 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1753 struct walk_stmt_info *wi)
1755 gimple *stmt = gsi_stmt (*gsi_p);
1757 *handled_ops_p = true;
1758 switch (gimple_code (stmt))
1760 case GIMPLE_TRY:
1761 /* A compiler-generated cleanup or a user-written try block.
1762 If it's empty, don't dive into it--that would result in
1763 worse location info. */
1764 if (gimple_try_eval (stmt) == NULL)
1766 wi->info = stmt;
1767 return integer_zero_node;
1769 /* Fall through. */
1770 case GIMPLE_BIND:
1771 case GIMPLE_CATCH:
1772 case GIMPLE_EH_FILTER:
1773 case GIMPLE_TRANSACTION:
1774 /* Walk the sub-statements. */
1775 *handled_ops_p = false;
1776 break;
1777 case GIMPLE_CALL:
1778 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1780 *handled_ops_p = false;
1781 break;
1783 /* Fall through. */
1784 default:
1785 /* Save the first "real" statement (not a decl/lexical scope/...). */
1786 wi->info = stmt;
1787 return integer_zero_node;
1789 return NULL_TREE;
1792 /* Possibly warn about unreachable statements between switch's controlling
1793 expression and the first case. SEQ is the body of a switch expression. */
1795 static void
1796 maybe_warn_switch_unreachable (gimple_seq seq)
1798 if (!warn_switch_unreachable
1799 /* This warning doesn't play well with Fortran when optimizations
1800 are on. */
1801 || lang_GNU_Fortran ()
1802 || seq == NULL)
1803 return;
1805 struct walk_stmt_info wi;
1806 memset (&wi, 0, sizeof (wi));
1807 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1808 gimple *stmt = (gimple *) wi.info;
1810 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1812 if (gimple_code (stmt) == GIMPLE_GOTO
1813 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1814 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1815 /* Don't warn for compiler-generated gotos. These occur
1816 in Duff's devices, for example. */;
1817 else
1818 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1819 "statement will never be executed");
1824 /* A label entry that pairs label and a location. */
1825 struct label_entry
1827 tree label;
1828 location_t loc;
1831 /* Find LABEL in vector of label entries VEC. */
1833 static struct label_entry *
1834 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1836 unsigned int i;
1837 struct label_entry *l;
1839 FOR_EACH_VEC_ELT (*vec, i, l)
1840 if (l->label == label)
1841 return l;
1842 return NULL;
1845 /* Return true if LABEL, a LABEL_DECL, represents a case label
1846 in a vector of labels CASES. */
1848 static bool
1849 case_label_p (const vec<tree> *cases, tree label)
1851 unsigned int i;
1852 tree l;
1854 FOR_EACH_VEC_ELT (*cases, i, l)
1855 if (CASE_LABEL (l) == label)
1856 return true;
1857 return false;
1860 /* Find the last statement in a scope STMT. */
1862 static gimple *
1863 last_stmt_in_scope (gimple *stmt)
1865 if (!stmt)
1866 return NULL;
1868 switch (gimple_code (stmt))
1870 case GIMPLE_BIND:
1872 gbind *bind = as_a <gbind *> (stmt);
1873 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1874 return last_stmt_in_scope (stmt);
1877 case GIMPLE_TRY:
1879 gtry *try_stmt = as_a <gtry *> (stmt);
1880 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1881 gimple *last_eval = last_stmt_in_scope (stmt);
1882 if (gimple_stmt_may_fallthru (last_eval)
1883 && (last_eval == NULL
1884 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1885 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1887 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1888 return last_stmt_in_scope (stmt);
1890 else
1891 return last_eval;
1894 default:
1895 return stmt;
1899 /* Collect interesting labels in LABELS and return the statement preceding
1900 another case label, or a user-defined label. */
1902 static gimple *
1903 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1904 auto_vec <struct label_entry> *labels)
1906 gimple *prev = NULL;
1910 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1911 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1913 /* Nested scope. Only look at the last statement of
1914 the innermost scope. */
1915 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1916 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1917 if (last)
1919 prev = last;
1920 /* It might be a label without a location. Use the
1921 location of the scope then. */
1922 if (!gimple_has_location (prev))
1923 gimple_set_location (prev, bind_loc);
1925 gsi_next (gsi_p);
1926 continue;
1929 /* Ifs are tricky. */
1930 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1932 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1933 tree false_lab = gimple_cond_false_label (cond_stmt);
1934 location_t if_loc = gimple_location (cond_stmt);
1936 /* If we have e.g.
1937 if (i > 1) goto <D.2259>; else goto D;
1938 we can't do much with the else-branch. */
1939 if (!DECL_ARTIFICIAL (false_lab))
1940 break;
1942 /* Go on until the false label, then one step back. */
1943 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1945 gimple *stmt = gsi_stmt (*gsi_p);
1946 if (gimple_code (stmt) == GIMPLE_LABEL
1947 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1948 break;
1951 /* Not found? Oops. */
1952 if (gsi_end_p (*gsi_p))
1953 break;
1955 struct label_entry l = { false_lab, if_loc };
1956 labels->safe_push (l);
1958 /* Go to the last statement of the then branch. */
1959 gsi_prev (gsi_p);
1961 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1962 <D.1759>:
1963 <stmt>;
1964 goto <D.1761>;
1965 <D.1760>:
1967 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1968 && !gimple_has_location (gsi_stmt (*gsi_p)))
1970 /* Look at the statement before, it might be
1971 attribute fallthrough, in which case don't warn. */
1972 gsi_prev (gsi_p);
1973 bool fallthru_before_dest
1974 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1975 gsi_next (gsi_p);
1976 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1977 if (!fallthru_before_dest)
1979 struct label_entry l = { goto_dest, if_loc };
1980 labels->safe_push (l);
1983 /* And move back. */
1984 gsi_next (gsi_p);
1987 /* Remember the last statement. Skip labels that are of no interest
1988 to us. */
1989 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1991 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1992 if (find_label_entry (labels, label))
1993 prev = gsi_stmt (*gsi_p);
1995 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1997 else
1998 prev = gsi_stmt (*gsi_p);
1999 gsi_next (gsi_p);
2001 while (!gsi_end_p (*gsi_p)
2002 /* Stop if we find a case or a user-defined label. */
2003 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2004 || !gimple_has_location (gsi_stmt (*gsi_p))));
2006 return prev;
2009 /* Return true if the switch fallthough warning should occur. LABEL is
2010 the label statement that we're falling through to. */
2012 static bool
2013 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2015 gimple_stmt_iterator gsi = *gsi_p;
2017 /* Don't warn if the label is marked with a "falls through" comment. */
2018 if (FALLTHROUGH_LABEL_P (label))
2019 return false;
2021 /* Don't warn for non-case labels followed by a statement:
2022 case 0:
2023 foo ();
2024 label:
2025 bar ();
2026 as these are likely intentional. */
2027 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2029 tree l;
2030 while (!gsi_end_p (gsi)
2031 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2032 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2033 && !case_label_p (&gimplify_ctxp->case_labels, l))
2034 gsi_next (&gsi);
2035 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2036 return false;
2039 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2040 immediately breaks. */
2041 gsi = *gsi_p;
2043 /* Skip all immediately following labels. */
2044 while (!gsi_end_p (gsi)
2045 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2046 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2047 gsi_next (&gsi);
2049 /* { ... something; default:; } */
2050 if (gsi_end_p (gsi)
2051 /* { ... something; default: break; } or
2052 { ... something; default: goto L; } */
2053 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2054 /* { ... something; default: return; } */
2055 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2056 return false;
2058 return true;
2061 /* Callback for walk_gimple_seq. */
2063 static tree
2064 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2065 struct walk_stmt_info *)
2067 gimple *stmt = gsi_stmt (*gsi_p);
2069 *handled_ops_p = true;
2070 switch (gimple_code (stmt))
2072 case GIMPLE_TRY:
2073 case GIMPLE_BIND:
2074 case GIMPLE_CATCH:
2075 case GIMPLE_EH_FILTER:
2076 case GIMPLE_TRANSACTION:
2077 /* Walk the sub-statements. */
2078 *handled_ops_p = false;
2079 break;
2081 /* Find a sequence of form:
2083 GIMPLE_LABEL
2084 [...]
2085 <may fallthru stmt>
2086 GIMPLE_LABEL
2088 and possibly warn. */
2089 case GIMPLE_LABEL:
2091 /* Found a label. Skip all immediately following labels. */
2092 while (!gsi_end_p (*gsi_p)
2093 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2094 gsi_next (gsi_p);
2096 /* There might be no more statements. */
2097 if (gsi_end_p (*gsi_p))
2098 return integer_zero_node;
2100 /* Vector of labels that fall through. */
2101 auto_vec <struct label_entry> labels;
2102 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2104 /* There might be no more statements. */
2105 if (gsi_end_p (*gsi_p))
2106 return integer_zero_node;
2108 gimple *next = gsi_stmt (*gsi_p);
2109 tree label;
2110 /* If what follows is a label, then we may have a fallthrough. */
2111 if (gimple_code (next) == GIMPLE_LABEL
2112 && gimple_has_location (next)
2113 && (label = gimple_label_label (as_a <glabel *> (next)))
2114 && prev != NULL)
2116 struct label_entry *l;
2117 bool warned_p = false;
2118 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2119 /* Quiet. */;
2120 else if (gimple_code (prev) == GIMPLE_LABEL
2121 && (label = gimple_label_label (as_a <glabel *> (prev)))
2122 && (l = find_label_entry (&labels, label)))
2123 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2124 "this statement may fall through");
2125 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2126 /* Try to be clever and don't warn when the statement
2127 can't actually fall through. */
2128 && gimple_stmt_may_fallthru (prev)
2129 && gimple_has_location (prev))
2130 warned_p = warning_at (gimple_location (prev),
2131 OPT_Wimplicit_fallthrough_,
2132 "this statement may fall through");
2133 if (warned_p)
2134 inform (gimple_location (next), "here");
2136 /* Mark this label as processed so as to prevent multiple
2137 warnings in nested switches. */
2138 FALLTHROUGH_LABEL_P (label) = true;
2140 /* So that next warn_implicit_fallthrough_r will start looking for
2141 a new sequence starting with this label. */
2142 gsi_prev (gsi_p);
2145 break;
2146 default:
2147 break;
2149 return NULL_TREE;
2152 /* Warn when a switch case falls through. */
2154 static void
2155 maybe_warn_implicit_fallthrough (gimple_seq seq)
2157 if (!warn_implicit_fallthrough)
2158 return;
2160 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2161 if (!(lang_GNU_C ()
2162 || lang_GNU_CXX ()
2163 || lang_GNU_OBJC ()))
2164 return;
2166 struct walk_stmt_info wi;
2167 memset (&wi, 0, sizeof (wi));
2168 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2171 /* Callback for walk_gimple_seq. */
2173 static tree
2174 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2175 struct walk_stmt_info *)
2177 gimple *stmt = gsi_stmt (*gsi_p);
2179 *handled_ops_p = true;
2180 switch (gimple_code (stmt))
2182 case GIMPLE_TRY:
2183 case GIMPLE_BIND:
2184 case GIMPLE_CATCH:
2185 case GIMPLE_EH_FILTER:
2186 case GIMPLE_TRANSACTION:
2187 /* Walk the sub-statements. */
2188 *handled_ops_p = false;
2189 break;
2190 case GIMPLE_CALL:
2191 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2193 gsi_remove (gsi_p, true);
2194 if (gsi_end_p (*gsi_p))
2195 return integer_zero_node;
2197 bool found = false;
2198 location_t loc = gimple_location (stmt);
2200 gimple_stmt_iterator gsi2 = *gsi_p;
2201 stmt = gsi_stmt (gsi2);
2202 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2204 /* Go on until the artificial label. */
2205 tree goto_dest = gimple_goto_dest (stmt);
2206 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2208 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2209 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2210 == goto_dest)
2211 break;
2214 /* Not found? Stop. */
2215 if (gsi_end_p (gsi2))
2216 break;
2218 /* Look one past it. */
2219 gsi_next (&gsi2);
2222 /* We're looking for a case label or default label here. */
2223 while (!gsi_end_p (gsi2))
2225 stmt = gsi_stmt (gsi2);
2226 if (gimple_code (stmt) == GIMPLE_LABEL)
2228 tree label = gimple_label_label (as_a <glabel *> (stmt));
2229 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2231 found = true;
2232 break;
2235 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2237 else
2238 /* Something other is not expected. */
2239 break;
2240 gsi_next (&gsi2);
2242 if (!found)
2243 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2244 "a case label or default label");
2246 break;
2247 default:
2248 break;
2250 return NULL_TREE;
2253 /* Expand all FALLTHROUGH () calls in SEQ. */
2255 static void
2256 expand_FALLTHROUGH (gimple_seq *seq_p)
2258 struct walk_stmt_info wi;
2259 memset (&wi, 0, sizeof (wi));
2260 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2264 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2265 branch to. */
2267 static enum gimplify_status
2268 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2270 tree switch_expr = *expr_p;
2271 gimple_seq switch_body_seq = NULL;
2272 enum gimplify_status ret;
2273 tree index_type = TREE_TYPE (switch_expr);
2274 if (index_type == NULL_TREE)
2275 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2277 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2278 fb_rvalue);
2279 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2280 return ret;
2282 if (SWITCH_BODY (switch_expr))
2284 vec<tree> labels;
2285 vec<tree> saved_labels;
2286 hash_set<tree> *saved_live_switch_vars = NULL;
2287 tree default_case = NULL_TREE;
2288 gswitch *switch_stmt;
2290 /* If someone can be bothered to fill in the labels, they can
2291 be bothered to null out the body too. */
2292 gcc_assert (!SWITCH_LABELS (switch_expr));
2294 /* Save old labels, get new ones from body, then restore the old
2295 labels. Save all the things from the switch body to append after. */
2296 saved_labels = gimplify_ctxp->case_labels;
2297 gimplify_ctxp->case_labels.create (8);
2299 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2300 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2301 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2302 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2303 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2304 else
2305 gimplify_ctxp->live_switch_vars = NULL;
2307 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2308 gimplify_ctxp->in_switch_expr = true;
2310 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2312 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2313 maybe_warn_switch_unreachable (switch_body_seq);
2314 maybe_warn_implicit_fallthrough (switch_body_seq);
2315 /* Only do this for the outermost GIMPLE_SWITCH. */
2316 if (!gimplify_ctxp->in_switch_expr)
2317 expand_FALLTHROUGH (&switch_body_seq);
2319 labels = gimplify_ctxp->case_labels;
2320 gimplify_ctxp->case_labels = saved_labels;
2322 if (gimplify_ctxp->live_switch_vars)
2324 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2325 delete gimplify_ctxp->live_switch_vars;
2327 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2329 preprocess_case_label_vec_for_gimple (labels, index_type,
2330 &default_case);
2332 if (!default_case)
2334 glabel *new_default;
2336 default_case
2337 = build_case_label (NULL_TREE, NULL_TREE,
2338 create_artificial_label (UNKNOWN_LOCATION));
2339 new_default = gimple_build_label (CASE_LABEL (default_case));
2340 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2343 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2344 default_case, labels);
2345 gimplify_seq_add_stmt (pre_p, switch_stmt);
2346 gimplify_seq_add_seq (pre_p, switch_body_seq);
2347 labels.release ();
2349 else
2350 gcc_assert (SWITCH_LABELS (switch_expr));
2352 return GS_ALL_DONE;
2355 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2357 static enum gimplify_status
2358 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2360 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2361 == current_function_decl);
2363 tree label = LABEL_EXPR_LABEL (*expr_p);
2364 glabel *label_stmt = gimple_build_label (label);
2365 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2366 gimplify_seq_add_stmt (pre_p, label_stmt);
2368 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2369 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2370 NOT_TAKEN));
2371 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2372 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2373 TAKEN));
2375 return GS_ALL_DONE;
2378 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2380 static enum gimplify_status
2381 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2383 struct gimplify_ctx *ctxp;
2384 glabel *label_stmt;
2386 /* Invalid programs can play Duff's Device type games with, for example,
2387 #pragma omp parallel. At least in the C front end, we don't
2388 detect such invalid branches until after gimplification, in the
2389 diagnose_omp_blocks pass. */
2390 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2391 if (ctxp->case_labels.exists ())
2392 break;
2394 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2395 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2396 ctxp->case_labels.safe_push (*expr_p);
2397 gimplify_seq_add_stmt (pre_p, label_stmt);
2399 return GS_ALL_DONE;
2402 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2403 if necessary. */
2405 tree
2406 build_and_jump (tree *label_p)
2408 if (label_p == NULL)
2409 /* If there's nowhere to jump, just fall through. */
2410 return NULL_TREE;
2412 if (*label_p == NULL_TREE)
2414 tree label = create_artificial_label (UNKNOWN_LOCATION);
2415 *label_p = label;
2418 return build1 (GOTO_EXPR, void_type_node, *label_p);
2421 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2422 This also involves building a label to jump to and communicating it to
2423 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2425 static enum gimplify_status
2426 gimplify_exit_expr (tree *expr_p)
2428 tree cond = TREE_OPERAND (*expr_p, 0);
2429 tree expr;
2431 expr = build_and_jump (&gimplify_ctxp->exit_label);
2432 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2433 *expr_p = expr;
2435 return GS_OK;
2438 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2439 different from its canonical type, wrap the whole thing inside a
2440 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2441 type.
2443 The canonical type of a COMPONENT_REF is the type of the field being
2444 referenced--unless the field is a bit-field which can be read directly
2445 in a smaller mode, in which case the canonical type is the
2446 sign-appropriate type corresponding to that mode. */
2448 static void
2449 canonicalize_component_ref (tree *expr_p)
2451 tree expr = *expr_p;
2452 tree type;
2454 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2456 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2457 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2458 else
2459 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2461 /* One could argue that all the stuff below is not necessary for
2462 the non-bitfield case and declare it a FE error if type
2463 adjustment would be needed. */
2464 if (TREE_TYPE (expr) != type)
2466 #ifdef ENABLE_TYPES_CHECKING
2467 tree old_type = TREE_TYPE (expr);
2468 #endif
2469 int type_quals;
2471 /* We need to preserve qualifiers and propagate them from
2472 operand 0. */
2473 type_quals = TYPE_QUALS (type)
2474 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2475 if (TYPE_QUALS (type) != type_quals)
2476 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2478 /* Set the type of the COMPONENT_REF to the underlying type. */
2479 TREE_TYPE (expr) = type;
2481 #ifdef ENABLE_TYPES_CHECKING
2482 /* It is now a FE error, if the conversion from the canonical
2483 type to the original expression type is not useless. */
2484 gcc_assert (useless_type_conversion_p (old_type, type));
2485 #endif
2489 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2490 to foo, embed that change in the ADDR_EXPR by converting
2491 T array[U];
2492 (T *)&array
2494 &array[L]
2495 where L is the lower bound. For simplicity, only do this for constant
2496 lower bound.
2497 The constraint is that the type of &array[L] is trivially convertible
2498 to T *. */
2500 static void
2501 canonicalize_addr_expr (tree *expr_p)
2503 tree expr = *expr_p;
2504 tree addr_expr = TREE_OPERAND (expr, 0);
2505 tree datype, ddatype, pddatype;
2507 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2508 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2509 || TREE_CODE (addr_expr) != ADDR_EXPR)
2510 return;
2512 /* The addr_expr type should be a pointer to an array. */
2513 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2514 if (TREE_CODE (datype) != ARRAY_TYPE)
2515 return;
2517 /* The pointer to element type shall be trivially convertible to
2518 the expression pointer type. */
2519 ddatype = TREE_TYPE (datype);
2520 pddatype = build_pointer_type (ddatype);
2521 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2522 pddatype))
2523 return;
2525 /* The lower bound and element sizes must be constant. */
2526 if (!TYPE_SIZE_UNIT (ddatype)
2527 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2528 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2529 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2530 return;
2532 /* All checks succeeded. Build a new node to merge the cast. */
2533 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2534 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2535 NULL_TREE, NULL_TREE);
2536 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2538 /* We can have stripped a required restrict qualifier above. */
2539 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2540 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2543 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2544 underneath as appropriate. */
2546 static enum gimplify_status
2547 gimplify_conversion (tree *expr_p)
2549 location_t loc = EXPR_LOCATION (*expr_p);
2550 gcc_assert (CONVERT_EXPR_P (*expr_p));
2552 /* Then strip away all but the outermost conversion. */
2553 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2555 /* And remove the outermost conversion if it's useless. */
2556 if (tree_ssa_useless_type_conversion (*expr_p))
2557 *expr_p = TREE_OPERAND (*expr_p, 0);
2559 /* If we still have a conversion at the toplevel,
2560 then canonicalize some constructs. */
2561 if (CONVERT_EXPR_P (*expr_p))
2563 tree sub = TREE_OPERAND (*expr_p, 0);
2565 /* If a NOP conversion is changing the type of a COMPONENT_REF
2566 expression, then canonicalize its type now in order to expose more
2567 redundant conversions. */
2568 if (TREE_CODE (sub) == COMPONENT_REF)
2569 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2571 /* If a NOP conversion is changing a pointer to array of foo
2572 to a pointer to foo, embed that change in the ADDR_EXPR. */
2573 else if (TREE_CODE (sub) == ADDR_EXPR)
2574 canonicalize_addr_expr (expr_p);
2577 /* If we have a conversion to a non-register type force the
2578 use of a VIEW_CONVERT_EXPR instead. */
2579 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2580 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2581 TREE_OPERAND (*expr_p, 0));
2583 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2584 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2585 TREE_SET_CODE (*expr_p, NOP_EXPR);
2587 return GS_OK;
2590 /* Nonlocal VLAs seen in the current function. */
2591 static hash_set<tree> *nonlocal_vlas;
2593 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2594 static tree nonlocal_vla_vars;
2596 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2597 DECL_VALUE_EXPR, and it's worth re-examining things. */
2599 static enum gimplify_status
2600 gimplify_var_or_parm_decl (tree *expr_p)
2602 tree decl = *expr_p;
2604 /* ??? If this is a local variable, and it has not been seen in any
2605 outer BIND_EXPR, then it's probably the result of a duplicate
2606 declaration, for which we've already issued an error. It would
2607 be really nice if the front end wouldn't leak these at all.
2608 Currently the only known culprit is C++ destructors, as seen
2609 in g++.old-deja/g++.jason/binding.C. */
2610 if (VAR_P (decl)
2611 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2612 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2613 && decl_function_context (decl) == current_function_decl)
2615 gcc_assert (seen_error ());
2616 return GS_ERROR;
2619 /* When within an OMP context, notice uses of variables. */
2620 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2621 return GS_ALL_DONE;
2623 /* If the decl is an alias for another expression, substitute it now. */
2624 if (DECL_HAS_VALUE_EXPR_P (decl))
2626 tree value_expr = DECL_VALUE_EXPR (decl);
2628 /* For referenced nonlocal VLAs add a decl for debugging purposes
2629 to the current function. */
2630 if (VAR_P (decl)
2631 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2632 && nonlocal_vlas != NULL
2633 && TREE_CODE (value_expr) == INDIRECT_REF
2634 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2635 && decl_function_context (decl) != current_function_decl)
2637 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2638 while (ctx
2639 && (ctx->region_type == ORT_WORKSHARE
2640 || ctx->region_type == ORT_SIMD
2641 || ctx->region_type == ORT_ACC))
2642 ctx = ctx->outer_context;
2643 if (!ctx && !nonlocal_vlas->add (decl))
2645 tree copy = copy_node (decl);
2647 lang_hooks.dup_lang_specific_decl (copy);
2648 SET_DECL_RTL (copy, 0);
2649 TREE_USED (copy) = 1;
2650 DECL_CHAIN (copy) = nonlocal_vla_vars;
2651 nonlocal_vla_vars = copy;
2652 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2653 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2657 *expr_p = unshare_expr (value_expr);
2658 return GS_OK;
2661 return GS_ALL_DONE;
2664 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2666 static void
2667 recalculate_side_effects (tree t)
2669 enum tree_code code = TREE_CODE (t);
2670 int len = TREE_OPERAND_LENGTH (t);
2671 int i;
2673 switch (TREE_CODE_CLASS (code))
2675 case tcc_expression:
2676 switch (code)
2678 case INIT_EXPR:
2679 case MODIFY_EXPR:
2680 case VA_ARG_EXPR:
2681 case PREDECREMENT_EXPR:
2682 case PREINCREMENT_EXPR:
2683 case POSTDECREMENT_EXPR:
2684 case POSTINCREMENT_EXPR:
2685 /* All of these have side-effects, no matter what their
2686 operands are. */
2687 return;
2689 default:
2690 break;
2692 /* Fall through. */
2694 case tcc_comparison: /* a comparison expression */
2695 case tcc_unary: /* a unary arithmetic expression */
2696 case tcc_binary: /* a binary arithmetic expression */
2697 case tcc_reference: /* a reference */
2698 case tcc_vl_exp: /* a function call */
2699 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2700 for (i = 0; i < len; ++i)
2702 tree op = TREE_OPERAND (t, i);
2703 if (op && TREE_SIDE_EFFECTS (op))
2704 TREE_SIDE_EFFECTS (t) = 1;
2706 break;
2708 case tcc_constant:
2709 /* No side-effects. */
2710 return;
2712 default:
2713 gcc_unreachable ();
2717 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2718 node *EXPR_P.
2720 compound_lval
2721 : min_lval '[' val ']'
2722 | min_lval '.' ID
2723 | compound_lval '[' val ']'
2724 | compound_lval '.' ID
2726 This is not part of the original SIMPLE definition, which separates
2727 array and member references, but it seems reasonable to handle them
2728 together. Also, this way we don't run into problems with union
2729 aliasing; gcc requires that for accesses through a union to alias, the
2730 union reference must be explicit, which was not always the case when we
2731 were splitting up array and member refs.
2733 PRE_P points to the sequence where side effects that must happen before
2734 *EXPR_P should be stored.
2736 POST_P points to the sequence where side effects that must happen after
2737 *EXPR_P should be stored. */
2739 static enum gimplify_status
2740 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2741 fallback_t fallback)
2743 tree *p;
2744 enum gimplify_status ret = GS_ALL_DONE, tret;
2745 int i;
2746 location_t loc = EXPR_LOCATION (*expr_p);
2747 tree expr = *expr_p;
2749 /* Create a stack of the subexpressions so later we can walk them in
2750 order from inner to outer. */
2751 auto_vec<tree, 10> expr_stack;
2753 /* We can handle anything that get_inner_reference can deal with. */
2754 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2756 restart:
2757 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2758 if (TREE_CODE (*p) == INDIRECT_REF)
2759 *p = fold_indirect_ref_loc (loc, *p);
2761 if (handled_component_p (*p))
2763 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2764 additional COMPONENT_REFs. */
2765 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2766 && gimplify_var_or_parm_decl (p) == GS_OK)
2767 goto restart;
2768 else
2769 break;
2771 expr_stack.safe_push (*p);
2774 gcc_assert (expr_stack.length ());
2776 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2777 walked through and P points to the innermost expression.
2779 Java requires that we elaborated nodes in source order. That
2780 means we must gimplify the inner expression followed by each of
2781 the indices, in order. But we can't gimplify the inner
2782 expression until we deal with any variable bounds, sizes, or
2783 positions in order to deal with PLACEHOLDER_EXPRs.
2785 So we do this in three steps. First we deal with the annotations
2786 for any variables in the components, then we gimplify the base,
2787 then we gimplify any indices, from left to right. */
2788 for (i = expr_stack.length () - 1; i >= 0; i--)
2790 tree t = expr_stack[i];
2792 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2794 /* Gimplify the low bound and element type size and put them into
2795 the ARRAY_REF. If these values are set, they have already been
2796 gimplified. */
2797 if (TREE_OPERAND (t, 2) == NULL_TREE)
2799 tree low = unshare_expr (array_ref_low_bound (t));
2800 if (!is_gimple_min_invariant (low))
2802 TREE_OPERAND (t, 2) = low;
2803 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2804 post_p, is_gimple_reg,
2805 fb_rvalue);
2806 ret = MIN (ret, tret);
2809 else
2811 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2812 is_gimple_reg, fb_rvalue);
2813 ret = MIN (ret, tret);
2816 if (TREE_OPERAND (t, 3) == NULL_TREE)
2818 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2819 tree elmt_size = unshare_expr (array_ref_element_size (t));
2820 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2822 /* Divide the element size by the alignment of the element
2823 type (above). */
2824 elmt_size
2825 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2827 if (!is_gimple_min_invariant (elmt_size))
2829 TREE_OPERAND (t, 3) = elmt_size;
2830 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2831 post_p, is_gimple_reg,
2832 fb_rvalue);
2833 ret = MIN (ret, tret);
2836 else
2838 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2839 is_gimple_reg, fb_rvalue);
2840 ret = MIN (ret, tret);
2843 else if (TREE_CODE (t) == COMPONENT_REF)
2845 /* Set the field offset into T and gimplify it. */
2846 if (TREE_OPERAND (t, 2) == NULL_TREE)
2848 tree offset = unshare_expr (component_ref_field_offset (t));
2849 tree field = TREE_OPERAND (t, 1);
2850 tree factor
2851 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2853 /* Divide the offset by its alignment. */
2854 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2856 if (!is_gimple_min_invariant (offset))
2858 TREE_OPERAND (t, 2) = offset;
2859 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2860 post_p, is_gimple_reg,
2861 fb_rvalue);
2862 ret = MIN (ret, tret);
2865 else
2867 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2868 is_gimple_reg, fb_rvalue);
2869 ret = MIN (ret, tret);
2874 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2875 so as to match the min_lval predicate. Failure to do so may result
2876 in the creation of large aggregate temporaries. */
2877 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2878 fallback | fb_lvalue);
2879 ret = MIN (ret, tret);
2881 /* And finally, the indices and operands of ARRAY_REF. During this
2882 loop we also remove any useless conversions. */
2883 for (; expr_stack.length () > 0; )
2885 tree t = expr_stack.pop ();
2887 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2889 /* Gimplify the dimension. */
2890 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2892 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2893 is_gimple_val, fb_rvalue);
2894 ret = MIN (ret, tret);
2898 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2900 /* The innermost expression P may have originally had
2901 TREE_SIDE_EFFECTS set which would have caused all the outer
2902 expressions in *EXPR_P leading to P to also have had
2903 TREE_SIDE_EFFECTS set. */
2904 recalculate_side_effects (t);
2907 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2908 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2910 canonicalize_component_ref (expr_p);
2913 expr_stack.release ();
2915 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2917 return ret;
2920 /* Gimplify the self modifying expression pointed to by EXPR_P
2921 (++, --, +=, -=).
2923 PRE_P points to the list where side effects that must happen before
2924 *EXPR_P should be stored.
2926 POST_P points to the list where side effects that must happen after
2927 *EXPR_P should be stored.
2929 WANT_VALUE is nonzero iff we want to use the value of this expression
2930 in another expression.
2932 ARITH_TYPE is the type the computation should be performed in. */
2934 enum gimplify_status
2935 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2936 bool want_value, tree arith_type)
2938 enum tree_code code;
2939 tree lhs, lvalue, rhs, t1;
2940 gimple_seq post = NULL, *orig_post_p = post_p;
2941 bool postfix;
2942 enum tree_code arith_code;
2943 enum gimplify_status ret;
2944 location_t loc = EXPR_LOCATION (*expr_p);
2946 code = TREE_CODE (*expr_p);
2948 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2949 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2951 /* Prefix or postfix? */
2952 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2953 /* Faster to treat as prefix if result is not used. */
2954 postfix = want_value;
2955 else
2956 postfix = false;
2958 /* For postfix, make sure the inner expression's post side effects
2959 are executed after side effects from this expression. */
2960 if (postfix)
2961 post_p = &post;
2963 /* Add or subtract? */
2964 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2965 arith_code = PLUS_EXPR;
2966 else
2967 arith_code = MINUS_EXPR;
2969 /* Gimplify the LHS into a GIMPLE lvalue. */
2970 lvalue = TREE_OPERAND (*expr_p, 0);
2971 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2972 if (ret == GS_ERROR)
2973 return ret;
2975 /* Extract the operands to the arithmetic operation. */
2976 lhs = lvalue;
2977 rhs = TREE_OPERAND (*expr_p, 1);
2979 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2980 that as the result value and in the postqueue operation. */
2981 if (postfix)
2983 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2984 if (ret == GS_ERROR)
2985 return ret;
2987 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2990 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2991 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2993 rhs = convert_to_ptrofftype_loc (loc, rhs);
2994 if (arith_code == MINUS_EXPR)
2995 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2996 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2998 else
2999 t1 = fold_convert (TREE_TYPE (*expr_p),
3000 fold_build2 (arith_code, arith_type,
3001 fold_convert (arith_type, lhs),
3002 fold_convert (arith_type, rhs)));
3004 if (postfix)
3006 gimplify_assign (lvalue, t1, pre_p);
3007 gimplify_seq_add_seq (orig_post_p, post);
3008 *expr_p = lhs;
3009 return GS_ALL_DONE;
3011 else
3013 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3014 return GS_OK;
3018 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3020 static void
3021 maybe_with_size_expr (tree *expr_p)
3023 tree expr = *expr_p;
3024 tree type = TREE_TYPE (expr);
3025 tree size;
3027 /* If we've already wrapped this or the type is error_mark_node, we can't do
3028 anything. */
3029 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3030 || type == error_mark_node)
3031 return;
3033 /* If the size isn't known or is a constant, we have nothing to do. */
3034 size = TYPE_SIZE_UNIT (type);
3035 if (!size || TREE_CODE (size) == INTEGER_CST)
3036 return;
3038 /* Otherwise, make a WITH_SIZE_EXPR. */
3039 size = unshare_expr (size);
3040 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3041 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3044 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3045 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3046 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3047 gimplified to an SSA name. */
3049 enum gimplify_status
3050 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3051 bool allow_ssa)
3053 bool (*test) (tree);
3054 fallback_t fb;
3056 /* In general, we allow lvalues for function arguments to avoid
3057 extra overhead of copying large aggregates out of even larger
3058 aggregates into temporaries only to copy the temporaries to
3059 the argument list. Make optimizers happy by pulling out to
3060 temporaries those types that fit in registers. */
3061 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3062 test = is_gimple_val, fb = fb_rvalue;
3063 else
3065 test = is_gimple_lvalue, fb = fb_either;
3066 /* Also strip a TARGET_EXPR that would force an extra copy. */
3067 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3069 tree init = TARGET_EXPR_INITIAL (*arg_p);
3070 if (init
3071 && !VOID_TYPE_P (TREE_TYPE (init)))
3072 *arg_p = init;
3076 /* If this is a variable sized type, we must remember the size. */
3077 maybe_with_size_expr (arg_p);
3079 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3080 /* Make sure arguments have the same location as the function call
3081 itself. */
3082 protected_set_expr_location (*arg_p, call_location);
3084 /* There is a sequence point before a function call. Side effects in
3085 the argument list must occur before the actual call. So, when
3086 gimplifying arguments, force gimplify_expr to use an internal
3087 post queue which is then appended to the end of PRE_P. */
3088 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3091 /* Don't fold inside offloading or taskreg regions: it can break code by
3092 adding decl references that weren't in the source. We'll do it during
3093 omplower pass instead. */
3095 static bool
3096 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3098 struct gimplify_omp_ctx *ctx;
3099 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3100 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3101 return false;
3102 return fold_stmt (gsi);
3105 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3106 with the pointer to the proper cilk frame. */
3107 static void
3108 gimplify_cilk_detach (gimple_seq *pre_p)
3110 tree frame = cfun->cilk_frame_decl;
3111 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3112 frame);
3113 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3114 ptrf);
3115 gimplify_seq_add_stmt(pre_p, detach);
3118 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3119 WANT_VALUE is true if the result of the call is desired. */
3121 static enum gimplify_status
3122 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3124 tree fndecl, parms, p, fnptrtype;
3125 enum gimplify_status ret;
3126 int i, nargs;
3127 gcall *call;
3128 bool builtin_va_start_p = false;
3129 location_t loc = EXPR_LOCATION (*expr_p);
3131 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3133 /* For reliable diagnostics during inlining, it is necessary that
3134 every call_expr be annotated with file and line. */
3135 if (! EXPR_HAS_LOCATION (*expr_p))
3136 SET_EXPR_LOCATION (*expr_p, input_location);
3138 /* Gimplify internal functions created in the FEs. */
3139 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3141 if (want_value)
3142 return GS_ALL_DONE;
3144 nargs = call_expr_nargs (*expr_p);
3145 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3146 auto_vec<tree> vargs (nargs);
3148 for (i = 0; i < nargs; i++)
3150 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3151 EXPR_LOCATION (*expr_p));
3152 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3155 if (EXPR_CILK_SPAWN (*expr_p))
3156 gimplify_cilk_detach (pre_p);
3157 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3158 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3159 gimplify_seq_add_stmt (pre_p, call);
3160 return GS_ALL_DONE;
3163 /* This may be a call to a builtin function.
3165 Builtin function calls may be transformed into different
3166 (and more efficient) builtin function calls under certain
3167 circumstances. Unfortunately, gimplification can muck things
3168 up enough that the builtin expanders are not aware that certain
3169 transformations are still valid.
3171 So we attempt transformation/gimplification of the call before
3172 we gimplify the CALL_EXPR. At this time we do not manage to
3173 transform all calls in the same manner as the expanders do, but
3174 we do transform most of them. */
3175 fndecl = get_callee_fndecl (*expr_p);
3176 if (fndecl
3177 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3178 switch (DECL_FUNCTION_CODE (fndecl))
3180 CASE_BUILT_IN_ALLOCA:
3181 /* If the call has been built for a variable-sized object, then we
3182 want to restore the stack level when the enclosing BIND_EXPR is
3183 exited to reclaim the allocated space; otherwise, we precisely
3184 need to do the opposite and preserve the latest stack level. */
3185 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3186 gimplify_ctxp->save_stack = true;
3187 else
3188 gimplify_ctxp->keep_stack = true;
3189 break;
3191 case BUILT_IN_VA_START:
3193 builtin_va_start_p = TRUE;
3194 if (call_expr_nargs (*expr_p) < 2)
3196 error ("too few arguments to function %<va_start%>");
3197 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3198 return GS_OK;
3201 if (fold_builtin_next_arg (*expr_p, true))
3203 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3204 return GS_OK;
3206 break;
3209 default:
3212 if (fndecl && DECL_BUILT_IN (fndecl))
3214 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3215 if (new_tree && new_tree != *expr_p)
3217 /* There was a transformation of this call which computes the
3218 same value, but in a more efficient way. Return and try
3219 again. */
3220 *expr_p = new_tree;
3221 return GS_OK;
3225 /* Remember the original function pointer type. */
3226 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3228 /* There is a sequence point before the call, so any side effects in
3229 the calling expression must occur before the actual call. Force
3230 gimplify_expr to use an internal post queue. */
3231 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3232 is_gimple_call_addr, fb_rvalue);
3234 nargs = call_expr_nargs (*expr_p);
3236 /* Get argument types for verification. */
3237 fndecl = get_callee_fndecl (*expr_p);
3238 parms = NULL_TREE;
3239 if (fndecl)
3240 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3241 else
3242 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3244 if (fndecl && DECL_ARGUMENTS (fndecl))
3245 p = DECL_ARGUMENTS (fndecl);
3246 else if (parms)
3247 p = parms;
3248 else
3249 p = NULL_TREE;
3250 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3253 /* If the last argument is __builtin_va_arg_pack () and it is not
3254 passed as a named argument, decrease the number of CALL_EXPR
3255 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3256 if (!p
3257 && i < nargs
3258 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3260 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3261 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3263 if (last_arg_fndecl
3264 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3265 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3266 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3268 tree call = *expr_p;
3270 --nargs;
3271 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3272 CALL_EXPR_FN (call),
3273 nargs, CALL_EXPR_ARGP (call));
3275 /* Copy all CALL_EXPR flags, location and block, except
3276 CALL_EXPR_VA_ARG_PACK flag. */
3277 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3278 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3279 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3280 = CALL_EXPR_RETURN_SLOT_OPT (call);
3281 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3282 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3284 /* Set CALL_EXPR_VA_ARG_PACK. */
3285 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3289 /* If the call returns twice then after building the CFG the call
3290 argument computations will no longer dominate the call because
3291 we add an abnormal incoming edge to the call. So do not use SSA
3292 vars there. */
3293 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3295 /* Gimplify the function arguments. */
3296 if (nargs > 0)
3298 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3299 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3300 PUSH_ARGS_REVERSED ? i-- : i++)
3302 enum gimplify_status t;
3304 /* Avoid gimplifying the second argument to va_start, which needs to
3305 be the plain PARM_DECL. */
3306 if ((i != 1) || !builtin_va_start_p)
3308 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3309 EXPR_LOCATION (*expr_p), ! returns_twice);
3311 if (t == GS_ERROR)
3312 ret = GS_ERROR;
3317 /* Gimplify the static chain. */
3318 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3320 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3321 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3322 else
3324 enum gimplify_status t;
3325 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3326 EXPR_LOCATION (*expr_p), ! returns_twice);
3327 if (t == GS_ERROR)
3328 ret = GS_ERROR;
3332 /* Verify the function result. */
3333 if (want_value && fndecl
3334 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3336 error_at (loc, "using result of function returning %<void%>");
3337 ret = GS_ERROR;
3340 /* Try this again in case gimplification exposed something. */
3341 if (ret != GS_ERROR)
3343 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3345 if (new_tree && new_tree != *expr_p)
3347 /* There was a transformation of this call which computes the
3348 same value, but in a more efficient way. Return and try
3349 again. */
3350 *expr_p = new_tree;
3351 return GS_OK;
3354 else
3356 *expr_p = error_mark_node;
3357 return GS_ERROR;
3360 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3361 decl. This allows us to eliminate redundant or useless
3362 calls to "const" functions. */
3363 if (TREE_CODE (*expr_p) == CALL_EXPR)
3365 int flags = call_expr_flags (*expr_p);
3366 if (flags & (ECF_CONST | ECF_PURE)
3367 /* An infinite loop is considered a side effect. */
3368 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3369 TREE_SIDE_EFFECTS (*expr_p) = 0;
3372 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3373 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3374 form and delegate the creation of a GIMPLE_CALL to
3375 gimplify_modify_expr. This is always possible because when
3376 WANT_VALUE is true, the caller wants the result of this call into
3377 a temporary, which means that we will emit an INIT_EXPR in
3378 internal_get_tmp_var which will then be handled by
3379 gimplify_modify_expr. */
3380 if (!want_value)
3382 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3383 have to do is replicate it as a GIMPLE_CALL tuple. */
3384 gimple_stmt_iterator gsi;
3385 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3386 notice_special_calls (call);
3387 if (EXPR_CILK_SPAWN (*expr_p))
3388 gimplify_cilk_detach (pre_p);
3389 gimplify_seq_add_stmt (pre_p, call);
3390 gsi = gsi_last (*pre_p);
3391 maybe_fold_stmt (&gsi);
3392 *expr_p = NULL_TREE;
3394 else
3395 /* Remember the original function type. */
3396 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3397 CALL_EXPR_FN (*expr_p));
3399 return ret;
3402 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3403 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3405 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3406 condition is true or false, respectively. If null, we should generate
3407 our own to skip over the evaluation of this specific expression.
3409 LOCUS is the source location of the COND_EXPR.
3411 This function is the tree equivalent of do_jump.
3413 shortcut_cond_r should only be called by shortcut_cond_expr. */
3415 static tree
3416 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3417 location_t locus)
3419 tree local_label = NULL_TREE;
3420 tree t, expr = NULL;
3422 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3423 retain the shortcut semantics. Just insert the gotos here;
3424 shortcut_cond_expr will append the real blocks later. */
3425 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3427 location_t new_locus;
3429 /* Turn if (a && b) into
3431 if (a); else goto no;
3432 if (b) goto yes; else goto no;
3433 (no:) */
3435 if (false_label_p == NULL)
3436 false_label_p = &local_label;
3438 /* Keep the original source location on the first 'if'. */
3439 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3440 append_to_statement_list (t, &expr);
3442 /* Set the source location of the && on the second 'if'. */
3443 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3444 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3445 new_locus);
3446 append_to_statement_list (t, &expr);
3448 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3450 location_t new_locus;
3452 /* Turn if (a || b) into
3454 if (a) goto yes;
3455 if (b) goto yes; else goto no;
3456 (yes:) */
3458 if (true_label_p == NULL)
3459 true_label_p = &local_label;
3461 /* Keep the original source location on the first 'if'. */
3462 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3463 append_to_statement_list (t, &expr);
3465 /* Set the source location of the || on the second 'if'. */
3466 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3467 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3468 new_locus);
3469 append_to_statement_list (t, &expr);
3471 else if (TREE_CODE (pred) == COND_EXPR
3472 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3473 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3475 location_t new_locus;
3477 /* As long as we're messing with gotos, turn if (a ? b : c) into
3478 if (a)
3479 if (b) goto yes; else goto no;
3480 else
3481 if (c) goto yes; else goto no;
3483 Don't do this if one of the arms has void type, which can happen
3484 in C++ when the arm is throw. */
3486 /* Keep the original source location on the first 'if'. Set the source
3487 location of the ? on the second 'if'. */
3488 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3489 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3490 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3491 false_label_p, locus),
3492 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3493 false_label_p, new_locus));
3495 else
3497 expr = build3 (COND_EXPR, void_type_node, pred,
3498 build_and_jump (true_label_p),
3499 build_and_jump (false_label_p));
3500 SET_EXPR_LOCATION (expr, locus);
3503 if (local_label)
3505 t = build1 (LABEL_EXPR, void_type_node, local_label);
3506 append_to_statement_list (t, &expr);
3509 return expr;
3512 /* Given a conditional expression EXPR with short-circuit boolean
3513 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3514 predicate apart into the equivalent sequence of conditionals. */
3516 static tree
3517 shortcut_cond_expr (tree expr)
3519 tree pred = TREE_OPERAND (expr, 0);
3520 tree then_ = TREE_OPERAND (expr, 1);
3521 tree else_ = TREE_OPERAND (expr, 2);
3522 tree true_label, false_label, end_label, t;
3523 tree *true_label_p;
3524 tree *false_label_p;
3525 bool emit_end, emit_false, jump_over_else;
3526 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3527 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3529 /* First do simple transformations. */
3530 if (!else_se)
3532 /* If there is no 'else', turn
3533 if (a && b) then c
3534 into
3535 if (a) if (b) then c. */
3536 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3538 /* Keep the original source location on the first 'if'. */
3539 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3540 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3541 /* Set the source location of the && on the second 'if'. */
3542 if (EXPR_HAS_LOCATION (pred))
3543 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3544 then_ = shortcut_cond_expr (expr);
3545 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3546 pred = TREE_OPERAND (pred, 0);
3547 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3548 SET_EXPR_LOCATION (expr, locus);
3552 if (!then_se)
3554 /* If there is no 'then', turn
3555 if (a || b); else d
3556 into
3557 if (a); else if (b); else d. */
3558 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3560 /* Keep the original source location on the first 'if'. */
3561 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3562 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3563 /* Set the source location of the || on the second 'if'. */
3564 if (EXPR_HAS_LOCATION (pred))
3565 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3566 else_ = shortcut_cond_expr (expr);
3567 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3568 pred = TREE_OPERAND (pred, 0);
3569 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3570 SET_EXPR_LOCATION (expr, locus);
3574 /* If we're done, great. */
3575 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3576 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3577 return expr;
3579 /* Otherwise we need to mess with gotos. Change
3580 if (a) c; else d;
3582 if (a); else goto no;
3583 c; goto end;
3584 no: d; end:
3585 and recursively gimplify the condition. */
3587 true_label = false_label = end_label = NULL_TREE;
3589 /* If our arms just jump somewhere, hijack those labels so we don't
3590 generate jumps to jumps. */
3592 if (then_
3593 && TREE_CODE (then_) == GOTO_EXPR
3594 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3596 true_label = GOTO_DESTINATION (then_);
3597 then_ = NULL;
3598 then_se = false;
3601 if (else_
3602 && TREE_CODE (else_) == GOTO_EXPR
3603 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3605 false_label = GOTO_DESTINATION (else_);
3606 else_ = NULL;
3607 else_se = false;
3610 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3611 if (true_label)
3612 true_label_p = &true_label;
3613 else
3614 true_label_p = NULL;
3616 /* The 'else' branch also needs a label if it contains interesting code. */
3617 if (false_label || else_se)
3618 false_label_p = &false_label;
3619 else
3620 false_label_p = NULL;
3622 /* If there was nothing else in our arms, just forward the label(s). */
3623 if (!then_se && !else_se)
3624 return shortcut_cond_r (pred, true_label_p, false_label_p,
3625 EXPR_LOC_OR_LOC (expr, input_location));
3627 /* If our last subexpression already has a terminal label, reuse it. */
3628 if (else_se)
3629 t = expr_last (else_);
3630 else if (then_se)
3631 t = expr_last (then_);
3632 else
3633 t = NULL;
3634 if (t && TREE_CODE (t) == LABEL_EXPR)
3635 end_label = LABEL_EXPR_LABEL (t);
3637 /* If we don't care about jumping to the 'else' branch, jump to the end
3638 if the condition is false. */
3639 if (!false_label_p)
3640 false_label_p = &end_label;
3642 /* We only want to emit these labels if we aren't hijacking them. */
3643 emit_end = (end_label == NULL_TREE);
3644 emit_false = (false_label == NULL_TREE);
3646 /* We only emit the jump over the else clause if we have to--if the
3647 then clause may fall through. Otherwise we can wind up with a
3648 useless jump and a useless label at the end of gimplified code,
3649 which will cause us to think that this conditional as a whole
3650 falls through even if it doesn't. If we then inline a function
3651 which ends with such a condition, that can cause us to issue an
3652 inappropriate warning about control reaching the end of a
3653 non-void function. */
3654 jump_over_else = block_may_fallthru (then_);
3656 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3657 EXPR_LOC_OR_LOC (expr, input_location));
3659 expr = NULL;
3660 append_to_statement_list (pred, &expr);
3662 append_to_statement_list (then_, &expr);
3663 if (else_se)
3665 if (jump_over_else)
3667 tree last = expr_last (expr);
3668 t = build_and_jump (&end_label);
3669 if (EXPR_HAS_LOCATION (last))
3670 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3671 append_to_statement_list (t, &expr);
3673 if (emit_false)
3675 t = build1 (LABEL_EXPR, void_type_node, false_label);
3676 append_to_statement_list (t, &expr);
3678 append_to_statement_list (else_, &expr);
3680 if (emit_end && end_label)
3682 t = build1 (LABEL_EXPR, void_type_node, end_label);
3683 append_to_statement_list (t, &expr);
3686 return expr;
3689 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3691 tree
3692 gimple_boolify (tree expr)
3694 tree type = TREE_TYPE (expr);
3695 location_t loc = EXPR_LOCATION (expr);
3697 if (TREE_CODE (expr) == NE_EXPR
3698 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3699 && integer_zerop (TREE_OPERAND (expr, 1)))
3701 tree call = TREE_OPERAND (expr, 0);
3702 tree fn = get_callee_fndecl (call);
3704 /* For __builtin_expect ((long) (x), y) recurse into x as well
3705 if x is truth_value_p. */
3706 if (fn
3707 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3708 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3709 && call_expr_nargs (call) == 2)
3711 tree arg = CALL_EXPR_ARG (call, 0);
3712 if (arg)
3714 if (TREE_CODE (arg) == NOP_EXPR
3715 && TREE_TYPE (arg) == TREE_TYPE (call))
3716 arg = TREE_OPERAND (arg, 0);
3717 if (truth_value_p (TREE_CODE (arg)))
3719 arg = gimple_boolify (arg);
3720 CALL_EXPR_ARG (call, 0)
3721 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3727 switch (TREE_CODE (expr))
3729 case TRUTH_AND_EXPR:
3730 case TRUTH_OR_EXPR:
3731 case TRUTH_XOR_EXPR:
3732 case TRUTH_ANDIF_EXPR:
3733 case TRUTH_ORIF_EXPR:
3734 /* Also boolify the arguments of truth exprs. */
3735 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3736 /* FALLTHRU */
3738 case TRUTH_NOT_EXPR:
3739 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3741 /* These expressions always produce boolean results. */
3742 if (TREE_CODE (type) != BOOLEAN_TYPE)
3743 TREE_TYPE (expr) = boolean_type_node;
3744 return expr;
3746 case ANNOTATE_EXPR:
3747 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3749 case annot_expr_ivdep_kind:
3750 case annot_expr_no_vector_kind:
3751 case annot_expr_vector_kind:
3752 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3753 if (TREE_CODE (type) != BOOLEAN_TYPE)
3754 TREE_TYPE (expr) = boolean_type_node;
3755 return expr;
3756 default:
3757 gcc_unreachable ();
3760 default:
3761 if (COMPARISON_CLASS_P (expr))
3763 /* There expressions always prduce boolean results. */
3764 if (TREE_CODE (type) != BOOLEAN_TYPE)
3765 TREE_TYPE (expr) = boolean_type_node;
3766 return expr;
3768 /* Other expressions that get here must have boolean values, but
3769 might need to be converted to the appropriate mode. */
3770 if (TREE_CODE (type) == BOOLEAN_TYPE)
3771 return expr;
3772 return fold_convert_loc (loc, boolean_type_node, expr);
3776 /* Given a conditional expression *EXPR_P without side effects, gimplify
3777 its operands. New statements are inserted to PRE_P. */
3779 static enum gimplify_status
3780 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3782 tree expr = *expr_p, cond;
3783 enum gimplify_status ret, tret;
3784 enum tree_code code;
3786 cond = gimple_boolify (COND_EXPR_COND (expr));
3788 /* We need to handle && and || specially, as their gimplification
3789 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3790 code = TREE_CODE (cond);
3791 if (code == TRUTH_ANDIF_EXPR)
3792 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3793 else if (code == TRUTH_ORIF_EXPR)
3794 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3795 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3796 COND_EXPR_COND (*expr_p) = cond;
3798 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3799 is_gimple_val, fb_rvalue);
3800 ret = MIN (ret, tret);
3801 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3802 is_gimple_val, fb_rvalue);
3804 return MIN (ret, tret);
3807 /* Return true if evaluating EXPR could trap.
3808 EXPR is GENERIC, while tree_could_trap_p can be called
3809 only on GIMPLE. */
3811 static bool
3812 generic_expr_could_trap_p (tree expr)
3814 unsigned i, n;
3816 if (!expr || is_gimple_val (expr))
3817 return false;
3819 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3820 return true;
3822 n = TREE_OPERAND_LENGTH (expr);
3823 for (i = 0; i < n; i++)
3824 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3825 return true;
3827 return false;
3830 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3831 into
3833 if (p) if (p)
3834 t1 = a; a;
3835 else or else
3836 t1 = b; b;
3839 The second form is used when *EXPR_P is of type void.
3841 PRE_P points to the list where side effects that must happen before
3842 *EXPR_P should be stored. */
3844 static enum gimplify_status
3845 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3847 tree expr = *expr_p;
3848 tree type = TREE_TYPE (expr);
3849 location_t loc = EXPR_LOCATION (expr);
3850 tree tmp, arm1, arm2;
3851 enum gimplify_status ret;
3852 tree label_true, label_false, label_cont;
3853 bool have_then_clause_p, have_else_clause_p;
3854 gcond *cond_stmt;
3855 enum tree_code pred_code;
3856 gimple_seq seq = NULL;
3858 /* If this COND_EXPR has a value, copy the values into a temporary within
3859 the arms. */
3860 if (!VOID_TYPE_P (type))
3862 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3863 tree result;
3865 /* If either an rvalue is ok or we do not require an lvalue, create the
3866 temporary. But we cannot do that if the type is addressable. */
3867 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3868 && !TREE_ADDRESSABLE (type))
3870 if (gimplify_ctxp->allow_rhs_cond_expr
3871 /* If either branch has side effects or could trap, it can't be
3872 evaluated unconditionally. */
3873 && !TREE_SIDE_EFFECTS (then_)
3874 && !generic_expr_could_trap_p (then_)
3875 && !TREE_SIDE_EFFECTS (else_)
3876 && !generic_expr_could_trap_p (else_))
3877 return gimplify_pure_cond_expr (expr_p, pre_p);
3879 tmp = create_tmp_var (type, "iftmp");
3880 result = tmp;
3883 /* Otherwise, only create and copy references to the values. */
3884 else
3886 type = build_pointer_type (type);
3888 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3889 then_ = build_fold_addr_expr_loc (loc, then_);
3891 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3892 else_ = build_fold_addr_expr_loc (loc, else_);
3894 expr
3895 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3897 tmp = create_tmp_var (type, "iftmp");
3898 result = build_simple_mem_ref_loc (loc, tmp);
3901 /* Build the new then clause, `tmp = then_;'. But don't build the
3902 assignment if the value is void; in C++ it can be if it's a throw. */
3903 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3904 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3906 /* Similarly, build the new else clause, `tmp = else_;'. */
3907 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3908 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3910 TREE_TYPE (expr) = void_type_node;
3911 recalculate_side_effects (expr);
3913 /* Move the COND_EXPR to the prequeue. */
3914 gimplify_stmt (&expr, pre_p);
3916 *expr_p = result;
3917 return GS_ALL_DONE;
3920 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3921 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3922 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3923 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3925 /* Make sure the condition has BOOLEAN_TYPE. */
3926 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3928 /* Break apart && and || conditions. */
3929 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3930 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3932 expr = shortcut_cond_expr (expr);
3934 if (expr != *expr_p)
3936 *expr_p = expr;
3938 /* We can't rely on gimplify_expr to re-gimplify the expanded
3939 form properly, as cleanups might cause the target labels to be
3940 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3941 set up a conditional context. */
3942 gimple_push_condition ();
3943 gimplify_stmt (expr_p, &seq);
3944 gimple_pop_condition (pre_p);
3945 gimple_seq_add_seq (pre_p, seq);
3947 return GS_ALL_DONE;
3951 /* Now do the normal gimplification. */
3953 /* Gimplify condition. */
3954 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3955 fb_rvalue);
3956 if (ret == GS_ERROR)
3957 return GS_ERROR;
3958 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3960 gimple_push_condition ();
3962 have_then_clause_p = have_else_clause_p = false;
3963 if (TREE_OPERAND (expr, 1) != NULL
3964 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3965 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3966 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3967 == current_function_decl)
3968 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3969 have different locations, otherwise we end up with incorrect
3970 location information on the branches. */
3971 && (optimize
3972 || !EXPR_HAS_LOCATION (expr)
3973 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3974 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3976 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3977 have_then_clause_p = true;
3979 else
3980 label_true = create_artificial_label (UNKNOWN_LOCATION);
3981 if (TREE_OPERAND (expr, 2) != NULL
3982 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3983 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3984 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3985 == current_function_decl)
3986 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3987 have different locations, otherwise we end up with incorrect
3988 location information on the branches. */
3989 && (optimize
3990 || !EXPR_HAS_LOCATION (expr)
3991 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3992 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3994 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3995 have_else_clause_p = true;
3997 else
3998 label_false = create_artificial_label (UNKNOWN_LOCATION);
4000 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4001 &arm2);
4002 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4003 label_false);
4004 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4005 gimplify_seq_add_stmt (&seq, cond_stmt);
4006 gimple_stmt_iterator gsi = gsi_last (seq);
4007 maybe_fold_stmt (&gsi);
4009 label_cont = NULL_TREE;
4010 if (!have_then_clause_p)
4012 /* For if (...) {} else { code; } put label_true after
4013 the else block. */
4014 if (TREE_OPERAND (expr, 1) == NULL_TREE
4015 && !have_else_clause_p
4016 && TREE_OPERAND (expr, 2) != NULL_TREE)
4017 label_cont = label_true;
4018 else
4020 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4021 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4022 /* For if (...) { code; } else {} or
4023 if (...) { code; } else goto label; or
4024 if (...) { code; return; } else { ... }
4025 label_cont isn't needed. */
4026 if (!have_else_clause_p
4027 && TREE_OPERAND (expr, 2) != NULL_TREE
4028 && gimple_seq_may_fallthru (seq))
4030 gimple *g;
4031 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4033 g = gimple_build_goto (label_cont);
4035 /* GIMPLE_COND's are very low level; they have embedded
4036 gotos. This particular embedded goto should not be marked
4037 with the location of the original COND_EXPR, as it would
4038 correspond to the COND_EXPR's condition, not the ELSE or the
4039 THEN arms. To avoid marking it with the wrong location, flag
4040 it as "no location". */
4041 gimple_set_do_not_emit_location (g);
4043 gimplify_seq_add_stmt (&seq, g);
4047 if (!have_else_clause_p)
4049 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4050 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4052 if (label_cont)
4053 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4055 gimple_pop_condition (pre_p);
4056 gimple_seq_add_seq (pre_p, seq);
4058 if (ret == GS_ERROR)
4059 ; /* Do nothing. */
4060 else if (have_then_clause_p || have_else_clause_p)
4061 ret = GS_ALL_DONE;
4062 else
4064 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4065 expr = TREE_OPERAND (expr, 0);
4066 gimplify_stmt (&expr, pre_p);
4069 *expr_p = NULL;
4070 return ret;
4073 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4074 to be marked addressable.
4076 We cannot rely on such an expression being directly markable if a temporary
4077 has been created by the gimplification. In this case, we create another
4078 temporary and initialize it with a copy, which will become a store after we
4079 mark it addressable. This can happen if the front-end passed us something
4080 that it could not mark addressable yet, like a Fortran pass-by-reference
4081 parameter (int) floatvar. */
4083 static void
4084 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4086 while (handled_component_p (*expr_p))
4087 expr_p = &TREE_OPERAND (*expr_p, 0);
4088 if (is_gimple_reg (*expr_p))
4090 /* Do not allow an SSA name as the temporary. */
4091 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4092 DECL_GIMPLE_REG_P (var) = 0;
4093 *expr_p = var;
4097 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4098 a call to __builtin_memcpy. */
4100 static enum gimplify_status
4101 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4102 gimple_seq *seq_p)
4104 tree t, to, to_ptr, from, from_ptr;
4105 gcall *gs;
4106 location_t loc = EXPR_LOCATION (*expr_p);
4108 to = TREE_OPERAND (*expr_p, 0);
4109 from = TREE_OPERAND (*expr_p, 1);
4111 /* Mark the RHS addressable. Beware that it may not be possible to do so
4112 directly if a temporary has been created by the gimplification. */
4113 prepare_gimple_addressable (&from, seq_p);
4115 mark_addressable (from);
4116 from_ptr = build_fold_addr_expr_loc (loc, from);
4117 gimplify_arg (&from_ptr, seq_p, loc);
4119 mark_addressable (to);
4120 to_ptr = build_fold_addr_expr_loc (loc, to);
4121 gimplify_arg (&to_ptr, seq_p, loc);
4123 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4125 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4127 if (want_value)
4129 /* tmp = memcpy() */
4130 t = create_tmp_var (TREE_TYPE (to_ptr));
4131 gimple_call_set_lhs (gs, t);
4132 gimplify_seq_add_stmt (seq_p, gs);
4134 *expr_p = build_simple_mem_ref (t);
4135 return GS_ALL_DONE;
4138 gimplify_seq_add_stmt (seq_p, gs);
4139 *expr_p = NULL;
4140 return GS_ALL_DONE;
4143 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4144 a call to __builtin_memset. In this case we know that the RHS is
4145 a CONSTRUCTOR with an empty element list. */
4147 static enum gimplify_status
4148 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4149 gimple_seq *seq_p)
4151 tree t, from, to, to_ptr;
4152 gcall *gs;
4153 location_t loc = EXPR_LOCATION (*expr_p);
4155 /* Assert our assumptions, to abort instead of producing wrong code
4156 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4157 not be immediately exposed. */
4158 from = TREE_OPERAND (*expr_p, 1);
4159 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4160 from = TREE_OPERAND (from, 0);
4162 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4163 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4165 /* Now proceed. */
4166 to = TREE_OPERAND (*expr_p, 0);
4168 to_ptr = build_fold_addr_expr_loc (loc, to);
4169 gimplify_arg (&to_ptr, seq_p, loc);
4170 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4172 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4174 if (want_value)
4176 /* tmp = memset() */
4177 t = create_tmp_var (TREE_TYPE (to_ptr));
4178 gimple_call_set_lhs (gs, t);
4179 gimplify_seq_add_stmt (seq_p, gs);
4181 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4182 return GS_ALL_DONE;
4185 gimplify_seq_add_stmt (seq_p, gs);
4186 *expr_p = NULL;
4187 return GS_ALL_DONE;
4190 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4191 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4192 assignment. Return non-null if we detect a potential overlap. */
4194 struct gimplify_init_ctor_preeval_data
4196 /* The base decl of the lhs object. May be NULL, in which case we
4197 have to assume the lhs is indirect. */
4198 tree lhs_base_decl;
4200 /* The alias set of the lhs object. */
4201 alias_set_type lhs_alias_set;
4204 static tree
4205 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4207 struct gimplify_init_ctor_preeval_data *data
4208 = (struct gimplify_init_ctor_preeval_data *) xdata;
4209 tree t = *tp;
4211 /* If we find the base object, obviously we have overlap. */
4212 if (data->lhs_base_decl == t)
4213 return t;
4215 /* If the constructor component is indirect, determine if we have a
4216 potential overlap with the lhs. The only bits of information we
4217 have to go on at this point are addressability and alias sets. */
4218 if ((INDIRECT_REF_P (t)
4219 || TREE_CODE (t) == MEM_REF)
4220 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4221 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4222 return t;
4224 /* If the constructor component is a call, determine if it can hide a
4225 potential overlap with the lhs through an INDIRECT_REF like above.
4226 ??? Ugh - this is completely broken. In fact this whole analysis
4227 doesn't look conservative. */
4228 if (TREE_CODE (t) == CALL_EXPR)
4230 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4232 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4233 if (POINTER_TYPE_P (TREE_VALUE (type))
4234 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4235 && alias_sets_conflict_p (data->lhs_alias_set,
4236 get_alias_set
4237 (TREE_TYPE (TREE_VALUE (type)))))
4238 return t;
4241 if (IS_TYPE_OR_DECL_P (t))
4242 *walk_subtrees = 0;
4243 return NULL;
4246 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4247 force values that overlap with the lhs (as described by *DATA)
4248 into temporaries. */
4250 static void
4251 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4252 struct gimplify_init_ctor_preeval_data *data)
4254 enum gimplify_status one;
4256 /* If the value is constant, then there's nothing to pre-evaluate. */
4257 if (TREE_CONSTANT (*expr_p))
4259 /* Ensure it does not have side effects, it might contain a reference to
4260 the object we're initializing. */
4261 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4262 return;
4265 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4266 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4267 return;
4269 /* Recurse for nested constructors. */
4270 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4272 unsigned HOST_WIDE_INT ix;
4273 constructor_elt *ce;
4274 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4276 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4277 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4279 return;
4282 /* If this is a variable sized type, we must remember the size. */
4283 maybe_with_size_expr (expr_p);
4285 /* Gimplify the constructor element to something appropriate for the rhs
4286 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4287 the gimplifier will consider this a store to memory. Doing this
4288 gimplification now means that we won't have to deal with complicated
4289 language-specific trees, nor trees like SAVE_EXPR that can induce
4290 exponential search behavior. */
4291 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4292 if (one == GS_ERROR)
4294 *expr_p = NULL;
4295 return;
4298 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4299 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4300 always be true for all scalars, since is_gimple_mem_rhs insists on a
4301 temporary variable for them. */
4302 if (DECL_P (*expr_p))
4303 return;
4305 /* If this is of variable size, we have no choice but to assume it doesn't
4306 overlap since we can't make a temporary for it. */
4307 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4308 return;
4310 /* Otherwise, we must search for overlap ... */
4311 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4312 return;
4314 /* ... and if found, force the value into a temporary. */
4315 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4318 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4319 a RANGE_EXPR in a CONSTRUCTOR for an array.
4321 var = lower;
4322 loop_entry:
4323 object[var] = value;
4324 if (var == upper)
4325 goto loop_exit;
4326 var = var + 1;
4327 goto loop_entry;
4328 loop_exit:
4330 We increment var _after_ the loop exit check because we might otherwise
4331 fail if upper == TYPE_MAX_VALUE (type for upper).
4333 Note that we never have to deal with SAVE_EXPRs here, because this has
4334 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4336 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4337 gimple_seq *, bool);
4339 static void
4340 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4341 tree value, tree array_elt_type,
4342 gimple_seq *pre_p, bool cleared)
4344 tree loop_entry_label, loop_exit_label, fall_thru_label;
4345 tree var, var_type, cref, tmp;
4347 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4348 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4349 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4351 /* Create and initialize the index variable. */
4352 var_type = TREE_TYPE (upper);
4353 var = create_tmp_var (var_type);
4354 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4356 /* Add the loop entry label. */
4357 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4359 /* Build the reference. */
4360 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4361 var, NULL_TREE, NULL_TREE);
4363 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4364 the store. Otherwise just assign value to the reference. */
4366 if (TREE_CODE (value) == CONSTRUCTOR)
4367 /* NB we might have to call ourself recursively through
4368 gimplify_init_ctor_eval if the value is a constructor. */
4369 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4370 pre_p, cleared);
4371 else
4372 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4374 /* We exit the loop when the index var is equal to the upper bound. */
4375 gimplify_seq_add_stmt (pre_p,
4376 gimple_build_cond (EQ_EXPR, var, upper,
4377 loop_exit_label, fall_thru_label));
4379 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4381 /* Otherwise, increment the index var... */
4382 tmp = build2 (PLUS_EXPR, var_type, var,
4383 fold_convert (var_type, integer_one_node));
4384 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4386 /* ...and jump back to the loop entry. */
4387 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4389 /* Add the loop exit label. */
4390 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4393 /* Return true if FDECL is accessing a field that is zero sized. */
4395 static bool
4396 zero_sized_field_decl (const_tree fdecl)
4398 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4399 && integer_zerop (DECL_SIZE (fdecl)))
4400 return true;
4401 return false;
4404 /* Return true if TYPE is zero sized. */
4406 static bool
4407 zero_sized_type (const_tree type)
4409 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4410 && integer_zerop (TYPE_SIZE (type)))
4411 return true;
4412 return false;
4415 /* A subroutine of gimplify_init_constructor. Generate individual
4416 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4417 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4418 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4419 zeroed first. */
4421 static void
4422 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4423 gimple_seq *pre_p, bool cleared)
4425 tree array_elt_type = NULL;
4426 unsigned HOST_WIDE_INT ix;
4427 tree purpose, value;
4429 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4430 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4432 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4434 tree cref;
4436 /* NULL values are created above for gimplification errors. */
4437 if (value == NULL)
4438 continue;
4440 if (cleared && initializer_zerop (value))
4441 continue;
4443 /* ??? Here's to hoping the front end fills in all of the indices,
4444 so we don't have to figure out what's missing ourselves. */
4445 gcc_assert (purpose);
4447 /* Skip zero-sized fields, unless value has side-effects. This can
4448 happen with calls to functions returning a zero-sized type, which
4449 we shouldn't discard. As a number of downstream passes don't
4450 expect sets of zero-sized fields, we rely on the gimplification of
4451 the MODIFY_EXPR we make below to drop the assignment statement. */
4452 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4453 continue;
4455 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4456 whole range. */
4457 if (TREE_CODE (purpose) == RANGE_EXPR)
4459 tree lower = TREE_OPERAND (purpose, 0);
4460 tree upper = TREE_OPERAND (purpose, 1);
4462 /* If the lower bound is equal to upper, just treat it as if
4463 upper was the index. */
4464 if (simple_cst_equal (lower, upper))
4465 purpose = upper;
4466 else
4468 gimplify_init_ctor_eval_range (object, lower, upper, value,
4469 array_elt_type, pre_p, cleared);
4470 continue;
4474 if (array_elt_type)
4476 /* Do not use bitsizetype for ARRAY_REF indices. */
4477 if (TYPE_DOMAIN (TREE_TYPE (object)))
4478 purpose
4479 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4480 purpose);
4481 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4482 purpose, NULL_TREE, NULL_TREE);
4484 else
4486 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4487 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4488 unshare_expr (object), purpose, NULL_TREE);
4491 if (TREE_CODE (value) == CONSTRUCTOR
4492 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4493 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4494 pre_p, cleared);
4495 else
4497 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4498 gimplify_and_add (init, pre_p);
4499 ggc_free (init);
4504 /* Return the appropriate RHS predicate for this LHS. */
4506 gimple_predicate
4507 rhs_predicate_for (tree lhs)
4509 if (is_gimple_reg (lhs))
4510 return is_gimple_reg_rhs_or_call;
4511 else
4512 return is_gimple_mem_rhs_or_call;
4515 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4516 before the LHS has been gimplified. */
4518 static gimple_predicate
4519 initial_rhs_predicate_for (tree lhs)
4521 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4522 return is_gimple_reg_rhs_or_call;
4523 else
4524 return is_gimple_mem_rhs_or_call;
4527 /* Gimplify a C99 compound literal expression. This just means adding
4528 the DECL_EXPR before the current statement and using its anonymous
4529 decl instead. */
4531 static enum gimplify_status
4532 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4533 bool (*gimple_test_f) (tree),
4534 fallback_t fallback)
4536 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4537 tree decl = DECL_EXPR_DECL (decl_s);
4538 tree init = DECL_INITIAL (decl);
4539 /* Mark the decl as addressable if the compound literal
4540 expression is addressable now, otherwise it is marked too late
4541 after we gimplify the initialization expression. */
4542 if (TREE_ADDRESSABLE (*expr_p))
4543 TREE_ADDRESSABLE (decl) = 1;
4544 /* Otherwise, if we don't need an lvalue and have a literal directly
4545 substitute it. Check if it matches the gimple predicate, as
4546 otherwise we'd generate a new temporary, and we can as well just
4547 use the decl we already have. */
4548 else if (!TREE_ADDRESSABLE (decl)
4549 && init
4550 && (fallback & fb_lvalue) == 0
4551 && gimple_test_f (init))
4553 *expr_p = init;
4554 return GS_OK;
4557 /* Preliminarily mark non-addressed complex variables as eligible
4558 for promotion to gimple registers. We'll transform their uses
4559 as we find them. */
4560 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4561 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4562 && !TREE_THIS_VOLATILE (decl)
4563 && !needs_to_live_in_memory (decl))
4564 DECL_GIMPLE_REG_P (decl) = 1;
4566 /* If the decl is not addressable, then it is being used in some
4567 expression or on the right hand side of a statement, and it can
4568 be put into a readonly data section. */
4569 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4570 TREE_READONLY (decl) = 1;
4572 /* This decl isn't mentioned in the enclosing block, so add it to the
4573 list of temps. FIXME it seems a bit of a kludge to say that
4574 anonymous artificial vars aren't pushed, but everything else is. */
4575 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4576 gimple_add_tmp_var (decl);
4578 gimplify_and_add (decl_s, pre_p);
4579 *expr_p = decl;
4580 return GS_OK;
4583 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4584 return a new CONSTRUCTOR if something changed. */
4586 static tree
4587 optimize_compound_literals_in_ctor (tree orig_ctor)
4589 tree ctor = orig_ctor;
4590 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4591 unsigned int idx, num = vec_safe_length (elts);
4593 for (idx = 0; idx < num; idx++)
4595 tree value = (*elts)[idx].value;
4596 tree newval = value;
4597 if (TREE_CODE (value) == CONSTRUCTOR)
4598 newval = optimize_compound_literals_in_ctor (value);
4599 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4601 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4602 tree decl = DECL_EXPR_DECL (decl_s);
4603 tree init = DECL_INITIAL (decl);
4605 if (!TREE_ADDRESSABLE (value)
4606 && !TREE_ADDRESSABLE (decl)
4607 && init
4608 && TREE_CODE (init) == CONSTRUCTOR)
4609 newval = optimize_compound_literals_in_ctor (init);
4611 if (newval == value)
4612 continue;
4614 if (ctor == orig_ctor)
4616 ctor = copy_node (orig_ctor);
4617 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4618 elts = CONSTRUCTOR_ELTS (ctor);
4620 (*elts)[idx].value = newval;
4622 return ctor;
4625 /* A subroutine of gimplify_modify_expr. Break out elements of a
4626 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4628 Note that we still need to clear any elements that don't have explicit
4629 initializers, so if not all elements are initialized we keep the
4630 original MODIFY_EXPR, we just remove all of the constructor elements.
4632 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4633 GS_ERROR if we would have to create a temporary when gimplifying
4634 this constructor. Otherwise, return GS_OK.
4636 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4638 static enum gimplify_status
4639 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4640 bool want_value, bool notify_temp_creation)
4642 tree object, ctor, type;
4643 enum gimplify_status ret;
4644 vec<constructor_elt, va_gc> *elts;
4646 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4648 if (!notify_temp_creation)
4650 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4651 is_gimple_lvalue, fb_lvalue);
4652 if (ret == GS_ERROR)
4653 return ret;
4656 object = TREE_OPERAND (*expr_p, 0);
4657 ctor = TREE_OPERAND (*expr_p, 1)
4658 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4659 type = TREE_TYPE (ctor);
4660 elts = CONSTRUCTOR_ELTS (ctor);
4661 ret = GS_ALL_DONE;
4663 switch (TREE_CODE (type))
4665 case RECORD_TYPE:
4666 case UNION_TYPE:
4667 case QUAL_UNION_TYPE:
4668 case ARRAY_TYPE:
4670 struct gimplify_init_ctor_preeval_data preeval_data;
4671 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4672 bool cleared, complete_p, valid_const_initializer;
4674 /* Aggregate types must lower constructors to initialization of
4675 individual elements. The exception is that a CONSTRUCTOR node
4676 with no elements indicates zero-initialization of the whole. */
4677 if (vec_safe_is_empty (elts))
4679 if (notify_temp_creation)
4680 return GS_OK;
4681 break;
4684 /* Fetch information about the constructor to direct later processing.
4685 We might want to make static versions of it in various cases, and
4686 can only do so if it known to be a valid constant initializer. */
4687 valid_const_initializer
4688 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4689 &num_ctor_elements, &complete_p);
4691 /* If a const aggregate variable is being initialized, then it
4692 should never be a lose to promote the variable to be static. */
4693 if (valid_const_initializer
4694 && num_nonzero_elements > 1
4695 && TREE_READONLY (object)
4696 && VAR_P (object)
4697 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4699 if (notify_temp_creation)
4700 return GS_ERROR;
4701 DECL_INITIAL (object) = ctor;
4702 TREE_STATIC (object) = 1;
4703 if (!DECL_NAME (object))
4704 DECL_NAME (object) = create_tmp_var_name ("C");
4705 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4707 /* ??? C++ doesn't automatically append a .<number> to the
4708 assembler name, and even when it does, it looks at FE private
4709 data structures to figure out what that number should be,
4710 which are not set for this variable. I suppose this is
4711 important for local statics for inline functions, which aren't
4712 "local" in the object file sense. So in order to get a unique
4713 TU-local symbol, we must invoke the lhd version now. */
4714 lhd_set_decl_assembler_name (object);
4716 *expr_p = NULL_TREE;
4717 break;
4720 /* If there are "lots" of initialized elements, even discounting
4721 those that are not address constants (and thus *must* be
4722 computed at runtime), then partition the constructor into
4723 constant and non-constant parts. Block copy the constant
4724 parts in, then generate code for the non-constant parts. */
4725 /* TODO. There's code in cp/typeck.c to do this. */
4727 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4728 /* store_constructor will ignore the clearing of variable-sized
4729 objects. Initializers for such objects must explicitly set
4730 every field that needs to be set. */
4731 cleared = false;
4732 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4733 /* If the constructor isn't complete, clear the whole object
4734 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4736 ??? This ought not to be needed. For any element not present
4737 in the initializer, we should simply set them to zero. Except
4738 we'd need to *find* the elements that are not present, and that
4739 requires trickery to avoid quadratic compile-time behavior in
4740 large cases or excessive memory use in small cases. */
4741 cleared = true;
4742 else if (num_ctor_elements - num_nonzero_elements
4743 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4744 && num_nonzero_elements < num_ctor_elements / 4)
4745 /* If there are "lots" of zeros, it's more efficient to clear
4746 the memory and then set the nonzero elements. */
4747 cleared = true;
4748 else
4749 cleared = false;
4751 /* If there are "lots" of initialized elements, and all of them
4752 are valid address constants, then the entire initializer can
4753 be dropped to memory, and then memcpy'd out. Don't do this
4754 for sparse arrays, though, as it's more efficient to follow
4755 the standard CONSTRUCTOR behavior of memset followed by
4756 individual element initialization. Also don't do this for small
4757 all-zero initializers (which aren't big enough to merit
4758 clearing), and don't try to make bitwise copies of
4759 TREE_ADDRESSABLE types.
4761 We cannot apply such transformation when compiling chkp static
4762 initializer because creation of initializer image in the memory
4763 will require static initialization of bounds for it. It should
4764 result in another gimplification of similar initializer and we
4765 may fall into infinite loop. */
4766 if (valid_const_initializer
4767 && !(cleared || num_nonzero_elements == 0)
4768 && !TREE_ADDRESSABLE (type)
4769 && (!current_function_decl
4770 || !lookup_attribute ("chkp ctor",
4771 DECL_ATTRIBUTES (current_function_decl))))
4773 HOST_WIDE_INT size = int_size_in_bytes (type);
4774 unsigned int align;
4776 /* ??? We can still get unbounded array types, at least
4777 from the C++ front end. This seems wrong, but attempt
4778 to work around it for now. */
4779 if (size < 0)
4781 size = int_size_in_bytes (TREE_TYPE (object));
4782 if (size >= 0)
4783 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4786 /* Find the maximum alignment we can assume for the object. */
4787 /* ??? Make use of DECL_OFFSET_ALIGN. */
4788 if (DECL_P (object))
4789 align = DECL_ALIGN (object);
4790 else
4791 align = TYPE_ALIGN (type);
4793 /* Do a block move either if the size is so small as to make
4794 each individual move a sub-unit move on average, or if it
4795 is so large as to make individual moves inefficient. */
4796 if (size > 0
4797 && num_nonzero_elements > 1
4798 && (size < num_nonzero_elements
4799 || !can_move_by_pieces (size, align)))
4801 if (notify_temp_creation)
4802 return GS_ERROR;
4804 walk_tree (&ctor, force_labels_r, NULL, NULL);
4805 ctor = tree_output_constant_def (ctor);
4806 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4807 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4808 TREE_OPERAND (*expr_p, 1) = ctor;
4810 /* This is no longer an assignment of a CONSTRUCTOR, but
4811 we still may have processing to do on the LHS. So
4812 pretend we didn't do anything here to let that happen. */
4813 return GS_UNHANDLED;
4817 /* If the target is volatile, we have non-zero elements and more than
4818 one field to assign, initialize the target from a temporary. */
4819 if (TREE_THIS_VOLATILE (object)
4820 && !TREE_ADDRESSABLE (type)
4821 && num_nonzero_elements > 0
4822 && vec_safe_length (elts) > 1)
4824 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4825 TREE_OPERAND (*expr_p, 0) = temp;
4826 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4827 *expr_p,
4828 build2 (MODIFY_EXPR, void_type_node,
4829 object, temp));
4830 return GS_OK;
4833 if (notify_temp_creation)
4834 return GS_OK;
4836 /* If there are nonzero elements and if needed, pre-evaluate to capture
4837 elements overlapping with the lhs into temporaries. We must do this
4838 before clearing to fetch the values before they are zeroed-out. */
4839 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4841 preeval_data.lhs_base_decl = get_base_address (object);
4842 if (!DECL_P (preeval_data.lhs_base_decl))
4843 preeval_data.lhs_base_decl = NULL;
4844 preeval_data.lhs_alias_set = get_alias_set (object);
4846 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4847 pre_p, post_p, &preeval_data);
4850 bool ctor_has_side_effects_p
4851 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4853 if (cleared)
4855 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4856 Note that we still have to gimplify, in order to handle the
4857 case of variable sized types. Avoid shared tree structures. */
4858 CONSTRUCTOR_ELTS (ctor) = NULL;
4859 TREE_SIDE_EFFECTS (ctor) = 0;
4860 object = unshare_expr (object);
4861 gimplify_stmt (expr_p, pre_p);
4864 /* If we have not block cleared the object, or if there are nonzero
4865 elements in the constructor, or if the constructor has side effects,
4866 add assignments to the individual scalar fields of the object. */
4867 if (!cleared
4868 || num_nonzero_elements > 0
4869 || ctor_has_side_effects_p)
4870 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4872 *expr_p = NULL_TREE;
4874 break;
4876 case COMPLEX_TYPE:
4878 tree r, i;
4880 if (notify_temp_creation)
4881 return GS_OK;
4883 /* Extract the real and imaginary parts out of the ctor. */
4884 gcc_assert (elts->length () == 2);
4885 r = (*elts)[0].value;
4886 i = (*elts)[1].value;
4887 if (r == NULL || i == NULL)
4889 tree zero = build_zero_cst (TREE_TYPE (type));
4890 if (r == NULL)
4891 r = zero;
4892 if (i == NULL)
4893 i = zero;
4896 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4897 represent creation of a complex value. */
4898 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4900 ctor = build_complex (type, r, i);
4901 TREE_OPERAND (*expr_p, 1) = ctor;
4903 else
4905 ctor = build2 (COMPLEX_EXPR, type, r, i);
4906 TREE_OPERAND (*expr_p, 1) = ctor;
4907 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4908 pre_p,
4909 post_p,
4910 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4911 fb_rvalue);
4914 break;
4916 case VECTOR_TYPE:
4918 unsigned HOST_WIDE_INT ix;
4919 constructor_elt *ce;
4921 if (notify_temp_creation)
4922 return GS_OK;
4924 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4925 if (TREE_CONSTANT (ctor))
4927 bool constant_p = true;
4928 tree value;
4930 /* Even when ctor is constant, it might contain non-*_CST
4931 elements, such as addresses or trapping values like
4932 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4933 in VECTOR_CST nodes. */
4934 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4935 if (!CONSTANT_CLASS_P (value))
4937 constant_p = false;
4938 break;
4941 if (constant_p)
4943 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4944 break;
4947 TREE_CONSTANT (ctor) = 0;
4950 /* Vector types use CONSTRUCTOR all the way through gimple
4951 compilation as a general initializer. */
4952 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4954 enum gimplify_status tret;
4955 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4956 fb_rvalue);
4957 if (tret == GS_ERROR)
4958 ret = GS_ERROR;
4959 else if (TREE_STATIC (ctor)
4960 && !initializer_constant_valid_p (ce->value,
4961 TREE_TYPE (ce->value)))
4962 TREE_STATIC (ctor) = 0;
4964 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4965 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4967 break;
4969 default:
4970 /* So how did we get a CONSTRUCTOR for a scalar type? */
4971 gcc_unreachable ();
4974 if (ret == GS_ERROR)
4975 return GS_ERROR;
4976 /* If we have gimplified both sides of the initializer but have
4977 not emitted an assignment, do so now. */
4978 if (*expr_p)
4980 tree lhs = TREE_OPERAND (*expr_p, 0);
4981 tree rhs = TREE_OPERAND (*expr_p, 1);
4982 if (want_value && object == lhs)
4983 lhs = unshare_expr (lhs);
4984 gassign *init = gimple_build_assign (lhs, rhs);
4985 gimplify_seq_add_stmt (pre_p, init);
4987 if (want_value)
4989 *expr_p = object;
4990 return GS_OK;
4992 else
4994 *expr_p = NULL;
4995 return GS_ALL_DONE;
4999 /* Given a pointer value OP0, return a simplified version of an
5000 indirection through OP0, or NULL_TREE if no simplification is
5001 possible. This may only be applied to a rhs of an expression.
5002 Note that the resulting type may be different from the type pointed
5003 to in the sense that it is still compatible from the langhooks
5004 point of view. */
5006 static tree
5007 gimple_fold_indirect_ref_rhs (tree t)
5009 return gimple_fold_indirect_ref (t);
5012 /* Subroutine of gimplify_modify_expr to do simplifications of
5013 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5014 something changes. */
5016 static enum gimplify_status
5017 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5018 gimple_seq *pre_p, gimple_seq *post_p,
5019 bool want_value)
5021 enum gimplify_status ret = GS_UNHANDLED;
5022 bool changed;
5026 changed = false;
5027 switch (TREE_CODE (*from_p))
5029 case VAR_DECL:
5030 /* If we're assigning from a read-only variable initialized with
5031 a constructor, do the direct assignment from the constructor,
5032 but only if neither source nor target are volatile since this
5033 latter assignment might end up being done on a per-field basis. */
5034 if (DECL_INITIAL (*from_p)
5035 && TREE_READONLY (*from_p)
5036 && !TREE_THIS_VOLATILE (*from_p)
5037 && !TREE_THIS_VOLATILE (*to_p)
5038 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5040 tree old_from = *from_p;
5041 enum gimplify_status subret;
5043 /* Move the constructor into the RHS. */
5044 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5046 /* Let's see if gimplify_init_constructor will need to put
5047 it in memory. */
5048 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5049 false, true);
5050 if (subret == GS_ERROR)
5052 /* If so, revert the change. */
5053 *from_p = old_from;
5055 else
5057 ret = GS_OK;
5058 changed = true;
5061 break;
5062 case INDIRECT_REF:
5064 /* If we have code like
5066 *(const A*)(A*)&x
5068 where the type of "x" is a (possibly cv-qualified variant
5069 of "A"), treat the entire expression as identical to "x".
5070 This kind of code arises in C++ when an object is bound
5071 to a const reference, and if "x" is a TARGET_EXPR we want
5072 to take advantage of the optimization below. */
5073 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5074 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5075 if (t)
5077 if (TREE_THIS_VOLATILE (t) != volatile_p)
5079 if (DECL_P (t))
5080 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5081 build_fold_addr_expr (t));
5082 if (REFERENCE_CLASS_P (t))
5083 TREE_THIS_VOLATILE (t) = volatile_p;
5085 *from_p = t;
5086 ret = GS_OK;
5087 changed = true;
5089 break;
5092 case TARGET_EXPR:
5094 /* If we are initializing something from a TARGET_EXPR, strip the
5095 TARGET_EXPR and initialize it directly, if possible. This can't
5096 be done if the initializer is void, since that implies that the
5097 temporary is set in some non-trivial way.
5099 ??? What about code that pulls out the temp and uses it
5100 elsewhere? I think that such code never uses the TARGET_EXPR as
5101 an initializer. If I'm wrong, we'll die because the temp won't
5102 have any RTL. In that case, I guess we'll need to replace
5103 references somehow. */
5104 tree init = TARGET_EXPR_INITIAL (*from_p);
5106 if (init
5107 && !VOID_TYPE_P (TREE_TYPE (init)))
5109 *from_p = init;
5110 ret = GS_OK;
5111 changed = true;
5114 break;
5116 case COMPOUND_EXPR:
5117 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5118 caught. */
5119 gimplify_compound_expr (from_p, pre_p, true);
5120 ret = GS_OK;
5121 changed = true;
5122 break;
5124 case CONSTRUCTOR:
5125 /* If we already made some changes, let the front end have a
5126 crack at this before we break it down. */
5127 if (ret != GS_UNHANDLED)
5128 break;
5129 /* If we're initializing from a CONSTRUCTOR, break this into
5130 individual MODIFY_EXPRs. */
5131 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5132 false);
5134 case COND_EXPR:
5135 /* If we're assigning to a non-register type, push the assignment
5136 down into the branches. This is mandatory for ADDRESSABLE types,
5137 since we cannot generate temporaries for such, but it saves a
5138 copy in other cases as well. */
5139 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5141 /* This code should mirror the code in gimplify_cond_expr. */
5142 enum tree_code code = TREE_CODE (*expr_p);
5143 tree cond = *from_p;
5144 tree result = *to_p;
5146 ret = gimplify_expr (&result, pre_p, post_p,
5147 is_gimple_lvalue, fb_lvalue);
5148 if (ret != GS_ERROR)
5149 ret = GS_OK;
5151 /* If we are going to write RESULT more than once, clear
5152 TREE_READONLY flag, otherwise we might incorrectly promote
5153 the variable to static const and initialize it at compile
5154 time in one of the branches. */
5155 if (VAR_P (result)
5156 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5157 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5158 TREE_READONLY (result) = 0;
5159 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5160 TREE_OPERAND (cond, 1)
5161 = build2 (code, void_type_node, result,
5162 TREE_OPERAND (cond, 1));
5163 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5164 TREE_OPERAND (cond, 2)
5165 = build2 (code, void_type_node, unshare_expr (result),
5166 TREE_OPERAND (cond, 2));
5168 TREE_TYPE (cond) = void_type_node;
5169 recalculate_side_effects (cond);
5171 if (want_value)
5173 gimplify_and_add (cond, pre_p);
5174 *expr_p = unshare_expr (result);
5176 else
5177 *expr_p = cond;
5178 return ret;
5180 break;
5182 case CALL_EXPR:
5183 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5184 return slot so that we don't generate a temporary. */
5185 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5186 && aggregate_value_p (*from_p, *from_p))
5188 bool use_target;
5190 if (!(rhs_predicate_for (*to_p))(*from_p))
5191 /* If we need a temporary, *to_p isn't accurate. */
5192 use_target = false;
5193 /* It's OK to use the return slot directly unless it's an NRV. */
5194 else if (TREE_CODE (*to_p) == RESULT_DECL
5195 && DECL_NAME (*to_p) == NULL_TREE
5196 && needs_to_live_in_memory (*to_p))
5197 use_target = true;
5198 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5199 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5200 /* Don't force regs into memory. */
5201 use_target = false;
5202 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5203 /* It's OK to use the target directly if it's being
5204 initialized. */
5205 use_target = true;
5206 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5207 != INTEGER_CST)
5208 /* Always use the target and thus RSO for variable-sized types.
5209 GIMPLE cannot deal with a variable-sized assignment
5210 embedded in a call statement. */
5211 use_target = true;
5212 else if (TREE_CODE (*to_p) != SSA_NAME
5213 && (!is_gimple_variable (*to_p)
5214 || needs_to_live_in_memory (*to_p)))
5215 /* Don't use the original target if it's already addressable;
5216 if its address escapes, and the called function uses the
5217 NRV optimization, a conforming program could see *to_p
5218 change before the called function returns; see c++/19317.
5219 When optimizing, the return_slot pass marks more functions
5220 as safe after we have escape info. */
5221 use_target = false;
5222 else
5223 use_target = true;
5225 if (use_target)
5227 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5228 mark_addressable (*to_p);
5231 break;
5233 case WITH_SIZE_EXPR:
5234 /* Likewise for calls that return an aggregate of non-constant size,
5235 since we would not be able to generate a temporary at all. */
5236 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5238 *from_p = TREE_OPERAND (*from_p, 0);
5239 /* We don't change ret in this case because the
5240 WITH_SIZE_EXPR might have been added in
5241 gimplify_modify_expr, so returning GS_OK would lead to an
5242 infinite loop. */
5243 changed = true;
5245 break;
5247 /* If we're initializing from a container, push the initialization
5248 inside it. */
5249 case CLEANUP_POINT_EXPR:
5250 case BIND_EXPR:
5251 case STATEMENT_LIST:
5253 tree wrap = *from_p;
5254 tree t;
5256 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5257 fb_lvalue);
5258 if (ret != GS_ERROR)
5259 ret = GS_OK;
5261 t = voidify_wrapper_expr (wrap, *expr_p);
5262 gcc_assert (t == *expr_p);
5264 if (want_value)
5266 gimplify_and_add (wrap, pre_p);
5267 *expr_p = unshare_expr (*to_p);
5269 else
5270 *expr_p = wrap;
5271 return GS_OK;
5274 case COMPOUND_LITERAL_EXPR:
5276 tree complit = TREE_OPERAND (*expr_p, 1);
5277 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5278 tree decl = DECL_EXPR_DECL (decl_s);
5279 tree init = DECL_INITIAL (decl);
5281 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5282 into struct T x = { 0, 1, 2 } if the address of the
5283 compound literal has never been taken. */
5284 if (!TREE_ADDRESSABLE (complit)
5285 && !TREE_ADDRESSABLE (decl)
5286 && init)
5288 *expr_p = copy_node (*expr_p);
5289 TREE_OPERAND (*expr_p, 1) = init;
5290 return GS_OK;
5294 default:
5295 break;
5298 while (changed);
5300 return ret;
5304 /* Return true if T looks like a valid GIMPLE statement. */
5306 static bool
5307 is_gimple_stmt (tree t)
5309 const enum tree_code code = TREE_CODE (t);
5311 switch (code)
5313 case NOP_EXPR:
5314 /* The only valid NOP_EXPR is the empty statement. */
5315 return IS_EMPTY_STMT (t);
5317 case BIND_EXPR:
5318 case COND_EXPR:
5319 /* These are only valid if they're void. */
5320 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5322 case SWITCH_EXPR:
5323 case GOTO_EXPR:
5324 case RETURN_EXPR:
5325 case LABEL_EXPR:
5326 case CASE_LABEL_EXPR:
5327 case TRY_CATCH_EXPR:
5328 case TRY_FINALLY_EXPR:
5329 case EH_FILTER_EXPR:
5330 case CATCH_EXPR:
5331 case ASM_EXPR:
5332 case STATEMENT_LIST:
5333 case OACC_PARALLEL:
5334 case OACC_KERNELS:
5335 case OACC_DATA:
5336 case OACC_HOST_DATA:
5337 case OACC_DECLARE:
5338 case OACC_UPDATE:
5339 case OACC_ENTER_DATA:
5340 case OACC_EXIT_DATA:
5341 case OACC_CACHE:
5342 case OMP_PARALLEL:
5343 case OMP_FOR:
5344 case OMP_SIMD:
5345 case CILK_SIMD:
5346 case OMP_DISTRIBUTE:
5347 case OACC_LOOP:
5348 case OMP_SECTIONS:
5349 case OMP_SECTION:
5350 case OMP_SINGLE:
5351 case OMP_MASTER:
5352 case OMP_TASKGROUP:
5353 case OMP_ORDERED:
5354 case OMP_CRITICAL:
5355 case OMP_TASK:
5356 case OMP_TARGET:
5357 case OMP_TARGET_DATA:
5358 case OMP_TARGET_UPDATE:
5359 case OMP_TARGET_ENTER_DATA:
5360 case OMP_TARGET_EXIT_DATA:
5361 case OMP_TASKLOOP:
5362 case OMP_TEAMS:
5363 /* These are always void. */
5364 return true;
5366 case CALL_EXPR:
5367 case MODIFY_EXPR:
5368 case PREDICT_EXPR:
5369 /* These are valid regardless of their type. */
5370 return true;
5372 default:
5373 return false;
5378 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5379 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5380 DECL_GIMPLE_REG_P set.
5382 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5383 other, unmodified part of the complex object just before the total store.
5384 As a consequence, if the object is still uninitialized, an undefined value
5385 will be loaded into a register, which may result in a spurious exception
5386 if the register is floating-point and the value happens to be a signaling
5387 NaN for example. Then the fully-fledged complex operations lowering pass
5388 followed by a DCE pass are necessary in order to fix things up. */
5390 static enum gimplify_status
5391 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5392 bool want_value)
5394 enum tree_code code, ocode;
5395 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5397 lhs = TREE_OPERAND (*expr_p, 0);
5398 rhs = TREE_OPERAND (*expr_p, 1);
5399 code = TREE_CODE (lhs);
5400 lhs = TREE_OPERAND (lhs, 0);
5402 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5403 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5404 TREE_NO_WARNING (other) = 1;
5405 other = get_formal_tmp_var (other, pre_p);
5407 realpart = code == REALPART_EXPR ? rhs : other;
5408 imagpart = code == REALPART_EXPR ? other : rhs;
5410 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5411 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5412 else
5413 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5415 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5416 *expr_p = (want_value) ? rhs : NULL_TREE;
5418 return GS_ALL_DONE;
5421 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5423 modify_expr
5424 : varname '=' rhs
5425 | '*' ID '=' rhs
5427 PRE_P points to the list where side effects that must happen before
5428 *EXPR_P should be stored.
5430 POST_P points to the list where side effects that must happen after
5431 *EXPR_P should be stored.
5433 WANT_VALUE is nonzero iff we want to use the value of this expression
5434 in another expression. */
5436 static enum gimplify_status
5437 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5438 bool want_value)
5440 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5441 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5442 enum gimplify_status ret = GS_UNHANDLED;
5443 gimple *assign;
5444 location_t loc = EXPR_LOCATION (*expr_p);
5445 gimple_stmt_iterator gsi;
5447 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5448 || TREE_CODE (*expr_p) == INIT_EXPR);
5450 /* Trying to simplify a clobber using normal logic doesn't work,
5451 so handle it here. */
5452 if (TREE_CLOBBER_P (*from_p))
5454 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5455 if (ret == GS_ERROR)
5456 return ret;
5457 gcc_assert (!want_value
5458 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5459 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5460 *expr_p = NULL;
5461 return GS_ALL_DONE;
5464 /* Insert pointer conversions required by the middle-end that are not
5465 required by the frontend. This fixes middle-end type checking for
5466 for example gcc.dg/redecl-6.c. */
5467 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5469 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5470 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5471 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5474 /* See if any simplifications can be done based on what the RHS is. */
5475 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5476 want_value);
5477 if (ret != GS_UNHANDLED)
5478 return ret;
5480 /* For zero sized types only gimplify the left hand side and right hand
5481 side as statements and throw away the assignment. Do this after
5482 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5483 types properly. */
5484 if (zero_sized_type (TREE_TYPE (*from_p))
5485 && !want_value
5486 /* Don't do this for calls that return addressable types, expand_call
5487 relies on those having a lhs. */
5488 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5489 && TREE_CODE (*from_p) == CALL_EXPR))
5491 gimplify_stmt (from_p, pre_p);
5492 gimplify_stmt (to_p, pre_p);
5493 *expr_p = NULL_TREE;
5494 return GS_ALL_DONE;
5497 /* If the value being copied is of variable width, compute the length
5498 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5499 before gimplifying any of the operands so that we can resolve any
5500 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5501 the size of the expression to be copied, not of the destination, so
5502 that is what we must do here. */
5503 maybe_with_size_expr (from_p);
5505 /* As a special case, we have to temporarily allow for assignments
5506 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5507 a toplevel statement, when gimplifying the GENERIC expression
5508 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5509 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5511 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5512 prevent gimplify_expr from trying to create a new temporary for
5513 foo's LHS, we tell it that it should only gimplify until it
5514 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5515 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5516 and all we need to do here is set 'a' to be its LHS. */
5518 /* Gimplify the RHS first for C++17 and bug 71104. */
5519 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5520 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5521 if (ret == GS_ERROR)
5522 return ret;
5524 /* Then gimplify the LHS. */
5525 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5526 twice we have to make sure to gimplify into non-SSA as otherwise
5527 the abnormal edge added later will make those defs not dominate
5528 their uses.
5529 ??? Technically this applies only to the registers used in the
5530 resulting non-register *TO_P. */
5531 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5532 if (saved_into_ssa
5533 && TREE_CODE (*from_p) == CALL_EXPR
5534 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5535 gimplify_ctxp->into_ssa = false;
5536 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5537 gimplify_ctxp->into_ssa = saved_into_ssa;
5538 if (ret == GS_ERROR)
5539 return ret;
5541 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5542 guess for the predicate was wrong. */
5543 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5544 if (final_pred != initial_pred)
5546 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5547 if (ret == GS_ERROR)
5548 return ret;
5551 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5552 size as argument to the call. */
5553 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5555 tree call = TREE_OPERAND (*from_p, 0);
5556 tree vlasize = TREE_OPERAND (*from_p, 1);
5558 if (TREE_CODE (call) == CALL_EXPR
5559 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5561 int nargs = call_expr_nargs (call);
5562 tree type = TREE_TYPE (call);
5563 tree ap = CALL_EXPR_ARG (call, 0);
5564 tree tag = CALL_EXPR_ARG (call, 1);
5565 tree aptag = CALL_EXPR_ARG (call, 2);
5566 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5567 IFN_VA_ARG, type,
5568 nargs + 1, ap, tag,
5569 aptag, vlasize);
5570 TREE_OPERAND (*from_p, 0) = newcall;
5574 /* Now see if the above changed *from_p to something we handle specially. */
5575 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5576 want_value);
5577 if (ret != GS_UNHANDLED)
5578 return ret;
5580 /* If we've got a variable sized assignment between two lvalues (i.e. does
5581 not involve a call), then we can make things a bit more straightforward
5582 by converting the assignment to memcpy or memset. */
5583 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5585 tree from = TREE_OPERAND (*from_p, 0);
5586 tree size = TREE_OPERAND (*from_p, 1);
5588 if (TREE_CODE (from) == CONSTRUCTOR)
5589 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5591 if (is_gimple_addressable (from))
5593 *from_p = from;
5594 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5595 pre_p);
5599 /* Transform partial stores to non-addressable complex variables into
5600 total stores. This allows us to use real instead of virtual operands
5601 for these variables, which improves optimization. */
5602 if ((TREE_CODE (*to_p) == REALPART_EXPR
5603 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5604 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5605 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5607 /* Try to alleviate the effects of the gimplification creating artificial
5608 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5609 make sure not to create DECL_DEBUG_EXPR links across functions. */
5610 if (!gimplify_ctxp->into_ssa
5611 && VAR_P (*from_p)
5612 && DECL_IGNORED_P (*from_p)
5613 && DECL_P (*to_p)
5614 && !DECL_IGNORED_P (*to_p)
5615 && decl_function_context (*to_p) == current_function_decl
5616 && decl_function_context (*from_p) == current_function_decl)
5618 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5619 DECL_NAME (*from_p)
5620 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5621 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5622 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5625 if (want_value && TREE_THIS_VOLATILE (*to_p))
5626 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5628 if (TREE_CODE (*from_p) == CALL_EXPR)
5630 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5631 instead of a GIMPLE_ASSIGN. */
5632 gcall *call_stmt;
5633 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5635 /* Gimplify internal functions created in the FEs. */
5636 int nargs = call_expr_nargs (*from_p), i;
5637 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5638 auto_vec<tree> vargs (nargs);
5640 for (i = 0; i < nargs; i++)
5642 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5643 EXPR_LOCATION (*from_p));
5644 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5646 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5647 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5648 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5650 else
5652 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5653 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5654 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5655 tree fndecl = get_callee_fndecl (*from_p);
5656 if (fndecl
5657 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5658 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5659 && call_expr_nargs (*from_p) == 3)
5660 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5661 CALL_EXPR_ARG (*from_p, 0),
5662 CALL_EXPR_ARG (*from_p, 1),
5663 CALL_EXPR_ARG (*from_p, 2));
5664 else
5666 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5669 notice_special_calls (call_stmt);
5670 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5671 gimple_call_set_lhs (call_stmt, *to_p);
5672 else if (TREE_CODE (*to_p) == SSA_NAME)
5673 /* The above is somewhat premature, avoid ICEing later for a
5674 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5675 ??? This doesn't make it a default-def. */
5676 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5678 if (EXPR_CILK_SPAWN (*from_p))
5679 gimplify_cilk_detach (pre_p);
5680 assign = call_stmt;
5682 else
5684 assign = gimple_build_assign (*to_p, *from_p);
5685 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5686 if (COMPARISON_CLASS_P (*from_p))
5687 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5690 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5692 /* We should have got an SSA name from the start. */
5693 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5694 || ! gimple_in_ssa_p (cfun));
5697 gimplify_seq_add_stmt (pre_p, assign);
5698 gsi = gsi_last (*pre_p);
5699 maybe_fold_stmt (&gsi);
5701 if (want_value)
5703 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5704 return GS_OK;
5706 else
5707 *expr_p = NULL;
5709 return GS_ALL_DONE;
5712 /* Gimplify a comparison between two variable-sized objects. Do this
5713 with a call to BUILT_IN_MEMCMP. */
5715 static enum gimplify_status
5716 gimplify_variable_sized_compare (tree *expr_p)
5718 location_t loc = EXPR_LOCATION (*expr_p);
5719 tree op0 = TREE_OPERAND (*expr_p, 0);
5720 tree op1 = TREE_OPERAND (*expr_p, 1);
5721 tree t, arg, dest, src, expr;
5723 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5724 arg = unshare_expr (arg);
5725 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5726 src = build_fold_addr_expr_loc (loc, op1);
5727 dest = build_fold_addr_expr_loc (loc, op0);
5728 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5729 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5731 expr
5732 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5733 SET_EXPR_LOCATION (expr, loc);
5734 *expr_p = expr;
5736 return GS_OK;
5739 /* Gimplify a comparison between two aggregate objects of integral scalar
5740 mode as a comparison between the bitwise equivalent scalar values. */
5742 static enum gimplify_status
5743 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5745 location_t loc = EXPR_LOCATION (*expr_p);
5746 tree op0 = TREE_OPERAND (*expr_p, 0);
5747 tree op1 = TREE_OPERAND (*expr_p, 1);
5749 tree type = TREE_TYPE (op0);
5750 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5752 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5753 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5755 *expr_p
5756 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5758 return GS_OK;
5761 /* Gimplify an expression sequence. This function gimplifies each
5762 expression and rewrites the original expression with the last
5763 expression of the sequence in GIMPLE form.
5765 PRE_P points to the list where the side effects for all the
5766 expressions in the sequence will be emitted.
5768 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5770 static enum gimplify_status
5771 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5773 tree t = *expr_p;
5777 tree *sub_p = &TREE_OPERAND (t, 0);
5779 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5780 gimplify_compound_expr (sub_p, pre_p, false);
5781 else
5782 gimplify_stmt (sub_p, pre_p);
5784 t = TREE_OPERAND (t, 1);
5786 while (TREE_CODE (t) == COMPOUND_EXPR);
5788 *expr_p = t;
5789 if (want_value)
5790 return GS_OK;
5791 else
5793 gimplify_stmt (expr_p, pre_p);
5794 return GS_ALL_DONE;
5798 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5799 gimplify. After gimplification, EXPR_P will point to a new temporary
5800 that holds the original value of the SAVE_EXPR node.
5802 PRE_P points to the list where side effects that must happen before
5803 *EXPR_P should be stored. */
5805 static enum gimplify_status
5806 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5808 enum gimplify_status ret = GS_ALL_DONE;
5809 tree val;
5811 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5812 val = TREE_OPERAND (*expr_p, 0);
5814 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5815 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5817 /* The operand may be a void-valued expression. It is
5818 being executed only for its side-effects. */
5819 if (TREE_TYPE (val) == void_type_node)
5821 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5822 is_gimple_stmt, fb_none);
5823 val = NULL;
5825 else
5826 /* The temporary may not be an SSA name as later abnormal and EH
5827 control flow may invalidate use/def domination. */
5828 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5830 TREE_OPERAND (*expr_p, 0) = val;
5831 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5834 *expr_p = val;
5836 return ret;
5839 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5841 unary_expr
5842 : ...
5843 | '&' varname
5846 PRE_P points to the list where side effects that must happen before
5847 *EXPR_P should be stored.
5849 POST_P points to the list where side effects that must happen after
5850 *EXPR_P should be stored. */
5852 static enum gimplify_status
5853 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5855 tree expr = *expr_p;
5856 tree op0 = TREE_OPERAND (expr, 0);
5857 enum gimplify_status ret;
5858 location_t loc = EXPR_LOCATION (*expr_p);
5860 switch (TREE_CODE (op0))
5862 case INDIRECT_REF:
5863 do_indirect_ref:
5864 /* Check if we are dealing with an expression of the form '&*ptr'.
5865 While the front end folds away '&*ptr' into 'ptr', these
5866 expressions may be generated internally by the compiler (e.g.,
5867 builtins like __builtin_va_end). */
5868 /* Caution: the silent array decomposition semantics we allow for
5869 ADDR_EXPR means we can't always discard the pair. */
5870 /* Gimplification of the ADDR_EXPR operand may drop
5871 cv-qualification conversions, so make sure we add them if
5872 needed. */
5874 tree op00 = TREE_OPERAND (op0, 0);
5875 tree t_expr = TREE_TYPE (expr);
5876 tree t_op00 = TREE_TYPE (op00);
5878 if (!useless_type_conversion_p (t_expr, t_op00))
5879 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5880 *expr_p = op00;
5881 ret = GS_OK;
5883 break;
5885 case VIEW_CONVERT_EXPR:
5886 /* Take the address of our operand and then convert it to the type of
5887 this ADDR_EXPR.
5889 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5890 all clear. The impact of this transformation is even less clear. */
5892 /* If the operand is a useless conversion, look through it. Doing so
5893 guarantees that the ADDR_EXPR and its operand will remain of the
5894 same type. */
5895 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5896 op0 = TREE_OPERAND (op0, 0);
5898 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5899 build_fold_addr_expr_loc (loc,
5900 TREE_OPERAND (op0, 0)));
5901 ret = GS_OK;
5902 break;
5904 case MEM_REF:
5905 if (integer_zerop (TREE_OPERAND (op0, 1)))
5906 goto do_indirect_ref;
5908 /* fall through */
5910 default:
5911 /* If we see a call to a declared builtin or see its address
5912 being taken (we can unify those cases here) then we can mark
5913 the builtin for implicit generation by GCC. */
5914 if (TREE_CODE (op0) == FUNCTION_DECL
5915 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5916 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5917 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5919 /* We use fb_either here because the C frontend sometimes takes
5920 the address of a call that returns a struct; see
5921 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5922 the implied temporary explicit. */
5924 /* Make the operand addressable. */
5925 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5926 is_gimple_addressable, fb_either);
5927 if (ret == GS_ERROR)
5928 break;
5930 /* Then mark it. Beware that it may not be possible to do so directly
5931 if a temporary has been created by the gimplification. */
5932 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5934 op0 = TREE_OPERAND (expr, 0);
5936 /* For various reasons, the gimplification of the expression
5937 may have made a new INDIRECT_REF. */
5938 if (TREE_CODE (op0) == INDIRECT_REF)
5939 goto do_indirect_ref;
5941 mark_addressable (TREE_OPERAND (expr, 0));
5943 /* The FEs may end up building ADDR_EXPRs early on a decl with
5944 an incomplete type. Re-build ADDR_EXPRs in canonical form
5945 here. */
5946 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5947 *expr_p = build_fold_addr_expr (op0);
5949 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5950 recompute_tree_invariant_for_addr_expr (*expr_p);
5952 /* If we re-built the ADDR_EXPR add a conversion to the original type
5953 if required. */
5954 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5955 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5957 break;
5960 return ret;
5963 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5964 value; output operands should be a gimple lvalue. */
5966 static enum gimplify_status
5967 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5969 tree expr;
5970 int noutputs;
5971 const char **oconstraints;
5972 int i;
5973 tree link;
5974 const char *constraint;
5975 bool allows_mem, allows_reg, is_inout;
5976 enum gimplify_status ret, tret;
5977 gasm *stmt;
5978 vec<tree, va_gc> *inputs;
5979 vec<tree, va_gc> *outputs;
5980 vec<tree, va_gc> *clobbers;
5981 vec<tree, va_gc> *labels;
5982 tree link_next;
5984 expr = *expr_p;
5985 noutputs = list_length (ASM_OUTPUTS (expr));
5986 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5988 inputs = NULL;
5989 outputs = NULL;
5990 clobbers = NULL;
5991 labels = NULL;
5993 ret = GS_ALL_DONE;
5994 link_next = NULL_TREE;
5995 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5997 bool ok;
5998 size_t constraint_len;
6000 link_next = TREE_CHAIN (link);
6002 oconstraints[i]
6003 = constraint
6004 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6005 constraint_len = strlen (constraint);
6006 if (constraint_len == 0)
6007 continue;
6009 ok = parse_output_constraint (&constraint, i, 0, 0,
6010 &allows_mem, &allows_reg, &is_inout);
6011 if (!ok)
6013 ret = GS_ERROR;
6014 is_inout = false;
6017 if (!allows_reg && allows_mem)
6018 mark_addressable (TREE_VALUE (link));
6020 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6021 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6022 fb_lvalue | fb_mayfail);
6023 if (tret == GS_ERROR)
6025 error ("invalid lvalue in asm output %d", i);
6026 ret = tret;
6029 /* If the constraint does not allow memory make sure we gimplify
6030 it to a register if it is not already but its base is. This
6031 happens for complex and vector components. */
6032 if (!allows_mem)
6034 tree op = TREE_VALUE (link);
6035 if (! is_gimple_val (op)
6036 && is_gimple_reg_type (TREE_TYPE (op))
6037 && is_gimple_reg (get_base_address (op)))
6039 tree tem = create_tmp_reg (TREE_TYPE (op));
6040 tree ass;
6041 if (is_inout)
6043 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6044 tem, unshare_expr (op));
6045 gimplify_and_add (ass, pre_p);
6047 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6048 gimplify_and_add (ass, post_p);
6050 TREE_VALUE (link) = tem;
6051 tret = GS_OK;
6055 vec_safe_push (outputs, link);
6056 TREE_CHAIN (link) = NULL_TREE;
6058 if (is_inout)
6060 /* An input/output operand. To give the optimizers more
6061 flexibility, split it into separate input and output
6062 operands. */
6063 tree input;
6064 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6065 char buf[11];
6067 /* Turn the in/out constraint into an output constraint. */
6068 char *p = xstrdup (constraint);
6069 p[0] = '=';
6070 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6072 /* And add a matching input constraint. */
6073 if (allows_reg)
6075 sprintf (buf, "%u", i);
6077 /* If there are multiple alternatives in the constraint,
6078 handle each of them individually. Those that allow register
6079 will be replaced with operand number, the others will stay
6080 unchanged. */
6081 if (strchr (p, ',') != NULL)
6083 size_t len = 0, buflen = strlen (buf);
6084 char *beg, *end, *str, *dst;
6086 for (beg = p + 1;;)
6088 end = strchr (beg, ',');
6089 if (end == NULL)
6090 end = strchr (beg, '\0');
6091 if ((size_t) (end - beg) < buflen)
6092 len += buflen + 1;
6093 else
6094 len += end - beg + 1;
6095 if (*end)
6096 beg = end + 1;
6097 else
6098 break;
6101 str = (char *) alloca (len);
6102 for (beg = p + 1, dst = str;;)
6104 const char *tem;
6105 bool mem_p, reg_p, inout_p;
6107 end = strchr (beg, ',');
6108 if (end)
6109 *end = '\0';
6110 beg[-1] = '=';
6111 tem = beg - 1;
6112 parse_output_constraint (&tem, i, 0, 0,
6113 &mem_p, &reg_p, &inout_p);
6114 if (dst != str)
6115 *dst++ = ',';
6116 if (reg_p)
6118 memcpy (dst, buf, buflen);
6119 dst += buflen;
6121 else
6123 if (end)
6124 len = end - beg;
6125 else
6126 len = strlen (beg);
6127 memcpy (dst, beg, len);
6128 dst += len;
6130 if (end)
6131 beg = end + 1;
6132 else
6133 break;
6135 *dst = '\0';
6136 input = build_string (dst - str, str);
6138 else
6139 input = build_string (strlen (buf), buf);
6141 else
6142 input = build_string (constraint_len - 1, constraint + 1);
6144 free (p);
6146 input = build_tree_list (build_tree_list (NULL_TREE, input),
6147 unshare_expr (TREE_VALUE (link)));
6148 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6152 link_next = NULL_TREE;
6153 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6155 link_next = TREE_CHAIN (link);
6156 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6157 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6158 oconstraints, &allows_mem, &allows_reg);
6160 /* If we can't make copies, we can only accept memory. */
6161 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6163 if (allows_mem)
6164 allows_reg = 0;
6165 else
6167 error ("impossible constraint in %<asm%>");
6168 error ("non-memory input %d must stay in memory", i);
6169 return GS_ERROR;
6173 /* If the operand is a memory input, it should be an lvalue. */
6174 if (!allows_reg && allows_mem)
6176 tree inputv = TREE_VALUE (link);
6177 STRIP_NOPS (inputv);
6178 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6179 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6180 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6181 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6182 || TREE_CODE (inputv) == MODIFY_EXPR)
6183 TREE_VALUE (link) = error_mark_node;
6184 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6185 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6186 if (tret != GS_ERROR)
6188 /* Unlike output operands, memory inputs are not guaranteed
6189 to be lvalues by the FE, and while the expressions are
6190 marked addressable there, if it is e.g. a statement
6191 expression, temporaries in it might not end up being
6192 addressable. They might be already used in the IL and thus
6193 it is too late to make them addressable now though. */
6194 tree x = TREE_VALUE (link);
6195 while (handled_component_p (x))
6196 x = TREE_OPERAND (x, 0);
6197 if (TREE_CODE (x) == MEM_REF
6198 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6199 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6200 if ((VAR_P (x)
6201 || TREE_CODE (x) == PARM_DECL
6202 || TREE_CODE (x) == RESULT_DECL)
6203 && !TREE_ADDRESSABLE (x)
6204 && is_gimple_reg (x))
6206 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6207 input_location), 0,
6208 "memory input %d is not directly addressable",
6210 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6213 mark_addressable (TREE_VALUE (link));
6214 if (tret == GS_ERROR)
6216 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6217 "memory input %d is not directly addressable", i);
6218 ret = tret;
6221 else
6223 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6224 is_gimple_asm_val, fb_rvalue);
6225 if (tret == GS_ERROR)
6226 ret = tret;
6229 TREE_CHAIN (link) = NULL_TREE;
6230 vec_safe_push (inputs, link);
6233 link_next = NULL_TREE;
6234 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6236 link_next = TREE_CHAIN (link);
6237 TREE_CHAIN (link) = NULL_TREE;
6238 vec_safe_push (clobbers, link);
6241 link_next = NULL_TREE;
6242 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6244 link_next = TREE_CHAIN (link);
6245 TREE_CHAIN (link) = NULL_TREE;
6246 vec_safe_push (labels, link);
6249 /* Do not add ASMs with errors to the gimple IL stream. */
6250 if (ret != GS_ERROR)
6252 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6253 inputs, outputs, clobbers, labels);
6255 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6256 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6258 gimplify_seq_add_stmt (pre_p, stmt);
6261 return ret;
6264 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6265 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6266 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6267 return to this function.
6269 FIXME should we complexify the prequeue handling instead? Or use flags
6270 for all the cleanups and let the optimizer tighten them up? The current
6271 code seems pretty fragile; it will break on a cleanup within any
6272 non-conditional nesting. But any such nesting would be broken, anyway;
6273 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6274 and continues out of it. We can do that at the RTL level, though, so
6275 having an optimizer to tighten up try/finally regions would be a Good
6276 Thing. */
6278 static enum gimplify_status
6279 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6281 gimple_stmt_iterator iter;
6282 gimple_seq body_sequence = NULL;
6284 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6286 /* We only care about the number of conditions between the innermost
6287 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6288 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6289 int old_conds = gimplify_ctxp->conditions;
6290 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6291 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6292 gimplify_ctxp->conditions = 0;
6293 gimplify_ctxp->conditional_cleanups = NULL;
6294 gimplify_ctxp->in_cleanup_point_expr = true;
6296 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6298 gimplify_ctxp->conditions = old_conds;
6299 gimplify_ctxp->conditional_cleanups = old_cleanups;
6300 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6302 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6304 gimple *wce = gsi_stmt (iter);
6306 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6308 if (gsi_one_before_end_p (iter))
6310 /* Note that gsi_insert_seq_before and gsi_remove do not
6311 scan operands, unlike some other sequence mutators. */
6312 if (!gimple_wce_cleanup_eh_only (wce))
6313 gsi_insert_seq_before_without_update (&iter,
6314 gimple_wce_cleanup (wce),
6315 GSI_SAME_STMT);
6316 gsi_remove (&iter, true);
6317 break;
6319 else
6321 gtry *gtry;
6322 gimple_seq seq;
6323 enum gimple_try_flags kind;
6325 if (gimple_wce_cleanup_eh_only (wce))
6326 kind = GIMPLE_TRY_CATCH;
6327 else
6328 kind = GIMPLE_TRY_FINALLY;
6329 seq = gsi_split_seq_after (iter);
6331 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6332 /* Do not use gsi_replace here, as it may scan operands.
6333 We want to do a simple structural modification only. */
6334 gsi_set_stmt (&iter, gtry);
6335 iter = gsi_start (gtry->eval);
6338 else
6339 gsi_next (&iter);
6342 gimplify_seq_add_seq (pre_p, body_sequence);
6343 if (temp)
6345 *expr_p = temp;
6346 return GS_OK;
6348 else
6350 *expr_p = NULL;
6351 return GS_ALL_DONE;
6355 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6356 is the cleanup action required. EH_ONLY is true if the cleanup should
6357 only be executed if an exception is thrown, not on normal exit.
6358 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6359 only valid for clobbers. */
6361 static void
6362 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6363 bool force_uncond = false)
6365 gimple *wce;
6366 gimple_seq cleanup_stmts = NULL;
6368 /* Errors can result in improperly nested cleanups. Which results in
6369 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6370 if (seen_error ())
6371 return;
6373 if (gimple_conditional_context ())
6375 /* If we're in a conditional context, this is more complex. We only
6376 want to run the cleanup if we actually ran the initialization that
6377 necessitates it, but we want to run it after the end of the
6378 conditional context. So we wrap the try/finally around the
6379 condition and use a flag to determine whether or not to actually
6380 run the destructor. Thus
6382 test ? f(A()) : 0
6384 becomes (approximately)
6386 flag = 0;
6387 try {
6388 if (test) { A::A(temp); flag = 1; val = f(temp); }
6389 else { val = 0; }
6390 } finally {
6391 if (flag) A::~A(temp);
6395 if (force_uncond)
6397 gimplify_stmt (&cleanup, &cleanup_stmts);
6398 wce = gimple_build_wce (cleanup_stmts);
6399 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6401 else
6403 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6404 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6405 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6407 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6408 gimplify_stmt (&cleanup, &cleanup_stmts);
6409 wce = gimple_build_wce (cleanup_stmts);
6411 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6412 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6413 gimplify_seq_add_stmt (pre_p, ftrue);
6415 /* Because of this manipulation, and the EH edges that jump
6416 threading cannot redirect, the temporary (VAR) will appear
6417 to be used uninitialized. Don't warn. */
6418 TREE_NO_WARNING (var) = 1;
6421 else
6423 gimplify_stmt (&cleanup, &cleanup_stmts);
6424 wce = gimple_build_wce (cleanup_stmts);
6425 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6426 gimplify_seq_add_stmt (pre_p, wce);
6430 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6432 static enum gimplify_status
6433 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6435 tree targ = *expr_p;
6436 tree temp = TARGET_EXPR_SLOT (targ);
6437 tree init = TARGET_EXPR_INITIAL (targ);
6438 enum gimplify_status ret;
6440 bool unpoison_empty_seq = false;
6441 gimple_stmt_iterator unpoison_it;
6443 if (init)
6445 tree cleanup = NULL_TREE;
6447 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6448 to the temps list. Handle also variable length TARGET_EXPRs. */
6449 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6451 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6452 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6453 gimplify_vla_decl (temp, pre_p);
6455 else
6457 /* Save location where we need to place unpoisoning. It's possible
6458 that a variable will be converted to needs_to_live_in_memory. */
6459 unpoison_it = gsi_last (*pre_p);
6460 unpoison_empty_seq = gsi_end_p (unpoison_it);
6462 gimple_add_tmp_var (temp);
6465 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6466 expression is supposed to initialize the slot. */
6467 if (VOID_TYPE_P (TREE_TYPE (init)))
6468 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6469 else
6471 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6472 init = init_expr;
6473 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6474 init = NULL;
6475 ggc_free (init_expr);
6477 if (ret == GS_ERROR)
6479 /* PR c++/28266 Make sure this is expanded only once. */
6480 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6481 return GS_ERROR;
6483 if (init)
6484 gimplify_and_add (init, pre_p);
6486 /* If needed, push the cleanup for the temp. */
6487 if (TARGET_EXPR_CLEANUP (targ))
6489 if (CLEANUP_EH_ONLY (targ))
6490 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6491 CLEANUP_EH_ONLY (targ), pre_p);
6492 else
6493 cleanup = TARGET_EXPR_CLEANUP (targ);
6496 /* Add a clobber for the temporary going out of scope, like
6497 gimplify_bind_expr. */
6498 if (gimplify_ctxp->in_cleanup_point_expr
6499 && needs_to_live_in_memory (temp))
6501 if (flag_stack_reuse == SR_ALL)
6503 tree clobber = build_constructor (TREE_TYPE (temp),
6504 NULL);
6505 TREE_THIS_VOLATILE (clobber) = true;
6506 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6507 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6509 if (asan_poisoned_variables
6510 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6511 && dbg_cnt (asan_use_after_scope))
6513 tree asan_cleanup = build_asan_poison_call_expr (temp);
6514 if (asan_cleanup)
6516 if (unpoison_empty_seq)
6517 unpoison_it = gsi_start (*pre_p);
6519 asan_poison_variable (temp, false, &unpoison_it,
6520 unpoison_empty_seq);
6521 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6525 if (cleanup)
6526 gimple_push_cleanup (temp, cleanup, false, pre_p);
6528 /* Only expand this once. */
6529 TREE_OPERAND (targ, 3) = init;
6530 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6532 else
6533 /* We should have expanded this before. */
6534 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6536 *expr_p = temp;
6537 return GS_OK;
6540 /* Gimplification of expression trees. */
6542 /* Gimplify an expression which appears at statement context. The
6543 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6544 NULL, a new sequence is allocated.
6546 Return true if we actually added a statement to the queue. */
6548 bool
6549 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6551 gimple_seq_node last;
6553 last = gimple_seq_last (*seq_p);
6554 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6555 return last != gimple_seq_last (*seq_p);
6558 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6559 to CTX. If entries already exist, force them to be some flavor of private.
6560 If there is no enclosing parallel, do nothing. */
6562 void
6563 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6565 splay_tree_node n;
6567 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6568 return;
6572 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6573 if (n != NULL)
6575 if (n->value & GOVD_SHARED)
6576 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6577 else if (n->value & GOVD_MAP)
6578 n->value |= GOVD_MAP_TO_ONLY;
6579 else
6580 return;
6582 else if ((ctx->region_type & ORT_TARGET) != 0)
6584 if (ctx->target_map_scalars_firstprivate)
6585 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6586 else
6587 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6589 else if (ctx->region_type != ORT_WORKSHARE
6590 && ctx->region_type != ORT_SIMD
6591 && ctx->region_type != ORT_ACC
6592 && !(ctx->region_type & ORT_TARGET_DATA))
6593 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6595 ctx = ctx->outer_context;
6597 while (ctx);
6600 /* Similarly for each of the type sizes of TYPE. */
6602 static void
6603 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6605 if (type == NULL || type == error_mark_node)
6606 return;
6607 type = TYPE_MAIN_VARIANT (type);
6609 if (ctx->privatized_types->add (type))
6610 return;
6612 switch (TREE_CODE (type))
6614 case INTEGER_TYPE:
6615 case ENUMERAL_TYPE:
6616 case BOOLEAN_TYPE:
6617 case REAL_TYPE:
6618 case FIXED_POINT_TYPE:
6619 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6620 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6621 break;
6623 case ARRAY_TYPE:
6624 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6625 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6626 break;
6628 case RECORD_TYPE:
6629 case UNION_TYPE:
6630 case QUAL_UNION_TYPE:
6632 tree field;
6633 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6634 if (TREE_CODE (field) == FIELD_DECL)
6636 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6637 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6640 break;
6642 case POINTER_TYPE:
6643 case REFERENCE_TYPE:
6644 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6645 break;
6647 default:
6648 break;
6651 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6652 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6653 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6656 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6658 static void
6659 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6661 splay_tree_node n;
6662 unsigned int nflags;
6663 tree t;
6665 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6666 return;
6668 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6669 there are constructors involved somewhere. Exception is a shared clause,
6670 there is nothing privatized in that case. */
6671 if ((flags & GOVD_SHARED) == 0
6672 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6673 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6674 flags |= GOVD_SEEN;
6676 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6677 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6679 /* We shouldn't be re-adding the decl with the same data
6680 sharing class. */
6681 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6682 nflags = n->value | flags;
6683 /* The only combination of data sharing classes we should see is
6684 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6685 reduction variables to be used in data sharing clauses. */
6686 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6687 || ((nflags & GOVD_DATA_SHARE_CLASS)
6688 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6689 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6690 n->value = nflags;
6691 return;
6694 /* When adding a variable-sized variable, we have to handle all sorts
6695 of additional bits of data: the pointer replacement variable, and
6696 the parameters of the type. */
6697 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6699 /* Add the pointer replacement variable as PRIVATE if the variable
6700 replacement is private, else FIRSTPRIVATE since we'll need the
6701 address of the original variable either for SHARED, or for the
6702 copy into or out of the context. */
6703 if (!(flags & GOVD_LOCAL))
6705 if (flags & GOVD_MAP)
6706 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6707 else if (flags & GOVD_PRIVATE)
6708 nflags = GOVD_PRIVATE;
6709 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6710 && (flags & GOVD_FIRSTPRIVATE))
6711 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6712 else
6713 nflags = GOVD_FIRSTPRIVATE;
6714 nflags |= flags & GOVD_SEEN;
6715 t = DECL_VALUE_EXPR (decl);
6716 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6717 t = TREE_OPERAND (t, 0);
6718 gcc_assert (DECL_P (t));
6719 omp_add_variable (ctx, t, nflags);
6722 /* Add all of the variable and type parameters (which should have
6723 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6724 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6725 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6726 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6728 /* The variable-sized variable itself is never SHARED, only some form
6729 of PRIVATE. The sharing would take place via the pointer variable
6730 which we remapped above. */
6731 if (flags & GOVD_SHARED)
6732 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6733 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6735 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6736 alloca statement we generate for the variable, so make sure it
6737 is available. This isn't automatically needed for the SHARED
6738 case, since we won't be allocating local storage then.
6739 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6740 in this case omp_notice_variable will be called later
6741 on when it is gimplified. */
6742 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6743 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6744 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6746 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6747 && lang_hooks.decls.omp_privatize_by_reference (decl))
6749 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6751 /* Similar to the direct variable sized case above, we'll need the
6752 size of references being privatized. */
6753 if ((flags & GOVD_SHARED) == 0)
6755 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6756 if (DECL_P (t))
6757 omp_notice_variable (ctx, t, true);
6761 if (n != NULL)
6762 n->value |= flags;
6763 else
6764 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6766 /* For reductions clauses in OpenACC loop directives, by default create a
6767 copy clause on the enclosing parallel construct for carrying back the
6768 results. */
6769 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6771 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6772 while (outer_ctx)
6774 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6775 if (n != NULL)
6777 /* Ignore local variables and explicitly declared clauses. */
6778 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6779 break;
6780 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6782 /* According to the OpenACC spec, such a reduction variable
6783 should already have a copy map on a kernels construct,
6784 verify that here. */
6785 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6786 && (n->value & GOVD_MAP));
6788 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6790 /* Remove firstprivate and make it a copy map. */
6791 n->value &= ~GOVD_FIRSTPRIVATE;
6792 n->value |= GOVD_MAP;
6795 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6797 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6798 GOVD_MAP | GOVD_SEEN);
6799 break;
6801 outer_ctx = outer_ctx->outer_context;
6806 /* Notice a threadprivate variable DECL used in OMP context CTX.
6807 This just prints out diagnostics about threadprivate variable uses
6808 in untied tasks. If DECL2 is non-NULL, prevent this warning
6809 on that variable. */
6811 static bool
6812 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6813 tree decl2)
6815 splay_tree_node n;
6816 struct gimplify_omp_ctx *octx;
6818 for (octx = ctx; octx; octx = octx->outer_context)
6819 if ((octx->region_type & ORT_TARGET) != 0)
6821 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6822 if (n == NULL)
6824 error ("threadprivate variable %qE used in target region",
6825 DECL_NAME (decl));
6826 error_at (octx->location, "enclosing target region");
6827 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6829 if (decl2)
6830 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6833 if (ctx->region_type != ORT_UNTIED_TASK)
6834 return false;
6835 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6836 if (n == NULL)
6838 error ("threadprivate variable %qE used in untied task",
6839 DECL_NAME (decl));
6840 error_at (ctx->location, "enclosing task");
6841 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6843 if (decl2)
6844 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6845 return false;
6848 /* Return true if global var DECL is device resident. */
6850 static bool
6851 device_resident_p (tree decl)
6853 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6855 if (!attr)
6856 return false;
6858 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6860 tree c = TREE_VALUE (t);
6861 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6862 return true;
6865 return false;
6868 /* Return true if DECL has an ACC DECLARE attribute. */
6870 static bool
6871 is_oacc_declared (tree decl)
6873 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6874 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6875 return declared != NULL_TREE;
6878 /* Determine outer default flags for DECL mentioned in an OMP region
6879 but not declared in an enclosing clause.
6881 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6882 remapped firstprivate instead of shared. To some extent this is
6883 addressed in omp_firstprivatize_type_sizes, but not
6884 effectively. */
6886 static unsigned
6887 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6888 bool in_code, unsigned flags)
6890 enum omp_clause_default_kind default_kind = ctx->default_kind;
6891 enum omp_clause_default_kind kind;
6893 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6894 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6895 default_kind = kind;
6897 switch (default_kind)
6899 case OMP_CLAUSE_DEFAULT_NONE:
6901 const char *rtype;
6903 if (ctx->region_type & ORT_PARALLEL)
6904 rtype = "parallel";
6905 else if (ctx->region_type & ORT_TASK)
6906 rtype = "task";
6907 else if (ctx->region_type & ORT_TEAMS)
6908 rtype = "teams";
6909 else
6910 gcc_unreachable ();
6912 error ("%qE not specified in enclosing %qs",
6913 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6914 error_at (ctx->location, "enclosing %qs", rtype);
6916 /* FALLTHRU */
6917 case OMP_CLAUSE_DEFAULT_SHARED:
6918 flags |= GOVD_SHARED;
6919 break;
6920 case OMP_CLAUSE_DEFAULT_PRIVATE:
6921 flags |= GOVD_PRIVATE;
6922 break;
6923 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6924 flags |= GOVD_FIRSTPRIVATE;
6925 break;
6926 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6927 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6928 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6929 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6931 omp_notice_variable (octx, decl, in_code);
6932 for (; octx; octx = octx->outer_context)
6934 splay_tree_node n2;
6936 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6937 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6938 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6939 continue;
6940 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6942 flags |= GOVD_FIRSTPRIVATE;
6943 goto found_outer;
6945 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6947 flags |= GOVD_SHARED;
6948 goto found_outer;
6953 if (TREE_CODE (decl) == PARM_DECL
6954 || (!is_global_var (decl)
6955 && DECL_CONTEXT (decl) == current_function_decl))
6956 flags |= GOVD_FIRSTPRIVATE;
6957 else
6958 flags |= GOVD_SHARED;
6959 found_outer:
6960 break;
6962 default:
6963 gcc_unreachable ();
6966 return flags;
6970 /* Determine outer default flags for DECL mentioned in an OACC region
6971 but not declared in an enclosing clause. */
6973 static unsigned
6974 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6976 const char *rkind;
6977 bool on_device = false;
6978 bool declared = is_oacc_declared (decl);
6979 tree type = TREE_TYPE (decl);
6981 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6982 type = TREE_TYPE (type);
6984 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6985 && is_global_var (decl)
6986 && device_resident_p (decl))
6988 on_device = true;
6989 flags |= GOVD_MAP_TO_ONLY;
6992 switch (ctx->region_type)
6994 case ORT_ACC_KERNELS:
6995 rkind = "kernels";
6997 if (AGGREGATE_TYPE_P (type))
6999 /* Aggregates default to 'present_or_copy', or 'present'. */
7000 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7001 flags |= GOVD_MAP;
7002 else
7003 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7005 else
7006 /* Scalars default to 'copy'. */
7007 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7009 break;
7011 case ORT_ACC_PARALLEL:
7012 rkind = "parallel";
7014 if (on_device || declared)
7015 flags |= GOVD_MAP;
7016 else if (AGGREGATE_TYPE_P (type))
7018 /* Aggregates default to 'present_or_copy', or 'present'. */
7019 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7020 flags |= GOVD_MAP;
7021 else
7022 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7024 else
7025 /* Scalars default to 'firstprivate'. */
7026 flags |= GOVD_FIRSTPRIVATE;
7028 break;
7030 default:
7031 gcc_unreachable ();
7034 if (DECL_ARTIFICIAL (decl))
7035 ; /* We can get compiler-generated decls, and should not complain
7036 about them. */
7037 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7039 error ("%qE not specified in enclosing OpenACC %qs construct",
7040 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7041 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7043 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7044 ; /* Handled above. */
7045 else
7046 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7048 return flags;
7051 /* Record the fact that DECL was used within the OMP context CTX.
7052 IN_CODE is true when real code uses DECL, and false when we should
7053 merely emit default(none) errors. Return true if DECL is going to
7054 be remapped and thus DECL shouldn't be gimplified into its
7055 DECL_VALUE_EXPR (if any). */
7057 static bool
7058 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7060 splay_tree_node n;
7061 unsigned flags = in_code ? GOVD_SEEN : 0;
7062 bool ret = false, shared;
7064 if (error_operand_p (decl))
7065 return false;
7067 if (ctx->region_type == ORT_NONE)
7068 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7070 if (is_global_var (decl))
7072 /* Threadprivate variables are predetermined. */
7073 if (DECL_THREAD_LOCAL_P (decl))
7074 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7076 if (DECL_HAS_VALUE_EXPR_P (decl))
7078 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7080 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7081 return omp_notice_threadprivate_variable (ctx, decl, value);
7084 if (gimplify_omp_ctxp->outer_context == NULL
7085 && VAR_P (decl)
7086 && oacc_get_fn_attrib (current_function_decl))
7088 location_t loc = DECL_SOURCE_LOCATION (decl);
7090 if (lookup_attribute ("omp declare target link",
7091 DECL_ATTRIBUTES (decl)))
7093 error_at (loc,
7094 "%qE with %<link%> clause used in %<routine%> function",
7095 DECL_NAME (decl));
7096 return false;
7098 else if (!lookup_attribute ("omp declare target",
7099 DECL_ATTRIBUTES (decl)))
7101 error_at (loc,
7102 "%qE requires a %<declare%> directive for use "
7103 "in a %<routine%> function", DECL_NAME (decl));
7104 return false;
7109 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7110 if ((ctx->region_type & ORT_TARGET) != 0)
7112 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7113 if (n == NULL)
7115 unsigned nflags = flags;
7116 if (ctx->target_map_pointers_as_0len_arrays
7117 || ctx->target_map_scalars_firstprivate)
7119 bool is_declare_target = false;
7120 bool is_scalar = false;
7121 if (is_global_var (decl)
7122 && varpool_node::get_create (decl)->offloadable)
7124 struct gimplify_omp_ctx *octx;
7125 for (octx = ctx->outer_context;
7126 octx; octx = octx->outer_context)
7128 n = splay_tree_lookup (octx->variables,
7129 (splay_tree_key)decl);
7130 if (n
7131 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7132 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7133 break;
7135 is_declare_target = octx == NULL;
7137 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7138 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7139 if (is_declare_target)
7141 else if (ctx->target_map_pointers_as_0len_arrays
7142 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7143 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7144 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7145 == POINTER_TYPE)))
7146 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7147 else if (is_scalar)
7148 nflags |= GOVD_FIRSTPRIVATE;
7151 struct gimplify_omp_ctx *octx = ctx->outer_context;
7152 if ((ctx->region_type & ORT_ACC) && octx)
7154 /* Look in outer OpenACC contexts, to see if there's a
7155 data attribute for this variable. */
7156 omp_notice_variable (octx, decl, in_code);
7158 for (; octx; octx = octx->outer_context)
7160 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7161 break;
7162 splay_tree_node n2
7163 = splay_tree_lookup (octx->variables,
7164 (splay_tree_key) decl);
7165 if (n2)
7167 if (octx->region_type == ORT_ACC_HOST_DATA)
7168 error ("variable %qE declared in enclosing "
7169 "%<host_data%> region", DECL_NAME (decl));
7170 nflags |= GOVD_MAP;
7171 if (octx->region_type == ORT_ACC_DATA
7172 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7173 nflags |= GOVD_MAP_0LEN_ARRAY;
7174 goto found_outer;
7180 tree type = TREE_TYPE (decl);
7182 if (nflags == flags
7183 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7184 && lang_hooks.decls.omp_privatize_by_reference (decl))
7185 type = TREE_TYPE (type);
7186 if (nflags == flags
7187 && !lang_hooks.types.omp_mappable_type (type))
7189 error ("%qD referenced in target region does not have "
7190 "a mappable type", decl);
7191 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7193 else if (nflags == flags)
7195 if ((ctx->region_type & ORT_ACC) != 0)
7196 nflags = oacc_default_clause (ctx, decl, flags);
7197 else
7198 nflags |= GOVD_MAP;
7201 found_outer:
7202 omp_add_variable (ctx, decl, nflags);
7204 else
7206 /* If nothing changed, there's nothing left to do. */
7207 if ((n->value & flags) == flags)
7208 return ret;
7209 flags |= n->value;
7210 n->value = flags;
7212 goto do_outer;
7215 if (n == NULL)
7217 if (ctx->region_type == ORT_WORKSHARE
7218 || ctx->region_type == ORT_SIMD
7219 || ctx->region_type == ORT_ACC
7220 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7221 goto do_outer;
7223 flags = omp_default_clause (ctx, decl, in_code, flags);
7225 if ((flags & GOVD_PRIVATE)
7226 && lang_hooks.decls.omp_private_outer_ref (decl))
7227 flags |= GOVD_PRIVATE_OUTER_REF;
7229 omp_add_variable (ctx, decl, flags);
7231 shared = (flags & GOVD_SHARED) != 0;
7232 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7233 goto do_outer;
7236 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7237 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7238 && DECL_SIZE (decl))
7240 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7242 splay_tree_node n2;
7243 tree t = DECL_VALUE_EXPR (decl);
7244 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7245 t = TREE_OPERAND (t, 0);
7246 gcc_assert (DECL_P (t));
7247 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7248 n2->value |= GOVD_SEEN;
7250 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7251 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7252 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7253 != INTEGER_CST))
7255 splay_tree_node n2;
7256 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7257 gcc_assert (DECL_P (t));
7258 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7259 if (n2)
7260 omp_notice_variable (ctx, t, true);
7264 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7265 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7267 /* If nothing changed, there's nothing left to do. */
7268 if ((n->value & flags) == flags)
7269 return ret;
7270 flags |= n->value;
7271 n->value = flags;
7273 do_outer:
7274 /* If the variable is private in the current context, then we don't
7275 need to propagate anything to an outer context. */
7276 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7277 return ret;
7278 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7279 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7280 return ret;
7281 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7282 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7283 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7284 return ret;
7285 if (ctx->outer_context
7286 && omp_notice_variable (ctx->outer_context, decl, in_code))
7287 return true;
7288 return ret;
7291 /* Verify that DECL is private within CTX. If there's specific information
7292 to the contrary in the innermost scope, generate an error. */
7294 static bool
7295 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7297 splay_tree_node n;
7299 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7300 if (n != NULL)
7302 if (n->value & GOVD_SHARED)
7304 if (ctx == gimplify_omp_ctxp)
7306 if (simd)
7307 error ("iteration variable %qE is predetermined linear",
7308 DECL_NAME (decl));
7309 else
7310 error ("iteration variable %qE should be private",
7311 DECL_NAME (decl));
7312 n->value = GOVD_PRIVATE;
7313 return true;
7315 else
7316 return false;
7318 else if ((n->value & GOVD_EXPLICIT) != 0
7319 && (ctx == gimplify_omp_ctxp
7320 || (ctx->region_type == ORT_COMBINED_PARALLEL
7321 && gimplify_omp_ctxp->outer_context == ctx)))
7323 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7324 error ("iteration variable %qE should not be firstprivate",
7325 DECL_NAME (decl));
7326 else if ((n->value & GOVD_REDUCTION) != 0)
7327 error ("iteration variable %qE should not be reduction",
7328 DECL_NAME (decl));
7329 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7330 error ("iteration variable %qE should not be linear",
7331 DECL_NAME (decl));
7332 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7333 error ("iteration variable %qE should not be lastprivate",
7334 DECL_NAME (decl));
7335 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7336 error ("iteration variable %qE should not be private",
7337 DECL_NAME (decl));
7338 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7339 error ("iteration variable %qE is predetermined linear",
7340 DECL_NAME (decl));
7342 return (ctx == gimplify_omp_ctxp
7343 || (ctx->region_type == ORT_COMBINED_PARALLEL
7344 && gimplify_omp_ctxp->outer_context == ctx));
7347 if (ctx->region_type != ORT_WORKSHARE
7348 && ctx->region_type != ORT_SIMD
7349 && ctx->region_type != ORT_ACC)
7350 return false;
7351 else if (ctx->outer_context)
7352 return omp_is_private (ctx->outer_context, decl, simd);
7353 return false;
7356 /* Return true if DECL is private within a parallel region
7357 that binds to the current construct's context or in parallel
7358 region's REDUCTION clause. */
7360 static bool
7361 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7363 splay_tree_node n;
7367 ctx = ctx->outer_context;
7368 if (ctx == NULL)
7370 if (is_global_var (decl))
7371 return false;
7373 /* References might be private, but might be shared too,
7374 when checking for copyprivate, assume they might be
7375 private, otherwise assume they might be shared. */
7376 if (copyprivate)
7377 return true;
7379 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7380 return false;
7382 /* Treat C++ privatized non-static data members outside
7383 of the privatization the same. */
7384 if (omp_member_access_dummy_var (decl))
7385 return false;
7387 return true;
7390 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7392 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7393 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7394 continue;
7396 if (n != NULL)
7398 if ((n->value & GOVD_LOCAL) != 0
7399 && omp_member_access_dummy_var (decl))
7400 return false;
7401 return (n->value & GOVD_SHARED) == 0;
7404 while (ctx->region_type == ORT_WORKSHARE
7405 || ctx->region_type == ORT_SIMD
7406 || ctx->region_type == ORT_ACC);
7407 return false;
7410 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7412 static tree
7413 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7415 tree t = *tp;
7417 /* If this node has been visited, unmark it and keep looking. */
7418 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7419 return t;
7421 if (IS_TYPE_OR_DECL_P (t))
7422 *walk_subtrees = 0;
7423 return NULL_TREE;
7426 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7427 and previous omp contexts. */
7429 static void
7430 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7431 enum omp_region_type region_type,
7432 enum tree_code code)
7434 struct gimplify_omp_ctx *ctx, *outer_ctx;
7435 tree c;
7436 hash_map<tree, tree> *struct_map_to_clause = NULL;
7437 tree *prev_list_p = NULL;
7439 ctx = new_omp_context (region_type);
7440 outer_ctx = ctx->outer_context;
7441 if (code == OMP_TARGET)
7443 if (!lang_GNU_Fortran ())
7444 ctx->target_map_pointers_as_0len_arrays = true;
7445 ctx->target_map_scalars_firstprivate = true;
7447 if (!lang_GNU_Fortran ())
7448 switch (code)
7450 case OMP_TARGET:
7451 case OMP_TARGET_DATA:
7452 case OMP_TARGET_ENTER_DATA:
7453 case OMP_TARGET_EXIT_DATA:
7454 case OACC_DECLARE:
7455 case OACC_HOST_DATA:
7456 ctx->target_firstprivatize_array_bases = true;
7457 default:
7458 break;
7461 while ((c = *list_p) != NULL)
7463 bool remove = false;
7464 bool notice_outer = true;
7465 const char *check_non_private = NULL;
7466 unsigned int flags;
7467 tree decl;
7469 switch (OMP_CLAUSE_CODE (c))
7471 case OMP_CLAUSE_PRIVATE:
7472 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7473 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7475 flags |= GOVD_PRIVATE_OUTER_REF;
7476 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7478 else
7479 notice_outer = false;
7480 goto do_add;
7481 case OMP_CLAUSE_SHARED:
7482 flags = GOVD_SHARED | GOVD_EXPLICIT;
7483 goto do_add;
7484 case OMP_CLAUSE_FIRSTPRIVATE:
7485 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7486 check_non_private = "firstprivate";
7487 goto do_add;
7488 case OMP_CLAUSE_LASTPRIVATE:
7489 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7490 check_non_private = "lastprivate";
7491 decl = OMP_CLAUSE_DECL (c);
7492 if (error_operand_p (decl))
7493 goto do_add;
7494 else if (outer_ctx
7495 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7496 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7497 && splay_tree_lookup (outer_ctx->variables,
7498 (splay_tree_key) decl) == NULL)
7500 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7501 if (outer_ctx->outer_context)
7502 omp_notice_variable (outer_ctx->outer_context, decl, true);
7504 else if (outer_ctx
7505 && (outer_ctx->region_type & ORT_TASK) != 0
7506 && outer_ctx->combined_loop
7507 && splay_tree_lookup (outer_ctx->variables,
7508 (splay_tree_key) decl) == NULL)
7510 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7511 if (outer_ctx->outer_context)
7512 omp_notice_variable (outer_ctx->outer_context, decl, true);
7514 else if (outer_ctx
7515 && (outer_ctx->region_type == ORT_WORKSHARE
7516 || outer_ctx->region_type == ORT_ACC)
7517 && outer_ctx->combined_loop
7518 && splay_tree_lookup (outer_ctx->variables,
7519 (splay_tree_key) decl) == NULL
7520 && !omp_check_private (outer_ctx, decl, false))
7522 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7523 if (outer_ctx->outer_context
7524 && (outer_ctx->outer_context->region_type
7525 == ORT_COMBINED_PARALLEL)
7526 && splay_tree_lookup (outer_ctx->outer_context->variables,
7527 (splay_tree_key) decl) == NULL)
7529 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7530 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7531 if (octx->outer_context)
7533 octx = octx->outer_context;
7534 if (octx->region_type == ORT_WORKSHARE
7535 && octx->combined_loop
7536 && splay_tree_lookup (octx->variables,
7537 (splay_tree_key) decl) == NULL
7538 && !omp_check_private (octx, decl, false))
7540 omp_add_variable (octx, decl,
7541 GOVD_LASTPRIVATE | GOVD_SEEN);
7542 octx = octx->outer_context;
7543 if (octx
7544 && octx->region_type == ORT_COMBINED_TEAMS
7545 && (splay_tree_lookup (octx->variables,
7546 (splay_tree_key) decl)
7547 == NULL))
7549 omp_add_variable (octx, decl,
7550 GOVD_SHARED | GOVD_SEEN);
7551 octx = octx->outer_context;
7554 if (octx)
7555 omp_notice_variable (octx, decl, true);
7558 else if (outer_ctx->outer_context)
7559 omp_notice_variable (outer_ctx->outer_context, decl, true);
7561 goto do_add;
7562 case OMP_CLAUSE_REDUCTION:
7563 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7564 /* OpenACC permits reductions on private variables. */
7565 if (!(region_type & ORT_ACC))
7566 check_non_private = "reduction";
7567 decl = OMP_CLAUSE_DECL (c);
7568 if (TREE_CODE (decl) == MEM_REF)
7570 tree type = TREE_TYPE (decl);
7571 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7572 NULL, is_gimple_val, fb_rvalue, false)
7573 == GS_ERROR)
7575 remove = true;
7576 break;
7578 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7579 if (DECL_P (v))
7581 omp_firstprivatize_variable (ctx, v);
7582 omp_notice_variable (ctx, v, true);
7584 decl = TREE_OPERAND (decl, 0);
7585 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7587 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7588 NULL, is_gimple_val, fb_rvalue, false)
7589 == GS_ERROR)
7591 remove = true;
7592 break;
7594 v = TREE_OPERAND (decl, 1);
7595 if (DECL_P (v))
7597 omp_firstprivatize_variable (ctx, v);
7598 omp_notice_variable (ctx, v, true);
7600 decl = TREE_OPERAND (decl, 0);
7602 if (TREE_CODE (decl) == ADDR_EXPR
7603 || TREE_CODE (decl) == INDIRECT_REF)
7604 decl = TREE_OPERAND (decl, 0);
7606 goto do_add_decl;
7607 case OMP_CLAUSE_LINEAR:
7608 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7609 is_gimple_val, fb_rvalue) == GS_ERROR)
7611 remove = true;
7612 break;
7614 else
7616 if (code == OMP_SIMD
7617 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7619 struct gimplify_omp_ctx *octx = outer_ctx;
7620 if (octx
7621 && octx->region_type == ORT_WORKSHARE
7622 && octx->combined_loop
7623 && !octx->distribute)
7625 if (octx->outer_context
7626 && (octx->outer_context->region_type
7627 == ORT_COMBINED_PARALLEL))
7628 octx = octx->outer_context->outer_context;
7629 else
7630 octx = octx->outer_context;
7632 if (octx
7633 && octx->region_type == ORT_WORKSHARE
7634 && octx->combined_loop
7635 && octx->distribute)
7637 error_at (OMP_CLAUSE_LOCATION (c),
7638 "%<linear%> clause for variable other than "
7639 "loop iterator specified on construct "
7640 "combined with %<distribute%>");
7641 remove = true;
7642 break;
7645 /* For combined #pragma omp parallel for simd, need to put
7646 lastprivate and perhaps firstprivate too on the
7647 parallel. Similarly for #pragma omp for simd. */
7648 struct gimplify_omp_ctx *octx = outer_ctx;
7649 decl = NULL_TREE;
7652 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7653 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7654 break;
7655 decl = OMP_CLAUSE_DECL (c);
7656 if (error_operand_p (decl))
7658 decl = NULL_TREE;
7659 break;
7661 flags = GOVD_SEEN;
7662 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7663 flags |= GOVD_FIRSTPRIVATE;
7664 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7665 flags |= GOVD_LASTPRIVATE;
7666 if (octx
7667 && octx->region_type == ORT_WORKSHARE
7668 && octx->combined_loop)
7670 if (octx->outer_context
7671 && (octx->outer_context->region_type
7672 == ORT_COMBINED_PARALLEL))
7673 octx = octx->outer_context;
7674 else if (omp_check_private (octx, decl, false))
7675 break;
7677 else if (octx
7678 && (octx->region_type & ORT_TASK) != 0
7679 && octx->combined_loop)
7681 else if (octx
7682 && octx->region_type == ORT_COMBINED_PARALLEL
7683 && ctx->region_type == ORT_WORKSHARE
7684 && octx == outer_ctx)
7685 flags = GOVD_SEEN | GOVD_SHARED;
7686 else if (octx
7687 && octx->region_type == ORT_COMBINED_TEAMS)
7688 flags = GOVD_SEEN | GOVD_SHARED;
7689 else if (octx
7690 && octx->region_type == ORT_COMBINED_TARGET)
7692 flags &= ~GOVD_LASTPRIVATE;
7693 if (flags == GOVD_SEEN)
7694 break;
7696 else
7697 break;
7698 splay_tree_node on
7699 = splay_tree_lookup (octx->variables,
7700 (splay_tree_key) decl);
7701 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7703 octx = NULL;
7704 break;
7706 omp_add_variable (octx, decl, flags);
7707 if (octx->outer_context == NULL)
7708 break;
7709 octx = octx->outer_context;
7711 while (1);
7712 if (octx
7713 && decl
7714 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7715 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7716 omp_notice_variable (octx, decl, true);
7718 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7719 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7720 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7722 notice_outer = false;
7723 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7725 goto do_add;
7727 case OMP_CLAUSE_MAP:
7728 decl = OMP_CLAUSE_DECL (c);
7729 if (error_operand_p (decl))
7730 remove = true;
7731 switch (code)
7733 case OMP_TARGET:
7734 break;
7735 case OACC_DATA:
7736 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7737 break;
7738 /* FALLTHRU */
7739 case OMP_TARGET_DATA:
7740 case OMP_TARGET_ENTER_DATA:
7741 case OMP_TARGET_EXIT_DATA:
7742 case OACC_ENTER_DATA:
7743 case OACC_EXIT_DATA:
7744 case OACC_HOST_DATA:
7745 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7746 || (OMP_CLAUSE_MAP_KIND (c)
7747 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7748 /* For target {,enter ,exit }data only the array slice is
7749 mapped, but not the pointer to it. */
7750 remove = true;
7751 break;
7752 default:
7753 break;
7755 if (remove)
7756 break;
7757 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7759 struct gimplify_omp_ctx *octx;
7760 for (octx = outer_ctx; octx; octx = octx->outer_context)
7762 if (octx->region_type != ORT_ACC_HOST_DATA)
7763 break;
7764 splay_tree_node n2
7765 = splay_tree_lookup (octx->variables,
7766 (splay_tree_key) decl);
7767 if (n2)
7768 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7769 "declared in enclosing %<host_data%> region",
7770 DECL_NAME (decl));
7773 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7774 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7775 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7776 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7777 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7779 remove = true;
7780 break;
7782 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7783 || (OMP_CLAUSE_MAP_KIND (c)
7784 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7785 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7787 OMP_CLAUSE_SIZE (c)
7788 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7789 false);
7790 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7791 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7793 if (!DECL_P (decl))
7795 tree d = decl, *pd;
7796 if (TREE_CODE (d) == ARRAY_REF)
7798 while (TREE_CODE (d) == ARRAY_REF)
7799 d = TREE_OPERAND (d, 0);
7800 if (TREE_CODE (d) == COMPONENT_REF
7801 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7802 decl = d;
7804 pd = &OMP_CLAUSE_DECL (c);
7805 if (d == decl
7806 && TREE_CODE (decl) == INDIRECT_REF
7807 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7808 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7809 == REFERENCE_TYPE))
7811 pd = &TREE_OPERAND (decl, 0);
7812 decl = TREE_OPERAND (decl, 0);
7814 if (TREE_CODE (decl) == COMPONENT_REF)
7816 while (TREE_CODE (decl) == COMPONENT_REF)
7817 decl = TREE_OPERAND (decl, 0);
7818 if (TREE_CODE (decl) == INDIRECT_REF
7819 && DECL_P (TREE_OPERAND (decl, 0))
7820 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7821 == REFERENCE_TYPE))
7822 decl = TREE_OPERAND (decl, 0);
7824 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7825 == GS_ERROR)
7827 remove = true;
7828 break;
7830 if (DECL_P (decl))
7832 if (error_operand_p (decl))
7834 remove = true;
7835 break;
7838 tree stype = TREE_TYPE (decl);
7839 if (TREE_CODE (stype) == REFERENCE_TYPE)
7840 stype = TREE_TYPE (stype);
7841 if (TYPE_SIZE_UNIT (stype) == NULL
7842 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7844 error_at (OMP_CLAUSE_LOCATION (c),
7845 "mapping field %qE of variable length "
7846 "structure", OMP_CLAUSE_DECL (c));
7847 remove = true;
7848 break;
7851 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7853 /* Error recovery. */
7854 if (prev_list_p == NULL)
7856 remove = true;
7857 break;
7859 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7861 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7862 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7864 remove = true;
7865 break;
7870 tree offset;
7871 HOST_WIDE_INT bitsize, bitpos;
7872 machine_mode mode;
7873 int unsignedp, reversep, volatilep = 0;
7874 tree base = OMP_CLAUSE_DECL (c);
7875 while (TREE_CODE (base) == ARRAY_REF)
7876 base = TREE_OPERAND (base, 0);
7877 if (TREE_CODE (base) == INDIRECT_REF)
7878 base = TREE_OPERAND (base, 0);
7879 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7880 &mode, &unsignedp, &reversep,
7881 &volatilep);
7882 tree orig_base = base;
7883 if ((TREE_CODE (base) == INDIRECT_REF
7884 || (TREE_CODE (base) == MEM_REF
7885 && integer_zerop (TREE_OPERAND (base, 1))))
7886 && DECL_P (TREE_OPERAND (base, 0))
7887 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7888 == REFERENCE_TYPE))
7889 base = TREE_OPERAND (base, 0);
7890 gcc_assert (base == decl
7891 && (offset == NULL_TREE
7892 || TREE_CODE (offset) == INTEGER_CST));
7894 splay_tree_node n
7895 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7896 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7897 == GOMP_MAP_ALWAYS_POINTER);
7898 if (n == NULL || (n->value & GOVD_MAP) == 0)
7900 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7901 OMP_CLAUSE_MAP);
7902 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7903 if (orig_base != base)
7904 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7905 else
7906 OMP_CLAUSE_DECL (l) = decl;
7907 OMP_CLAUSE_SIZE (l) = size_int (1);
7908 if (struct_map_to_clause == NULL)
7909 struct_map_to_clause = new hash_map<tree, tree>;
7910 struct_map_to_clause->put (decl, l);
7911 if (ptr)
7913 enum gomp_map_kind mkind
7914 = code == OMP_TARGET_EXIT_DATA
7915 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7916 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7917 OMP_CLAUSE_MAP);
7918 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7919 OMP_CLAUSE_DECL (c2)
7920 = unshare_expr (OMP_CLAUSE_DECL (c));
7921 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7922 OMP_CLAUSE_SIZE (c2)
7923 = TYPE_SIZE_UNIT (ptr_type_node);
7924 OMP_CLAUSE_CHAIN (l) = c2;
7925 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7927 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7928 tree c3
7929 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7930 OMP_CLAUSE_MAP);
7931 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7932 OMP_CLAUSE_DECL (c3)
7933 = unshare_expr (OMP_CLAUSE_DECL (c4));
7934 OMP_CLAUSE_SIZE (c3)
7935 = TYPE_SIZE_UNIT (ptr_type_node);
7936 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7937 OMP_CLAUSE_CHAIN (c2) = c3;
7939 *prev_list_p = l;
7940 prev_list_p = NULL;
7942 else
7944 OMP_CLAUSE_CHAIN (l) = c;
7945 *list_p = l;
7946 list_p = &OMP_CLAUSE_CHAIN (l);
7948 if (orig_base != base && code == OMP_TARGET)
7950 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7951 OMP_CLAUSE_MAP);
7952 enum gomp_map_kind mkind
7953 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7954 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7955 OMP_CLAUSE_DECL (c2) = decl;
7956 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7957 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7958 OMP_CLAUSE_CHAIN (l) = c2;
7960 flags = GOVD_MAP | GOVD_EXPLICIT;
7961 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7962 flags |= GOVD_SEEN;
7963 goto do_add_decl;
7965 else
7967 tree *osc = struct_map_to_clause->get (decl);
7968 tree *sc = NULL, *scp = NULL;
7969 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7970 n->value |= GOVD_SEEN;
7971 offset_int o1, o2;
7972 if (offset)
7973 o1 = wi::to_offset (offset);
7974 else
7975 o1 = 0;
7976 if (bitpos)
7977 o1 = o1 + bitpos / BITS_PER_UNIT;
7978 sc = &OMP_CLAUSE_CHAIN (*osc);
7979 if (*sc != c
7980 && (OMP_CLAUSE_MAP_KIND (*sc)
7981 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7982 sc = &OMP_CLAUSE_CHAIN (*sc);
7983 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7984 if (ptr && sc == prev_list_p)
7985 break;
7986 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7987 != COMPONENT_REF
7988 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7989 != INDIRECT_REF)
7990 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7991 != ARRAY_REF))
7992 break;
7993 else
7995 tree offset2;
7996 HOST_WIDE_INT bitsize2, bitpos2;
7997 base = OMP_CLAUSE_DECL (*sc);
7998 if (TREE_CODE (base) == ARRAY_REF)
8000 while (TREE_CODE (base) == ARRAY_REF)
8001 base = TREE_OPERAND (base, 0);
8002 if (TREE_CODE (base) != COMPONENT_REF
8003 || (TREE_CODE (TREE_TYPE (base))
8004 != ARRAY_TYPE))
8005 break;
8007 else if (TREE_CODE (base) == INDIRECT_REF
8008 && (TREE_CODE (TREE_OPERAND (base, 0))
8009 == COMPONENT_REF)
8010 && (TREE_CODE (TREE_TYPE
8011 (TREE_OPERAND (base, 0)))
8012 == REFERENCE_TYPE))
8013 base = TREE_OPERAND (base, 0);
8014 base = get_inner_reference (base, &bitsize2,
8015 &bitpos2, &offset2,
8016 &mode, &unsignedp,
8017 &reversep, &volatilep);
8018 if ((TREE_CODE (base) == INDIRECT_REF
8019 || (TREE_CODE (base) == MEM_REF
8020 && integer_zerop (TREE_OPERAND (base,
8021 1))))
8022 && DECL_P (TREE_OPERAND (base, 0))
8023 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8024 0)))
8025 == REFERENCE_TYPE))
8026 base = TREE_OPERAND (base, 0);
8027 if (base != decl)
8028 break;
8029 if (scp)
8030 continue;
8031 gcc_assert (offset == NULL_TREE
8032 || TREE_CODE (offset) == INTEGER_CST);
8033 tree d1 = OMP_CLAUSE_DECL (*sc);
8034 tree d2 = OMP_CLAUSE_DECL (c);
8035 while (TREE_CODE (d1) == ARRAY_REF)
8036 d1 = TREE_OPERAND (d1, 0);
8037 while (TREE_CODE (d2) == ARRAY_REF)
8038 d2 = TREE_OPERAND (d2, 0);
8039 if (TREE_CODE (d1) == INDIRECT_REF)
8040 d1 = TREE_OPERAND (d1, 0);
8041 if (TREE_CODE (d2) == INDIRECT_REF)
8042 d2 = TREE_OPERAND (d2, 0);
8043 while (TREE_CODE (d1) == COMPONENT_REF)
8044 if (TREE_CODE (d2) == COMPONENT_REF
8045 && TREE_OPERAND (d1, 1)
8046 == TREE_OPERAND (d2, 1))
8048 d1 = TREE_OPERAND (d1, 0);
8049 d2 = TREE_OPERAND (d2, 0);
8051 else
8052 break;
8053 if (d1 == d2)
8055 error_at (OMP_CLAUSE_LOCATION (c),
8056 "%qE appears more than once in map "
8057 "clauses", OMP_CLAUSE_DECL (c));
8058 remove = true;
8059 break;
8061 if (offset2)
8062 o2 = wi::to_offset (offset2);
8063 else
8064 o2 = 0;
8065 if (bitpos2)
8066 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8067 if (wi::ltu_p (o1, o2)
8068 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8070 if (ptr)
8071 scp = sc;
8072 else
8073 break;
8076 if (remove)
8077 break;
8078 OMP_CLAUSE_SIZE (*osc)
8079 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8080 size_one_node);
8081 if (ptr)
8083 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8084 OMP_CLAUSE_MAP);
8085 tree cl = NULL_TREE;
8086 enum gomp_map_kind mkind
8087 = code == OMP_TARGET_EXIT_DATA
8088 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8089 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8090 OMP_CLAUSE_DECL (c2)
8091 = unshare_expr (OMP_CLAUSE_DECL (c));
8092 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8093 OMP_CLAUSE_SIZE (c2)
8094 = TYPE_SIZE_UNIT (ptr_type_node);
8095 cl = scp ? *prev_list_p : c2;
8096 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8098 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8099 tree c3
8100 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8101 OMP_CLAUSE_MAP);
8102 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8103 OMP_CLAUSE_DECL (c3)
8104 = unshare_expr (OMP_CLAUSE_DECL (c4));
8105 OMP_CLAUSE_SIZE (c3)
8106 = TYPE_SIZE_UNIT (ptr_type_node);
8107 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8108 if (!scp)
8109 OMP_CLAUSE_CHAIN (c2) = c3;
8110 else
8111 cl = c3;
8113 if (scp)
8114 *scp = c2;
8115 if (sc == prev_list_p)
8117 *sc = cl;
8118 prev_list_p = NULL;
8120 else
8122 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8123 list_p = prev_list_p;
8124 prev_list_p = NULL;
8125 OMP_CLAUSE_CHAIN (c) = *sc;
8126 *sc = cl;
8127 continue;
8130 else if (*sc != c)
8132 *list_p = OMP_CLAUSE_CHAIN (c);
8133 OMP_CLAUSE_CHAIN (c) = *sc;
8134 *sc = c;
8135 continue;
8139 if (!remove
8140 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8141 && OMP_CLAUSE_CHAIN (c)
8142 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8143 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8144 == GOMP_MAP_ALWAYS_POINTER))
8145 prev_list_p = list_p;
8146 break;
8148 flags = GOVD_MAP | GOVD_EXPLICIT;
8149 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8150 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8151 flags |= GOVD_MAP_ALWAYS_TO;
8152 goto do_add;
8154 case OMP_CLAUSE_DEPEND:
8155 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8157 tree deps = OMP_CLAUSE_DECL (c);
8158 while (deps && TREE_CODE (deps) == TREE_LIST)
8160 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8161 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8162 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8163 pre_p, NULL, is_gimple_val, fb_rvalue);
8164 deps = TREE_CHAIN (deps);
8166 break;
8168 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8169 break;
8170 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8172 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8173 NULL, is_gimple_val, fb_rvalue);
8174 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8176 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8178 remove = true;
8179 break;
8181 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8182 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8183 is_gimple_val, fb_rvalue) == GS_ERROR)
8185 remove = true;
8186 break;
8188 break;
8190 case OMP_CLAUSE_TO:
8191 case OMP_CLAUSE_FROM:
8192 case OMP_CLAUSE__CACHE_:
8193 decl = OMP_CLAUSE_DECL (c);
8194 if (error_operand_p (decl))
8196 remove = true;
8197 break;
8199 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8200 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8201 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8202 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8203 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8205 remove = true;
8206 break;
8208 if (!DECL_P (decl))
8210 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8211 NULL, is_gimple_lvalue, fb_lvalue)
8212 == GS_ERROR)
8214 remove = true;
8215 break;
8217 break;
8219 goto do_notice;
8221 case OMP_CLAUSE_USE_DEVICE_PTR:
8222 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8223 goto do_add;
8224 case OMP_CLAUSE_IS_DEVICE_PTR:
8225 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8226 goto do_add;
8228 do_add:
8229 decl = OMP_CLAUSE_DECL (c);
8230 do_add_decl:
8231 if (error_operand_p (decl))
8233 remove = true;
8234 break;
8236 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8238 tree t = omp_member_access_dummy_var (decl);
8239 if (t)
8241 tree v = DECL_VALUE_EXPR (decl);
8242 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8243 if (outer_ctx)
8244 omp_notice_variable (outer_ctx, t, true);
8247 if (code == OACC_DATA
8248 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8249 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8250 flags |= GOVD_MAP_0LEN_ARRAY;
8251 omp_add_variable (ctx, decl, flags);
8252 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8253 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8255 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8256 GOVD_LOCAL | GOVD_SEEN);
8257 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8258 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8259 find_decl_expr,
8260 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8261 NULL) == NULL_TREE)
8262 omp_add_variable (ctx,
8263 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8264 GOVD_LOCAL | GOVD_SEEN);
8265 gimplify_omp_ctxp = ctx;
8266 push_gimplify_context ();
8268 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8269 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8271 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8272 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8273 pop_gimplify_context
8274 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8275 push_gimplify_context ();
8276 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8277 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8278 pop_gimplify_context
8279 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8280 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8281 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8283 gimplify_omp_ctxp = outer_ctx;
8285 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8286 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8288 gimplify_omp_ctxp = ctx;
8289 push_gimplify_context ();
8290 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8292 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8293 NULL, NULL);
8294 TREE_SIDE_EFFECTS (bind) = 1;
8295 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8296 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8298 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8299 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8300 pop_gimplify_context
8301 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8302 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8304 gimplify_omp_ctxp = outer_ctx;
8306 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8307 && OMP_CLAUSE_LINEAR_STMT (c))
8309 gimplify_omp_ctxp = ctx;
8310 push_gimplify_context ();
8311 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8313 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8314 NULL, NULL);
8315 TREE_SIDE_EFFECTS (bind) = 1;
8316 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8317 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8319 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8320 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8321 pop_gimplify_context
8322 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8323 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8325 gimplify_omp_ctxp = outer_ctx;
8327 if (notice_outer)
8328 goto do_notice;
8329 break;
8331 case OMP_CLAUSE_COPYIN:
8332 case OMP_CLAUSE_COPYPRIVATE:
8333 decl = OMP_CLAUSE_DECL (c);
8334 if (error_operand_p (decl))
8336 remove = true;
8337 break;
8339 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8340 && !remove
8341 && !omp_check_private (ctx, decl, true))
8343 remove = true;
8344 if (is_global_var (decl))
8346 if (DECL_THREAD_LOCAL_P (decl))
8347 remove = false;
8348 else if (DECL_HAS_VALUE_EXPR_P (decl))
8350 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8352 if (value
8353 && DECL_P (value)
8354 && DECL_THREAD_LOCAL_P (value))
8355 remove = false;
8358 if (remove)
8359 error_at (OMP_CLAUSE_LOCATION (c),
8360 "copyprivate variable %qE is not threadprivate"
8361 " or private in outer context", DECL_NAME (decl));
8363 do_notice:
8364 if (outer_ctx)
8365 omp_notice_variable (outer_ctx, decl, true);
8366 if (check_non_private
8367 && region_type == ORT_WORKSHARE
8368 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8369 || decl == OMP_CLAUSE_DECL (c)
8370 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8371 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8372 == ADDR_EXPR
8373 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8374 == POINTER_PLUS_EXPR
8375 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8376 (OMP_CLAUSE_DECL (c), 0), 0))
8377 == ADDR_EXPR)))))
8378 && omp_check_private (ctx, decl, false))
8380 error ("%s variable %qE is private in outer context",
8381 check_non_private, DECL_NAME (decl));
8382 remove = true;
8384 break;
8386 case OMP_CLAUSE_IF:
8387 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8388 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8390 const char *p[2];
8391 for (int i = 0; i < 2; i++)
8392 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8394 case OMP_PARALLEL: p[i] = "parallel"; break;
8395 case OMP_TASK: p[i] = "task"; break;
8396 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8397 case OMP_TARGET_DATA: p[i] = "target data"; break;
8398 case OMP_TARGET: p[i] = "target"; break;
8399 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8400 case OMP_TARGET_ENTER_DATA:
8401 p[i] = "target enter data"; break;
8402 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8403 default: gcc_unreachable ();
8405 error_at (OMP_CLAUSE_LOCATION (c),
8406 "expected %qs %<if%> clause modifier rather than %qs",
8407 p[0], p[1]);
8408 remove = true;
8410 /* Fall through. */
8412 case OMP_CLAUSE_FINAL:
8413 OMP_CLAUSE_OPERAND (c, 0)
8414 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8415 /* Fall through. */
8417 case OMP_CLAUSE_SCHEDULE:
8418 case OMP_CLAUSE_NUM_THREADS:
8419 case OMP_CLAUSE_NUM_TEAMS:
8420 case OMP_CLAUSE_THREAD_LIMIT:
8421 case OMP_CLAUSE_DIST_SCHEDULE:
8422 case OMP_CLAUSE_DEVICE:
8423 case OMP_CLAUSE_PRIORITY:
8424 case OMP_CLAUSE_GRAINSIZE:
8425 case OMP_CLAUSE_NUM_TASKS:
8426 case OMP_CLAUSE_HINT:
8427 case OMP_CLAUSE__CILK_FOR_COUNT_:
8428 case OMP_CLAUSE_ASYNC:
8429 case OMP_CLAUSE_WAIT:
8430 case OMP_CLAUSE_NUM_GANGS:
8431 case OMP_CLAUSE_NUM_WORKERS:
8432 case OMP_CLAUSE_VECTOR_LENGTH:
8433 case OMP_CLAUSE_WORKER:
8434 case OMP_CLAUSE_VECTOR:
8435 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8436 is_gimple_val, fb_rvalue) == GS_ERROR)
8437 remove = true;
8438 break;
8440 case OMP_CLAUSE_GANG:
8441 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8442 is_gimple_val, fb_rvalue) == GS_ERROR)
8443 remove = true;
8444 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8445 is_gimple_val, fb_rvalue) == GS_ERROR)
8446 remove = true;
8447 break;
8449 case OMP_CLAUSE_NOWAIT:
8450 case OMP_CLAUSE_ORDERED:
8451 case OMP_CLAUSE_UNTIED:
8452 case OMP_CLAUSE_COLLAPSE:
8453 case OMP_CLAUSE_TILE:
8454 case OMP_CLAUSE_AUTO:
8455 case OMP_CLAUSE_SEQ:
8456 case OMP_CLAUSE_INDEPENDENT:
8457 case OMP_CLAUSE_MERGEABLE:
8458 case OMP_CLAUSE_PROC_BIND:
8459 case OMP_CLAUSE_SAFELEN:
8460 case OMP_CLAUSE_SIMDLEN:
8461 case OMP_CLAUSE_NOGROUP:
8462 case OMP_CLAUSE_THREADS:
8463 case OMP_CLAUSE_SIMD:
8464 break;
8466 case OMP_CLAUSE_DEFAULTMAP:
8467 ctx->target_map_scalars_firstprivate = false;
8468 break;
8470 case OMP_CLAUSE_ALIGNED:
8471 decl = OMP_CLAUSE_DECL (c);
8472 if (error_operand_p (decl))
8474 remove = true;
8475 break;
8477 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8478 is_gimple_val, fb_rvalue) == GS_ERROR)
8480 remove = true;
8481 break;
8483 if (!is_global_var (decl)
8484 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8485 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8486 break;
8488 case OMP_CLAUSE_DEFAULT:
8489 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8490 break;
8492 default:
8493 gcc_unreachable ();
8496 if (code == OACC_DATA
8497 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8498 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8499 remove = true;
8500 if (remove)
8501 *list_p = OMP_CLAUSE_CHAIN (c);
8502 else
8503 list_p = &OMP_CLAUSE_CHAIN (c);
8506 gimplify_omp_ctxp = ctx;
8507 if (struct_map_to_clause)
8508 delete struct_map_to_clause;
8511 /* Return true if DECL is a candidate for shared to firstprivate
8512 optimization. We only consider non-addressable scalars, not
8513 too big, and not references. */
8515 static bool
8516 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8518 if (TREE_ADDRESSABLE (decl))
8519 return false;
8520 tree type = TREE_TYPE (decl);
8521 if (!is_gimple_reg_type (type)
8522 || TREE_CODE (type) == REFERENCE_TYPE
8523 || TREE_ADDRESSABLE (type))
8524 return false;
8525 /* Don't optimize too large decls, as each thread/task will have
8526 its own. */
8527 HOST_WIDE_INT len = int_size_in_bytes (type);
8528 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8529 return false;
8530 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8531 return false;
8532 return true;
8535 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8536 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8537 GOVD_WRITTEN in outer contexts. */
8539 static void
8540 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8542 for (; ctx; ctx = ctx->outer_context)
8544 splay_tree_node n = splay_tree_lookup (ctx->variables,
8545 (splay_tree_key) decl);
8546 if (n == NULL)
8547 continue;
8548 else if (n->value & GOVD_SHARED)
8550 n->value |= GOVD_WRITTEN;
8551 return;
8553 else if (n->value & GOVD_DATA_SHARE_CLASS)
8554 return;
8558 /* Helper callback for walk_gimple_seq to discover possible stores
8559 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8560 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8561 for those. */
8563 static tree
8564 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8566 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8568 *walk_subtrees = 0;
8569 if (!wi->is_lhs)
8570 return NULL_TREE;
8572 tree op = *tp;
8575 if (handled_component_p (op))
8576 op = TREE_OPERAND (op, 0);
8577 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8578 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8579 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8580 else
8581 break;
8583 while (1);
8584 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8585 return NULL_TREE;
8587 omp_mark_stores (gimplify_omp_ctxp, op);
8588 return NULL_TREE;
8591 /* Helper callback for walk_gimple_seq to discover possible stores
8592 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8593 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8594 for those. */
8596 static tree
8597 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8598 bool *handled_ops_p,
8599 struct walk_stmt_info *wi)
8601 gimple *stmt = gsi_stmt (*gsi_p);
8602 switch (gimple_code (stmt))
8604 /* Don't recurse on OpenMP constructs for which
8605 gimplify_adjust_omp_clauses already handled the bodies,
8606 except handle gimple_omp_for_pre_body. */
8607 case GIMPLE_OMP_FOR:
8608 *handled_ops_p = true;
8609 if (gimple_omp_for_pre_body (stmt))
8610 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8611 omp_find_stores_stmt, omp_find_stores_op, wi);
8612 break;
8613 case GIMPLE_OMP_PARALLEL:
8614 case GIMPLE_OMP_TASK:
8615 case GIMPLE_OMP_SECTIONS:
8616 case GIMPLE_OMP_SINGLE:
8617 case GIMPLE_OMP_TARGET:
8618 case GIMPLE_OMP_TEAMS:
8619 case GIMPLE_OMP_CRITICAL:
8620 *handled_ops_p = true;
8621 break;
8622 default:
8623 break;
8625 return NULL_TREE;
8628 struct gimplify_adjust_omp_clauses_data
8630 tree *list_p;
8631 gimple_seq *pre_p;
8634 /* For all variables that were not actually used within the context,
8635 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8637 static int
8638 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8640 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8641 gimple_seq *pre_p
8642 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8643 tree decl = (tree) n->key;
8644 unsigned flags = n->value;
8645 enum omp_clause_code code;
8646 tree clause;
8647 bool private_debug;
8649 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8650 return 0;
8651 if ((flags & GOVD_SEEN) == 0)
8652 return 0;
8653 if (flags & GOVD_DEBUG_PRIVATE)
8655 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8656 private_debug = true;
8658 else if (flags & GOVD_MAP)
8659 private_debug = false;
8660 else
8661 private_debug
8662 = lang_hooks.decls.omp_private_debug_clause (decl,
8663 !!(flags & GOVD_SHARED));
8664 if (private_debug)
8665 code = OMP_CLAUSE_PRIVATE;
8666 else if (flags & GOVD_MAP)
8668 code = OMP_CLAUSE_MAP;
8669 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8670 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8672 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8673 return 0;
8676 else if (flags & GOVD_SHARED)
8678 if (is_global_var (decl))
8680 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8681 while (ctx != NULL)
8683 splay_tree_node on
8684 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8685 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8686 | GOVD_PRIVATE | GOVD_REDUCTION
8687 | GOVD_LINEAR | GOVD_MAP)) != 0)
8688 break;
8689 ctx = ctx->outer_context;
8691 if (ctx == NULL)
8692 return 0;
8694 code = OMP_CLAUSE_SHARED;
8696 else if (flags & GOVD_PRIVATE)
8697 code = OMP_CLAUSE_PRIVATE;
8698 else if (flags & GOVD_FIRSTPRIVATE)
8700 code = OMP_CLAUSE_FIRSTPRIVATE;
8701 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8702 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8703 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8705 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8706 "%<target%> construct", decl);
8707 return 0;
8710 else if (flags & GOVD_LASTPRIVATE)
8711 code = OMP_CLAUSE_LASTPRIVATE;
8712 else if (flags & GOVD_ALIGNED)
8713 return 0;
8714 else
8715 gcc_unreachable ();
8717 if (((flags & GOVD_LASTPRIVATE)
8718 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8719 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8720 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8722 tree chain = *list_p;
8723 clause = build_omp_clause (input_location, code);
8724 OMP_CLAUSE_DECL (clause) = decl;
8725 OMP_CLAUSE_CHAIN (clause) = chain;
8726 if (private_debug)
8727 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8728 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8729 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8730 else if (code == OMP_CLAUSE_SHARED
8731 && (flags & GOVD_WRITTEN) == 0
8732 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8733 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8734 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8735 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8736 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8738 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8739 OMP_CLAUSE_DECL (nc) = decl;
8740 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8741 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8742 OMP_CLAUSE_DECL (clause)
8743 = build_simple_mem_ref_loc (input_location, decl);
8744 OMP_CLAUSE_DECL (clause)
8745 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8746 build_int_cst (build_pointer_type (char_type_node), 0));
8747 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8748 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8749 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8750 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8751 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8752 OMP_CLAUSE_CHAIN (nc) = chain;
8753 OMP_CLAUSE_CHAIN (clause) = nc;
8754 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8755 gimplify_omp_ctxp = ctx->outer_context;
8756 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8757 pre_p, NULL, is_gimple_val, fb_rvalue);
8758 gimplify_omp_ctxp = ctx;
8760 else if (code == OMP_CLAUSE_MAP)
8762 int kind;
8763 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8764 switch (flags & (GOVD_MAP_TO_ONLY
8765 | GOVD_MAP_FORCE
8766 | GOVD_MAP_FORCE_PRESENT))
8768 case 0:
8769 kind = GOMP_MAP_TOFROM;
8770 break;
8771 case GOVD_MAP_FORCE:
8772 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8773 break;
8774 case GOVD_MAP_TO_ONLY:
8775 kind = GOMP_MAP_TO;
8776 break;
8777 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8778 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8779 break;
8780 case GOVD_MAP_FORCE_PRESENT:
8781 kind = GOMP_MAP_FORCE_PRESENT;
8782 break;
8783 default:
8784 gcc_unreachable ();
8786 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8787 if (DECL_SIZE (decl)
8788 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8790 tree decl2 = DECL_VALUE_EXPR (decl);
8791 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8792 decl2 = TREE_OPERAND (decl2, 0);
8793 gcc_assert (DECL_P (decl2));
8794 tree mem = build_simple_mem_ref (decl2);
8795 OMP_CLAUSE_DECL (clause) = mem;
8796 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8797 if (gimplify_omp_ctxp->outer_context)
8799 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8800 omp_notice_variable (ctx, decl2, true);
8801 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8803 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8804 OMP_CLAUSE_MAP);
8805 OMP_CLAUSE_DECL (nc) = decl;
8806 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8807 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8808 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8809 else
8810 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8811 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8812 OMP_CLAUSE_CHAIN (clause) = nc;
8814 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8815 && lang_hooks.decls.omp_privatize_by_reference (decl))
8817 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8818 OMP_CLAUSE_SIZE (clause)
8819 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8820 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8821 gimplify_omp_ctxp = ctx->outer_context;
8822 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8823 pre_p, NULL, is_gimple_val, fb_rvalue);
8824 gimplify_omp_ctxp = ctx;
8825 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8826 OMP_CLAUSE_MAP);
8827 OMP_CLAUSE_DECL (nc) = decl;
8828 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8829 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8830 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8831 OMP_CLAUSE_CHAIN (clause) = nc;
8833 else
8834 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8836 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8838 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8839 OMP_CLAUSE_DECL (nc) = decl;
8840 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8841 OMP_CLAUSE_CHAIN (nc) = chain;
8842 OMP_CLAUSE_CHAIN (clause) = nc;
8843 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8844 gimplify_omp_ctxp = ctx->outer_context;
8845 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8846 gimplify_omp_ctxp = ctx;
8848 *list_p = clause;
8849 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8850 gimplify_omp_ctxp = ctx->outer_context;
8851 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8852 if (gimplify_omp_ctxp)
8853 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8854 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8855 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8856 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8857 true);
8858 gimplify_omp_ctxp = ctx;
8859 return 0;
8862 static void
8863 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8864 enum tree_code code)
8866 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8867 tree c, decl;
8869 if (body)
8871 struct gimplify_omp_ctx *octx;
8872 for (octx = ctx; octx; octx = octx->outer_context)
8873 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8874 break;
8875 if (octx)
8877 struct walk_stmt_info wi;
8878 memset (&wi, 0, sizeof (wi));
8879 walk_gimple_seq (body, omp_find_stores_stmt,
8880 omp_find_stores_op, &wi);
8883 while ((c = *list_p) != NULL)
8885 splay_tree_node n;
8886 bool remove = false;
8888 switch (OMP_CLAUSE_CODE (c))
8890 case OMP_CLAUSE_FIRSTPRIVATE:
8891 if ((ctx->region_type & ORT_TARGET)
8892 && (ctx->region_type & ORT_ACC) == 0
8893 && TYPE_ATOMIC (strip_array_types
8894 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8896 error_at (OMP_CLAUSE_LOCATION (c),
8897 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8898 "%<target%> construct", OMP_CLAUSE_DECL (c));
8899 remove = true;
8900 break;
8902 /* FALLTHRU */
8903 case OMP_CLAUSE_PRIVATE:
8904 case OMP_CLAUSE_SHARED:
8905 case OMP_CLAUSE_LINEAR:
8906 decl = OMP_CLAUSE_DECL (c);
8907 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8908 remove = !(n->value & GOVD_SEEN);
8909 if (! remove)
8911 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8912 if ((n->value & GOVD_DEBUG_PRIVATE)
8913 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8915 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8916 || ((n->value & GOVD_DATA_SHARE_CLASS)
8917 == GOVD_SHARED));
8918 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8919 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8921 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8922 && (n->value & GOVD_WRITTEN) == 0
8923 && DECL_P (decl)
8924 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8925 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8926 else if (DECL_P (decl)
8927 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8928 && (n->value & GOVD_WRITTEN) != 0)
8929 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8930 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8931 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8932 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8934 break;
8936 case OMP_CLAUSE_LASTPRIVATE:
8937 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8938 accurately reflect the presence of a FIRSTPRIVATE clause. */
8939 decl = OMP_CLAUSE_DECL (c);
8940 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8941 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8942 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8943 if (code == OMP_DISTRIBUTE
8944 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8946 remove = true;
8947 error_at (OMP_CLAUSE_LOCATION (c),
8948 "same variable used in %<firstprivate%> and "
8949 "%<lastprivate%> clauses on %<distribute%> "
8950 "construct");
8952 if (!remove
8953 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8954 && DECL_P (decl)
8955 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8956 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8957 break;
8959 case OMP_CLAUSE_ALIGNED:
8960 decl = OMP_CLAUSE_DECL (c);
8961 if (!is_global_var (decl))
8963 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8964 remove = n == NULL || !(n->value & GOVD_SEEN);
8965 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8967 struct gimplify_omp_ctx *octx;
8968 if (n != NULL
8969 && (n->value & (GOVD_DATA_SHARE_CLASS
8970 & ~GOVD_FIRSTPRIVATE)))
8971 remove = true;
8972 else
8973 for (octx = ctx->outer_context; octx;
8974 octx = octx->outer_context)
8976 n = splay_tree_lookup (octx->variables,
8977 (splay_tree_key) decl);
8978 if (n == NULL)
8979 continue;
8980 if (n->value & GOVD_LOCAL)
8981 break;
8982 /* We have to avoid assigning a shared variable
8983 to itself when trying to add
8984 __builtin_assume_aligned. */
8985 if (n->value & GOVD_SHARED)
8987 remove = true;
8988 break;
8993 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8995 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8996 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8997 remove = true;
8999 break;
9001 case OMP_CLAUSE_MAP:
9002 if (code == OMP_TARGET_EXIT_DATA
9003 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9005 remove = true;
9006 break;
9008 decl = OMP_CLAUSE_DECL (c);
9009 /* Data clauses associated with acc parallel reductions must be
9010 compatible with present_or_copy. Warn and adjust the clause
9011 if that is not the case. */
9012 if (ctx->region_type == ORT_ACC_PARALLEL)
9014 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9015 n = NULL;
9017 if (DECL_P (t))
9018 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9020 if (n && (n->value & GOVD_REDUCTION))
9022 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9024 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9025 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9026 && kind != GOMP_MAP_FORCE_PRESENT
9027 && kind != GOMP_MAP_POINTER)
9029 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9030 "incompatible data clause with reduction "
9031 "on %qE; promoting to present_or_copy",
9032 DECL_NAME (t));
9033 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9037 if (!DECL_P (decl))
9039 if ((ctx->region_type & ORT_TARGET) != 0
9040 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9042 if (TREE_CODE (decl) == INDIRECT_REF
9043 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9044 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9045 == REFERENCE_TYPE))
9046 decl = TREE_OPERAND (decl, 0);
9047 if (TREE_CODE (decl) == COMPONENT_REF)
9049 while (TREE_CODE (decl) == COMPONENT_REF)
9050 decl = TREE_OPERAND (decl, 0);
9051 if (DECL_P (decl))
9053 n = splay_tree_lookup (ctx->variables,
9054 (splay_tree_key) decl);
9055 if (!(n->value & GOVD_SEEN))
9056 remove = true;
9060 break;
9062 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9063 if ((ctx->region_type & ORT_TARGET) != 0
9064 && !(n->value & GOVD_SEEN)
9065 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9066 && (!is_global_var (decl)
9067 || !lookup_attribute ("omp declare target link",
9068 DECL_ATTRIBUTES (decl))))
9070 remove = true;
9071 /* For struct element mapping, if struct is never referenced
9072 in target block and none of the mapping has always modifier,
9073 remove all the struct element mappings, which immediately
9074 follow the GOMP_MAP_STRUCT map clause. */
9075 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9077 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9078 while (cnt--)
9079 OMP_CLAUSE_CHAIN (c)
9080 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9083 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9084 && code == OMP_TARGET_EXIT_DATA)
9085 remove = true;
9086 else if (DECL_SIZE (decl)
9087 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9088 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9089 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9090 && (OMP_CLAUSE_MAP_KIND (c)
9091 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9093 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9094 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9095 INTEGER_CST. */
9096 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9098 tree decl2 = DECL_VALUE_EXPR (decl);
9099 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9100 decl2 = TREE_OPERAND (decl2, 0);
9101 gcc_assert (DECL_P (decl2));
9102 tree mem = build_simple_mem_ref (decl2);
9103 OMP_CLAUSE_DECL (c) = mem;
9104 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9105 if (ctx->outer_context)
9107 omp_notice_variable (ctx->outer_context, decl2, true);
9108 omp_notice_variable (ctx->outer_context,
9109 OMP_CLAUSE_SIZE (c), true);
9111 if (((ctx->region_type & ORT_TARGET) != 0
9112 || !ctx->target_firstprivatize_array_bases)
9113 && ((n->value & GOVD_SEEN) == 0
9114 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9116 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9117 OMP_CLAUSE_MAP);
9118 OMP_CLAUSE_DECL (nc) = decl;
9119 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9120 if (ctx->target_firstprivatize_array_bases)
9121 OMP_CLAUSE_SET_MAP_KIND (nc,
9122 GOMP_MAP_FIRSTPRIVATE_POINTER);
9123 else
9124 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9125 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9126 OMP_CLAUSE_CHAIN (c) = nc;
9127 c = nc;
9130 else
9132 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9133 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9134 gcc_assert ((n->value & GOVD_SEEN) == 0
9135 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9136 == 0));
9138 break;
9140 case OMP_CLAUSE_TO:
9141 case OMP_CLAUSE_FROM:
9142 case OMP_CLAUSE__CACHE_:
9143 decl = OMP_CLAUSE_DECL (c);
9144 if (!DECL_P (decl))
9145 break;
9146 if (DECL_SIZE (decl)
9147 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9149 tree decl2 = DECL_VALUE_EXPR (decl);
9150 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9151 decl2 = TREE_OPERAND (decl2, 0);
9152 gcc_assert (DECL_P (decl2));
9153 tree mem = build_simple_mem_ref (decl2);
9154 OMP_CLAUSE_DECL (c) = mem;
9155 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9156 if (ctx->outer_context)
9158 omp_notice_variable (ctx->outer_context, decl2, true);
9159 omp_notice_variable (ctx->outer_context,
9160 OMP_CLAUSE_SIZE (c), true);
9163 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9164 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9165 break;
9167 case OMP_CLAUSE_REDUCTION:
9168 decl = OMP_CLAUSE_DECL (c);
9169 /* OpenACC reductions need a present_or_copy data clause.
9170 Add one if necessary. Error is the reduction is private. */
9171 if (ctx->region_type == ORT_ACC_PARALLEL)
9173 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9174 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9175 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9176 "reduction on %qE", DECL_NAME (decl));
9177 else if ((n->value & GOVD_MAP) == 0)
9179 tree next = OMP_CLAUSE_CHAIN (c);
9180 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9181 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9182 OMP_CLAUSE_DECL (nc) = decl;
9183 OMP_CLAUSE_CHAIN (c) = nc;
9184 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9185 while (1)
9187 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9188 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9189 break;
9190 nc = OMP_CLAUSE_CHAIN (nc);
9192 OMP_CLAUSE_CHAIN (nc) = next;
9193 n->value |= GOVD_MAP;
9196 if (DECL_P (decl)
9197 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9198 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9199 break;
9200 case OMP_CLAUSE_COPYIN:
9201 case OMP_CLAUSE_COPYPRIVATE:
9202 case OMP_CLAUSE_IF:
9203 case OMP_CLAUSE_NUM_THREADS:
9204 case OMP_CLAUSE_NUM_TEAMS:
9205 case OMP_CLAUSE_THREAD_LIMIT:
9206 case OMP_CLAUSE_DIST_SCHEDULE:
9207 case OMP_CLAUSE_DEVICE:
9208 case OMP_CLAUSE_SCHEDULE:
9209 case OMP_CLAUSE_NOWAIT:
9210 case OMP_CLAUSE_ORDERED:
9211 case OMP_CLAUSE_DEFAULT:
9212 case OMP_CLAUSE_UNTIED:
9213 case OMP_CLAUSE_COLLAPSE:
9214 case OMP_CLAUSE_FINAL:
9215 case OMP_CLAUSE_MERGEABLE:
9216 case OMP_CLAUSE_PROC_BIND:
9217 case OMP_CLAUSE_SAFELEN:
9218 case OMP_CLAUSE_SIMDLEN:
9219 case OMP_CLAUSE_DEPEND:
9220 case OMP_CLAUSE_PRIORITY:
9221 case OMP_CLAUSE_GRAINSIZE:
9222 case OMP_CLAUSE_NUM_TASKS:
9223 case OMP_CLAUSE_NOGROUP:
9224 case OMP_CLAUSE_THREADS:
9225 case OMP_CLAUSE_SIMD:
9226 case OMP_CLAUSE_HINT:
9227 case OMP_CLAUSE_DEFAULTMAP:
9228 case OMP_CLAUSE_USE_DEVICE_PTR:
9229 case OMP_CLAUSE_IS_DEVICE_PTR:
9230 case OMP_CLAUSE__CILK_FOR_COUNT_:
9231 case OMP_CLAUSE_ASYNC:
9232 case OMP_CLAUSE_WAIT:
9233 case OMP_CLAUSE_INDEPENDENT:
9234 case OMP_CLAUSE_NUM_GANGS:
9235 case OMP_CLAUSE_NUM_WORKERS:
9236 case OMP_CLAUSE_VECTOR_LENGTH:
9237 case OMP_CLAUSE_GANG:
9238 case OMP_CLAUSE_WORKER:
9239 case OMP_CLAUSE_VECTOR:
9240 case OMP_CLAUSE_AUTO:
9241 case OMP_CLAUSE_SEQ:
9242 case OMP_CLAUSE_TILE:
9243 break;
9245 default:
9246 gcc_unreachable ();
9249 if (remove)
9250 *list_p = OMP_CLAUSE_CHAIN (c);
9251 else
9252 list_p = &OMP_CLAUSE_CHAIN (c);
9255 /* Add in any implicit data sharing. */
9256 struct gimplify_adjust_omp_clauses_data data;
9257 data.list_p = list_p;
9258 data.pre_p = pre_p;
9259 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9261 gimplify_omp_ctxp = ctx->outer_context;
9262 delete_omp_context (ctx);
9265 /* Gimplify OACC_CACHE. */
9267 static void
9268 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9270 tree expr = *expr_p;
9272 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9273 OACC_CACHE);
9274 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9275 OACC_CACHE);
9277 /* TODO: Do something sensible with this information. */
9279 *expr_p = NULL_TREE;
9282 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9283 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9284 kind. The entry kind will replace the one in CLAUSE, while the exit
9285 kind will be used in a new omp_clause and returned to the caller. */
9287 static tree
9288 gimplify_oacc_declare_1 (tree clause)
9290 HOST_WIDE_INT kind, new_op;
9291 bool ret = false;
9292 tree c = NULL;
9294 kind = OMP_CLAUSE_MAP_KIND (clause);
9296 switch (kind)
9298 case GOMP_MAP_ALLOC:
9299 case GOMP_MAP_FORCE_ALLOC:
9300 case GOMP_MAP_FORCE_TO:
9301 new_op = GOMP_MAP_DELETE;
9302 ret = true;
9303 break;
9305 case GOMP_MAP_FORCE_FROM:
9306 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9307 new_op = GOMP_MAP_FORCE_FROM;
9308 ret = true;
9309 break;
9311 case GOMP_MAP_FORCE_TOFROM:
9312 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9313 new_op = GOMP_MAP_FORCE_FROM;
9314 ret = true;
9315 break;
9317 case GOMP_MAP_FROM:
9318 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9319 new_op = GOMP_MAP_FROM;
9320 ret = true;
9321 break;
9323 case GOMP_MAP_TOFROM:
9324 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9325 new_op = GOMP_MAP_FROM;
9326 ret = true;
9327 break;
9329 case GOMP_MAP_DEVICE_RESIDENT:
9330 case GOMP_MAP_FORCE_DEVICEPTR:
9331 case GOMP_MAP_FORCE_PRESENT:
9332 case GOMP_MAP_LINK:
9333 case GOMP_MAP_POINTER:
9334 case GOMP_MAP_TO:
9335 break;
9337 default:
9338 gcc_unreachable ();
9339 break;
9342 if (ret)
9344 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9345 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9346 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9349 return c;
9352 /* Gimplify OACC_DECLARE. */
9354 static void
9355 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9357 tree expr = *expr_p;
9358 gomp_target *stmt;
9359 tree clauses, t, decl;
9361 clauses = OACC_DECLARE_CLAUSES (expr);
9363 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9364 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9366 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9368 decl = OMP_CLAUSE_DECL (t);
9370 if (TREE_CODE (decl) == MEM_REF)
9371 decl = TREE_OPERAND (decl, 0);
9373 if (VAR_P (decl) && !is_oacc_declared (decl))
9375 tree attr = get_identifier ("oacc declare target");
9376 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9377 DECL_ATTRIBUTES (decl));
9380 if (VAR_P (decl)
9381 && !is_global_var (decl)
9382 && DECL_CONTEXT (decl) == current_function_decl)
9384 tree c = gimplify_oacc_declare_1 (t);
9385 if (c)
9387 if (oacc_declare_returns == NULL)
9388 oacc_declare_returns = new hash_map<tree, tree>;
9390 oacc_declare_returns->put (decl, c);
9394 if (gimplify_omp_ctxp)
9395 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9398 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9399 clauses);
9401 gimplify_seq_add_stmt (pre_p, stmt);
9403 *expr_p = NULL_TREE;
9406 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9407 gimplification of the body, as well as scanning the body for used
9408 variables. We need to do this scan now, because variable-sized
9409 decls will be decomposed during gimplification. */
9411 static void
9412 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9414 tree expr = *expr_p;
9415 gimple *g;
9416 gimple_seq body = NULL;
9418 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9419 OMP_PARALLEL_COMBINED (expr)
9420 ? ORT_COMBINED_PARALLEL
9421 : ORT_PARALLEL, OMP_PARALLEL);
9423 push_gimplify_context ();
9425 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9426 if (gimple_code (g) == GIMPLE_BIND)
9427 pop_gimplify_context (g);
9428 else
9429 pop_gimplify_context (NULL);
9431 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9432 OMP_PARALLEL);
9434 g = gimple_build_omp_parallel (body,
9435 OMP_PARALLEL_CLAUSES (expr),
9436 NULL_TREE, NULL_TREE);
9437 if (OMP_PARALLEL_COMBINED (expr))
9438 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9439 gimplify_seq_add_stmt (pre_p, g);
9440 *expr_p = NULL_TREE;
9443 /* Gimplify the contents of an OMP_TASK statement. This involves
9444 gimplification of the body, as well as scanning the body for used
9445 variables. We need to do this scan now, because variable-sized
9446 decls will be decomposed during gimplification. */
9448 static void
9449 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9451 tree expr = *expr_p;
9452 gimple *g;
9453 gimple_seq body = NULL;
9455 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9456 omp_find_clause (OMP_TASK_CLAUSES (expr),
9457 OMP_CLAUSE_UNTIED)
9458 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9460 push_gimplify_context ();
9462 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9463 if (gimple_code (g) == GIMPLE_BIND)
9464 pop_gimplify_context (g);
9465 else
9466 pop_gimplify_context (NULL);
9468 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9469 OMP_TASK);
9471 g = gimple_build_omp_task (body,
9472 OMP_TASK_CLAUSES (expr),
9473 NULL_TREE, NULL_TREE,
9474 NULL_TREE, NULL_TREE, NULL_TREE);
9475 gimplify_seq_add_stmt (pre_p, g);
9476 *expr_p = NULL_TREE;
9479 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9480 with non-NULL OMP_FOR_INIT. */
9482 static tree
9483 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9485 *walk_subtrees = 0;
9486 switch (TREE_CODE (*tp))
9488 case OMP_FOR:
9489 *walk_subtrees = 1;
9490 /* FALLTHRU */
9491 case OMP_SIMD:
9492 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9493 return *tp;
9494 break;
9495 case BIND_EXPR:
9496 case STATEMENT_LIST:
9497 case OMP_PARALLEL:
9498 *walk_subtrees = 1;
9499 break;
9500 default:
9501 break;
9503 return NULL_TREE;
9506 /* Gimplify the gross structure of an OMP_FOR statement. */
9508 static enum gimplify_status
9509 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9511 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9512 enum gimplify_status ret = GS_ALL_DONE;
9513 enum gimplify_status tret;
9514 gomp_for *gfor;
9515 gimple_seq for_body, for_pre_body;
9516 int i;
9517 bitmap has_decl_expr = NULL;
9518 enum omp_region_type ort = ORT_WORKSHARE;
9520 orig_for_stmt = for_stmt = *expr_p;
9522 switch (TREE_CODE (for_stmt))
9524 case OMP_FOR:
9525 case CILK_FOR:
9526 case OMP_DISTRIBUTE:
9527 break;
9528 case OACC_LOOP:
9529 ort = ORT_ACC;
9530 break;
9531 case OMP_TASKLOOP:
9532 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9533 ort = ORT_UNTIED_TASK;
9534 else
9535 ort = ORT_TASK;
9536 break;
9537 case OMP_SIMD:
9538 case CILK_SIMD:
9539 ort = ORT_SIMD;
9540 break;
9541 default:
9542 gcc_unreachable ();
9545 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9546 clause for the IV. */
9547 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9549 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9550 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9551 decl = TREE_OPERAND (t, 0);
9552 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9553 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9554 && OMP_CLAUSE_DECL (c) == decl)
9556 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9557 break;
9561 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9563 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9564 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9565 find_combined_omp_for, NULL, NULL);
9566 if (inner_for_stmt == NULL_TREE)
9568 gcc_assert (seen_error ());
9569 *expr_p = NULL_TREE;
9570 return GS_ERROR;
9574 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9575 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9576 TREE_CODE (for_stmt));
9578 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9579 gimplify_omp_ctxp->distribute = true;
9581 /* Handle OMP_FOR_INIT. */
9582 for_pre_body = NULL;
9583 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9585 has_decl_expr = BITMAP_ALLOC (NULL);
9586 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9587 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9588 == VAR_DECL)
9590 t = OMP_FOR_PRE_BODY (for_stmt);
9591 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9593 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9595 tree_stmt_iterator si;
9596 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9597 tsi_next (&si))
9599 t = tsi_stmt (si);
9600 if (TREE_CODE (t) == DECL_EXPR
9601 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9602 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9606 if (OMP_FOR_PRE_BODY (for_stmt))
9608 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9609 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9610 else
9612 struct gimplify_omp_ctx ctx;
9613 memset (&ctx, 0, sizeof (ctx));
9614 ctx.region_type = ORT_NONE;
9615 gimplify_omp_ctxp = &ctx;
9616 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9617 gimplify_omp_ctxp = NULL;
9620 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9622 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9623 for_stmt = inner_for_stmt;
9625 /* For taskloop, need to gimplify the start, end and step before the
9626 taskloop, outside of the taskloop omp context. */
9627 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9629 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9631 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9632 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9634 TREE_OPERAND (t, 1)
9635 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9636 pre_p, NULL, false);
9637 tree c = build_omp_clause (input_location,
9638 OMP_CLAUSE_FIRSTPRIVATE);
9639 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9640 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9641 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9644 /* Handle OMP_FOR_COND. */
9645 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9646 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9648 TREE_OPERAND (t, 1)
9649 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9650 gimple_seq_empty_p (for_pre_body)
9651 ? pre_p : &for_pre_body, NULL,
9652 false);
9653 tree c = build_omp_clause (input_location,
9654 OMP_CLAUSE_FIRSTPRIVATE);
9655 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9656 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9657 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9660 /* Handle OMP_FOR_INCR. */
9661 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9662 if (TREE_CODE (t) == MODIFY_EXPR)
9664 decl = TREE_OPERAND (t, 0);
9665 t = TREE_OPERAND (t, 1);
9666 tree *tp = &TREE_OPERAND (t, 1);
9667 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9668 tp = &TREE_OPERAND (t, 0);
9670 if (!is_gimple_constant (*tp))
9672 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9673 ? pre_p : &for_pre_body;
9674 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9675 tree c = build_omp_clause (input_location,
9676 OMP_CLAUSE_FIRSTPRIVATE);
9677 OMP_CLAUSE_DECL (c) = *tp;
9678 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9679 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9684 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9685 OMP_TASKLOOP);
9688 if (orig_for_stmt != for_stmt)
9689 gimplify_omp_ctxp->combined_loop = true;
9691 for_body = NULL;
9692 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9693 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9694 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9695 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9697 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9698 bool is_doacross = false;
9699 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9701 is_doacross = true;
9702 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9703 (OMP_FOR_INIT (for_stmt))
9704 * 2);
9706 int collapse = 1, tile = 0;
9707 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9708 if (c)
9709 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9710 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9711 if (c)
9712 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9713 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9715 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9716 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9717 decl = TREE_OPERAND (t, 0);
9718 gcc_assert (DECL_P (decl));
9719 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9720 || POINTER_TYPE_P (TREE_TYPE (decl)));
9721 if (is_doacross)
9723 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9724 gimplify_omp_ctxp->loop_iter_var.quick_push
9725 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9726 else
9727 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9728 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9731 /* Make sure the iteration variable is private. */
9732 tree c = NULL_TREE;
9733 tree c2 = NULL_TREE;
9734 if (orig_for_stmt != for_stmt)
9735 /* Do this only on innermost construct for combined ones. */;
9736 else if (ort == ORT_SIMD)
9738 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9739 (splay_tree_key) decl);
9740 omp_is_private (gimplify_omp_ctxp, decl,
9741 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9742 != 1));
9743 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9744 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9745 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9747 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9748 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9749 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9750 if (has_decl_expr
9751 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9753 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9754 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9756 struct gimplify_omp_ctx *outer
9757 = gimplify_omp_ctxp->outer_context;
9758 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9760 if (outer->region_type == ORT_WORKSHARE
9761 && outer->combined_loop)
9763 n = splay_tree_lookup (outer->variables,
9764 (splay_tree_key)decl);
9765 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9767 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9768 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9770 else
9772 struct gimplify_omp_ctx *octx = outer->outer_context;
9773 if (octx
9774 && octx->region_type == ORT_COMBINED_PARALLEL
9775 && octx->outer_context
9776 && (octx->outer_context->region_type
9777 == ORT_WORKSHARE)
9778 && octx->outer_context->combined_loop)
9780 octx = octx->outer_context;
9781 n = splay_tree_lookup (octx->variables,
9782 (splay_tree_key)decl);
9783 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9785 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9786 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9793 OMP_CLAUSE_DECL (c) = decl;
9794 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9795 OMP_FOR_CLAUSES (for_stmt) = c;
9796 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9797 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9799 if (outer->region_type == ORT_WORKSHARE
9800 && outer->combined_loop)
9802 if (outer->outer_context
9803 && (outer->outer_context->region_type
9804 == ORT_COMBINED_PARALLEL))
9805 outer = outer->outer_context;
9806 else if (omp_check_private (outer, decl, false))
9807 outer = NULL;
9809 else if (((outer->region_type & ORT_TASK) != 0)
9810 && outer->combined_loop
9811 && !omp_check_private (gimplify_omp_ctxp,
9812 decl, false))
9814 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9816 omp_notice_variable (outer, decl, true);
9817 outer = NULL;
9819 if (outer)
9821 n = splay_tree_lookup (outer->variables,
9822 (splay_tree_key)decl);
9823 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9825 omp_add_variable (outer, decl,
9826 GOVD_LASTPRIVATE | GOVD_SEEN);
9827 if (outer->region_type == ORT_COMBINED_PARALLEL
9828 && outer->outer_context
9829 && (outer->outer_context->region_type
9830 == ORT_WORKSHARE)
9831 && outer->outer_context->combined_loop)
9833 outer = outer->outer_context;
9834 n = splay_tree_lookup (outer->variables,
9835 (splay_tree_key)decl);
9836 if (omp_check_private (outer, decl, false))
9837 outer = NULL;
9838 else if (n == NULL
9839 || ((n->value & GOVD_DATA_SHARE_CLASS)
9840 == 0))
9841 omp_add_variable (outer, decl,
9842 GOVD_LASTPRIVATE
9843 | GOVD_SEEN);
9844 else
9845 outer = NULL;
9847 if (outer && outer->outer_context
9848 && (outer->outer_context->region_type
9849 == ORT_COMBINED_TEAMS))
9851 outer = outer->outer_context;
9852 n = splay_tree_lookup (outer->variables,
9853 (splay_tree_key)decl);
9854 if (n == NULL
9855 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9856 omp_add_variable (outer, decl,
9857 GOVD_SHARED | GOVD_SEEN);
9858 else
9859 outer = NULL;
9861 if (outer && outer->outer_context)
9862 omp_notice_variable (outer->outer_context, decl,
9863 true);
9868 else
9870 bool lastprivate
9871 = (!has_decl_expr
9872 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9873 struct gimplify_omp_ctx *outer
9874 = gimplify_omp_ctxp->outer_context;
9875 if (outer && lastprivate)
9877 if (outer->region_type == ORT_WORKSHARE
9878 && outer->combined_loop)
9880 n = splay_tree_lookup (outer->variables,
9881 (splay_tree_key)decl);
9882 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9884 lastprivate = false;
9885 outer = NULL;
9887 else if (outer->outer_context
9888 && (outer->outer_context->region_type
9889 == ORT_COMBINED_PARALLEL))
9890 outer = outer->outer_context;
9891 else if (omp_check_private (outer, decl, false))
9892 outer = NULL;
9894 else if (((outer->region_type & ORT_TASK) != 0)
9895 && outer->combined_loop
9896 && !omp_check_private (gimplify_omp_ctxp,
9897 decl, false))
9899 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9901 omp_notice_variable (outer, decl, true);
9902 outer = NULL;
9904 if (outer)
9906 n = splay_tree_lookup (outer->variables,
9907 (splay_tree_key)decl);
9908 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9910 omp_add_variable (outer, decl,
9911 GOVD_LASTPRIVATE | GOVD_SEEN);
9912 if (outer->region_type == ORT_COMBINED_PARALLEL
9913 && outer->outer_context
9914 && (outer->outer_context->region_type
9915 == ORT_WORKSHARE)
9916 && outer->outer_context->combined_loop)
9918 outer = outer->outer_context;
9919 n = splay_tree_lookup (outer->variables,
9920 (splay_tree_key)decl);
9921 if (omp_check_private (outer, decl, false))
9922 outer = NULL;
9923 else if (n == NULL
9924 || ((n->value & GOVD_DATA_SHARE_CLASS)
9925 == 0))
9926 omp_add_variable (outer, decl,
9927 GOVD_LASTPRIVATE
9928 | GOVD_SEEN);
9929 else
9930 outer = NULL;
9932 if (outer && outer->outer_context
9933 && (outer->outer_context->region_type
9934 == ORT_COMBINED_TEAMS))
9936 outer = outer->outer_context;
9937 n = splay_tree_lookup (outer->variables,
9938 (splay_tree_key)decl);
9939 if (n == NULL
9940 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9941 omp_add_variable (outer, decl,
9942 GOVD_SHARED | GOVD_SEEN);
9943 else
9944 outer = NULL;
9946 if (outer && outer->outer_context)
9947 omp_notice_variable (outer->outer_context, decl,
9948 true);
9953 c = build_omp_clause (input_location,
9954 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9955 : OMP_CLAUSE_PRIVATE);
9956 OMP_CLAUSE_DECL (c) = decl;
9957 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9958 OMP_FOR_CLAUSES (for_stmt) = c;
9959 omp_add_variable (gimplify_omp_ctxp, decl,
9960 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9961 | GOVD_EXPLICIT | GOVD_SEEN);
9962 c = NULL_TREE;
9965 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9966 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9967 else
9968 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9970 /* If DECL is not a gimple register, create a temporary variable to act
9971 as an iteration counter. This is valid, since DECL cannot be
9972 modified in the body of the loop. Similarly for any iteration vars
9973 in simd with collapse > 1 where the iterator vars must be
9974 lastprivate. */
9975 if (orig_for_stmt != for_stmt)
9976 var = decl;
9977 else if (!is_gimple_reg (decl)
9978 || (ort == ORT_SIMD
9979 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9981 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9982 /* Make sure omp_add_variable is not called on it prematurely.
9983 We call it ourselves a few lines later. */
9984 gimplify_omp_ctxp = NULL;
9985 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9986 gimplify_omp_ctxp = ctx;
9987 TREE_OPERAND (t, 0) = var;
9989 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9991 if (ort == ORT_SIMD
9992 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9994 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9995 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9996 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9997 OMP_CLAUSE_DECL (c2) = var;
9998 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9999 OMP_FOR_CLAUSES (for_stmt) = c2;
10000 omp_add_variable (gimplify_omp_ctxp, var,
10001 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10002 if (c == NULL_TREE)
10004 c = c2;
10005 c2 = NULL_TREE;
10008 else
10009 omp_add_variable (gimplify_omp_ctxp, var,
10010 GOVD_PRIVATE | GOVD_SEEN);
10012 else
10013 var = decl;
10015 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10016 is_gimple_val, fb_rvalue, false);
10017 ret = MIN (ret, tret);
10018 if (ret == GS_ERROR)
10019 return ret;
10021 /* Handle OMP_FOR_COND. */
10022 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10023 gcc_assert (COMPARISON_CLASS_P (t));
10024 gcc_assert (TREE_OPERAND (t, 0) == decl);
10026 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10027 is_gimple_val, fb_rvalue, false);
10028 ret = MIN (ret, tret);
10030 /* Handle OMP_FOR_INCR. */
10031 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10032 switch (TREE_CODE (t))
10034 case PREINCREMENT_EXPR:
10035 case POSTINCREMENT_EXPR:
10037 tree decl = TREE_OPERAND (t, 0);
10038 /* c_omp_for_incr_canonicalize_ptr() should have been
10039 called to massage things appropriately. */
10040 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10042 if (orig_for_stmt != for_stmt)
10043 break;
10044 t = build_int_cst (TREE_TYPE (decl), 1);
10045 if (c)
10046 OMP_CLAUSE_LINEAR_STEP (c) = t;
10047 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10048 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10049 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10050 break;
10053 case PREDECREMENT_EXPR:
10054 case POSTDECREMENT_EXPR:
10055 /* c_omp_for_incr_canonicalize_ptr() should have been
10056 called to massage things appropriately. */
10057 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10058 if (orig_for_stmt != for_stmt)
10059 break;
10060 t = build_int_cst (TREE_TYPE (decl), -1);
10061 if (c)
10062 OMP_CLAUSE_LINEAR_STEP (c) = t;
10063 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10064 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10065 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10066 break;
10068 case MODIFY_EXPR:
10069 gcc_assert (TREE_OPERAND (t, 0) == decl);
10070 TREE_OPERAND (t, 0) = var;
10072 t = TREE_OPERAND (t, 1);
10073 switch (TREE_CODE (t))
10075 case PLUS_EXPR:
10076 if (TREE_OPERAND (t, 1) == decl)
10078 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10079 TREE_OPERAND (t, 0) = var;
10080 break;
10083 /* Fallthru. */
10084 case MINUS_EXPR:
10085 case POINTER_PLUS_EXPR:
10086 gcc_assert (TREE_OPERAND (t, 0) == decl);
10087 TREE_OPERAND (t, 0) = var;
10088 break;
10089 default:
10090 gcc_unreachable ();
10093 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10094 is_gimple_val, fb_rvalue, false);
10095 ret = MIN (ret, tret);
10096 if (c)
10098 tree step = TREE_OPERAND (t, 1);
10099 tree stept = TREE_TYPE (decl);
10100 if (POINTER_TYPE_P (stept))
10101 stept = sizetype;
10102 step = fold_convert (stept, step);
10103 if (TREE_CODE (t) == MINUS_EXPR)
10104 step = fold_build1 (NEGATE_EXPR, stept, step);
10105 OMP_CLAUSE_LINEAR_STEP (c) = step;
10106 if (step != TREE_OPERAND (t, 1))
10108 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10109 &for_pre_body, NULL,
10110 is_gimple_val, fb_rvalue, false);
10111 ret = MIN (ret, tret);
10114 break;
10116 default:
10117 gcc_unreachable ();
10120 if (c2)
10122 gcc_assert (c);
10123 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10126 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10128 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10129 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10130 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10131 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10132 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10133 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10134 && OMP_CLAUSE_DECL (c) == decl)
10136 if (is_doacross && (collapse == 1 || i >= collapse))
10137 t = var;
10138 else
10140 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10141 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10142 gcc_assert (TREE_OPERAND (t, 0) == var);
10143 t = TREE_OPERAND (t, 1);
10144 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10145 || TREE_CODE (t) == MINUS_EXPR
10146 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10147 gcc_assert (TREE_OPERAND (t, 0) == var);
10148 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10149 is_doacross ? var : decl,
10150 TREE_OPERAND (t, 1));
10152 gimple_seq *seq;
10153 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10154 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10155 else
10156 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10157 gimplify_assign (decl, t, seq);
10162 BITMAP_FREE (has_decl_expr);
10164 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10166 push_gimplify_context ();
10167 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10169 OMP_FOR_BODY (orig_for_stmt)
10170 = build3 (BIND_EXPR, void_type_node, NULL,
10171 OMP_FOR_BODY (orig_for_stmt), NULL);
10172 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10176 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10177 &for_body);
10179 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10181 if (gimple_code (g) == GIMPLE_BIND)
10182 pop_gimplify_context (g);
10183 else
10184 pop_gimplify_context (NULL);
10187 if (orig_for_stmt != for_stmt)
10188 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10190 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10191 decl = TREE_OPERAND (t, 0);
10192 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10193 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10194 gimplify_omp_ctxp = ctx->outer_context;
10195 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10196 gimplify_omp_ctxp = ctx;
10197 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10198 TREE_OPERAND (t, 0) = var;
10199 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10200 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10201 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10204 gimplify_adjust_omp_clauses (pre_p, for_body,
10205 &OMP_FOR_CLAUSES (orig_for_stmt),
10206 TREE_CODE (orig_for_stmt));
10208 int kind;
10209 switch (TREE_CODE (orig_for_stmt))
10211 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10212 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10213 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10214 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10215 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10216 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10217 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10218 default:
10219 gcc_unreachable ();
10221 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10222 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10223 for_pre_body);
10224 if (orig_for_stmt != for_stmt)
10225 gimple_omp_for_set_combined_p (gfor, true);
10226 if (gimplify_omp_ctxp
10227 && (gimplify_omp_ctxp->combined_loop
10228 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10229 && gimplify_omp_ctxp->outer_context
10230 && gimplify_omp_ctxp->outer_context->combined_loop)))
10232 gimple_omp_for_set_combined_into_p (gfor, true);
10233 if (gimplify_omp_ctxp->combined_loop)
10234 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10235 else
10236 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10239 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10241 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10242 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10243 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10244 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10245 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10246 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10247 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10248 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10251 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10252 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10253 The outer taskloop stands for computing the number of iterations,
10254 counts for collapsed loops and holding taskloop specific clauses.
10255 The task construct stands for the effect of data sharing on the
10256 explicit task it creates and the inner taskloop stands for expansion
10257 of the static loop inside of the explicit task construct. */
10258 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10260 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10261 tree task_clauses = NULL_TREE;
10262 tree c = *gfor_clauses_ptr;
10263 tree *gtask_clauses_ptr = &task_clauses;
10264 tree outer_for_clauses = NULL_TREE;
10265 tree *gforo_clauses_ptr = &outer_for_clauses;
10266 for (; c; c = OMP_CLAUSE_CHAIN (c))
10267 switch (OMP_CLAUSE_CODE (c))
10269 /* These clauses are allowed on task, move them there. */
10270 case OMP_CLAUSE_SHARED:
10271 case OMP_CLAUSE_FIRSTPRIVATE:
10272 case OMP_CLAUSE_DEFAULT:
10273 case OMP_CLAUSE_IF:
10274 case OMP_CLAUSE_UNTIED:
10275 case OMP_CLAUSE_FINAL:
10276 case OMP_CLAUSE_MERGEABLE:
10277 case OMP_CLAUSE_PRIORITY:
10278 *gtask_clauses_ptr = c;
10279 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10280 break;
10281 case OMP_CLAUSE_PRIVATE:
10282 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10284 /* We want private on outer for and firstprivate
10285 on task. */
10286 *gtask_clauses_ptr
10287 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10288 OMP_CLAUSE_FIRSTPRIVATE);
10289 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10290 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10291 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10292 *gforo_clauses_ptr = c;
10293 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10295 else
10297 *gtask_clauses_ptr = c;
10298 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10300 break;
10301 /* These clauses go into outer taskloop clauses. */
10302 case OMP_CLAUSE_GRAINSIZE:
10303 case OMP_CLAUSE_NUM_TASKS:
10304 case OMP_CLAUSE_NOGROUP:
10305 *gforo_clauses_ptr = c;
10306 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10307 break;
10308 /* Taskloop clause we duplicate on both taskloops. */
10309 case OMP_CLAUSE_COLLAPSE:
10310 *gfor_clauses_ptr = c;
10311 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10312 *gforo_clauses_ptr = copy_node (c);
10313 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10314 break;
10315 /* For lastprivate, keep the clause on inner taskloop, and add
10316 a shared clause on task. If the same decl is also firstprivate,
10317 add also firstprivate clause on the inner taskloop. */
10318 case OMP_CLAUSE_LASTPRIVATE:
10319 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10321 /* For taskloop C++ lastprivate IVs, we want:
10322 1) private on outer taskloop
10323 2) firstprivate and shared on task
10324 3) lastprivate on inner taskloop */
10325 *gtask_clauses_ptr
10326 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10327 OMP_CLAUSE_FIRSTPRIVATE);
10328 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10329 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10330 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10331 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10332 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10333 OMP_CLAUSE_PRIVATE);
10334 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10335 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10336 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10337 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10339 *gfor_clauses_ptr = c;
10340 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10341 *gtask_clauses_ptr
10342 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10343 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10344 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10345 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10346 gtask_clauses_ptr
10347 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10348 break;
10349 default:
10350 gcc_unreachable ();
10352 *gfor_clauses_ptr = NULL_TREE;
10353 *gtask_clauses_ptr = NULL_TREE;
10354 *gforo_clauses_ptr = NULL_TREE;
10355 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10356 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10357 NULL_TREE, NULL_TREE, NULL_TREE);
10358 gimple_omp_task_set_taskloop_p (g, true);
10359 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10360 gomp_for *gforo
10361 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10362 gimple_omp_for_collapse (gfor),
10363 gimple_omp_for_pre_body (gfor));
10364 gimple_omp_for_set_pre_body (gfor, NULL);
10365 gimple_omp_for_set_combined_p (gforo, true);
10366 gimple_omp_for_set_combined_into_p (gfor, true);
10367 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10369 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10370 tree v = create_tmp_var (type);
10371 gimple_omp_for_set_index (gforo, i, v);
10372 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10373 gimple_omp_for_set_initial (gforo, i, t);
10374 gimple_omp_for_set_cond (gforo, i,
10375 gimple_omp_for_cond (gfor, i));
10376 t = unshare_expr (gimple_omp_for_final (gfor, i));
10377 gimple_omp_for_set_final (gforo, i, t);
10378 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10379 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10380 TREE_OPERAND (t, 0) = v;
10381 gimple_omp_for_set_incr (gforo, i, t);
10382 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10383 OMP_CLAUSE_DECL (t) = v;
10384 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10385 gimple_omp_for_set_clauses (gforo, t);
10387 gimplify_seq_add_stmt (pre_p, gforo);
10389 else
10390 gimplify_seq_add_stmt (pre_p, gfor);
10391 if (ret != GS_ALL_DONE)
10392 return GS_ERROR;
10393 *expr_p = NULL_TREE;
10394 return GS_ALL_DONE;
10397 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10398 of OMP_TARGET's body. */
10400 static tree
10401 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10403 *walk_subtrees = 0;
10404 switch (TREE_CODE (*tp))
10406 case OMP_TEAMS:
10407 return *tp;
10408 case BIND_EXPR:
10409 case STATEMENT_LIST:
10410 *walk_subtrees = 1;
10411 break;
10412 default:
10413 break;
10415 return NULL_TREE;
10418 /* Helper function of optimize_target_teams, determine if the expression
10419 can be computed safely before the target construct on the host. */
10421 static tree
10422 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10424 splay_tree_node n;
10426 if (TYPE_P (*tp))
10428 *walk_subtrees = 0;
10429 return NULL_TREE;
10431 switch (TREE_CODE (*tp))
10433 case VAR_DECL:
10434 case PARM_DECL:
10435 case RESULT_DECL:
10436 *walk_subtrees = 0;
10437 if (error_operand_p (*tp)
10438 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10439 || DECL_HAS_VALUE_EXPR_P (*tp)
10440 || DECL_THREAD_LOCAL_P (*tp)
10441 || TREE_SIDE_EFFECTS (*tp)
10442 || TREE_THIS_VOLATILE (*tp))
10443 return *tp;
10444 if (is_global_var (*tp)
10445 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10446 || lookup_attribute ("omp declare target link",
10447 DECL_ATTRIBUTES (*tp))))
10448 return *tp;
10449 if (VAR_P (*tp)
10450 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10451 && !is_global_var (*tp)
10452 && decl_function_context (*tp) == current_function_decl)
10453 return *tp;
10454 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10455 (splay_tree_key) *tp);
10456 if (n == NULL)
10458 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10459 return NULL_TREE;
10460 return *tp;
10462 else if (n->value & GOVD_LOCAL)
10463 return *tp;
10464 else if (n->value & GOVD_FIRSTPRIVATE)
10465 return NULL_TREE;
10466 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10467 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10468 return NULL_TREE;
10469 return *tp;
10470 case INTEGER_CST:
10471 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10472 return *tp;
10473 return NULL_TREE;
10474 case TARGET_EXPR:
10475 if (TARGET_EXPR_INITIAL (*tp)
10476 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10477 return *tp;
10478 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10479 walk_subtrees, NULL);
10480 /* Allow some reasonable subset of integral arithmetics. */
10481 case PLUS_EXPR:
10482 case MINUS_EXPR:
10483 case MULT_EXPR:
10484 case TRUNC_DIV_EXPR:
10485 case CEIL_DIV_EXPR:
10486 case FLOOR_DIV_EXPR:
10487 case ROUND_DIV_EXPR:
10488 case TRUNC_MOD_EXPR:
10489 case CEIL_MOD_EXPR:
10490 case FLOOR_MOD_EXPR:
10491 case ROUND_MOD_EXPR:
10492 case RDIV_EXPR:
10493 case EXACT_DIV_EXPR:
10494 case MIN_EXPR:
10495 case MAX_EXPR:
10496 case LSHIFT_EXPR:
10497 case RSHIFT_EXPR:
10498 case BIT_IOR_EXPR:
10499 case BIT_XOR_EXPR:
10500 case BIT_AND_EXPR:
10501 case NEGATE_EXPR:
10502 case ABS_EXPR:
10503 case BIT_NOT_EXPR:
10504 case NON_LVALUE_EXPR:
10505 CASE_CONVERT:
10506 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10507 return *tp;
10508 return NULL_TREE;
10509 /* And disallow anything else, except for comparisons. */
10510 default:
10511 if (COMPARISON_CLASS_P (*tp))
10512 return NULL_TREE;
10513 return *tp;
10517 /* Try to determine if the num_teams and/or thread_limit expressions
10518 can have their values determined already before entering the
10519 target construct.
10520 INTEGER_CSTs trivially are,
10521 integral decls that are firstprivate (explicitly or implicitly)
10522 or explicitly map(always, to:) or map(always, tofrom:) on the target
10523 region too, and expressions involving simple arithmetics on those
10524 too, function calls are not ok, dereferencing something neither etc.
10525 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10526 EXPR based on what we find:
10527 0 stands for clause not specified at all, use implementation default
10528 -1 stands for value that can't be determined easily before entering
10529 the target construct.
10530 If teams construct is not present at all, use 1 for num_teams
10531 and 0 for thread_limit (only one team is involved, and the thread
10532 limit is implementation defined. */
10534 static void
10535 optimize_target_teams (tree target, gimple_seq *pre_p)
10537 tree body = OMP_BODY (target);
10538 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10539 tree num_teams = integer_zero_node;
10540 tree thread_limit = integer_zero_node;
10541 location_t num_teams_loc = EXPR_LOCATION (target);
10542 location_t thread_limit_loc = EXPR_LOCATION (target);
10543 tree c, *p, expr;
10544 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10546 if (teams == NULL_TREE)
10547 num_teams = integer_one_node;
10548 else
10549 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10551 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10553 p = &num_teams;
10554 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10556 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10558 p = &thread_limit;
10559 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10561 else
10562 continue;
10563 expr = OMP_CLAUSE_OPERAND (c, 0);
10564 if (TREE_CODE (expr) == INTEGER_CST)
10566 *p = expr;
10567 continue;
10569 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10571 *p = integer_minus_one_node;
10572 continue;
10574 *p = expr;
10575 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10576 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10577 == GS_ERROR)
10579 gimplify_omp_ctxp = target_ctx;
10580 *p = integer_minus_one_node;
10581 continue;
10583 gimplify_omp_ctxp = target_ctx;
10584 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10585 OMP_CLAUSE_OPERAND (c, 0) = *p;
10587 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10588 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10589 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10590 OMP_TARGET_CLAUSES (target) = c;
10591 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10592 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10593 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10594 OMP_TARGET_CLAUSES (target) = c;
10597 /* Gimplify the gross structure of several OMP constructs. */
10599 static void
10600 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10602 tree expr = *expr_p;
10603 gimple *stmt;
10604 gimple_seq body = NULL;
10605 enum omp_region_type ort;
10607 switch (TREE_CODE (expr))
10609 case OMP_SECTIONS:
10610 case OMP_SINGLE:
10611 ort = ORT_WORKSHARE;
10612 break;
10613 case OMP_TARGET:
10614 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10615 break;
10616 case OACC_KERNELS:
10617 ort = ORT_ACC_KERNELS;
10618 break;
10619 case OACC_PARALLEL:
10620 ort = ORT_ACC_PARALLEL;
10621 break;
10622 case OACC_DATA:
10623 ort = ORT_ACC_DATA;
10624 break;
10625 case OMP_TARGET_DATA:
10626 ort = ORT_TARGET_DATA;
10627 break;
10628 case OMP_TEAMS:
10629 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10630 break;
10631 case OACC_HOST_DATA:
10632 ort = ORT_ACC_HOST_DATA;
10633 break;
10634 default:
10635 gcc_unreachable ();
10637 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10638 TREE_CODE (expr));
10639 if (TREE_CODE (expr) == OMP_TARGET)
10640 optimize_target_teams (expr, pre_p);
10641 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10643 push_gimplify_context ();
10644 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10645 if (gimple_code (g) == GIMPLE_BIND)
10646 pop_gimplify_context (g);
10647 else
10648 pop_gimplify_context (NULL);
10649 if ((ort & ORT_TARGET_DATA) != 0)
10651 enum built_in_function end_ix;
10652 switch (TREE_CODE (expr))
10654 case OACC_DATA:
10655 case OACC_HOST_DATA:
10656 end_ix = BUILT_IN_GOACC_DATA_END;
10657 break;
10658 case OMP_TARGET_DATA:
10659 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10660 break;
10661 default:
10662 gcc_unreachable ();
10664 tree fn = builtin_decl_explicit (end_ix);
10665 g = gimple_build_call (fn, 0);
10666 gimple_seq cleanup = NULL;
10667 gimple_seq_add_stmt (&cleanup, g);
10668 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10669 body = NULL;
10670 gimple_seq_add_stmt (&body, g);
10673 else
10674 gimplify_and_add (OMP_BODY (expr), &body);
10675 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10676 TREE_CODE (expr));
10678 switch (TREE_CODE (expr))
10680 case OACC_DATA:
10681 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10682 OMP_CLAUSES (expr));
10683 break;
10684 case OACC_KERNELS:
10685 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10686 OMP_CLAUSES (expr));
10687 break;
10688 case OACC_HOST_DATA:
10689 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10690 OMP_CLAUSES (expr));
10691 break;
10692 case OACC_PARALLEL:
10693 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10694 OMP_CLAUSES (expr));
10695 break;
10696 case OMP_SECTIONS:
10697 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10698 break;
10699 case OMP_SINGLE:
10700 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10701 break;
10702 case OMP_TARGET:
10703 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10704 OMP_CLAUSES (expr));
10705 break;
10706 case OMP_TARGET_DATA:
10707 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10708 OMP_CLAUSES (expr));
10709 break;
10710 case OMP_TEAMS:
10711 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10712 break;
10713 default:
10714 gcc_unreachable ();
10717 gimplify_seq_add_stmt (pre_p, stmt);
10718 *expr_p = NULL_TREE;
10721 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10722 target update constructs. */
10724 static void
10725 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10727 tree expr = *expr_p;
10728 int kind;
10729 gomp_target *stmt;
10730 enum omp_region_type ort = ORT_WORKSHARE;
10732 switch (TREE_CODE (expr))
10734 case OACC_ENTER_DATA:
10735 case OACC_EXIT_DATA:
10736 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10737 ort = ORT_ACC;
10738 break;
10739 case OACC_UPDATE:
10740 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10741 ort = ORT_ACC;
10742 break;
10743 case OMP_TARGET_UPDATE:
10744 kind = GF_OMP_TARGET_KIND_UPDATE;
10745 break;
10746 case OMP_TARGET_ENTER_DATA:
10747 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10748 break;
10749 case OMP_TARGET_EXIT_DATA:
10750 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10751 break;
10752 default:
10753 gcc_unreachable ();
10755 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10756 ort, TREE_CODE (expr));
10757 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10758 TREE_CODE (expr));
10759 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10761 gimplify_seq_add_stmt (pre_p, stmt);
10762 *expr_p = NULL_TREE;
10765 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10766 stabilized the lhs of the atomic operation as *ADDR. Return true if
10767 EXPR is this stabilized form. */
10769 static bool
10770 goa_lhs_expr_p (tree expr, tree addr)
10772 /* Also include casts to other type variants. The C front end is fond
10773 of adding these for e.g. volatile variables. This is like
10774 STRIP_TYPE_NOPS but includes the main variant lookup. */
10775 STRIP_USELESS_TYPE_CONVERSION (expr);
10777 if (TREE_CODE (expr) == INDIRECT_REF)
10779 expr = TREE_OPERAND (expr, 0);
10780 while (expr != addr
10781 && (CONVERT_EXPR_P (expr)
10782 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10783 && TREE_CODE (expr) == TREE_CODE (addr)
10784 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10786 expr = TREE_OPERAND (expr, 0);
10787 addr = TREE_OPERAND (addr, 0);
10789 if (expr == addr)
10790 return true;
10791 return (TREE_CODE (addr) == ADDR_EXPR
10792 && TREE_CODE (expr) == ADDR_EXPR
10793 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10795 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10796 return true;
10797 return false;
10800 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10801 expression does not involve the lhs, evaluate it into a temporary.
10802 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10803 or -1 if an error was encountered. */
10805 static int
10806 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10807 tree lhs_var)
10809 tree expr = *expr_p;
10810 int saw_lhs;
10812 if (goa_lhs_expr_p (expr, lhs_addr))
10814 *expr_p = lhs_var;
10815 return 1;
10817 if (is_gimple_val (expr))
10818 return 0;
10820 saw_lhs = 0;
10821 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10823 case tcc_binary:
10824 case tcc_comparison:
10825 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10826 lhs_var);
10827 /* FALLTHRU */
10828 case tcc_unary:
10829 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10830 lhs_var);
10831 break;
10832 case tcc_expression:
10833 switch (TREE_CODE (expr))
10835 case TRUTH_ANDIF_EXPR:
10836 case TRUTH_ORIF_EXPR:
10837 case TRUTH_AND_EXPR:
10838 case TRUTH_OR_EXPR:
10839 case TRUTH_XOR_EXPR:
10840 case BIT_INSERT_EXPR:
10841 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10842 lhs_addr, lhs_var);
10843 /* FALLTHRU */
10844 case TRUTH_NOT_EXPR:
10845 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10846 lhs_addr, lhs_var);
10847 break;
10848 case COMPOUND_EXPR:
10849 /* Break out any preevaluations from cp_build_modify_expr. */
10850 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10851 expr = TREE_OPERAND (expr, 1))
10852 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10853 *expr_p = expr;
10854 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10855 default:
10856 break;
10858 break;
10859 case tcc_reference:
10860 if (TREE_CODE (expr) == BIT_FIELD_REF)
10861 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10862 lhs_addr, lhs_var);
10863 break;
10864 default:
10865 break;
10868 if (saw_lhs == 0)
10870 enum gimplify_status gs;
10871 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10872 if (gs != GS_ALL_DONE)
10873 saw_lhs = -1;
10876 return saw_lhs;
10879 /* Gimplify an OMP_ATOMIC statement. */
10881 static enum gimplify_status
10882 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10884 tree addr = TREE_OPERAND (*expr_p, 0);
10885 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10886 ? NULL : TREE_OPERAND (*expr_p, 1);
10887 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10888 tree tmp_load;
10889 gomp_atomic_load *loadstmt;
10890 gomp_atomic_store *storestmt;
10892 tmp_load = create_tmp_reg (type);
10893 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10894 return GS_ERROR;
10896 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10897 != GS_ALL_DONE)
10898 return GS_ERROR;
10900 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10901 gimplify_seq_add_stmt (pre_p, loadstmt);
10902 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10903 != GS_ALL_DONE)
10904 return GS_ERROR;
10906 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10907 rhs = tmp_load;
10908 storestmt = gimple_build_omp_atomic_store (rhs);
10909 gimplify_seq_add_stmt (pre_p, storestmt);
10910 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10912 gimple_omp_atomic_set_seq_cst (loadstmt);
10913 gimple_omp_atomic_set_seq_cst (storestmt);
10915 switch (TREE_CODE (*expr_p))
10917 case OMP_ATOMIC_READ:
10918 case OMP_ATOMIC_CAPTURE_OLD:
10919 *expr_p = tmp_load;
10920 gimple_omp_atomic_set_need_value (loadstmt);
10921 break;
10922 case OMP_ATOMIC_CAPTURE_NEW:
10923 *expr_p = rhs;
10924 gimple_omp_atomic_set_need_value (storestmt);
10925 break;
10926 default:
10927 *expr_p = NULL;
10928 break;
10931 return GS_ALL_DONE;
10934 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10935 body, and adding some EH bits. */
10937 static enum gimplify_status
10938 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10940 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10941 gimple *body_stmt;
10942 gtransaction *trans_stmt;
10943 gimple_seq body = NULL;
10944 int subcode = 0;
10946 /* Wrap the transaction body in a BIND_EXPR so we have a context
10947 where to put decls for OMP. */
10948 if (TREE_CODE (tbody) != BIND_EXPR)
10950 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10951 TREE_SIDE_EFFECTS (bind) = 1;
10952 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10953 TRANSACTION_EXPR_BODY (expr) = bind;
10956 push_gimplify_context ();
10957 temp = voidify_wrapper_expr (*expr_p, NULL);
10959 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10960 pop_gimplify_context (body_stmt);
10962 trans_stmt = gimple_build_transaction (body);
10963 if (TRANSACTION_EXPR_OUTER (expr))
10964 subcode = GTMA_IS_OUTER;
10965 else if (TRANSACTION_EXPR_RELAXED (expr))
10966 subcode = GTMA_IS_RELAXED;
10967 gimple_transaction_set_subcode (trans_stmt, subcode);
10969 gimplify_seq_add_stmt (pre_p, trans_stmt);
10971 if (temp)
10973 *expr_p = temp;
10974 return GS_OK;
10977 *expr_p = NULL_TREE;
10978 return GS_ALL_DONE;
10981 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10982 is the OMP_BODY of the original EXPR (which has already been
10983 gimplified so it's not present in the EXPR).
10985 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10987 static gimple *
10988 gimplify_omp_ordered (tree expr, gimple_seq body)
10990 tree c, decls;
10991 int failures = 0;
10992 unsigned int i;
10993 tree source_c = NULL_TREE;
10994 tree sink_c = NULL_TREE;
10996 if (gimplify_omp_ctxp)
10998 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11000 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
11001 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
11002 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
11004 error_at (OMP_CLAUSE_LOCATION (c),
11005 "%<ordered%> construct with %<depend%> clause must be "
11006 "closely nested inside a loop with %<ordered%> clause "
11007 "with a parameter");
11008 failures++;
11010 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11011 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
11013 bool fail = false;
11014 for (decls = OMP_CLAUSE_DECL (c), i = 0;
11015 decls && TREE_CODE (decls) == TREE_LIST;
11016 decls = TREE_CHAIN (decls), ++i)
11017 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
11018 continue;
11019 else if (TREE_VALUE (decls)
11020 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11022 error_at (OMP_CLAUSE_LOCATION (c),
11023 "variable %qE is not an iteration "
11024 "of outermost loop %d, expected %qE",
11025 TREE_VALUE (decls), i + 1,
11026 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11027 fail = true;
11028 failures++;
11030 else
11031 TREE_VALUE (decls)
11032 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
11033 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11035 error_at (OMP_CLAUSE_LOCATION (c),
11036 "number of variables in %<depend(sink)%> "
11037 "clause does not match number of "
11038 "iteration variables");
11039 failures++;
11041 sink_c = c;
11043 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11044 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11046 if (source_c)
11048 error_at (OMP_CLAUSE_LOCATION (c),
11049 "more than one %<depend(source)%> clause on an "
11050 "%<ordered%> construct");
11051 failures++;
11053 else
11054 source_c = c;
11057 if (source_c && sink_c)
11059 error_at (OMP_CLAUSE_LOCATION (source_c),
11060 "%<depend(source)%> clause specified together with "
11061 "%<depend(sink:)%> clauses on the same construct");
11062 failures++;
11065 if (failures)
11066 return gimple_build_nop ();
11067 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11070 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11071 expression produces a value to be used as an operand inside a GIMPLE
11072 statement, the value will be stored back in *EXPR_P. This value will
11073 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11074 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11075 emitted in PRE_P and POST_P.
11077 Additionally, this process may overwrite parts of the input
11078 expression during gimplification. Ideally, it should be
11079 possible to do non-destructive gimplification.
11081 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11082 the expression needs to evaluate to a value to be used as
11083 an operand in a GIMPLE statement, this value will be stored in
11084 *EXPR_P on exit. This happens when the caller specifies one
11085 of fb_lvalue or fb_rvalue fallback flags.
11087 PRE_P will contain the sequence of GIMPLE statements corresponding
11088 to the evaluation of EXPR and all the side-effects that must
11089 be executed before the main expression. On exit, the last
11090 statement of PRE_P is the core statement being gimplified. For
11091 instance, when gimplifying 'if (++a)' the last statement in
11092 PRE_P will be 'if (t.1)' where t.1 is the result of
11093 pre-incrementing 'a'.
11095 POST_P will contain the sequence of GIMPLE statements corresponding
11096 to the evaluation of all the side-effects that must be executed
11097 after the main expression. If this is NULL, the post
11098 side-effects are stored at the end of PRE_P.
11100 The reason why the output is split in two is to handle post
11101 side-effects explicitly. In some cases, an expression may have
11102 inner and outer post side-effects which need to be emitted in
11103 an order different from the one given by the recursive
11104 traversal. For instance, for the expression (*p--)++ the post
11105 side-effects of '--' must actually occur *after* the post
11106 side-effects of '++'. However, gimplification will first visit
11107 the inner expression, so if a separate POST sequence was not
11108 used, the resulting sequence would be:
11110 1 t.1 = *p
11111 2 p = p - 1
11112 3 t.2 = t.1 + 1
11113 4 *p = t.2
11115 However, the post-decrement operation in line #2 must not be
11116 evaluated until after the store to *p at line #4, so the
11117 correct sequence should be:
11119 1 t.1 = *p
11120 2 t.2 = t.1 + 1
11121 3 *p = t.2
11122 4 p = p - 1
11124 So, by specifying a separate post queue, it is possible
11125 to emit the post side-effects in the correct order.
11126 If POST_P is NULL, an internal queue will be used. Before
11127 returning to the caller, the sequence POST_P is appended to
11128 the main output sequence PRE_P.
11130 GIMPLE_TEST_F points to a function that takes a tree T and
11131 returns nonzero if T is in the GIMPLE form requested by the
11132 caller. The GIMPLE predicates are in gimple.c.
11134 FALLBACK tells the function what sort of a temporary we want if
11135 gimplification cannot produce an expression that complies with
11136 GIMPLE_TEST_F.
11138 fb_none means that no temporary should be generated
11139 fb_rvalue means that an rvalue is OK to generate
11140 fb_lvalue means that an lvalue is OK to generate
11141 fb_either means that either is OK, but an lvalue is preferable.
11142 fb_mayfail means that gimplification may fail (in which case
11143 GS_ERROR will be returned)
11145 The return value is either GS_ERROR or GS_ALL_DONE, since this
11146 function iterates until EXPR is completely gimplified or an error
11147 occurs. */
11149 enum gimplify_status
11150 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11151 bool (*gimple_test_f) (tree), fallback_t fallback)
11153 tree tmp;
11154 gimple_seq internal_pre = NULL;
11155 gimple_seq internal_post = NULL;
11156 tree save_expr;
11157 bool is_statement;
11158 location_t saved_location;
11159 enum gimplify_status ret;
11160 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11161 tree label;
11163 save_expr = *expr_p;
11164 if (save_expr == NULL_TREE)
11165 return GS_ALL_DONE;
11167 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11168 is_statement = gimple_test_f == is_gimple_stmt;
11169 if (is_statement)
11170 gcc_assert (pre_p);
11172 /* Consistency checks. */
11173 if (gimple_test_f == is_gimple_reg)
11174 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11175 else if (gimple_test_f == is_gimple_val
11176 || gimple_test_f == is_gimple_call_addr
11177 || gimple_test_f == is_gimple_condexpr
11178 || gimple_test_f == is_gimple_mem_rhs
11179 || gimple_test_f == is_gimple_mem_rhs_or_call
11180 || gimple_test_f == is_gimple_reg_rhs
11181 || gimple_test_f == is_gimple_reg_rhs_or_call
11182 || gimple_test_f == is_gimple_asm_val
11183 || gimple_test_f == is_gimple_mem_ref_addr)
11184 gcc_assert (fallback & fb_rvalue);
11185 else if (gimple_test_f == is_gimple_min_lval
11186 || gimple_test_f == is_gimple_lvalue)
11187 gcc_assert (fallback & fb_lvalue);
11188 else if (gimple_test_f == is_gimple_addressable)
11189 gcc_assert (fallback & fb_either);
11190 else if (gimple_test_f == is_gimple_stmt)
11191 gcc_assert (fallback == fb_none);
11192 else
11194 /* We should have recognized the GIMPLE_TEST_F predicate to
11195 know what kind of fallback to use in case a temporary is
11196 needed to hold the value or address of *EXPR_P. */
11197 gcc_unreachable ();
11200 /* We used to check the predicate here and return immediately if it
11201 succeeds. This is wrong; the design is for gimplification to be
11202 idempotent, and for the predicates to only test for valid forms, not
11203 whether they are fully simplified. */
11204 if (pre_p == NULL)
11205 pre_p = &internal_pre;
11207 if (post_p == NULL)
11208 post_p = &internal_post;
11210 /* Remember the last statements added to PRE_P and POST_P. Every
11211 new statement added by the gimplification helpers needs to be
11212 annotated with location information. To centralize the
11213 responsibility, we remember the last statement that had been
11214 added to both queues before gimplifying *EXPR_P. If
11215 gimplification produces new statements in PRE_P and POST_P, those
11216 statements will be annotated with the same location information
11217 as *EXPR_P. */
11218 pre_last_gsi = gsi_last (*pre_p);
11219 post_last_gsi = gsi_last (*post_p);
11221 saved_location = input_location;
11222 if (save_expr != error_mark_node
11223 && EXPR_HAS_LOCATION (*expr_p))
11224 input_location = EXPR_LOCATION (*expr_p);
11226 /* Loop over the specific gimplifiers until the toplevel node
11227 remains the same. */
11230 /* Strip away as many useless type conversions as possible
11231 at the toplevel. */
11232 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11234 /* Remember the expr. */
11235 save_expr = *expr_p;
11237 /* Die, die, die, my darling. */
11238 if (save_expr == error_mark_node
11239 || (TREE_TYPE (save_expr)
11240 && TREE_TYPE (save_expr) == error_mark_node))
11242 ret = GS_ERROR;
11243 break;
11246 /* Do any language-specific gimplification. */
11247 ret = ((enum gimplify_status)
11248 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11249 if (ret == GS_OK)
11251 if (*expr_p == NULL_TREE)
11252 break;
11253 if (*expr_p != save_expr)
11254 continue;
11256 else if (ret != GS_UNHANDLED)
11257 break;
11259 /* Make sure that all the cases set 'ret' appropriately. */
11260 ret = GS_UNHANDLED;
11261 switch (TREE_CODE (*expr_p))
11263 /* First deal with the special cases. */
11265 case POSTINCREMENT_EXPR:
11266 case POSTDECREMENT_EXPR:
11267 case PREINCREMENT_EXPR:
11268 case PREDECREMENT_EXPR:
11269 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11270 fallback != fb_none,
11271 TREE_TYPE (*expr_p));
11272 break;
11274 case VIEW_CONVERT_EXPR:
11275 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11276 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11278 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11279 post_p, is_gimple_val, fb_rvalue);
11280 recalculate_side_effects (*expr_p);
11281 break;
11283 /* Fallthru. */
11285 case ARRAY_REF:
11286 case ARRAY_RANGE_REF:
11287 case REALPART_EXPR:
11288 case IMAGPART_EXPR:
11289 case COMPONENT_REF:
11290 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11291 fallback ? fallback : fb_rvalue);
11292 break;
11294 case COND_EXPR:
11295 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11297 /* C99 code may assign to an array in a structure value of a
11298 conditional expression, and this has undefined behavior
11299 only on execution, so create a temporary if an lvalue is
11300 required. */
11301 if (fallback == fb_lvalue)
11303 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11304 mark_addressable (*expr_p);
11305 ret = GS_OK;
11307 break;
11309 case CALL_EXPR:
11310 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11312 /* C99 code may assign to an array in a structure returned
11313 from a function, and this has undefined behavior only on
11314 execution, so create a temporary if an lvalue is
11315 required. */
11316 if (fallback == fb_lvalue)
11318 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11319 mark_addressable (*expr_p);
11320 ret = GS_OK;
11322 break;
11324 case TREE_LIST:
11325 gcc_unreachable ();
11327 case COMPOUND_EXPR:
11328 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11329 break;
11331 case COMPOUND_LITERAL_EXPR:
11332 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11333 gimple_test_f, fallback);
11334 break;
11336 case MODIFY_EXPR:
11337 case INIT_EXPR:
11338 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11339 fallback != fb_none);
11340 break;
11342 case TRUTH_ANDIF_EXPR:
11343 case TRUTH_ORIF_EXPR:
11345 /* Preserve the original type of the expression and the
11346 source location of the outer expression. */
11347 tree org_type = TREE_TYPE (*expr_p);
11348 *expr_p = gimple_boolify (*expr_p);
11349 *expr_p = build3_loc (input_location, COND_EXPR,
11350 org_type, *expr_p,
11351 fold_convert_loc
11352 (input_location,
11353 org_type, boolean_true_node),
11354 fold_convert_loc
11355 (input_location,
11356 org_type, boolean_false_node));
11357 ret = GS_OK;
11358 break;
11361 case TRUTH_NOT_EXPR:
11363 tree type = TREE_TYPE (*expr_p);
11364 /* The parsers are careful to generate TRUTH_NOT_EXPR
11365 only with operands that are always zero or one.
11366 We do not fold here but handle the only interesting case
11367 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11368 *expr_p = gimple_boolify (*expr_p);
11369 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11370 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11371 TREE_TYPE (*expr_p),
11372 TREE_OPERAND (*expr_p, 0));
11373 else
11374 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11375 TREE_TYPE (*expr_p),
11376 TREE_OPERAND (*expr_p, 0),
11377 build_int_cst (TREE_TYPE (*expr_p), 1));
11378 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11379 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11380 ret = GS_OK;
11381 break;
11384 case ADDR_EXPR:
11385 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11386 break;
11388 case ANNOTATE_EXPR:
11390 tree cond = TREE_OPERAND (*expr_p, 0);
11391 tree kind = TREE_OPERAND (*expr_p, 1);
11392 tree type = TREE_TYPE (cond);
11393 if (!INTEGRAL_TYPE_P (type))
11395 *expr_p = cond;
11396 ret = GS_OK;
11397 break;
11399 tree tmp = create_tmp_var (type);
11400 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11401 gcall *call
11402 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11403 gimple_call_set_lhs (call, tmp);
11404 gimplify_seq_add_stmt (pre_p, call);
11405 *expr_p = tmp;
11406 ret = GS_ALL_DONE;
11407 break;
11410 case VA_ARG_EXPR:
11411 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11412 break;
11414 CASE_CONVERT:
11415 if (IS_EMPTY_STMT (*expr_p))
11417 ret = GS_ALL_DONE;
11418 break;
11421 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11422 || fallback == fb_none)
11424 /* Just strip a conversion to void (or in void context) and
11425 try again. */
11426 *expr_p = TREE_OPERAND (*expr_p, 0);
11427 ret = GS_OK;
11428 break;
11431 ret = gimplify_conversion (expr_p);
11432 if (ret == GS_ERROR)
11433 break;
11434 if (*expr_p != save_expr)
11435 break;
11436 /* FALLTHRU */
11438 case FIX_TRUNC_EXPR:
11439 /* unary_expr: ... | '(' cast ')' val | ... */
11440 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11441 is_gimple_val, fb_rvalue);
11442 recalculate_side_effects (*expr_p);
11443 break;
11445 case INDIRECT_REF:
11447 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11448 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11449 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11451 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11452 if (*expr_p != save_expr)
11454 ret = GS_OK;
11455 break;
11458 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11459 is_gimple_reg, fb_rvalue);
11460 if (ret == GS_ERROR)
11461 break;
11463 recalculate_side_effects (*expr_p);
11464 *expr_p = fold_build2_loc (input_location, MEM_REF,
11465 TREE_TYPE (*expr_p),
11466 TREE_OPERAND (*expr_p, 0),
11467 build_int_cst (saved_ptr_type, 0));
11468 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11469 TREE_THIS_NOTRAP (*expr_p) = notrap;
11470 ret = GS_OK;
11471 break;
11474 /* We arrive here through the various re-gimplifcation paths. */
11475 case MEM_REF:
11476 /* First try re-folding the whole thing. */
11477 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11478 TREE_OPERAND (*expr_p, 0),
11479 TREE_OPERAND (*expr_p, 1));
11480 if (tmp)
11482 REF_REVERSE_STORAGE_ORDER (tmp)
11483 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11484 *expr_p = tmp;
11485 recalculate_side_effects (*expr_p);
11486 ret = GS_OK;
11487 break;
11489 /* Avoid re-gimplifying the address operand if it is already
11490 in suitable form. Re-gimplifying would mark the address
11491 operand addressable. Always gimplify when not in SSA form
11492 as we still may have to gimplify decls with value-exprs. */
11493 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11494 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11496 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11497 is_gimple_mem_ref_addr, fb_rvalue);
11498 if (ret == GS_ERROR)
11499 break;
11501 recalculate_side_effects (*expr_p);
11502 ret = GS_ALL_DONE;
11503 break;
11505 /* Constants need not be gimplified. */
11506 case INTEGER_CST:
11507 case REAL_CST:
11508 case FIXED_CST:
11509 case STRING_CST:
11510 case COMPLEX_CST:
11511 case VECTOR_CST:
11512 /* Drop the overflow flag on constants, we do not want
11513 that in the GIMPLE IL. */
11514 if (TREE_OVERFLOW_P (*expr_p))
11515 *expr_p = drop_tree_overflow (*expr_p);
11516 ret = GS_ALL_DONE;
11517 break;
11519 case CONST_DECL:
11520 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11521 CONST_DECL node. Otherwise the decl is replaceable by its
11522 value. */
11523 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11524 if (fallback & fb_lvalue)
11525 ret = GS_ALL_DONE;
11526 else
11528 *expr_p = DECL_INITIAL (*expr_p);
11529 ret = GS_OK;
11531 break;
11533 case DECL_EXPR:
11534 ret = gimplify_decl_expr (expr_p, pre_p);
11535 break;
11537 case BIND_EXPR:
11538 ret = gimplify_bind_expr (expr_p, pre_p);
11539 break;
11541 case LOOP_EXPR:
11542 ret = gimplify_loop_expr (expr_p, pre_p);
11543 break;
11545 case SWITCH_EXPR:
11546 ret = gimplify_switch_expr (expr_p, pre_p);
11547 break;
11549 case EXIT_EXPR:
11550 ret = gimplify_exit_expr (expr_p);
11551 break;
11553 case GOTO_EXPR:
11554 /* If the target is not LABEL, then it is a computed jump
11555 and the target needs to be gimplified. */
11556 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11558 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11559 NULL, is_gimple_val, fb_rvalue);
11560 if (ret == GS_ERROR)
11561 break;
11563 gimplify_seq_add_stmt (pre_p,
11564 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11565 ret = GS_ALL_DONE;
11566 break;
11568 case PREDICT_EXPR:
11569 gimplify_seq_add_stmt (pre_p,
11570 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11571 PREDICT_EXPR_OUTCOME (*expr_p)));
11572 ret = GS_ALL_DONE;
11573 break;
11575 case LABEL_EXPR:
11576 ret = gimplify_label_expr (expr_p, pre_p);
11577 label = LABEL_EXPR_LABEL (*expr_p);
11578 gcc_assert (decl_function_context (label) == current_function_decl);
11580 /* If the label is used in a goto statement, or address of the label
11581 is taken, we need to unpoison all variables that were seen so far.
11582 Doing so would prevent us from reporting a false positives. */
11583 if (asan_poisoned_variables
11584 && asan_used_labels != NULL
11585 && asan_used_labels->contains (label))
11586 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11587 break;
11589 case CASE_LABEL_EXPR:
11590 ret = gimplify_case_label_expr (expr_p, pre_p);
11592 if (gimplify_ctxp->live_switch_vars)
11593 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11594 pre_p);
11595 break;
11597 case RETURN_EXPR:
11598 ret = gimplify_return_expr (*expr_p, pre_p);
11599 break;
11601 case CONSTRUCTOR:
11602 /* Don't reduce this in place; let gimplify_init_constructor work its
11603 magic. Buf if we're just elaborating this for side effects, just
11604 gimplify any element that has side-effects. */
11605 if (fallback == fb_none)
11607 unsigned HOST_WIDE_INT ix;
11608 tree val;
11609 tree temp = NULL_TREE;
11610 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11611 if (TREE_SIDE_EFFECTS (val))
11612 append_to_statement_list (val, &temp);
11614 *expr_p = temp;
11615 ret = temp ? GS_OK : GS_ALL_DONE;
11617 /* C99 code may assign to an array in a constructed
11618 structure or union, and this has undefined behavior only
11619 on execution, so create a temporary if an lvalue is
11620 required. */
11621 else if (fallback == fb_lvalue)
11623 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11624 mark_addressable (*expr_p);
11625 ret = GS_OK;
11627 else
11628 ret = GS_ALL_DONE;
11629 break;
11631 /* The following are special cases that are not handled by the
11632 original GIMPLE grammar. */
11634 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11635 eliminated. */
11636 case SAVE_EXPR:
11637 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11638 break;
11640 case BIT_FIELD_REF:
11641 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11642 post_p, is_gimple_lvalue, fb_either);
11643 recalculate_side_effects (*expr_p);
11644 break;
11646 case TARGET_MEM_REF:
11648 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11650 if (TMR_BASE (*expr_p))
11651 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11652 post_p, is_gimple_mem_ref_addr, fb_either);
11653 if (TMR_INDEX (*expr_p))
11654 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11655 post_p, is_gimple_val, fb_rvalue);
11656 if (TMR_INDEX2 (*expr_p))
11657 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11658 post_p, is_gimple_val, fb_rvalue);
11659 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11660 ret = MIN (r0, r1);
11662 break;
11664 case NON_LVALUE_EXPR:
11665 /* This should have been stripped above. */
11666 gcc_unreachable ();
11668 case ASM_EXPR:
11669 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11670 break;
11672 case TRY_FINALLY_EXPR:
11673 case TRY_CATCH_EXPR:
11675 gimple_seq eval, cleanup;
11676 gtry *try_;
11678 /* Calls to destructors are generated automatically in FINALLY/CATCH
11679 block. They should have location as UNKNOWN_LOCATION. However,
11680 gimplify_call_expr will reset these call stmts to input_location
11681 if it finds stmt's location is unknown. To prevent resetting for
11682 destructors, we set the input_location to unknown.
11683 Note that this only affects the destructor calls in FINALLY/CATCH
11684 block, and will automatically reset to its original value by the
11685 end of gimplify_expr. */
11686 input_location = UNKNOWN_LOCATION;
11687 eval = cleanup = NULL;
11688 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11689 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11690 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11691 if (gimple_seq_empty_p (cleanup))
11693 gimple_seq_add_seq (pre_p, eval);
11694 ret = GS_ALL_DONE;
11695 break;
11697 try_ = gimple_build_try (eval, cleanup,
11698 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11699 ? GIMPLE_TRY_FINALLY
11700 : GIMPLE_TRY_CATCH);
11701 if (EXPR_HAS_LOCATION (save_expr))
11702 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11703 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11704 gimple_set_location (try_, saved_location);
11705 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11706 gimple_try_set_catch_is_cleanup (try_,
11707 TRY_CATCH_IS_CLEANUP (*expr_p));
11708 gimplify_seq_add_stmt (pre_p, try_);
11709 ret = GS_ALL_DONE;
11710 break;
11713 case CLEANUP_POINT_EXPR:
11714 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11715 break;
11717 case TARGET_EXPR:
11718 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11719 break;
11721 case CATCH_EXPR:
11723 gimple *c;
11724 gimple_seq handler = NULL;
11725 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11726 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11727 gimplify_seq_add_stmt (pre_p, c);
11728 ret = GS_ALL_DONE;
11729 break;
11732 case EH_FILTER_EXPR:
11734 gimple *ehf;
11735 gimple_seq failure = NULL;
11737 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11738 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11739 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11740 gimplify_seq_add_stmt (pre_p, ehf);
11741 ret = GS_ALL_DONE;
11742 break;
11745 case OBJ_TYPE_REF:
11747 enum gimplify_status r0, r1;
11748 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11749 post_p, is_gimple_val, fb_rvalue);
11750 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11751 post_p, is_gimple_val, fb_rvalue);
11752 TREE_SIDE_EFFECTS (*expr_p) = 0;
11753 ret = MIN (r0, r1);
11755 break;
11757 case LABEL_DECL:
11758 /* We get here when taking the address of a label. We mark
11759 the label as "forced"; meaning it can never be removed and
11760 it is a potential target for any computed goto. */
11761 FORCED_LABEL (*expr_p) = 1;
11762 ret = GS_ALL_DONE;
11763 break;
11765 case STATEMENT_LIST:
11766 ret = gimplify_statement_list (expr_p, pre_p);
11767 break;
11769 case WITH_SIZE_EXPR:
11771 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11772 post_p == &internal_post ? NULL : post_p,
11773 gimple_test_f, fallback);
11774 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11775 is_gimple_val, fb_rvalue);
11776 ret = GS_ALL_DONE;
11778 break;
11780 case VAR_DECL:
11781 case PARM_DECL:
11782 ret = gimplify_var_or_parm_decl (expr_p);
11783 break;
11785 case RESULT_DECL:
11786 /* When within an OMP context, notice uses of variables. */
11787 if (gimplify_omp_ctxp)
11788 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11789 ret = GS_ALL_DONE;
11790 break;
11792 case SSA_NAME:
11793 /* Allow callbacks into the gimplifier during optimization. */
11794 ret = GS_ALL_DONE;
11795 break;
11797 case OMP_PARALLEL:
11798 gimplify_omp_parallel (expr_p, pre_p);
11799 ret = GS_ALL_DONE;
11800 break;
11802 case OMP_TASK:
11803 gimplify_omp_task (expr_p, pre_p);
11804 ret = GS_ALL_DONE;
11805 break;
11807 case OMP_FOR:
11808 case OMP_SIMD:
11809 case CILK_SIMD:
11810 case CILK_FOR:
11811 case OMP_DISTRIBUTE:
11812 case OMP_TASKLOOP:
11813 case OACC_LOOP:
11814 ret = gimplify_omp_for (expr_p, pre_p);
11815 break;
11817 case OACC_CACHE:
11818 gimplify_oacc_cache (expr_p, pre_p);
11819 ret = GS_ALL_DONE;
11820 break;
11822 case OACC_DECLARE:
11823 gimplify_oacc_declare (expr_p, pre_p);
11824 ret = GS_ALL_DONE;
11825 break;
11827 case OACC_HOST_DATA:
11828 case OACC_DATA:
11829 case OACC_KERNELS:
11830 case OACC_PARALLEL:
11831 case OMP_SECTIONS:
11832 case OMP_SINGLE:
11833 case OMP_TARGET:
11834 case OMP_TARGET_DATA:
11835 case OMP_TEAMS:
11836 gimplify_omp_workshare (expr_p, pre_p);
11837 ret = GS_ALL_DONE;
11838 break;
11840 case OACC_ENTER_DATA:
11841 case OACC_EXIT_DATA:
11842 case OACC_UPDATE:
11843 case OMP_TARGET_UPDATE:
11844 case OMP_TARGET_ENTER_DATA:
11845 case OMP_TARGET_EXIT_DATA:
11846 gimplify_omp_target_update (expr_p, pre_p);
11847 ret = GS_ALL_DONE;
11848 break;
11850 case OMP_SECTION:
11851 case OMP_MASTER:
11852 case OMP_TASKGROUP:
11853 case OMP_ORDERED:
11854 case OMP_CRITICAL:
11856 gimple_seq body = NULL;
11857 gimple *g;
11859 gimplify_and_add (OMP_BODY (*expr_p), &body);
11860 switch (TREE_CODE (*expr_p))
11862 case OMP_SECTION:
11863 g = gimple_build_omp_section (body);
11864 break;
11865 case OMP_MASTER:
11866 g = gimple_build_omp_master (body);
11867 break;
11868 case OMP_TASKGROUP:
11870 gimple_seq cleanup = NULL;
11871 tree fn
11872 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11873 g = gimple_build_call (fn, 0);
11874 gimple_seq_add_stmt (&cleanup, g);
11875 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11876 body = NULL;
11877 gimple_seq_add_stmt (&body, g);
11878 g = gimple_build_omp_taskgroup (body);
11880 break;
11881 case OMP_ORDERED:
11882 g = gimplify_omp_ordered (*expr_p, body);
11883 break;
11884 case OMP_CRITICAL:
11885 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11886 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11887 gimplify_adjust_omp_clauses (pre_p, body,
11888 &OMP_CRITICAL_CLAUSES (*expr_p),
11889 OMP_CRITICAL);
11890 g = gimple_build_omp_critical (body,
11891 OMP_CRITICAL_NAME (*expr_p),
11892 OMP_CRITICAL_CLAUSES (*expr_p));
11893 break;
11894 default:
11895 gcc_unreachable ();
11897 gimplify_seq_add_stmt (pre_p, g);
11898 ret = GS_ALL_DONE;
11899 break;
11902 case OMP_ATOMIC:
11903 case OMP_ATOMIC_READ:
11904 case OMP_ATOMIC_CAPTURE_OLD:
11905 case OMP_ATOMIC_CAPTURE_NEW:
11906 ret = gimplify_omp_atomic (expr_p, pre_p);
11907 break;
11909 case TRANSACTION_EXPR:
11910 ret = gimplify_transaction (expr_p, pre_p);
11911 break;
11913 case TRUTH_AND_EXPR:
11914 case TRUTH_OR_EXPR:
11915 case TRUTH_XOR_EXPR:
11917 tree orig_type = TREE_TYPE (*expr_p);
11918 tree new_type, xop0, xop1;
11919 *expr_p = gimple_boolify (*expr_p);
11920 new_type = TREE_TYPE (*expr_p);
11921 if (!useless_type_conversion_p (orig_type, new_type))
11923 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11924 ret = GS_OK;
11925 break;
11928 /* Boolified binary truth expressions are semantically equivalent
11929 to bitwise binary expressions. Canonicalize them to the
11930 bitwise variant. */
11931 switch (TREE_CODE (*expr_p))
11933 case TRUTH_AND_EXPR:
11934 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11935 break;
11936 case TRUTH_OR_EXPR:
11937 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11938 break;
11939 case TRUTH_XOR_EXPR:
11940 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11941 break;
11942 default:
11943 break;
11945 /* Now make sure that operands have compatible type to
11946 expression's new_type. */
11947 xop0 = TREE_OPERAND (*expr_p, 0);
11948 xop1 = TREE_OPERAND (*expr_p, 1);
11949 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11950 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11951 new_type,
11952 xop0);
11953 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11954 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11955 new_type,
11956 xop1);
11957 /* Continue classified as tcc_binary. */
11958 goto expr_2;
11961 case VEC_COND_EXPR:
11963 enum gimplify_status r0, r1, r2;
11965 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11966 post_p, is_gimple_condexpr, fb_rvalue);
11967 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11968 post_p, is_gimple_val, fb_rvalue);
11969 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11970 post_p, is_gimple_val, fb_rvalue);
11972 ret = MIN (MIN (r0, r1), r2);
11973 recalculate_side_effects (*expr_p);
11975 break;
11977 case FMA_EXPR:
11978 case VEC_PERM_EXPR:
11979 /* Classified as tcc_expression. */
11980 goto expr_3;
11982 case BIT_INSERT_EXPR:
11983 /* Argument 3 is a constant. */
11984 goto expr_2;
11986 case POINTER_PLUS_EXPR:
11988 enum gimplify_status r0, r1;
11989 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11990 post_p, is_gimple_val, fb_rvalue);
11991 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11992 post_p, is_gimple_val, fb_rvalue);
11993 recalculate_side_effects (*expr_p);
11994 ret = MIN (r0, r1);
11995 break;
11998 case CILK_SYNC_STMT:
12000 if (!fn_contains_cilk_spawn_p (cfun))
12002 error_at (EXPR_LOCATION (*expr_p),
12003 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
12004 ret = GS_ERROR;
12006 else
12008 gimplify_cilk_sync (expr_p, pre_p);
12009 ret = GS_ALL_DONE;
12011 break;
12014 default:
12015 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12017 case tcc_comparison:
12018 /* Handle comparison of objects of non scalar mode aggregates
12019 with a call to memcmp. It would be nice to only have to do
12020 this for variable-sized objects, but then we'd have to allow
12021 the same nest of reference nodes we allow for MODIFY_EXPR and
12022 that's too complex.
12024 Compare scalar mode aggregates as scalar mode values. Using
12025 memcmp for them would be very inefficient at best, and is
12026 plain wrong if bitfields are involved. */
12028 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12030 /* Vector comparisons need no boolification. */
12031 if (TREE_CODE (type) == VECTOR_TYPE)
12032 goto expr_2;
12033 else if (!AGGREGATE_TYPE_P (type))
12035 tree org_type = TREE_TYPE (*expr_p);
12036 *expr_p = gimple_boolify (*expr_p);
12037 if (!useless_type_conversion_p (org_type,
12038 TREE_TYPE (*expr_p)))
12040 *expr_p = fold_convert_loc (input_location,
12041 org_type, *expr_p);
12042 ret = GS_OK;
12044 else
12045 goto expr_2;
12047 else if (TYPE_MODE (type) != BLKmode)
12048 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12049 else
12050 ret = gimplify_variable_sized_compare (expr_p);
12052 break;
12055 /* If *EXPR_P does not need to be special-cased, handle it
12056 according to its class. */
12057 case tcc_unary:
12058 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12059 post_p, is_gimple_val, fb_rvalue);
12060 break;
12062 case tcc_binary:
12063 expr_2:
12065 enum gimplify_status r0, r1;
12067 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12068 post_p, is_gimple_val, fb_rvalue);
12069 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12070 post_p, is_gimple_val, fb_rvalue);
12072 ret = MIN (r0, r1);
12073 break;
12076 expr_3:
12078 enum gimplify_status r0, r1, r2;
12080 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12081 post_p, is_gimple_val, fb_rvalue);
12082 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12083 post_p, is_gimple_val, fb_rvalue);
12084 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12085 post_p, is_gimple_val, fb_rvalue);
12087 ret = MIN (MIN (r0, r1), r2);
12088 break;
12091 case tcc_declaration:
12092 case tcc_constant:
12093 ret = GS_ALL_DONE;
12094 goto dont_recalculate;
12096 default:
12097 gcc_unreachable ();
12100 recalculate_side_effects (*expr_p);
12102 dont_recalculate:
12103 break;
12106 gcc_assert (*expr_p || ret != GS_OK);
12108 while (ret == GS_OK);
12110 /* If we encountered an error_mark somewhere nested inside, either
12111 stub out the statement or propagate the error back out. */
12112 if (ret == GS_ERROR)
12114 if (is_statement)
12115 *expr_p = NULL;
12116 goto out;
12119 /* This was only valid as a return value from the langhook, which
12120 we handled. Make sure it doesn't escape from any other context. */
12121 gcc_assert (ret != GS_UNHANDLED);
12123 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12125 /* We aren't looking for a value, and we don't have a valid
12126 statement. If it doesn't have side-effects, throw it away.
12127 We can also get here with code such as "*&&L;", where L is
12128 a LABEL_DECL that is marked as FORCED_LABEL. */
12129 if (TREE_CODE (*expr_p) == LABEL_DECL
12130 || !TREE_SIDE_EFFECTS (*expr_p))
12131 *expr_p = NULL;
12132 else if (!TREE_THIS_VOLATILE (*expr_p))
12134 /* This is probably a _REF that contains something nested that
12135 has side effects. Recurse through the operands to find it. */
12136 enum tree_code code = TREE_CODE (*expr_p);
12138 switch (code)
12140 case COMPONENT_REF:
12141 case REALPART_EXPR:
12142 case IMAGPART_EXPR:
12143 case VIEW_CONVERT_EXPR:
12144 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12145 gimple_test_f, fallback);
12146 break;
12148 case ARRAY_REF:
12149 case ARRAY_RANGE_REF:
12150 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12151 gimple_test_f, fallback);
12152 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12153 gimple_test_f, fallback);
12154 break;
12156 default:
12157 /* Anything else with side-effects must be converted to
12158 a valid statement before we get here. */
12159 gcc_unreachable ();
12162 *expr_p = NULL;
12164 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12165 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12167 /* Historically, the compiler has treated a bare reference
12168 to a non-BLKmode volatile lvalue as forcing a load. */
12169 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12171 /* Normally, we do not want to create a temporary for a
12172 TREE_ADDRESSABLE type because such a type should not be
12173 copied by bitwise-assignment. However, we make an
12174 exception here, as all we are doing here is ensuring that
12175 we read the bytes that make up the type. We use
12176 create_tmp_var_raw because create_tmp_var will abort when
12177 given a TREE_ADDRESSABLE type. */
12178 tree tmp = create_tmp_var_raw (type, "vol");
12179 gimple_add_tmp_var (tmp);
12180 gimplify_assign (tmp, *expr_p, pre_p);
12181 *expr_p = NULL;
12183 else
12184 /* We can't do anything useful with a volatile reference to
12185 an incomplete type, so just throw it away. Likewise for
12186 a BLKmode type, since any implicit inner load should
12187 already have been turned into an explicit one by the
12188 gimplification process. */
12189 *expr_p = NULL;
12192 /* If we are gimplifying at the statement level, we're done. Tack
12193 everything together and return. */
12194 if (fallback == fb_none || is_statement)
12196 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12197 it out for GC to reclaim it. */
12198 *expr_p = NULL_TREE;
12200 if (!gimple_seq_empty_p (internal_pre)
12201 || !gimple_seq_empty_p (internal_post))
12203 gimplify_seq_add_seq (&internal_pre, internal_post);
12204 gimplify_seq_add_seq (pre_p, internal_pre);
12207 /* The result of gimplifying *EXPR_P is going to be the last few
12208 statements in *PRE_P and *POST_P. Add location information
12209 to all the statements that were added by the gimplification
12210 helpers. */
12211 if (!gimple_seq_empty_p (*pre_p))
12212 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12214 if (!gimple_seq_empty_p (*post_p))
12215 annotate_all_with_location_after (*post_p, post_last_gsi,
12216 input_location);
12218 goto out;
12221 #ifdef ENABLE_GIMPLE_CHECKING
12222 if (*expr_p)
12224 enum tree_code code = TREE_CODE (*expr_p);
12225 /* These expressions should already be in gimple IR form. */
12226 gcc_assert (code != MODIFY_EXPR
12227 && code != ASM_EXPR
12228 && code != BIND_EXPR
12229 && code != CATCH_EXPR
12230 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12231 && code != EH_FILTER_EXPR
12232 && code != GOTO_EXPR
12233 && code != LABEL_EXPR
12234 && code != LOOP_EXPR
12235 && code != SWITCH_EXPR
12236 && code != TRY_FINALLY_EXPR
12237 && code != OACC_PARALLEL
12238 && code != OACC_KERNELS
12239 && code != OACC_DATA
12240 && code != OACC_HOST_DATA
12241 && code != OACC_DECLARE
12242 && code != OACC_UPDATE
12243 && code != OACC_ENTER_DATA
12244 && code != OACC_EXIT_DATA
12245 && code != OACC_CACHE
12246 && code != OMP_CRITICAL
12247 && code != OMP_FOR
12248 && code != OACC_LOOP
12249 && code != OMP_MASTER
12250 && code != OMP_TASKGROUP
12251 && code != OMP_ORDERED
12252 && code != OMP_PARALLEL
12253 && code != OMP_SECTIONS
12254 && code != OMP_SECTION
12255 && code != OMP_SINGLE);
12257 #endif
12259 /* Otherwise we're gimplifying a subexpression, so the resulting
12260 value is interesting. If it's a valid operand that matches
12261 GIMPLE_TEST_F, we're done. Unless we are handling some
12262 post-effects internally; if that's the case, we need to copy into
12263 a temporary before adding the post-effects to POST_P. */
12264 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12265 goto out;
12267 /* Otherwise, we need to create a new temporary for the gimplified
12268 expression. */
12270 /* We can't return an lvalue if we have an internal postqueue. The
12271 object the lvalue refers to would (probably) be modified by the
12272 postqueue; we need to copy the value out first, which means an
12273 rvalue. */
12274 if ((fallback & fb_lvalue)
12275 && gimple_seq_empty_p (internal_post)
12276 && is_gimple_addressable (*expr_p))
12278 /* An lvalue will do. Take the address of the expression, store it
12279 in a temporary, and replace the expression with an INDIRECT_REF of
12280 that temporary. */
12281 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12282 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12283 *expr_p = build_simple_mem_ref (tmp);
12285 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12287 /* An rvalue will do. Assign the gimplified expression into a
12288 new temporary TMP and replace the original expression with
12289 TMP. First, make sure that the expression has a type so that
12290 it can be assigned into a temporary. */
12291 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12292 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12294 else
12296 #ifdef ENABLE_GIMPLE_CHECKING
12297 if (!(fallback & fb_mayfail))
12299 fprintf (stderr, "gimplification failed:\n");
12300 print_generic_expr (stderr, *expr_p);
12301 debug_tree (*expr_p);
12302 internal_error ("gimplification failed");
12304 #endif
12305 gcc_assert (fallback & fb_mayfail);
12307 /* If this is an asm statement, and the user asked for the
12308 impossible, don't die. Fail and let gimplify_asm_expr
12309 issue an error. */
12310 ret = GS_ERROR;
12311 goto out;
12314 /* Make sure the temporary matches our predicate. */
12315 gcc_assert ((*gimple_test_f) (*expr_p));
12317 if (!gimple_seq_empty_p (internal_post))
12319 annotate_all_with_location (internal_post, input_location);
12320 gimplify_seq_add_seq (pre_p, internal_post);
12323 out:
12324 input_location = saved_location;
12325 return ret;
12328 /* Like gimplify_expr but make sure the gimplified result is not itself
12329 a SSA name (but a decl if it were). Temporaries required by
12330 evaluating *EXPR_P may be still SSA names. */
12332 static enum gimplify_status
12333 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12334 bool (*gimple_test_f) (tree), fallback_t fallback,
12335 bool allow_ssa)
12337 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12338 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12339 gimple_test_f, fallback);
12340 if (! allow_ssa
12341 && TREE_CODE (*expr_p) == SSA_NAME)
12343 tree name = *expr_p;
12344 if (was_ssa_name_p)
12345 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12346 else
12348 /* Avoid the extra copy if possible. */
12349 *expr_p = create_tmp_reg (TREE_TYPE (name));
12350 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12351 release_ssa_name (name);
12354 return ret;
12357 /* Look through TYPE for variable-sized objects and gimplify each such
12358 size that we find. Add to LIST_P any statements generated. */
12360 void
12361 gimplify_type_sizes (tree type, gimple_seq *list_p)
12363 tree field, t;
12365 if (type == NULL || type == error_mark_node)
12366 return;
12368 /* We first do the main variant, then copy into any other variants. */
12369 type = TYPE_MAIN_VARIANT (type);
12371 /* Avoid infinite recursion. */
12372 if (TYPE_SIZES_GIMPLIFIED (type))
12373 return;
12375 TYPE_SIZES_GIMPLIFIED (type) = 1;
12377 switch (TREE_CODE (type))
12379 case INTEGER_TYPE:
12380 case ENUMERAL_TYPE:
12381 case BOOLEAN_TYPE:
12382 case REAL_TYPE:
12383 case FIXED_POINT_TYPE:
12384 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12385 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12387 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12389 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12390 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12392 break;
12394 case ARRAY_TYPE:
12395 /* These types may not have declarations, so handle them here. */
12396 gimplify_type_sizes (TREE_TYPE (type), list_p);
12397 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12398 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12399 with assigned stack slots, for -O1+ -g they should be tracked
12400 by VTA. */
12401 if (!(TYPE_NAME (type)
12402 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12403 && DECL_IGNORED_P (TYPE_NAME (type)))
12404 && TYPE_DOMAIN (type)
12405 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12407 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12408 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12409 DECL_IGNORED_P (t) = 0;
12410 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12411 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12412 DECL_IGNORED_P (t) = 0;
12414 break;
12416 case RECORD_TYPE:
12417 case UNION_TYPE:
12418 case QUAL_UNION_TYPE:
12419 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12420 if (TREE_CODE (field) == FIELD_DECL)
12422 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12423 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12424 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12425 gimplify_type_sizes (TREE_TYPE (field), list_p);
12427 break;
12429 case POINTER_TYPE:
12430 case REFERENCE_TYPE:
12431 /* We used to recurse on the pointed-to type here, which turned out to
12432 be incorrect because its definition might refer to variables not
12433 yet initialized at this point if a forward declaration is involved.
12435 It was actually useful for anonymous pointed-to types to ensure
12436 that the sizes evaluation dominates every possible later use of the
12437 values. Restricting to such types here would be safe since there
12438 is no possible forward declaration around, but would introduce an
12439 undesirable middle-end semantic to anonymity. We then defer to
12440 front-ends the responsibility of ensuring that the sizes are
12441 evaluated both early and late enough, e.g. by attaching artificial
12442 type declarations to the tree. */
12443 break;
12445 default:
12446 break;
12449 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12450 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12452 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12454 TYPE_SIZE (t) = TYPE_SIZE (type);
12455 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12456 TYPE_SIZES_GIMPLIFIED (t) = 1;
12460 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12461 a size or position, has had all of its SAVE_EXPRs evaluated.
12462 We add any required statements to *STMT_P. */
12464 void
12465 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12467 tree expr = *expr_p;
12469 /* We don't do anything if the value isn't there, is constant, or contains
12470 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12471 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12472 will want to replace it with a new variable, but that will cause problems
12473 if this type is from outside the function. It's OK to have that here. */
12474 if (is_gimple_sizepos (expr))
12475 return;
12477 *expr_p = unshare_expr (expr);
12479 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12480 if the def vanishes. */
12481 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12484 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12485 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12486 is true, also gimplify the parameters. */
12488 gbind *
12489 gimplify_body (tree fndecl, bool do_parms)
12491 location_t saved_location = input_location;
12492 gimple_seq parm_stmts, seq;
12493 gimple *outer_stmt;
12494 gbind *outer_bind;
12495 struct cgraph_node *cgn;
12497 timevar_push (TV_TREE_GIMPLIFY);
12499 init_tree_ssa (cfun);
12501 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12502 gimplification. */
12503 default_rtl_profile ();
12505 gcc_assert (gimplify_ctxp == NULL);
12506 push_gimplify_context (true);
12508 if (flag_openacc || flag_openmp)
12510 gcc_assert (gimplify_omp_ctxp == NULL);
12511 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12512 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12515 /* Unshare most shared trees in the body and in that of any nested functions.
12516 It would seem we don't have to do this for nested functions because
12517 they are supposed to be output and then the outer function gimplified
12518 first, but the g++ front end doesn't always do it that way. */
12519 unshare_body (fndecl);
12520 unvisit_body (fndecl);
12522 cgn = cgraph_node::get (fndecl);
12523 if (cgn && cgn->origin)
12524 nonlocal_vlas = new hash_set<tree>;
12526 /* Make sure input_location isn't set to something weird. */
12527 input_location = DECL_SOURCE_LOCATION (fndecl);
12529 /* Resolve callee-copies. This has to be done before processing
12530 the body so that DECL_VALUE_EXPR gets processed correctly. */
12531 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12533 /* Gimplify the function's body. */
12534 seq = NULL;
12535 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12536 outer_stmt = gimple_seq_first_stmt (seq);
12537 if (!outer_stmt)
12539 outer_stmt = gimple_build_nop ();
12540 gimplify_seq_add_stmt (&seq, outer_stmt);
12543 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12544 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12545 if (gimple_code (outer_stmt) == GIMPLE_BIND
12546 && gimple_seq_first (seq) == gimple_seq_last (seq))
12547 outer_bind = as_a <gbind *> (outer_stmt);
12548 else
12549 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12551 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12553 /* If we had callee-copies statements, insert them at the beginning
12554 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12555 if (!gimple_seq_empty_p (parm_stmts))
12557 tree parm;
12559 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12560 gimple_bind_set_body (outer_bind, parm_stmts);
12562 for (parm = DECL_ARGUMENTS (current_function_decl);
12563 parm; parm = DECL_CHAIN (parm))
12564 if (DECL_HAS_VALUE_EXPR_P (parm))
12566 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12567 DECL_IGNORED_P (parm) = 0;
12571 if (nonlocal_vlas)
12573 if (nonlocal_vla_vars)
12575 /* tree-nested.c may later on call declare_vars (..., true);
12576 which relies on BLOCK_VARS chain to be the tail of the
12577 gimple_bind_vars chain. Ensure we don't violate that
12578 assumption. */
12579 if (gimple_bind_block (outer_bind)
12580 == DECL_INITIAL (current_function_decl))
12581 declare_vars (nonlocal_vla_vars, outer_bind, true);
12582 else
12583 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12584 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12585 nonlocal_vla_vars);
12586 nonlocal_vla_vars = NULL_TREE;
12588 delete nonlocal_vlas;
12589 nonlocal_vlas = NULL;
12592 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12593 && gimplify_omp_ctxp)
12595 delete_omp_context (gimplify_omp_ctxp);
12596 gimplify_omp_ctxp = NULL;
12599 pop_gimplify_context (outer_bind);
12600 gcc_assert (gimplify_ctxp == NULL);
12602 if (flag_checking && !seen_error ())
12603 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12605 timevar_pop (TV_TREE_GIMPLIFY);
12606 input_location = saved_location;
12608 return outer_bind;
12611 typedef char *char_p; /* For DEF_VEC_P. */
12613 /* Return whether we should exclude FNDECL from instrumentation. */
12615 static bool
12616 flag_instrument_functions_exclude_p (tree fndecl)
12618 vec<char_p> *v;
12620 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12621 if (v && v->length () > 0)
12623 const char *name;
12624 int i;
12625 char *s;
12627 name = lang_hooks.decl_printable_name (fndecl, 0);
12628 FOR_EACH_VEC_ELT (*v, i, s)
12629 if (strstr (name, s) != NULL)
12630 return true;
12633 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12634 if (v && v->length () > 0)
12636 const char *name;
12637 int i;
12638 char *s;
12640 name = DECL_SOURCE_FILE (fndecl);
12641 FOR_EACH_VEC_ELT (*v, i, s)
12642 if (strstr (name, s) != NULL)
12643 return true;
12646 return false;
12649 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12650 node for the function we want to gimplify.
12652 Return the sequence of GIMPLE statements corresponding to the body
12653 of FNDECL. */
12655 void
12656 gimplify_function_tree (tree fndecl)
12658 tree parm, ret;
12659 gimple_seq seq;
12660 gbind *bind;
12662 gcc_assert (!gimple_body (fndecl));
12664 if (DECL_STRUCT_FUNCTION (fndecl))
12665 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12666 else
12667 push_struct_function (fndecl);
12669 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12670 if necessary. */
12671 cfun->curr_properties |= PROP_gimple_lva;
12673 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12675 /* Preliminarily mark non-addressed complex variables as eligible
12676 for promotion to gimple registers. We'll transform their uses
12677 as we find them. */
12678 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12679 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12680 && !TREE_THIS_VOLATILE (parm)
12681 && !needs_to_live_in_memory (parm))
12682 DECL_GIMPLE_REG_P (parm) = 1;
12685 ret = DECL_RESULT (fndecl);
12686 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12687 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12688 && !needs_to_live_in_memory (ret))
12689 DECL_GIMPLE_REG_P (ret) = 1;
12691 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
12692 asan_poisoned_variables = new hash_set<tree> ();
12693 bind = gimplify_body (fndecl, true);
12694 if (asan_poisoned_variables)
12696 delete asan_poisoned_variables;
12697 asan_poisoned_variables = NULL;
12700 /* The tree body of the function is no longer needed, replace it
12701 with the new GIMPLE body. */
12702 seq = NULL;
12703 gimple_seq_add_stmt (&seq, bind);
12704 gimple_set_body (fndecl, seq);
12706 /* If we're instrumenting function entry/exit, then prepend the call to
12707 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12708 catch the exit hook. */
12709 /* ??? Add some way to ignore exceptions for this TFE. */
12710 if (flag_instrument_function_entry_exit
12711 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12712 /* Do not instrument extern inline functions. */
12713 && !(DECL_DECLARED_INLINE_P (fndecl)
12714 && DECL_EXTERNAL (fndecl)
12715 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12716 && !flag_instrument_functions_exclude_p (fndecl))
12718 tree x;
12719 gbind *new_bind;
12720 gimple *tf;
12721 gimple_seq cleanup = NULL, body = NULL;
12722 tree tmp_var;
12723 gcall *call;
12725 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12726 call = gimple_build_call (x, 1, integer_zero_node);
12727 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12728 gimple_call_set_lhs (call, tmp_var);
12729 gimplify_seq_add_stmt (&cleanup, call);
12730 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12731 call = gimple_build_call (x, 2,
12732 build_fold_addr_expr (current_function_decl),
12733 tmp_var);
12734 gimplify_seq_add_stmt (&cleanup, call);
12735 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12737 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12738 call = gimple_build_call (x, 1, integer_zero_node);
12739 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12740 gimple_call_set_lhs (call, tmp_var);
12741 gimplify_seq_add_stmt (&body, call);
12742 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12743 call = gimple_build_call (x, 2,
12744 build_fold_addr_expr (current_function_decl),
12745 tmp_var);
12746 gimplify_seq_add_stmt (&body, call);
12747 gimplify_seq_add_stmt (&body, tf);
12748 new_bind = gimple_build_bind (NULL, body, NULL);
12750 /* Replace the current function body with the body
12751 wrapped in the try/finally TF. */
12752 seq = NULL;
12753 gimple_seq_add_stmt (&seq, new_bind);
12754 gimple_set_body (fndecl, seq);
12755 bind = new_bind;
12758 if (sanitize_flags_p (SANITIZE_THREAD))
12760 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12761 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12762 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12763 /* Replace the current function body with the body
12764 wrapped in the try/finally TF. */
12765 seq = NULL;
12766 gimple_seq_add_stmt (&seq, new_bind);
12767 gimple_set_body (fndecl, seq);
12770 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12771 cfun->curr_properties |= PROP_gimple_any;
12773 pop_cfun ();
12775 dump_function (TDI_gimple, fndecl);
12778 /* Return a dummy expression of type TYPE in order to keep going after an
12779 error. */
12781 static tree
12782 dummy_object (tree type)
12784 tree t = build_int_cst (build_pointer_type (type), 0);
12785 return build2 (MEM_REF, type, t, t);
12788 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12789 builtin function, but a very special sort of operator. */
12791 enum gimplify_status
12792 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12793 gimple_seq *post_p ATTRIBUTE_UNUSED)
12795 tree promoted_type, have_va_type;
12796 tree valist = TREE_OPERAND (*expr_p, 0);
12797 tree type = TREE_TYPE (*expr_p);
12798 tree t, tag, aptag;
12799 location_t loc = EXPR_LOCATION (*expr_p);
12801 /* Verify that valist is of the proper type. */
12802 have_va_type = TREE_TYPE (valist);
12803 if (have_va_type == error_mark_node)
12804 return GS_ERROR;
12805 have_va_type = targetm.canonical_va_list_type (have_va_type);
12806 if (have_va_type == NULL_TREE
12807 && POINTER_TYPE_P (TREE_TYPE (valist)))
12808 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12809 have_va_type
12810 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12811 gcc_assert (have_va_type != NULL_TREE);
12813 /* Generate a diagnostic for requesting data of a type that cannot
12814 be passed through `...' due to type promotion at the call site. */
12815 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12816 != type)
12818 static bool gave_help;
12819 bool warned;
12820 /* Use the expansion point to handle cases such as passing bool (defined
12821 in a system header) through `...'. */
12822 source_location xloc
12823 = expansion_point_location_if_in_system_header (loc);
12825 /* Unfortunately, this is merely undefined, rather than a constraint
12826 violation, so we cannot make this an error. If this call is never
12827 executed, the program is still strictly conforming. */
12828 warned = warning_at (xloc, 0,
12829 "%qT is promoted to %qT when passed through %<...%>",
12830 type, promoted_type);
12831 if (!gave_help && warned)
12833 gave_help = true;
12834 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12835 promoted_type, type);
12838 /* We can, however, treat "undefined" any way we please.
12839 Call abort to encourage the user to fix the program. */
12840 if (warned)
12841 inform (xloc, "if this code is reached, the program will abort");
12842 /* Before the abort, allow the evaluation of the va_list
12843 expression to exit or longjmp. */
12844 gimplify_and_add (valist, pre_p);
12845 t = build_call_expr_loc (loc,
12846 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12847 gimplify_and_add (t, pre_p);
12849 /* This is dead code, but go ahead and finish so that the
12850 mode of the result comes out right. */
12851 *expr_p = dummy_object (type);
12852 return GS_ALL_DONE;
12855 tag = build_int_cst (build_pointer_type (type), 0);
12856 aptag = build_int_cst (TREE_TYPE (valist), 0);
12858 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12859 valist, tag, aptag);
12861 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12862 needs to be expanded. */
12863 cfun->curr_properties &= ~PROP_gimple_lva;
12865 return GS_OK;
12868 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12870 DST/SRC are the destination and source respectively. You can pass
12871 ungimplified trees in DST or SRC, in which case they will be
12872 converted to a gimple operand if necessary.
12874 This function returns the newly created GIMPLE_ASSIGN tuple. */
12876 gimple *
12877 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12879 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12880 gimplify_and_add (t, seq_p);
12881 ggc_free (t);
12882 return gimple_seq_last_stmt (*seq_p);
12885 inline hashval_t
12886 gimplify_hasher::hash (const elt_t *p)
12888 tree t = p->val;
12889 return iterative_hash_expr (t, 0);
12892 inline bool
12893 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12895 tree t1 = p1->val;
12896 tree t2 = p2->val;
12897 enum tree_code code = TREE_CODE (t1);
12899 if (TREE_CODE (t2) != code
12900 || TREE_TYPE (t1) != TREE_TYPE (t2))
12901 return false;
12903 if (!operand_equal_p (t1, t2, 0))
12904 return false;
12906 /* Only allow them to compare equal if they also hash equal; otherwise
12907 results are nondeterminate, and we fail bootstrap comparison. */
12908 gcc_checking_assert (hash (p1) == hash (p2));
12910 return true;