PR 44292 Enable large record lengths in OPEN and INQUIRE statements
[official-gcc.git] / gcc / gimplify.c
blobd71bd2ab01ecba77e899fd446e8fb4484e8c129b
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "cilk.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "dbgcnt.h"
70 /* Hash set of poisoned variables in a bind expr. */
71 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Per-variable flags recording how a DECL is used inside an
   OpenMP/OpenACC region during gimplification.  The low bits are the
   data-sharing class; the rest are modifier flags.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Mask of the mutually exclusive data-sharing classes.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of the OMP/OpenACC region currently being gimplified.  Values
   are bit-encoded so related kinds can be tested with masks (e.g. a
   combined construct is BASE | 1).  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
148 /* Gimplify hashtable helper. */
150 struct gimplify_hasher : free_ptr_hash <elt_t>
152 static inline hashval_t hash (const elt_t *);
153 static inline bool equal (const elt_t *, const elt_t *);
156 struct gimplify_ctx
158 struct gimplify_ctx *prev_context;
160 vec<gbind *> bind_expr_stack;
161 tree temps;
162 gimple_seq conditional_cleanups;
163 tree exit_label;
164 tree return_temp;
166 vec<tree> case_labels;
167 hash_set<tree> *live_switch_vars;
168 /* The formal temporary table. Should this be persistent? */
169 hash_table<gimplify_hasher> *temp_htab;
171 int conditions;
172 unsigned into_ssa : 1;
173 unsigned allow_rhs_cond_expr : 1;
174 unsigned in_cleanup_point_expr : 1;
175 unsigned keep_stack : 1;
176 unsigned save_stack : 1;
177 unsigned in_switch_expr : 1;
180 struct gimplify_omp_ctx
182 struct gimplify_omp_ctx *outer_context;
183 splay_tree variables;
184 hash_set<tree> *privatized_types;
185 /* Iteration variables in an OMP_FOR. */
186 vec<tree> loop_iter_var;
187 location_t location;
188 enum omp_clause_default_kind default_kind;
189 enum omp_region_type region_type;
190 bool combined_loop;
191 bool distribute;
192 bool target_map_scalars_firstprivate;
193 bool target_map_pointers_as_0len_arrays;
194 bool target_firstprivatize_array_bases;
197 static struct gimplify_ctx *gimplify_ctxp;
198 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
200 /* Forward declaration. */
201 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
202 static hash_map<tree, tree> *oacc_declare_returns;
203 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
204 bool (*) (tree), fallback_t, bool);
206 /* Shorter alias name for the above function for use in gimplify.c
207 only. */
209 static inline void
210 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
212 gimple_seq_add_stmt_without_update (seq_p, gs);
215 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
216 NULL, a new sequence is allocated. This function is
217 similar to gimple_seq_add_seq, but does not scan the operands.
218 During gimplification, we need to manipulate statement sequences
219 before the def/use vectors have been constructed. */
221 static void
222 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
224 gimple_stmt_iterator si;
226 if (src == NULL)
227 return;
229 si = gsi_last (*dst_p);
230 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
234 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
235 and popping gimplify contexts. */
237 static struct gimplify_ctx *ctx_pool = NULL;
239 /* Return a gimplify context struct from the pool. */
241 static inline struct gimplify_ctx *
242 ctx_alloc (void)
244 struct gimplify_ctx * c = ctx_pool;
246 if (c)
247 ctx_pool = c->prev_context;
248 else
249 c = XNEW (struct gimplify_ctx);
251 memset (c, '\0', sizeof (*c));
252 return c;
255 /* Put gimplify context C back into the pool. */
257 static inline void
258 ctx_free (struct gimplify_ctx *c)
260 c->prev_context = ctx_pool;
261 ctx_pool = c;
264 /* Free allocated ctx stack memory. */
266 void
267 free_gimplify_stack (void)
269 struct gimplify_ctx *c;
271 while ((c = ctx_pool))
273 ctx_pool = c->prev_context;
274 free (c);
279 /* Set up a context for the gimplifier. */
281 void
282 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
284 struct gimplify_ctx *c = ctx_alloc ();
286 c->prev_context = gimplify_ctxp;
287 gimplify_ctxp = c;
288 gimplify_ctxp->into_ssa = in_ssa;
289 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
292 /* Tear down a context for the gimplifier. If BODY is non-null, then
293 put the temporaries into the outer BIND_EXPR. Otherwise, put them
294 in the local_decls.
296 BODY is not a sequence, but the first tuple in a sequence. */
298 void
299 pop_gimplify_context (gimple *body)
301 struct gimplify_ctx *c = gimplify_ctxp;
303 gcc_assert (c
304 && (!c->bind_expr_stack.exists ()
305 || c->bind_expr_stack.is_empty ()));
306 c->bind_expr_stack.release ();
307 gimplify_ctxp = c->prev_context;
309 if (body)
310 declare_vars (c->temps, body, false);
311 else
312 record_vars (c->temps);
314 delete c->temp_htab;
315 c->temp_htab = NULL;
316 ctx_free (c);
319 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
321 static void
322 gimple_push_bind_expr (gbind *bind_stmt)
324 gimplify_ctxp->bind_expr_stack.reserve (8);
325 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
328 /* Pop the first element off the stack of bindings. */
330 static void
331 gimple_pop_bind_expr (void)
333 gimplify_ctxp->bind_expr_stack.pop ();
336 /* Return the first element of the stack of bindings. */
338 gbind *
339 gimple_current_bind_expr (void)
341 return gimplify_ctxp->bind_expr_stack.last ();
344 /* Return the stack of bindings created during gimplification. */
346 vec<gbind *>
347 gimple_bind_expr_stack (void)
349 return gimplify_ctxp->bind_expr_stack;
352 /* Return true iff there is a COND_EXPR between us and the innermost
353 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
355 static bool
356 gimple_conditional_context (void)
358 return gimplify_ctxp->conditions > 0;
361 /* Note that we've entered a COND_EXPR. */
363 static void
364 gimple_push_condition (void)
366 #ifdef ENABLE_GIMPLE_CHECKING
367 if (gimplify_ctxp->conditions == 0)
368 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
369 #endif
370 ++(gimplify_ctxp->conditions);
373 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
374 now, add any conditional cleanups we've seen to the prequeue. */
376 static void
377 gimple_pop_condition (gimple_seq *pre_p)
379 int conds = --(gimplify_ctxp->conditions);
381 gcc_assert (conds >= 0);
382 if (conds == 0)
384 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
385 gimplify_ctxp->conditional_cleanups = NULL;
389 /* A stable comparison routine for use with splay trees and DECLs. */
391 static int
392 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
394 tree a = (tree) xa;
395 tree b = (tree) xb;
397 return DECL_UID (a) - DECL_UID (b);
400 /* Create a new omp construct that deals with variable remapping. */
402 static struct gimplify_omp_ctx *
403 new_omp_context (enum omp_region_type region_type)
405 struct gimplify_omp_ctx *c;
407 c = XCNEW (struct gimplify_omp_ctx);
408 c->outer_context = gimplify_omp_ctxp;
409 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
410 c->privatized_types = new hash_set<tree>;
411 c->location = input_location;
412 c->region_type = region_type;
413 if ((region_type & ORT_TASK) == 0)
414 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
415 else
416 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
418 return c;
421 /* Destroy an omp construct that deals with variable remapping. */
423 static void
424 delete_omp_context (struct gimplify_omp_ctx *c)
426 splay_tree_delete (c->variables);
427 delete c->privatized_types;
428 c->loop_iter_var.release ();
429 XDELETE (c);
432 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
433 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
435 /* Both gimplify the statement T and append it to *SEQ_P. This function
436 behaves exactly as gimplify_stmt, but you don't have to pass T as a
437 reference. */
439 void
440 gimplify_and_add (tree t, gimple_seq *seq_p)
442 gimplify_stmt (&t, seq_p);
445 /* Gimplify statement T into sequence *SEQ_P, and return the first
446 tuple in the sequence of generated tuples for this statement.
447 Return NULL if gimplifying T produced no tuples. */
449 static gimple *
450 gimplify_and_return_first (tree t, gimple_seq *seq_p)
452 gimple_stmt_iterator last = gsi_last (*seq_p);
454 gimplify_and_add (t, seq_p);
456 if (!gsi_end_p (last))
458 gsi_next (&last);
459 return gsi_stmt (last);
461 else
462 return gimple_seq_first_stmt (*seq_p);
465 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
466 LHS, or for a call argument. */
468 static bool
469 is_gimple_mem_rhs (tree t)
471 /* If we're dealing with a renamable type, either source or dest must be
472 a renamed variable. */
473 if (is_gimple_reg_type (TREE_TYPE (t)))
474 return is_gimple_val (t);
475 else
476 return is_gimple_val (t) || is_gimple_lvalue (t);
479 /* Return true if T is a CALL_EXPR or an expression that can be
480 assigned to a temporary. Note that this predicate should only be
481 used during gimplification. See the rationale for this in
482 gimplify_modify_expr. */
484 static bool
485 is_gimple_reg_rhs_or_call (tree t)
487 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
488 || TREE_CODE (t) == CALL_EXPR);
491 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
492 this predicate should only be used during gimplification. See the
493 rationale for this in gimplify_modify_expr. */
495 static bool
496 is_gimple_mem_rhs_or_call (tree t)
498 /* If we're dealing with a renamable type, either source or dest must be
499 a renamed variable. */
500 if (is_gimple_reg_type (TREE_TYPE (t)))
501 return is_gimple_val (t);
502 else
503 return (is_gimple_val (t)
504 || is_gimple_lvalue (t)
505 || TREE_CLOBBER_P (t)
506 || TREE_CODE (t) == CALL_EXPR);
509 /* Create a temporary with a name derived from VAL. Subroutine of
510 lookup_tmp_var; nobody else should call this function. */
512 static inline tree
513 create_tmp_from_val (tree val)
515 /* Drop all qualifiers and address-space information from the value type. */
516 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
517 tree var = create_tmp_var (type, get_name (val));
518 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
519 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
520 DECL_GIMPLE_REG_P (var) = 1;
521 return var;
524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
525 an existing expression temporary. */
527 static tree
528 lookup_tmp_var (tree val, bool is_formal)
530 tree ret;
532 /* If not optimizing, never really reuse a temporary. local-alloc
533 won't allocate any variable that is used in more than one basic
534 block, which means it will go into memory, causing much extra
535 work in reload and final and poorer code generation, outweighing
536 the extra memory allocation here. */
537 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
538 ret = create_tmp_from_val (val);
539 else
541 elt_t elt, *elt_p;
542 elt_t **slot;
544 elt.val = val;
545 if (!gimplify_ctxp->temp_htab)
546 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
547 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
548 if (*slot == NULL)
550 elt_p = XNEW (elt_t);
551 elt_p->val = val;
552 elt_p->temp = ret = create_tmp_from_val (val);
553 *slot = elt_p;
555 else
557 elt_p = *slot;
558 ret = elt_p->temp;
562 return ret;
565 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
567 static tree
568 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
569 bool is_formal, bool allow_ssa)
571 tree t, mod;
573 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
574 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
575 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
576 fb_rvalue);
578 if (allow_ssa
579 && gimplify_ctxp->into_ssa
580 && is_gimple_reg_type (TREE_TYPE (val)))
582 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
583 if (! gimple_in_ssa_p (cfun))
585 const char *name = get_name (val);
586 if (name)
587 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
590 else
591 t = lookup_tmp_var (val, is_formal);
593 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
595 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
597 /* gimplify_modify_expr might want to reduce this further. */
598 gimplify_and_add (mod, pre_p);
599 ggc_free (mod);
601 return t;
604 /* Return a formal temporary variable initialized with VAL. PRE_P is as
605 in gimplify_expr. Only use this function if:
607 1) The value of the unfactored expression represented by VAL will not
608 change between the initialization and use of the temporary, and
609 2) The temporary will not be otherwise modified.
611 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
612 and #2 means it is inappropriate for && temps.
614 For other cases, use get_initialized_tmp_var instead. */
616 tree
617 get_formal_tmp_var (tree val, gimple_seq *pre_p)
619 return internal_get_tmp_var (val, pre_p, NULL, true, true);
622 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
623 are as in gimplify_expr. */
625 tree
626 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
627 bool allow_ssa)
629 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
632 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
633 generate debug info for them; otherwise don't. */
635 void
636 declare_vars (tree vars, gimple *gs, bool debug_info)
638 tree last = vars;
639 if (last)
641 tree temps, block;
643 gbind *scope = as_a <gbind *> (gs);
645 temps = nreverse (last);
647 block = gimple_bind_block (scope);
648 gcc_assert (!block || TREE_CODE (block) == BLOCK);
649 if (!block || !debug_info)
651 DECL_CHAIN (last) = gimple_bind_vars (scope);
652 gimple_bind_set_vars (scope, temps);
654 else
656 /* We need to attach the nodes both to the BIND_EXPR and to its
657 associated BLOCK for debugging purposes. The key point here
658 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
659 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
660 if (BLOCK_VARS (block))
661 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
662 else
664 gimple_bind_set_vars (scope,
665 chainon (gimple_bind_vars (scope), temps));
666 BLOCK_VARS (block) = temps;
672 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
673 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
674 no such upper bound can be obtained. */
676 static void
677 force_constant_size (tree var)
679 /* The only attempt we make is by querying the maximum size of objects
680 of the variable's type. */
682 HOST_WIDE_INT max_size;
684 gcc_assert (VAR_P (var));
686 max_size = max_int_size_in_bytes (TREE_TYPE (var));
688 gcc_assert (max_size >= 0);
690 DECL_SIZE_UNIT (var)
691 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
692 DECL_SIZE (var)
693 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
696 /* Push the temporary variable TMP into the current binding. */
698 void
699 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
701 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
703 /* Later processing assumes that the object size is constant, which might
704 not be true at this point. Force the use of a constant upper bound in
705 this case. */
706 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
707 force_constant_size (tmp);
709 DECL_CONTEXT (tmp) = fn->decl;
710 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
712 record_vars_into (tmp, fn->decl);
715 /* Push the temporary variable TMP into the current binding. */
717 void
718 gimple_add_tmp_var (tree tmp)
720 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
726 force_constant_size (tmp);
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
731 if (gimplify_ctxp)
733 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx
741 && (ctx->region_type == ORT_WORKSHARE
742 || ctx->region_type == ORT_SIMD
743 || ctx->region_type == ORT_ACC))
744 ctx = ctx->outer_context;
745 if (ctx)
746 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
749 else if (cfun)
750 record_vars (tmp);
751 else
753 gimple_seq body_seq;
755 /* This case is for nested functions. We need to expose the locals
756 they create. */
757 body_seq = gimple_body (current_function_decl);
758 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
764 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
765 nodes that are referenced more than once in GENERIC functions. This is
766 necessary because gimplification (translation into GIMPLE) is performed
767 by modifying tree nodes in-place, so gimplication of a shared node in a
768 first context could generate an invalid GIMPLE form in a second context.
770 This is achieved with a simple mark/copy/unmark algorithm that walks the
771 GENERIC representation top-down, marks nodes with TREE_VISITED the first
772 time it encounters them, duplicates them if they already have TREE_VISITED
773 set, and finally removes the TREE_VISITED marks it has set.
775 The algorithm works only at the function level, i.e. it generates a GENERIC
776 representation of a function with no nodes shared within the function when
777 passed a GENERIC function (except for nodes that are allowed to be shared).
779 At the global level, it is also necessary to unshare tree nodes that are
780 referenced in more than one function, for the same aforementioned reason.
781 This requires some cooperation from the front-end. There are 2 strategies:
783 1. Manual unsharing. The front-end needs to call unshare_expr on every
784 expression that might end up being shared across functions.
786 2. Deep unsharing. This is an extension of regular unsharing. Instead
787 of calling unshare_expr on expressions that might be shared across
788 functions, the front-end pre-marks them with TREE_VISITED. This will
789 ensure that they are unshared on the first reference within functions
790 when the regular unsharing algorithm runs. The counterpart is that
791 this algorithm must look deeper than for manual unsharing, which is
792 specified by LANG_HOOKS_DEEP_UNSHARING.
794 If there are only few specific cases of node sharing across functions, it is
795 probably easier for a front-end to unshare the expressions manually. On the
796 contrary, if the expressions generated at the global level are as widespread
797 as expressions generated within functions, deep unsharing is very likely the
798 way to go. */
800 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
801 These nodes model computations that must be done once. If we were to
802 unshare something like SAVE_EXPR(i++), the gimplification process would
803 create wrong code. However, if DATA is non-null, it must hold a pointer
804 set that is used to unshare the subtrees of these nodes. */
806 static tree
807 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
809 tree t = *tp;
810 enum tree_code code = TREE_CODE (t);
812 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
813 copy their subtrees if we can make sure to do it only once. */
814 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
816 if (data && !((hash_set<tree> *)data)->add (t))
818 else
819 *walk_subtrees = 0;
822 /* Stop at types, decls, constants like copy_tree_r. */
823 else if (TREE_CODE_CLASS (code) == tcc_type
824 || TREE_CODE_CLASS (code) == tcc_declaration
825 || TREE_CODE_CLASS (code) == tcc_constant)
826 *walk_subtrees = 0;
828 /* Cope with the statement expression extension. */
829 else if (code == STATEMENT_LIST)
832 /* Leave the bulk of the work to copy_tree_r itself. */
833 else
834 copy_tree_r (tp, walk_subtrees, NULL);
836 return NULL_TREE;
839 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
840 If *TP has been visited already, then *TP is deeply copied by calling
841 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
843 static tree
844 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
846 tree t = *tp;
847 enum tree_code code = TREE_CODE (t);
849 /* Skip types, decls, and constants. But we do want to look at their
850 types and the bounds of types. Mark them as visited so we properly
851 unmark their subtrees on the unmark pass. If we've already seen them,
852 don't look down further. */
853 if (TREE_CODE_CLASS (code) == tcc_type
854 || TREE_CODE_CLASS (code) == tcc_declaration
855 || TREE_CODE_CLASS (code) == tcc_constant)
857 if (TREE_VISITED (t))
858 *walk_subtrees = 0;
859 else
860 TREE_VISITED (t) = 1;
863 /* If this node has been visited already, unshare it and don't look
864 any deeper. */
865 else if (TREE_VISITED (t))
867 walk_tree (tp, mostly_copy_tree_r, data, NULL);
868 *walk_subtrees = 0;
871 /* Otherwise, mark the node as visited and keep looking. */
872 else
873 TREE_VISITED (t) = 1;
875 return NULL_TREE;
878 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
879 copy_if_shared_r callback unmodified. */
881 static inline void
882 copy_if_shared (tree *tp, void *data)
884 walk_tree (tp, copy_if_shared_r, data, NULL);
887 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
888 any nested functions. */
890 static void
891 unshare_body (tree fndecl)
893 struct cgraph_node *cgn = cgraph_node::get (fndecl);
894 /* If the language requires deep unsharing, we need a pointer set to make
895 sure we don't repeatedly unshare subtrees of unshareable nodes. */
896 hash_set<tree> *visited
897 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
899 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
900 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
901 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
903 delete visited;
905 if (cgn)
906 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
907 unshare_body (cgn->decl);
910 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
911 Subtrees are walked until the first unvisited node is encountered. */
913 static tree
914 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
916 tree t = *tp;
918 /* If this node has been visited, unmark it and keep looking. */
919 if (TREE_VISITED (t))
920 TREE_VISITED (t) = 0;
922 /* Otherwise, don't look any deeper. */
923 else
924 *walk_subtrees = 0;
926 return NULL_TREE;
929 /* Unmark the visited trees rooted at *TP. */
931 static inline void
932 unmark_visited (tree *tp)
934 walk_tree (tp, unmark_visited_r, NULL, NULL);
937 /* Likewise, but mark all trees as not visited. */
939 static void
940 unvisit_body (tree fndecl)
942 struct cgraph_node *cgn = cgraph_node::get (fndecl);
944 unmark_visited (&DECL_SAVED_TREE (fndecl));
945 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
946 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
948 if (cgn)
949 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
950 unvisit_body (cgn->decl);
953 /* Unconditionally make an unshared copy of EXPR. This is used when using
954 stored expressions which span multiple functions, such as BINFO_VTABLE,
955 as the normal unsharing process can't tell that they're shared. */
957 tree
958 unshare_expr (tree expr)
960 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
961 return expr;
964 /* Worker for unshare_expr_without_location. */
966 static tree
967 prune_expr_location (tree *tp, int *walk_subtrees, void *)
969 if (EXPR_P (*tp))
970 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
971 else
972 *walk_subtrees = 0;
973 return NULL_TREE;
976 /* Similar to unshare_expr but also prune all expression locations
977 from EXPR. */
979 tree
980 unshare_expr_without_location (tree expr)
982 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
983 if (EXPR_P (expr))
984 walk_tree (&expr, prune_expr_location, NULL, NULL);
985 return expr;
988 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
989 contain statements and have a value. Assign its value to a temporary
990 and give it void_type_node. Return the temporary, or NULL_TREE if
991 WRAPPER was already void. */
993 tree
994 voidify_wrapper_expr (tree wrapper, tree temp)
996 tree type = TREE_TYPE (wrapper);
997 if (type && !VOID_TYPE_P (type))
999 tree *p;
1001 /* Set p to point to the body of the wrapper. Loop until we find
1002 something that isn't a wrapper. */
1003 for (p = &wrapper; p && *p; )
1005 switch (TREE_CODE (*p))
1007 case BIND_EXPR:
1008 TREE_SIDE_EFFECTS (*p) = 1;
1009 TREE_TYPE (*p) = void_type_node;
1010 /* For a BIND_EXPR, the body is operand 1. */
1011 p = &BIND_EXPR_BODY (*p);
1012 break;
1014 case CLEANUP_POINT_EXPR:
1015 case TRY_FINALLY_EXPR:
1016 case TRY_CATCH_EXPR:
1017 TREE_SIDE_EFFECTS (*p) = 1;
1018 TREE_TYPE (*p) = void_type_node;
1019 p = &TREE_OPERAND (*p, 0);
1020 break;
1022 case STATEMENT_LIST:
1024 tree_stmt_iterator i = tsi_last (*p);
1025 TREE_SIDE_EFFECTS (*p) = 1;
1026 TREE_TYPE (*p) = void_type_node;
1027 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1029 break;
1031 case COMPOUND_EXPR:
1032 /* Advance to the last statement. Set all container types to
1033 void. */
1034 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1036 TREE_SIDE_EFFECTS (*p) = 1;
1037 TREE_TYPE (*p) = void_type_node;
1039 break;
1041 case TRANSACTION_EXPR:
1042 TREE_SIDE_EFFECTS (*p) = 1;
1043 TREE_TYPE (*p) = void_type_node;
1044 p = &TRANSACTION_EXPR_BODY (*p);
1045 break;
1047 default:
1048 /* Assume that any tree upon which voidify_wrapper_expr is
1049 directly called is a wrapper, and that its body is op0. */
1050 if (p == &wrapper)
1052 TREE_SIDE_EFFECTS (*p) = 1;
1053 TREE_TYPE (*p) = void_type_node;
1054 p = &TREE_OPERAND (*p, 0);
1055 break;
1057 goto out;
1061 out:
1062 if (p == NULL || IS_EMPTY_STMT (*p))
1063 temp = NULL_TREE;
1064 else if (temp)
1066 /* The wrapper is on the RHS of an assignment that we're pushing
1067 down. */
1068 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1069 || TREE_CODE (temp) == MODIFY_EXPR);
1070 TREE_OPERAND (temp, 1) = *p;
1071 *p = temp;
1073 else
1075 temp = create_tmp_var (type, "retval");
1076 *p = build2 (INIT_EXPR, type, temp, *p);
1079 return temp;
1082 return NULL_TREE;
1085 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1086 a temporary through which they communicate. */
1088 static void
1089 build_stack_save_restore (gcall **save, gcall **restore)
1091 tree tmp_var;
1093 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1094 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1095 gimple_call_set_lhs (*save, tmp_var);
1097 *restore
1098 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1099 1, tmp_var);
1102 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1104 static tree
1105 build_asan_poison_call_expr (tree decl)
1107 /* Do not poison variables that have size equal to zero. */
1108 tree unit_size = DECL_SIZE_UNIT (decl);
1109 if (zerop (unit_size))
1110 return NULL_TREE;
1112 tree base = build_fold_addr_expr (decl);
1114 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1115 void_type_node, 3,
1116 build_int_cst (integer_type_node,
1117 ASAN_MARK_POISON),
1118 base, unit_size);
1121 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1122 on POISON flag, shadow memory of a DECL variable. The call will be
1123 put on location identified by IT iterator, where BEFORE flag drives
1124 position where the stmt will be put (before or after *IT). */
1126 static void
1127 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1128 bool before)
1130 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1131 if (gimplify_omp_ctxp)
1132 return;
1134 tree unit_size = DECL_SIZE_UNIT (decl);
1135 tree base = build_fold_addr_expr (decl);
1137 /* Do not poison variables that have size equal to zero. */
1138 if (zerop (unit_size))
1139 return;
1141 /* It's necessary to have all stack variables aligned to ASAN granularity
1142 bytes.  Bump under-aligned decls so their shadow bytes map cleanly. */
1143 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1144 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY)
1146 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
/* ASAN_MARK (flags, &decl, size-of-decl). */
1148 gimple *g
1149 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1150 build_int_cst (integer_type_node, flags),
1151 base, unit_size);
1153 if (before)
1154 gsi_insert_before (it, g, GSI_NEW_STMT);
1155 else
1156 gsi_insert_after (it, g, GSI_NEW_STMT);
1159 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1160 either poisons or unpoisons a DECL. Created statement is appended
1161 to SEQ_P gimple sequence.  Thin wrapper over the iterator overload. */
1163 static void
1164 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1166 gimple_stmt_iterator it = gsi_last (*seq_p);
1167 bool before = false;
/* An empty sequence has no last statement to insert after, so insert
   "before" the end iterator instead. */
1169 if (gsi_end_p (it))
1170 before = true;
1172 asan_poison_variable (decl, poison, &it, before);
1175 /* Sort pair of VAR_DECLs A and B by DECL_UID.  qsort-style comparator:
A and B point to tree elements of the array being sorted. */
1177 static int
1178 sort_by_decl_uid (const void *a, const void *b)
1180 const tree *t1 = (const tree *)a;
1181 const tree *t2 = (const tree *)b;
1183 int uid1 = DECL_UID (*t1);
1184 int uid2 = DECL_UID (*t2);
/* Return <0 / >0 / 0 following the strcmp-like qsort contract. */
1186 if (uid1 < uid2)
1187 return -1;
1188 else if (uid1 > uid2)
1189 return 1;
1190 else
1191 return 0;
1194 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1195 depending on POISON flag. Created statement is appended
1196 to SEQ_P gimple sequence. */
1198 static void
1199 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1201 unsigned c = variables->elements ();
1202 if (c == 0)
1203 return;
/* Copy the hash-set contents into a vector and sort by DECL_UID so the
   emitted statements are deterministic regardless of hash iteration
   order. */
1205 auto_vec<tree> sorted_variables (c);
1207 for (hash_set<tree>::iterator it = variables->begin ();
1208 it != variables->end (); ++it)
1209 sorted_variables.safe_push (*it);
1211 sorted_variables.qsort (sort_by_decl_uid);
1213 unsigned i;
1214 tree var;
1215 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1217 asan_poison_variable (var, poison, seq_p);
1219 /* Add the use_after_scope_memory attribute to the variable in order
1220 to prevent it from being rewritten into SSA form. */
1221 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1222 DECL_ATTRIBUTES (var)))
1223 DECL_ATTRIBUTES (var)
1224 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1225 integer_one_node,
1226 DECL_ATTRIBUTES (var));
1230 /* Gimplify a BIND_EXPR. Just voidify and recurse.  Produces a
GIMPLE_BIND in *PRE_P; *EXPR_P becomes the value temporary (if the
bind was used as a value) or NULL_TREE. */
1232 static enum gimplify_status
1233 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1235 tree bind_expr = *expr_p;
1236 bool old_keep_stack = gimplify_ctxp->keep_stack;
1237 bool old_save_stack = gimplify_ctxp->save_stack;
1238 tree t;
1239 gbind *bind_stmt;
1240 gimple_seq body, cleanup;
1241 gcall *stack_save;
1242 location_t start_locus = 0, end_locus = 0;
1243 tree ret_clauses = NULL;
/* If the BIND_EXPR is used in value context, voidify it and remember
   the temporary that will carry its value. */
1245 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1247 /* Mark variables seen in this bind expr. */
1248 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1250 if (VAR_P (t))
1252 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1254 /* Mark variable as local to the enclosing OMP region, if any. */
1255 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1256 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1257 || splay_tree_lookup (ctx->variables,
1258 (splay_tree_key) t) == NULL))
1260 if (ctx->region_type == ORT_SIMD
1261 && TREE_ADDRESSABLE (t)
1262 && !TREE_STATIC (t))
1263 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1264 else
1265 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1268 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1270 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1271 cfun->has_local_explicit_reg_vars = true;
1274 /* Preliminarily mark non-addressed complex variables as eligible
1275 for promotion to gimple registers. We'll transform their uses
1276 as we find them. */
1277 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1278 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1279 && !TREE_THIS_VOLATILE (t)
1280 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1281 && !needs_to_live_in_memory (t))
1282 DECL_GIMPLE_REG_P (t) = 1;
1285 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1286 BIND_EXPR_BLOCK (bind_expr));
1287 gimple_push_bind_expr (bind_stmt);
/* Reset for this nesting level; restored (with propagation for
   keep_stack) near the end of this function. */
1289 gimplify_ctxp->keep_stack = false;
1290 gimplify_ctxp->save_stack = false;
1292 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1293 body = NULL;
1294 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1295 gimple_bind_set_body (bind_stmt, body);
1297 /* Source location wise, the cleanup code (stack_restore and clobbers)
1298 belongs to the end of the block, so propagate what we have. The
1299 stack_save operation belongs to the beginning of block, which we can
1300 infer from the bind_expr directly if the block has no explicit
1301 assignment. */
1302 if (BIND_EXPR_BLOCK (bind_expr))
1304 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1305 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1307 if (start_locus == 0)
1308 start_locus = EXPR_LOCATION (bind_expr);
1310 cleanup = NULL;
1311 stack_save = NULL;
1313 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1314 the stack space allocated to the VLAs. */
1315 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1317 gcall *stack_restore;
1319 /* Save stack on entry and restore it on exit. Add a try_finally
1320 block to achieve this. */
1321 build_stack_save_restore (&stack_save, &stack_restore);
1323 gimple_set_location (stack_save, start_locus);
1324 gimple_set_location (stack_restore, end_locus);
1326 gimplify_seq_add_stmt (&cleanup, stack_restore);
1329 /* Add clobbers for all variables that go out of scope. */
1330 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1332 if (VAR_P (t)
1333 && !is_global_var (t)
1334 && DECL_CONTEXT (t) == current_function_decl)
1336 if (!DECL_HARD_REGISTER (t)
1337 && !TREE_THIS_VOLATILE (t)
1338 && !DECL_HAS_VALUE_EXPR_P (t)
1339 /* Only care for variables that have to be in memory. Others
1340 will be rewritten into SSA names, hence moved to the
1341 top-level. */
1342 && !is_gimple_reg (t)
1343 && flag_stack_reuse != SR_NONE)
/* A volatile empty-CONSTRUCTOR assignment is GIMPLE's clobber
   marker: it tells later passes the variable is dead here so
   its stack slot may be reused. */
1345 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1346 gimple *clobber_stmt;
1347 TREE_THIS_VOLATILE (clobber) = 1;
1348 clobber_stmt = gimple_build_assign (t, clobber);
1349 gimple_set_location (clobber_stmt, end_locus);
1350 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
/* Collect OpenACC "declare returns" clauses for variables leaving
   scope; they are emitted as one OACC_DECLARE region below. */
1353 if (flag_openacc && oacc_declare_returns != NULL)
1355 tree *c = oacc_declare_returns->get (t);
1356 if (c != NULL)
1358 if (ret_clauses)
1359 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1361 ret_clauses = *c;
1363 oacc_declare_returns->remove (t);
1365 if (oacc_declare_returns->elements () == 0)
1367 delete oacc_declare_returns;
1368 oacc_declare_returns = NULL;
/* Re-poison ASan-tracked variables when they go out of scope, so
   later accesses are flagged as use-after-scope. */
1374 if (asan_poisoned_variables != NULL
1375 && asan_poisoned_variables->contains (t))
1377 asan_poisoned_variables->remove (t);
1378 asan_poison_variable (t, true, &cleanup);
1381 if (gimplify_ctxp->live_switch_vars != NULL
1382 && gimplify_ctxp->live_switch_vars->contains (t))
1383 gimplify_ctxp->live_switch_vars->remove (t);
1386 if (ret_clauses)
1388 gomp_target *stmt;
1389 gimple_stmt_iterator si = gsi_start (cleanup);
1391 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1392 ret_clauses);
1393 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
/* Wrap the body in a GIMPLE_TRY_FINALLY so the cleanup (restore,
   clobbers, poisons) runs on every exit path. */
1396 if (cleanup)
1398 gtry *gs;
1399 gimple_seq new_body;
1401 new_body = NULL;
1402 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1403 GIMPLE_TRY_FINALLY);
1405 if (stack_save)
1406 gimplify_seq_add_stmt (&new_body, stack_save);
1407 gimplify_seq_add_stmt (&new_body, gs);
1408 gimple_bind_set_body (bind_stmt, new_body);
1411 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1412 if (!gimplify_ctxp->keep_stack)
1413 gimplify_ctxp->keep_stack = old_keep_stack;
1414 gimplify_ctxp->save_stack = old_save_stack;
1416 gimple_pop_bind_expr ();
1418 gimplify_seq_add_stmt (pre_p, bind_stmt);
1420 if (temp)
1422 *expr_p = temp;
1423 return GS_OK;
1426 *expr_p = NULL_TREE;
1427 return GS_ALL_DONE;
1430 /* Maybe add early return predict statement to PRE_P sequence. */
1432 static void
1433 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1435 /* If we are in a conditional context, predict this early return
as not taken (an unconditional return needs no prediction). */
1436 if (gimple_conditional_context ())
1438 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1439 NOT_TAKEN);
1440 gimplify_seq_add_stmt (pre_p, predict);
1444 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1445 GIMPLE value, it is assigned to a new temporary and the statement is
1446 re-written to return the temporary.
1448 PRE_P points to the sequence where side effects that must happen before
1449 STMT should be stored. */
1451 static enum gimplify_status
1452 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1454 greturn *ret;
1455 tree ret_expr = TREE_OPERAND (stmt, 0);
1456 tree result_decl, result;
1458 if (ret_expr == error_mark_node)
1459 return GS_ERROR;
1461 /* Implicit _Cilk_sync must be inserted right before any return statement
1462 if there is a _Cilk_spawn in the function. If the user has provided a
1463 _Cilk_sync, the optimizer should remove this duplicate one. */
1464 if (fn_contains_cilk_spawn_p (cfun))
1466 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1467 gimplify_and_add (impl_sync, pre_p);
/* A bare "return;" or an already-gimple "return RESULT_DECL;" needs no
   temporary.  NOTE(review): the error_mark_node test below is dead —
   that case already returned GS_ERROR above. */
1470 if (!ret_expr
1471 || TREE_CODE (ret_expr) == RESULT_DECL
1472 || ret_expr == error_mark_node)
1474 maybe_add_early_return_predict_stmt (pre_p);
/* NOTE(review): this inner 'ret' shadows the function-scope 'ret'
   declared above; harmless but worth cleaning up. */
1475 greturn *ret = gimple_build_return (ret_expr);
1476 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1477 gimplify_seq_add_stmt (pre_p, ret);
1478 return GS_ALL_DONE;
1481 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1482 result_decl = NULL_TREE;
1483 else
/* ret_expr is a MODIFY_EXPR/INIT_EXPR of the RESULT_DECL; dig it out. */
1485 result_decl = TREE_OPERAND (ret_expr, 0);
1487 /* See through a return by reference. */
1488 if (TREE_CODE (result_decl) == INDIRECT_REF)
1489 result_decl = TREE_OPERAND (result_decl, 0);
1491 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1492 || TREE_CODE (ret_expr) == INIT_EXPR)
1493 && TREE_CODE (result_decl) == RESULT_DECL);
1496 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1497 Recall that aggregate_value_p is FALSE for any aggregate type that is
1498 returned in registers. If we're returning values in registers, then
1499 we don't want to extend the lifetime of the RESULT_DECL, particularly
1500 across another call. In addition, for those aggregates for which
1501 hard_function_value generates a PARALLEL, we'll die during normal
1502 expansion of structure assignments; there's special code in expand_return
1503 to handle this case that does not exist in expand_expr. */
1504 if (!result_decl)
1505 result = NULL_TREE;
1506 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1508 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1510 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1511 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1512 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1513 should be effectively allocated by the caller, i.e. all calls to
1514 this function must be subject to the Return Slot Optimization. */
1515 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1516 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1518 result = result_decl;
/* Reuse one return temporary for the whole function so all returns
   flow through the same value. */
1520 else if (gimplify_ctxp->return_temp)
1521 result = gimplify_ctxp->return_temp;
1522 else
1524 result = create_tmp_reg (TREE_TYPE (result_decl));
1526 /* ??? With complex control flow (usually involving abnormal edges),
1527 we can wind up warning about an uninitialized value for this. Due
1528 to how this variable is constructed and initialized, this is never
1529 true. Give up and never warn. */
1530 TREE_NO_WARNING (result) = 1;
1532 gimplify_ctxp->return_temp = result;
1535 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1536 Then gimplify the whole thing. */
1537 if (result != result_decl)
1538 TREE_OPERAND (ret_expr, 0) = result;
1540 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1542 maybe_add_early_return_predict_stmt (pre_p);
1543 ret = gimple_build_return (result);
1544 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1545 gimplify_seq_add_stmt (pre_p, ret);
1547 return GS_ALL_DONE;
1550 /* Gimplify a variable-length array DECL: gimplify its size expressions
and rewrite the decl as an indirection through an alloca'd pointer. */
1552 static void
1553 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1555 /* This is a variable-sized decl. Simplify its size and mark it
1556 for deferred expansion. */
1557 tree t, addr, ptr_type;
1559 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1560 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1562 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1563 if (DECL_HAS_VALUE_EXPR_P (decl))
1564 return;
1566 /* All occurrences of this decl in final gimplified code will be
1567 replaced by indirection. Setting DECL_VALUE_EXPR does two
1568 things: First, it lets the rest of the gimplifier know what
1569 replacement to use. Second, it lets the debug info know
1570 where to find the value. */
1571 ptr_type = build_pointer_type (TREE_TYPE (decl));
1572 addr = create_tmp_var (ptr_type, get_name (decl));
1573 DECL_IGNORED_P (addr) = 0;
1574 t = build_fold_indirect_ref (addr);
/* The dereference cannot trap: addr always points at the alloca'd
   storage created below. */
1575 TREE_THIS_NOTRAP (t) = 1;
1576 SET_DECL_VALUE_EXPR (decl, t);
1577 DECL_HAS_VALUE_EXPR_P (decl) = 1;
/* addr = (ptr_type) __builtin_alloca_with_align (size, align); */
1579 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1580 max_int_size_in_bytes (TREE_TYPE (decl)));
1581 /* The call has been built for a variable-sized object. */
1582 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1583 t = fold_convert (ptr_type, t);
1584 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1586 gimplify_and_add (t, seq_p);
1589 /* A helper function to be called via walk_tree. Mark all labels under *TP
1590 as being forced. To be called for DECL_INITIAL of static variables,
whose initializers may capture label addresses. */
1592 static tree
1593 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
/* Types cannot contain LABEL_DECLs; prune the walk there. */
1595 if (TYPE_P (*tp))
1596 *walk_subtrees = 0;
1597 if (TREE_CODE (*tp) == LABEL_DECL)
1599 FORCED_LABEL (*tp) = 1;
1600 cfun->has_forced_label_in_static = 1;
/* Returning NULL_TREE continues the walk over the whole tree. */
1603 return NULL_TREE;
1606 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1607 and initialization explicit. */
1609 static enum gimplify_status
1610 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1612 tree stmt = *stmt_p;
1613 tree decl = DECL_EXPR_DECL (stmt);
/* The DECL_EXPR itself is consumed here; output goes into *seq_p. */
1615 *stmt_p = NULL_TREE;
1617 if (TREE_TYPE (decl) == error_mark_node)
1618 return GS_ERROR;
1620 if ((TREE_CODE (decl) == TYPE_DECL
1621 || VAR_P (decl))
1622 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1624 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1625 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1626 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1629 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1630 in case its size expressions contain problematic nodes like CALL_EXPR. */
1631 if (TREE_CODE (decl) == TYPE_DECL
1632 && DECL_ORIGINAL_TYPE (decl)
1633 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1635 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1636 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1637 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1640 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1642 tree init = DECL_INITIAL (decl);
1643 bool is_vla = false;
/* Treat a decl as a VLA when its size is not constant, or when generic
   stack checking is on and the constant size exceeds the limit (so it
   is allocated via alloca and checked). */
1645 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1646 || (!TREE_STATIC (decl)
1647 && flag_stack_check == GENERIC_STACK_CHECK
1648 && compare_tree_int (DECL_SIZE_UNIT (decl),
1649 STACK_CHECK_MAX_VAR_SIZE) > 0))
1651 gimplify_vla_decl (decl, seq_p);
1652 is_vla = true;
/* Unpoison the variable's shadow at its declaration point and track it
   for re-poisoning when it goes out of scope (use-after-scope). */
1655 if (asan_poisoned_variables
1656 && !is_vla
1657 && TREE_ADDRESSABLE (decl)
1658 && !TREE_STATIC (decl)
1659 && !DECL_HAS_VALUE_EXPR_P (decl)
1660 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1661 && dbg_cnt (asan_use_after_scope))
1663 asan_poisoned_variables->add (decl);
1664 asan_poison_variable (decl, false, seq_p);
1665 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1666 gimplify_ctxp->live_switch_vars->add (decl);
1669 /* Some front ends do not explicitly declare all anonymous
1670 artificial variables. We compensate here by declaring the
1671 variables, though it would be better if the front ends would
1672 explicitly declare them. */
1673 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1674 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1675 gimple_add_tmp_var (decl);
1677 if (init && init != error_mark_node)
1679 if (!TREE_STATIC (decl))
/* Turn the initializer into an explicit INIT_EXPR statement; the
   tree node is freed once gimplified. */
1681 DECL_INITIAL (decl) = NULL_TREE;
1682 init = build2 (INIT_EXPR, void_type_node, decl, init);
1683 gimplify_and_add (init, seq_p);
1684 ggc_free (init);
1686 else
1687 /* We must still examine initializers for static variables
1688 as they may contain a label address. */
1689 walk_tree (&init, force_labels_r, NULL, NULL);
1693 return GS_ALL_DONE;
1696 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1697 and replacing the LOOP_EXPR with goto, but if the loop contains an
1698 EXIT_EXPR, we need to append a label for it to jump to. */
1700 static enum gimplify_status
1701 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
/* Save/restore exit_label so nested loops each get their own. */
1703 tree saved_label = gimplify_ctxp->exit_label;
1704 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1706 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1708 gimplify_ctxp->exit_label = NULL_TREE;
1710 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
/* The back-edge: loop forever until an EXIT_EXPR jumps out. */
1712 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
/* exit_label is only created on demand, by an EXIT_EXPR in the body. */
1714 if (gimplify_ctxp->exit_label)
1715 gimplify_seq_add_stmt (pre_p,
1716 gimple_build_label (gimplify_ctxp->exit_label));
1718 gimplify_ctxp->exit_label = saved_label;
1720 *expr_p = NULL;
1721 return GS_ALL_DONE;
1724 /* Gimplify a statement list onto a sequence. These may be created either
1725 by an enlightened front-end, or by shortcut_cond_expr. */
1727 static enum gimplify_status
1728 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
/* If the list is used in value context, voidify it and return the value
   temporary through *expr_p. */
1730 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1732 tree_stmt_iterator i = tsi_start (*expr_p);
1734 while (!tsi_end_p (i))
/* Gimplify each statement, then unlink it from the tree list. */
1736 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1737 tsi_delink (&i);
1740 if (temp)
1742 *expr_p = temp;
1743 return GS_OK;
1746 return GS_ALL_DONE;
1749 /* Callback for walk_gimple_seq.  Finds the first "real" statement in a
switch body and stores it in WI->info; returns non-NULL to stop the
walk once found. */
1751 static tree
1752 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1753 struct walk_stmt_info *wi)
1755 gimple *stmt = gsi_stmt (*gsi_p);
1757 *handled_ops_p = true;
1758 switch (gimple_code (stmt))
1760 case GIMPLE_TRY:
1761 /* A compiler-generated cleanup or a user-written try block.
1762 If it's empty, don't dive into it--that would result in
1763 worse location info. */
1764 if (gimple_try_eval (stmt) == NULL)
1766 wi->info = stmt;
1767 return integer_zero_node;
1769 /* Fall through. */
1770 case GIMPLE_BIND:
1771 case GIMPLE_CATCH:
1772 case GIMPLE_EH_FILTER:
1773 case GIMPLE_TRANSACTION:
1774 /* Walk the sub-statements. */
1775 *handled_ops_p = false;
1776 break;
1777 case GIMPLE_CALL:
/* ASAN_MARK calls are instrumentation, not user code; skip them. */
1778 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1780 *handled_ops_p = false;
1781 break;
1783 /* Fall through. */
1784 default:
1785 /* Save the first "real" statement (not a decl/lexical scope/...). */
1786 wi->info = stmt;
1787 return integer_zero_node;
1789 return NULL_TREE;
1792 /* Possibly warn about unreachable statements between switch's controlling
1793 expression and the first case. SEQ is the body of a switch expression.
Implements -Wswitch-unreachable. */
1795 static void
1796 maybe_warn_switch_unreachable (gimple_seq seq)
1798 if (!warn_switch_unreachable
1799 /* This warning doesn't play well with Fortran when optimizations
1800 are on. */
1801 || lang_GNU_Fortran ()
1802 || seq == NULL)
1803 return;
1805 struct walk_stmt_info wi;
1806 memset (&wi, 0, sizeof (wi));
/* The walker stops at, and records, the first "real" statement. */
1807 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1808 gimple *stmt = (gimple *) wi.info;
/* A label as the first statement means the body starts with a case;
   nothing before it is unreachable. */
1810 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1812 if (gimple_code (stmt) == GIMPLE_GOTO
1813 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1814 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1815 /* Don't warn for compiler-generated gotos. These occur
1816 in Duff's devices, for example. */;
1817 else
1818 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1819 "statement will never be executed");
1824 /* A label entry that pairs label and a location. */
1825 struct label_entry
/* The LABEL_DECL of a potentially-falling-through label. */
1827 tree label;
/* Source location used when diagnosing the fallthrough. */
1828 location_t loc;
1831 /* Find LABEL in vector of label entries VEC.  Returns the matching
entry, or NULL if LABEL is not present. */
1833 static struct label_entry *
1834 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1836 unsigned int i;
1837 struct label_entry *l;
1839 FOR_EACH_VEC_ELT (*vec, i, l)
1840 if (l->label == label)
1841 return l;
1842 return NULL;
1845 /* Return true if LABEL, a LABEL_DECL, represents a case label
1846 in a vector of labels CASES. */
1848 static bool
1849 case_label_p (const vec<tree> *cases, tree label)
1851 unsigned int i;
1852 tree l;
/* Each element of CASES is a CASE_LABEL_EXPR; compare its LABEL_DECL. */
1854 FOR_EACH_VEC_ELT (*cases, i, l)
1855 if (CASE_LABEL (l) == label)
1856 return true;
1857 return false;
1860 /* Find the last statement in a scope STMT, descending through nested
GIMPLE_BIND and GIMPLE_TRY wrappers.  May return NULL. */
1862 static gimple *
1863 last_stmt_in_scope (gimple *stmt)
1865 if (!stmt)
1866 return NULL;
1868 switch (gimple_code (stmt))
1870 case GIMPLE_BIND:
1872 gbind *bind = as_a <gbind *> (stmt);
1873 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1874 return last_stmt_in_scope (stmt);
1877 case GIMPLE_TRY:
1879 gtry *try_stmt = as_a <gtry *> (stmt);
1880 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1881 gimple *last_eval = last_stmt_in_scope (stmt);
/* NOTE(review): last_eval is passed to gimple_stmt_may_fallthru
   before the NULL check on the next line; this is only safe if
   that predicate tolerates a NULL argument — verify. */
1882 if (gimple_stmt_may_fallthru (last_eval)
1883 && (last_eval == NULL
1884 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1885 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
/* For a try/finally whose body can fall through, the scope ends
   with the last statement of the cleanup sequence. */
1887 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1888 return last_stmt_in_scope (stmt);
1890 else
1891 return last_eval;
1894 default:
1895 return stmt;
1899 /* Collect interesting labels in LABELS and return the statement preceding
1900 another case label, or a user-defined label.  Advances *GSI_P to the
label that ends the scan. */
1902 static gimple *
1903 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1904 auto_vec <struct label_entry> *labels)
1906 gimple *prev = NULL;
1910 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1911 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1913 /* Nested scope. Only look at the last statement of
1914 the innermost scope. */
1915 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1916 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1917 if (last)
1919 prev = last;
1920 /* It might be a label without a location. Use the
1921 location of the scope then. */
1922 if (!gimple_has_location (prev))
1923 gimple_set_location (prev, bind_loc);
1925 gsi_next (gsi_p);
1926 continue;
1929 /* Ifs are tricky. */
1930 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1932 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1933 tree false_lab = gimple_cond_false_label (cond_stmt);
1934 location_t if_loc = gimple_location (cond_stmt);
1936 /* If we have e.g.
1937 if (i > 1) goto <D.2259>; else goto D;
1938 we can't do much with the else-branch. */
1939 if (!DECL_ARTIFICIAL (false_lab))
1940 break;
1942 /* Go on until the false label, then one step back. */
1943 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1945 gimple *stmt = gsi_stmt (*gsi_p);
1946 if (gimple_code (stmt) == GIMPLE_LABEL
1947 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1948 break;
1951 /* Not found? Oops. */
1952 if (gsi_end_p (*gsi_p))
1953 break;
/* The false label is where control falls to if the condition fails;
   record it as a potential fallthrough point. */
1955 struct label_entry l = { false_lab, if_loc };
1956 labels->safe_push (l);
1958 /* Go to the last statement of the then branch. */
1959 gsi_prev (gsi_p);
1961 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1962 <D.1759>:
1963 <stmt>;
1964 goto <D.1761>;
1965 <D.1760>:
1967 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1968 && !gimple_has_location (gsi_stmt (*gsi_p)))
1970 /* Look at the statement before, it might be
1971 attribute fallthrough, in which case don't warn. */
1972 gsi_prev (gsi_p);
1973 bool fallthru_before_dest
1974 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1975 gsi_next (gsi_p);
1976 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1977 if (!fallthru_before_dest)
1979 struct label_entry l = { goto_dest, if_loc };
1980 labels->safe_push (l);
1983 /* And move back. */
1984 gsi_next (gsi_p);
1987 /* Remember the last statement. Skip labels that are of no interest
1988 to us. */
1989 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1991 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1992 if (find_label_entry (labels, label))
1993 prev = gsi_stmt (*gsi_p);
/* Skip ASAN_MARK instrumentation calls entirely. */
1995 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1997 else
1998 prev = gsi_stmt (*gsi_p);
1999 gsi_next (gsi_p);
2001 while (!gsi_end_p (*gsi_p)
2002 /* Stop if we find a case or a user-defined label. */
2003 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2004 || !gimple_has_location (gsi_stmt (*gsi_p))));
2006 return prev;
2009 /* Return true if the switch fallthough warning should occur. LABEL is
2010 the label statement that we're falling through to. */
2012 static bool
2013 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2015 gimple_stmt_iterator gsi = *gsi_p;
2017 /* Don't warn if the label is marked with a "falls through" comment. */
2018 if (FALLTHROUGH_LABEL_P (label))
2019 return false;
2021 /* Don't warn for non-case labels followed by a statement:
2022 case 0:
2023 foo ();
2024 label:
2025 bar ();
2026 as these are likely intentional. */
2027 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2029 tree l;
/* Walk past consecutive non-case labels; warn only if a case label
   eventually follows. */
2030 while (!gsi_end_p (gsi)
2031 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2032 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2033 && !case_label_p (&gimplify_ctxp->case_labels, l))
2034 gsi_next (&gsi);
2035 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2036 return false;
2039 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2040 immediately breaks. */
2041 gsi = *gsi_p;
2043 /* Skip all immediately following labels. */
2044 while (!gsi_end_p (gsi)
2045 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2046 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2047 gsi_next (&gsi);
2049 /* { ... something; default:; } */
2050 if (gsi_end_p (gsi)
2051 /* { ... something; default: break; } or
2052 { ... something; default: goto L; } */
2053 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2054 /* { ... something; default: return; } */
2055 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2056 return false;
2058 return true;
2061 /* Callback for walk_gimple_seq.  Implements -Wimplicit-fallthrough by
detecting a label ... may-fallthru-stmt ... label pattern. */
2063 static tree
2064 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2065 struct walk_stmt_info *)
2067 gimple *stmt = gsi_stmt (*gsi_p);
2069 *handled_ops_p = true;
2070 switch (gimple_code (stmt))
2072 case GIMPLE_TRY:
2073 case GIMPLE_BIND:
2074 case GIMPLE_CATCH:
2075 case GIMPLE_EH_FILTER:
2076 case GIMPLE_TRANSACTION:
2077 /* Walk the sub-statements. */
2078 *handled_ops_p = false;
2079 break;
2081 /* Find a sequence of form:
2083 GIMPLE_LABEL
2084 [...]
2085 <may fallthru stmt>
2086 GIMPLE_LABEL
2088 and possibly warn. */
2089 case GIMPLE_LABEL:
2091 /* Found a label. Skip all immediately following labels. */
2092 while (!gsi_end_p (*gsi_p)
2093 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2094 gsi_next (gsi_p);
2096 /* There might be no more statements. */
2097 if (gsi_end_p (*gsi_p))
2098 return integer_zero_node;
2100 /* Vector of labels that fall through. */
2101 auto_vec <struct label_entry> labels;
2102 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2104 /* There might be no more statements. */
2105 if (gsi_end_p (*gsi_p))
2106 return integer_zero_node;
2108 gimple *next = gsi_stmt (*gsi_p);
2109 tree label;
2110 /* If what follows is a label, then we may have a fallthrough. */
2111 if (gimple_code (next) == GIMPLE_LABEL
2112 && gimple_has_location (next)
2113 && (label = gimple_label_label (as_a <glabel *> (next)))
2114 && prev != NULL)
2116 struct label_entry *l;
2117 bool warned_p = false;
2118 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2119 /* Quiet. */;
/* prev being one of the collected fallthrough labels means an
   empty branch falls through; report at that label's location. */
2120 else if (gimple_code (prev) == GIMPLE_LABEL
2121 && (label = gimple_label_label (as_a <glabel *> (prev)))
2122 && (l = find_label_entry (&labels, label)))
2123 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2124 "this statement may fall through");
2125 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2126 /* Try to be clever and don't warn when the statement
2127 can't actually fall through. */
2128 && gimple_stmt_may_fallthru (prev)
2129 && gimple_has_location (prev))
2130 warned_p = warning_at (gimple_location (prev),
2131 OPT_Wimplicit_fallthrough_,
2132 "this statement may fall through");
2133 if (warned_p)
2134 inform (gimple_location (next), "here");
2136 /* Mark this label as processed so as to prevent multiple
2137 warnings in nested switches. */
2138 FALLTHROUGH_LABEL_P (label) = true;
2140 /* So that next warn_implicit_fallthrough_r will start looking for
2141 a new sequence starting with this label. */
2142 gsi_prev (gsi_p);
2145 break;
2146 default:
2147 break;
2149 return NULL_TREE;
2152 /* Warn when a switch case falls through.  SEQ is the gimplified body
of the switch; does nothing unless -Wimplicit-fallthrough is active. */
2154 static void
2155 maybe_warn_implicit_fallthrough (gimple_seq seq)
2157 if (!warn_implicit_fallthrough)
2158 return;
2160 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2161 if (!(lang_GNU_C ()
2162 || lang_GNU_CXX ()
2163 || lang_GNU_OBJC ()))
2164 return;
2166 struct walk_stmt_info wi;
2167 memset (&wi, 0, sizeof (wi));
2168 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2171 /* Callback for walk_gimple_seq.  Removes IFN_FALLTHROUGH marker calls
and diagnoses ones not immediately preceding a case/default label. */
2173 static tree
2174 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2175 struct walk_stmt_info *)
2177 gimple *stmt = gsi_stmt (*gsi_p);
2179 *handled_ops_p = true;
2180 switch (gimple_code (stmt))
2182 case GIMPLE_TRY:
2183 case GIMPLE_BIND:
2184 case GIMPLE_CATCH:
2185 case GIMPLE_EH_FILTER:
2186 case GIMPLE_TRANSACTION:
2187 /* Walk the sub-statements. */
2188 *handled_ops_p = false;
2189 break;
2190 case GIMPLE_CALL:
2191 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
/* Drop the marker; from here on verify something label-like
   follows it. */
2193 gsi_remove (gsi_p, true);
2194 if (gsi_end_p (*gsi_p))
2195 return integer_zero_node;
2197 bool found = false;
2198 location_t loc = gimple_location (stmt);
2200 gimple_stmt_iterator gsi2 = *gsi_p;
2201 stmt = gsi_stmt (gsi2);
2202 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2204 /* Go on until the artificial label. */
2205 tree goto_dest = gimple_goto_dest (stmt);
2206 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2208 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2209 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2210 == goto_dest)
2211 break;
2214 /* Not found? Stop. */
2215 if (gsi_end_p (gsi2))
2216 break;
2218 /* Look one past it. */
2219 gsi_next (&gsi2);
2222 /* We're looking for a case label or default label here. */
2223 while (!gsi_end_p (gsi2))
2225 stmt = gsi_stmt (gsi2);
2226 if (gimple_code (stmt) == GIMPLE_LABEL)
2228 tree label = gimple_label_label (as_a <glabel *> (stmt));
2229 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2231 found = true;
2232 break;
/* ASAN_MARK instrumentation between the marker and the label
   is tolerated and skipped. */
2235 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2237 else
2238 /* Something other is not expected. */
2239 break;
2240 gsi_next (&gsi2);
2242 if (!found)
2243 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2244 "a case label or default label");
2246 break;
2247 default:
2248 break;
2250 return NULL_TREE;
2253 /* Expand all FALLTHROUGH () calls in SEQ. */
2255 static void
2256 expand_FALLTHROUGH (gimple_seq *seq_p)
2258 struct walk_stmt_info wi;
2259 memset (&wi, 0, sizeof (wi));
2260 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2264 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2265 branch to. */
/* NOTE(review): gimplifies the controlling expression to a value,
   gimplifies the body while collecting case labels into
   gimplify_ctxp->case_labels, then emits a GIMPLE_SWITCH followed by
   the lowered body.  Saves and restores the context's case_labels,
   live_switch_vars and in_switch_expr fields around the body.  */
2267 static enum gimplify_status
2268 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2270 tree switch_expr = *expr_p;
2271 gimple_seq switch_body_seq = NULL;
2272 enum gimplify_status ret;
/* The index type comes from the SWITCH_EXPR itself when present,
   otherwise from the condition's type.  */
2273 tree index_type = TREE_TYPE (switch_expr);
2274 if (index_type == NULL_TREE)
2275 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2277 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2278 fb_rvalue);
2279 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2280 return ret;
2282 if (SWITCH_BODY (switch_expr))
2284 vec<tree> labels;
2285 vec<tree> saved_labels;
2286 hash_set<tree> *saved_live_switch_vars = NULL;
2287 tree default_case = NULL_TREE;
2288 gswitch *switch_stmt;
2290 /* If someone can be bothered to fill in the labels, they can
2291 be bothered to null out the body too. */
2292 gcc_assert (!SWITCH_LABELS (switch_expr));
2294 /* Save old labels, get new ones from body, then restore the old
2295 labels. Save all the things from the switch body to append after. */
2296 saved_labels = gimplify_ctxp->case_labels;
2297 gimplify_ctxp->case_labels.create (8);
2299 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2300 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2301 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2302 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2303 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2304 else
2305 gimplify_ctxp->live_switch_vars = NULL;
/* Track nesting so FALLTHROUGH expansion below runs only once, for
   the outermost switch.  */
2307 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2308 gimplify_ctxp->in_switch_expr = true;
2310 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2312 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2313 maybe_warn_switch_unreachable (switch_body_seq);
2314 maybe_warn_implicit_fallthrough (switch_body_seq);
2315 /* Only do this for the outermost GIMPLE_SWITCH. */
2316 if (!gimplify_ctxp->in_switch_expr)
2317 expand_FALLTHROUGH (&switch_body_seq);
2319 labels = gimplify_ctxp->case_labels;
2320 gimplify_ctxp->case_labels = saved_labels;
/* All switch-local variables must have been clobbered by now.  */
2322 if (gimplify_ctxp->live_switch_vars)
2324 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2325 delete gimplify_ctxp->live_switch_vars;
2327 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2329 preprocess_case_label_vec_for_gimple (labels, index_type,
2330 &default_case);
/* GIMPLE_SWITCH requires a default; synthesize one that falls
   through to the end of the body when the source had none.  */
2332 if (!default_case)
2334 glabel *new_default;
2336 default_case
2337 = build_case_label (NULL_TREE, NULL_TREE,
2338 create_artificial_label (UNKNOWN_LOCATION));
2339 new_default = gimple_build_label (CASE_LABEL (default_case));
2340 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2343 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2344 default_case, labels);
2345 gimplify_seq_add_stmt (pre_p, switch_stmt);
2346 gimplify_seq_add_seq (pre_p, switch_body_seq);
2347 labels.release ();
2349 else
2350 gcc_assert (SWITCH_LABELS (switch_expr));
2352 return GS_ALL_DONE;
2355 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2357 static enum gimplify_status
2358 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2360 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2361 == current_function_decl);
2363 tree label = LABEL_EXPR_LABEL (*expr_p);
2364 glabel *label_stmt = gimple_build_label (label);
2365 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2366 gimplify_seq_add_stmt (pre_p, label_stmt);
2368 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2369 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2370 NOT_TAKEN));
2371 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2372 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2373 TAKEN));
2375 return GS_ALL_DONE;
2378 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2380 static enum gimplify_status
2381 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2383 struct gimplify_ctx *ctxp;
2384 glabel *label_stmt;
2386 /* Invalid programs can play Duff's Device type games with, for example,
2387 #pragma omp parallel. At least in the C front end, we don't
2388 detect such invalid branches until after gimplification, in the
2389 diagnose_omp_blocks pass. */
2390 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2391 if (ctxp->case_labels.exists ())
2392 break;
2394 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2395 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2396 ctxp->case_labels.safe_push (*expr_p);
2397 gimplify_seq_add_stmt (pre_p, label_stmt);
2399 return GS_ALL_DONE;
2402 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2403 if necessary. */
2405 tree
2406 build_and_jump (tree *label_p)
2408 if (label_p == NULL)
2409 /* If there's nowhere to jump, just fall through. */
2410 return NULL_TREE;
2412 if (*label_p == NULL_TREE)
2414 tree label = create_artificial_label (UNKNOWN_LOCATION);
2415 *label_p = label;
2418 return build1 (GOTO_EXPR, void_type_node, *label_p);
2421 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2422 This also involves building a label to jump to and communicating it to
2423 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2425 static enum gimplify_status
2426 gimplify_exit_expr (tree *expr_p)
2428 tree cond = TREE_OPERAND (*expr_p, 0);
2429 tree expr;
2431 expr = build_and_jump (&gimplify_ctxp->exit_label);
2432 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2433 *expr_p = expr;
2435 return GS_OK;
2438 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2439 different from its canonical type, wrap the whole thing inside a
2440 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2441 type.
2443 The canonical type of a COMPONENT_REF is the type of the field being
2444 referenced--unless the field is a bit-field which can be read directly
2445 in a smaller mode, in which case the canonical type is the
2446 sign-appropriate type corresponding to that mode. */
2448 static void
2449 canonicalize_component_ref (tree *expr_p)
2451 tree expr = *expr_p;
2452 tree type;
2454 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
/* For integral accesses use the possibly-narrower type returned by
   get_unwidened (the bit-field case described above); otherwise the
   canonical type is simply the field's type.  */
2456 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2457 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2458 else
2459 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2461 /* One could argue that all the stuff below is not necessary for
2462 the non-bitfield case and declare it a FE error if type
2463 adjustment would be needed. */
2464 if (TREE_TYPE (expr) != type)
2466 #ifdef ENABLE_TYPES_CHECKING
2467 tree old_type = TREE_TYPE (expr);
2468 #endif
2469 int type_quals;
2471 /* We need to preserve qualifiers and propagate them from
2472 operand 0. */
2473 type_quals = TYPE_QUALS (type)
2474 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2475 if (TYPE_QUALS (type) != type_quals)
2476 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2478 /* Set the type of the COMPONENT_REF to the underlying type. */
2479 TREE_TYPE (expr) = type;
2481 #ifdef ENABLE_TYPES_CHECKING
2482 /* It is now a FE error, if the conversion from the canonical
2483 type to the original expression type is not useless. */
2484 gcc_assert (useless_type_conversion_p (old_type, type));
2485 #endif
2489 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2490 to foo, embed that change in the ADDR_EXPR by converting
2491 T array[U];
2492 (T *)&array
2494 &array[L]
2495 where L is the lower bound. For simplicity, only do this for constant
2496 lower bound.
2497 The constraint is that the type of &array[L] is trivially convertible
2498 to T *. */
2500 static void
2501 canonicalize_addr_expr (tree *expr_p)
2503 tree expr = *expr_p;
2504 tree addr_expr = TREE_OPERAND (expr, 0);
2505 tree datype, ddatype, pddatype;
2507 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2508 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2509 || TREE_CODE (addr_expr) != ADDR_EXPR)
2510 return;
2512 /* The addr_expr type should be a pointer to an array. */
2513 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2514 if (TREE_CODE (datype) != ARRAY_TYPE)
2515 return;
2517 /* The pointer to element type shall be trivially convertible to
2518 the expression pointer type. */
2519 ddatype = TREE_TYPE (datype);
2520 pddatype = build_pointer_type (ddatype);
2521 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2522 pddatype))
2523 return;
2525 /* The lower bound and element sizes must be constant. */
2526 if (!TYPE_SIZE_UNIT (ddatype)
2527 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2528 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2529 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2530 return;
2532 /* All checks succeeded. Build a new node to merge the cast. */
/* Build &array[L] where L is the (constant) lower bound of the domain.  */
2533 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2534 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2535 NULL_TREE, NULL_TREE);
2536 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2538 /* We can have stripped a required restrict qualifier above. */
/* Re-apply the original pointer type if the new one isn't trivially
   convertible to it (e.g. restrict-qualified).  */
2539 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2540 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2543 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2544 underneath as appropriate. */
/* NOTE(review): returns GS_OK; never fails.  Conversion chains are
   collapsed, COMPONENT_REF/ADDR_EXPR operands are canonicalized, a
   remaining conversion to a non-register type is turned into a
   VIEW_CONVERT_EXPR, and CONVERT_EXPR is normalized to NOP_EXPR.  */
2546 static enum gimplify_status
2547 gimplify_conversion (tree *expr_p)
2549 location_t loc = EXPR_LOCATION (*expr_p);
2550 gcc_assert (CONVERT_EXPR_P (*expr_p));
2552 /* Then strip away all but the outermost conversion. */
2553 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2555 /* And remove the outermost conversion if it's useless. */
2556 if (tree_ssa_useless_type_conversion (*expr_p))
2557 *expr_p = TREE_OPERAND (*expr_p, 0);
2559 /* If we still have a conversion at the toplevel,
2560 then canonicalize some constructs. */
2561 if (CONVERT_EXPR_P (*expr_p))
2563 tree sub = TREE_OPERAND (*expr_p, 0);
2565 /* If a NOP conversion is changing the type of a COMPONENT_REF
2566 expression, then canonicalize its type now in order to expose more
2567 redundant conversions. */
2568 if (TREE_CODE (sub) == COMPONENT_REF)
2569 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2571 /* If a NOP conversion is changing a pointer to array of foo
2572 to a pointer to foo, embed that change in the ADDR_EXPR. */
2573 else if (TREE_CODE (sub) == ADDR_EXPR)
2574 canonicalize_addr_expr (expr_p);
2577 /* If we have a conversion to a non-register type force the
2578 use of a VIEW_CONVERT_EXPR instead. */
2579 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2580 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2581 TREE_OPERAND (*expr_p, 0));
2583 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2584 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2585 TREE_SET_CODE (*expr_p, NOP_EXPR);
2587 return GS_OK;
2590 /* Nonlocal VLAs seen in the current function. */
/* Used by gimplify_var_or_parm_decl below to create at most one debug
   copy per nonlocal VLA.  */
2591 static hash_set<tree> *nonlocal_vlas;
2593 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
/* Chained through DECL_CHAIN; see gimplify_var_or_parm_decl.  */
2594 static tree nonlocal_vla_vars;
2596 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2597 DECL_VALUE_EXPR, and it's worth re-examining things. */
2599 static enum gimplify_status
2600 gimplify_var_or_parm_decl (tree *expr_p)
2602 tree decl = *expr_p;
2604 /* ??? If this is a local variable, and it has not been seen in any
2605 outer BIND_EXPR, then it's probably the result of a duplicate
2606 declaration, for which we've already issued an error. It would
2607 be really nice if the front end wouldn't leak these at all.
2608 Currently the only known culprit is C++ destructors, as seen
2609 in g++.old-deja/g++.jason/binding.C. */
2610 if (VAR_P (decl)
2611 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2612 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2613 && decl_function_context (decl) == current_function_decl)
2615 gcc_assert (seen_error ());
2616 return GS_ERROR;
2619 /* When within an OMP context, notice uses of variables. */
2620 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2621 return GS_ALL_DONE;
2623 /* If the decl is an alias for another expression, substitute it now. */
2624 if (DECL_HAS_VALUE_EXPR_P (decl))
2626 tree value_expr = DECL_VALUE_EXPR (decl);
2628 /* For referenced nonlocal VLAs add a decl for debugging purposes
2629 to the current function. */
/* The VLA test: a variable-size decl whose value-expr dereferences a
   VAR_DECL and that belongs to an enclosing function.  */
2630 if (VAR_P (decl)
2631 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2632 && nonlocal_vlas != NULL
2633 && TREE_CODE (value_expr) == INDIRECT_REF
2634 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2635 && decl_function_context (decl) != current_function_decl)
2637 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
/* Skip enclosing workshare/simd/acc regions; only add the debug
   copy when not inside any other kind of OMP region.  */
2638 while (ctx
2639 && (ctx->region_type == ORT_WORKSHARE
2640 || ctx->region_type == ORT_SIMD
2641 || ctx->region_type == ORT_ACC))
2642 ctx = ctx->outer_context;
/* hash_set::add returns false when DECL was newly inserted, so the
   copy is only created on the first reference.  */
2643 if (!ctx && !nonlocal_vlas->add (decl))
2645 tree copy = copy_node (decl);
2647 lang_hooks.dup_lang_specific_decl (copy);
2648 SET_DECL_RTL (copy, 0);
2649 TREE_USED (copy) = 1;
/* Chain the debug copy onto the list collected for this function.  */
2650 DECL_CHAIN (copy) = nonlocal_vla_vars;
2651 nonlocal_vla_vars = copy;
2652 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2653 DECL_HAS_VALUE_EXPR_P (copy) = 1;
/* Substitute the value expression and ask the caller to re-examine.  */
2657 *expr_p = unshare_expr (value_expr);
2658 return GS_OK;
2661 return GS_ALL_DONE;
2664 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
/* NOTE(review): only recomputes the flag from T's own volatility and
   its immediate operands; it does not recurse.  Assignment and
   pre/post inc/dec expressions always keep the flag set.  */
2666 static void
2667 recalculate_side_effects (tree t)
2669 enum tree_code code = TREE_CODE (t);
2670 int len = TREE_OPERAND_LENGTH (t);
2671 int i;
2673 switch (TREE_CODE_CLASS (code))
2675 case tcc_expression:
2676 switch (code)
2678 case INIT_EXPR:
2679 case MODIFY_EXPR:
2680 case VA_ARG_EXPR:
2681 case PREDECREMENT_EXPR:
2682 case PREINCREMENT_EXPR:
2683 case POSTDECREMENT_EXPR:
2684 case POSTINCREMENT_EXPR:
2685 /* All of these have side-effects, no matter what their
2686 operands are. */
2687 return;
2689 default:
2690 break;
2692 /* Fall through. */
2694 case tcc_comparison: /* a comparison expression */
2695 case tcc_unary: /* a unary arithmetic expression */
2696 case tcc_binary: /* a binary arithmetic expression */
2697 case tcc_reference: /* a reference */
2698 case tcc_vl_exp: /* a function call */
/* Start from the node's own volatility, then OR in any operand's
   side-effects flag.  */
2699 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2700 for (i = 0; i < len; ++i)
2702 tree op = TREE_OPERAND (t, i);
2703 if (op && TREE_SIDE_EFFECTS (op))
2704 TREE_SIDE_EFFECTS (t) = 1;
2706 break;
2708 case tcc_constant:
2709 /* No side-effects. */
2710 return;
2712 default:
2713 gcc_unreachable ();
2717 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2718 node *EXPR_P.
2720 compound_lval
2721 : min_lval '[' val ']'
2722 | min_lval '.' ID
2723 | compound_lval '[' val ']'
2724 | compound_lval '.' ID
2726 This is not part of the original SIMPLE definition, which separates
2727 array and member references, but it seems reasonable to handle them
2728 together. Also, this way we don't run into problems with union
2729 aliasing; gcc requires that for accesses through a union to alias, the
2730 union reference must be explicit, which was not always the case when we
2731 were splitting up array and member refs.
2733 PRE_P points to the sequence where side effects that must happen before
2734 *EXPR_P should be stored.
2736 POST_P points to the sequence where side effects that must happen after
2737 *EXPR_P should be stored. */
2739 static enum gimplify_status
2740 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2741 fallback_t fallback)
2743 tree *p;
2744 enum gimplify_status ret = GS_ALL_DONE, tret;
2745 int i;
2746 location_t loc = EXPR_LOCATION (*expr_p);
2747 tree expr = *expr_p;
2749 /* Create a stack of the subexpressions so later we can walk them in
2750 order from inner to outer. */
2751 auto_vec<tree, 10> expr_stack;
2753 /* We can handle anything that get_inner_reference can deal with. */
2754 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2756 restart:
2757 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2758 if (TREE_CODE (*p) == INDIRECT_REF)
2759 *p = fold_indirect_ref_loc (loc, *p);
2761 if (handled_component_p (*p))
2763 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2764 additional COMPONENT_REFs. */
2765 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2766 && gimplify_var_or_parm_decl (p) == GS_OK)
2767 goto restart;
2768 else
2769 break;
2771 expr_stack.safe_push (*p);
2774 gcc_assert (expr_stack.length ());
2776 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2777 walked through and P points to the innermost expression.
2779 Java requires that we elaborated nodes in source order. That
2780 means we must gimplify the inner expression followed by each of
2781 the indices, in order. But we can't gimplify the inner
2782 expression until we deal with any variable bounds, sizes, or
2783 positions in order to deal with PLACEHOLDER_EXPRs.
2785 So we do this in three steps. First we deal with the annotations
2786 for any variables in the components, then we gimplify the base,
2787 then we gimplify any indices, from left to right. */
/* Step 1: gimplify variable array bounds, element sizes and field
   offsets into operands 2/3 of the refs, outermost first.  */
2788 for (i = expr_stack.length () - 1; i >= 0; i--)
2790 tree t = expr_stack[i];
2792 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2794 /* Gimplify the low bound and element type size and put them into
2795 the ARRAY_REF. If these values are set, they have already been
2796 gimplified. */
2797 if (TREE_OPERAND (t, 2) == NULL_TREE)
2799 tree low = unshare_expr (array_ref_low_bound (t));
/* Constant bounds stay implicit; only variable ones are stored.  */
2800 if (!is_gimple_min_invariant (low))
2802 TREE_OPERAND (t, 2) = low;
2803 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2804 post_p, is_gimple_reg,
2805 fb_rvalue);
2806 ret = MIN (ret, tret);
2809 else
2811 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2812 is_gimple_reg, fb_rvalue);
2813 ret = MIN (ret, tret);
2816 if (TREE_OPERAND (t, 3) == NULL_TREE)
2818 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2819 tree elmt_size = unshare_expr (array_ref_element_size (t));
2820 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2822 /* Divide the element size by the alignment of the element
2823 type (above). */
2824 elmt_size
2825 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2827 if (!is_gimple_min_invariant (elmt_size))
2829 TREE_OPERAND (t, 3) = elmt_size;
2830 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2831 post_p, is_gimple_reg,
2832 fb_rvalue);
2833 ret = MIN (ret, tret);
2836 else
2838 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2839 is_gimple_reg, fb_rvalue);
2840 ret = MIN (ret, tret);
2843 else if (TREE_CODE (t) == COMPONENT_REF)
2845 /* Set the field offset into T and gimplify it. */
2846 if (TREE_OPERAND (t, 2) == NULL_TREE)
2848 tree offset = unshare_expr (component_ref_field_offset (t));
2849 tree field = TREE_OPERAND (t, 1);
2850 tree factor
2851 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2853 /* Divide the offset by its alignment. */
2854 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2856 if (!is_gimple_min_invariant (offset))
2858 TREE_OPERAND (t, 2) = offset;
2859 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2860 post_p, is_gimple_reg,
2861 fb_rvalue);
2862 ret = MIN (ret, tret);
2865 else
2867 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2868 is_gimple_reg, fb_rvalue);
2869 ret = MIN (ret, tret);
2874 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2875 so as to match the min_lval predicate. Failure to do so may result
2876 in the creation of large aggregate temporaries. */
2877 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2878 fallback | fb_lvalue);
2879 ret = MIN (ret, tret);
2881 /* And finally, the indices and operands of ARRAY_REF. During this
2882 loop we also remove any useless conversions. */
/* Step 3: pop refs innermost-first, gimplifying ARRAY_REF indices.  */
2883 for (; expr_stack.length () > 0; )
2885 tree t = expr_stack.pop ();
2887 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2889 /* Gimplify the dimension. */
2890 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2892 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2893 is_gimple_val, fb_rvalue);
2894 ret = MIN (ret, tret);
2898 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2900 /* The innermost expression P may have originally had
2901 TREE_SIDE_EFFECTS set which would have caused all the outer
2902 expressions in *EXPR_P leading to P to also have had
2903 TREE_SIDE_EFFECTS set. */
2904 recalculate_side_effects (t);
2907 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2908 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2910 canonicalize_component_ref (expr_p);
2913 expr_stack.release ();
/* If nothing changed, we must report GS_ALL_DONE and vice versa.  */
2915 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2917 return ret;
2920 /* Gimplify the self modifying expression pointed to by EXPR_P
2921 (++, --, +=, -=).
2923 PRE_P points to the list where side effects that must happen before
2924 *EXPR_P should be stored.
2926 POST_P points to the list where side effects that must happen after
2927 *EXPR_P should be stored.
2929 WANT_VALUE is nonzero iff we want to use the value of this expression
2930 in another expression.
2932 ARITH_TYPE is the type the computation should be performed in. */
2934 enum gimplify_status
2935 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2936 bool want_value, tree arith_type)
2938 enum tree_code code;
2939 tree lhs, lvalue, rhs, t1;
2940 gimple_seq post = NULL, *orig_post_p = post_p;
2941 bool postfix;
2942 enum tree_code arith_code;
2943 enum gimplify_status ret;
2944 location_t loc = EXPR_LOCATION (*expr_p);
2946 code = TREE_CODE (*expr_p);
2948 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2949 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2951 /* Prefix or postfix? */
2952 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2953 /* Faster to treat as prefix if result is not used. */
2954 postfix = want_value;
2955 else
2956 postfix = false;
2958 /* For postfix, make sure the inner expression's post side effects
2959 are executed after side effects from this expression. */
/* Redirect post side effects into a local queue that is appended to
   ORIG_POST_P once the assignment has been emitted.  */
2960 if (postfix)
2961 post_p = &post;
2963 /* Add or subtract? */
2964 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2965 arith_code = PLUS_EXPR;
2966 else
2967 arith_code = MINUS_EXPR;
2969 /* Gimplify the LHS into a GIMPLE lvalue. */
2970 lvalue = TREE_OPERAND (*expr_p, 0);
2971 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2972 if (ret == GS_ERROR)
2973 return ret;
2975 /* Extract the operands to the arithmetic operation. */
2976 lhs = lvalue;
2977 rhs = TREE_OPERAND (*expr_p, 1);
2979 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2980 that as the result value and in the postqueue operation. */
2981 if (postfix)
2983 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2984 if (ret == GS_ERROR)
2985 return ret;
/* Snapshot the pre-modification value into a temporary; it becomes
   the postfix expression's result.  */
2987 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2990 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2991 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2993 rhs = convert_to_ptrofftype_loc (loc, rhs);
/* POINTER_PLUS_EXPR has no MINUS form; negate the offset instead.  */
2994 if (arith_code == MINUS_EXPR)
2995 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2996 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2998 else
/* Perform the arithmetic in ARITH_TYPE, then convert back to the
   expression's type.  */
2999 t1 = fold_convert (TREE_TYPE (*expr_p),
3000 fold_build2 (arith_code, arith_type,
3001 fold_convert (arith_type, lhs),
3002 fold_convert (arith_type, rhs)));
3004 if (postfix)
3006 gimplify_assign (lvalue, t1, pre_p);
3007 gimplify_seq_add_seq (orig_post_p, post);
3008 *expr_p = lhs;
3009 return GS_ALL_DONE;
3011 else
3013 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3014 return GS_OK;
3018 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3020 static void
3021 maybe_with_size_expr (tree *expr_p)
3023 tree expr = *expr_p;
3024 tree type = TREE_TYPE (expr);
3025 tree size;
3027 /* If we've already wrapped this or the type is error_mark_node, we can't do
3028 anything. */
3029 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3030 || type == error_mark_node)
3031 return;
3033 /* If the size isn't known or is a constant, we have nothing to do. */
3034 size = TYPE_SIZE_UNIT (type);
3035 if (!size || TREE_CODE (size) == INTEGER_CST)
3036 return;
3038 /* Otherwise, make a WITH_SIZE_EXPR. */
3039 size = unshare_expr (size);
3040 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3041 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3044 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3045 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3046 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3047 gimplified to an SSA name. */
3049 enum gimplify_status
3050 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3051 bool allow_ssa)
3053 bool (*test) (tree);
3054 fallback_t fb;
3056 /* In general, we allow lvalues for function arguments to avoid
3057 extra overhead of copying large aggregates out of even larger
3058 aggregates into temporaries only to copy the temporaries to
3059 the argument list. Make optimizers happy by pulling out to
3060 temporaries those types that fit in registers. */
3061 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3062 test = is_gimple_val, fb = fb_rvalue;
3063 else
3065 test = is_gimple_lvalue, fb = fb_either;
3066 /* Also strip a TARGET_EXPR that would force an extra copy. */
3067 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3069 tree init = TARGET_EXPR_INITIAL (*arg_p);
/* Only strip when the initializer computes a value (non-void);
   otherwise the TARGET_EXPR slot is needed.  */
3070 if (init
3071 && !VOID_TYPE_P (TREE_TYPE (init)))
3072 *arg_p = init;
3076 /* If this is a variable sized type, we must remember the size. */
3077 maybe_with_size_expr (arg_p);
3079 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3080 /* Make sure arguments have the same location as the function call
3081 itself. */
3082 protected_set_expr_location (*arg_p, call_location);
3084 /* There is a sequence point before a function call. Side effects in
3085 the argument list must occur before the actual call. So, when
3086 gimplifying arguments, force gimplify_expr to use an internal
3087 post queue which is then appended to the end of PRE_P. */
3088 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3091 /* Don't fold inside offloading or taskreg regions: it can break code by
3092 adding decl references that weren't in the source. We'll do it during
3093 omplower pass instead. */
3095 static bool
3096 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3098 struct gimplify_omp_ctx *ctx;
3099 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3100 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3101 return false;
3102 return fold_stmt (gsi);
3105 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3106 with the pointer to the proper cilk frame. */
3107 static void
3108 gimplify_cilk_detach (gimple_seq *pre_p)
3110 tree frame = cfun->cilk_frame_decl;
3111 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3112 frame);
3113 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3114 ptrf);
3115 gimplify_seq_add_stmt(pre_p, detach);
3118 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3119 WANT_VALUE is true if the result of the call is desired. */
3121 static enum gimplify_status
3122 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3124 tree fndecl, parms, p, fnptrtype;
3125 enum gimplify_status ret;
3126 int i, nargs;
3127 gcall *call;
3128 bool builtin_va_start_p = false;
3129 location_t loc = EXPR_LOCATION (*expr_p);
3131 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3133 /* For reliable diagnostics during inlining, it is necessary that
3134 every call_expr be annotated with file and line. */
3135 if (! EXPR_HAS_LOCATION (*expr_p))
3136 SET_EXPR_LOCATION (*expr_p, input_location);
3138 /* Gimplify internal functions created in the FEs. */
3139 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3141 if (want_value)
3142 return GS_ALL_DONE;
3144 nargs = call_expr_nargs (*expr_p);
3145 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3146 auto_vec<tree> vargs (nargs);
3148 for (i = 0; i < nargs; i++)
3150 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3151 EXPR_LOCATION (*expr_p));
3152 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3155 if (EXPR_CILK_SPAWN (*expr_p))
3156 gimplify_cilk_detach (pre_p);
3157 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3158 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3159 gimplify_seq_add_stmt (pre_p, call);
3160 return GS_ALL_DONE;
3163 /* This may be a call to a builtin function.
3165 Builtin function calls may be transformed into different
3166 (and more efficient) builtin function calls under certain
3167 circumstances. Unfortunately, gimplification can muck things
3168 up enough that the builtin expanders are not aware that certain
3169 transformations are still valid.
3171 So we attempt transformation/gimplification of the call before
3172 we gimplify the CALL_EXPR. At this time we do not manage to
3173 transform all calls in the same manner as the expanders do, but
3174 we do transform most of them. */
3175 fndecl = get_callee_fndecl (*expr_p);
3176 if (fndecl
3177 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3178 switch (DECL_FUNCTION_CODE (fndecl))
3180 CASE_BUILT_IN_ALLOCA:
3181 /* If the call has been built for a variable-sized object, then we
3182 want to restore the stack level when the enclosing BIND_EXPR is
3183 exited to reclaim the allocated space; otherwise, we precisely
3184 need to do the opposite and preserve the latest stack level. */
3185 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3186 gimplify_ctxp->save_stack = true;
3187 else
3188 gimplify_ctxp->keep_stack = true;
3189 break;
3191 case BUILT_IN_VA_START:
3193 builtin_va_start_p = TRUE;
3194 if (call_expr_nargs (*expr_p) < 2)
3196 error ("too few arguments to function %<va_start%>");
3197 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3198 return GS_OK;
3201 if (fold_builtin_next_arg (*expr_p, true))
3203 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3204 return GS_OK;
3206 break;
3209 default:
3212 if (fndecl && DECL_BUILT_IN (fndecl))
3214 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3215 if (new_tree && new_tree != *expr_p)
3217 /* There was a transformation of this call which computes the
3218 same value, but in a more efficient way. Return and try
3219 again. */
3220 *expr_p = new_tree;
3221 return GS_OK;
3225 /* Remember the original function pointer type. */
3226 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3228 /* There is a sequence point before the call, so any side effects in
3229 the calling expression must occur before the actual call. Force
3230 gimplify_expr to use an internal post queue. */
3231 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3232 is_gimple_call_addr, fb_rvalue);
3234 nargs = call_expr_nargs (*expr_p);
3236 /* Get argument types for verification. */
3237 fndecl = get_callee_fndecl (*expr_p);
3238 parms = NULL_TREE;
3239 if (fndecl)
3240 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3241 else
3242 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3244 if (fndecl && DECL_ARGUMENTS (fndecl))
3245 p = DECL_ARGUMENTS (fndecl);
3246 else if (parms)
3247 p = parms;
3248 else
3249 p = NULL_TREE;
3250 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3253 /* If the last argument is __builtin_va_arg_pack () and it is not
3254 passed as a named argument, decrease the number of CALL_EXPR
3255 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3256 if (!p
3257 && i < nargs
3258 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3260 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3261 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3263 if (last_arg_fndecl
3264 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3265 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3266 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3268 tree call = *expr_p;
3270 --nargs;
3271 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3272 CALL_EXPR_FN (call),
3273 nargs, CALL_EXPR_ARGP (call));
3275 /* Copy all CALL_EXPR flags, location and block, except
3276 CALL_EXPR_VA_ARG_PACK flag. */
3277 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3278 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3279 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3280 = CALL_EXPR_RETURN_SLOT_OPT (call);
3281 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3282 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3284 /* Set CALL_EXPR_VA_ARG_PACK. */
3285 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3289 /* If the call returns twice then after building the CFG the call
3290 argument computations will no longer dominate the call because
3291 we add an abnormal incoming edge to the call. So do not use SSA
3292 vars there. */
3293 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3295 /* Gimplify the function arguments. */
3296 if (nargs > 0)
3298 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3299 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3300 PUSH_ARGS_REVERSED ? i-- : i++)
3302 enum gimplify_status t;
3304 /* Avoid gimplifying the second argument to va_start, which needs to
3305 be the plain PARM_DECL. */
3306 if ((i != 1) || !builtin_va_start_p)
3308 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3309 EXPR_LOCATION (*expr_p), ! returns_twice);
3311 if (t == GS_ERROR)
3312 ret = GS_ERROR;
3317 /* Gimplify the static chain. */
3318 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3320 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3321 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3322 else
3324 enum gimplify_status t;
3325 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3326 EXPR_LOCATION (*expr_p), ! returns_twice);
3327 if (t == GS_ERROR)
3328 ret = GS_ERROR;
3332 /* Verify the function result. */
3333 if (want_value && fndecl
3334 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3336 error_at (loc, "using result of function returning %<void%>");
3337 ret = GS_ERROR;
3340 /* Try this again in case gimplification exposed something. */
3341 if (ret != GS_ERROR)
3343 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3345 if (new_tree && new_tree != *expr_p)
3347 /* There was a transformation of this call which computes the
3348 same value, but in a more efficient way. Return and try
3349 again. */
3350 *expr_p = new_tree;
3351 return GS_OK;
3354 else
3356 *expr_p = error_mark_node;
3357 return GS_ERROR;
3360 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3361 decl. This allows us to eliminate redundant or useless
3362 calls to "const" functions. */
3363 if (TREE_CODE (*expr_p) == CALL_EXPR)
3365 int flags = call_expr_flags (*expr_p);
3366 if (flags & (ECF_CONST | ECF_PURE)
3367 /* An infinite loop is considered a side effect. */
3368 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3369 TREE_SIDE_EFFECTS (*expr_p) = 0;
3372 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3373 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3374 form and delegate the creation of a GIMPLE_CALL to
3375 gimplify_modify_expr. This is always possible because when
3376 WANT_VALUE is true, the caller wants the result of this call into
3377 a temporary, which means that we will emit an INIT_EXPR in
3378 internal_get_tmp_var which will then be handled by
3379 gimplify_modify_expr. */
3380 if (!want_value)
3382 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3383 have to do is replicate it as a GIMPLE_CALL tuple. */
3384 gimple_stmt_iterator gsi;
3385 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3386 notice_special_calls (call);
3387 if (EXPR_CILK_SPAWN (*expr_p))
3388 gimplify_cilk_detach (pre_p);
3389 gimplify_seq_add_stmt (pre_p, call);
3390 gsi = gsi_last (*pre_p);
3391 maybe_fold_stmt (&gsi);
3392 *expr_p = NULL_TREE;
3394 else
3395 /* Remember the original function type. */
3396 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3397 CALL_EXPR_FN (*expr_p));
3399 return ret;
3402 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3403 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3405 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3406 condition is true or false, respectively. If null, we should generate
3407 our own to skip over the evaluation of this specific expression.
3409 LOCUS is the source location of the COND_EXPR.
3411 This function is the tree equivalent of do_jump.
3413 shortcut_cond_r should only be called by shortcut_cond_expr. */
3415 static tree
3416 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3417 location_t locus)
3419 tree local_label = NULL_TREE;
3420 tree t, expr = NULL;
3422 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3423 retain the shortcut semantics. Just insert the gotos here;
3424 shortcut_cond_expr will append the real blocks later. */
3425 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3427 location_t new_locus;
3429 /* Turn if (a && b) into
3431 if (a); else goto no;
3432 if (b) goto yes; else goto no;
3433 (no:) */
3435 if (false_label_p == NULL)
3436 false_label_p = &local_label;
3438 /* Keep the original source location on the first 'if'. */
3439 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3440 append_to_statement_list (t, &expr);
3442 /* Set the source location of the && on the second 'if'. */
3443 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3444 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3445 new_locus);
3446 append_to_statement_list (t, &expr);
3448 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3450 location_t new_locus;
3452 /* Turn if (a || b) into
3454 if (a) goto yes;
3455 if (b) goto yes; else goto no;
3456 (yes:) */
3458 if (true_label_p == NULL)
3459 true_label_p = &local_label;
3461 /* Keep the original source location on the first 'if'. */
3462 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3463 append_to_statement_list (t, &expr);
3465 /* Set the source location of the || on the second 'if'. */
3466 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3467 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3468 new_locus);
3469 append_to_statement_list (t, &expr);
3471 else if (TREE_CODE (pred) == COND_EXPR
3472 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3473 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3475 location_t new_locus;
3477 /* As long as we're messing with gotos, turn if (a ? b : c) into
3478 if (a)
3479 if (b) goto yes; else goto no;
3480 else
3481 if (c) goto yes; else goto no;
3483 Don't do this if one of the arms has void type, which can happen
3484 in C++ when the arm is throw. */
3486 /* Keep the original source location on the first 'if'. Set the source
3487 location of the ? on the second 'if'. */
3488 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3489 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3490 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3491 false_label_p, locus),
3492 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3493 false_label_p, new_locus));
3495 else
3497 expr = build3 (COND_EXPR, void_type_node, pred,
3498 build_and_jump (true_label_p),
3499 build_and_jump (false_label_p));
3500 SET_EXPR_LOCATION (expr, locus);
3503 if (local_label)
3505 t = build1 (LABEL_EXPR, void_type_node, local_label);
3506 append_to_statement_list (t, &expr);
3509 return expr;
3512 /* Given a conditional expression EXPR with short-circuit boolean
3513 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3514 predicate apart into the equivalent sequence of conditionals. */
3516 static tree
3517 shortcut_cond_expr (tree expr)
3519 tree pred = TREE_OPERAND (expr, 0);
3520 tree then_ = TREE_OPERAND (expr, 1);
3521 tree else_ = TREE_OPERAND (expr, 2);
3522 tree true_label, false_label, end_label, t;
3523 tree *true_label_p;
3524 tree *false_label_p;
3525 bool emit_end, emit_false, jump_over_else;
3526 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3527 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3529 /* First do simple transformations. */
3530 if (!else_se)
3532 /* If there is no 'else', turn
3533 if (a && b) then c
3534 into
3535 if (a) if (b) then c. */
3536 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3538 /* Keep the original source location on the first 'if'. */
3539 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3540 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3541 /* Set the source location of the && on the second 'if'. */
3542 if (EXPR_HAS_LOCATION (pred))
3543 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3544 then_ = shortcut_cond_expr (expr);
3545 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3546 pred = TREE_OPERAND (pred, 0);
3547 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3548 SET_EXPR_LOCATION (expr, locus);
3552 if (!then_se)
3554 /* If there is no 'then', turn
3555 if (a || b); else d
3556 into
3557 if (a); else if (b); else d. */
3558 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3560 /* Keep the original source location on the first 'if'. */
3561 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3562 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3563 /* Set the source location of the || on the second 'if'. */
3564 if (EXPR_HAS_LOCATION (pred))
3565 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3566 else_ = shortcut_cond_expr (expr);
3567 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3568 pred = TREE_OPERAND (pred, 0);
3569 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3570 SET_EXPR_LOCATION (expr, locus);
3574 /* If we're done, great. */
3575 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3576 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3577 return expr;
3579 /* Otherwise we need to mess with gotos. Change
3580 if (a) c; else d;
3582 if (a); else goto no;
3583 c; goto end;
3584 no: d; end:
3585 and recursively gimplify the condition. */
3587 true_label = false_label = end_label = NULL_TREE;
3589 /* If our arms just jump somewhere, hijack those labels so we don't
3590 generate jumps to jumps. */
3592 if (then_
3593 && TREE_CODE (then_) == GOTO_EXPR
3594 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3596 true_label = GOTO_DESTINATION (then_);
3597 then_ = NULL;
3598 then_se = false;
3601 if (else_
3602 && TREE_CODE (else_) == GOTO_EXPR
3603 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3605 false_label = GOTO_DESTINATION (else_);
3606 else_ = NULL;
3607 else_se = false;
3610 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3611 if (true_label)
3612 true_label_p = &true_label;
3613 else
3614 true_label_p = NULL;
3616 /* The 'else' branch also needs a label if it contains interesting code. */
3617 if (false_label || else_se)
3618 false_label_p = &false_label;
3619 else
3620 false_label_p = NULL;
3622 /* If there was nothing else in our arms, just forward the label(s). */
3623 if (!then_se && !else_se)
3624 return shortcut_cond_r (pred, true_label_p, false_label_p,
3625 EXPR_LOC_OR_LOC (expr, input_location));
3627 /* If our last subexpression already has a terminal label, reuse it. */
3628 if (else_se)
3629 t = expr_last (else_);
3630 else if (then_se)
3631 t = expr_last (then_);
3632 else
3633 t = NULL;
3634 if (t && TREE_CODE (t) == LABEL_EXPR)
3635 end_label = LABEL_EXPR_LABEL (t);
3637 /* If we don't care about jumping to the 'else' branch, jump to the end
3638 if the condition is false. */
3639 if (!false_label_p)
3640 false_label_p = &end_label;
3642 /* We only want to emit these labels if we aren't hijacking them. */
3643 emit_end = (end_label == NULL_TREE);
3644 emit_false = (false_label == NULL_TREE);
3646 /* We only emit the jump over the else clause if we have to--if the
3647 then clause may fall through. Otherwise we can wind up with a
3648 useless jump and a useless label at the end of gimplified code,
3649 which will cause us to think that this conditional as a whole
3650 falls through even if it doesn't. If we then inline a function
3651 which ends with such a condition, that can cause us to issue an
3652 inappropriate warning about control reaching the end of a
3653 non-void function. */
3654 jump_over_else = block_may_fallthru (then_);
3656 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3657 EXPR_LOC_OR_LOC (expr, input_location));
3659 expr = NULL;
3660 append_to_statement_list (pred, &expr);
3662 append_to_statement_list (then_, &expr);
3663 if (else_se)
3665 if (jump_over_else)
3667 tree last = expr_last (expr);
3668 t = build_and_jump (&end_label);
3669 if (EXPR_HAS_LOCATION (last))
3670 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3671 append_to_statement_list (t, &expr);
3673 if (emit_false)
3675 t = build1 (LABEL_EXPR, void_type_node, false_label);
3676 append_to_statement_list (t, &expr);
3678 append_to_statement_list (else_, &expr);
3680 if (emit_end && end_label)
3682 t = build1 (LABEL_EXPR, void_type_node, end_label);
3683 append_to_statement_list (t, &expr);
3686 return expr;
3689 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3691 tree
3692 gimple_boolify (tree expr)
3694 tree type = TREE_TYPE (expr);
3695 location_t loc = EXPR_LOCATION (expr);
3697 if (TREE_CODE (expr) == NE_EXPR
3698 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3699 && integer_zerop (TREE_OPERAND (expr, 1)))
3701 tree call = TREE_OPERAND (expr, 0);
3702 tree fn = get_callee_fndecl (call);
3704 /* For __builtin_expect ((long) (x), y) recurse into x as well
3705 if x is truth_value_p. */
3706 if (fn
3707 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3708 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3709 && call_expr_nargs (call) == 2)
3711 tree arg = CALL_EXPR_ARG (call, 0);
3712 if (arg)
3714 if (TREE_CODE (arg) == NOP_EXPR
3715 && TREE_TYPE (arg) == TREE_TYPE (call))
3716 arg = TREE_OPERAND (arg, 0);
3717 if (truth_value_p (TREE_CODE (arg)))
3719 arg = gimple_boolify (arg);
3720 CALL_EXPR_ARG (call, 0)
3721 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3727 switch (TREE_CODE (expr))
3729 case TRUTH_AND_EXPR:
3730 case TRUTH_OR_EXPR:
3731 case TRUTH_XOR_EXPR:
3732 case TRUTH_ANDIF_EXPR:
3733 case TRUTH_ORIF_EXPR:
3734 /* Also boolify the arguments of truth exprs. */
3735 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3736 /* FALLTHRU */
3738 case TRUTH_NOT_EXPR:
3739 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3741 /* These expressions always produce boolean results. */
3742 if (TREE_CODE (type) != BOOLEAN_TYPE)
3743 TREE_TYPE (expr) = boolean_type_node;
3744 return expr;
3746 case ANNOTATE_EXPR:
3747 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3749 case annot_expr_ivdep_kind:
3750 case annot_expr_no_vector_kind:
3751 case annot_expr_vector_kind:
3752 case annot_expr_parallel_kind:
3753 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3754 if (TREE_CODE (type) != BOOLEAN_TYPE)
3755 TREE_TYPE (expr) = boolean_type_node;
3756 return expr;
3757 default:
3758 gcc_unreachable ();
3761 default:
3762 if (COMPARISON_CLASS_P (expr))
3764 /* There expressions always prduce boolean results. */
3765 if (TREE_CODE (type) != BOOLEAN_TYPE)
3766 TREE_TYPE (expr) = boolean_type_node;
3767 return expr;
3769 /* Other expressions that get here must have boolean values, but
3770 might need to be converted to the appropriate mode. */
3771 if (TREE_CODE (type) == BOOLEAN_TYPE)
3772 return expr;
3773 return fold_convert_loc (loc, boolean_type_node, expr);
3777 /* Given a conditional expression *EXPR_P without side effects, gimplify
3778 its operands. New statements are inserted to PRE_P. */
3780 static enum gimplify_status
3781 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3783 tree expr = *expr_p, cond;
3784 enum gimplify_status ret, tret;
3785 enum tree_code code;
3787 cond = gimple_boolify (COND_EXPR_COND (expr));
3789 /* We need to handle && and || specially, as their gimplification
3790 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3791 code = TREE_CODE (cond);
3792 if (code == TRUTH_ANDIF_EXPR)
3793 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3794 else if (code == TRUTH_ORIF_EXPR)
3795 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3796 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3797 COND_EXPR_COND (*expr_p) = cond;
3799 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3800 is_gimple_val, fb_rvalue);
3801 ret = MIN (ret, tret);
3802 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3803 is_gimple_val, fb_rvalue);
3805 return MIN (ret, tret);
3808 /* Return true if evaluating EXPR could trap.
3809 EXPR is GENERIC, while tree_could_trap_p can be called
3810 only on GIMPLE. */
3812 static bool
3813 generic_expr_could_trap_p (tree expr)
3815 unsigned i, n;
3817 if (!expr || is_gimple_val (expr))
3818 return false;
3820 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3821 return true;
3823 n = TREE_OPERAND_LENGTH (expr);
3824 for (i = 0; i < n; i++)
3825 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3826 return true;
3828 return false;
3831 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3832 into
3834 if (p) if (p)
3835 t1 = a; a;
3836 else or else
3837 t1 = b; b;
3840 The second form is used when *EXPR_P is of type void.
3842 PRE_P points to the list where side effects that must happen before
3843 *EXPR_P should be stored. */
3845 static enum gimplify_status
3846 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3848 tree expr = *expr_p;
3849 tree type = TREE_TYPE (expr);
3850 location_t loc = EXPR_LOCATION (expr);
3851 tree tmp, arm1, arm2;
3852 enum gimplify_status ret;
3853 tree label_true, label_false, label_cont;
3854 bool have_then_clause_p, have_else_clause_p;
3855 gcond *cond_stmt;
3856 enum tree_code pred_code;
3857 gimple_seq seq = NULL;
3859 /* If this COND_EXPR has a value, copy the values into a temporary within
3860 the arms. */
3861 if (!VOID_TYPE_P (type))
3863 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3864 tree result;
3866 /* If either an rvalue is ok or we do not require an lvalue, create the
3867 temporary. But we cannot do that if the type is addressable. */
3868 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3869 && !TREE_ADDRESSABLE (type))
3871 if (gimplify_ctxp->allow_rhs_cond_expr
3872 /* If either branch has side effects or could trap, it can't be
3873 evaluated unconditionally. */
3874 && !TREE_SIDE_EFFECTS (then_)
3875 && !generic_expr_could_trap_p (then_)
3876 && !TREE_SIDE_EFFECTS (else_)
3877 && !generic_expr_could_trap_p (else_))
3878 return gimplify_pure_cond_expr (expr_p, pre_p);
3880 tmp = create_tmp_var (type, "iftmp");
3881 result = tmp;
3884 /* Otherwise, only create and copy references to the values. */
3885 else
3887 type = build_pointer_type (type);
3889 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3890 then_ = build_fold_addr_expr_loc (loc, then_);
3892 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3893 else_ = build_fold_addr_expr_loc (loc, else_);
3895 expr
3896 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3898 tmp = create_tmp_var (type, "iftmp");
3899 result = build_simple_mem_ref_loc (loc, tmp);
3902 /* Build the new then clause, `tmp = then_;'. But don't build the
3903 assignment if the value is void; in C++ it can be if it's a throw. */
3904 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3905 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3907 /* Similarly, build the new else clause, `tmp = else_;'. */
3908 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3909 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3911 TREE_TYPE (expr) = void_type_node;
3912 recalculate_side_effects (expr);
3914 /* Move the COND_EXPR to the prequeue. */
3915 gimplify_stmt (&expr, pre_p);
3917 *expr_p = result;
3918 return GS_ALL_DONE;
3921 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3922 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3923 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3924 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3926 /* Make sure the condition has BOOLEAN_TYPE. */
3927 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3929 /* Break apart && and || conditions. */
3930 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3931 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3933 expr = shortcut_cond_expr (expr);
3935 if (expr != *expr_p)
3937 *expr_p = expr;
3939 /* We can't rely on gimplify_expr to re-gimplify the expanded
3940 form properly, as cleanups might cause the target labels to be
3941 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3942 set up a conditional context. */
3943 gimple_push_condition ();
3944 gimplify_stmt (expr_p, &seq);
3945 gimple_pop_condition (pre_p);
3946 gimple_seq_add_seq (pre_p, seq);
3948 return GS_ALL_DONE;
3952 /* Now do the normal gimplification. */
3954 /* Gimplify condition. */
3955 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3956 fb_rvalue);
3957 if (ret == GS_ERROR)
3958 return GS_ERROR;
3959 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3961 gimple_push_condition ();
3963 have_then_clause_p = have_else_clause_p = false;
3964 if (TREE_OPERAND (expr, 1) != NULL
3965 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3966 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3967 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3968 == current_function_decl)
3969 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3970 have different locations, otherwise we end up with incorrect
3971 location information on the branches. */
3972 && (optimize
3973 || !EXPR_HAS_LOCATION (expr)
3974 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3975 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3977 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3978 have_then_clause_p = true;
3980 else
3981 label_true = create_artificial_label (UNKNOWN_LOCATION);
3982 if (TREE_OPERAND (expr, 2) != NULL
3983 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3984 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3985 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3986 == current_function_decl)
3987 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3988 have different locations, otherwise we end up with incorrect
3989 location information on the branches. */
3990 && (optimize
3991 || !EXPR_HAS_LOCATION (expr)
3992 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3993 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3995 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3996 have_else_clause_p = true;
3998 else
3999 label_false = create_artificial_label (UNKNOWN_LOCATION);
4001 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4002 &arm2);
4003 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4004 label_false);
4005 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4006 gimplify_seq_add_stmt (&seq, cond_stmt);
4007 gimple_stmt_iterator gsi = gsi_last (seq);
4008 maybe_fold_stmt (&gsi);
4010 label_cont = NULL_TREE;
4011 if (!have_then_clause_p)
4013 /* For if (...) {} else { code; } put label_true after
4014 the else block. */
4015 if (TREE_OPERAND (expr, 1) == NULL_TREE
4016 && !have_else_clause_p
4017 && TREE_OPERAND (expr, 2) != NULL_TREE)
4018 label_cont = label_true;
4019 else
4021 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4022 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4023 /* For if (...) { code; } else {} or
4024 if (...) { code; } else goto label; or
4025 if (...) { code; return; } else { ... }
4026 label_cont isn't needed. */
4027 if (!have_else_clause_p
4028 && TREE_OPERAND (expr, 2) != NULL_TREE
4029 && gimple_seq_may_fallthru (seq))
4031 gimple *g;
4032 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4034 g = gimple_build_goto (label_cont);
4036 /* GIMPLE_COND's are very low level; they have embedded
4037 gotos. This particular embedded goto should not be marked
4038 with the location of the original COND_EXPR, as it would
4039 correspond to the COND_EXPR's condition, not the ELSE or the
4040 THEN arms. To avoid marking it with the wrong location, flag
4041 it as "no location". */
4042 gimple_set_do_not_emit_location (g);
4044 gimplify_seq_add_stmt (&seq, g);
4048 if (!have_else_clause_p)
4050 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4051 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4053 if (label_cont)
4054 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4056 gimple_pop_condition (pre_p);
4057 gimple_seq_add_seq (pre_p, seq);
4059 if (ret == GS_ERROR)
4060 ; /* Do nothing. */
4061 else if (have_then_clause_p || have_else_clause_p)
4062 ret = GS_ALL_DONE;
4063 else
4065 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4066 expr = TREE_OPERAND (expr, 0);
4067 gimplify_stmt (&expr, pre_p);
4070 *expr_p = NULL;
4071 return ret;
4074 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4075 to be marked addressable.
4077 We cannot rely on such an expression being directly markable if a temporary
4078 has been created by the gimplification. In this case, we create another
4079 temporary and initialize it with a copy, which will become a store after we
4080 mark it addressable. This can happen if the front-end passed us something
4081 that it could not mark addressable yet, like a Fortran pass-by-reference
4082 parameter (int) floatvar. */
4084 static void
4085 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4087 while (handled_component_p (*expr_p))
4088 expr_p = &TREE_OPERAND (*expr_p, 0);
4089 if (is_gimple_reg (*expr_p))
4091 /* Do not allow an SSA name as the temporary. */
4092 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4093 DECL_GIMPLE_REG_P (var) = 0;
4094 *expr_p = var;
4098 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4099 a call to __builtin_memcpy. */
4101 static enum gimplify_status
4102 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4103 gimple_seq *seq_p)
4105 tree t, to, to_ptr, from, from_ptr;
4106 gcall *gs;
4107 location_t loc = EXPR_LOCATION (*expr_p);
4109 to = TREE_OPERAND (*expr_p, 0);
4110 from = TREE_OPERAND (*expr_p, 1);
4112 /* Mark the RHS addressable. Beware that it may not be possible to do so
4113 directly if a temporary has been created by the gimplification. */
4114 prepare_gimple_addressable (&from, seq_p);
4116 mark_addressable (from);
4117 from_ptr = build_fold_addr_expr_loc (loc, from);
4118 gimplify_arg (&from_ptr, seq_p, loc);
4120 mark_addressable (to);
4121 to_ptr = build_fold_addr_expr_loc (loc, to);
4122 gimplify_arg (&to_ptr, seq_p, loc);
4124 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4126 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4128 if (want_value)
4130 /* tmp = memcpy() */
4131 t = create_tmp_var (TREE_TYPE (to_ptr));
4132 gimple_call_set_lhs (gs, t);
4133 gimplify_seq_add_stmt (seq_p, gs);
4135 *expr_p = build_simple_mem_ref (t);
4136 return GS_ALL_DONE;
4139 gimplify_seq_add_stmt (seq_p, gs);
4140 *expr_p = NULL;
4141 return GS_ALL_DONE;
4144 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4145 a call to __builtin_memset. In this case we know that the RHS is
4146 a CONSTRUCTOR with an empty element list. */
4148 static enum gimplify_status
4149 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4150 gimple_seq *seq_p)
4152 tree t, from, to, to_ptr;
4153 gcall *gs;
4154 location_t loc = EXPR_LOCATION (*expr_p);
4156 /* Assert our assumptions, to abort instead of producing wrong code
4157 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4158 not be immediately exposed. */
4159 from = TREE_OPERAND (*expr_p, 1);
4160 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4161 from = TREE_OPERAND (from, 0);
4163 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4164 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4166 /* Now proceed. */
4167 to = TREE_OPERAND (*expr_p, 0);
4169 to_ptr = build_fold_addr_expr_loc (loc, to);
4170 gimplify_arg (&to_ptr, seq_p, loc);
4171 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4173 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4175 if (want_value)
4177 /* tmp = memset() */
4178 t = create_tmp_var (TREE_TYPE (to_ptr));
4179 gimple_call_set_lhs (gs, t);
4180 gimplify_seq_add_stmt (seq_p, gs);
4182 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4183 return GS_ALL_DONE;
4186 gimplify_seq_add_stmt (seq_p, gs);
4187 *expr_p = NULL;
4188 return GS_ALL_DONE;
4191 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4192 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4193 assignment. Return non-null if we detect a potential overlap. */
4195 struct gimplify_init_ctor_preeval_data
4197 /* The base decl of the lhs object. May be NULL, in which case we
4198 have to assume the lhs is indirect. */
4199 tree lhs_base_decl;
4201 /* The alias set of the lhs object. */
4202 alias_set_type lhs_alias_set;
4205 static tree
4206 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4208 struct gimplify_init_ctor_preeval_data *data
4209 = (struct gimplify_init_ctor_preeval_data *) xdata;
4210 tree t = *tp;
4212 /* If we find the base object, obviously we have overlap. */
4213 if (data->lhs_base_decl == t)
4214 return t;
4216 /* If the constructor component is indirect, determine if we have a
4217 potential overlap with the lhs. The only bits of information we
4218 have to go on at this point are addressability and alias sets. */
4219 if ((INDIRECT_REF_P (t)
4220 || TREE_CODE (t) == MEM_REF)
4221 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4222 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4223 return t;
4225 /* If the constructor component is a call, determine if it can hide a
4226 potential overlap with the lhs through an INDIRECT_REF like above.
4227 ??? Ugh - this is completely broken. In fact this whole analysis
4228 doesn't look conservative. */
4229 if (TREE_CODE (t) == CALL_EXPR)
4231 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4233 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4234 if (POINTER_TYPE_P (TREE_VALUE (type))
4235 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4236 && alias_sets_conflict_p (data->lhs_alias_set,
4237 get_alias_set
4238 (TREE_TYPE (TREE_VALUE (type)))))
4239 return t;
4242 if (IS_TYPE_OR_DECL_P (t))
4243 *walk_subtrees = 0;
4244 return NULL;
4247 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4248 force values that overlap with the lhs (as described by *DATA)
4249 into temporaries. */
/* EXPR_P points at one constructor element value; PRE_P and POST_P are
   the gimplification sequences to emit into.  On gimplification error
   the element is replaced with NULL so callers can skip it.  */
4251 static void
4252 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4253 struct gimplify_init_ctor_preeval_data *data)
4255 enum gimplify_status one;
4257 /* If the value is constant, then there's nothing to pre-evaluate. */
4258 if (TREE_CONSTANT (*expr_p))
4260 /* Ensure it does not have side effects, it might contain a reference to
4261 the object we're initializing. */
4262 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4263 return;
4266 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4267 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4268 return;
4270 /* Recurse for nested constructors. */
4271 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4273 unsigned HOST_WIDE_INT ix;
4274 constructor_elt *ce;
4275 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4277 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4278 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4280 return;
4283 /* If this is a variable sized type, we must remember the size. */
4284 maybe_with_size_expr (expr_p);
4286 /* Gimplify the constructor element to something appropriate for the rhs
4287 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4288 the gimplifier will consider this a store to memory. Doing this
4289 gimplification now means that we won't have to deal with complicated
4290 language-specific trees, nor trees like SAVE_EXPR that can induce
4291 exponential search behavior. */
4292 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4293 if (one == GS_ERROR)
/* Signal the failed element to the caller by nulling it out.  */
4295 *expr_p = NULL;
4296 return;
4299 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4300 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4301 always be true for all scalars, since is_gimple_mem_rhs insists on a
4302 temporary variable for them. */
4303 if (DECL_P (*expr_p))
4304 return;
4306 /* If this is of variable size, we have no choice but to assume it doesn't
4307 overlap since we can't make a temporary for it. */
4308 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4309 return;
4311 /* Otherwise, we must search for overlap ... */
4312 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4313 return;
4315 /* ... and if found, force the value into a temporary. */
4316 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4319 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4320 a RANGE_EXPR in a CONSTRUCTOR for an array.
4322 var = lower;
4323 loop_entry:
4324 object[var] = value;
4325 if (var == upper)
4326 goto loop_exit;
4327 var = var + 1;
4328 goto loop_entry;
4329 loop_exit:
4331 We increment var _after_ the loop exit check because we might otherwise
4332 fail if upper == TYPE_MAX_VALUE (type for upper).
4334 Note that we never have to deal with SAVE_EXPRs here, because this has
4335 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4337 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4338 gimple_seq *, bool);
/* OBJECT is the array being initialized, LOWER/UPPER the inclusive
   RANGE_EXPR bounds, VALUE the (possibly CONSTRUCTOR) value to store,
   ARRAY_ELT_TYPE the element type, PRE_P the statement sequence to emit
   into, and CLEARED whether the whole object was zeroed first.  */
4340 static void
4341 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4342 tree value, tree array_elt_type,
4343 gimple_seq *pre_p, bool cleared)
4345 tree loop_entry_label, loop_exit_label, fall_thru_label;
4346 tree var, var_type, cref, tmp;
4348 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4349 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4350 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4352 /* Create and initialize the index variable. */
4353 var_type = TREE_TYPE (upper);
4354 var = create_tmp_var (var_type);
4355 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4357 /* Add the loop entry label. */
4358 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4360 /* Build the reference. */
4361 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4362 var, NULL_TREE, NULL_TREE);
4364 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4365 the store. Otherwise just assign value to the reference. */
4367 if (TREE_CODE (value) == CONSTRUCTOR)
4368 /* NB we might have to call ourself recursively through
4369 gimplify_init_ctor_eval if the value is a constructor. */
4370 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4371 pre_p, cleared);
4372 else
4373 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4375 /* We exit the loop when the index var is equal to the upper bound. */
4376 gimplify_seq_add_stmt (pre_p,
4377 gimple_build_cond (EQ_EXPR, var, upper,
4378 loop_exit_label, fall_thru_label));
4380 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4382 /* Otherwise, increment the index var... */
4383 tmp = build2 (PLUS_EXPR, var_type, var,
4384 fold_convert (var_type, integer_one_node));
4385 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4387 /* ...and jump back to the loop entry. */
4388 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4390 /* Add the loop exit label. */
4391 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4394 /* Return true if FDECL is accessing a field that is zero sized. */
4396 static bool
4397 zero_sized_field_decl (const_tree fdecl)
4399 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4400 && integer_zerop (DECL_SIZE (fdecl)))
4401 return true;
4402 return false;
4405 /* Return true if TYPE is zero sized. */
4407 static bool
4408 zero_sized_type (const_tree type)
4410 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4411 && integer_zerop (TYPE_SIZE (type)))
4412 return true;
4413 return false;
4416 /* A subroutine of gimplify_init_constructor. Generate individual
4417 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4418 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4419 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4420 zeroed first. */
4422 static void
4423 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4424 gimple_seq *pre_p, bool cleared)
4426 tree array_elt_type = NULL;
4427 unsigned HOST_WIDE_INT ix;
4428 tree purpose, value;
4430 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4431 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4433 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4435 tree cref;
4437 /* NULL values are created above for gimplification errors. */
4438 if (value == NULL)
4439 continue;
/* If the whole object was already zeroed, zero elements need no
   explicit store.  */
4441 if (cleared && initializer_zerop (value))
4442 continue;
4444 /* ??? Here's to hoping the front end fills in all of the indices,
4445 so we don't have to figure out what's missing ourselves. */
4446 gcc_assert (purpose);
4448 /* Skip zero-sized fields, unless value has side-effects. This can
4449 happen with calls to functions returning a zero-sized type, which
4450 we shouldn't discard. As a number of downstream passes don't
4451 expect sets of zero-sized fields, we rely on the gimplification of
4452 the MODIFY_EXPR we make below to drop the assignment statement. */
4453 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4454 continue;
4456 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4457 whole range. */
4458 if (TREE_CODE (purpose) == RANGE_EXPR)
4460 tree lower = TREE_OPERAND (purpose, 0);
4461 tree upper = TREE_OPERAND (purpose, 1);
4463 /* If the lower bound is equal to upper, just treat it as if
4464 upper was the index. */
4465 if (simple_cst_equal (lower, upper))
4466 purpose = upper;
4467 else
4469 gimplify_init_ctor_eval_range (object, lower, upper, value,
4470 array_elt_type, pre_p, cleared);
4471 continue;
4475 if (array_elt_type)
4477 /* Do not use bitsizetype for ARRAY_REF indices. */
4478 if (TYPE_DOMAIN (TREE_TYPE (object)))
4479 purpose
4480 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4481 purpose);
4482 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4483 purpose, NULL_TREE, NULL_TREE);
4485 else
4487 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4488 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4489 unshare_expr (object), purpose, NULL_TREE);
/* Recurse into nested constructors, except for vectors, which stay
   CONSTRUCTORs throughout gimplification.  */
4492 if (TREE_CODE (value) == CONSTRUCTOR
4493 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4494 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4495 pre_p, cleared);
4496 else
4498 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4499 gimplify_and_add (init, pre_p);
/* The INIT_EXPR node served only as gimplification input.  */
4500 ggc_free (init);
4505 /* Return the appropriate RHS predicate for this LHS. */
4507 gimple_predicate
4508 rhs_predicate_for (tree lhs)
4510 if (is_gimple_reg (lhs))
4511 return is_gimple_reg_rhs_or_call;
4512 else
4513 return is_gimple_mem_rhs_or_call;
4516 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4517 before the LHS has been gimplified. */
4519 static gimple_predicate
4520 initial_rhs_predicate_for (tree lhs)
4522 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4523 return is_gimple_reg_rhs_or_call;
4524 else
4525 return is_gimple_mem_rhs_or_call;
4528 /* Gimplify a C99 compound literal expression. This just means adding
4529 the DECL_EXPR before the current statement and using its anonymous
4530 decl instead. */
/* EXPR_P points at the COMPOUND_LITERAL_EXPR; GIMPLE_TEST_F and
   FALLBACK come from the enclosing gimplify_expr call.  Always returns
   GS_OK, with *EXPR_P replaced by either the literal's initializer or
   its anonymous decl.  */
4532 static enum gimplify_status
4533 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4534 bool (*gimple_test_f) (tree),
4535 fallback_t fallback)
4537 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4538 tree decl = DECL_EXPR_DECL (decl_s);
4539 tree init = DECL_INITIAL (decl);
4540 /* Mark the decl as addressable if the compound literal
4541 expression is addressable now, otherwise it is marked too late
4542 after we gimplify the initialization expression. */
4543 if (TREE_ADDRESSABLE (*expr_p))
4544 TREE_ADDRESSABLE (decl) = 1;
4545 /* Otherwise, if we don't need an lvalue and have a literal directly
4546 substitute it. Check if it matches the gimple predicate, as
4547 otherwise we'd generate a new temporary, and we can as well just
4548 use the decl we already have. */
4549 else if (!TREE_ADDRESSABLE (decl)
4550 && init
4551 && (fallback & fb_lvalue) == 0
4552 && gimple_test_f (init))
4554 *expr_p = init;
4555 return GS_OK;
4558 /* Preliminarily mark non-addressed complex variables as eligible
4559 for promotion to gimple registers. We'll transform their uses
4560 as we find them. */
4561 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4562 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4563 && !TREE_THIS_VOLATILE (decl)
4564 && !needs_to_live_in_memory (decl))
4565 DECL_GIMPLE_REG_P (decl) = 1;
4567 /* If the decl is not addressable, then it is being used in some
4568 expression or on the right hand side of a statement, and it can
4569 be put into a readonly data section. */
4570 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4571 TREE_READONLY (decl) = 1;
4573 /* This decl isn't mentioned in the enclosing block, so add it to the
4574 list of temps. FIXME it seems a bit of a kludge to say that
4575 anonymous artificial vars aren't pushed, but everything else is. */
4576 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4577 gimple_add_tmp_var (decl);
4579 gimplify_and_add (decl_s, pre_p);
4580 *expr_p = decl;
4581 return GS_OK;
4584 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4585 return a new CONSTRUCTOR if something changed. */
/* Recursively replaces each non-addressable compound literal element
   with its initializer.  ORIG_CTOR is returned unchanged when no
   element needed rewriting.  */
4587 static tree
4588 optimize_compound_literals_in_ctor (tree orig_ctor)
4590 tree ctor = orig_ctor;
4591 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4592 unsigned int idx, num = vec_safe_length (elts);
4594 for (idx = 0; idx < num; idx++)
4596 tree value = (*elts)[idx].value;
4597 tree newval = value;
4598 if (TREE_CODE (value) == CONSTRUCTOR)
4599 newval = optimize_compound_literals_in_ctor (value);
4600 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4602 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4603 tree decl = DECL_EXPR_DECL (decl_s);
4604 tree init = DECL_INITIAL (decl);
4606 if (!TREE_ADDRESSABLE (value)
4607 && !TREE_ADDRESSABLE (decl)
4608 && init
4609 && TREE_CODE (init) == CONSTRUCTOR)
4610 newval = optimize_compound_literals_in_ctor (init);
4612 if (newval == value)
4613 continue;
/* Copy-on-write: clone the CONSTRUCTOR and its element vector only
   the first time an element actually changes.  */
4615 if (ctor == orig_ctor)
4617 ctor = copy_node (orig_ctor);
4618 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4619 elts = CONSTRUCTOR_ELTS (ctor);
4621 (*elts)[idx].value = newval;
4623 return ctor;
4626 /* A subroutine of gimplify_modify_expr. Break out elements of a
4627 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4629 Note that we still need to clear any elements that don't have explicit
4630 initializers, so if not all elements are initialized we keep the
4631 original MODIFY_EXPR, we just remove all of the constructor elements.
4633 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4634 GS_ERROR if we would have to create a temporary when gimplifying
4635 this constructor. Otherwise, return GS_OK.
4637 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4639 static enum gimplify_status
4640 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4641 bool want_value, bool notify_temp_creation)
4643 tree object, ctor, type;
4644 enum gimplify_status ret;
4645 vec<constructor_elt, va_gc> *elts;
4647 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
/* Gimplify the LHS, unless this call is only probing whether a
   temporary would be needed.  */
4649 if (!notify_temp_creation)
4651 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4652 is_gimple_lvalue, fb_lvalue);
4653 if (ret == GS_ERROR)
4654 return ret;
4657 object = TREE_OPERAND (*expr_p, 0);
4658 ctor = TREE_OPERAND (*expr_p, 1)
4659 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4660 type = TREE_TYPE (ctor);
4661 elts = CONSTRUCTOR_ELTS (ctor);
4662 ret = GS_ALL_DONE;
/* Dispatch on the type of object being initialized.  */
4664 switch (TREE_CODE (type))
4666 case RECORD_TYPE:
4667 case UNION_TYPE:
4668 case QUAL_UNION_TYPE:
4669 case ARRAY_TYPE:
4671 struct gimplify_init_ctor_preeval_data preeval_data;
4672 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4673 bool cleared, complete_p, valid_const_initializer;
4675 /* Aggregate types must lower constructors to initialization of
4676 individual elements. The exception is that a CONSTRUCTOR node
4677 with no elements indicates zero-initialization of the whole. */
4678 if (vec_safe_is_empty (elts))
4680 if (notify_temp_creation)
4681 return GS_OK;
4682 break;
4685 /* Fetch information about the constructor to direct later processing.
4686 We might want to make static versions of it in various cases, and
4687 can only do so if it known to be a valid constant initializer. */
4688 valid_const_initializer
4689 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4690 &num_ctor_elements, &complete_p);
4692 /* If a const aggregate variable is being initialized, then it
4693 should never be a lose to promote the variable to be static. */
4694 if (valid_const_initializer
4695 && num_nonzero_elements > 1
4696 && TREE_READONLY (object)
4697 && VAR_P (object)
4698 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4700 if (notify_temp_creation)
4701 return GS_ERROR;
4702 DECL_INITIAL (object) = ctor;
4703 TREE_STATIC (object) = 1;
4704 if (!DECL_NAME (object))
4705 DECL_NAME (object) = create_tmp_var_name ("C");
4706 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4708 /* ??? C++ doesn't automatically append a .<number> to the
4709 assembler name, and even when it does, it looks at FE private
4710 data structures to figure out what that number should be,
4711 which are not set for this variable. I suppose this is
4712 important for local statics for inline functions, which aren't
4713 "local" in the object file sense. So in order to get a unique
4714 TU-local symbol, we must invoke the lhd version now. */
4715 lhd_set_decl_assembler_name (object);
4717 *expr_p = NULL_TREE;
4718 break;
4721 /* If there are "lots" of initialized elements, even discounting
4722 those that are not address constants (and thus *must* be
4723 computed at runtime), then partition the constructor into
4724 constant and non-constant parts. Block copy the constant
4725 parts in, then generate code for the non-constant parts. */
4726 /* TODO. There's code in cp/typeck.c to do this. */
4728 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4729 /* store_constructor will ignore the clearing of variable-sized
4730 objects. Initializers for such objects must explicitly set
4731 every field that needs to be set. */
4732 cleared = false;
4733 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4734 /* If the constructor isn't complete, clear the whole object
4735 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4737 ??? This ought not to be needed. For any element not present
4738 in the initializer, we should simply set them to zero. Except
4739 we'd need to *find* the elements that are not present, and that
4740 requires trickery to avoid quadratic compile-time behavior in
4741 large cases or excessive memory use in small cases. */
4742 cleared = true;
4743 else if (num_ctor_elements - num_nonzero_elements
4744 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4745 && num_nonzero_elements < num_ctor_elements / 4)
4746 /* If there are "lots" of zeros, it's more efficient to clear
4747 the memory and then set the nonzero elements. */
4748 cleared = true;
4749 else
4750 cleared = false;
4752 /* If there are "lots" of initialized elements, and all of them
4753 are valid address constants, then the entire initializer can
4754 be dropped to memory, and then memcpy'd out. Don't do this
4755 for sparse arrays, though, as it's more efficient to follow
4756 the standard CONSTRUCTOR behavior of memset followed by
4757 individual element initialization. Also don't do this for small
4758 all-zero initializers (which aren't big enough to merit
4759 clearing), and don't try to make bitwise copies of
4760 TREE_ADDRESSABLE types.
4762 We cannot apply such transformation when compiling chkp static
4763 initializer because creation of initializer image in the memory
4764 will require static initialization of bounds for it. It should
4765 result in another gimplification of similar initializer and we
4766 may fall into infinite loop. */
4767 if (valid_const_initializer
4768 && !(cleared || num_nonzero_elements == 0)
4769 && !TREE_ADDRESSABLE (type)
4770 && (!current_function_decl
4771 || !lookup_attribute ("chkp ctor",
4772 DECL_ATTRIBUTES (current_function_decl))))
4774 HOST_WIDE_INT size = int_size_in_bytes (type);
4775 unsigned int align;
4777 /* ??? We can still get unbounded array types, at least
4778 from the C++ front end. This seems wrong, but attempt
4779 to work around it for now. */
4780 if (size < 0)
4782 size = int_size_in_bytes (TREE_TYPE (object));
4783 if (size >= 0)
4784 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4787 /* Find the maximum alignment we can assume for the object. */
4788 /* ??? Make use of DECL_OFFSET_ALIGN. */
4789 if (DECL_P (object))
4790 align = DECL_ALIGN (object);
4791 else
4792 align = TYPE_ALIGN (type);
4794 /* Do a block move either if the size is so small as to make
4795 each individual move a sub-unit move on average, or if it
4796 is so large as to make individual moves inefficient. */
4797 if (size > 0
4798 && num_nonzero_elements > 1
4799 && (size < num_nonzero_elements
4800 || !can_move_by_pieces (size, align)))
4802 if (notify_temp_creation)
4803 return GS_ERROR;
4805 walk_tree (&ctor, force_labels_r, NULL, NULL);
4806 ctor = tree_output_constant_def (ctor);
4807 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4808 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4809 TREE_OPERAND (*expr_p, 1) = ctor;
4811 /* This is no longer an assignment of a CONSTRUCTOR, but
4812 we still may have processing to do on the LHS. So
4813 pretend we didn't do anything here to let that happen. */
4814 return GS_UNHANDLED;
4818 /* If the target is volatile, we have non-zero elements and more than
4819 one field to assign, initialize the target from a temporary. */
4820 if (TREE_THIS_VOLATILE (object)
4821 && !TREE_ADDRESSABLE (type)
4822 && num_nonzero_elements > 0
4823 && vec_safe_length (elts) > 1)
4825 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4826 TREE_OPERAND (*expr_p, 0) = temp;
4827 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4828 *expr_p,
4829 build2 (MODIFY_EXPR, void_type_node,
4830 object, temp));
4831 return GS_OK;
4834 if (notify_temp_creation)
4835 return GS_OK;
4837 /* If there are nonzero elements and if needed, pre-evaluate to capture
4838 elements overlapping with the lhs into temporaries. We must do this
4839 before clearing to fetch the values before they are zeroed-out. */
4840 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4842 preeval_data.lhs_base_decl = get_base_address (object);
4843 if (!DECL_P (preeval_data.lhs_base_decl))
4844 preeval_data.lhs_base_decl = NULL;
4845 preeval_data.lhs_alias_set = get_alias_set (object);
4847 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4848 pre_p, post_p, &preeval_data);
/* Snapshot this flag now: the clearing path below zaps the element
   list and resets TREE_SIDE_EFFECTS on the CONSTRUCTOR.  */
4851 bool ctor_has_side_effects_p
4852 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4854 if (cleared)
4856 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4857 Note that we still have to gimplify, in order to handle the
4858 case of variable sized types. Avoid shared tree structures. */
4859 CONSTRUCTOR_ELTS (ctor) = NULL;
4860 TREE_SIDE_EFFECTS (ctor) = 0;
4861 object = unshare_expr (object);
4862 gimplify_stmt (expr_p, pre_p);
4865 /* If we have not block cleared the object, or if there are nonzero
4866 elements in the constructor, or if the constructor has side effects,
4867 add assignments to the individual scalar fields of the object. */
4868 if (!cleared
4869 || num_nonzero_elements > 0
4870 || ctor_has_side_effects_p)
4871 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4873 *expr_p = NULL_TREE;
4875 break;
4877 case COMPLEX_TYPE:
4879 tree r, i;
4881 if (notify_temp_creation)
4882 return GS_OK;
4884 /* Extract the real and imaginary parts out of the ctor. */
4885 gcc_assert (elts->length () == 2);
4886 r = (*elts)[0].value;
4887 i = (*elts)[1].value;
4888 if (r == NULL || i == NULL)
4890 tree zero = build_zero_cst (TREE_TYPE (type));
4891 if (r == NULL)
4892 r = zero;
4893 if (i == NULL)
4894 i = zero;
4897 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4898 represent creation of a complex value. */
4899 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4901 ctor = build_complex (type, r, i);
4902 TREE_OPERAND (*expr_p, 1) = ctor;
4904 else
4906 ctor = build2 (COMPLEX_EXPR, type, r, i);
4907 TREE_OPERAND (*expr_p, 1) = ctor;
4908 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4909 pre_p,
4910 post_p,
4911 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4912 fb_rvalue);
4915 break;
4917 case VECTOR_TYPE:
4919 unsigned HOST_WIDE_INT ix;
4920 constructor_elt *ce;
4922 if (notify_temp_creation)
4923 return GS_OK;
4925 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4926 if (TREE_CONSTANT (ctor))
4928 bool constant_p = true;
4929 tree value;
4931 /* Even when ctor is constant, it might contain non-*_CST
4932 elements, such as addresses or trapping values like
4933 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4934 in VECTOR_CST nodes. */
4935 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4936 if (!CONSTANT_CLASS_P (value))
4938 constant_p = false;
4939 break;
4942 if (constant_p)
4944 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4945 break;
4948 TREE_CONSTANT (ctor) = 0;
4951 /* Vector types use CONSTRUCTOR all the way through gimple
4952 compilation as a general initializer. */
4953 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4955 enum gimplify_status tret;
4956 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4957 fb_rvalue);
4958 if (tret == GS_ERROR)
4959 ret = GS_ERROR;
4960 else if (TREE_STATIC (ctor)
4961 && !initializer_constant_valid_p (ce->value,
4962 TREE_TYPE (ce->value)))
4963 TREE_STATIC (ctor) = 0;
4965 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4966 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4968 break;
4970 default:
4971 /* So how did we get a CONSTRUCTOR for a scalar type? */
4972 gcc_unreachable ();
4975 if (ret == GS_ERROR)
4976 return GS_ERROR;
4977 /* If we have gimplified both sides of the initializer but have
4978 not emitted an assignment, do so now. */
4979 if (*expr_p)
4981 tree lhs = TREE_OPERAND (*expr_p, 0);
4982 tree rhs = TREE_OPERAND (*expr_p, 1);
4983 if (want_value && object == lhs)
4984 lhs = unshare_expr (lhs);
4985 gassign *init = gimple_build_assign (lhs, rhs);
4986 gimplify_seq_add_stmt (pre_p, init);
4988 if (want_value)
4990 *expr_p = object;
4991 return GS_OK;
4993 else
4995 *expr_p = NULL;
4996 return GS_ALL_DONE;
5000 /* Given a pointer value OP0, return a simplified version of an
5001 indirection through OP0, or NULL_TREE if no simplification is
5002 possible. This may only be applied to a rhs of an expression.
5003 Note that the resulting type may be different from the type pointed
5004 to in the sense that it is still compatible from the langhooks
5005 point of view. */
5007 static tree
5008 gimple_fold_indirect_ref_rhs (tree t)
/* Currently a plain delegation; the wrapper exists to document the
   RHS-only restriction stated above.  */
5010 return gimple_fold_indirect_ref (t);
5013 /* Subroutine of gimplify_modify_expr to do simplifications of
5014 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5015 something changes. */
/* EXPR_P points at the MODIFY/INIT_EXPR, FROM_P and TO_P at its RHS and
   LHS operands; WANT_VALUE says whether the caller uses the value of
   the assignment.  */
5017 static enum gimplify_status
5018 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5019 gimple_seq *pre_p, gimple_seq *post_p,
5020 bool want_value)
5022 enum gimplify_status ret = GS_UNHANDLED;
5023 bool changed;
/* Fixed point: repeat until a pass over the RHS changes nothing.  */
5027 changed = false;
5028 switch (TREE_CODE (*from_p))
5030 case VAR_DECL:
5031 /* If we're assigning from a read-only variable initialized with
5032 a constructor, do the direct assignment from the constructor,
5033 but only if neither source nor target are volatile since this
5034 latter assignment might end up being done on a per-field basis. */
5035 if (DECL_INITIAL (*from_p)
5036 && TREE_READONLY (*from_p)
5037 && !TREE_THIS_VOLATILE (*from_p)
5038 && !TREE_THIS_VOLATILE (*to_p)
5039 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5041 tree old_from = *from_p;
5042 enum gimplify_status subret;
5044 /* Move the constructor into the RHS. */
5045 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5047 /* Let's see if gimplify_init_constructor will need to put
5048 it in memory. */
5049 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5050 false, true);
5051 if (subret == GS_ERROR)
5053 /* If so, revert the change. */
5054 *from_p = old_from;
5056 else
5058 ret = GS_OK;
5059 changed = true;
5062 break;
5063 case INDIRECT_REF:
5065 /* If we have code like
5067 *(const A*)(A*)&x
5069 where the type of "x" is a (possibly cv-qualified variant
5070 of "A"), treat the entire expression as identical to "x".
5071 This kind of code arises in C++ when an object is bound
5072 to a const reference, and if "x" is a TARGET_EXPR we want
5073 to take advantage of the optimization below. */
5074 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5075 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5076 if (t)
/* Preserve volatility of the original reference on the folded
   replacement.  */
5078 if (TREE_THIS_VOLATILE (t) != volatile_p)
5080 if (DECL_P (t))
5081 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5082 build_fold_addr_expr (t));
5083 if (REFERENCE_CLASS_P (t))
5084 TREE_THIS_VOLATILE (t) = volatile_p;
5086 *from_p = t;
5087 ret = GS_OK;
5088 changed = true;
5090 break;
5093 case TARGET_EXPR:
5095 /* If we are initializing something from a TARGET_EXPR, strip the
5096 TARGET_EXPR and initialize it directly, if possible. This can't
5097 be done if the initializer is void, since that implies that the
5098 temporary is set in some non-trivial way.
5100 ??? What about code that pulls out the temp and uses it
5101 elsewhere? I think that such code never uses the TARGET_EXPR as
5102 an initializer. If I'm wrong, we'll die because the temp won't
5103 have any RTL. In that case, I guess we'll need to replace
5104 references somehow. */
5105 tree init = TARGET_EXPR_INITIAL (*from_p);
5107 if (init
5108 && !VOID_TYPE_P (TREE_TYPE (init)))
5110 *from_p = init;
5111 ret = GS_OK;
5112 changed = true;
5115 break;
5117 case COMPOUND_EXPR:
5118 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5119 caught. */
5120 gimplify_compound_expr (from_p, pre_p, true);
5121 ret = GS_OK;
5122 changed = true;
5123 break;
5125 case CONSTRUCTOR:
5126 /* If we already made some changes, let the front end have a
5127 crack at this before we break it down. */
5128 if (ret != GS_UNHANDLED)
5129 break;
5130 /* If we're initializing from a CONSTRUCTOR, break this into
5131 individual MODIFY_EXPRs. */
5132 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5133 false);
5135 case COND_EXPR:
5136 /* If we're assigning to a non-register type, push the assignment
5137 down into the branches. This is mandatory for ADDRESSABLE types,
5138 since we cannot generate temporaries for such, but it saves a
5139 copy in other cases as well. */
5140 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5142 /* This code should mirror the code in gimplify_cond_expr. */
5143 enum tree_code code = TREE_CODE (*expr_p);
5144 tree cond = *from_p;
5145 tree result = *to_p;
5147 ret = gimplify_expr (&result, pre_p, post_p,
5148 is_gimple_lvalue, fb_lvalue);
5149 if (ret != GS_ERROR)
5150 ret = GS_OK;
5152 /* If we are going to write RESULT more than once, clear
5153 TREE_READONLY flag, otherwise we might incorrectly promote
5154 the variable to static const and initialize it at compile
5155 time in one of the branches. */
5156 if (VAR_P (result)
5157 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5158 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5159 TREE_READONLY (result) = 0;
5160 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5161 TREE_OPERAND (cond, 1)
5162 = build2 (code, void_type_node, result,
5163 TREE_OPERAND (cond, 1));
5164 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5165 TREE_OPERAND (cond, 2)
5166 = build2 (code, void_type_node, unshare_expr (result),
5167 TREE_OPERAND (cond, 2));
5169 TREE_TYPE (cond) = void_type_node;
5170 recalculate_side_effects (cond);
5172 if (want_value)
5174 gimplify_and_add (cond, pre_p);
5175 *expr_p = unshare_expr (result);
5177 else
5178 *expr_p = cond;
5179 return ret;
5181 break;
5183 case CALL_EXPR:
5184 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5185 return slot so that we don't generate a temporary. */
5186 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5187 && aggregate_value_p (*from_p, *from_p))
5189 bool use_target;
5191 if (!(rhs_predicate_for (*to_p))(*from_p))
5192 /* If we need a temporary, *to_p isn't accurate. */
5193 use_target = false;
5194 /* It's OK to use the return slot directly unless it's an NRV. */
5195 else if (TREE_CODE (*to_p) == RESULT_DECL
5196 && DECL_NAME (*to_p) == NULL_TREE
5197 && needs_to_live_in_memory (*to_p))
5198 use_target = true;
5199 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5200 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5201 /* Don't force regs into memory. */
5202 use_target = false;
5203 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5204 /* It's OK to use the target directly if it's being
5205 initialized. */
5206 use_target = true;
5207 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5208 != INTEGER_CST)
5209 /* Always use the target and thus RSO for variable-sized types.
5210 GIMPLE cannot deal with a variable-sized assignment
5211 embedded in a call statement. */
5212 use_target = true;
5213 else if (TREE_CODE (*to_p) != SSA_NAME
5214 && (!is_gimple_variable (*to_p)
5215 || needs_to_live_in_memory (*to_p)))
5216 /* Don't use the original target if it's already addressable;
5217 if its address escapes, and the called function uses the
5218 NRV optimization, a conforming program could see *to_p
5219 change before the called function returns; see c++/19317.
5220 When optimizing, the return_slot pass marks more functions
5221 as safe after we have escape info. */
5222 use_target = false;
5223 else
5224 use_target = true;
5226 if (use_target)
5228 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5229 mark_addressable (*to_p);
5232 break;
5234 case WITH_SIZE_EXPR:
5235 /* Likewise for calls that return an aggregate of non-constant size,
5236 since we would not be able to generate a temporary at all. */
5237 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5239 *from_p = TREE_OPERAND (*from_p, 0);
5240 /* We don't change ret in this case because the
5241 WITH_SIZE_EXPR might have been added in
5242 gimplify_modify_expr, so returning GS_OK would lead to an
5243 infinite loop. */
5244 changed = true;
5246 break;
5248 /* If we're initializing from a container, push the initialization
5249 inside it. */
5250 case CLEANUP_POINT_EXPR:
5251 case BIND_EXPR:
5252 case STATEMENT_LIST:
5254 tree wrap = *from_p;
5255 tree t;
5257 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5258 fb_lvalue);
5259 if (ret != GS_ERROR)
5260 ret = GS_OK;
5262 t = voidify_wrapper_expr (wrap, *expr_p);
5263 gcc_assert (t == *expr_p);
5265 if (want_value)
5267 gimplify_and_add (wrap, pre_p);
5268 *expr_p = unshare_expr (*to_p);
5270 else
5271 *expr_p = wrap;
5272 return GS_OK;
5275 case COMPOUND_LITERAL_EXPR:
5277 tree complit = TREE_OPERAND (*expr_p, 1);
5278 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5279 tree decl = DECL_EXPR_DECL (decl_s);
5280 tree init = DECL_INITIAL (decl);
5282 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5283 into struct T x = { 0, 1, 2 } if the address of the
5284 compound literal has never been taken. */
5285 if (!TREE_ADDRESSABLE (complit)
5286 && !TREE_ADDRESSABLE (decl)
5287 && init)
5289 *expr_p = copy_node (*expr_p)
5290 ; TREE_OPERAND (*expr_p, 1) = init;
5291 return GS_OK;
5295 default:
5296 break;
5299 while (changed);
5301 return ret;
5305 /* Return true if T looks like a valid GIMPLE statement. */
5307 static bool
5308 is_gimple_stmt (tree t)
5310 const enum tree_code code = TREE_CODE (t);
5312 switch (code)
5314 case NOP_EXPR:
5315 /* The only valid NOP_EXPR is the empty statement. */
5316 return IS_EMPTY_STMT (t);
5318 case BIND_EXPR:
5319 case COND_EXPR:
5320 /* These are only valid if they're void. */
5321 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5323 case SWITCH_EXPR:
5324 case GOTO_EXPR:
5325 case RETURN_EXPR:
5326 case LABEL_EXPR:
5327 case CASE_LABEL_EXPR:
5328 case TRY_CATCH_EXPR:
5329 case TRY_FINALLY_EXPR:
5330 case EH_FILTER_EXPR:
5331 case CATCH_EXPR:
5332 case ASM_EXPR:
5333 case STATEMENT_LIST:
5334 case OACC_PARALLEL:
5335 case OACC_KERNELS:
5336 case OACC_DATA:
5337 case OACC_HOST_DATA:
5338 case OACC_DECLARE:
5339 case OACC_UPDATE:
5340 case OACC_ENTER_DATA:
5341 case OACC_EXIT_DATA:
5342 case OACC_CACHE:
5343 case OMP_PARALLEL:
5344 case OMP_FOR:
5345 case OMP_SIMD:
5346 case CILK_SIMD:
5347 case OMP_DISTRIBUTE:
5348 case OACC_LOOP:
5349 case OMP_SECTIONS:
5350 case OMP_SECTION:
5351 case OMP_SINGLE:
5352 case OMP_MASTER:
5353 case OMP_TASKGROUP:
5354 case OMP_ORDERED:
5355 case OMP_CRITICAL:
5356 case OMP_TASK:
5357 case OMP_TARGET:
5358 case OMP_TARGET_DATA:
5359 case OMP_TARGET_UPDATE:
5360 case OMP_TARGET_ENTER_DATA:
5361 case OMP_TARGET_EXIT_DATA:
5362 case OMP_TASKLOOP:
5363 case OMP_TEAMS:
5364 /* These are always void. */
5365 return true;
5367 case CALL_EXPR:
5368 case MODIFY_EXPR:
5369 case PREDICT_EXPR:
5370 /* These are valid regardless of their type. */
5371 return true;
5373 default:
5374 return false;
5379 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5380 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5381 DECL_GIMPLE_REG_P set.
5383 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5384 other, unmodified part of the complex object just before the total store.
5385 As a consequence, if the object is still uninitialized, an undefined value
5386 will be loaded into a register, which may result in a spurious exception
5387 if the register is floating-point and the value happens to be a signaling
5388 NaN for example. Then the fully-fledged complex operations lowering pass
5389 followed by a DCE pass are necessary in order to fix things up. */
5391 static enum gimplify_status
5392 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5393 bool want_value)
5395 enum tree_code code, ocode;
5396 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5398 lhs = TREE_OPERAND (*expr_p, 0);
5399 rhs = TREE_OPERAND (*expr_p, 1);
5400 code = TREE_CODE (lhs);
5401 lhs = TREE_OPERAND (lhs, 0);
5403 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5404 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5405 TREE_NO_WARNING (other) = 1;
5406 other = get_formal_tmp_var (other, pre_p);
5408 realpart = code == REALPART_EXPR ? rhs : other;
5409 imagpart = code == REALPART_EXPR ? other : rhs;
5411 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5412 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5413 else
5414 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5416 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5417 *expr_p = (want_value) ? rhs : NULL_TREE;
5419 return GS_ALL_DONE;
5422 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5424 modify_expr
5425 : varname '=' rhs
5426 | '*' ID '=' rhs
5428 PRE_P points to the list where side effects that must happen before
5429 *EXPR_P should be stored.
5431 POST_P points to the list where side effects that must happen after
5432 *EXPR_P should be stored.
5434 WANT_VALUE is nonzero iff we want to use the value of this expression
5435 in another expression. */
5437 static enum gimplify_status
5438 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5439 bool want_value)
5441 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5442 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5443 enum gimplify_status ret = GS_UNHANDLED;
5444 gimple *assign;
5445 location_t loc = EXPR_LOCATION (*expr_p);
5446 gimple_stmt_iterator gsi;
5448 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5449 || TREE_CODE (*expr_p) == INIT_EXPR);
5451 /* Trying to simplify a clobber using normal logic doesn't work,
5452 so handle it here. */
5453 if (TREE_CLOBBER_P (*from_p))
5455 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5456 if (ret == GS_ERROR)
5457 return ret;
5458 gcc_assert (!want_value
5459 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5460 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5461 *expr_p = NULL;
5462 return GS_ALL_DONE;
5465 /* Insert pointer conversions required by the middle-end that are not
5466 required by the frontend. This fixes middle-end type checking for
5467 for example gcc.dg/redecl-6.c. */
5468 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5470 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5471 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5472 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5475 /* See if any simplifications can be done based on what the RHS is. */
5476 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5477 want_value);
5478 if (ret != GS_UNHANDLED)
5479 return ret;
5481 /* For zero sized types only gimplify the left hand side and right hand
5482 side as statements and throw away the assignment. Do this after
5483 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5484 types properly. */
5485 if (zero_sized_type (TREE_TYPE (*from_p))
5486 && !want_value
5487 /* Don't do this for calls that return addressable types, expand_call
5488 relies on those having a lhs. */
5489 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5490 && TREE_CODE (*from_p) == CALL_EXPR))
5492 gimplify_stmt (from_p, pre_p);
5493 gimplify_stmt (to_p, pre_p);
5494 *expr_p = NULL_TREE;
5495 return GS_ALL_DONE;
5498 /* If the value being copied is of variable width, compute the length
5499 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5500 before gimplifying any of the operands so that we can resolve any
5501 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5502 the size of the expression to be copied, not of the destination, so
5503 that is what we must do here. */
5504 maybe_with_size_expr (from_p);
5506 /* As a special case, we have to temporarily allow for assignments
5507 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5508 a toplevel statement, when gimplifying the GENERIC expression
5509 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5510 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5512 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5513 prevent gimplify_expr from trying to create a new temporary for
5514 foo's LHS, we tell it that it should only gimplify until it
5515 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5516 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5517 and all we need to do here is set 'a' to be its LHS. */
5519 /* Gimplify the RHS first for C++17 and bug 71104. */
5520 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5521 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5522 if (ret == GS_ERROR)
5523 return ret;
5525 /* Then gimplify the LHS. */
5526 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5527 twice we have to make sure to gimplify into non-SSA as otherwise
5528 the abnormal edge added later will make those defs not dominate
5529 their uses.
5530 ??? Technically this applies only to the registers used in the
5531 resulting non-register *TO_P. */
5532 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5533 if (saved_into_ssa
5534 && TREE_CODE (*from_p) == CALL_EXPR
5535 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5536 gimplify_ctxp->into_ssa = false;
5537 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5538 gimplify_ctxp->into_ssa = saved_into_ssa;
5539 if (ret == GS_ERROR)
5540 return ret;
5542 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5543 guess for the predicate was wrong. */
5544 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5545 if (final_pred != initial_pred)
5547 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5548 if (ret == GS_ERROR)
5549 return ret;
5552 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5553 size as argument to the call. */
5554 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5556 tree call = TREE_OPERAND (*from_p, 0);
5557 tree vlasize = TREE_OPERAND (*from_p, 1);
5559 if (TREE_CODE (call) == CALL_EXPR
5560 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5562 int nargs = call_expr_nargs (call);
5563 tree type = TREE_TYPE (call);
5564 tree ap = CALL_EXPR_ARG (call, 0);
5565 tree tag = CALL_EXPR_ARG (call, 1);
5566 tree aptag = CALL_EXPR_ARG (call, 2);
5567 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5568 IFN_VA_ARG, type,
5569 nargs + 1, ap, tag,
5570 aptag, vlasize);
5571 TREE_OPERAND (*from_p, 0) = newcall;
5575 /* Now see if the above changed *from_p to something we handle specially. */
5576 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5577 want_value);
5578 if (ret != GS_UNHANDLED)
5579 return ret;
5581 /* If we've got a variable sized assignment between two lvalues (i.e. does
5582 not involve a call), then we can make things a bit more straightforward
5583 by converting the assignment to memcpy or memset. */
5584 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5586 tree from = TREE_OPERAND (*from_p, 0);
5587 tree size = TREE_OPERAND (*from_p, 1);
5589 if (TREE_CODE (from) == CONSTRUCTOR)
5590 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5592 if (is_gimple_addressable (from))
5594 *from_p = from;
5595 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5596 pre_p);
5600 /* Transform partial stores to non-addressable complex variables into
5601 total stores. This allows us to use real instead of virtual operands
5602 for these variables, which improves optimization. */
5603 if ((TREE_CODE (*to_p) == REALPART_EXPR
5604 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5605 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5606 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5608 /* Try to alleviate the effects of the gimplification creating artificial
5609 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5610 make sure not to create DECL_DEBUG_EXPR links across functions. */
5611 if (!gimplify_ctxp->into_ssa
5612 && VAR_P (*from_p)
5613 && DECL_IGNORED_P (*from_p)
5614 && DECL_P (*to_p)
5615 && !DECL_IGNORED_P (*to_p)
5616 && decl_function_context (*to_p) == current_function_decl
5617 && decl_function_context (*from_p) == current_function_decl)
5619 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5620 DECL_NAME (*from_p)
5621 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5622 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5623 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
/* When the expression's value is wanted but the LHS is volatile,
   evaluate the RHS into a temporary first; the want_value epilogue
   below then yields that temporary instead of re-reading the volatile
   LHS.  */
5626 if (want_value && TREE_THIS_VOLATILE (*to_p))
5627 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5629 if (TREE_CODE (*from_p) == CALL_EXPR)
5631 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5632 instead of a GIMPLE_ASSIGN. */
5633 gcall *call_stmt;
5634 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5636 /* Gimplify internal functions created in the FEs. */
5637 int nargs = call_expr_nargs (*from_p), i;
5638 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5639 auto_vec<tree> vargs (nargs);
5641 for (i = 0; i < nargs; i++)
5643 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5644 EXPR_LOCATION (*from_p));
5645 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5647 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5648 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5649 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5651 else
5653 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5654 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5655 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5656 tree fndecl = get_callee_fndecl (*from_p);
5657 if (fndecl
5658 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5659 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5660 && call_expr_nargs (*from_p) == 3)
5661 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5662 CALL_EXPR_ARG (*from_p, 0),
5663 CALL_EXPR_ARG (*from_p, 1),
5664 CALL_EXPR_ARG (*from_p, 2));
5665 else
5667 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5670 notice_special_calls (call_stmt);
5671 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5672 gimple_call_set_lhs (call_stmt, *to_p);
5673 else if (TREE_CODE (*to_p) == SSA_NAME)
5674 /* The above is somewhat premature, avoid ICEing later for a
5675 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5676 ??? This doesn't make it a default-def. */
5677 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
/* NOTE(review): presumably this emits the Cilk spawn detach into
   PRE_P before the spawned call -- confirm against
   gimplify_cilk_detach.  */
5679 if (EXPR_CILK_SPAWN (*from_p))
5680 gimplify_cilk_detach (pre_p);
5681 assign = call_stmt;
5683 else
5685 assign = gimple_build_assign (*to_p, *from_p);
5686 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5687 if (COMPARISON_CLASS_P (*from_p))
5688 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5691 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5693 /* We should have got an SSA name from the start. */
5694 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5695 || ! gimple_in_ssa_p (cfun));
/* Emit the new statement and try to fold it in place.  */
5698 gimplify_seq_add_stmt (pre_p, assign);
5699 gsi = gsi_last (*pre_p);
5700 maybe_fold_stmt (&gsi);
5702 if (want_value)
5704 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5705 return GS_OK;
5707 else
5708 *expr_p = NULL;
5710 return GS_ALL_DONE;
5713 /* Gimplify a comparison between two variable-sized objects. Do this
5714 with a call to BUILT_IN_MEMCMP. */
5716 static enum gimplify_status
5717 gimplify_variable_sized_compare (tree *expr_p)
5719 location_t loc = EXPR_LOCATION (*expr_p);
5720 tree op0 = TREE_OPERAND (*expr_p, 0);
5721 tree op1 = TREE_OPERAND (*expr_p, 1);
5722 tree t, arg, dest, src, expr;
5724 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5725 arg = unshare_expr (arg);
5726 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5727 src = build_fold_addr_expr_loc (loc, op1);
5728 dest = build_fold_addr_expr_loc (loc, op0);
5729 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5730 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5732 expr
5733 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5734 SET_EXPR_LOCATION (expr, loc);
5735 *expr_p = expr;
5737 return GS_OK;
5740 /* Gimplify a comparison between two aggregate objects of integral scalar
5741 mode as a comparison between the bitwise equivalent scalar values. */
5743 static enum gimplify_status
5744 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5746 location_t loc = EXPR_LOCATION (*expr_p);
5747 tree op0 = TREE_OPERAND (*expr_p, 0);
5748 tree op1 = TREE_OPERAND (*expr_p, 1);
5750 tree type = TREE_TYPE (op0);
5751 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5753 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5754 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5756 *expr_p
5757 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5759 return GS_OK;
5762 /* Gimplify an expression sequence. This function gimplifies each
5763 expression and rewrites the original expression with the last
5764 expression of the sequence in GIMPLE form.
5766 PRE_P points to the list where the side effects for all the
5767 expressions in the sequence will be emitted.
5769 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5771 static enum gimplify_status
5772 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5774 tree t = *expr_p;
5778 tree *sub_p = &TREE_OPERAND (t, 0);
5780 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5781 gimplify_compound_expr (sub_p, pre_p, false);
5782 else
5783 gimplify_stmt (sub_p, pre_p);
5785 t = TREE_OPERAND (t, 1);
5787 while (TREE_CODE (t) == COMPOUND_EXPR);
5789 *expr_p = t;
5790 if (want_value)
5791 return GS_OK;
5792 else
5794 gimplify_stmt (expr_p, pre_p);
5795 return GS_ALL_DONE;
5799 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5800 gimplify. After gimplification, EXPR_P will point to a new temporary
5801 that holds the original value of the SAVE_EXPR node.
5803 PRE_P points to the list where side effects that must happen before
5804 *EXPR_P should be stored. */
5806 static enum gimplify_status
5807 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5809 enum gimplify_status ret = GS_ALL_DONE;
5810 tree val;
5812 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5813 val = TREE_OPERAND (*expr_p, 0);
5815 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5816 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5818 /* The operand may be a void-valued expression. It is
5819 being executed only for its side-effects. */
5820 if (TREE_TYPE (val) == void_type_node)
5822 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5823 is_gimple_stmt, fb_none);
5824 val = NULL;
5826 else
5827 /* The temporary may not be an SSA name as later abnormal and EH
5828 control flow may invalidate use/def domination. */
5829 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5831 TREE_OPERAND (*expr_p, 0) = val;
5832 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5835 *expr_p = val;
5837 return ret;
5840 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5842 unary_expr
5843 : ...
5844 | '&' varname
5847 PRE_P points to the list where side effects that must happen before
5848 *EXPR_P should be stored.
5850 POST_P points to the list where side effects that must happen after
5851 *EXPR_P should be stored. */
5853 static enum gimplify_status
5854 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5856 tree expr = *expr_p;
5857 tree op0 = TREE_OPERAND (expr, 0);
5858 enum gimplify_status ret;
5859 location_t loc = EXPR_LOCATION (*expr_p);
5861 switch (TREE_CODE (op0))
5863 case INDIRECT_REF:
5864 do_indirect_ref:
5865 /* Check if we are dealing with an expression of the form '&*ptr'.
5866 While the front end folds away '&*ptr' into 'ptr', these
5867 expressions may be generated internally by the compiler (e.g.,
5868 builtins like __builtin_va_end). */
5869 /* Caution: the silent array decomposition semantics we allow for
5870 ADDR_EXPR means we can't always discard the pair. */
5871 /* Gimplification of the ADDR_EXPR operand may drop
5872 cv-qualification conversions, so make sure we add them if
5873 needed. */
5875 tree op00 = TREE_OPERAND (op0, 0);
5876 tree t_expr = TREE_TYPE (expr);
5877 tree t_op00 = TREE_TYPE (op00);
5879 if (!useless_type_conversion_p (t_expr, t_op00))
5880 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5881 *expr_p = op00;
5882 ret = GS_OK;
5884 break;
5886 case VIEW_CONVERT_EXPR:
5887 /* Take the address of our operand and then convert it to the type of
5888 this ADDR_EXPR.
5890 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5891 all clear. The impact of this transformation is even less clear. */
5893 /* If the operand is a useless conversion, look through it. Doing so
5894 guarantees that the ADDR_EXPR and its operand will remain of the
5895 same type. */
5896 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5897 op0 = TREE_OPERAND (op0, 0);
5899 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5900 build_fold_addr_expr_loc (loc,
5901 TREE_OPERAND (op0, 0)));
5902 ret = GS_OK;
5903 break;
/* &MEM[ptr, 0] takes the address of a zero-offset dereference, so it
   can share the '&*ptr' handling above.  */
5905 case MEM_REF:
5906 if (integer_zerop (TREE_OPERAND (op0, 1)))
5907 goto do_indirect_ref;
5909 /* fall through */
5911 default:
5912 /* If we see a call to a declared builtin or see its address
5913 being taken (we can unify those cases here) then we can mark
5914 the builtin for implicit generation by GCC. */
5915 if (TREE_CODE (op0) == FUNCTION_DECL
5916 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5917 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5918 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5920 /* We use fb_either here because the C frontend sometimes takes
5921 the address of a call that returns a struct; see
5922 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5923 the implied temporary explicit. */
5925 /* Make the operand addressable. */
5926 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5927 is_gimple_addressable, fb_either);
5928 if (ret == GS_ERROR)
5929 break;
5931 /* Then mark it. Beware that it may not be possible to do so directly
5932 if a temporary has been created by the gimplification. */
5933 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5935 op0 = TREE_OPERAND (expr, 0);
5937 /* For various reasons, the gimplification of the expression
5938 may have made a new INDIRECT_REF. */
5939 if (TREE_CODE (op0) == INDIRECT_REF)
5940 goto do_indirect_ref;
5942 mark_addressable (TREE_OPERAND (expr, 0));
5944 /* The FEs may end up building ADDR_EXPRs early on a decl with
5945 an incomplete type. Re-build ADDR_EXPRs in canonical form
5946 here. */
5947 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5948 *expr_p = build_fold_addr_expr (op0);
5950 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5951 recompute_tree_invariant_for_addr_expr (*expr_p);
5953 /* If we re-built the ADDR_EXPR add a conversion to the original type
5954 if required. */
5955 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5956 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5958 break;
5961 return ret;
5964 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5965 value; output operands should be a gimple lvalue. */
5967 static enum gimplify_status
5968 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5970 tree expr;
5971 int noutputs;
5972 const char **oconstraints;
5973 int i;
5974 tree link;
5975 const char *constraint;
5976 bool allows_mem, allows_reg, is_inout;
5977 enum gimplify_status ret, tret;
5978 gasm *stmt;
5979 vec<tree, va_gc> *inputs;
5980 vec<tree, va_gc> *outputs;
5981 vec<tree, va_gc> *clobbers;
5982 vec<tree, va_gc> *labels;
5983 tree link_next;
5985 expr = *expr_p;
5986 noutputs = list_length (ASM_OUTPUTS (expr));
5987 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5989 inputs = NULL;
5990 outputs = NULL;
5991 clobbers = NULL;
5992 labels = NULL;
5994 ret = GS_ALL_DONE;
5995 link_next = NULL_TREE;
/* First pass: gimplify the output operands, recording each constraint
   string in OCONSTRAINTS so the input-constraint parsing below can see
   them.  */
5996 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5998 bool ok;
5999 size_t constraint_len;
6001 link_next = TREE_CHAIN (link);
6003 oconstraints[i]
6004 = constraint
6005 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6006 constraint_len = strlen (constraint);
6007 if (constraint_len == 0)
6008 continue;
6010 ok = parse_output_constraint (&constraint, i, 0, 0,
6011 &allows_mem, &allows_reg, &is_inout);
6012 if (!ok)
6014 ret = GS_ERROR;
6015 is_inout = false;
6018 if (!allows_reg && allows_mem)
6019 mark_addressable (TREE_VALUE (link));
6021 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6022 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6023 fb_lvalue | fb_mayfail);
6024 if (tret == GS_ERROR)
6026 error ("invalid lvalue in asm output %d", i);
6027 ret = tret;
6030 /* If the constraint does not allow memory make sure we gimplify
6031 it to a register if it is not already but its base is. This
6032 happens for complex and vector components. */
6033 if (!allows_mem)
6035 tree op = TREE_VALUE (link);
6036 if (! is_gimple_val (op)
6037 && is_gimple_reg_type (TREE_TYPE (op))
6038 && is_gimple_reg (get_base_address (op)))
6040 tree tem = create_tmp_reg (TREE_TYPE (op));
6041 tree ass;
6042 if (is_inout)
6044 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6045 tem, unshare_expr (op));
6046 gimplify_and_add (ass, pre_p);
6048 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6049 gimplify_and_add (ass, post_p);
6051 TREE_VALUE (link) = tem;
6052 tret = GS_OK;
6056 vec_safe_push (outputs, link);
6057 TREE_CHAIN (link) = NULL_TREE;
6059 if (is_inout)
6061 /* An input/output operand. To give the optimizers more
6062 flexibility, split it into separate input and output
6063 operands. */
6064 tree input;
6065 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6066 char buf[11];
6068 /* Turn the in/out constraint into an output constraint. */
6069 char *p = xstrdup (constraint);
6070 p[0] = '=';
6071 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6073 /* And add a matching input constraint. */
6074 if (allows_reg)
6076 sprintf (buf, "%u", i);
6078 /* If there are multiple alternatives in the constraint,
6079 handle each of them individually. Those that allow register
6080 will be replaced with operand number, the others will stay
6081 unchanged. */
6082 if (strchr (p, ',') != NULL)
6084 size_t len = 0, buflen = strlen (buf);
6085 char *beg, *end, *str, *dst;
6087 for (beg = p + 1;;)
6089 end = strchr (beg, ',');
6090 if (end == NULL)
6091 end = strchr (beg, '\0');
6092 if ((size_t) (end - beg) < buflen)
6093 len += buflen + 1;
6094 else
6095 len += end - beg + 1;
6096 if (*end)
6097 beg = end + 1;
6098 else
6099 break;
6102 str = (char *) alloca (len);
6103 for (beg = p + 1, dst = str;;)
6105 const char *tem;
6106 bool mem_p, reg_p, inout_p;
6108 end = strchr (beg, ',');
6109 if (end)
6110 *end = '\0';
6111 beg[-1] = '=';
6112 tem = beg - 1;
6113 parse_output_constraint (&tem, i, 0, 0,
6114 &mem_p, &reg_p, &inout_p);
6115 if (dst != str)
6116 *dst++ = ',';
6117 if (reg_p)
6119 memcpy (dst, buf, buflen);
6120 dst += buflen;
6122 else
6124 if (end)
6125 len = end - beg;
6126 else
6127 len = strlen (beg);
6128 memcpy (dst, beg, len);
6129 dst += len;
6131 if (end)
6132 beg = end + 1;
6133 else
6134 break;
6136 *dst = '\0';
6137 input = build_string (dst - str, str);
6139 else
6140 input = build_string (strlen (buf), buf);
6142 else
6143 input = build_string (constraint_len - 1, constraint + 1);
6145 free (p);
6147 input = build_tree_list (build_tree_list (NULL_TREE, input),
6148 unshare_expr (TREE_VALUE (link)));
6149 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6153 link_next = NULL_TREE;
/* Second pass: gimplify the input operands.  Note that I is not reset
   here; it keeps counting up from the output loop and is used only in
   the diagnostics below.  */
6154 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6156 link_next = TREE_CHAIN (link);
6157 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6158 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6159 oconstraints, &allows_mem, &allows_reg);
6161 /* If we can't make copies, we can only accept memory. */
6162 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6164 if (allows_mem)
6165 allows_reg = 0;
6166 else
6168 error ("impossible constraint in %<asm%>");
6169 error ("non-memory input %d must stay in memory", i);
6170 return GS_ERROR;
6174 /* If the operand is a memory input, it should be an lvalue. */
6175 if (!allows_reg && allows_mem)
6177 tree inputv = TREE_VALUE (link);
6178 STRIP_NOPS (inputv);
6179 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6180 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6181 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6182 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6183 || TREE_CODE (inputv) == MODIFY_EXPR
6184 TREE_VALUE (link) = error_mark_node;
6185 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6186 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6187 if (tret != GS_ERROR)
6189 /* Unlike output operands, memory inputs are not guaranteed
6190 to be lvalues by the FE, and while the expressions are
6191 marked addressable there, if it is e.g. a statement
6192 expression, temporaries in it might not end up being
6193 addressable. They might be already used in the IL and thus
6194 it is too late to make them addressable now though. */
6195 tree x = TREE_VALUE (link);
6196 while (handled_component_p (x))
6197 x = TREE_OPERAND (x, 0);
6198 if (TREE_CODE (x) == MEM_REF
6199 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6200 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6201 if ((VAR_P (x)
6202 || TREE_CODE (x) == PARM_DECL
6203 || TREE_CODE (x) == RESULT_DECL)
6204 && !TREE_ADDRESSABLE (x)
6205 && is_gimple_reg (x))
6207 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6208 input_location), 0,
6209 "memory input %d is not directly addressable",
6211 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6214 mark_addressable (TREE_VALUE (link));
6215 if (tret == GS_ERROR)
6217 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6218 "memory input %d is not directly addressable", i);
6219 ret = tret;
6222 else
6224 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6225 is_gimple_asm_val, fb_rvalue);
6226 if (tret == GS_ERROR)
6227 ret = tret;
6230 TREE_CHAIN (link) = NULL_TREE;
6231 vec_safe_push (inputs, link);
/* Collect the clobber and label operands unchanged, detaching each
   from its TREE_CHAIN.  */
6234 link_next = NULL_TREE;
6235 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6237 link_next = TREE_CHAIN (link);
6238 TREE_CHAIN (link) = NULL_TREE;
6239 vec_safe_push (clobbers, link);
6242 link_next = NULL_TREE;
6243 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6245 link_next = TREE_CHAIN (link);
6246 TREE_CHAIN (link) = NULL_TREE;
6247 vec_safe_push (labels, link);
6250 /* Do not add ASMs with errors to the gimple IL stream. */
6251 if (ret != GS_ERROR)
6253 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6254 inputs, outputs, clobbers, labels);
6256 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6257 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6259 gimplify_seq_add_stmt (pre_p, stmt);
6262 return ret;
6265 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6266 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6267 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6268 return to this function.
6270 FIXME should we complexify the prequeue handling instead? Or use flags
6271 for all the cleanups and let the optimizer tighten them up? The current
6272 code seems pretty fragile; it will break on a cleanup within any
6273 non-conditional nesting. But any such nesting would be broken, anyway;
6274 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6275 and continues out of it. We can do that at the RTL level, though, so
6276 having an optimizer to tighten up try/finally regions would be a Good
6277 Thing. */
6279 static enum gimplify_status
6280 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6282 gimple_stmt_iterator iter;
6283 gimple_seq body_sequence = NULL;
/* If the wrapped expression yields a value, voidify_wrapper_expr gives us
   a temporary that carries the value out of the cleanup region; we return
   that temporary as the replacement expression below.  */
6285 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6287 /* We only care about the number of conditions between the innermost
6288 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6289 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6290 int old_conds = gimplify_ctxp->conditions;
6291 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6292 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6293 gimplify_ctxp->conditions = 0;
6294 gimplify_ctxp->conditional_cleanups = NULL;
6295 gimplify_ctxp->in_cleanup_point_expr = true;
/* Gimplify the body; cleanups inside it surface as
   GIMPLE_WITH_CLEANUP_EXPR (WCE) markers in BODY_SEQUENCE.  */
6297 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6299 gimplify_ctxp->conditions = old_conds;
6300 gimplify_ctxp->conditional_cleanups = old_cleanups;
6301 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Convert each WCE marker into a GIMPLE_TRY whose cleanup covers the
   statements that follow the marker in the sequence.  */
6303 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6305 gimple *wce = gsi_stmt (iter);
6307 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6309 if (gsi_one_before_end_p (iter))
6311 /* Note that gsi_insert_seq_before and gsi_remove do not
6312 scan operands, unlike some other sequence mutators. */
6313 if (!gimple_wce_cleanup_eh_only (wce))
6314 gsi_insert_seq_before_without_update (&iter,
6315 gimple_wce_cleanup (wce),
6316 GSI_SAME_STMT);
6317 gsi_remove (&iter, true);
6318 break;
6320 else
6322 gtry *gtry;
6323 gimple_seq seq;
6324 enum gimple_try_flags kind;
/* EH-only cleanups become TRY_CATCH; normal cleanups TRY_FINALLY.  */
6326 if (gimple_wce_cleanup_eh_only (wce))
6327 kind = GIMPLE_TRY_CATCH;
6328 else
6329 kind = GIMPLE_TRY_FINALLY;
6330 seq = gsi_split_seq_after (iter);
6332 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6333 /* Do not use gsi_replace here, as it may scan operands.
6334 We want to do a simple structural modification only. */
6335 gsi_set_stmt (&iter, gtry);
6336 iter = gsi_start (gtry->eval);
6339 else
6340 gsi_next (&iter);
6343 gimplify_seq_add_seq (pre_p, body_sequence);
/* If voidify_wrapper_expr produced a temporary, the expression's value
   lives there now; otherwise we are completely done.  */
6344 if (temp)
6346 *expr_p = temp;
6347 return GS_OK;
6349 else
6351 *expr_p = NULL;
6352 return GS_ALL_DONE;
6356 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6357 is the cleanup action required. EH_ONLY is true if the cleanup should
6358 only be executed if an exception is thrown, not on normal exit.
6359 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6360 only valid for clobbers. */
6362 static void
6363 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6364 bool force_uncond = false)
6366 gimple *wce;
6367 gimple_seq cleanup_stmts = NULL;
6369 /* Errors can result in improperly nested cleanups. Which results in
6370 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6371 if (seen_error ())
6372 return;
6374 if (gimple_conditional_context ())
6376 /* If we're in a conditional context, this is more complex. We only
6377 want to run the cleanup if we actually ran the initialization that
6378 necessitates it, but we want to run it after the end of the
6379 conditional context. So we wrap the try/finally around the
6380 condition and use a flag to determine whether or not to actually
6381 run the destructor. Thus
6383 test ? f(A()) : 0
6385 becomes (approximately)
6387 flag = 0;
6388 try {
6389 if (test) { A::A(temp); flag = 1; val = f(temp); }
6390 else { val = 0; }
6391 } finally {
6392 if (flag) A::~A(temp);
/* FORCE_UNCOND (clobbers only): no guarding flag is needed, the cleanup
   is queued on the conditional-cleanups sequence as-is.  */
6396 if (force_uncond)
6398 gimplify_stmt (&cleanup, &cleanup_stmts);
6399 wce = gimple_build_wce (cleanup_stmts);
6400 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6402 else
/* Guard the cleanup with a boolean flag set to true only after the
   initialization that requires the cleanup has actually run.  */
6404 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6405 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6406 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6408 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6409 gimplify_stmt (&cleanup, &cleanup_stmts);
6410 wce = gimple_build_wce (cleanup_stmts);
/* FFALSE and the guarded WCE go on the conditional-cleanups queue;
   FTRUE is emitted at the current point, i.e. right after the
   initialization the cleanup protects.  */
6412 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6413 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6414 gimplify_seq_add_stmt (pre_p, ftrue);
6416 /* Because of this manipulation, and the EH edges that jump
6417 threading cannot redirect, the temporary (VAR) will appear
6418 to be used uninitialized. Don't warn. */
6419 TREE_NO_WARNING (var) = 1;
6422 else
/* Unconditional context: emit the WCE marker directly into *PRE_P and
   record whether it fires only on the exception path.  */
6424 gimplify_stmt (&cleanup, &cleanup_stmts);
6425 wce = gimple_build_wce (cleanup_stmts);
6426 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6427 gimplify_seq_add_stmt (pre_p, wce);
6431 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6433 static enum gimplify_status
6434 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6436 tree targ = *expr_p;
6437 tree temp = TARGET_EXPR_SLOT (targ);
6438 tree init = TARGET_EXPR_INITIAL (targ);
6439 enum gimplify_status ret;
6441 bool unpoison_empty_seq = false;
6442 gimple_stmt_iterator unpoison_it;
/* INIT is non-NULL only the first time this TARGET_EXPR is gimplified;
   it is cleared at the bottom so re-gimplification just yields TEMP.  */
6444 if (init)
6446 tree cleanup = NULL_TREE;
6448 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6449 to the temps list. Handle also variable length TARGET_EXPRs. */
6450 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6452 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6453 gimplify_type_sizes (TREE_TYPE (temp), pre_p)1;
6454 gimplify_vla_decl (temp, pre_p);
6456 else
6458 /* Save location where we need to place unpoisoning. It's possible
6459 that a variable will be converted to needs_to_live_in_memory. */
6460 unpoison_it = gsi_last (*pre_p);
6461 unpoison_empty_seq = gsi_end_p (unpoison_it);
6463 gimple_add_tmp_var (temp);
6466 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6467 expression is supposed to initialize the slot. */
6468 if (VOID_TYPE_P (TREE_TYPE (init)))
6469 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6470 else
/* Non-void initializer: wrap it in TEMP = INIT and gimplify that.  */
6472 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6473 init = init_expr;
6474 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6475 init = NULL;
6476 ggc_free (init_expr);
6478 if (ret == GS_ERROR)
6480 /* PR c++/28266 Make sure this is expanded only once. */
6481 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6482 return GS_ERROR;
6484 if (init)
6485 gimplify_and_add (init, pre_p);
6487 /* If needed, push the cleanup for the temp. */
6488 if (TARGET_EXPR_CLEANUP (targ))
6490 if (CLEANUP_EH_ONLY (targ))
6491 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6492 CLEANUP_EH_ONLY (targ), pre_p);
6493 else
/* Defer the normal cleanup: it must be pushed last, after the
   clobber and ASAN poison cleanups below, so it runs first.  */
6494 cleanup = TARGET_EXPR_CLEANUP (targ);
6497 /* Add a clobber for the temporary going out of scope, like
6498 gimplify_bind_expr. */
6499 if (gimplify_ctxp->in_cleanup_point_expr
6500 && needs_to_live_in_memory (temp))
6502 if (flag_stack_reuse == SR_ALL)
6504 tree clobber = build_constructor (TREE_TYPE (temp),
6505 NULL);
6506 TREE_THIS_VOLATILE (clobber) = true;
6507 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
/* force_uncond=true: clobbers are valid even on the EH path.  */
6508 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6510 if (asan_poisoned_variables
6511 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6512 && dbg_cnt (asan_use_after_scope))
6514 tree asan_cleanup = build_asan_poison_call_expr (temp);
6515 if (asan_cleanup)
6517 if (unpoison_empty_seq)
6518 unpoison_it = gsi_start (*pre_p);
6520 asan_poison_variable (temp, false, &unpoison_it,
6521 unpoison_empty_seq);
6522 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6526 if (cleanup)
6527 gimple_push_cleanup (temp, cleanup, false, pre_p);
6529 /* Only expand this once. */
6530 TREE_OPERAND (targ, 3) = init;
6531 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6533 else
6534 /* We should have expanded this before. */
6535 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
/* The TARGET_EXPR's value is simply its slot.  */
6537 *expr_p = temp;
6538 return GS_OK;
6541 /* Gimplification of expression trees. */
6543 /* Gimplify an expression which appears at statement context. The
6544 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6545 NULL, a new sequence is allocated.
6547 Return true if we actually added a statement to the queue. */
6549 bool
6550 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6552 gimple_seq_node last;
6554 last = gimple_seq_last (*seq_p);
6555 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6556 return last != gimple_seq_last (*seq_p);
6559 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6560 to CTX. If entries already exist, force them to be some flavor of private.
6561 If there is no enclosing parallel, do nothing. */
6563 void
6564 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6566 splay_tree_node n;
6568 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6569 return;
/* Walk outward through the enclosing OMP contexts (do/while over
   ctx->outer_context, terminated by the "while (ctx)" below).  */
6573 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6574 if (n != NULL)
/* An existing entry is forced to a private flavor: SHARED becomes
   FIRSTPRIVATE, a MAP becomes to-only; anything else stops the walk.  */
6576 if (n->value & GOVD_SHARED)
6577 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6578 else if (n->value & GOVD_MAP)
6579 n->value |= GOVD_MAP_TO_ONLY;
6580 else
6581 return;
6583 else if ((ctx->region_type & ORT_TARGET) != 0)
6585 if (ctx->target_map_scalars_firstprivate)
6586 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6587 else
6588 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6590 else if (ctx->region_type != ORT_WORKSHARE
6591 && ctx->region_type != ORT_SIMD
6592 && ctx->region_type != ORT_ACC
6593 && !(ctx->region_type & ORT_TARGET_DATA))
6594 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6596 ctx = ctx->outer_context;
6598 while (ctx);
6601 /* Similarly for each of the type sizes of TYPE. */
6603 static void
6604 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6606 if (type == NULL || type == error_mark_node)
6607 return;
6608 type = TYPE_MAIN_VARIANT (type);
/* The privatized_types set guards against revisiting the same type
   (and against infinite recursion on self-referential types).  */
6610 if (ctx->privatized_types->add (type))
6611 return;
6613 switch (TREE_CODE (type))
6615 case INTEGER_TYPE:
6616 case ENUMERAL_TYPE:
6617 case BOOLEAN_TYPE:
6618 case REAL_TYPE:
6619 case FIXED_POINT_TYPE:
/* Scalar types: firstprivatize any variable bounds.  */
6620 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6621 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6622 break;
6624 case ARRAY_TYPE:
/* Recurse into the element type and the index domain.  */
6625 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6626 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6627 break;
6629 case RECORD_TYPE:
6630 case UNION_TYPE:
6631 case QUAL_UNION_TYPE:
6633 tree field;
6634 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6635 if (TREE_CODE (field) == FIELD_DECL)
6637 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6638 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6641 break;
6643 case POINTER_TYPE:
6644 case REFERENCE_TYPE:
6645 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6646 break;
6648 default:
6649 break;
/* Finally handle the size of TYPE itself, plus any language-specific
   extras via the lang hook.  */
6652 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6653 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6654 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6657 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6659 static void
6660 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6662 splay_tree_node n;
6663 unsigned int nflags;
6664 tree t;
6666 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6667 return;
6669 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6670 there are constructors involved somewhere. Exception is a shared clause,
6671 there is nothing privatized in that case. */
6672 if ((flags & GOVD_SHARED) == 0
6673 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6674 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6675 flags |= GOVD_SEEN;
6677 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6678 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6680 /* We shouldn't be re-adding the decl with the same data
6681 sharing class. */
6682 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6683 nflags = n->value | flags;
6684 /* The only combination of data sharing classes we should see is
6685 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6686 reduction variables to be used in data sharing clauses. */
6687 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6688 || ((nflags & GOVD_DATA_SHARE_CLASS)
6689 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6690 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6691 n->value = nflags;
6692 return;
6695 /* When adding a variable-sized variable, we have to handle all sorts
6696 of additional bits of data: the pointer replacement variable, and
6697 the parameters of the type. */
6698 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6700 /* Add the pointer replacement variable as PRIVATE if the variable
6701 replacement is private, else FIRSTPRIVATE since we'll need the
6702 address of the original variable either for SHARED, or for the
6703 copy into or out of the context. */
6704 if (!(flags & GOVD_LOCAL))
6706 if (flags & GOVD_MAP)
6707 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6708 else if (flags & GOVD_PRIVATE)
6709 nflags = GOVD_PRIVATE;
6710 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6711 && (flags & GOVD_FIRSTPRIVATE))
6712 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6713 else
6714 nflags = GOVD_FIRSTPRIVATE;
6715 nflags |= flags & GOVD_SEEN;
/* A VLA's DECL_VALUE_EXPR is *ptr; recurse on the pointer decl.  */
6716 t = DECL_VALUE_EXPR (decl);
6717 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6718 t = TREE_OPERAND (t, 0);
6719 gcc_assert (DECL_P (t));
6720 omp_add_variable (ctx, t, nflags);
6723 /* Add all of the variable and type parameters (which should have
6724 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6725 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6726 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6727 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6729 /* The variable-sized variable itself is never SHARED, only some form
6730 of PRIVATE. The sharing would take place via the pointer variable
6731 which we remapped above. */
6732 if (flags & GOVD_SHARED)
6733 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6734 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6736 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6737 alloca statement we generate for the variable, so make sure it
6738 is available. This isn't automatically needed for the SHARED
6739 case, since we won't be allocating local storage then.
6740 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6741 in this case omp_notice_variable will be called later
6742 on when it is gimplified. */
6743 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6744 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6745 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6747 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6748 && lang_hooks.decls.omp_privatize_by_reference (decl))
6750 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6752 /* Similar to the direct variable sized case above, we'll need the
6753 size of references being privatized. */
6754 if ((flags & GOVD_SHARED) == 0)
6756 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6757 if (DECL_P (t))
6758 omp_notice_variable (ctx, t, true);
/* Record (or merge into) the splay-tree entry for DECL.  */
6762 if (n != NULL)
6763 n->value |= flags;
6764 else
6765 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6767 /* For reductions clauses in OpenACC loop directives, by default create a
6768 copy clause on the enclosing parallel construct for carrying back the
6769 results. */
6770 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6772 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6773 while (outer_ctx)
6775 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6776 if (n != NULL)
6778 /* Ignore local variables and explicitly declared clauses. */
6779 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6780 break;
6781 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6783 /* According to the OpenACC spec, such a reduction variable
6784 should already have a copy map on a kernels construct,
6785 verify that here. */
6786 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6787 && (n->value & GOVD_MAP));
6789 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6791 /* Remove firstprivate and make it a copy map. */
6792 n->value &= ~GOVD_FIRSTPRIVATE;
6793 n->value |= GOVD_MAP;
6796 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
/* No entry yet on the enclosing parallel: create the copy map.  */
6798 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6799 GOVD_MAP | GOVD_SEEN);
6800 break;
6802 outer_ctx = outer_ctx->outer_context;
6807 /* Notice a threadprivate variable DECL used in OMP context CTX.
6808 This just prints out diagnostics about threadprivate variable uses
6809 in untied tasks. If DECL2 is non-NULL, prevent this warning
6810 on that variable. */
6812 static bool
6813 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6814 tree decl2)
6816 splay_tree_node n;
6817 struct gimplify_omp_ctx *octx;
/* Threadprivate variables may not be used inside target regions:
   diagnose once per context by inserting a zero-flag entry.  */
6819 for (octx = ctx; octx; octx = octx->outer_context)
6820 if ((octx->region_type & ORT_TARGET) != 0)
6822 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6823 if (n == NULL)
6825 error ("threadprivate variable %qE used in target region",
6826 DECL_NAME (decl));
6827 error_at (octx->location, "enclosing target region");
6828 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6830 if (decl2)
6831 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* Only untied tasks get the second diagnostic below.  */
6834 if (ctx->region_type != ORT_UNTIED_TASK)
6835 return false;
6836 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6837 if (n == NULL)
6839 error ("threadprivate variable %qE used in untied task",
6840 DECL_NAME (decl));
6841 error_at (ctx->location, "enclosing task");
6842 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6844 if (decl2)
6845 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
/* Threadprivate decls are never remapped.  */
6846 return false;
6849 /* Return true if global var DECL is device resident. */
6851 static bool
6852 device_resident_p (tree decl)
6854 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6856 if (!attr)
6857 return false;
6859 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6861 tree c = TREE_VALUE (t);
6862 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6863 return true;
6866 return false;
6869 /* Return true if DECL has an ACC DECLARE attribute. */
6871 static bool
6872 is_oacc_declared (tree decl)
6874 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6875 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6876 return declared != NULL_TREE;
6879 /* Determine outer default flags for DECL mentioned in an OMP region
6880 but not declared in an enclosing clause.
6882 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6883 remapped firstprivate instead of shared. To some extent this is
6884 addressed in omp_firstprivatize_type_sizes, but not
6885 effectively. */
6887 static unsigned
6888 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6889 bool in_code, unsigned flags)
6891 enum omp_clause_default_kind default_kind = ctx->default_kind;
6892 enum omp_clause_default_kind kind;
/* A language-predetermined sharing overrides the region's default().  */
6894 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6895 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6896 default_kind = kind;
6898 switch (default_kind)
6900 case OMP_CLAUSE_DEFAULT_NONE:
6902 const char *rtype;
6904 if (ctx->region_type & ORT_PARALLEL)
6905 rtype = "parallel";
6906 else if (ctx->region_type & ORT_TASK)
6907 rtype = "task";
6908 else if (ctx->region_type & ORT_TEAMS)
6909 rtype = "teams";
6910 else
6911 gcc_unreachable ();
6913 error ("%qE not specified in enclosing %qs",
6914 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6915 error_at (ctx->location, "enclosing %qs", rtype);
/* After diagnosing default(none), treat the decl as shared.  */
6917 /* FALLTHRU */
6918 case OMP_CLAUSE_DEFAULT_SHARED:
6919 flags |= GOVD_SHARED;
6920 break;
6921 case OMP_CLAUSE_DEFAULT_PRIVATE:
6922 flags |= GOVD_PRIVATE;
6923 break;
6924 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6925 flags |= GOVD_FIRSTPRIVATE;
6926 break;
6927 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6928 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6929 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6930 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
/* Task with no default clause: inspect the outer contexts to decide
   between firstprivate and shared, per the OpenMP rules.  */
6932 omp_notice_variable (octx, decl, in_code);
6933 for (; octx; octx = octx->outer_context)
6935 splay_tree_node n2;
6937 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6938 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6939 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6940 continue;
6941 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6943 flags |= GOVD_FIRSTPRIVATE;
6944 goto found_outer;
6946 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6948 flags |= GOVD_SHARED;
6949 goto found_outer;
/* No deciding outer context: locals/params are firstprivate,
   everything else shared.  */
6954 if (TREE_CODE (decl) == PARM_DECL
6955 || (!is_global_var (decl)
6956 && DECL_CONTEXT (decl) == current_function_decl))
6957 flags |= GOVD_FIRSTPRIVATE;
6958 else
6959 flags |= GOVD_SHARED;
6960 found_outer:
6961 break;
6963 default:
6964 gcc_unreachable ();
6967 return flags;
6971 /* Determine outer default flags for DECL mentioned in an OACC region
6972 but not declared in an enclosing clause. */
6974 static unsigned
6975 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6977 const char *rkind;
6978 bool on_device = false;
6979 bool declared = is_oacc_declared (decl);
6980 tree type = TREE_TYPE (decl);
/* For by-reference privatization, classify by the referenced type.  */
6982 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6983 type = TREE_TYPE (type);
6985 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6986 && is_global_var (decl)
6987 && device_resident_p (decl))
6989 on_device = true;
6990 flags |= GOVD_MAP_TO_ONLY;
6993 switch (ctx->region_type)
6995 case ORT_ACC_KERNELS:
6996 rkind = "kernels";
6998 if (AGGREGATE_TYPE_P (type))
7000 /* Aggregates default to 'present_or_copy', or 'present'. */
7001 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7002 flags |= GOVD_MAP;
7003 else
7004 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7006 else
7007 /* Scalars default to 'copy'. */
7008 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7010 break;
7012 case ORT_ACC_PARALLEL:
7013 rkind = "parallel";
7015 if (on_device || declared)
7016 flags |= GOVD_MAP;
7017 else if (AGGREGATE_TYPE_P (type))
7019 /* Aggregates default to 'present_or_copy', or 'present'. */
7020 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7021 flags |= GOVD_MAP;
7022 else
7023 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7025 else
7026 /* Scalars default to 'firstprivate'. */
7027 flags |= GOVD_FIRSTPRIVATE;
7029 break;
7031 default:
7032 gcc_unreachable ();
/* Diagnose uses under default(none); compiler temporaries are exempt.  */
7035 if (DECL_ARTIFICIAL (decl))
7036 ; /* We can get compiler-generated decls, and should not complain
7037 about them. */
7038 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7040 error ("%qE not specified in enclosing OpenACC %qs construct",
7041 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7042 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7044 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7045 ; /* Handled above. */
7046 else
7047 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7049 return flags;
7052 /* Record the fact that DECL was used within the OMP context CTX.
7053 IN_CODE is true when real code uses DECL, and false when we should
7054 merely emit default(none) errors. Return true if DECL is going to
7055 be remapped and thus DECL shouldn't be gimplified into its
7056 DECL_VALUE_EXPR (if any). */
7058 static bool
7059 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7061 splay_tree_node n;
7062 unsigned flags = in_code ? GOVD_SEEN : 0;
7063 bool ret = false, shared;
7065 if (error_operand_p (decl))
7066 return false;
7068 if (ctx->region_type == ORT_NONE)
7069 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7071 if (is_global_var (decl))
7073 /* Threadprivate variables are predetermined. */
7074 if (DECL_THREAD_LOCAL_P (decl))
7075 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7077 if (DECL_HAS_VALUE_EXPR_P (decl))
7079 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7081 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7082 return omp_notice_threadprivate_variable (ctx, decl, value);
/* Inside an OpenACC 'routine', global variables must carry a
   'declare' directive (and must not use the 'link' clause).  */
7085 if (gimplify_omp_ctxp->outer_context == NULL
7086 && VAR_P (decl)
7087 && oacc_get_fn_attrib (current_function_decl))
7089 location_t loc = DECL_SOURCE_LOCATION (decl);
7091 if (lookup_attribute ("omp declare target link",
7092 DECL_ATTRIBUTES (decl)))
7094 error_at (loc,
7095 "%qE with %<link%> clause used in %<routine%> function",
7096 DECL_NAME (decl));
7097 return false;
7099 else if (!lookup_attribute ("omp declare target",
7100 DECL_ATTRIBUTES (decl)))
7102 error_at (loc,
7103 "%qE requires a %<declare%> directive for use "
7104 "in a %<routine%> function", DECL_NAME (decl));
7105 return false;
7110 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* Target (OpenMP target / OpenACC compute) regions compute a default
   mapping for DECL when it has no entry yet.  */
7111 if ((ctx->region_type & ORT_TARGET) != 0)
7113 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7114 if (n == NULL)
7116 unsigned nflags = flags;
7117 if (ctx->target_map_pointers_as_0len_arrays
7118 || ctx->target_map_scalars_firstprivate)
7120 bool is_declare_target = false;
7121 bool is_scalar = false;
7122 if (is_global_var (decl)
7123 && varpool_node::get_create (decl)->offloadable)
7125 struct gimplify_omp_ctx *octx;
7126 for (octx = ctx->outer_context;
7127 octx; octx = octx->outer_context)
7129 n = splay_tree_lookup (octx->variables,
7130 (splay_tree_key)decl);
7131 if (n
7132 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7133 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7134 break;
7136 is_declare_target = octx == NULL;
7138 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7139 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7140 if (is_declare_target)
7142 else if (ctx->target_map_pointers_as_0len_arrays
7143 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7144 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7145 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7146 == POINTER_TYPE)))
7147 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7148 else if (is_scalar)
7149 nflags |= GOVD_FIRSTPRIVATE;
7152 struct gimplify_omp_ctx *octx = ctx->outer_context;
7153 if ((ctx->region_type & ORT_ACC) && octx)
7155 /* Look in outer OpenACC contexts, to see if there's a
7156 data attribute for this variable. */
7157 omp_notice_variable (octx, decl, in_code);
7159 for (; octx; octx = octx->outer_context)
7161 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7162 break;
7163 splay_tree_node n2
7164 = splay_tree_lookup (octx->variables,
7165 (splay_tree_key) decl);
7166 if (n2)
7168 if (octx->region_type == ORT_ACC_HOST_DATA)
7169 error ("variable %qE declared in enclosing "
7170 "%<host_data%> region", DECL_NAME (decl));
7171 nflags |= GOVD_MAP;
7172 if (octx->region_type == ORT_ACC_DATA
7173 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7174 nflags |= GOVD_MAP_0LEN_ARRAY;
7175 goto found_outer;
/* If nothing above chose a mapping, verify the type is mappable
   and fall back to the region's default clause.  */
7181 tree type = TREE_TYPE (decl);
7183 if (nflags == flags
7184 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7185 && lang_hooks.decls.omp_privatize_by_reference (decl))
7186 type = TREE_TYPE (type);
7187 if (nflags == flags
7188 && !lang_hooks.types.omp_mappable_type (type))
7190 error ("%qD referenced in target region does not have "
7191 "a mappable type", decl);
7192 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7194 else if (nflags == flags)
7196 if ((ctx->region_type & ORT_ACC) != 0)
7197 nflags = oacc_default_clause (ctx, decl, flags);
7198 else
7199 nflags |= GOVD_MAP;
7202 found_outer:
7203 omp_add_variable (ctx, decl, nflags);
7205 else
7207 /* If nothing changed, there's nothing left to do. */
7208 if ((n->value & flags) == flags)
7209 return ret;
7210 flags |= n->value;
7211 n->value = flags;
7213 goto do_outer;
/* Non-target region with no entry: apply the default clause.  */
7216 if (n == NULL)
7218 if (ctx->region_type == ORT_WORKSHARE
7219 || ctx->region_type == ORT_SIMD
7220 || ctx->region_type == ORT_ACC
7221 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7222 goto do_outer;
7224 flags = omp_default_clause (ctx, decl, in_code, flags);
7226 if ((flags & GOVD_PRIVATE)
7227 && lang_hooks.decls.omp_private_outer_ref (decl))
7228 flags |= GOVD_PRIVATE_OUTER_REF;
7230 omp_add_variable (ctx, decl, flags);
7232 shared = (flags & GOVD_SHARED) != 0;
7233 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7234 goto do_outer;
/* Existing entry newly marked SEEN: also mark its size machinery
   (VLA pointer decl or referenced type size) as seen/noticed.  */
7237 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7238 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7239 && DECL_SIZE (decl))
7241 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7243 splay_tree_node n2;
7244 tree t = DECL_VALUE_EXPR (decl);
7245 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7246 t = TREE_OPERAND (t, 0);
7247 gcc_assert (DECL_P (t));
7248 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7249 n2->value |= GOVD_SEEN;
7251 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7252 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7253 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7254 != INTEGER_CST))
7256 splay_tree_node n2;
7257 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7258 gcc_assert (DECL_P (t));
7259 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7260 if (n2)
7261 omp_notice_variable (ctx, t, true);
7265 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7266 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7268 /* If nothing changed, there's nothing left to do. */
7269 if ((n->value & flags) == flags)
7270 return ret;
7271 flags |= n->value;
7272 n->value = flags;
7274 do_outer:
7275 /* If the variable is private in the current context, then we don't
7276 need to propagate anything to an outer context. */
7277 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7278 return ret;
7279 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7280 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7281 return ret;
7282 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7283 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7284 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7285 return ret;
/* Recurse outward so enclosing contexts also notice the use.  */
7286 if (ctx->outer_context
7287 && omp_notice_variable (ctx->outer_context, decl, in_code))
7288 return true;
7289 return ret;
7292 /* Verify that DECL is private within CTX. If there's specific information
7293 to the contrary in the innermost scope, generate an error. */
7295 static bool
7296 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7298 splay_tree_node n;
7300 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7301 if (n != NULL)
7303 if (n->value & GOVD_SHARED)
7305 if (ctx == gimplify_omp_ctxp)
/* SIMD (non-zero) wording differs from the plain loop wording.  */
7307 if (simd)
7308 error ("iteration variable %qE is predetermined linear",
7309 DECL_NAME (decl));
7310 else
7311 error ("iteration variable %qE should be private",
7312 DECL_NAME (decl));
/* Downgrade to private so follow-on diagnostics don't repeat.  */
7313 n->value = GOVD_PRIVATE;
7314 return true;
7316 else
7317 return false;
7319 else if ((n->value & GOVD_EXPLICIT) != 0
7320 && (ctx == gimplify_omp_ctxp
7321 || (ctx->region_type == ORT_COMBINED_PARALLEL
7322 && gimplify_omp_ctxp->outer_context == ctx)))
7324 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7325 error ("iteration variable %qE should not be firstprivate",
7326 DECL_NAME (decl));
7327 else if ((n->value & GOVD_REDUCTION) != 0)
7328 error ("iteration variable %qE should not be reduction",
7329 DECL_NAME (decl));
7330 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7331 error ("iteration variable %qE should not be linear",
7332 DECL_NAME (decl));
7333 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7334 error ("iteration variable %qE should not be lastprivate",
7335 DECL_NAME (decl));
7336 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7337 error ("iteration variable %qE should not be private",
7338 DECL_NAME (decl));
7339 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7340 error ("iteration variable %qE is predetermined linear",
7341 DECL_NAME (decl));
7343 return (ctx == gimplify_omp_ctxp
7344 || (ctx->region_type == ORT_COMBINED_PARALLEL
7345 && gimplify_omp_ctxp->outer_context == ctx));
/* No entry here: recurse into the enclosing worksharing/SIMD/ACC
   context, if any; otherwise DECL isn't known private.  */
7348 if (ctx->region_type != ORT_WORKSHARE
7349 && ctx->region_type != ORT_SIMD
7350 && ctx->region_type != ORT_ACC)
7351 return false;
7352 else if (ctx->outer_context)
7353 return omp_is_private (ctx->outer_context, decl, simd);
7354 return false;
7357 /* Return true if DECL is private within a parallel region
7358 that binds to the current construct's context or in parallel
7359 region's REDUCTION clause.  COPYPRIVATE biases the answer for
   by-reference variables (see comment below).  */
7361 static bool
7362 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7364 splay_tree_node n;
/* Walk outward one context at a time; the trailing while() below
   keeps the loop going only across workshare / simd / OpenACC
   regions, so the loop stops at the innermost binding region.  */
7368 ctx = ctx->outer_context;
/* Ran off the outermost context: a non-global automatic variable is
   effectively private there, modulo the caveats below.  */
7369 if (ctx == NULL)
7371 if (is_global_var (decl))
7372 return false;
7374 /* References might be private, but might be shared too,
7375 when checking for copyprivate, assume they might be
7376 private, otherwise assume they might be shared. */
7377 if (copyprivate)
7378 return true;
7380 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7381 return false;
7383 /* Treat C++ privatized non-static data members outside
7384 of the privatization the same. */
7385 if (omp_member_access_dummy_var (decl))
7386 return false;
7388 return true;
7391 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* Target regions that do not give DECL an explicit data-sharing
   class do not settle the question; look further out.  */
7393 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7394 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7395 continue;
7397 if (n != NULL)
7399 if ((n->value & GOVD_LOCAL) != 0
7400 && omp_member_access_dummy_var (decl))
7401 return false;
/* Private here iff the binding region did not mark it shared.  */
7402 return (n->value & GOVD_SHARED) == 0;
7405 while (ctx->region_type == ORT_WORKSHARE
7406 || ctx->region_type == ORT_SIMD
7407 || ctx->region_type == ORT_ACC);
7408 return false;
7411 /* Callback for walk_tree to find a DECL_EXPR for the given DECL
   (passed via DATA).  Returns the DECL_EXPR node, or NULL_TREE if
   none is found in the walked tree.  */
7413 static tree
7414 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7416 tree t = *tp;
7418 /* Found the DECL_EXPR declaring DATA: return it to stop the walk.  */
7419 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7420 return t;
/* Types and decls cannot contain the DECL_EXPR we are after; prune
   the walk at them.  */
7422 if (IS_TYPE_OR_DECL_P (t))
7423 *walk_subtrees = 0;
7424 return NULL_TREE;
7427 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7428 and previous omp contexts.  REGION_TYPE classifies the construct
   being gimplified and CODE is its tree code; PRE_P receives any
   statements produced while gimplifying clause operands.  */
7430 static void
7431 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7432 enum omp_region_type region_type,
7433 enum tree_code code)
7435 struct gimplify_omp_ctx *ctx, *outer_ctx;
7436 tree c;
/* Maps a structure's base decl to its GOMP_MAP_STRUCT clause so that
   sibling component mappings of the same structure can be merged
   under it (see the OMP_CLAUSE_MAP case).  */
7437 hash_map<tree, tree> *struct_map_to_clause = NULL;
/* Chain slot of the clause preceding a GOMP_MAP_ALWAYS_POINTER
   clause, used to re-chain the pointer mapping next to the data it
   points to.  */
7438 tree *prev_list_p = NULL;
7440 ctx = new_omp_context (region_type);
7441 outer_ctx = ctx->outer_context;
7442 if (code == OMP_TARGET)
7444 if (!lang_GNU_Fortran ())
7445 ctx->target_map_pointers_as_0len_arrays = true;
7446 ctx->target_map_scalars_firstprivate = true;
7448 if (!lang_GNU_Fortran ())
7449 switch (code)
7451 case OMP_TARGET:
7452 case OMP_TARGET_DATA:
7453 case OMP_TARGET_ENTER_DATA:
7454 case OMP_TARGET_EXIT_DATA:
7455 case OACC_DECLARE:
7456 case OACC_HOST_DATA:
7457 ctx->target_firstprivatize_array_bases = true;
7458 default:
7459 break;
/* Process each clause in turn.  REMOVE drops the current clause from
   the list; list_p / prev_list_p do manual list surgery when clauses
   are re-chained or new ones inserted.  */
7462 while ((c = *list_p) != NULL)
7464 bool remove = false;
7465 bool notice_outer = true;
7466 const char *check_non_private = NULL;
7467 unsigned int flags;
7468 tree decl;
7470 switch (OMP_CLAUSE_CODE (c))
7472 case OMP_CLAUSE_PRIVATE:
7473 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7474 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7476 flags |= GOVD_PRIVATE_OUTER_REF;
7477 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7479 else
7480 notice_outer = false;
7481 goto do_add;
7482 case OMP_CLAUSE_SHARED:
7483 flags = GOVD_SHARED | GOVD_EXPLICIT;
7484 goto do_add;
7485 case OMP_CLAUSE_FIRSTPRIVATE:
7486 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7487 check_non_private = "firstprivate";
7488 goto do_add;
7489 case OMP_CLAUSE_LASTPRIVATE:
7490 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7491 check_non_private = "lastprivate";
7492 decl = OMP_CLAUSE_DECL (c);
7493 if (error_operand_p (decl))
7494 goto do_add;
/* On combined constructs, also record DECL appropriately in the
   enclosing contexts (shared on parallel/teams, lastprivate on a
   combined loop) so all levels agree.  */
7495 else if (outer_ctx
7496 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7497 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7498 && splay_tree_lookup (outer_ctx->variables,
7499 (splay_tree_key) decl) == NULL)
7501 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7502 if (outer_ctx->outer_context)
7503 omp_notice_variable (outer_ctx->outer_context, decl, true);
7505 else if (outer_ctx
7506 && (outer_ctx->region_type & ORT_TASK) != 0
7507 && outer_ctx->combined_loop
7508 && splay_tree_lookup (outer_ctx->variables,
7509 (splay_tree_key) decl) == NULL)
7511 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7512 if (outer_ctx->outer_context)
7513 omp_notice_variable (outer_ctx->outer_context, decl, true);
7515 else if (outer_ctx
7516 && (outer_ctx->region_type == ORT_WORKSHARE
7517 || outer_ctx->region_type == ORT_ACC)
7518 && outer_ctx->combined_loop
7519 && splay_tree_lookup (outer_ctx->variables,
7520 (splay_tree_key) decl) == NULL
7521 && !omp_check_private (outer_ctx, decl, false))
7523 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7524 if (outer_ctx->outer_context
7525 && (outer_ctx->outer_context->region_type
7526 == ORT_COMBINED_PARALLEL)
7527 && splay_tree_lookup (outer_ctx->outer_context->variables,
7528 (splay_tree_key) decl) == NULL)
7530 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7531 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7532 if (octx->outer_context)
7534 octx = octx->outer_context;
7535 if (octx->region_type == ORT_WORKSHARE
7536 && octx->combined_loop
7537 && splay_tree_lookup (octx->variables,
7538 (splay_tree_key) decl) == NULL
7539 && !omp_check_private (octx, decl, false))
7541 omp_add_variable (octx, decl,
7542 GOVD_LASTPRIVATE | GOVD_SEEN);
7543 octx = octx->outer_context;
7544 if (octx
7545 && octx->region_type == ORT_COMBINED_TEAMS
7546 && (splay_tree_lookup (octx->variables,
7547 (splay_tree_key) decl)
7548 == NULL))
7550 omp_add_variable (octx, decl,
7551 GOVD_SHARED | GOVD_SEEN);
7552 octx = octx->outer_context;
7555 if (octx)
7556 omp_notice_variable (octx, decl, true);
7559 else if (outer_ctx->outer_context)
7560 omp_notice_variable (outer_ctx->outer_context, decl, true);
7562 goto do_add;
7563 case OMP_CLAUSE_REDUCTION:
7564 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7565 /* OpenACC permits reductions on private variables. */
7566 if (!(region_type & ORT_ACC))
7567 check_non_private = "reduction";
7568 decl = OMP_CLAUSE_DECL (c);
/* Array-section reductions arrive as a MEM_REF; gimplify the bound
   and offset expressions and peel down to the underlying decl.  */
7569 if (TREE_CODE (decl) == MEM_REF)
7571 tree type = TREE_TYPE (decl);
7572 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7573 NULL, is_gimple_val, fb_rvalue, false)
7574 == GS_ERROR)
7576 remove = true;
7577 break;
7579 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7580 if (DECL_P (v))
7582 omp_firstprivatize_variable (ctx, v);
7583 omp_notice_variable (ctx, v, true);
7585 decl = TREE_OPERAND (decl, 0);
7586 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7588 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7589 NULL, is_gimple_val, fb_rvalue, false)
7590 == GS_ERROR)
7592 remove = true;
7593 break;
7595 v = TREE_OPERAND (decl, 1);
7596 if (DECL_P (v))
7598 omp_firstprivatize_variable (ctx, v);
7599 omp_notice_variable (ctx, v, true);
7601 decl = TREE_OPERAND (decl, 0);
7603 if (TREE_CODE (decl) == ADDR_EXPR
7604 || TREE_CODE (decl) == INDIRECT_REF)
7605 decl = TREE_OPERAND (decl, 0);
7607 goto do_add_decl;
7608 case OMP_CLAUSE_LINEAR:
7609 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7610 is_gimple_val, fb_rvalue) == GS_ERROR)
7612 remove = true;
7613 break;
7615 else
7617 if (code == OMP_SIMD
7618 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7620 struct gimplify_omp_ctx *octx = outer_ctx;
7621 if (octx
7622 && octx->region_type == ORT_WORKSHARE
7623 && octx->combined_loop
7624 && !octx->distribute)
7626 if (octx->outer_context
7627 && (octx->outer_context->region_type
7628 == ORT_COMBINED_PARALLEL))
7629 octx = octx->outer_context->outer_context;
7630 else
7631 octx = octx->outer_context;
7633 if (octx
7634 && octx->region_type == ORT_WORKSHARE
7635 && octx->combined_loop
7636 && octx->distribute)
7638 error_at (OMP_CLAUSE_LOCATION (c),
7639 "%<linear%> clause for variable other than "
7640 "loop iterator specified on construct "
7641 "combined with %<distribute%>");
7642 remove = true;
7643 break;
7646 /* For combined #pragma omp parallel for simd, need to put
7647 lastprivate and perhaps firstprivate too on the
7648 parallel. Similarly for #pragma omp for simd. */
7649 struct gimplify_omp_ctx *octx = outer_ctx;
7650 decl = NULL_TREE;
7653 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7654 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7655 break;
7656 decl = OMP_CLAUSE_DECL (c);
7657 if (error_operand_p (decl))
7659 decl = NULL_TREE;
7660 break;
7662 flags = GOVD_SEEN;
7663 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7664 flags |= GOVD_FIRSTPRIVATE;
7665 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7666 flags |= GOVD_LASTPRIVATE;
7667 if (octx
7668 && octx->region_type == ORT_WORKSHARE
7669 && octx->combined_loop)
7671 if (octx->outer_context
7672 && (octx->outer_context->region_type
7673 == ORT_COMBINED_PARALLEL))
7674 octx = octx->outer_context;
7675 else if (omp_check_private (octx, decl, false))
7676 break;
7678 else if (octx
7679 && (octx->region_type & ORT_TASK) != 0
7680 && octx->combined_loop)
7682 else if (octx
7683 && octx->region_type == ORT_COMBINED_PARALLEL
7684 && ctx->region_type == ORT_WORKSHARE
7685 && octx == outer_ctx)
7686 flags = GOVD_SEEN | GOVD_SHARED;
7687 else if (octx
7688 && octx->region_type == ORT_COMBINED_TEAMS)
7689 flags = GOVD_SEEN | GOVD_SHARED;
7690 else if (octx
7691 && octx->region_type == ORT_COMBINED_TARGET)
7693 flags &= ~GOVD_LASTPRIVATE;
7694 if (flags == GOVD_SEEN)
7695 break;
7697 else
7698 break;
7699 splay_tree_node on
7700 = splay_tree_lookup (octx->variables,
7701 (splay_tree_key) decl);
7702 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7704 octx = NULL;
7705 break;
7707 omp_add_variable (octx, decl, flags);
7708 if (octx->outer_context == NULL)
7709 break;
7710 octx = octx->outer_context;
7712 while (1);
7713 if (octx
7714 && decl
7715 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7716 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7717 omp_notice_variable (octx, decl, true);
7719 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7720 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7721 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7723 notice_outer = false;
7724 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7726 goto do_add;
/* Map clauses: gimplify the mapped size, diagnose conflicts with
   enclosing host_data regions, and merge component accesses of one
   structure under a single GOMP_MAP_STRUCT clause, re-chaining
   GOMP_MAP_ALWAYS_POINTER clauses next to their data mapping.  */
7728 case OMP_CLAUSE_MAP:
7729 decl = OMP_CLAUSE_DECL (c);
7730 if (error_operand_p (decl))
7731 remove = true;
7732 switch (code)
7734 case OMP_TARGET:
7735 break;
7736 case OACC_DATA:
7737 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7738 break;
7739 /* FALLTHRU */
7740 case OMP_TARGET_DATA:
7741 case OMP_TARGET_ENTER_DATA:
7742 case OMP_TARGET_EXIT_DATA:
7743 case OACC_ENTER_DATA:
7744 case OACC_EXIT_DATA:
7745 case OACC_HOST_DATA:
7746 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7747 || (OMP_CLAUSE_MAP_KIND (c)
7748 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7749 /* For target {,enter ,exit }data only the array slice is
7750 mapped, but not the pointer to it. */
7751 remove = true;
7752 break;
7753 default:
7754 break;
7756 if (remove)
7757 break;
7758 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7760 struct gimplify_omp_ctx *octx;
7761 for (octx = outer_ctx; octx; octx = octx->outer_context)
7763 if (octx->region_type != ORT_ACC_HOST_DATA)
7764 break;
7765 splay_tree_node n2
7766 = splay_tree_lookup (octx->variables,
7767 (splay_tree_key) decl);
7768 if (n2)
7769 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7770 "declared in enclosing %<host_data%> region",
7771 DECL_NAME (decl));
7774 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7775 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7776 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7777 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7778 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7780 remove = true;
7781 break;
7783 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7784 || (OMP_CLAUSE_MAP_KIND (c)
7785 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7786 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7788 OMP_CLAUSE_SIZE (c)
7789 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7790 false);
7791 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7792 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7794 if (!DECL_P (decl))
7796 tree d = decl, *pd;
7797 if (TREE_CODE (d) == ARRAY_REF)
7799 while (TREE_CODE (d) == ARRAY_REF)
7800 d = TREE_OPERAND (d, 0);
7801 if (TREE_CODE (d) == COMPONENT_REF
7802 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7803 decl = d;
7805 pd = &OMP_CLAUSE_DECL (c);
7806 if (d == decl
7807 && TREE_CODE (decl) == INDIRECT_REF
7808 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7809 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7810 == REFERENCE_TYPE))
7812 pd = &TREE_OPERAND (decl, 0);
7813 decl = TREE_OPERAND (decl, 0);
7815 if (TREE_CODE (decl) == COMPONENT_REF)
7817 while (TREE_CODE (decl) == COMPONENT_REF)
7818 decl = TREE_OPERAND (decl, 0);
7819 if (TREE_CODE (decl) == INDIRECT_REF
7820 && DECL_P (TREE_OPERAND (decl, 0))
7821 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7822 == REFERENCE_TYPE))
7823 decl = TREE_OPERAND (decl, 0);
7825 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7826 == GS_ERROR)
7828 remove = true;
7829 break;
7831 if (DECL_P (decl))
7833 if (error_operand_p (decl))
7835 remove = true;
7836 break;
7839 tree stype = TREE_TYPE (decl);
7840 if (TREE_CODE (stype) == REFERENCE_TYPE)
7841 stype = TREE_TYPE (stype);
7842 if (TYPE_SIZE_UNIT (stype) == NULL
7843 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7845 error_at (OMP_CLAUSE_LOCATION (c),
7846 "mapping field %qE of variable length "
7847 "structure", OMP_CLAUSE_DECL (c));
7848 remove = true;
7849 break;
7852 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7854 /* Error recovery. */
7855 if (prev_list_p == NULL)
7857 remove = true;
7858 break;
7860 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7862 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7863 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7865 remove = true;
7866 break;
7871 tree offset;
7872 HOST_WIDE_INT bitsize, bitpos;
7873 machine_mode mode;
7874 int unsignedp, reversep, volatilep = 0;
7875 tree base = OMP_CLAUSE_DECL (c);
7876 while (TREE_CODE (base) == ARRAY_REF)
7877 base = TREE_OPERAND (base, 0);
7878 if (TREE_CODE (base) == INDIRECT_REF)
7879 base = TREE_OPERAND (base, 0);
7880 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7881 &mode, &unsignedp, &reversep,
7882 &volatilep);
7883 tree orig_base = base;
7884 if ((TREE_CODE (base) == INDIRECT_REF
7885 || (TREE_CODE (base) == MEM_REF
7886 && integer_zerop (TREE_OPERAND (base, 1))))
7887 && DECL_P (TREE_OPERAND (base, 0))
7888 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7889 == REFERENCE_TYPE))
7890 base = TREE_OPERAND (base, 0);
7891 gcc_assert (base == decl
7892 && (offset == NULL_TREE
7893 || TREE_CODE (offset) == INTEGER_CST));
7895 splay_tree_node n
7896 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7897 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7898 == GOMP_MAP_ALWAYS_POINTER);
/* First component mapping of this structure: create the
   GOMP_MAP_STRUCT clause and remember it for later siblings.  */
7899 if (n == NULL || (n->value & GOVD_MAP) == 0)
7901 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7902 OMP_CLAUSE_MAP);
7903 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7904 if (orig_base != base)
7905 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7906 else
7907 OMP_CLAUSE_DECL (l) = decl;
7908 OMP_CLAUSE_SIZE (l) = size_int (1);
7909 if (struct_map_to_clause == NULL)
7910 struct_map_to_clause = new hash_map<tree, tree>;
7911 struct_map_to_clause->put (decl, l);
7912 if (ptr)
7914 enum gomp_map_kind mkind
7915 = code == OMP_TARGET_EXIT_DATA
7916 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7917 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7918 OMP_CLAUSE_MAP);
7919 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7920 OMP_CLAUSE_DECL (c2)
7921 = unshare_expr (OMP_CLAUSE_DECL (c));
7922 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7923 OMP_CLAUSE_SIZE (c2)
7924 = TYPE_SIZE_UNIT (ptr_type_node);
7925 OMP_CLAUSE_CHAIN (l) = c2;
7926 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7928 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7929 tree c3
7930 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7931 OMP_CLAUSE_MAP);
7932 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7933 OMP_CLAUSE_DECL (c3)
7934 = unshare_expr (OMP_CLAUSE_DECL (c4));
7935 OMP_CLAUSE_SIZE (c3)
7936 = TYPE_SIZE_UNIT (ptr_type_node);
7937 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7938 OMP_CLAUSE_CHAIN (c2) = c3;
7940 *prev_list_p = l;
7941 prev_list_p = NULL;
7943 else
7945 OMP_CLAUSE_CHAIN (l) = c;
7946 *list_p = l;
7947 list_p = &OMP_CLAUSE_CHAIN (l);
7949 if (orig_base != base && code == OMP_TARGET)
7951 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7952 OMP_CLAUSE_MAP);
7953 enum gomp_map_kind mkind
7954 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7955 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7956 OMP_CLAUSE_DECL (c2) = decl;
7957 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7958 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7959 OMP_CLAUSE_CHAIN (l) = c2;
7961 flags = GOVD_MAP | GOVD_EXPLICIT;
7962 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7963 flags |= GOVD_SEEN;
7964 goto do_add_decl;
/* Subsequent component of an already-mapped structure: insert
   this clause into the STRUCT clause's list, sorted by offset.  */
7966 else
7968 tree *osc = struct_map_to_clause->get (decl);
7969 tree *sc = NULL, *scp = NULL;
7970 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7971 n->value |= GOVD_SEEN;
7972 offset_int o1, o2;
7973 if (offset)
7974 o1 = wi::to_offset (offset);
7975 else
7976 o1 = 0;
7977 if (bitpos)
7978 o1 = o1 + bitpos / BITS_PER_UNIT;
7979 sc = &OMP_CLAUSE_CHAIN (*osc);
7980 if (*sc != c
7981 && (OMP_CLAUSE_MAP_KIND (*sc)
7982 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7983 sc = &OMP_CLAUSE_CHAIN (*sc);
7984 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7985 if (ptr && sc == prev_list_p)
7986 break;
7987 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7988 != COMPONENT_REF
7989 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7990 != INDIRECT_REF)
7991 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7992 != ARRAY_REF))
7993 break;
7994 else
7996 tree offset2;
7997 HOST_WIDE_INT bitsize2, bitpos2;
7998 base = OMP_CLAUSE_DECL (*sc);
7999 if (TREE_CODE (base) == ARRAY_REF)
8001 while (TREE_CODE (base) == ARRAY_REF)
8002 base = TREE_OPERAND (base, 0);
8003 if (TREE_CODE (base) != COMPONENT_REF
8004 || (TREE_CODE (TREE_TYPE (base))
8005 != ARRAY_TYPE))
8006 break;
8008 else if (TREE_CODE (base) == INDIRECT_REF
8009 && (TREE_CODE (TREE_OPERAND (base, 0))
8010 == COMPONENT_REF)
8011 && (TREE_CODE (TREE_TYPE
8012 (TREE_OPERAND (base, 0)))
8013 == REFERENCE_TYPE))
8014 base = TREE_OPERAND (base, 0);
8015 base = get_inner_reference (base, &bitsize2,
8016 &bitpos2, &offset2,
8017 &mode, &unsignedp,
8018 &reversep, &volatilep);
8019 if ((TREE_CODE (base) == INDIRECT_REF
8020 || (TREE_CODE (base) == MEM_REF
8021 && integer_zerop (TREE_OPERAND (base,
8022 1))))
8023 && DECL_P (TREE_OPERAND (base, 0))
8024 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8025 0)))
8026 == REFERENCE_TYPE))
8027 base = TREE_OPERAND (base, 0);
8028 if (base != decl)
8029 break;
8030 if (scp)
8031 continue;
8032 gcc_assert (offset == NULL_TREE
8033 || TREE_CODE (offset) == INTEGER_CST);
8034 tree d1 = OMP_CLAUSE_DECL (*sc);
8035 tree d2 = OMP_CLAUSE_DECL (c);
8036 while (TREE_CODE (d1) == ARRAY_REF)
8037 d1 = TREE_OPERAND (d1, 0);
8038 while (TREE_CODE (d2) == ARRAY_REF)
8039 d2 = TREE_OPERAND (d2, 0);
8040 if (TREE_CODE (d1) == INDIRECT_REF)
8041 d1 = TREE_OPERAND (d1, 0);
8042 if (TREE_CODE (d2) == INDIRECT_REF)
8043 d2 = TREE_OPERAND (d2, 0);
8044 while (TREE_CODE (d1) == COMPONENT_REF)
8045 if (TREE_CODE (d2) == COMPONENT_REF
8046 && TREE_OPERAND (d1, 1)
8047 == TREE_OPERAND (d2, 1))
8049 d1 = TREE_OPERAND (d1, 0);
8050 d2 = TREE_OPERAND (d2, 0);
8052 else
8053 break;
8054 if (d1 == d2)
8056 error_at (OMP_CLAUSE_LOCATION (c),
8057 "%qE appears more than once in map "
8058 "clauses", OMP_CLAUSE_DECL (c));
8059 remove = true;
8060 break;
8062 if (offset2)
8063 o2 = wi::to_offset (offset2);
8064 else
8065 o2 = 0;
8066 if (bitpos2)
8067 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8068 if (wi::ltu_p (o1, o2)
8069 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8071 if (ptr)
8072 scp = sc;
8073 else
8074 break;
8077 if (remove)
8078 break;
8079 OMP_CLAUSE_SIZE (*osc)
8080 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8081 size_one_node);
8082 if (ptr)
8084 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8085 OMP_CLAUSE_MAP);
8086 tree cl = NULL_TREE;
8087 enum gomp_map_kind mkind
8088 = code == OMP_TARGET_EXIT_DATA
8089 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8090 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8091 OMP_CLAUSE_DECL (c2)
8092 = unshare_expr (OMP_CLAUSE_DECL (c));
8093 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8094 OMP_CLAUSE_SIZE (c2)
8095 = TYPE_SIZE_UNIT (ptr_type_node);
8096 cl = scp ? *prev_list_p : c2;
8097 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8099 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8100 tree c3
8101 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8102 OMP_CLAUSE_MAP);
8103 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8104 OMP_CLAUSE_DECL (c3)
8105 = unshare_expr (OMP_CLAUSE_DECL (c4));
8106 OMP_CLAUSE_SIZE (c3)
8107 = TYPE_SIZE_UNIT (ptr_type_node);
8108 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8109 if (!scp)
8110 OMP_CLAUSE_CHAIN (c2) = c3;
8111 else
8112 cl = c3;
8114 if (scp)
8115 *scp = c2;
8116 if (sc == prev_list_p)
8118 *sc = cl;
8119 prev_list_p = NULL;
8121 else
8123 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8124 list_p = prev_list_p;
8125 prev_list_p = NULL;
8126 OMP_CLAUSE_CHAIN (c) = *sc;
8127 *sc = cl;
8128 continue;
8131 else if (*sc != c)
8133 *list_p = OMP_CLAUSE_CHAIN (c);
8134 OMP_CLAUSE_CHAIN (c) = *sc;
8135 *sc = c;
8136 continue;
8140 if (!remove
8141 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8142 && OMP_CLAUSE_CHAIN (c)
8143 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8144 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8145 == GOMP_MAP_ALWAYS_POINTER))
8146 prev_list_p = list_p;
8147 break;
8149 flags = GOVD_MAP | GOVD_EXPLICIT;
8150 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8151 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM
8152 flags |= GOVD_MAP_ALWAYS_TO;
8153 goto do_add;
8155 case OMP_CLAUSE_DEPEND:
8156 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8158 tree deps = OMP_CLAUSE_DECL (c);
8159 while (deps && TREE_CODE (deps) == TREE_LIST)
8161 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8162 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8163 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8164 pre_p, NULL, is_gimple_val, fb_rvalue);
8165 deps = TREE_CHAIN (deps);
8167 break;
8169 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8170 break;
8171 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8173 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8174 NULL, is_gimple_val, fb_rvalue);
8175 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8177 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8179 remove = true;
8180 break;
8182 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8183 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8184 is_gimple_val, fb_rvalue) == GS_ERROR)
8186 remove = true;
8187 break;
8189 break;
8191 case OMP_CLAUSE_TO:
8192 case OMP_CLAUSE_FROM:
8193 case OMP_CLAUSE__CACHE_:
8194 decl = OMP_CLAUSE_DECL (c);
8195 if (error_operand_p (decl))
8197 remove = true;
8198 break;
8200 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8201 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8202 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8203 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8204 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8206 remove = true;
8207 break;
8209 if (!DECL_P (decl))
8211 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8212 NULL, is_gimple_lvalue, fb_lvalue)
8213 == GS_ERROR)
8215 remove = true;
8216 break;
8218 break;
8220 goto do_notice;
8222 case OMP_CLAUSE_USE_DEVICE_PTR:
8223 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8224 goto do_add;
8225 case OMP_CLAUSE_IS_DEVICE_PTR:
8226 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8227 goto do_add;
/* Common tail for most data-sharing clauses: record DECL with FLAGS
   in the new context and gimplify any statements attached to
   reduction / lastprivate / linear clauses in that context.  */
8229 do_add:
8230 decl = OMP_CLAUSE_DECL (c);
8231 do_add_decl:
8232 if (error_operand_p (decl))
8234 remove = true;
8235 break;
8237 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8239 tree t = omp_member_access_dummy_var (decl);
8240 if (t)
8242 tree v = DECL_VALUE_EXPR (decl);
8243 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8244 if (outer_ctx)
8245 omp_notice_variable (outer_ctx, t, true);
8248 if (code == OACC_DATA
8249 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8250 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8251 flags |= GOVD_MAP_0LEN_ARRAY;
8252 omp_add_variable (ctx, decl, flags);
8253 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8254 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8256 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8257 GOVD_LOCAL | GOVD_SEEN);
8258 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8259 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8260 find_decl_expr,
8261 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8262 NULL) == NULL_TREE)
8263 omp_add_variable (ctx,
8264 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8265 GOVD_LOCAL | GOVD_SEEN)
8266 gimplify_omp_ctxp = ctx;
8267 push_gimplify_context ();
8269 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8270 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8272 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8273 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8274 pop_gimplify_context
8275 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8276 push_gimplify_context ();
8277 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8278 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8279 pop_gimplify_context
8280 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8281 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8282 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8284 gimplify_omp_ctxp = outer_ctx;
8286 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8287 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8289 gimplify_omp_ctxp = ctx;
8290 push_gimplify_context ();
8291 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8293 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8294 NULL, NULL);
8295 TREE_SIDE_EFFECTS (bind) = 1;
8296 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8297 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8299 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8300 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8301 pop_gimplify_context
8302 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8303 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8305 gimplify_omp_ctxp = outer_ctx;
8307 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8308 && OMP_CLAUSE_LINEAR_STMT (c))
8310 gimplify_omp_ctxp = ctx;
8311 push_gimplify_context ();
8312 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8314 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8315 NULL, NULL);
8316 TREE_SIDE_EFFECTS (bind) = 1;
8317 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8318 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8320 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8321 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8322 pop_gimplify_context
8323 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8324 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8326 gimplify_omp_ctxp = outer_ctx;
8328 if (notice_outer)
8329 goto do_notice;
8330 break;
8332 case OMP_CLAUSE_COPYIN:
8333 case OMP_CLAUSE_COPYPRIVATE:
8334 decl = OMP_CLAUSE_DECL (c);
8335 if (error_operand_p (decl))
8337 remove = true;
8338 break;
8340 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8341 && !remove
8342 && !omp_check_private (ctx, decl, true))
8344 remove = true;
8345 if (is_global_var (decl))
8347 if (DECL_THREAD_LOCAL_P (decl))
8348 remove = false;
8349 else if (DECL_HAS_VALUE_EXPR_P (decl))
8351 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8353 if (value
8354 && DECL_P (value)
8355 && DECL_THREAD_LOCAL_P (value))
8356 remove = false;
8359 if (remove)
8360 error_at (OMP_CLAUSE_LOCATION (c),
8361 "copyprivate variable %qE is not threadprivate"
8362 " or private in outer context", DECL_NAME (decl));
8364 do_notice:
8365 if (outer_ctx)
8366 omp_notice_variable (outer_ctx, decl, true);
8367 if (check_non_private
8368 && region_type == ORT_WORKSHARE
8369 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8370 || decl == OMP_CLAUSE_DECL (c)
8371 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8372 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8373 == ADDR_EXPR
8374 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8375 == POINTER_PLUS_EXPR
8376 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8377 (OMP_CLAUSE_DECL (c), 0), 0))
8378 == ADDR_EXPR)))))
8379 && omp_check_private (ctx, decl, false))
8381 error ("%s variable %qE is private in outer context",
8382 check_non_private, DECL_NAME (decl));
8383 remove = true;
8385 break;
8387 case OMP_CLAUSE_IF:
8388 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8389 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8391 const char *p[2];
8392 for (int i = 0; i < 2; i++)
8393 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8395 case OMP_PARALLEL: p[i] = "parallel"; break;
8396 case OMP_TASK: p[i] = "task"; break;
8397 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8398 case OMP_TARGET_DATA: p[i] = "target data"; break;
8399 case OMP_TARGET: p[i] = "target"; break;
8400 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8401 case OMP_TARGET_ENTER_DATA:
8402 p[i] = "target enter data"; break;
8403 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8404 default: gcc_unreachable ();
8406 error_at (OMP_CLAUSE_LOCATION (c),
8407 "expected %qs %<if%> clause modifier rather than %qs",
8408 p[0], p[1]);
8409 remove = true;
8411 /* Fall through. */
8413 case OMP_CLAUSE_FINAL:
8414 OMP_CLAUSE_OPERAND (c, 0)
8415 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8416 /* Fall through. */
8418 case OMP_CLAUSE_SCHEDULE:
8419 case OMP_CLAUSE_NUM_THREADS:
8420 case OMP_CLAUSE_NUM_TEAMS:
8421 case OMP_CLAUSE_THREAD_LIMIT:
8422 case OMP_CLAUSE_DIST_SCHEDULE:
8423 case OMP_CLAUSE_DEVICE:
8424 case OMP_CLAUSE_PRIORITY:
8425 case OMP_CLAUSE_GRAINSIZE:
8426 case OMP_CLAUSE_NUM_TASKS:
8427 case OMP_CLAUSE_HINT:
8428 case OMP_CLAUSE__CILK_FOR_COUNT_:
8429 case OMP_CLAUSE_ASYNC:
8430 case OMP_CLAUSE_WAIT:
8431 case OMP_CLAUSE_NUM_GANGS:
8432 case OMP_CLAUSE_NUM_WORKERS:
8433 case OMP_CLAUSE_VECTOR_LENGTH:
8434 case OMP_CLAUSE_WORKER:
8435 case OMP_CLAUSE_VECTOR:
8436 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8437 is_gimple_val, fb_rvalue) == GS_ERROR)
8438 remove = true;
8439 break;
8441 case OMP_CLAUSE_GANG:
8442 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8443 is_gimple_val, fb_rvalue) == GS_ERROR)
8444 remove = true;
8445 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8446 is_gimple_val, fb_rvalue) == GS_ERROR)
8447 remove = true;
8448 break;
8450 case OMP_CLAUSE_NOWAIT:
8451 case OMP_CLAUSE_ORDERED:
8452 case OMP_CLAUSE_UNTIED:
8453 case OMP_CLAUSE_COLLAPSE:
8454 case OMP_CLAUSE_TILE:
8455 case OMP_CLAUSE_AUTO:
8456 case OMP_CLAUSE_SEQ:
8457 case OMP_CLAUSE_INDEPENDENT:
8458 case OMP_CLAUSE_MERGEABLE:
8459 case OMP_CLAUSE_PROC_BIND:
8460 case OMP_CLAUSE_SAFELEN:
8461 case OMP_CLAUSE_SIMDLEN:
8462 case OMP_CLAUSE_NOGROUP:
8463 case OMP_CLAUSE_THREADS:
8464 case OMP_CLAUSE_SIMD:
8465 break;
8467 case OMP_CLAUSE_DEFAULTMAP:
8468 ctx->target_map_scalars_firstprivate = false;
8469 break;
8471 case OMP_CLAUSE_ALIGNED:
8472 decl = OMP_CLAUSE_DECL (c);
8473 if (error_operand_p (decl))
8475 remove = true;
8476 break;
8478 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8479 is_gimple_val, fb_rvalue) == GS_ERROR)
8481 remove = true;
8482 break;
8484 if (!is_global_var (decl)
8485 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8486 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8487 break;
8489 case OMP_CLAUSE_DEFAULT:
8490 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8491 break;
8493 default:
8494 gcc_unreachable ();
8497 if (code == OACC_DATA
8498 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8499 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8500 remove = true;
8501 if (remove)
8502 *list_p = OMP_CLAUSE_CHAIN (c);
8503 else
8504 list_p = &OMP_CLAUSE_CHAIN (c);
/* Publish the newly-built context and release the per-call map.  */
8507 gimplify_omp_ctxp = ctx;
8508 if (struct_map_to_clause)
8509 delete struct_map_to_clause;
8512 /* Return true if DECL is a candidate for shared to firstprivate
8513 optimization. We only consider non-addressable scalars, not
8514 too big, and not references. */
8516 static bool
8517 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8519 if (TREE_ADDRESSABLE (decl))
8520 return false;
8521 tree type = TREE_TYPE (decl);
8522 if (!is_gimple_reg_type (type)
8523 || TREE_CODE (type) == REFERENCE_TYPE
8524 || TREE_ADDRESSABLE (type))
8525 return false;
8526 /* Don't optimize too large decls, as each thread/task will have
8527 its own. */
8528 HOST_WIDE_INT len = int_size_in_bytes (type);
8529 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8530 return false;
8531 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8532 return false;
8533 return true;
8536 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8537 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8538 GOVD_WRITTEN in outer contexts. */
8540 static void
8541 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8543 for (; ctx; ctx = ctx->outer_context)
8545 splay_tree_node n = splay_tree_lookup (ctx->variables,
8546 (splay_tree_key) decl);
8547 if (n == NULL)
8548 continue;
8549 else if (n->value & GOVD_SHARED)
8551 n->value |= GOVD_WRITTEN;
8552 return;
8554 else if (n->value & GOVD_DATA_SHARE_CLASS)
8555 return;
8559 /* Helper callback for walk_gimple_seq to discover possible stores
8560 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8561 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8562 for those. */
8564 static tree
8565 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8567 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8569 *walk_subtrees = 0;
8570 if (!wi->is_lhs)
8571 return NULL_TREE;
8573 tree op = *tp;
8576 if (handled_component_p (op))
8577 op = TREE_OPERAND (op, 0);
8578 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8579 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8580 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8581 else
8582 break;
8584 while (1);
8585 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8586 return NULL_TREE;
8588 omp_mark_stores (gimplify_omp_ctxp, op);
8589 return NULL_TREE;
8592 /* Helper callback for walk_gimple_seq to discover possible stores
8593 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8594 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8595 for those. */
8597 static tree
8598 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8599 bool *handled_ops_p,
8600 struct walk_stmt_info *wi)
8602 gimple *stmt = gsi_stmt (*gsi_p);
8603 switch (gimple_code (stmt))
8605 /* Don't recurse on OpenMP constructs for which
8606 gimplify_adjust_omp_clauses already handled the bodies,
8607 except handle gimple_omp_for_pre_body. */
8608 case GIMPLE_OMP_FOR:
8609 *handled_ops_p = true;
8610 if (gimple_omp_for_pre_body (stmt))
8611 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8612 omp_find_stores_stmt, omp_find_stores_op, wi);
8613 break;
8614 case GIMPLE_OMP_PARALLEL:
8615 case GIMPLE_OMP_TASK:
8616 case GIMPLE_OMP_SECTIONS:
8617 case GIMPLE_OMP_SINGLE:
8618 case GIMPLE_OMP_TARGET:
8619 case GIMPLE_OMP_TEAMS:
8620 case GIMPLE_OMP_CRITICAL:
8621 *handled_ops_p = true;
8622 break;
8623 default:
8624 break;
8626 return NULL_TREE;
/* Closure passed through splay_tree_foreach to
   gimplify_adjust_omp_clauses_1 (splay_tree_foreach only forwards a
   single void* of user data).  */
8629 struct gimplify_adjust_omp_clauses_data
8631 tree *list_p; /* Tail of the clause chain to prepend implicit clauses to.  */
8632 gimple_seq *pre_p; /* Sequence receiving statements gimplified for clause operands.  */
8635 /* For all variables that were not actually used within the context,
8636 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
/* Callback for splay_tree_foreach; DATA points to a
   gimplify_adjust_omp_clauses_data carrying the clause-list tail and
   the pre-statement sequence.  N maps a decl (key) to its accumulated
   GOVD_* flags (value).  Builds the implicit data-sharing or map
   clause the flags call for and prepends it to data->list_p.
   Always returns 0 so the splay-tree walk continues.  */
8638 static int
8639 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8641 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8642 gimple_seq *pre_p
8643 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8644 tree decl = (tree) n->key;
8645 unsigned flags = n->value;
8646 enum omp_clause_code code;
8647 tree clause;
8648 bool private_debug;
/* Explicit clauses already exist; locals need no clause; unseen decls
   were never referenced in the region.  */
8650 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8651 return 0;
8652 if ((flags & GOVD_SEEN) == 0)
8653 return 0;
8654 if (flags & GOVD_DEBUG_PRIVATE)
8656 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED)
8657 private_debug = true;
8659 else if (flags & GOVD_MAP)
8660 private_debug = false;
8661 else
8662 private_debug
8663 = lang_hooks.decls.omp_private_debug_clause (decl,
8664 !!(flags & GOVD_SHARED));
/* Translate the GOVD_* flag class into the clause code to emit.  */
8665 if (private_debug)
8666 code = OMP_CLAUSE_PRIVATE;
8667 else if (flags & GOVD_MAP)
8669 code = OMP_CLAUSE_MAP;
8670 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8671 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8673 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8674 return 0;
8677 else if (flags & GOVD_SHARED)
8679 if (is_global_var (decl))
/* A global is only worth an explicit SHARED clause if some enclosing
   context privatizes, reduces, or maps it.  */
8681 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8682 while (ctx != NULL)
8684 splay_tree_node on
8685 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8686 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8687 | GOVD_PRIVATE | GOVD_REDUCTION
8688 | GOVD_LINEAR | GOVD_MAP)) != 0)
8689 break;
8690 ctx = ctx->outer_context;
8692 if (ctx == NULL)
8693 return 0;
8695 code = OMP_CLAUSE_SHARED;
8697 else if (flags & GOVD_PRIVATE)
8698 code = OMP_CLAUSE_PRIVATE;
8699 else if (flags & GOVD_FIRSTPRIVATE)
8701 code = OMP_CLAUSE_FIRSTPRIVATE;
8702 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8703 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8704 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8706 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8707 "%<target%> construct", decl);
8708 return 0;
8711 else if (flags & GOVD_LASTPRIVATE)
8712 code = OMP_CLAUSE_LASTPRIVATE;
8713 else if (flags & GOVD_ALIGNED)
8714 return 0;
8715 else
8716 gcc_unreachable ();
8718 if (((flags & GOVD_LASTPRIVATE)
8719 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8720 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8721 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8723 tree chain = *list_p;
8724 clause = build_omp_clause (input_location, code);
8725 OMP_CLAUSE_DECL (clause) = decl;
8726 OMP_CLAUSE_CHAIN (clause) = chain;
8727 if (private_debug)
8728 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8729 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8730 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8731 else if (code == OMP_CLAUSE_SHARED
8732 && (flags & GOVD_WRITTEN) == 0
8733 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8734 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8735 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8736 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
/* Possibly zero-length array section: emit a GOMP_MAP_ALLOC map of the
   dereferenced pointer plus a GOMP_MAP_FIRSTPRIVATE_POINTER map of the
   pointer itself.  */
8737 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8739 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8740 OMP_CLAUSE_DECL (nc) = decl;
8741 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8742 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8743 OMP_CLAUSE_DECL (clause)
8744 = build_simple_mem_ref_loc (input_location, decl);
8745 OMP_CLAUSE_DECL (clause)
8746 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8747 build_int_cst (build_pointer_type (char_type_node), 0));
8748 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8749 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8750 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8751 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8752 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8753 OMP_CLAUSE_CHAIN (nc) = chain;
8754 OMP_CLAUSE_CHAIN (clause) = nc;
/* Gimplify the pointer operand in the enclosing context, not this one.  */
8755 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8756 gimplify_omp_ctxp = ctx->outer_context;
8757 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8758 pre_p, NULL, is_gimple_val, fb_rvalue);
8759 gimplify_omp_ctxp = ctx;
8761 else if (code == OMP_CLAUSE_MAP)
8763 int kind;
8764 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8765 switch (flags & (GOVD_MAP_TO_ONLY
8766 | GOVD_MAP_FORCE
8767 | GOVD_MAP_FORCE_PRESENT))
8769 case 0:
8770 kind = GOMP_MAP_TOFROM;
8771 break;
8772 case GOVD_MAP_FORCE:
8773 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8774 break;
8775 case GOVD_MAP_TO_ONLY:
8776 kind = GOMP_MAP_TO;
8777 break;
8778 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8779 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8780 break;
8781 case GOVD_MAP_FORCE_PRESENT:
8782 kind = GOMP_MAP_FORCE_PRESENT;
8783 break;
8784 default:
8785 gcc_unreachable ();
8787 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
/* Variable-sized decl: map the storage through its DECL_VALUE_EXPR
   indirection and add a pointer map for the base pointer itself.  */
8788 if (DECL_SIZE (decl)
8789 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8791 tree decl2 = DECL_VALUE_EXPR (decl);
8792 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8793 decl2 = TREE_OPERAND (decl2, 0);
8794 gcc_assert (DECL_P (decl2));
8795 tree mem = build_simple_mem_ref (decl2);
8796 OMP_CLAUSE_DECL (clause) = mem;
8797 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8798 if (gimplify_omp_ctxp->outer_context)
8800 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8801 omp_notice_variable (ctx, decl2, true);
8802 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8804 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8805 OMP_CLAUSE_MAP);
8806 OMP_CLAUSE_DECL (nc) = decl;
8807 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8808 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8809 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8810 else
8811 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8812 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8813 OMP_CLAUSE_CHAIN (clause) = nc;
/* Reference-typed decl: map the referenced object and firstprivatize
   the reference itself.  */
8815 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8816 && lang_hooks.decls.omp_privatize_by_reference (decl))
8818 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8819 OMP_CLAUSE_SIZE (clause)
8820 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8821 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8822 gimplify_omp_ctxp = ctx->outer_context;
8823 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8824 pre_p, NULL, is_gimple_val, fb_rvalue);
8825 gimplify_omp_ctxp = ctx;
8826 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8827 OMP_CLAUSE_MAP);
8828 OMP_CLAUSE_DECL (nc) = decl;
8829 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8830 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8831 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8832 OMP_CLAUSE_CHAIN (clause) = nc;
8834 else
8835 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
/* A decl both firstprivate and lastprivate gets a companion
   LASTPRIVATE clause marked as firstprivate.  */
8837 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8839 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8840 OMP_CLAUSE_DECL (nc) = decl;
8841 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8842 OMP_CLAUSE_CHAIN (nc) = chain;
8843 OMP_CLAUSE_CHAIN (clause) = nc;
8844 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8845 gimplify_omp_ctxp = ctx->outer_context;
8846 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8847 gimplify_omp_ctxp = ctx;
8849 *list_p = clause;
/* Let the frontend finish the clause in the outer context, then notice
   any DECL-valued sizes the finishing may have introduced.  */
8850 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8851 gimplify_omp_ctxp = ctx->outer_context;
8852 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8853 if (gimplify_omp_ctxp)
8854 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8855 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8856 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8857 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8858 true);
8859 gimplify_omp_ctxp = ctx;
8860 return 0;
/* Post-process the clause list *LIST_P of construct CODE after its BODY
   has been gimplified: drop clauses for variables the region never
   used, fix up map/size operands, then append implicit clauses
   discovered in the current gimplify OMP context via
   gimplify_adjust_omp_clauses_1.  Finally pops and deletes the
   context.  PRE_P receives statements produced while gimplifying
   clause operands.  */
8863 static void
8864 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8865 enum tree_code code)
8867 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8868 tree c, decl;
/* Scan the gimplified body for stores to optimizable decls so
   GOVD_WRITTEN gets set in enclosing sharing contexts -- only needed
   when some enclosing region actually shares data.  */
8870 if (body)
8872 struct gimplify_omp_ctx *octx;
8873 for (octx = ctx; octx; octx = octx->outer_context)
8874 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8875 break;
8876 if (octx)
8878 struct walk_stmt_info wi;
8879 memset (&wi, 0, sizeof (wi));
8880 walk_gimple_seq (body, omp_find_stores_stmt,
8881 omp_find_stores_op, &wi);
/* Walk the explicit clauses, removing or adjusting each as needed.  */
8884 while ((c = *list_p) != NULL)
8886 splay_tree_node n;
8887 bool remove = false;
8889 switch (OMP_CLAUSE_CODE (c))
8891 case OMP_CLAUSE_FIRSTPRIVATE:
8892 if ((ctx->region_type & ORT_TARGET)
8893 && (ctx->region_type & ORT_ACC) == 0
8894 && TYPE_ATOMIC (strip_array_types
8895 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8897 error_at (OMP_CLAUSE_LOCATION (c),
8898 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8899 "%<target%> construct", OMP_CLAUSE_DECL (c));
8900 remove = true;
8901 break;
8903 /* FALLTHRU */
8904 case OMP_CLAUSE_PRIVATE:
8905 case OMP_CLAUSE_SHARED:
8906 case OMP_CLAUSE_LINEAR:
8907 decl = OMP_CLAUSE_DECL (c);
8908 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8909 remove = !(n->value & GOVD_SEEN);
8910 if (! remove)
8912 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8913 if ((n->value & GOVD_DEBUG_PRIVATE)
8914 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8916 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8917 || ((n->value & GOVD_DATA_SHARE_CLASS)
8918 == GOVD_SHARED));
8919 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8920 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
/* Never-written shared scalars can be treated as read-only; written
   ones must be flagged in outer contexts.  */
8922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8923 && (n->value & GOVD_WRITTEN) == 0
8924 && DECL_P (decl)
8925 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8926 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8927 else if (DECL_P (decl)
8928 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8929 && (n->value & GOVD_WRITTEN) != 0)
8930 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8931 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8932 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8933 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8935 break;
8937 case OMP_CLAUSE_LASTPRIVATE:
8938 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8939 accurately reflect the presence of a FIRSTPRIVATE clause. */
8940 decl = OMP_CLAUSE_DECL (c);
8941 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8942 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8943 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8944 if (code == OMP_DISTRIBUTE
8945 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8947 remove = true;
8948 error_at (OMP_CLAUSE_LOCATION (c),
8949 "same variable used in %<firstprivate%> and "
8950 "%<lastprivate%> clauses on %<distribute%> "
8951 "construct");
8953 if (!remove
8954 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8955 && DECL_P (decl)
8956 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8957 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8958 break;
8960 case OMP_CLAUSE_ALIGNED:
8961 decl = OMP_CLAUSE_DECL (c);
8962 if (!is_global_var (decl))
8964 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8965 remove = n == NULL || !(n->value & GOVD_SEEN);
8966 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8968 struct gimplify_omp_ctx *octx;
8969 if (n != NULL
8970 && (n->value & (GOVD_DATA_SHARE_CLASS
8971 & ~GOVD_FIRSTPRIVATE)))
8972 remove = true;
8973 else
8974 for (octx = ctx->outer_context; octx;
8975 octx = octx->outer_context)
8977 n = splay_tree_lookup (octx->variables,
8978 (splay_tree_key) decl);
8979 if (n == NULL)
8980 continue;
8981 if (n->value & GOVD_LOCAL)
8982 break;
8983 /* We have to avoid assigning a shared variable
8984 to itself when trying to add
8985 __builtin_assume_aligned. */
8986 if (n->value & GOVD_SHARED)
8988 remove = true;
8989 break;
8994 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8996 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8997 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8998 remove = true;
9000 break;
9002 case OMP_CLAUSE_MAP:
9003 if (code == OMP_TARGET_EXIT_DATA
9004 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9006 remove = true;
9007 break;
9009 decl = OMP_CLAUSE_DECL (c);
9010 /* Data clauses associated with acc parallel reductions must be
9011 compatible with present_or_copy. Warn and adjust the clause
9012 if that is not the case. */
9013 if (ctx->region_type == ORT_ACC_PARALLEL)
9015 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9016 n = NULL;
9018 if (DECL_P (t))
9019 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9021 if (n && (n->value & GOVD_REDUCTION))
9023 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9025 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9026 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9027 && kind != GOMP_MAP_FORCE_PRESENT
9028 && kind != GOMP_MAP_POINTER)
9030 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9031 "incompatible data clause with reduction "
9032 "on %qE; promoting to present_or_copy",
9033 DECL_NAME (t));
9034 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
/* Non-decl operands (component refs etc.): only prune unused
   firstprivate-pointer maps of struct members on target regions.  */
9038 if (!DECL_P (decl))
9040 if ((ctx->region_type & ORT_TARGET) != 0
9041 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9043 if (TREE_CODE (decl) == INDIRECT_REF
9044 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9045 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9046 == REFERENCE_TYPE))
9047 decl = TREE_OPERAND (decl, 0);
9048 if (TREE_CODE (decl) == COMPONENT_REF)
9050 while (TREE_CODE (decl) == COMPONENT_REF)
9051 decl = TREE_OPERAND (decl, 0);
9052 if (DECL_P (decl))
9054 n = splay_tree_lookup (ctx->variables,
9055 (splay_tree_key) decl);
9056 if (!(n->value & GOVD_SEEN))
9057 remove = true;
9061 break;
9063 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9064 if ((ctx->region_type & ORT_TARGET) != 0
9065 && !(n->value & GOVD_SEEN)
9066 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9067 && (!is_global_var (decl)
9068 || !lookup_attribute ("omp declare target link",
9069 DECL_ATTRIBUTES (decl))))
9071 remove = true;
9072 /* For struct element mapping, if struct is never referenced
9073 in target block and none of the mapping has always modifier,
9074 remove all the struct element mappings, which immediately
9075 follow the GOMP_MAP_STRUCT map clause. */
9076 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9078 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9079 while (cnt--)
9080 OMP_CLAUSE_CHAIN (c)
9081 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9084 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9085 && code == OMP_TARGET_EXIT_DATA)
9086 remove = true;
/* Variable-sized decl: rewrite the map to go through the
   DECL_VALUE_EXPR indirection, like gimplify_adjust_omp_clauses_1.  */
9087 else if (DECL_SIZE (decl)
9088 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9089 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9090 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9091 && (OMP_CLAUSE_MAP_KIND (c)
9092 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9094 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9095 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9096 INTEGER_CST. */
9097 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9099 tree decl2 = DECL_VALUE_EXPR (decl);
9100 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9101 decl2 = TREE_OPERAND (decl2, 0);
9102 gcc_assert (DECL_P (decl2));
9103 tree mem = build_simple_mem_ref (decl2);
9104 OMP_CLAUSE_DECL (c) = mem;
9105 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9106 if (ctx->outer_context)
9108 omp_notice_variable (ctx->outer_context, decl2, true);
9109 omp_notice_variable (ctx->outer_context,
9110 OMP_CLAUSE_SIZE (c), true);
9112 if (((ctx->region_type & ORT_TARGET) != 0
9113 || !ctx->target_firstprivatize_array_bases)
9114 && ((n->value & GOVD_SEEN) == 0
9115 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9117 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9118 OMP_CLAUSE_MAP);
9119 OMP_CLAUSE_DECL (nc) = decl;
9120 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9121 if (ctx->target_firstprivatize_array_bases)
9122 OMP_CLAUSE_SET_MAP_KIND (nc,
9123 GOMP_MAP_FIRSTPRIVATE_POINTER);
9124 else
9125 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9126 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9127 OMP_CLAUSE_CHAIN (c) = nc;
9128 c = nc;
9131 else
9133 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9134 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9135 gcc_assert ((n->value & GOVD_SEEN) == 0
9136 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9137 == 0));
9139 break;
9141 case OMP_CLAUSE_TO:
9142 case OMP_CLAUSE_FROM:
9143 case OMP_CLAUSE__CACHE_:
9144 decl = OMP_CLAUSE_DECL (c);
9145 if (!DECL_P (decl))
9146 break;
9147 if (DECL_SIZE (decl)
9148 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9150 tree decl2 = DECL_VALUE_EXPR (decl);
9151 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9152 decl2 = TREE_OPERAND (decl2, 0);
9153 gcc_assert (DECL_P (decl2));
9154 tree mem = build_simple_mem_ref (decl2);
9155 OMP_CLAUSE_DECL (c) = mem;
9156 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9157 if (ctx->outer_context)
9159 omp_notice_variable (ctx->outer_context, decl2, true);
9160 omp_notice_variable (ctx->outer_context,
9161 OMP_CLAUSE_SIZE (c), true);
9164 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9165 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9166 break;
9168 case OMP_CLAUSE_REDUCTION:
9169 decl = OMP_CLAUSE_DECL (c);
9170 /* OpenACC reductions need a present_or_copy data clause.
9171 Add one if necessary. Error is the reduction is private. */
9172 if (ctx->region_type == ORT_ACC_PARALLEL)
9174 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9175 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9176 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9177 "reduction on %qE", DECL_NAME (decl));
9178 else if ((n->value & GOVD_MAP) == 0)
9180 tree next = OMP_CLAUSE_CHAIN (c);
9181 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9182 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9183 OMP_CLAUSE_DECL (nc) = decl;
9184 OMP_CLAUSE_CHAIN (c) = nc;
9185 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9186 while (1)
9188 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9189 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9190 break;
9191 nc = OMP_CLAUSE_CHAIN (nc);
9193 OMP_CLAUSE_CHAIN (nc) = next;
9194 n->value |= GOVD_MAP;
9197 if (DECL_P (decl)
9198 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9199 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9200 break;
9201 case OMP_CLAUSE_COPYIN:
9202 case OMP_CLAUSE_COPYPRIVATE:
9203 case OMP_CLAUSE_IF:
9204 case OMP_CLAUSE_NUM_THREADS:
9205 case OMP_CLAUSE_NUM_TEAMS:
9206 case OMP_CLAUSE_THREAD_LIMIT:
9207 case OMP_CLAUSE_DIST_SCHEDULE:
9208 case OMP_CLAUSE_DEVICE:
9209 case OMP_CLAUSE_SCHEDULE:
9210 case OMP_CLAUSE_NOWAIT:
9211 case OMP_CLAUSE_ORDERED:
9212 case OMP_CLAUSE_DEFAULT:
9213 case OMP_CLAUSE_UNTIED:
9214 case OMP_CLAUSE_COLLAPSE:
9215 case OMP_CLAUSE_FINAL:
9216 case OMP_CLAUSE_MERGEABLE:
9217 case OMP_CLAUSE_PROC_BIND:
9218 case OMP_CLAUSE_SAFELEN:
9219 case OMP_CLAUSE_SIMDLEN:
9220 case OMP_CLAUSE_DEPEND:
9221 case OMP_CLAUSE_PRIORITY:
9222 case OMP_CLAUSE_GRAINSIZE:
9223 case OMP_CLAUSE_NUM_TASKS:
9224 case OMP_CLAUSE_NOGROUP:
9225 case OMP_CLAUSE_THREADS:
9226 case OMP_CLAUSE_SIMD:
9227 case OMP_CLAUSE_HINT:
9228 case OMP_CLAUSE_DEFAULTMAP:
9229 case OMP_CLAUSE_USE_DEVICE_PTR:
9230 case OMP_CLAUSE_IS_DEVICE_PTR:
9231 case OMP_CLAUSE__CILK_FOR_COUNT_:
9232 case OMP_CLAUSE_ASYNC:
9233 case OMP_CLAUSE_WAIT:
9234 case OMP_CLAUSE_INDEPENDENT:
9235 case OMP_CLAUSE_NUM_GANGS:
9236 case OMP_CLAUSE_NUM_WORKERS:
9237 case OMP_CLAUSE_VECTOR_LENGTH:
9238 case OMP_CLAUSE_GANG:
9239 case OMP_CLAUSE_WORKER:
9240 case OMP_CLAUSE_VECTOR:
9241 case OMP_CLAUSE_AUTO:
9242 case OMP_CLAUSE_SEQ:
9243 case OMP_CLAUSE_TILE:
9244 break;
9246 default:
9247 gcc_unreachable ();
/* Unlink the clause or advance past it.  */
9250 if (remove)
9251 *list_p = OMP_CLAUSE_CHAIN (c);
9252 else
9253 list_p = &OMP_CLAUSE_CHAIN (c);
9256 /* Add in any implicit data sharing. */
9257 struct gimplify_adjust_omp_clauses_data data;
9258 data.list_p = list_p;
9259 data.pre_p = pre_p;
9260 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9262 gimplify_omp_ctxp = ctx->outer_context;
9263 delete_omp_context (ctx);
9266 /* Gimplify OACC_CACHE. */
9268 static void
9269 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9271 tree expr = *expr_p;
9273 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9274 OACC_CACHE);
9275 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9276 OACC_CACHE);
9278 /* TODO: Do something sensible with this information. */
9280 *expr_p = NULL_TREE;
9283 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9284 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9285 kind. The entry kind will replace the one in CLAUSE, while the exit
9286 kind will be used in a new omp_clause and returned to the caller. */
9288 static tree
9289 gimplify_oacc_declare_1 (tree clause)
9291 HOST_WIDE_INT kind, new_op;
9292 bool ret = false;
9293 tree c = NULL;
9295 kind = OMP_CLAUSE_MAP_KIND (clause);
9297 switch (kind)
9299 case GOMP_MAP_ALLOC:
9300 case GOMP_MAP_FORCE_ALLOC:
9301 case GOMP_MAP_FORCE_TO:
9302 new_op = GOMP_MAP_DELETE;
9303 ret = true;
9304 break;
9306 case GOMP_MAP_FORCE_FROM:
9307 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9308 new_op = GOMP_MAP_FORCE_FROM;
9309 ret = true;
9310 break;
9312 case GOMP_MAP_FORCE_TOFROM:
9313 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9314 new_op = GOMP_MAP_FORCE_FROM;
9315 ret = true;
9316 break;
9318 case GOMP_MAP_FROM:
9319 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9320 new_op = GOMP_MAP_FROM;
9321 ret = true;
9322 break;
9324 case GOMP_MAP_TOFROM:
9325 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9326 new_op = GOMP_MAP_FROM;
9327 ret = true;
9328 break;
9330 case GOMP_MAP_DEVICE_RESIDENT:
9331 case GOMP_MAP_FORCE_DEVICEPTR:
9332 case GOMP_MAP_FORCE_PRESENT:
9333 case GOMP_MAP_LINK:
9334 case GOMP_MAP_POINTER:
9335 case GOMP_MAP_TO:
9336 break;
9338 default:
9339 gcc_unreachable ();
9340 break;
9343 if (ret)
9345 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9346 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9347 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9350 return c;
9353 /* Gimplify OACC_DECLARE. */
9355 static void
9356 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9358 tree expr = *expr_p;
9359 gomp_target *stmt;
9360 tree clauses, t, decl;
9362 clauses = OACC_DECLARE_CLAUSES (expr);
9364 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9365 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9367 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9369 decl = OMP_CLAUSE_DECL (t);
9371 if (TREE_CODE (decl) == MEM_REF)
9372 decl = TREE_OPERAND (decl, 0);
9374 if (VAR_P (decl) && !is_oacc_declared (decl))
9376 tree attr = get_identifier ("oacc declare target");
9377 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9378 DECL_ATTRIBUTES (decl));
9381 if (VAR_P (decl)
9382 && !is_global_var (decl)
9383 && DECL_CONTEXT (decl) == current_function_decl)
9385 tree c = gimplify_oacc_declare_1 (t);
9386 if (c)
9388 if (oacc_declare_returns == NULL)
9389 oacc_declare_returns = new hash_map<tree, tree>;
9391 oacc_declare_returns->put (decl, c);
9395 if (gimplify_omp_ctxp)
9396 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9399 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9400 clauses);
9402 gimplify_seq_add_stmt (pre_p, stmt);
9404 *expr_p = NULL_TREE;
9407 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9408 gimplification of the body, as well as scanning the body for used
9409 variables. We need to do this scan now, because variable-sized
9410 decls will be decomposed during gimplification. */
9412 static void
9413 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9415 tree expr = *expr_p;
9416 gimple *g;
9417 gimple_seq body = NULL;
9419 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9420 OMP_PARALLEL_COMBINED (expr)
9421 ? ORT_COMBINED_PARALLEL
9422 : ORT_PARALLEL, OMP_PARALLEL);
9424 push_gimplify_context ();
9426 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9427 if (gimple_code (g) == GIMPLE_BIND)
9428 pop_gimplify_context (g);
9429 else
9430 pop_gimplify_context (NULL);
9432 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9433 OMP_PARALLEL);
9435 g = gimple_build_omp_parallel (body,
9436 OMP_PARALLEL_CLAUSES (expr),
9437 NULL_TREE, NULL_TREE);
9438 if (OMP_PARALLEL_COMBINED (expr))
9439 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9440 gimplify_seq_add_stmt (pre_p, g);
9441 *expr_p = NULL_TREE;
9444 /* Gimplify the contents of an OMP_TASK statement. This involves
9445 gimplification of the body, as well as scanning the body for used
9446 variables. We need to do this scan now, because variable-sized
9447 decls will be decomposed during gimplification. */
9449 static void
9450 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9452 tree expr = *expr_p;
9453 gimple *g;
9454 gimple_seq body = NULL;
9456 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9457 omp_find_clause (OMP_TASK_CLAUSES (expr),
9458 OMP_CLAUSE_UNTIED)
9459 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9461 push_gimplify_context ();
9463 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9464 if (gimple_code (g) == GIMPLE_BIND)
9465 pop_gimplify_context (g);
9466 else
9467 pop_gimplify_context (NULL);
9469 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9470 OMP_TASK);
9472 g = gimple_build_omp_task (body,
9473 OMP_TASK_CLAUSES (expr),
9474 NULL_TREE, NULL_TREE,
9475 NULL_TREE, NULL_TREE, NULL_TREE);
9476 gimplify_seq_add_stmt (pre_p, g);
9477 *expr_p = NULL_TREE;
9480 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9481 with non-NULL OMP_FOR_INIT. */
9483 static tree
9484 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9486 *walk_subtrees = 0;
9487 switch (TREE_CODE (*tp))
9489 case OMP_FOR:
9490 *walk_subtrees = 1;
9491 /* FALLTHRU */
9492 case OMP_SIMD:
9493 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9494 return *tp;
9495 break;
9496 case BIND_EXPR:
9497 case STATEMENT_LIST:
9498 case OMP_PARALLEL:
9499 *walk_subtrees = 1;
9500 break;
9501 default:
9502 break;
9504 return NULL_TREE;
9507 /* Gimplify the gross structure of an OMP_FOR statement. */
9509 static enum gimplify_status
9510 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9512 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9513 enum gimplify_status ret = GS_ALL_DONE;
9514 enum gimplify_status tret;
9515 gomp_for *gfor;
9516 gimple_seq for_body, for_pre_body;
9517 int i;
9518 bitmap has_decl_expr = NULL;
9519 enum omp_region_type ort = ORT_WORKSHARE;
9521 orig_for_stmt = for_stmt = *expr_p;
/* Classify the construct: this determines the OMP region type used
   when scanning the clauses below.  */
9523 switch (TREE_CODE (for_stmt))
9525 case OMP_FOR:
9526 case CILK_FOR:
9527 case OMP_DISTRIBUTE:
9528 break;
9529 case OACC_LOOP:
9530 ort = ORT_ACC;
9531 break;
9532 case OMP_TASKLOOP:
9533 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9534 ort = ORT_UNTIED_TASK;
9535 else
9536 ort = ORT_TASK;
9537 break;
9538 case OMP_SIMD:
9539 case CILK_SIMD:
9540 ort = ORT_SIMD;
9541 break;
9542 default:
9543 gcc_unreachable ();
9546 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9547 clause for the IV. */
9548 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9550 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9551 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9552 decl = TREE_OPERAND (t, 0);
9553 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9554 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9555 && OMP_CLAUSE_DECL (c) == decl)
9557 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9558 break;
/* A NULL OMP_FOR_INIT marks the outer statement of a combined
   construct; find the innermost loop that carries the actual
   init/cond/incr vectors.  */
9562 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9564 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP)
9565 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9566 find_combined_omp_for, NULL, NULL);
9567 if (inner_for_stmt == NULL_TREE)
9569 gcc_assert (seen_error ());
9570 *expr_p = NULL_TREE;
9571 return GS_ERROR;
9575 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9576 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9577 TREE_CODE (for_stmt));
9579 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9580 gimplify_omp_ctxp->distribute = true;
9582 /* Handle OMP_FOR_INIT. */
9583 for_pre_body = NULL;
/* For simd, record which iteration variables were declared in the
   pre-body (i.e. in the for-init-statement); those must not be
   copied out of the loop.  */
9584 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9586 has_decl_expr = BITMAP_ALLOC (NULL);
9587 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9588 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9589 == VAR_DECL)
9591 t = OMP_FOR_PRE_BODY (for_stmt);
9592 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9594 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9596 tree_stmt_iterator si;
9597 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9598 tsi_next (&si))
9600 t = tsi_stmt (si);
9601 if (TREE_CODE (t) == DECL_EXPR
9602 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9603 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9607 if (OMP_FOR_PRE_BODY (for_stmt))
9609 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9610 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body)
9611 else
/* Taskloop pre-body outside of any OMP context: gimplify it in a
   throwaway ORT_NONE context.  */
9613 struct gimplify_omp_ctx ctx;
9614 memset (&ctx, 0, sizeof (ctx));
9615 ctx.region_type = ORT_NONE;
9616 gimplify_omp_ctxp = &ctx;
9617 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9618 gimplify_omp_ctxp = NULL;
9621 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9623 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9624 for_stmt = inner_for_stmt;
9626 /* For taskloop, need to gimplify the start, end and step before the
9627 taskloop, outside of the taskloop omp context. */
9628 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9630 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9632 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9633 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
/* Non-constant loop bound: evaluate it to a temporary before the
   taskloop and firstprivatize that temporary on it.  */
9635 TREE_OPERAND (t, 1)
9636 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9637 pre_p, NULL, false);
9638 tree c = build_omp_clause (input_location,
9639 OMP_CLAUSE_FIRSTPRIVATE);
9640 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9641 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9642 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9645 /* Handle OMP_FOR_COND. */
9646 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9647 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9649 TREE_OPERAND (t, 1)
9650 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9651 gimple_seq_empty_p (for_pre_body)
9652 ? pre_p : &for_pre_body, NULL,
9653 false);
9654 tree c = build_omp_clause (input_location,
9655 OMP_CLAUSE_FIRSTPRIVATE);
9656 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9657 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9658 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9661 /* Handle OMP_FOR_INCR. */
9662 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9663 if (TREE_CODE (t) == MODIFY_EXPR)
9665 decl = TREE_OPERAND (t, 0);
9666 t = TREE_OPERAND (t, 1);
9667 tree *tp = &TREE_OPERAND (t, 1);
9668 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9669 tp = &TREE_OPERAND (t, 0);
9671 if (!is_gimple_constant (*tp))
9673 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9674 ? pre_p : &for_pre_body;
9675 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9676 tree c = build_omp_clause (input_location,
9677 OMP_CLAUSE_FIRSTPRIVATE);
9678 OMP_CLAUSE_DECL (c) = *tp;
9679 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9680 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9685 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9686 OMP_TASKLOOP);
9689 if (orig_for_stmt != for_stmt)
9690 gimplify_omp_ctxp->combined_loop = true;
9692 for_body = NULL;
9693 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9694 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9695 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9696 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
/* An ordered clause with an expression argument marks a doacross
   loop; remember the iteration variables for it.  */
9698 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9699 bool is_doacross = false;
9700 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9702 is_doacross = true;
9703 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9704 (OMP_FOR_INIT (for_stmt))
9705 * 2);
9707 int collapse = 1, tile = 0;
9708 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9709 if (c)
9710 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9711 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9712 if (c)
9713 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
/* Process each collapsed loop dimension: privatize the iteration
   variable and gimplify the init, cond and incr expressions.  */
9714 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9716 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9717 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9718 decl = TREE_OPERAND (t, 0);
9719 gcc_assert (DECL_P (decl));
9720 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9721 || POINTER_TYPE_P (TREE_TYPE (decl)));
9722 if (is_doacross)
9724 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9725 gimplify_omp_ctxp->loop_iter_var.quick_push
9726 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9727 else
9728 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9729 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9732 /* Make sure the iteration variable is private. */
9733 tree c = NULL_TREE;
9734 tree c2 = NULL_TREE;
9735 if (orig_for_stmt != for_stmt)
9736 /* Do this only on innermost construct for combined ones. */;
9737 else if (ort == ORT_SIMD)
9739 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9740 (splay_tree_key) decl);
9741 omp_is_private (gimplify_omp_ctxp, decl,
9742 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9743 != 1));
9744 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9745 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9746 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
/* Non-collapsed simd: the IV becomes linear with no copy-in.  */
9748 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9749 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9750 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9751 if (has_decl_expr
9752 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9754 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9755 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9757 struct gimplify_omp_ctx *outer
9758 = gimplify_omp_ctxp->outer_context;
9759 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9761 if (outer->region_type == ORT_WORKSHARE
9762 && outer->combined_loop)
9764 n = splay_tree_lookup (outer->variables,
9765 (splay_tree_key)decl);
9766 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9768 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9769 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9771 else
9773 struct gimplify_omp_ctx *octx = outer->outer_context;
9774 if (octx
9775 && octx->region_type == ORT_COMBINED_PARALLEL
9776 && octx->outer_context
9777 && (octx->outer_context->region_type
9778 == ORT_WORKSHARE)
9779 && octx->outer_context->combined_loop)
9781 octx = octx->outer_context;
9782 n = splay_tree_lookup (octx->variables,
9783 (splay_tree_key)decl);
9784 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9786 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9787 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9794 OMP_CLAUSE_DECL (c) = decl;
9795 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9796 OMP_FOR_CLAUSES (for_stmt) = c;
9797 omp_add_variable (gimplify_omp_ctxp, decl, flags);
/* Propagate lastprivate semantics of the IV into the enclosing
   constructs of a combined loop as needed.  */
9798 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9800 if (outer->region_type == ORT_WORKSHARE
9801 && outer->combined_loop)
9803 if (outer->outer_context
9804 && (outer->outer_context->region_type
9805 == ORT_COMBINED_PARALLEL))
9806 outer = outer->outer_context;
9807 else if (omp_check_private (outer, decl, false))
9808 outer = NULL;
9810 else if (((outer->region_type & ORT_TASK) != 0)
9811 && outer->combined_loop
9812 && !omp_check_private (gimplify_omp_ctxp,
9813 decl, false))
9815 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9817 omp_notice_variable (outer, decl, true);
9818 outer = NULL;
9820 if (outer)
9822 n = splay_tree_lookup (outer->variables,
9823 (splay_tree_key)decl);
9824 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9826 omp_add_variable (outer, decl,
9827 GOVD_LASTPRIVATE | GOVD_SEEN);
9828 if (outer->region_type == ORT_COMBINED_PARALLEL
9829 && outer->outer_context
9830 && (outer->outer_context->region_type
9831 == ORT_WORKSHARE)
9832 && outer->outer_context->combined_loop)
9834 outer = outer->outer_context;
9835 n = splay_tree_lookup (outer->variables,
9836 (splay_tree_key)decl);
9837 if (omp_check_private (outer, decl, false))
9838 outer = NULL;
9839 else if (n == NULL
9840 || ((n->value & GOVD_DATA_SHARE_CLASS)
9841 == 0))
9842 omp_add_variable (outer, decl,
9843 GOVD_LASTPRIVATE
9844 | GOVD_SEEN);
9845 else
9846 outer = NULL;
9848 if (outer && outer->outer_context
9849 && (outer->outer_context->region_type
9850 == ORT_COMBINED_TEAMS))
9852 outer = outer->outer_context;
9853 n = splay_tree_lookup (outer->variables,
9854 (splay_tree_key)decl);
9855 if (n == NULL
9856 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9857 omp_add_variable (outer, decl,
9858 GOVD_SHARED | GOVD_SEEN);
9859 else
9860 outer = NULL;
9862 if (outer && outer->outer_context)
9863 omp_notice_variable (outer->outer_context, decl,
9864 true);
9869 else
/* Collapsed simd (or IV declared outside the for-init): the IV is
   lastprivate unless it was declared in the pre-body.  */
9871 bool lastprivate
9872 = (!has_decl_expr
9873 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9874 struct gimplify_omp_ctx *outer
9875 = gimplify_omp_ctxp->outer_context;
9876 if (outer && lastprivate)
9878 if (outer->region_type == ORT_WORKSHARE
9879 && outer->combined_loop)
9881 n = splay_tree_lookup (outer->variables,
9882 (splay_tree_key)decl);
9883 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9885 lastprivate = false;
9886 outer = NULL;
9888 else if (outer->outer_context
9889 && (outer->outer_context->region_type
9890 == ORT_COMBINED_PARALLEL))
9891 outer = outer->outer_context;
9892 else if (omp_check_private (outer, decl, false))
9893 outer = NULL;
9895 else if (((outer->region_type & ORT_TASK) != 0)
9896 && outer->combined_loop
9897 && !omp_check_private (gimplify_omp_ctxp,
9898 decl, false))
9900 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9902 omp_notice_variable (outer, decl, true);
9903 outer = NULL;
9905 if (outer)
9907 n = splay_tree_lookup (outer->variables,
9908 (splay_tree_key)decl);
9909 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9911 omp_add_variable (outer, decl,
9912 GOVD_LASTPRIVATE | GOVD_SEEN);
9913 if (outer->region_type == ORT_COMBINED_PARALLEL
9914 && outer->outer_context
9915 && (outer->outer_context->region_type
9916 == ORT_WORKSHARE)
9917 && outer->outer_context->combined_loop)
9919 outer = outer->outer_context;
9920 n = splay_tree_lookup (outer->variables,
9921 (splay_tree_key)decl);
9922 if (omp_check_private (outer, decl, false))
9923 outer = NULL;
9924 else if (n == NULL
9925 || ((n->value & GOVD_DATA_SHARE_CLASS)
9926 == 0))
9927 omp_add_variable (outer, decl,
9928 GOVD_LASTPRIVATE
9929 | GOVD_SEEN);
9930 else
9931 outer = NULL;
9933 if (outer && outer->outer_context
9934 && (outer->outer_context->region_type
9935 == ORT_COMBINED_TEAMS))
9937 outer = outer->outer_context;
9938 n = splay_tree_lookup (outer->variables,
9939 (splay_tree_key)decl);
9940 if (n == NULL
9941 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9942 omp_add_variable (outer, decl,
9943 GOVD_SHARED | GOVD_SEEN);
9944 else
9945 outer = NULL;
9947 if (outer && outer->outer_context)
9948 omp_notice_variable (outer->outer_context, decl,
9949 true);
9954 c = build_omp_clause (input_location,
9955 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9956 : OMP_CLAUSE_PRIVATE);
9957 OMP_CLAUSE_DECL (c) = decl;
9958 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9959 OMP_FOR_CLAUSES (for_stmt) = c;
9960 omp_add_variable (gimplify_omp_ctxp, decl,
9961 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9962 | GOVD_EXPLICIT | GOVD_SEEN);
9963 c = NULL_TREE;
9966 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9967 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9968 else
9969 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9971 /* If DECL is not a gimple register, create a temporary variable to act
9972 as an iteration counter. This is valid, since DECL cannot be
9973 modified in the body of the loop. Similarly for any iteration vars
9974 in simd with collapse > 1 where the iterator vars must be
9975 lastprivate. */
9976 if (orig_for_stmt != for_stmt)
9977 var = decl;
9978 else if (!is_gimple_reg (decl)
9979 || (ort == ORT_SIMD
9980 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9982 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9983 /* Make sure omp_add_variable is not called on it prematurely.
9984 We call it ourselves a few lines later. */
9985 gimplify_omp_ctxp = NULL;
9986 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9987 gimplify_omp_ctxp = ctx;
9988 TREE_OPERAND (t, 0) = var;
9990 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9992 if (ort == ORT_SIMD
9993 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9995 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9996 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9997 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9998 OMP_CLAUSE_DECL (c2) = var;
9999 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
10000 OMP_FOR_CLAUSES (for_stmt) = c2;
10001 omp_add_variable (gimplify_omp_ctxp, var,
10002 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10003 if (c == NULL_TREE)
10005 c = c2;
10006 c2 = NULL_TREE;
10009 else
10010 omp_add_variable (gimplify_omp_ctxp, var,
10011 GOVD_PRIVATE | GOVD_SEEN);
10013 else
10014 var = decl;
10016 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10017 is_gimple_val, fb_rvalue, false);
10018 ret = MIN (ret, tret);
10019 if (ret == GS_ERROR)
10020 return ret;
10022 /* Handle OMP_FOR_COND. */
10023 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10024 gcc_assert (COMPARISON_CLASS_P (t));
10025 gcc_assert (TREE_OPERAND (t, 0) == decl);
10027 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10028 is_gimple_val, fb_rvalue, false);
10029 ret = MIN (ret, tret);
10031 /* Handle OMP_FOR_INCR. */
10032 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10033 switch (TREE_CODE (t))
10035 case PREINCREMENT_EXPR:
10036 case POSTINCREMENT_EXPR:
10038 tree decl = TREE_OPERAND (t, 0);
10039 /* c_omp_for_incr_canonicalize_ptr() should have been
10040 called to massage things appropriately. */
10041 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10043 if (orig_for_stmt != for_stmt)
10044 break;
10045 t = build_int_cst (TREE_TYPE (decl), 1);
10046 if (c)
10047 OMP_CLAUSE_LINEAR_STEP (c) = t;
10048 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10049 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10050 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10051 break;
10054 case PREDECREMENT_EXPR:
10055 case POSTDECREMENT_EXPR:
10056 /* c_omp_for_incr_canonicalize_ptr() should have been
10057 called to massage things appropriately. */
10058 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10059 if (orig_for_stmt != for_stmt)
10060 break;
10061 t = build_int_cst (TREE_TYPE (decl), -1);
10062 if (c)
10063 OMP_CLAUSE_LINEAR_STEP (c) = t;
10064 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10065 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10066 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10067 break;
10069 case MODIFY_EXPR:
10070 gcc_assert (TREE_OPERAND (t, 0) == decl);
10071 TREE_OPERAND (t, 0) = var;
10073 t = TREE_OPERAND (t, 1);
10074 switch (TREE_CODE (t))
10076 case PLUS_EXPR:
10077 if (TREE_OPERAND (t, 1) == decl)
10079 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10080 TREE_OPERAND (t, 0) = var;
10081 break;
10084 /* Fallthru. */
10085 case MINUS_EXPR:
10086 case POINTER_PLUS_EXPR:
10087 gcc_assert (TREE_OPERAND (t, 0) == decl);
10088 TREE_OPERAND (t, 0) = var;
10089 break;
10090 default:
10091 gcc_unreachable ();
10094 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10095 is_gimple_val, fb_rvalue, false);
10096 ret = MIN (ret, tret);
10097 if (c)
/* Record the (possibly negated/converted) step on the linear
   clause and gimplify it if the conversion created a new tree.  */
10099 tree step = TREE_OPERAND (t, 1);
10100 tree stept = TREE_TYPE (decl);
10101 if (POINTER_TYPE_P (stept))
10102 stept = sizetype;
10103 step = fold_convert (stept, step);
10104 if (TREE_CODE (t) == MINUS_EXPR)
10105 step = fold_build1 (NEGATE_EXPR, stept, step);
10106 OMP_CLAUSE_LINEAR_STEP (c) = step;
10107 if (step != TREE_OPERAND (t, 1))
10109 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10110 &for_pre_body, NULL,
10111 is_gimple_val, fb_rvalue, false);
10112 ret = MIN (ret, tret);
10115 break;
10117 default:
10118 gcc_unreachable ();
10121 if (c2)
10123 gcc_assert (c);
10124 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
/* If we replaced the user IV with a temporary, emit the copy-back
   into the lastprivate/linear finalization sequence.  */
10127 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10129 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10130 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10131 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10132 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10133 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10134 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10135 && OMP_CLAUSE_DECL (c) == decl)
10137 if (is_doacross && (collapse == 1 || i >= collapse))
10138 t = var;
10139 else
10141 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10142 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10143 gcc_assert (TREE_OPERAND (t, 0) == var);
10144 t = TREE_OPERAND (t, 1);
10145 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10146 || TREE_CODE (t) == MINUS_EXPR
10147 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10148 gcc_assert (TREE_OPERAND (t, 0) == var);
10149 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10150 is_doacross ? var : decl,
10151 TREE_OPERAND (t, 1));
10153 gimple_seq *seq;
10154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10155 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10156 else
10157 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10158 gimplify_assign (decl, t, seq);
10163 BITMAP_FREE (has_decl_expr);
/* For taskloop the body gets its own gimplify context and must be
   wrapped in a BIND_EXPR so temporaries get a scope.  */
10165 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10167 push_gimplify_context ();
10168 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10170 OMP_FOR_BODY (orig_for_stmt)
10171 = build3 (BIND_EXPR, void_type_node, NULL,
10172 OMP_FOR_BODY (orig_for_stmt), NULL);
10173 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10177 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10178 &for_body);
10180 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10182 if (gimple_code (g) == GIMPLE_BIND)
10183 pop_gimplify_context (g);
10184 else
10185 pop_gimplify_context (NULL);
/* For combined constructs, replace the iteration variables of the
   inner loop with fresh private temporaries in the outer statement.  */
10188 if (orig_for_stmt != for_stmt)
10189 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10191 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10192 decl = TREE_OPERAND (t, 0);
10193 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10194 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10195 gimplify_omp_ctxp = ctx->outer_context;
10196 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10197 gimplify_omp_ctxp = ctx;
10198 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10199 TREE_OPERAND (t, 0) = var;
10200 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10201 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10202 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10205 gimplify_adjust_omp_clauses (pre_p, for_body,
10206 &OMP_FOR_CLAUSES (orig_for_stmt),
10207 TREE_CODE (orig_for_stmt));
/* Translate the tree code of the original statement to the
   corresponding GIMPLE_OMP_FOR kind.  */
10209 int kind;
10210 switch (TREE_CODE (orig_for_stmt))
10212 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10213 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10214 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10215 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10216 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10217 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10218 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10219 default:
10220 gcc_unreachable ();
10222 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10223 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10224 for_pre_body);
10225 if (orig_for_stmt != for_stmt)
10226 gimple_omp_for_set_combined_p (gfor, true);
10227 if (gimplify_omp_ctxp
10228 && (gimplify_omp_ctxp->combined_loop
10229 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10230 && gimplify_omp_ctxp->outer_context
10231 && gimplify_omp_ctxp->outer_context->combined_loop)))
10233 gimple_omp_for_set_combined_into_p (gfor, true);
10234 if (gimplify_omp_ctxp->combined_loop)
10235 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10236 else
10237 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
/* Copy the per-dimension init/cond/incr expressions into GFOR.  */
10240 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10242 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10243 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10244 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10245 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10246 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10247 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10248 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10249 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10252 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10253 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10254 The outer taskloop stands for computing the number of iterations,
10255 counts for collapsed loops and holding taskloop specific clauses.
10256 The task construct stands for the effect of data sharing on the
10257 explicit task it creates and the inner taskloop stands for expansion
10258 of the static loop inside of the explicit task construct. */
10259 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10261 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10262 tree task_clauses = NULL_TREE;
10263 tree c = *gfor_clauses_ptr;
10264 tree *gtask_clauses_ptr = &task_clauses;
10265 tree outer_for_clauses = NULL_TREE;
10266 tree *gforo_clauses_ptr = &outer_for_clauses;
/* Distribute each clause of the original taskloop to the inner
   taskloop, the task, and/or the outer taskloop.  */
10267 for (; c; c = OMP_CLAUSE_CHAIN (c))
10268 switch (OMP_CLAUSE_CODE (c))
10270 /* These clauses are allowed on task, move them there. */
10271 case OMP_CLAUSE_SHARED:
10272 case OMP_CLAUSE_FIRSTPRIVATE:
10273 case OMP_CLAUSE_DEFAULT:
10274 case OMP_CLAUSE_IF:
10275 case OMP_CLAUSE_UNTIED:
10276 case OMP_CLAUSE_FINAL:
10277 case OMP_CLAUSE_MERGEABLE:
10278 case OMP_CLAUSE_PRIORITY:
10279 *gtask_clauses_ptr = c;
10280 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10281 break;
10282 case OMP_CLAUSE_PRIVATE:
10283 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10285 /* We want private on outer for and firstprivate
10286 on task. */
10287 *gtask_clauses_ptr
10288 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10289 OMP_CLAUSE_FIRSTPRIVATE);
10290 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10291 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10292 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10293 *gforo_clauses_ptr = c;
10294 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10296 else
10298 *gtask_clauses_ptr = c;
10299 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10301 break;
10302 /* These clauses go into outer taskloop clauses. */
10303 case OMP_CLAUSE_GRAINSIZE:
10304 case OMP_CLAUSE_NUM_TASKS:
10305 case OMP_CLAUSE_NOGROUP:
10306 *gforo_clauses_ptr = c;
10307 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10308 break;
10309 /* Taskloop clause we duplicate on both taskloops. */
10310 case OMP_CLAUSE_COLLAPSE:
10311 *gfor_clauses_ptr = c;
10312 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10313 *gforo_clauses_ptr = copy_node (c);
10314 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10315 break;
10316 /* For lastprivate, keep the clause on inner taskloop, and add
10317 a shared clause on task. If the same decl is also firstprivate,
10318 add also firstprivate clause on the inner taskloop. */
10319 case OMP_CLAUSE_LASTPRIVATE:
10320 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10322 /* For taskloop C++ lastprivate IVs, we want:
10323 1) private on outer taskloop
10324 2) firstprivate and shared on task
10325 3) lastprivate on inner taskloop */
10326 *gtask_clauses_ptr
10327 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10328 OMP_CLAUSE_FIRSTPRIVATE);
10329 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10330 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10331 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10332 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10333 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10334 OMP_CLAUSE_PRIVATE);
10335 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10336 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10337 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10338 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10340 *gfor_clauses_ptr = c;
10341 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10342 *gtask_clauses_ptr
10343 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10344 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10345 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10346 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10347 gtask_clauses_ptr
10348 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10349 break;
10350 default:
10351 gcc_unreachable ();
10353 *gfor_clauses_ptr = NULL_TREE;
10354 *gtask_clauses_ptr = NULL_TREE;
10355 *gforo_clauses_ptr = NULL_TREE;
/* Build the sandwich: outer taskloop -> task -> inner taskloop.  */
10356 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10357 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10358 NULL_TREE, NULL_TREE, NULL_TREE);
10359 gimple_omp_task_set_taskloop_p (g, true);
10360 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10361 gomp_for *gforo
10362 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10363 gimple_omp_for_collapse (gfor),
10364 gimple_omp_for_pre_body (gfor));
10365 gimple_omp_for_set_pre_body (gfor, NULL);
10366 gimple_omp_for_set_combined_p (gforo, true);
10367 gimple_omp_for_set_combined_into_p (gfor, true);
/* Give the outer taskloop fresh private iteration variables that
   mirror those of the inner one.  */
10368 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10370 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10371 tree v = create_tmp_var (type);
10372 gimple_omp_for_set_index (gforo, i, v);
10373 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10374 gimple_omp_for_set_initial (gforo, i, t);
10375 gimple_omp_for_set_cond (gforo, i,
10376 gimple_omp_for_cond (gfor, i));
10377 t = unshare_expr (gimple_omp_for_final (gfor, i));
10378 gimple_omp_for_set_final (gforo, i, t);
10379 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10380 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10381 TREE_OPERAND (t, 0) = v;
10382 gimple_omp_for_set_incr (gforo, i, t);
10383 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10384 OMP_CLAUSE_DECL (t) = v;
10385 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10386 gimple_omp_for_set_clauses (gforo, t);
10388 gimplify_seq_add_stmt (pre_p, gforo);
10390 else
10391 gimplify_seq_add_stmt (pre_p, gfor);
10392 if (ret != GS_ALL_DONE)
10393 return GS_ERROR;
10394 *expr_p = NULL_TREE;
10395 return GS_ALL_DONE;
10398 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10399 of OMP_TARGET's body. */
10401 static tree
10402 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10404 *walk_subtrees = 0;
10405 switch (TREE_CODE (*tp))
10407 case OMP_TEAMS:
10408 return *tp;
10409 case BIND_EXPR:
10410 case STATEMENT_LIST:
10411 *walk_subtrees = 1;
10412 break;
10413 default:
10414 break;
10416 return NULL_TREE;
10419 /* Helper function of optimize_target_teams, determine if the expression
10420 can be computed safely before the target construct on the host. */
10422 static tree
10423 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10425 splay_tree_node n;
/* Types themselves contain nothing to evaluate; skip them.  */
10427 if (TYPE_P (*tp))
10429 *walk_subtrees = 0;
10430 return NULL_TREE;
10432 switch (TREE_CODE (*tp))
/* A decl is host-computable only if it is a non-volatile integral
   scalar without a value expr, not thread-local, not an "omp declare
   target" global, and either firstprivate or always-mapped-to on the
   target region.  Returning *tp signals "not computable".  */
10434 case VAR_DECL:
10435 case PARM_DECL:
10436 case RESULT_DECL:
10437 *walk_subtrees = 0;
10438 if (error_operand_p (*tp)
10439 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10440 || DECL_HAS_VALUE_EXPR_P (*tp)
10441 || DECL_THREAD_LOCAL_P (*tp)
10442 || TREE_SIDE_EFFECTS (*tp)
10443 || TREE_THIS_VOLATILE (*tp))
10444 return *tp;
10445 if (is_global_var (*tp)
10446 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10447 || lookup_attribute ("omp declare target link",
10448 DECL_ATTRIBUTES (*tp))))
10449 return *tp;
10450 if (VAR_P (*tp)
10451 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10452 && !is_global_var (*tp)
10453 && decl_function_context (*tp) == current_function_decl)
10454 return *tp;
10455 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10456 (splay_tree_key) *tp);
10457 if (n == NULL)
10459 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10460 return NULL_TREE;
10461 return *tp;
10463 else if (n->value & GOVD_LOCAL)
10464 return *tp;
10465 else if (n->value & GOVD_FIRSTPRIVATE)
10466 return NULL_TREE;
10467 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10468 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10469 return NULL_TREE;
10470 return *tp;
10471 case INTEGER_CST:
10472 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10473 return *tp;
10474 return NULL_TREE;
/* A TARGET_EXPR is acceptable only when it reduces to its slot
   variable; recurse on the slot.  */
10475 case TARGET_EXPR:
10476 if (TARGET_EXPR_INITIAL (*tp)
10477 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10478 return *tp;
10479 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10480 walk_subtrees, NULL);
10481 /* Allow some reasonable subset of integral arithmetics. */
10482 case PLUS_EXPR:
10483 case MINUS_EXPR:
10484 case MULT_EXPR:
10485 case TRUNC_DIV_EXPR:
10486 case CEIL_DIV_EXPR:
10487 case FLOOR_DIV_EXPR:
10488 case ROUND_DIV_EXPR:
10489 case TRUNC_MOD_EXPR:
10490 case CEIL_MOD_EXPR:
10491 case FLOOR_MOD_EXPR:
10492 case ROUND_MOD_EXPR:
10493 case RDIV_EXPR:
10494 case EXACT_DIV_EXPR:
10495 case MIN_EXPR:
10496 case MAX_EXPR:
10497 case LSHIFT_EXPR:
10498 case RSHIFT_EXPR:
10499 case BIT_IOR_EXPR:
10500 case BIT_XOR_EXPR:
10501 case BIT_AND_EXPR:
10502 case NEGATE_EXPR:
10503 case ABS_EXPR:
10504 case BIT_NOT_EXPR:
10505 case NON_LVALUE_EXPR:
10506 CASE_CONVERT:
10507 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10508 return *tp;
10509 return NULL_TREE;
10510 /* And disallow anything else, except for comparisons. */
10511 default:
10512 if (COMPARISON_CLASS_P (*tp))
10513 return NULL_TREE;
10514 return *tp;
10518 /* Try to determine if the num_teams and/or thread_limit expressions
10519 can have their values determined already before entering the
10520 target construct.
10521 INTEGER_CSTs trivially are,
10522 integral decls that are firstprivate (explicitly or implicitly)
10523 or explicitly map(always, to:) or map(always, tofrom:) on the target
10524 region too, and expressions involving simple arithmetics on those
10525 too, function calls are not ok, dereferencing something neither etc.
10526 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10527 EXPR based on what we find:
10528 0 stands for clause not specified at all, use implementation default
10529 -1 stands for value that can't be determined easily before entering
10530 the target construct.
10531 If teams construct is not present at all, use 1 for num_teams
10532 and 0 for thread_limit (only one team is involved, and the thread
 10533 limit is implementation defined). */
/* Precompute num_teams/thread_limit for TARGET, appending clauses for them
   to OMP_TARGET_CLAUSES; see the descriptive comment just above.  */
 10535 static void
 10536 optimize_target_teams (tree target, gimple_seq *pre_p)
/* Find the (single) teams construct, if any, inside the target body.  */
 10538   tree body = OMP_BODY (target);
 10539   tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
/* 0 means "clause not specified"; see the function comment above.  */
 10540   tree num_teams = integer_zero_node;
 10541   tree thread_limit = integer_zero_node;
 10542   location_t num_teams_loc = EXPR_LOCATION (target);
 10543   location_t thread_limit_loc = EXPR_LOCATION (target);
 10544   tree c, *p, expr;
 10545   struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
/* No teams construct: a single team, implementation-defined thread limit.  */
 10547   if (teams == NULL_TREE)
 10548     num_teams = integer_one_node;
 10549   else
 10550     for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
/* P points at whichever of num_teams/thread_limit this clause sets.  */
 10552 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
 10554 	    p = &num_teams;
 10555 	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
 10557 	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
 10559 	    p = &thread_limit;
 10560 	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
 10562 	else
 10563 	  continue;
 10564 	expr = OMP_CLAUSE_OPERAND (c, 0);
/* Constants are trivially computable before entering the region.  */
 10565 	if (TREE_CODE (expr) == INTEGER_CST)
 10567 	    *p = expr;
 10568 	    continue;
/* computable_teams_clause returns non-NULL on anything we can't evaluate
   before the target region; -1 = "can't be determined easily".  */
 10570 	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
 10572 	    *p = integer_minus_one_node;
 10573 	    continue;
 10575 	*p = expr;
/* Gimplify the expression in the context enclosing the target region,
   since it is evaluated on the host before entering the construct.  */
 10576 	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
 10577 	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
 10578 	    == GS_ERROR)
 10580 	    gimplify_omp_ctxp = target_ctx;
 10581 	    *p = integer_minus_one_node;
 10582 	    continue;
 10584 	gimplify_omp_ctxp = target_ctx;
/* Keep the teams clause in sync with the gimplified value, unless the
   original operand was already a decl or TARGET_EXPR.  */
 10585 	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
 10586 	  OMP_CLAUSE_OPERAND (c, 0) = *p;
/* Prepend the computed clauses onto the target's clause chain.  */
 10588   c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
 10589   OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
 10590   OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
 10591   OMP_TARGET_CLAUSES (target) = c;
 10592   c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
 10593   OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
 10594   OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
 10595   OMP_TARGET_CLAUSES (target) = c;
10598 /* Gimplify the gross structure of several OMP constructs. */
/* Gimplify the gross structure of several OMP/OACC constructs (sections,
   single, target, target data, teams, and the OpenACC structured
   constructs), replacing *EXPR_P with the built GIMPLE statement.  */
 10600 static void
 10601 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
 10603   tree expr = *expr_p;
 10604   gimple *stmt;
 10605   gimple_seq body = NULL;
 10606   enum omp_region_type ort;
/* Map the tree code to the OMP region type used for clause scanning.  */
 10608   switch (TREE_CODE (expr))
 10610     case OMP_SECTIONS:
 10611     case OMP_SINGLE:
 10612       ort = ORT_WORKSHARE;
 10613       break;
 10614     case OMP_TARGET:
 10615       ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
 10616       break;
 10617     case OACC_KERNELS:
 10618       ort = ORT_ACC_KERNELS;
 10619       break;
 10620     case OACC_PARALLEL:
 10621       ort = ORT_ACC_PARALLEL;
 10622       break;
 10623     case OACC_DATA:
 10624       ort = ORT_ACC_DATA;
 10625       break;
 10626     case OMP_TARGET_DATA:
 10627       ort = ORT_TARGET_DATA;
 10628       break;
 10629     case OMP_TEAMS:
 10630       ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
 10631       break;
 10632     case OACC_HOST_DATA:
 10633       ort = ORT_ACC_HOST_DATA;
 10634       break;
 10635     default:
 10636       gcc_unreachable ();
 10638   gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
 10639 			     TREE_CODE (expr));
/* For target regions, try to precompute num_teams/thread_limit clauses.  */
 10640   if (TREE_CODE (expr) == OMP_TARGET)
 10641     optimize_target_teams (expr, pre_p);
/* Target and target-data regions get their own gimplify context so body
   temporaries land in the right place.  */
 10642   if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
 10644       push_gimplify_context ();
 10645       gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
 10646       if (gimple_code (g) == GIMPLE_BIND)
 10647 	pop_gimplify_context (g);
 10648       else
 10649 	pop_gimplify_context (NULL);
/* Data regions need an "end data" runtime call on every exit path, so
   wrap the body in a GIMPLE_TRY_FINALLY with that call as cleanup.  */
 10650       if ((ort & ORT_TARGET_DATA) != 0)
 10652 	  enum built_in_function end_ix;
 10653 	  switch (TREE_CODE (expr))
 10655 	    case OACC_DATA:
 10656 	    case OACC_HOST_DATA:
 10657 	      end_ix = BUILT_IN_GOACC_DATA_END;
 10658 	      break;
 10659 	    case OMP_TARGET_DATA:
 10660 	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
 10661 	      break;
 10662 	    default:
 10663 	      gcc_unreachable ();
 10665 	  tree fn = builtin_decl_explicit (end_ix);
 10666 	  g = gimple_build_call (fn, 0);
 10667 	  gimple_seq cleanup = NULL;
 10668 	  gimple_seq_add_stmt (&cleanup, g);
 10669 	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
 10670 	  body = NULL;
 10671 	  gimple_seq_add_stmt (&body, g);
 10674   else
 10675     gimplify_and_add (OMP_BODY (expr), &body);
 10676   gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
 10677 			       TREE_CODE (expr));
/* Build the final GIMPLE statement for the construct.  */
 10679   switch (TREE_CODE (expr))
 10681     case OACC_DATA:
 10682       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
 10683 				      OMP_CLAUSES (expr));
 10684       break;
 10685     case OACC_KERNELS:
 10686       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
 10687 				      OMP_CLAUSES (expr));
 10688       break;
 10689     case OACC_HOST_DATA:
 10690       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
 10691 				      OMP_CLAUSES (expr));
 10692       break;
 10693     case OACC_PARALLEL:
 10694       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
 10695 				      OMP_CLAUSES (expr));
 10696       break;
 10697     case OMP_SECTIONS:
 10698       stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
 10699       break;
 10700     case OMP_SINGLE:
 10701       stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
 10702       break;
 10703     case OMP_TARGET:
 10704       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
 10705 				      OMP_CLAUSES (expr));
 10706       break;
 10707     case OMP_TARGET_DATA:
 10708       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
 10709 				      OMP_CLAUSES (expr));
 10710       break;
 10711     case OMP_TEAMS:
 10712       stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
 10713       break;
 10714     default:
 10715       gcc_unreachable ();
 10718   gimplify_seq_add_stmt (pre_p, stmt);
/* The construct has been fully consumed into PRE_P.  */
 10719   *expr_p = NULL_TREE;
10722 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10723 target update constructs. */
/* Gimplify a standalone data-movement construct (OpenACC enter/exit data
   and update, OpenMP target update/enter data/exit data) into a bodiless
   GIMPLE_OMP_TARGET statement of the appropriate kind.  */
 10725 static void
 10726 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
 10728   tree expr = *expr_p;
 10729   int kind;
 10730   gomp_target *stmt;
 10731   enum omp_region_type ort = ORT_WORKSHARE;
/* Pick the GF_OMP_TARGET_KIND_* subcode; OpenACC variants additionally
   switch the region type to ORT_ACC for clause scanning.  */
 10733   switch (TREE_CODE (expr))
 10735     case OACC_ENTER_DATA:
 10736     case OACC_EXIT_DATA:
 10737       kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
 10738       ort = ORT_ACC;
 10739       break;
 10740     case OACC_UPDATE:
 10741       kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
 10742       ort = ORT_ACC;
 10743       break;
 10744     case OMP_TARGET_UPDATE:
 10745       kind = GF_OMP_TARGET_KIND_UPDATE;
 10746       break;
 10747     case OMP_TARGET_ENTER_DATA:
 10748       kind = GF_OMP_TARGET_KIND_ENTER_DATA;
 10749       break;
 10750     case OMP_TARGET_EXIT_DATA:
 10751       kind = GF_OMP_TARGET_KIND_EXIT_DATA;
 10752       break;
 10753     default:
 10754       gcc_unreachable ();
 10756   gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
 10757 			     ort, TREE_CODE (expr));
 10758   gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
 10759 			       TREE_CODE (expr));
/* Standalone constructs have no body, hence the NULL body argument.  */
 10760   stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
 10762   gimplify_seq_add_stmt (pre_p, stmt);
 10763   *expr_p = NULL_TREE;
10766 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10767 stabilized the lhs of the atomic operation as *ADDR. Return true if
10768 EXPR is this stabilized form. */
/* Return true if EXPR is the stabilized form *ADDR of the atomic lhs
   (see the comment above this function).  */
 10770 static bool
 10771 goa_lhs_expr_p (tree expr, tree addr)
 10773   /* Also include casts to other type variants.  The C front end is fond
 10774      of adding these for e.g. volatile variables.  This is like
 10775      STRIP_TYPE_NOPS but includes the main variant lookup.  */
 10776   STRIP_USELESS_TYPE_CONVERSION (expr);
 10778   if (TREE_CODE (expr) == INDIRECT_REF)
 10780       expr = TREE_OPERAND (expr, 0);
/* Peel matching layers of conversions/NON_LVALUE_EXPRs off both the
   dereferenced pointer and ADDR in lock-step, as long as the codes and
   types stay compatible.  */
 10781       while (expr != addr
 10782 	     && (CONVERT_EXPR_P (expr)
 10783 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
 10784 	     && TREE_CODE (expr) == TREE_CODE (addr)
 10785 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
 10787 	  expr = TREE_OPERAND (expr, 0);
 10788 	  addr = TREE_OPERAND (addr, 0);
 10790       if (expr == addr)
 10791 	return true;
/* Also accept &x on both sides referring to the same object.  */
 10792       return (TREE_CODE (addr) == ADDR_EXPR
 10793 	      && TREE_CODE (expr) == ADDR_EXPR
 10794 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
/* EXPR was not an INDIRECT_REF: it matches if ADDR is &EXPR.  */
 10796   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
 10797     return true;
 10798   return false;
10801 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10802 expression does not involve the lhs, evaluate it into a temporary.
10803 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10804 or -1 if an error was encountered. */
/* Replace occurrences of the atomic lhs *LHS_ADDR in *EXPR_P with LHS_VAR,
   gimplifying lhs-free subexpressions into PRE_P; returns 1/0/-1 as
   described in the comment above this function.  */
 10806 static int
 10807 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
 10808 		    tree lhs_var)
 10810   tree expr = *expr_p;
 10811   int saw_lhs;
/* The whole expression is the lhs itself: substitute the temporary.  */
 10813   if (goa_lhs_expr_p (expr, lhs_addr))
 10815       *expr_p = lhs_var;
 10816       return 1;
/* Already a GIMPLE value: nothing to stabilize.  */
 10818   if (is_gimple_val (expr))
 10819     return 0;
 10821   saw_lhs = 0;
/* Recurse into operands by tree-code class; saw_lhs accumulates whether
   any operand contained the lhs (or -1 on gimplification error).  */
 10822   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
 10824     case tcc_binary:
 10825     case tcc_comparison:
 10826       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
 10827 				     lhs_var);
 10828       /* FALLTHRU */
 10829     case tcc_unary:
 10830       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
 10831 				     lhs_var);
 10832       break;
 10833     case tcc_expression:
 10834       switch (TREE_CODE (expr))
 10836 	case TRUTH_ANDIF_EXPR:
 10837 	case TRUTH_ORIF_EXPR:
 10838 	case TRUTH_AND_EXPR:
 10839 	case TRUTH_OR_EXPR:
 10840 	case TRUTH_XOR_EXPR:
 10841 	case BIT_INSERT_EXPR:
 10842 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
 10843 					 lhs_addr, lhs_var);
 10844 	  /* FALLTHRU */
 10845 	case TRUTH_NOT_EXPR:
 10846 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
 10847 					 lhs_addr, lhs_var);
 10848 	  break;
 10849 	case COMPOUND_EXPR:
 10850 	  /* Break out any preevaluations from cp_build_modify_expr.  */
 10851 	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
 10852 	       expr = TREE_OPERAND (expr, 1))
 10853 	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
 10854 	  *expr_p = expr;
 10855 	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
 10856 	default:
 10857 	  break;
 10859       break;
 10860     case tcc_reference:
 10861       if (TREE_CODE (expr) == BIT_FIELD_REF)
 10862 	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
 10863 				       lhs_addr, lhs_var);
 10864       break;
 10865     default:
 10866       break;
/* The lhs did not appear anywhere in EXPR: evaluate the whole expression
   into a temporary before the atomic operation.  */
 10869   if (saw_lhs == 0)
 10871       enum gimplify_status gs;
 10872       gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
 10873       if (gs != GS_ALL_DONE)
 10874 	saw_lhs = -1;
 10877   return saw_lhs;
10880 /* Gimplify an OMP_ATOMIC statement. */
/* Gimplify an OMP_ATOMIC statement into a GIMPLE_OMP_ATOMIC_LOAD /
   GIMPLE_OMP_ATOMIC_STORE pair emitted to PRE_P.  */
 10882 static enum gimplify_status
 10883 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
 10885   tree addr = TREE_OPERAND (*expr_p, 0);
/* An atomic read has no rhs operand.  */
 10886   tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
 10887 	     ? NULL : TREE_OPERAND (*expr_p, 1);
 10888   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
 10889   tree tmp_load;
 10890   gomp_atomic_load *loadstmt;
 10891   gomp_atomic_store *storestmt;
/* Temporary holding the atomically-loaded value of *ADDR.  */
 10893   tmp_load = create_tmp_reg (type);
/* Rewrite lhs occurrences in the rhs to use the loaded temporary.  */
 10894   if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
 10895     return GS_ERROR;
 10897   if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
 10898       != GS_ALL_DONE)
 10899     return GS_ERROR;
 10901   loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
 10902   gimplify_seq_add_stmt (pre_p, loadstmt);
 10903   if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
 10904       != GS_ALL_DONE)
 10905     return GS_ERROR;
/* For an atomic read, the "stored" value is just the loaded temporary.  */
 10907   if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
 10908     rhs = tmp_load;
 10909   storestmt = gimple_build_omp_atomic_store (rhs);
 10910   gimplify_seq_add_stmt (pre_p, storestmt);
/* Propagate seq_cst semantics to both halves of the pair.  */
 10911   if (OMP_ATOMIC_SEQ_CST (*expr_p))
 10913       gimple_omp_atomic_set_seq_cst (loadstmt);
 10914       gimple_omp_atomic_set_seq_cst (storestmt);
/* Capture forms yield a value: the old value comes from the load, the
   new value from the store.  Plain atomics yield nothing.  */
 10916   switch (TREE_CODE (*expr_p))
 10918     case OMP_ATOMIC_READ:
 10919     case OMP_ATOMIC_CAPTURE_OLD:
 10920       *expr_p = tmp_load;
 10921       gimple_omp_atomic_set_need_value (loadstmt);
 10922       break;
 10923     case OMP_ATOMIC_CAPTURE_NEW:
 10924       *expr_p = rhs;
 10925       gimple_omp_atomic_set_need_value (storestmt);
 10926       break;
 10927     default:
 10928       *expr_p = NULL;
 10929       break;
 10932   return GS_ALL_DONE;
10935 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10936 body, and adding some EH bits. */
/* Gimplify a TRANSACTION_EXPR into a GIMPLE_TRANSACTION statement, setting
   the outer/relaxed subcode from the tree flags.  */
 10938 static enum gimplify_status
 10939 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
 10941   tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
 10942   gimple *body_stmt;
 10943   gtransaction *trans_stmt;
 10944   gimple_seq body = NULL;
 10945   int subcode = 0;
 10947   /* Wrap the transaction body in a BIND_EXPR so we have a context
 10948      where to put decls for OMP.  */
 10949   if (TREE_CODE (tbody) != BIND_EXPR)
 10951       tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
 10952       TREE_SIDE_EFFECTS (bind) = 1;
 10953       SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
 10954       TRANSACTION_EXPR_BODY (expr) = bind;
 10957   push_gimplify_context ();
/* If the transaction produces a value, voidify_wrapper_expr introduces a
   temporary (returned in TEMP) that callers should then gimplify.  */
 10958   temp = voidify_wrapper_expr (*expr_p, NULL);
 10960   body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
 10961   pop_gimplify_context (body_stmt);
 10963   trans_stmt = gimple_build_transaction (body);
 10964   if (TRANSACTION_EXPR_OUTER (expr))
 10965     subcode = GTMA_IS_OUTER;
 10966   else if (TRANSACTION_EXPR_RELAXED (expr))
 10967     subcode = GTMA_IS_RELAXED;
 10968   gimple_transaction_set_subcode (trans_stmt, subcode);
 10970   gimplify_seq_add_stmt (pre_p, trans_stmt);
 10972   if (temp)
 10974       *expr_p = temp;
/* GS_OK so the caller re-gimplifies the replacement temporary.  */
 10975       return GS_OK;
 10978   *expr_p = NULL_TREE;
 10979   return GS_ALL_DONE;
10982 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10983 is the OMP_BODY of the original EXPR (which has already been
10984 gimplified so it's not present in the EXPR).
10986 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
/* Validate depend(sink:)/depend(source) clauses of an OMP_ORDERED against
   the enclosing loop's iteration variables and build the GIMPLE_OMP_ORDERED
   statement (or a nop on error); see the comment above this function.  */
 10988 static gimple *
 10989 gimplify_omp_ordered (tree expr, gimple_seq body)
 10991   tree c, decls;
 10992   int failures = 0;
 10993   unsigned int i;
 10994   tree source_c = NULL_TREE;
 10995   tree sink_c = NULL_TREE;
 10997   if (gimplify_omp_ctxp)
/* loop_iter_var holds (original var, gimplified var) pairs for the
   enclosing ordered loop; empty means no such loop.  */
 10999     for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
 11000       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
 11001 	  && gimplify_omp_ctxp->loop_iter_var.is_empty ()
 11002 	  && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
 11003 	      || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
 11005 	  error_at (OMP_CLAUSE_LOCATION (c),
 11006 		    "%<ordered%> construct with %<depend%> clause must be "
 11007 		    "closely nested inside a loop with %<ordered%> clause "
 11008 		    "with a parameter");
 11009 	  failures++;
 11011       else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
 11012 	       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
 11014 	  bool fail = false;
/* Check each sink variable against the corresponding loop iteration
   variable, and on a match substitute the gimplified copy.  */
 11015 	  for (decls = OMP_CLAUSE_DECL (c), i = 0;
 11016 	       decls && TREE_CODE (decls) == TREE_LIST;
 11017 	       decls = TREE_CHAIN (decls), ++i)
 11018 	    if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
 11019 	      continue;
 11020 	    else if (TREE_VALUE (decls)
 11021 		     != gimplify_omp_ctxp->loop_iter_var[2 * i])
 11023 		error_at (OMP_CLAUSE_LOCATION (c),
 11024 			  "variable %qE is not an iteration "
 11025 			  "of outermost loop %d, expected %qE",
 11026 			  TREE_VALUE (decls), i + 1,
 11027 			  gimplify_omp_ctxp->loop_iter_var[2 * i]);
 11028 		fail = true;
 11029 		failures++;
 11031 	    else
 11032 	      TREE_VALUE (decls)
 11033 		= gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
/* Also diagnose a sink list shorter/longer than the loop nest depth.  */
 11034 	  if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
 11036 	      error_at (OMP_CLAUSE_LOCATION (c),
 11037 			"number of variables in %<depend(sink)%> "
 11038 			"clause does not match number of "
 11039 			"iteration variables");
 11040 	      failures++;
 11042 	  sink_c = c;
 11044       else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
 11045 	       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
/* At most one depend(source) clause is allowed.  */
 11047 	  if (source_c)
 11049 	      error_at (OMP_CLAUSE_LOCATION (c),
 11050 			"more than one %<depend(source)%> clause on an "
 11051 			"%<ordered%> construct");
 11052 	      failures++;
 11054 	  else
 11055 	    source_c = c;
/* source and sink clauses are mutually exclusive on one construct.  */
 11058   if (source_c && sink_c)
 11060       error_at (OMP_CLAUSE_LOCATION (source_c),
 11061 		"%<depend(source)%> clause specified together with "
 11062 		"%<depend(sink:)%> clauses on the same construct");
 11063       failures++;
/* On any diagnosed failure, emit a nop instead of a malformed ordered.  */
 11066   if (failures)
 11067     return gimple_build_nop ();
 11068   return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11071 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11072 expression produces a value to be used as an operand inside a GIMPLE
11073 statement, the value will be stored back in *EXPR_P. This value will
11074 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11075 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11076 emitted in PRE_P and POST_P.
11078 Additionally, this process may overwrite parts of the input
11079 expression during gimplification. Ideally, it should be
11080 possible to do non-destructive gimplification.
11082 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11083 the expression needs to evaluate to a value to be used as
11084 an operand in a GIMPLE statement, this value will be stored in
11085 *EXPR_P on exit. This happens when the caller specifies one
11086 of fb_lvalue or fb_rvalue fallback flags.
11088 PRE_P will contain the sequence of GIMPLE statements corresponding
11089 to the evaluation of EXPR and all the side-effects that must
11090 be executed before the main expression. On exit, the last
11091 statement of PRE_P is the core statement being gimplified. For
11092 instance, when gimplifying 'if (++a)' the last statement in
11093 PRE_P will be 'if (t.1)' where t.1 is the result of
11094 pre-incrementing 'a'.
11096 POST_P will contain the sequence of GIMPLE statements corresponding
11097 to the evaluation of all the side-effects that must be executed
11098 after the main expression. If this is NULL, the post
11099 side-effects are stored at the end of PRE_P.
11101 The reason why the output is split in two is to handle post
11102 side-effects explicitly. In some cases, an expression may have
11103 inner and outer post side-effects which need to be emitted in
11104 an order different from the one given by the recursive
11105 traversal. For instance, for the expression (*p--)++ the post
11106 side-effects of '--' must actually occur *after* the post
11107 side-effects of '++'. However, gimplification will first visit
11108 the inner expression, so if a separate POST sequence was not
11109 used, the resulting sequence would be:
11111 1 t.1 = *p
11112 2 p = p - 1
11113 3 t.2 = t.1 + 1
11114 4 *p = t.2
11116 However, the post-decrement operation in line #2 must not be
11117 evaluated until after the store to *p at line #4, so the
11118 correct sequence should be:
11120 1 t.1 = *p
11121 2 t.2 = t.1 + 1
11122 3 *p = t.2
11123 4 p = p - 1
11125 So, by specifying a separate post queue, it is possible
11126 to emit the post side-effects in the correct order.
11127 If POST_P is NULL, an internal queue will be used. Before
11128 returning to the caller, the sequence POST_P is appended to
11129 the main output sequence PRE_P.
11131 GIMPLE_TEST_F points to a function that takes a tree T and
11132 returns nonzero if T is in the GIMPLE form requested by the
11133 caller. The GIMPLE predicates are in gimple.c.
11135 FALLBACK tells the function what sort of a temporary we want if
11136 gimplification cannot produce an expression that complies with
11137 GIMPLE_TEST_F.
11139 fb_none means that no temporary should be generated
11140 fb_rvalue means that an rvalue is OK to generate
11141 fb_lvalue means that an lvalue is OK to generate
11142 fb_either means that either is OK, but an lvalue is preferable.
11143 fb_mayfail means that gimplification may fail (in which case
11144 GS_ERROR will be returned)
11146 The return value is either GS_ERROR or GS_ALL_DONE, since this
11147 function iterates until EXPR is completely gimplified or an error
11148 occurs. */
11150 enum gimplify_status
11151 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11152 bool (*gimple_test_f) (tree), fallback_t fallback)
11154 tree tmp;
11155 gimple_seq internal_pre = NULL;
11156 gimple_seq internal_post = NULL;
11157 tree save_expr;
11158 bool is_statement;
11159 location_t saved_location;
11160 enum gimplify_status ret;
11161 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11162 tree label;
11164 save_expr = *expr_p;
11165 if (save_expr == NULL_TREE)
11166 return GS_ALL_DONE;
11168 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11169 is_statement = gimple_test_f == is_gimple_stmt;
11170 if (is_statement)
11171 gcc_assert (pre_p);
11173 /* Consistency checks. */
11174 if (gimple_test_f == is_gimple_reg)
11175 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11176 else if (gimple_test_f == is_gimple_val
11177 || gimple_test_f == is_gimple_call_addr
11178 || gimple_test_f == is_gimple_condexpr
11179 || gimple_test_f == is_gimple_mem_rhs
11180 || gimple_test_f == is_gimple_mem_rhs_or_call
11181 || gimple_test_f == is_gimple_reg_rhs
11182 || gimple_test_f == is_gimple_reg_rhs_or_call
11183 || gimple_test_f == is_gimple_asm_val
11184 || gimple_test_f == is_gimple_mem_ref_addr)
11185 gcc_assert (fallback & fb_rvalue);
11186 else if (gimple_test_f == is_gimple_min_lval
11187 || gimple_test_f == is_gimple_lvalue)
11188 gcc_assert (fallback & fb_lvalue);
11189 else if (gimple_test_f == is_gimple_addressable)
11190 gcc_assert (fallback & fb_either);
11191 else if (gimple_test_f == is_gimple_stmt)
11192 gcc_assert (fallback == fb_none);
11193 else
11195 /* We should have recognized the GIMPLE_TEST_F predicate to
11196 know what kind of fallback to use in case a temporary is
11197 needed to hold the value or address of *EXPR_P. */
11198 gcc_unreachable ();
11201 /* We used to check the predicate here and return immediately if it
11202 succeeds. This is wrong; the design is for gimplification to be
11203 idempotent, and for the predicates to only test for valid forms, not
11204 whether they are fully simplified. */
11205 if (pre_p == NULL)
11206 pre_p = &internal_pre;
11208 if (post_p == NULL)
11209 post_p = &internal_post;
11211 /* Remember the last statements added to PRE_P and POST_P. Every
11212 new statement added by the gimplification helpers needs to be
11213 annotated with location information. To centralize the
11214 responsibility, we remember the last statement that had been
11215 added to both queues before gimplifying *EXPR_P. If
11216 gimplification produces new statements in PRE_P and POST_P, those
11217 statements will be annotated with the same location information
11218 as *EXPR_P. */
11219 pre_last_gsi = gsi_last (*pre_p);
11220 post_last_gsi = gsi_last (*post_p);
11222 saved_location = input_location;
11223 if (save_expr != error_mark_node
11224 && EXPR_HAS_LOCATION (*expr_p))
11225 input_location = EXPR_LOCATION (*expr_p);
11227 /* Loop over the specific gimplifiers until the toplevel node
11228 remains the same. */
11231 /* Strip away as many useless type conversions as possible
11232 at the toplevel. */
11233 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11235 /* Remember the expr. */
11236 save_expr = *expr_p;
11238 /* Die, die, die, my darling. */
11239 if (save_expr == error_mark_node
11240 || (TREE_TYPE (save_expr)
11241 && TREE_TYPE (save_expr) == error_mark_node))
11243 ret = GS_ERROR;
11244 break;
11247 /* Do any language-specific gimplification. */
11248 ret = ((enum gimplify_status)
11249 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11250 if (ret == GS_OK)
11252 if (*expr_p == NULL_TREE)
11253 break;
11254 if (*expr_p != save_expr)
11255 continue;
11257 else if (ret != GS_UNHANDLED)
11258 break;
11260 /* Make sure that all the cases set 'ret' appropriately. */
11261 ret = GS_UNHANDLED;
11262 switch (TREE_CODE (*expr_p))
11264 /* First deal with the special cases. */
11266 case POSTINCREMENT_EXPR:
11267 case POSTDECREMENT_EXPR:
11268 case PREINCREMENT_EXPR:
11269 case PREDECREMENT_EXPR:
11270 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11271 fallback != fb_none,
11272 TREE_TYPE (*expr_p));
11273 break;
11275 case VIEW_CONVERT_EXPR:
11276 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11277 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11279 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11280 post_p, is_gimple_val, fb_rvalue);
11281 recalculate_side_effects (*expr_p);
11282 break;
11284 /* Fallthru. */
11286 case ARRAY_REF:
11287 case ARRAY_RANGE_REF:
11288 case REALPART_EXPR:
11289 case IMAGPART_EXPR:
11290 case COMPONENT_REF:
11291 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11292 fallback ? fallback : fb_rvalue);
11293 break;
11295 case COND_EXPR:
11296 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11298 /* C99 code may assign to an array in a structure value of a
11299 conditional expression, and this has undefined behavior
11300 only on execution, so create a temporary if an lvalue is
11301 required. */
11302 if (fallback == fb_lvalue)
11304 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11305 mark_addressable (*expr_p);
11306 ret = GS_OK;
11308 break;
11310 case CALL_EXPR:
11311 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11313 /* C99 code may assign to an array in a structure returned
11314 from a function, and this has undefined behavior only on
11315 execution, so create a temporary if an lvalue is
11316 required. */
11317 if (fallback == fb_lvalue)
11319 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11320 mark_addressable (*expr_p);
11321 ret = GS_OK;
11323 break;
11325 case TREE_LIST:
11326 gcc_unreachable ();
11328 case COMPOUND_EXPR:
11329 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11330 break;
11332 case COMPOUND_LITERAL_EXPR:
11333 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11334 gimple_test_f, fallback);
11335 break;
11337 case MODIFY_EXPR:
11338 case INIT_EXPR:
11339 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11340 fallback != fb_none);
11341 break;
11343 case TRUTH_ANDIF_EXPR:
11344 case TRUTH_ORIF_EXPR:
11346 /* Preserve the original type of the expression and the
11347 source location of the outer expression. */
11348 tree org_type = TREE_TYPE (*expr_p);
11349 *expr_p = gimple_boolify (*expr_p);
11350 *expr_p = build3_loc (input_location, COND_EXPR,
11351 org_type, *expr_p,
11352 fold_convert_loc
11353 (input_location,
11354 org_type, boolean_true_node),
11355 fold_convert_loc
11356 (input_location,
11357 org_type, boolean_false_node));
11358 ret = GS_OK;
11359 break;
11362 case TRUTH_NOT_EXPR:
11364 tree type = TREE_TYPE (*expr_p);
11365 /* The parsers are careful to generate TRUTH_NOT_EXPR
11366 only with operands that are always zero or one.
11367 We do not fold here but handle the only interesting case
11368 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11369 *expr_p = gimple_boolify (*expr_p);
11370 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11371 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11372 TREE_TYPE (*expr_p),
11373 TREE_OPERAND (*expr_p, 0));
11374 else
11375 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11376 TREE_TYPE (*expr_p),
11377 TREE_OPERAND (*expr_p, 0),
11378 build_int_cst (TREE_TYPE (*expr_p), 1));
11379 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11380 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11381 ret = GS_OK;
11382 break;
11385 case ADDR_EXPR:
11386 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11387 break;
11389 case ANNOTATE_EXPR:
11391 tree cond = TREE_OPERAND (*expr_p, 0);
11392 tree kind = TREE_OPERAND (*expr_p, 1);
11393 tree type = TREE_TYPE (cond);
11394 if (!INTEGRAL_TYPE_P (type))
11396 *expr_p = cond;
11397 ret = GS_OK;
11398 break;
11400 tree tmp = create_tmp_var (type);
11401 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11402 gcall *call
11403 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11404 gimple_call_set_lhs (call, tmp);
11405 gimplify_seq_add_stmt (pre_p, call);
11406 *expr_p = tmp;
11407 ret = GS_ALL_DONE;
11408 break;
11411 case VA_ARG_EXPR:
11412 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11413 break;
11415 CASE_CONVERT:
11416 if (IS_EMPTY_STMT (*expr_p))
11418 ret = GS_ALL_DONE;
11419 break;
11422 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11423 || fallback == fb_none)
11425 /* Just strip a conversion to void (or in void context) and
11426 try again. */
11427 *expr_p = TREE_OPERAND (*expr_p, 0);
11428 ret = GS_OK;
11429 break;
11432 ret = gimplify_conversion (expr_p);
11433 if (ret == GS_ERROR)
11434 break;
11435 if (*expr_p != save_expr)
11436 break;
11437 /* FALLTHRU */
11439 case FIX_TRUNC_EXPR:
11440 /* unary_expr: ... | '(' cast ')' val | ... */
11441 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11442 is_gimple_val, fb_rvalue);
11443 recalculate_side_effects (*expr_p);
11444 break;
11446 case INDIRECT_REF:
11448 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11449 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11450 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11452 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11453 if (*expr_p != save_expr)
11455 ret = GS_OK;
11456 break;
11459 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11460 is_gimple_reg, fb_rvalue);
11461 if (ret == GS_ERROR)
11462 break;
11464 recalculate_side_effects (*expr_p);
11465 *expr_p = fold_build2_loc (input_location, MEM_REF,
11466 TREE_TYPE (*expr_p),
11467 TREE_OPERAND (*expr_p, 0),
11468 build_int_cst (saved_ptr_type, 0));
11469 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11470 TREE_THIS_NOTRAP (*expr_p) = notrap;
11471 ret = GS_OK;
11472 break;
11475 /* We arrive here through the various re-gimplifcation paths. */
11476 case MEM_REF:
11477 /* First try re-folding the whole thing. */
11478 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11479 TREE_OPERAND (*expr_p, 0),
11480 TREE_OPERAND (*expr_p, 1));
11481 if (tmp)
11483 REF_REVERSE_STORAGE_ORDER (tmp)
11484 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11485 *expr_p = tmp;
11486 recalculate_side_effects (*expr_p);
11487 ret = GS_OK;
11488 break;
11490 /* Avoid re-gimplifying the address operand if it is already
11491 in suitable form. Re-gimplifying would mark the address
11492 operand addressable. Always gimplify when not in SSA form
11493 as we still may have to gimplify decls with value-exprs. */
11494 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11495 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11497 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11498 is_gimple_mem_ref_addr, fb_rvalue);
11499 if (ret == GS_ERROR)
11500 break;
11502 recalculate_side_effects (*expr_p);
11503 ret = GS_ALL_DONE;
11504 break;
11506 /* Constants need not be gimplified. */
11507 case INTEGER_CST:
11508 case REAL_CST:
11509 case FIXED_CST:
11510 case STRING_CST:
11511 case COMPLEX_CST:
11512 case VECTOR_CST:
11513 /* Drop the overflow flag on constants, we do not want
11514 that in the GIMPLE IL. */
11515 if (TREE_OVERFLOW_P (*expr_p))
11516 *expr_p = drop_tree_overflow (*expr_p);
11517 ret = GS_ALL_DONE;
11518 break;
11520 case CONST_DECL:
11521 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11522 CONST_DECL node. Otherwise the decl is replaceable by its
11523 value. */
11524 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11525 if (fallback & fb_lvalue)
11526 ret = GS_ALL_DONE;
11527 else
11529 *expr_p = DECL_INITIAL (*expr_p);
11530 ret = GS_OK;
11532 break;
11534 case DECL_EXPR:
11535 ret = gimplify_decl_expr (expr_p, pre_p);
11536 break;
11538 case BIND_EXPR:
11539 ret = gimplify_bind_expr (expr_p, pre_p);
11540 break;
11542 case LOOP_EXPR:
11543 ret = gimplify_loop_expr (expr_p, pre_p);
11544 break;
11546 case SWITCH_EXPR:
11547 ret = gimplify_switch_expr (expr_p, pre_p);
11548 break;
11550 case EXIT_EXPR:
11551 ret = gimplify_exit_expr (expr_p);
11552 break;
11554 case GOTO_EXPR:
11555 /* If the target is not LABEL, then it is a computed jump
11556 and the target needs to be gimplified. */
11557 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11559 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11560 NULL, is_gimple_val, fb_rvalue);
11561 if (ret == GS_ERROR)
11562 break;
11564 gimplify_seq_add_stmt (pre_p,
11565 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11566 ret = GS_ALL_DONE;
11567 break;
11569 case PREDICT_EXPR:
11570 gimplify_seq_add_stmt (pre_p,
11571 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11572 PREDICT_EXPR_OUTCOME (*expr_p)));
11573 ret = GS_ALL_DONE;
11574 break;
11576 case LABEL_EXPR:
11577 ret = gimplify_label_expr (expr_p, pre_p);
11578 label = LABEL_EXPR_LABEL (*expr_p);
11579 gcc_assert (decl_function_context (label) == current_function_decl);
11581 /* If the label is used in a goto statement, or address of the label
11582 is taken, we need to unpoison all variables that were seen so far.
11583 Doing so would prevent us from reporting a false positives. */
11584 if (asan_poisoned_variables
11585 && asan_used_labels != NULL
11586 && asan_used_labels->contains (label))
11587 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11588 break;
11590 case CASE_LABEL_EXPR:
11591 ret = gimplify_case_label_expr (expr_p, pre_p);
11593 if (gimplify_ctxp->live_switch_vars)
11594 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11595 pre_p);
11596 break;
11598 case RETURN_EXPR:
11599 ret = gimplify_return_expr (*expr_p, pre_p);
11600 break;
11602 case CONSTRUCTOR:
11603 /* Don't reduce this in place; let gimplify_init_constructor work its
11604 magic. Buf if we're just elaborating this for side effects, just
11605 gimplify any element that has side-effects. */
11606 if (fallback == fb_none)
11608 unsigned HOST_WIDE_INT ix;
11609 tree val;
11610 tree temp = NULL_TREE;
11611 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11612 if (TREE_SIDE_EFFECTS (val))
11613 append_to_statement_list (val, &temp);
11615 *expr_p = temp;
11616 ret = temp ? GS_OK : GS_ALL_DONE;
11618 /* C99 code may assign to an array in a constructed
11619 structure or union, and this has undefined behavior only
11620 on execution, so create a temporary if an lvalue is
11621 required. */
11622 else if (fallback == fb_lvalue)
11624 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11625 mark_addressable (*expr_p);
11626 ret = GS_OK;
11628 else
11629 ret = GS_ALL_DONE;
11630 break;
11632 /* The following are special cases that are not handled by the
11633 original GIMPLE grammar. */
11635 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11636 eliminated. */
11637 case SAVE_EXPR:
11638 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11639 break;
11641 case BIT_FIELD_REF:
11642 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11643 post_p, is_gimple_lvalue, fb_either);
11644 recalculate_side_effects (*expr_p);
11645 break;
11647 case TARGET_MEM_REF:
11649 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11651 if (TMR_BASE (*expr_p))
11652 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11653 post_p, is_gimple_mem_ref_addr, fb_either);
11654 if (TMR_INDEX (*expr_p))
11655 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11656 post_p, is_gimple_val, fb_rvalue);
11657 if (TMR_INDEX2 (*expr_p))
11658 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11659 post_p, is_gimple_val, fb_rvalue);
11660 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11661 ret = MIN (r0, r1);
11663 break;
11665 case NON_LVALUE_EXPR:
11666 /* This should have been stripped above. */
11667 gcc_unreachable ();
11669 case ASM_EXPR:
11670 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11671 break;
11673 case TRY_FINALLY_EXPR:
11674 case TRY_CATCH_EXPR:
11676 gimple_seq eval, cleanup;
11677 gtry *try_;
11679 /* Calls to destructors are generated automatically in FINALLY/CATCH
11680 block. They should have location as UNKNOWN_LOCATION. However,
11681 gimplify_call_expr will reset these call stmts to input_location
11682 if it finds stmt's location is unknown. To prevent resetting for
11683 destructors, we set the input_location to unknown.
11684 Note that this only affects the destructor calls in FINALLY/CATCH
11685 block, and will automatically reset to its original value by the
11686 end of gimplify_expr. */
11687 input_location = UNKNOWN_LOCATION;
11688 eval = cleanup = NULL;
11689 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11690 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11691 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11692 if (gimple_seq_empty_p (cleanup))
11694 gimple_seq_add_seq (pre_p, eval);
11695 ret = GS_ALL_DONE;
11696 break;
11698 try_ = gimple_build_try (eval, cleanup,
11699 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11700 ? GIMPLE_TRY_FINALLY
11701 : GIMPLE_TRY_CATCH);
11702 if (EXPR_HAS_LOCATION (save_expr))
11703 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11704 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11705 gimple_set_location (try_, saved_location);
11706 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11707 gimple_try_set_catch_is_cleanup (try_,
11708 TRY_CATCH_IS_CLEANUP (*expr_p));
11709 gimplify_seq_add_stmt (pre_p, try_);
11710 ret = GS_ALL_DONE;
11711 break;
11714 case CLEANUP_POINT_EXPR:
11715 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11716 break;
11718 case TARGET_EXPR:
11719 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11720 break;
11722 case CATCH_EXPR:
11724 gimple *c;
11725 gimple_seq handler = NULL;
11726 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11727 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11728 gimplify_seq_add_stmt (pre_p, c);
11729 ret = GS_ALL_DONE;
11730 break;
11733 case EH_FILTER_EXPR:
11735 gimple *ehf;
11736 gimple_seq failure = NULL;
11738 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11739 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11740 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11741 gimplify_seq_add_stmt (pre_p, ehf);
11742 ret = GS_ALL_DONE;
11743 break;
11746 case OBJ_TYPE_REF:
11748 enum gimplify_status r0, r1;
11749 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11750 post_p, is_gimple_val, fb_rvalue);
11751 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11752 post_p, is_gimple_val, fb_rvalue);
11753 TREE_SIDE_EFFECTS (*expr_p) = 0;
11754 ret = MIN (r0, r1);
11756 break;
11758 case LABEL_DECL:
11759 /* We get here when taking the address of a label. We mark
11760 the label as "forced"; meaning it can never be removed and
11761 it is a potential target for any computed goto. */
11762 FORCED_LABEL (*expr_p) = 1;
11763 ret = GS_ALL_DONE;
11764 break;
11766 case STATEMENT_LIST:
11767 ret = gimplify_statement_list (expr_p, pre_p);
11768 break;
11770 case WITH_SIZE_EXPR:
11772 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11773 post_p == &internal_post ? NULL : post_p,
11774 gimple_test_f, fallback);
11775 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11776 is_gimple_val, fb_rvalue);
11777 ret = GS_ALL_DONE;
11779 break;
11781 case VAR_DECL:
11782 case PARM_DECL:
11783 ret = gimplify_var_or_parm_decl (expr_p);
11784 break;
11786 case RESULT_DECL:
11787 /* When within an OMP context, notice uses of variables. */
11788 if (gimplify_omp_ctxp)
11789 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11790 ret = GS_ALL_DONE;
11791 break;
11793 case SSA_NAME:
11794 /* Allow callbacks into the gimplifier during optimization. */
11795 ret = GS_ALL_DONE;
11796 break;
11798 case OMP_PARALLEL:
11799 gimplify_omp_parallel (expr_p, pre_p);
11800 ret = GS_ALL_DONE;
11801 break;
11803 case OMP_TASK:
11804 gimplify_omp_task (expr_p, pre_p);
11805 ret = GS_ALL_DONE;
11806 break;
11808 case OMP_FOR:
11809 case OMP_SIMD:
11810 case CILK_SIMD:
11811 case CILK_FOR:
11812 case OMP_DISTRIBUTE:
11813 case OMP_TASKLOOP:
11814 case OACC_LOOP:
11815 ret = gimplify_omp_for (expr_p, pre_p);
11816 break;
11818 case OACC_CACHE:
11819 gimplify_oacc_cache (expr_p, pre_p);
11820 ret = GS_ALL_DONE;
11821 break;
11823 case OACC_DECLARE:
11824 gimplify_oacc_declare (expr_p, pre_p);
11825 ret = GS_ALL_DONE;
11826 break;
11828 case OACC_HOST_DATA:
11829 case OACC_DATA:
11830 case OACC_KERNELS:
11831 case OACC_PARALLEL:
11832 case OMP_SECTIONS:
11833 case OMP_SINGLE:
11834 case OMP_TARGET:
11835 case OMP_TARGET_DATA:
11836 case OMP_TEAMS:
11837 gimplify_omp_workshare (expr_p, pre_p);
11838 ret = GS_ALL_DONE;
11839 break;
11841 case OACC_ENTER_DATA:
11842 case OACC_EXIT_DATA:
11843 case OACC_UPDATE:
11844 case OMP_TARGET_UPDATE:
11845 case OMP_TARGET_ENTER_DATA:
11846 case OMP_TARGET_EXIT_DATA:
11847 gimplify_omp_target_update (expr_p, pre_p);
11848 ret = GS_ALL_DONE;
11849 break;
11851 case OMP_SECTION:
11852 case OMP_MASTER:
11853 case OMP_TASKGROUP:
11854 case OMP_ORDERED:
11855 case OMP_CRITICAL:
11857 gimple_seq body = NULL;
11858 gimple *g;
11860 gimplify_and_add (OMP_BODY (*expr_p), &body);
11861 switch (TREE_CODE (*expr_p))
11863 case OMP_SECTION:
11864 g = gimple_build_omp_section (body);
11865 break;
11866 case OMP_MASTER:
11867 g = gimple_build_omp_master (body);
11868 break;
11869 case OMP_TASKGROUP:
11871 gimple_seq cleanup = NULL;
11872 tree fn
11873 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11874 g = gimple_build_call (fn, 0);
11875 gimple_seq_add_stmt (&cleanup, g);
11876 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11877 body = NULL;
11878 gimple_seq_add_stmt (&body, g);
11879 g = gimple_build_omp_taskgroup (body);
11881 break;
11882 case OMP_ORDERED:
11883 g = gimplify_omp_ordered (*expr_p, body);
11884 break;
11885 case OMP_CRITICAL:
11886 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11887 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11888 gimplify_adjust_omp_clauses (pre_p, body,
11889 &OMP_CRITICAL_CLAUSES (*expr_p),
11890 OMP_CRITICAL);
11891 g = gimple_build_omp_critical (body,
11892 OMP_CRITICAL_NAME (*expr_p),
11893 OMP_CRITICAL_CLAUSES (*expr_p));
11894 break;
11895 default:
11896 gcc_unreachable ();
11898 gimplify_seq_add_stmt (pre_p, g);
11899 ret = GS_ALL_DONE;
11900 break;
11903 case OMP_ATOMIC:
11904 case OMP_ATOMIC_READ:
11905 case OMP_ATOMIC_CAPTURE_OLD:
11906 case OMP_ATOMIC_CAPTURE_NEW:
11907 ret = gimplify_omp_atomic (expr_p, pre_p);
11908 break;
11910 case TRANSACTION_EXPR:
11911 ret = gimplify_transaction (expr_p, pre_p);
11912 break;
11914 case TRUTH_AND_EXPR:
11915 case TRUTH_OR_EXPR:
11916 case TRUTH_XOR_EXPR:
11918 tree orig_type = TREE_TYPE (*expr_p);
11919 tree new_type, xop0, xop1;
11920 *expr_p = gimple_boolify (*expr_p);
11921 new_type = TREE_TYPE (*expr_p);
11922 if (!useless_type_conversion_p (orig_type, new_type))
11924 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11925 ret = GS_OK;
11926 break;
11929 /* Boolified binary truth expressions are semantically equivalent
11930 to bitwise binary expressions. Canonicalize them to the
11931 bitwise variant. */
11932 switch (TREE_CODE (*expr_p))
11934 case TRUTH_AND_EXPR:
11935 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11936 break;
11937 case TRUTH_OR_EXPR:
11938 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11939 break;
11940 case TRUTH_XOR_EXPR:
11941 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11942 break;
11943 default:
11944 break;
11946 /* Now make sure that operands have compatible type to
11947 expression's new_type. */
11948 xop0 = TREE_OPERAND (*expr_p, 0);
11949 xop1 = TREE_OPERAND (*expr_p, 1);
11950 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11951 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11952 new_type,
11953 xop0);
11954 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11955 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11956 new_type,
11957 xop1);
11958 /* Continue classified as tcc_binary. */
11959 goto expr_2;
11962 case VEC_COND_EXPR:
11964 enum gimplify_status r0, r1, r2;
11966 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11967 post_p, is_gimple_condexpr, fb_rvalue);
11968 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11969 post_p, is_gimple_val, fb_rvalue);
11970 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11971 post_p, is_gimple_val, fb_rvalue);
11973 ret = MIN (MIN (r0, r1), r2);
11974 recalculate_side_effects (*expr_p);
11976 break;
11978 case FMA_EXPR:
11979 case VEC_PERM_EXPR:
11980 /* Classified as tcc_expression. */
11981 goto expr_3;
11983 case BIT_INSERT_EXPR:
11984 /* Argument 3 is a constant. */
11985 goto expr_2;
11987 case POINTER_PLUS_EXPR:
11989 enum gimplify_status r0, r1;
11990 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11991 post_p, is_gimple_val, fb_rvalue);
11992 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11993 post_p, is_gimple_val, fb_rvalue);
11994 recalculate_side_effects (*expr_p);
11995 ret = MIN (r0, r1);
11996 break;
11999 case CILK_SYNC_STMT:
12001 if (!fn_contains_cilk_spawn_p (cfun))
12003 error_at (EXPR_LOCATION (*expr_p),
12004 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
12005 ret = GS_ERROR;
12007 else
12009 gimplify_cilk_sync (expr_p, pre_p);
12010 ret = GS_ALL_DONE;
12012 break;
12015 default:
12016 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12018 case tcc_comparison:
12019 /* Handle comparison of objects of non scalar mode aggregates
12020 with a call to memcmp. It would be nice to only have to do
12021 this for variable-sized objects, but then we'd have to allow
12022 the same nest of reference nodes we allow for MODIFY_EXPR and
12023 that's too complex.
12025 Compare scalar mode aggregates as scalar mode values. Using
12026 memcmp for them would be very inefficient at best, and is
12027 plain wrong if bitfields are involved. */
12029 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12031 /* Vector comparisons need no boolification. */
12032 if (TREE_CODE (type) == VECTOR_TYPE)
12033 goto expr_2;
12034 else if (!AGGREGATE_TYPE_P (type))
12036 tree org_type = TREE_TYPE (*expr_p);
12037 *expr_p = gimple_boolify (*expr_p);
12038 if (!useless_type_conversion_p (org_type,
12039 TREE_TYPE (*expr_p)))
12041 *expr_p = fold_convert_loc (input_location,
12042 org_type, *expr_p);
12043 ret = GS_OK;
12045 else
12046 goto expr_2;
12048 else if (TYPE_MODE (type) != BLKmode)
12049 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12050 else
12051 ret = gimplify_variable_sized_compare (expr_p);
12053 break;
12056 /* If *EXPR_P does not need to be special-cased, handle it
12057 according to its class. */
12058 case tcc_unary:
12059 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12060 post_p, is_gimple_val, fb_rvalue);
12061 break;
12063 case tcc_binary:
12064 expr_2:
12066 enum gimplify_status r0, r1;
12068 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12069 post_p, is_gimple_val, fb_rvalue);
12070 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12071 post_p, is_gimple_val, fb_rvalue);
12073 ret = MIN (r0, r1);
12074 break;
12077 expr_3:
12079 enum gimplify_status r0, r1, r2;
12081 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12082 post_p, is_gimple_val, fb_rvalue);
12083 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12084 post_p, is_gimple_val, fb_rvalue);
12085 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12086 post_p, is_gimple_val, fb_rvalue);
12088 ret = MIN (MIN (r0, r1), r2);
12089 break;
12092 case tcc_declaration:
12093 case tcc_constant:
12094 ret = GS_ALL_DONE;
12095 goto dont_recalculate;
12097 default:
12098 gcc_unreachable ();
12101 recalculate_side_effects (*expr_p);
12103 dont_recalculate:
12104 break;
12107 gcc_assert (*expr_p || ret != GS_OK);
12109 while (ret == GS_OK);
12111 /* If we encountered an error_mark somewhere nested inside, either
12112 stub out the statement or propagate the error back out. */
12113 if (ret == GS_ERROR)
12115 if (is_statement)
12116 *expr_p = NULL;
12117 goto out;
12120 /* This was only valid as a return value from the langhook, which
12121 we handled. Make sure it doesn't escape from any other context. */
12122 gcc_assert (ret != GS_UNHANDLED);
12124 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12126 /* We aren't looking for a value, and we don't have a valid
12127 statement. If it doesn't have side-effects, throw it away.
12128 We can also get here with code such as "*&&L;", where L is
12129 a LABEL_DECL that is marked as FORCED_LABEL. */
12130 if (TREE_CODE (*expr_p) == LABEL_DECL
12131 || !TREE_SIDE_EFFECTS (*expr_p))
12132 *expr_p = NULL;
12133 else if (!TREE_THIS_VOLATILE (*expr_p))
12135 /* This is probably a _REF that contains something nested that
12136 has side effects. Recurse through the operands to find it. */
12137 enum tree_code code = TREE_CODE (*expr_p);
12139 switch (code)
12141 case COMPONENT_REF:
12142 case REALPART_EXPR:
12143 case IMAGPART_EXPR:
12144 case VIEW_CONVERT_EXPR:
12145 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12146 gimple_test_f, fallback);
12147 break;
12149 case ARRAY_REF:
12150 case ARRAY_RANGE_REF:
12151 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12152 gimple_test_f, fallback);
12153 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12154 gimple_test_f, fallback);
12155 break;
12157 default:
12158 /* Anything else with side-effects must be converted to
12159 a valid statement before we get here. */
12160 gcc_unreachable ();
12163 *expr_p = NULL;
12165 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12166 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12168 /* Historically, the compiler has treated a bare reference
12169 to a non-BLKmode volatile lvalue as forcing a load. */
12170 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12172 /* Normally, we do not want to create a temporary for a
12173 TREE_ADDRESSABLE type because such a type should not be
12174 copied by bitwise-assignment. However, we make an
12175 exception here, as all we are doing here is ensuring that
12176 we read the bytes that make up the type. We use
12177 create_tmp_var_raw because create_tmp_var will abort when
12178 given a TREE_ADDRESSABLE type. */
12179 tree tmp = create_tmp_var_raw (type, "vol");
12180 gimple_add_tmp_var (tmp);
12181 gimplify_assign (tmp, *expr_p, pre_p);
12182 *expr_p = NULL;
12184 else
12185 /* We can't do anything useful with a volatile reference to
12186 an incomplete type, so just throw it away. Likewise for
12187 a BLKmode type, since any implicit inner load should
12188 already have been turned into an explicit one by the
12189 gimplification process. */
12190 *expr_p = NULL;
12193 /* If we are gimplifying at the statement level, we're done. Tack
12194 everything together and return. */
12195 if (fallback == fb_none || is_statement)
12197 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12198 it out for GC to reclaim it. */
12199 *expr_p = NULL_TREE;
12201 if (!gimple_seq_empty_p (internal_pre)
12202 || !gimple_seq_empty_p (internal_post))
12204 gimplify_seq_add_seq (&internal_pre, internal_post);
12205 gimplify_seq_add_seq (pre_p, internal_pre);
12208 /* The result of gimplifying *EXPR_P is going to be the last few
12209 statements in *PRE_P and *POST_P. Add location information
12210 to all the statements that were added by the gimplification
12211 helpers. */
12212 if (!gimple_seq_empty_p (*pre_p))
12213 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12215 if (!gimple_seq_empty_p (*post_p))
12216 annotate_all_with_location_after (*post_p, post_last_gsi,
12217 input_location);
12219 goto out;
12222 #ifdef ENABLE_GIMPLE_CHECKING
12223 if (*expr_p)
12225 enum tree_code code = TREE_CODE (*expr_p);
12226 /* These expressions should already be in gimple IR form. */
12227 gcc_assert (code != MODIFY_EXPR
12228 && code != ASM_EXPR
12229 && code != BIND_EXPR
12230 && code != CATCH_EXPR
12231 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12232 && code != EH_FILTER_EXPR
12233 && code != GOTO_EXPR
12234 && code != LABEL_EXPR
12235 && code != LOOP_EXPR
12236 && code != SWITCH_EXPR
12237 && code != TRY_FINALLY_EXPR
12238 && code != OACC_PARALLEL
12239 && code != OACC_KERNELS
12240 && code != OACC_DATA
12241 && code != OACC_HOST_DATA
12242 && code != OACC_DECLARE
12243 && code != OACC_UPDATE
12244 && code != OACC_ENTER_DATA
12245 && code != OACC_EXIT_DATA
12246 && code != OACC_CACHE
12247 && code != OMP_CRITICAL
12248 && code != OMP_FOR
12249 && code != OACC_LOOP
12250 && code != OMP_MASTER
12251 && code != OMP_TASKGROUP
12252 && code != OMP_ORDERED
12253 && code != OMP_PARALLEL
12254 && code != OMP_SECTIONS
12255 && code != OMP_SECTION
12256 && code != OMP_SINGLE);
12258 #endif
12260 /* Otherwise we're gimplifying a subexpression, so the resulting
12261 value is interesting. If it's a valid operand that matches
12262 GIMPLE_TEST_F, we're done. Unless we are handling some
12263 post-effects internally; if that's the case, we need to copy into
12264 a temporary before adding the post-effects to POST_P. */
12265 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12266 goto out;
12268 /* Otherwise, we need to create a new temporary for the gimplified
12269 expression. */
12271 /* We can't return an lvalue if we have an internal postqueue. The
12272 object the lvalue refers to would (probably) be modified by the
12273 postqueue; we need to copy the value out first, which means an
12274 rvalue. */
12275 if ((fallback & fb_lvalue)
12276 && gimple_seq_empty_p (internal_post)
12277 && is_gimple_addressable (*expr_p))
12279 /* An lvalue will do. Take the address of the expression, store it
12280 in a temporary, and replace the expression with an INDIRECT_REF of
12281 that temporary. */
12282 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12283 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12284 *expr_p = build_simple_mem_ref (tmp);
12286 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12288 /* An rvalue will do. Assign the gimplified expression into a
12289 new temporary TMP and replace the original expression with
12290 TMP. First, make sure that the expression has a type so that
12291 it can be assigned into a temporary. */
12292 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12293 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12295 else
12297 #ifdef ENABLE_GIMPLE_CHECKING
12298 if (!(fallback & fb_mayfail))
12300 fprintf (stderr, "gimplification failed:\n");
12301 print_generic_expr (stderr, *expr_p);
12302 debug_tree (*expr_p);
12303 internal_error ("gimplification failed");
12305 #endif
12306 gcc_assert (fallback & fb_mayfail);
12308 /* If this is an asm statement, and the user asked for the
12309 impossible, don't die. Fail and let gimplify_asm_expr
12310 issue an error. */
12311 ret = GS_ERROR;
12312 goto out;
12315 /* Make sure the temporary matches our predicate. */
12316 gcc_assert ((*gimple_test_f) (*expr_p));
12318 if (!gimple_seq_empty_p (internal_post))
12320 annotate_all_with_location (internal_post, input_location);
12321 gimplify_seq_add_seq (pre_p, internal_post);
12324 out:
12325 input_location = saved_location;
12326 return ret;
12329 /* Like gimplify_expr but make sure the gimplified result is not itself
12330 a SSA name (but a decl if it were). Temporaries required by
12331 evaluating *EXPR_P may be still SSA names. */
12333 static enum gimplify_status
12334 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12335 bool (*gimple_test_f) (tree), fallback_t fallback,
12336 bool allow_ssa)
12338 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12339 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12340 gimple_test_f, fallback);
12341 if (! allow_ssa
12342 && TREE_CODE (*expr_p) == SSA_NAME)
12344 tree name = *expr_p;
12345 if (was_ssa_name_p)
12346 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12347 else
12349 /* Avoid the extra copy if possible. */
12350 *expr_p = create_tmp_reg (TREE_TYPE (name));
12351 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12352 release_ssa_name (name);
12355 return ret;
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark before recursing: self-referential types would otherwise loop.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the just-gimplified bounds to the other variants,
	 which share them with the main variant.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Clear DECL_IGNORED_P on artificial bound variables so they
	     survive into debug information.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the offset and size of every field, and recurse into
	 each field's type.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Gimplify the overall size and unit size, shared by every type code.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Copy the gimplified sizes into all variants and mark them done so
     they are not reprocessed.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
12461 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12462 a size or position, has had all of its SAVE_EXPRs evaluated.
12463 We add any required statements to *STMT_P. */
12465 void
12466 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12468 tree expr = *expr_p;
12470 /* We don't do anything if the value isn't there, is constant, or contains
12471 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12472 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12473 will want to replace it with a new variable, but that will cause problems
12474 if this type is from outside the function. It's OK to have that here. */
12475 if (is_gimple_sizepos (expr))
12476 return;
12478 *expr_p = unshare_expr (expr);
12480 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12481 if the def vanishes. */
12482 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* NOTE(review): cgn->origin appears to be set for nested functions;
     allocate the set used to track references to nonlocal VLAs in that
     case — confirm against cgraph.h.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; let GC reclaim it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OMP context created above (or during gimplification).  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
12612 typedef char *char_p; /* For DEF_VEC_P. */
12614 /* Return whether we should exclude FNDECL from instrumentation. */
12616 static bool
12617 flag_instrument_functions_exclude_p (tree fndecl)
12619 vec<char_p> *v;
12621 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12622 if (v && v->length () > 0)
12624 const char *name;
12625 int i;
12626 char *s;
12628 name = lang_hooks.decl_printable_name (fndecl, 0);
12629 FOR_EACH_VEC_ELT (*v, i, s)
12630 if (strstr (name, s) != NULL)
12631 return true;
12634 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12635 if (v && v->length () > 0)
12637 const char *name;
12638 int i;
12639 char *s;
12641 name = DECL_SOURCE_FILE (fndecl);
12642 FOR_EACH_VEC_ELT (*v, i, s)
12643 if (strstr (name, s) != NULL)
12644 return true;
12647 return false;
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* We must not have gimplified this function already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Make FNDECL's struct function current for the duration of the pass;
     popped again at the end of this function.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the RESULT_DECL, but with no volatility check — it is
     compiler-generated and never user-volatile.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Set up the variable-poisoning table used by ASan's use-after-scope
     checking for the duration of gimplify_body, then tear it down.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the exit-hook sequence first, as the TRY_FINALLY cleanup:
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_exit (fndecl, return_addr);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Now the entry hook, followed by the try/finally that holds the
	 original body:
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_enter (fndecl, return_addr);
	   try { <original body> } finally { <exit hook> }  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, wrap the (possibly already wrapped) body in a
     further try/finally whose cleanup is an internal TSAN_FUNC_EXIT call,
     so the exit event fires on every path out of the function.  */
  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC body is dead now; the GIMPLE body installed above is the
     sole representation from here on.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
12779 /* Return a dummy expression of type TYPE in order to keep going after an
12780 error. */
12782 static tree
12783 dummy_object (tree type)
12785 tree t = build_int_cst (build_pointer_type (type), 0);
12786 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR to lower; PRE_P receives any statements
   that must execute before it.  Returns GS_ERROR on a bad va_list type,
   GS_ALL_DONE when the expression was diagnosed and replaced by dead
   code, and GS_OK after rewriting it into an IFN_VA_ARG internal call.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Only emit the "so you should pass" hint once per compilation.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* TAG encodes the requested element type, APTAG the va_list's own type;
     both are zero constants carrying type information only, consumed when
     IFN_VA_ARG is later expanded (presumably by the stdarg lowering pass
     — confirm against the PROP_gimple_lva consumers).  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
12869 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12871 DST/SRC are the destination and source respectively. You can pass
12872 ungimplified trees in DST or SRC, in which case they will be
12873 converted to a gimple operand if necessary.
12875 This function returns the newly created GIMPLE_ASSIGN tuple. */
12877 gimple *
12878 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12880 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12881 gimplify_and_add (t, seq_p);
12882 ggc_free (t);
12883 return gimple_seq_last_stmt (*seq_p);
12886 inline hashval_t
12887 gimplify_hasher::hash (const elt_t *p)
12889 tree t = p->val;
12890 return iterative_hash_expr (t, 0);
12893 inline bool
12894 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12896 tree t1 = p1->val;
12897 tree t2 = p2->val;
12898 enum tree_code code = TREE_CODE (t1);
12900 if (TREE_CODE (t2) != code
12901 || TREE_TYPE (t1) != TREE_TYPE (t2))
12902 return false;
12904 if (!operand_equal_p (t1, t2, 0))
12905 return false;
12907 /* Only allow them to compare equal if they also hash equal; otherwise
12908 results are nondeterminate, and we fail bootstrap comparison. */
12909 gcc_checking_assert (hash (p1) == hash (p2));
12911 return true;