PR middle-end/83164
[official-gcc.git] / gcc / gimplify.c
blob16a86ce70f04e2cb56787e91db045c613b6ebc46
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
69 /* Hash set of poisoned variables in a bind expr. */
70 static hash_set<tree> *asan_poisoned_variables = NULL;
72 enum gimplify_omp_var_data
74 GOVD_SEEN = 1,
75 GOVD_EXPLICIT = 2,
76 GOVD_SHARED = 4,
77 GOVD_PRIVATE = 8,
78 GOVD_FIRSTPRIVATE = 16,
79 GOVD_LASTPRIVATE = 32,
80 GOVD_REDUCTION = 64,
81 GOVD_LOCAL = 128,
82 GOVD_MAP = 256,
83 GOVD_DEBUG_PRIVATE = 512,
84 GOVD_PRIVATE_OUTER_REF = 1024,
85 GOVD_LINEAR = 2048,
86 GOVD_ALIGNED = 4096,
88 /* Flag for GOVD_MAP: don't copy back. */
89 GOVD_MAP_TO_ONLY = 8192,
91 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
92 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
94 GOVD_MAP_0LEN_ARRAY = 32768,
96 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
97 GOVD_MAP_ALWAYS_TO = 65536,
99 /* Flag for shared vars that are or might be stored to in the region. */
100 GOVD_WRITTEN = 131072,
102 /* Flag for GOVD_MAP, if it is a forced mapping. */
103 GOVD_MAP_FORCE = 262144,
105 /* Flag for GOVD_MAP: must be present already. */
106 GOVD_MAP_FORCE_PRESENT = 524288,
108 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
109 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
110 | GOVD_LOCAL)
114 enum omp_region_type
116 ORT_WORKSHARE = 0x00,
117 ORT_SIMD = 0x01,
119 ORT_PARALLEL = 0x02,
120 ORT_COMBINED_PARALLEL = 0x03,
122 ORT_TASK = 0x04,
123 ORT_UNTIED_TASK = 0x05,
125 ORT_TEAMS = 0x08,
126 ORT_COMBINED_TEAMS = 0x09,
128 /* Data region. */
129 ORT_TARGET_DATA = 0x10,
131 /* Data region with offloading. */
132 ORT_TARGET = 0x20,
133 ORT_COMBINED_TARGET = 0x21,
135 /* OpenACC variants. */
136 ORT_ACC = 0x40, /* A generic OpenACC region. */
137 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
138 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
139 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
140 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
142 /* Dummy OpenMP region, used to disable expansion of
143 DECL_VALUE_EXPRs in taskloop pre body. */
144 ORT_NONE = 0x100
147 /* Gimplify hashtable helper. */
149 struct gimplify_hasher : free_ptr_hash <elt_t>
151 static inline hashval_t hash (const elt_t *);
152 static inline bool equal (const elt_t *, const elt_t *);
155 struct gimplify_ctx
157 struct gimplify_ctx *prev_context;
159 vec<gbind *> bind_expr_stack;
160 tree temps;
161 gimple_seq conditional_cleanups;
162 tree exit_label;
163 tree return_temp;
165 vec<tree> case_labels;
166 hash_set<tree> *live_switch_vars;
167 /* The formal temporary table. Should this be persistent? */
168 hash_table<gimplify_hasher> *temp_htab;
170 int conditions;
171 unsigned into_ssa : 1;
172 unsigned allow_rhs_cond_expr : 1;
173 unsigned in_cleanup_point_expr : 1;
174 unsigned keep_stack : 1;
175 unsigned save_stack : 1;
176 unsigned in_switch_expr : 1;
179 struct gimplify_omp_ctx
181 struct gimplify_omp_ctx *outer_context;
182 splay_tree variables;
183 hash_set<tree> *privatized_types;
184 /* Iteration variables in an OMP_FOR. */
185 vec<tree> loop_iter_var;
186 location_t location;
187 enum omp_clause_default_kind default_kind;
188 enum omp_region_type region_type;
189 bool combined_loop;
190 bool distribute;
191 bool target_map_scalars_firstprivate;
192 bool target_map_pointers_as_0len_arrays;
193 bool target_firstprivatize_array_bases;
196 static struct gimplify_ctx *gimplify_ctxp;
197 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
199 /* Forward declaration. */
200 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
201 static hash_map<tree, tree> *oacc_declare_returns;
202 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
203 bool (*) (tree), fallback_t, bool);
205 /* Shorter alias name for the above function for use in gimplify.c
206 only. */
208 static inline void
209 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
211 gimple_seq_add_stmt_without_update (seq_p, gs);
214 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
215 NULL, a new sequence is allocated. This function is
216 similar to gimple_seq_add_seq, but does not scan the operands.
217 During gimplification, we need to manipulate statement sequences
218 before the def/use vectors have been constructed. */
220 static void
221 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
223 gimple_stmt_iterator si;
225 if (src == NULL)
226 return;
228 si = gsi_last (*dst_p);
229 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
233 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
234 and popping gimplify contexts. */
236 static struct gimplify_ctx *ctx_pool = NULL;
238 /* Return a gimplify context struct from the pool. */
240 static inline struct gimplify_ctx *
241 ctx_alloc (void)
243 struct gimplify_ctx * c = ctx_pool;
245 if (c)
246 ctx_pool = c->prev_context;
247 else
248 c = XNEW (struct gimplify_ctx);
250 memset (c, '\0', sizeof (*c));
251 return c;
254 /* Put gimplify context C back into the pool. */
256 static inline void
257 ctx_free (struct gimplify_ctx *c)
259 c->prev_context = ctx_pool;
260 ctx_pool = c;
263 /* Free allocated ctx stack memory. */
265 void
266 free_gimplify_stack (void)
268 struct gimplify_ctx *c;
270 while ((c = ctx_pool))
272 ctx_pool = c->prev_context;
273 free (c);
278 /* Set up a context for the gimplifier. */
280 void
281 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
283 struct gimplify_ctx *c = ctx_alloc ();
285 c->prev_context = gimplify_ctxp;
286 gimplify_ctxp = c;
287 gimplify_ctxp->into_ssa = in_ssa;
288 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
291 /* Tear down a context for the gimplifier. If BODY is non-null, then
292 put the temporaries into the outer BIND_EXPR. Otherwise, put them
293 in the local_decls.
295 BODY is not a sequence, but the first tuple in a sequence. */
297 void
298 pop_gimplify_context (gimple *body)
300 struct gimplify_ctx *c = gimplify_ctxp;
302 gcc_assert (c
303 && (!c->bind_expr_stack.exists ()
304 || c->bind_expr_stack.is_empty ()));
305 c->bind_expr_stack.release ();
306 gimplify_ctxp = c->prev_context;
308 if (body)
309 declare_vars (c->temps, body, false);
310 else
311 record_vars (c->temps);
313 delete c->temp_htab;
314 c->temp_htab = NULL;
315 ctx_free (c);
318 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
320 static void
321 gimple_push_bind_expr (gbind *bind_stmt)
323 gimplify_ctxp->bind_expr_stack.reserve (8);
324 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
327 /* Pop the first element off the stack of bindings. */
329 static void
330 gimple_pop_bind_expr (void)
332 gimplify_ctxp->bind_expr_stack.pop ();
335 /* Return the first element of the stack of bindings. */
337 gbind *
338 gimple_current_bind_expr (void)
340 return gimplify_ctxp->bind_expr_stack.last ();
343 /* Return the stack of bindings created during gimplification. */
345 vec<gbind *>
346 gimple_bind_expr_stack (void)
348 return gimplify_ctxp->bind_expr_stack;
351 /* Return true iff there is a COND_EXPR between us and the innermost
352 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
354 static bool
355 gimple_conditional_context (void)
357 return gimplify_ctxp->conditions > 0;
360 /* Note that we've entered a COND_EXPR. */
362 static void
363 gimple_push_condition (void)
365 #ifdef ENABLE_GIMPLE_CHECKING
366 if (gimplify_ctxp->conditions == 0)
367 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
368 #endif
369 ++(gimplify_ctxp->conditions);
372 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
373 now, add any conditional cleanups we've seen to the prequeue. */
375 static void
376 gimple_pop_condition (gimple_seq *pre_p)
378 int conds = --(gimplify_ctxp->conditions);
380 gcc_assert (conds >= 0);
381 if (conds == 0)
383 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
384 gimplify_ctxp->conditional_cleanups = NULL;
388 /* A stable comparison routine for use with splay trees and DECLs. */
390 static int
391 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
393 tree a = (tree) xa;
394 tree b = (tree) xb;
396 return DECL_UID (a) - DECL_UID (b);
399 /* Create a new omp construct that deals with variable remapping. */
401 static struct gimplify_omp_ctx *
402 new_omp_context (enum omp_region_type region_type)
404 struct gimplify_omp_ctx *c;
406 c = XCNEW (struct gimplify_omp_ctx);
407 c->outer_context = gimplify_omp_ctxp;
408 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
409 c->privatized_types = new hash_set<tree>;
410 c->location = input_location;
411 c->region_type = region_type;
412 if ((region_type & ORT_TASK) == 0)
413 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
414 else
415 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
417 return c;
420 /* Destroy an omp construct that deals with variable remapping. */
422 static void
423 delete_omp_context (struct gimplify_omp_ctx *c)
425 splay_tree_delete (c->variables);
426 delete c->privatized_types;
427 c->loop_iter_var.release ();
428 XDELETE (c);
431 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
432 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
434 /* Both gimplify the statement T and append it to *SEQ_P. This function
435 behaves exactly as gimplify_stmt, but you don't have to pass T as a
436 reference. */
438 void
439 gimplify_and_add (tree t, gimple_seq *seq_p)
441 gimplify_stmt (&t, seq_p);
444 /* Gimplify statement T into sequence *SEQ_P, and return the first
445 tuple in the sequence of generated tuples for this statement.
446 Return NULL if gimplifying T produced no tuples. */
448 static gimple *
449 gimplify_and_return_first (tree t, gimple_seq *seq_p)
451 gimple_stmt_iterator last = gsi_last (*seq_p);
453 gimplify_and_add (t, seq_p);
455 if (!gsi_end_p (last))
457 gsi_next (&last);
458 return gsi_stmt (last);
460 else
461 return gimple_seq_first_stmt (*seq_p);
464 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
465 LHS, or for a call argument. */
467 static bool
468 is_gimple_mem_rhs (tree t)
470 /* If we're dealing with a renamable type, either source or dest must be
471 a renamed variable. */
472 if (is_gimple_reg_type (TREE_TYPE (t)))
473 return is_gimple_val (t);
474 else
475 return is_gimple_val (t) || is_gimple_lvalue (t);
478 /* Return true if T is a CALL_EXPR or an expression that can be
479 assigned to a temporary. Note that this predicate should only be
480 used during gimplification. See the rationale for this in
481 gimplify_modify_expr. */
483 static bool
484 is_gimple_reg_rhs_or_call (tree t)
486 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
487 || TREE_CODE (t) == CALL_EXPR);
490 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
491 this predicate should only be used during gimplification. See the
492 rationale for this in gimplify_modify_expr. */
494 static bool
495 is_gimple_mem_rhs_or_call (tree t)
497 /* If we're dealing with a renamable type, either source or dest must be
498 a renamed variable. */
499 if (is_gimple_reg_type (TREE_TYPE (t)))
500 return is_gimple_val (t);
501 else
502 return (is_gimple_val (t)
503 || is_gimple_lvalue (t)
504 || TREE_CLOBBER_P (t)
505 || TREE_CODE (t) == CALL_EXPR);
508 /* Create a temporary with a name derived from VAL. Subroutine of
509 lookup_tmp_var; nobody else should call this function. */
511 static inline tree
512 create_tmp_from_val (tree val)
514 /* Drop all qualifiers and address-space information from the value type. */
515 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
516 tree var = create_tmp_var (type, get_name (val));
517 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
518 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
519 DECL_GIMPLE_REG_P (var) = 1;
520 return var;
523 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
524 an existing expression temporary. */
526 static tree
527 lookup_tmp_var (tree val, bool is_formal)
529 tree ret;
531 /* If not optimizing, never really reuse a temporary. local-alloc
532 won't allocate any variable that is used in more than one basic
533 block, which means it will go into memory, causing much extra
534 work in reload and final and poorer code generation, outweighing
535 the extra memory allocation here. */
536 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
537 ret = create_tmp_from_val (val);
538 else
540 elt_t elt, *elt_p;
541 elt_t **slot;
543 elt.val = val;
544 if (!gimplify_ctxp->temp_htab)
545 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
546 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
547 if (*slot == NULL)
549 elt_p = XNEW (elt_t);
550 elt_p->val = val;
551 elt_p->temp = ret = create_tmp_from_val (val);
552 *slot = elt_p;
554 else
556 elt_p = *slot;
557 ret = elt_p->temp;
561 return ret;
564 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
566 static tree
567 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
568 bool is_formal, bool allow_ssa)
570 tree t, mod;
572 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
573 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
574 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
575 fb_rvalue);
577 if (allow_ssa
578 && gimplify_ctxp->into_ssa
579 && is_gimple_reg_type (TREE_TYPE (val)))
581 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
582 if (! gimple_in_ssa_p (cfun))
584 const char *name = get_name (val);
585 if (name)
586 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
589 else
590 t = lookup_tmp_var (val, is_formal);
592 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
594 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
596 /* gimplify_modify_expr might want to reduce this further. */
597 gimplify_and_add (mod, pre_p);
598 ggc_free (mod);
600 return t;
603 /* Return a formal temporary variable initialized with VAL. PRE_P is as
604 in gimplify_expr. Only use this function if:
606 1) The value of the unfactored expression represented by VAL will not
607 change between the initialization and use of the temporary, and
608 2) The temporary will not be otherwise modified.
610 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
611 and #2 means it is inappropriate for && temps.
613 For other cases, use get_initialized_tmp_var instead. */
615 tree
616 get_formal_tmp_var (tree val, gimple_seq *pre_p)
618 return internal_get_tmp_var (val, pre_p, NULL, true, true);
621 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
622 are as in gimplify_expr. */
624 tree
625 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
626 bool allow_ssa)
628 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
631 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
632 generate debug info for them; otherwise don't. */
634 void
635 declare_vars (tree vars, gimple *gs, bool debug_info)
637 tree last = vars;
638 if (last)
640 tree temps, block;
642 gbind *scope = as_a <gbind *> (gs);
644 temps = nreverse (last);
646 block = gimple_bind_block (scope);
647 gcc_assert (!block || TREE_CODE (block) == BLOCK);
648 if (!block || !debug_info)
650 DECL_CHAIN (last) = gimple_bind_vars (scope);
651 gimple_bind_set_vars (scope, temps);
653 else
655 /* We need to attach the nodes both to the BIND_EXPR and to its
656 associated BLOCK for debugging purposes. The key point here
657 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
658 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
659 if (BLOCK_VARS (block))
660 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
661 else
663 gimple_bind_set_vars (scope,
664 chainon (gimple_bind_vars (scope), temps));
665 BLOCK_VARS (block) = temps;
671 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
672 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
673 no such upper bound can be obtained. */
675 static void
676 force_constant_size (tree var)
678 /* The only attempt we make is by querying the maximum size of objects
679 of the variable's type. */
681 HOST_WIDE_INT max_size;
683 gcc_assert (VAR_P (var));
685 max_size = max_int_size_in_bytes (TREE_TYPE (var));
687 gcc_assert (max_size >= 0);
689 DECL_SIZE_UNIT (var)
690 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
691 DECL_SIZE (var)
692 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
695 /* Push the temporary variable TMP into the current binding. */
697 void
698 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
700 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
702 /* Later processing assumes that the object size is constant, which might
703 not be true at this point. Force the use of a constant upper bound in
704 this case. */
705 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
706 force_constant_size (tmp);
708 DECL_CONTEXT (tmp) = fn->decl;
709 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
711 record_vars_into (tmp, fn->decl);
714 /* Push the temporary variable TMP into the current binding. */
716 void
717 gimple_add_tmp_var (tree tmp)
719 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
721 /* Later processing assumes that the object size is constant, which might
722 not be true at this point. Force the use of a constant upper bound in
723 this case. */
724 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
725 force_constant_size (tmp);
727 DECL_CONTEXT (tmp) = current_function_decl;
728 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
730 if (gimplify_ctxp)
732 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
733 gimplify_ctxp->temps = tmp;
735 /* Mark temporaries local within the nearest enclosing parallel. */
736 if (gimplify_omp_ctxp)
738 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
739 while (ctx
740 && (ctx->region_type == ORT_WORKSHARE
741 || ctx->region_type == ORT_SIMD
742 || ctx->region_type == ORT_ACC))
743 ctx = ctx->outer_context;
744 if (ctx)
745 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
748 else if (cfun)
749 record_vars (tmp);
750 else
752 gimple_seq body_seq;
754 /* This case is for nested functions. We need to expose the locals
755 they create. */
756 body_seq = gimple_body (current_function_decl);
757 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
763 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
764 nodes that are referenced more than once in GENERIC functions. This is
765 necessary because gimplification (translation into GIMPLE) is performed
766 by modifying tree nodes in-place, so gimplication of a shared node in a
767 first context could generate an invalid GIMPLE form in a second context.
769 This is achieved with a simple mark/copy/unmark algorithm that walks the
770 GENERIC representation top-down, marks nodes with TREE_VISITED the first
771 time it encounters them, duplicates them if they already have TREE_VISITED
772 set, and finally removes the TREE_VISITED marks it has set.
774 The algorithm works only at the function level, i.e. it generates a GENERIC
775 representation of a function with no nodes shared within the function when
776 passed a GENERIC function (except for nodes that are allowed to be shared).
778 At the global level, it is also necessary to unshare tree nodes that are
779 referenced in more than one function, for the same aforementioned reason.
780 This requires some cooperation from the front-end. There are 2 strategies:
782 1. Manual unsharing. The front-end needs to call unshare_expr on every
783 expression that might end up being shared across functions.
785 2. Deep unsharing. This is an extension of regular unsharing. Instead
786 of calling unshare_expr on expressions that might be shared across
787 functions, the front-end pre-marks them with TREE_VISITED. This will
788 ensure that they are unshared on the first reference within functions
789 when the regular unsharing algorithm runs. The counterpart is that
790 this algorithm must look deeper than for manual unsharing, which is
791 specified by LANG_HOOKS_DEEP_UNSHARING.
793 If there are only few specific cases of node sharing across functions, it is
794 probably easier for a front-end to unshare the expressions manually. On the
795 contrary, if the expressions generated at the global level are as widespread
796 as expressions generated within functions, deep unsharing is very likely the
797 way to go. */
799 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
800 These nodes model computations that must be done once. If we were to
801 unshare something like SAVE_EXPR(i++), the gimplification process would
802 create wrong code. However, if DATA is non-null, it must hold a pointer
803 set that is used to unshare the subtrees of these nodes. */
805 static tree
806 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
808 tree t = *tp;
809 enum tree_code code = TREE_CODE (t);
811 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
812 copy their subtrees if we can make sure to do it only once. */
813 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
815 if (data && !((hash_set<tree> *)data)->add (t))
817 else
818 *walk_subtrees = 0;
821 /* Stop at types, decls, constants like copy_tree_r. */
822 else if (TREE_CODE_CLASS (code) == tcc_type
823 || TREE_CODE_CLASS (code) == tcc_declaration
824 || TREE_CODE_CLASS (code) == tcc_constant)
825 *walk_subtrees = 0;
827 /* Cope with the statement expression extension. */
828 else if (code == STATEMENT_LIST)
831 /* Leave the bulk of the work to copy_tree_r itself. */
832 else
833 copy_tree_r (tp, walk_subtrees, NULL);
835 return NULL_TREE;
838 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
839 If *TP has been visited already, then *TP is deeply copied by calling
840 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
842 static tree
843 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
845 tree t = *tp;
846 enum tree_code code = TREE_CODE (t);
848 /* Skip types, decls, and constants. But we do want to look at their
849 types and the bounds of types. Mark them as visited so we properly
850 unmark their subtrees on the unmark pass. If we've already seen them,
851 don't look down further. */
852 if (TREE_CODE_CLASS (code) == tcc_type
853 || TREE_CODE_CLASS (code) == tcc_declaration
854 || TREE_CODE_CLASS (code) == tcc_constant)
856 if (TREE_VISITED (t))
857 *walk_subtrees = 0;
858 else
859 TREE_VISITED (t) = 1;
862 /* If this node has been visited already, unshare it and don't look
863 any deeper. */
864 else if (TREE_VISITED (t))
866 walk_tree (tp, mostly_copy_tree_r, data, NULL);
867 *walk_subtrees = 0;
870 /* Otherwise, mark the node as visited and keep looking. */
871 else
872 TREE_VISITED (t) = 1;
874 return NULL_TREE;
877 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
878 copy_if_shared_r callback unmodified. */
880 static inline void
881 copy_if_shared (tree *tp, void *data)
883 walk_tree (tp, copy_if_shared_r, data, NULL);
886 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
887 any nested functions. */
889 static void
890 unshare_body (tree fndecl)
892 struct cgraph_node *cgn = cgraph_node::get (fndecl);
893 /* If the language requires deep unsharing, we need a pointer set to make
894 sure we don't repeatedly unshare subtrees of unshareable nodes. */
895 hash_set<tree> *visited
896 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
898 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
899 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
900 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
902 delete visited;
904 if (cgn)
905 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
906 unshare_body (cgn->decl);
909 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
910 Subtrees are walked until the first unvisited node is encountered. */
912 static tree
913 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
915 tree t = *tp;
917 /* If this node has been visited, unmark it and keep looking. */
918 if (TREE_VISITED (t))
919 TREE_VISITED (t) = 0;
921 /* Otherwise, don't look any deeper. */
922 else
923 *walk_subtrees = 0;
925 return NULL_TREE;
928 /* Unmark the visited trees rooted at *TP. */
930 static inline void
931 unmark_visited (tree *tp)
933 walk_tree (tp, unmark_visited_r, NULL, NULL);
936 /* Likewise, but mark all trees as not visited. */
938 static void
939 unvisit_body (tree fndecl)
941 struct cgraph_node *cgn = cgraph_node::get (fndecl);
943 unmark_visited (&DECL_SAVED_TREE (fndecl));
944 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
945 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
947 if (cgn)
948 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
949 unvisit_body (cgn->decl);
952 /* Unconditionally make an unshared copy of EXPR. This is used when using
953 stored expressions which span multiple functions, such as BINFO_VTABLE,
954 as the normal unsharing process can't tell that they're shared. */
956 tree
957 unshare_expr (tree expr)
959 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
960 return expr;
963 /* Worker for unshare_expr_without_location. */
965 static tree
966 prune_expr_location (tree *tp, int *walk_subtrees, void *)
968 if (EXPR_P (*tp))
969 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
970 else
971 *walk_subtrees = 0;
972 return NULL_TREE;
975 /* Similar to unshare_expr but also prune all expression locations
976 from EXPR. */
978 tree
979 unshare_expr_without_location (tree expr)
981 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
982 if (EXPR_P (expr))
983 walk_tree (&expr, prune_expr_location, NULL, NULL);
984 return expr;
987 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
988 contain statements and have a value. Assign its value to a temporary
989 and give it void_type_node. Return the temporary, or NULL_TREE if
990 WRAPPER was already void. */
992 tree
993 voidify_wrapper_expr (tree wrapper, tree temp)
995 tree type = TREE_TYPE (wrapper);
996 if (type && !VOID_TYPE_P (type))
998 tree *p;
1000 /* Set p to point to the body of the wrapper. Loop until we find
1001 something that isn't a wrapper. */
1002 for (p = &wrapper; p && *p; )
1004 switch (TREE_CODE (*p))
1006 case BIND_EXPR:
1007 TREE_SIDE_EFFECTS (*p) = 1;
1008 TREE_TYPE (*p) = void_type_node;
1009 /* For a BIND_EXPR, the body is operand 1. */
1010 p = &BIND_EXPR_BODY (*p);
1011 break;
1013 case CLEANUP_POINT_EXPR:
1014 case TRY_FINALLY_EXPR:
1015 case TRY_CATCH_EXPR:
1016 TREE_SIDE_EFFECTS (*p) = 1;
1017 TREE_TYPE (*p) = void_type_node;
1018 p = &TREE_OPERAND (*p, 0);
1019 break;
1021 case STATEMENT_LIST:
1023 tree_stmt_iterator i = tsi_last (*p);
1024 TREE_SIDE_EFFECTS (*p) = 1;
1025 TREE_TYPE (*p) = void_type_node;
1026 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1028 break;
1030 case COMPOUND_EXPR:
1031 /* Advance to the last statement. Set all container types to
1032 void. */
1033 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1035 TREE_SIDE_EFFECTS (*p) = 1;
1036 TREE_TYPE (*p) = void_type_node;
1038 break;
1040 case TRANSACTION_EXPR:
1041 TREE_SIDE_EFFECTS (*p) = 1;
1042 TREE_TYPE (*p) = void_type_node;
1043 p = &TRANSACTION_EXPR_BODY (*p);
1044 break;
1046 default:
1047 /* Assume that any tree upon which voidify_wrapper_expr is
1048 directly called is a wrapper, and that its body is op0. */
1049 if (p == &wrapper)
1051 TREE_SIDE_EFFECTS (*p) = 1;
1052 TREE_TYPE (*p) = void_type_node;
1053 p = &TREE_OPERAND (*p, 0);
1054 break;
1056 goto out;
1060 out:
1061 if (p == NULL || IS_EMPTY_STMT (*p))
1062 temp = NULL_TREE;
1063 else if (temp)
1065 /* The wrapper is on the RHS of an assignment that we're pushing
1066 down. */
1067 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1068 || TREE_CODE (temp) == MODIFY_EXPR);
1069 TREE_OPERAND (temp, 1) = *p;
1070 *p = temp;
1072 else
1074 temp = create_tmp_var (type, "retval");
1075 *p = build2 (INIT_EXPR, type, temp, *p);
1078 return temp;
1081 return NULL_TREE;
1084 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1085 a temporary through which they communicate. */
1087 static void
1088 build_stack_save_restore (gcall **save, gcall **restore)
1090 tree tmp_var;
1092 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1093 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1094 gimple_call_set_lhs (*save, tmp_var);
1096 *restore
1097 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1098 1, tmp_var);
1101 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1103 static tree
1104 build_asan_poison_call_expr (tree decl)
1106 /* Do not poison variables that have size equal to zero. */
1107 tree unit_size = DECL_SIZE_UNIT (decl);
1108 if (zerop (unit_size))
1109 return NULL_TREE;
1111 tree base = build_fold_addr_expr (decl);
1113 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1114 void_type_node, 3,
1115 build_int_cst (integer_type_node,
1116 ASAN_MARK_POISON),
1117 base, unit_size);
1120 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1121 on POISON flag, shadow memory of a DECL variable. The call will be
1122 put on location identified by IT iterator, where BEFORE flag drives
1123 position where the stmt will be put. */
1125 static void
1126 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1127 bool before)
1129 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1130 if (gimplify_omp_ctxp)
1131 return;
1133 tree unit_size = DECL_SIZE_UNIT (decl);
1134 tree base = build_fold_addr_expr (decl);
1136 /* Do not poison variables that have size equal to zero. */
1137 if (zerop (unit_size))
1138 return;
1140 /* It's necessary to have all stack variables aligned to ASAN granularity
1141 bytes. */
1142 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1143 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1145 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1147 gimple *g
1148 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1149 build_int_cst (integer_type_node, flags),
1150 base, unit_size);
1152 if (before)
1153 gsi_insert_before (it, g, GSI_NEW_STMT);
1154 else
1155 gsi_insert_after (it, g, GSI_NEW_STMT);
1158 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1159 either poisons or unpoisons a DECL. Created statement is appended
1160 to SEQ_P gimple sequence. */
1162 static void
1163 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1165 gimple_stmt_iterator it = gsi_last (*seq_p);
1166 bool before = false;
1168 if (gsi_end_p (it))
1169 before = true;
1171 asan_poison_variable (decl, poison, &it, before);
1174 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1176 static int
1177 sort_by_decl_uid (const void *a, const void *b)
1179 const tree *t1 = (const tree *)a;
1180 const tree *t2 = (const tree *)b;
1182 int uid1 = DECL_UID (*t1);
1183 int uid2 = DECL_UID (*t2);
1185 if (uid1 < uid2)
1186 return -1;
1187 else if (uid1 > uid2)
1188 return 1;
1189 else
1190 return 0;
1193 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1194 depending on POISON flag. Created statement is appended
1195 to SEQ_P gimple sequence. */
1197 static void
1198 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1200 unsigned c = variables->elements ();
1201 if (c == 0)
1202 return;
1204 auto_vec<tree> sorted_variables (c);
1206 for (hash_set<tree>::iterator it = variables->begin ();
1207 it != variables->end (); ++it)
1208 sorted_variables.safe_push (*it);
1210 sorted_variables.qsort (sort_by_decl_uid);
1212 unsigned i;
1213 tree var;
1214 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1216 asan_poison_variable (var, poison, seq_p);
1218 /* Add use_after_scope_memory attribute for the variable in order
1219 to prevent re-written into SSA. */
1220 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1221 DECL_ATTRIBUTES (var)))
1222 DECL_ATTRIBUTES (var)
1223 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1224 integer_one_node,
1225 DECL_ATTRIBUTES (var));
1229 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1231 static enum gimplify_status
1232 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1234 tree bind_expr = *expr_p;
1235 bool old_keep_stack = gimplify_ctxp->keep_stack;
1236 bool old_save_stack = gimplify_ctxp->save_stack;
1237 tree t;
1238 gbind *bind_stmt;
1239 gimple_seq body, cleanup;
1240 gcall *stack_save;
1241 location_t start_locus = 0, end_locus = 0;
1242 tree ret_clauses = NULL;
1244 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1246 /* Mark variables seen in this bind expr. */
1247 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1249 if (VAR_P (t))
1251 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1253 /* Mark variable as local. */
1254 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1255 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1256 || splay_tree_lookup (ctx->variables,
1257 (splay_tree_key) t) == NULL))
1259 if (ctx->region_type == ORT_SIMD
1260 && TREE_ADDRESSABLE (t)
1261 && !TREE_STATIC (t))
1262 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1263 else
1264 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1267 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1269 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1270 cfun->has_local_explicit_reg_vars = true;
1273 /* Preliminarily mark non-addressed complex variables as eligible
1274 for promotion to gimple registers. We'll transform their uses
1275 as we find them. */
1276 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1277 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1278 && !TREE_THIS_VOLATILE (t)
1279 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1280 && !needs_to_live_in_memory (t))
1281 DECL_GIMPLE_REG_P (t) = 1;
1284 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1285 BIND_EXPR_BLOCK (bind_expr));
1286 gimple_push_bind_expr (bind_stmt);
1288 gimplify_ctxp->keep_stack = false;
1289 gimplify_ctxp->save_stack = false;
1291 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1292 body = NULL;
1293 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1294 gimple_bind_set_body (bind_stmt, body);
1296 /* Source location wise, the cleanup code (stack_restore and clobbers)
1297 belongs to the end of the block, so propagate what we have. The
1298 stack_save operation belongs to the beginning of block, which we can
1299 infer from the bind_expr directly if the block has no explicit
1300 assignment. */
1301 if (BIND_EXPR_BLOCK (bind_expr))
1303 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1304 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1306 if (start_locus == 0)
1307 start_locus = EXPR_LOCATION (bind_expr);
1309 cleanup = NULL;
1310 stack_save = NULL;
1312 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1313 the stack space allocated to the VLAs. */
1314 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1316 gcall *stack_restore;
1318 /* Save stack on entry and restore it on exit. Add a try_finally
1319 block to achieve this. */
1320 build_stack_save_restore (&stack_save, &stack_restore);
1322 gimple_set_location (stack_save, start_locus);
1323 gimple_set_location (stack_restore, end_locus);
1325 gimplify_seq_add_stmt (&cleanup, stack_restore);
1328 /* Add clobbers for all variables that go out of scope. */
1329 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1331 if (VAR_P (t)
1332 && !is_global_var (t)
1333 && DECL_CONTEXT (t) == current_function_decl)
1335 if (!DECL_HARD_REGISTER (t)
1336 && !TREE_THIS_VOLATILE (t)
1337 && !DECL_HAS_VALUE_EXPR_P (t)
1338 /* Only care for variables that have to be in memory. Others
1339 will be rewritten into SSA names, hence moved to the
1340 top-level. */
1341 && !is_gimple_reg (t)
1342 && flag_stack_reuse != SR_NONE)
1344 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1345 gimple *clobber_stmt;
1346 TREE_THIS_VOLATILE (clobber) = 1;
1347 clobber_stmt = gimple_build_assign (t, clobber);
1348 gimple_set_location (clobber_stmt, end_locus);
1349 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1352 if (flag_openacc && oacc_declare_returns != NULL)
1354 tree *c = oacc_declare_returns->get (t);
1355 if (c != NULL)
1357 if (ret_clauses)
1358 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1360 ret_clauses = *c;
1362 oacc_declare_returns->remove (t);
1364 if (oacc_declare_returns->elements () == 0)
1366 delete oacc_declare_returns;
1367 oacc_declare_returns = NULL;
1373 if (asan_poisoned_variables != NULL
1374 && asan_poisoned_variables->contains (t))
1376 asan_poisoned_variables->remove (t);
1377 asan_poison_variable (t, true, &cleanup);
1380 if (gimplify_ctxp->live_switch_vars != NULL
1381 && gimplify_ctxp->live_switch_vars->contains (t))
1382 gimplify_ctxp->live_switch_vars->remove (t);
1385 if (ret_clauses)
1387 gomp_target *stmt;
1388 gimple_stmt_iterator si = gsi_start (cleanup);
1390 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1391 ret_clauses);
1392 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1395 if (cleanup)
1397 gtry *gs;
1398 gimple_seq new_body;
1400 new_body = NULL;
1401 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1402 GIMPLE_TRY_FINALLY);
1404 if (stack_save)
1405 gimplify_seq_add_stmt (&new_body, stack_save);
1406 gimplify_seq_add_stmt (&new_body, gs);
1407 gimple_bind_set_body (bind_stmt, new_body);
1410 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1411 if (!gimplify_ctxp->keep_stack)
1412 gimplify_ctxp->keep_stack = old_keep_stack;
1413 gimplify_ctxp->save_stack = old_save_stack;
1415 gimple_pop_bind_expr ();
1417 gimplify_seq_add_stmt (pre_p, bind_stmt);
1419 if (temp)
1421 *expr_p = temp;
1422 return GS_OK;
1425 *expr_p = NULL_TREE;
1426 return GS_ALL_DONE;
1429 /* Maybe add early return predict statement to PRE_P sequence. */
1431 static void
1432 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1434 /* If we are not in a conditional context, add PREDICT statement. */
1435 if (gimple_conditional_context ())
1437 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1438 NOT_TAKEN);
1439 gimplify_seq_add_stmt (pre_p, predict);
1443 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1444 GIMPLE value, it is assigned to a new temporary and the statement is
1445 re-written to return the temporary.
1447 PRE_P points to the sequence where side effects that must happen before
1448 STMT should be stored. */
1450 static enum gimplify_status
1451 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1453 greturn *ret;
1454 tree ret_expr = TREE_OPERAND (stmt, 0);
1455 tree result_decl, result;
1457 if (ret_expr == error_mark_node)
1458 return GS_ERROR;
1460 if (!ret_expr
1461 || TREE_CODE (ret_expr) == RESULT_DECL
1462 || ret_expr == error_mark_node)
1464 maybe_add_early_return_predict_stmt (pre_p);
1465 greturn *ret = gimple_build_return (ret_expr);
1466 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1467 gimplify_seq_add_stmt (pre_p, ret);
1468 return GS_ALL_DONE;
1471 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1472 result_decl = NULL_TREE;
1473 else
1475 result_decl = TREE_OPERAND (ret_expr, 0);
1477 /* See through a return by reference. */
1478 if (TREE_CODE (result_decl) == INDIRECT_REF)
1479 result_decl = TREE_OPERAND (result_decl, 0);
1481 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1482 || TREE_CODE (ret_expr) == INIT_EXPR)
1483 && TREE_CODE (result_decl) == RESULT_DECL);
1486 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1487 Recall that aggregate_value_p is FALSE for any aggregate type that is
1488 returned in registers. If we're returning values in registers, then
1489 we don't want to extend the lifetime of the RESULT_DECL, particularly
1490 across another call. In addition, for those aggregates for which
1491 hard_function_value generates a PARALLEL, we'll die during normal
1492 expansion of structure assignments; there's special code in expand_return
1493 to handle this case that does not exist in expand_expr. */
1494 if (!result_decl)
1495 result = NULL_TREE;
1496 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1498 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1500 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1501 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1502 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1503 should be effectively allocated by the caller, i.e. all calls to
1504 this function must be subject to the Return Slot Optimization. */
1505 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1506 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1508 result = result_decl;
1510 else if (gimplify_ctxp->return_temp)
1511 result = gimplify_ctxp->return_temp;
1512 else
1514 result = create_tmp_reg (TREE_TYPE (result_decl));
1516 /* ??? With complex control flow (usually involving abnormal edges),
1517 we can wind up warning about an uninitialized value for this. Due
1518 to how this variable is constructed and initialized, this is never
1519 true. Give up and never warn. */
1520 TREE_NO_WARNING (result) = 1;
1522 gimplify_ctxp->return_temp = result;
1525 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1526 Then gimplify the whole thing. */
1527 if (result != result_decl)
1528 TREE_OPERAND (ret_expr, 0) = result;
1530 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1532 maybe_add_early_return_predict_stmt (pre_p);
1533 ret = gimple_build_return (result);
1534 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1535 gimplify_seq_add_stmt (pre_p, ret);
1537 return GS_ALL_DONE;
1540 /* Gimplify a variable-length array DECL. */
1542 static void
1543 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1545 /* This is a variable-sized decl. Simplify its size and mark it
1546 for deferred expansion. */
1547 tree t, addr, ptr_type;
1549 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1550 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1552 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1553 if (DECL_HAS_VALUE_EXPR_P (decl))
1554 return;
1556 /* All occurrences of this decl in final gimplified code will be
1557 replaced by indirection. Setting DECL_VALUE_EXPR does two
1558 things: First, it lets the rest of the gimplifier know what
1559 replacement to use. Second, it lets the debug info know
1560 where to find the value. */
1561 ptr_type = build_pointer_type (TREE_TYPE (decl));
1562 addr = create_tmp_var (ptr_type, get_name (decl));
1563 DECL_IGNORED_P (addr) = 0;
1564 t = build_fold_indirect_ref (addr);
1565 TREE_THIS_NOTRAP (t) = 1;
1566 SET_DECL_VALUE_EXPR (decl, t);
1567 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1569 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1570 max_int_size_in_bytes (TREE_TYPE (decl)));
1571 /* The call has been built for a variable-sized object. */
1572 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1573 t = fold_convert (ptr_type, t);
1574 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1576 gimplify_and_add (t, seq_p);
1579 /* A helper function to be called via walk_tree. Mark all labels under *TP
1580 as being forced. To be called for DECL_INITIAL of static variables. */
1582 static tree
1583 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1585 if (TYPE_P (*tp))
1586 *walk_subtrees = 0;
1587 if (TREE_CODE (*tp) == LABEL_DECL)
1589 FORCED_LABEL (*tp) = 1;
1590 cfun->has_forced_label_in_static = 1;
1593 return NULL_TREE;
1596 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1597 and initialization explicit. */
1599 static enum gimplify_status
1600 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1602 tree stmt = *stmt_p;
1603 tree decl = DECL_EXPR_DECL (stmt);
1605 *stmt_p = NULL_TREE;
1607 if (TREE_TYPE (decl) == error_mark_node)
1608 return GS_ERROR;
1610 if ((TREE_CODE (decl) == TYPE_DECL
1611 || VAR_P (decl))
1612 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1614 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1615 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1616 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1619 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1620 in case its size expressions contain problematic nodes like CALL_EXPR. */
1621 if (TREE_CODE (decl) == TYPE_DECL
1622 && DECL_ORIGINAL_TYPE (decl)
1623 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1625 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1626 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1627 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1630 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1632 tree init = DECL_INITIAL (decl);
1633 bool is_vla = false;
1635 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1636 || (!TREE_STATIC (decl)
1637 && flag_stack_check == GENERIC_STACK_CHECK
1638 && compare_tree_int (DECL_SIZE_UNIT (decl),
1639 STACK_CHECK_MAX_VAR_SIZE) > 0))
1641 gimplify_vla_decl (decl, seq_p);
1642 is_vla = true;
1645 if (asan_poisoned_variables
1646 && !is_vla
1647 && TREE_ADDRESSABLE (decl)
1648 && !TREE_STATIC (decl)
1649 && !DECL_HAS_VALUE_EXPR_P (decl)
1650 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1651 && dbg_cnt (asan_use_after_scope))
1653 asan_poisoned_variables->add (decl);
1654 asan_poison_variable (decl, false, seq_p);
1655 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1656 gimplify_ctxp->live_switch_vars->add (decl);
1659 /* Some front ends do not explicitly declare all anonymous
1660 artificial variables. We compensate here by declaring the
1661 variables, though it would be better if the front ends would
1662 explicitly declare them. */
1663 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1664 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1665 gimple_add_tmp_var (decl);
1667 if (init && init != error_mark_node)
1669 if (!TREE_STATIC (decl))
1671 DECL_INITIAL (decl) = NULL_TREE;
1672 init = build2 (INIT_EXPR, void_type_node, decl, init);
1673 gimplify_and_add (init, seq_p);
1674 ggc_free (init);
1676 else
1677 /* We must still examine initializers for static variables
1678 as they may contain a label address. */
1679 walk_tree (&init, force_labels_r, NULL, NULL);
1683 return GS_ALL_DONE;
1686 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1687 and replacing the LOOP_EXPR with goto, but if the loop contains an
1688 EXIT_EXPR, we need to append a label for it to jump to. */
1690 static enum gimplify_status
1691 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1693 tree saved_label = gimplify_ctxp->exit_label;
1694 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1696 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1698 gimplify_ctxp->exit_label = NULL_TREE;
1700 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1702 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1704 if (gimplify_ctxp->exit_label)
1705 gimplify_seq_add_stmt (pre_p,
1706 gimple_build_label (gimplify_ctxp->exit_label));
1708 gimplify_ctxp->exit_label = saved_label;
1710 *expr_p = NULL;
1711 return GS_ALL_DONE;
1714 /* Gimplify a statement list onto a sequence. These may be created either
1715 by an enlightened front-end, or by shortcut_cond_expr. */
1717 static enum gimplify_status
1718 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1720 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1722 tree_stmt_iterator i = tsi_start (*expr_p);
1724 while (!tsi_end_p (i))
1726 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1727 tsi_delink (&i);
1730 if (temp)
1732 *expr_p = temp;
1733 return GS_OK;
1736 return GS_ALL_DONE;
1739 /* Callback for walk_gimple_seq. */
1741 static tree
1742 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1743 struct walk_stmt_info *wi)
1745 gimple *stmt = gsi_stmt (*gsi_p);
1747 *handled_ops_p = true;
1748 switch (gimple_code (stmt))
1750 case GIMPLE_TRY:
1751 /* A compiler-generated cleanup or a user-written try block.
1752 If it's empty, don't dive into it--that would result in
1753 worse location info. */
1754 if (gimple_try_eval (stmt) == NULL)
1756 wi->info = stmt;
1757 return integer_zero_node;
1759 /* Fall through. */
1760 case GIMPLE_BIND:
1761 case GIMPLE_CATCH:
1762 case GIMPLE_EH_FILTER:
1763 case GIMPLE_TRANSACTION:
1764 /* Walk the sub-statements. */
1765 *handled_ops_p = false;
1766 break;
1767 case GIMPLE_CALL:
1768 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1770 *handled_ops_p = false;
1771 break;
1773 /* Fall through. */
1774 default:
1775 /* Save the first "real" statement (not a decl/lexical scope/...). */
1776 wi->info = stmt;
1777 return integer_zero_node;
1779 return NULL_TREE;
1782 /* Possibly warn about unreachable statements between switch's controlling
1783 expression and the first case. SEQ is the body of a switch expression. */
1785 static void
1786 maybe_warn_switch_unreachable (gimple_seq seq)
1788 if (!warn_switch_unreachable
1789 /* This warning doesn't play well with Fortran when optimizations
1790 are on. */
1791 || lang_GNU_Fortran ()
1792 || seq == NULL)
1793 return;
1795 struct walk_stmt_info wi;
1796 memset (&wi, 0, sizeof (wi));
1797 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1798 gimple *stmt = (gimple *) wi.info;
1800 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1802 if (gimple_code (stmt) == GIMPLE_GOTO
1803 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1804 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1805 /* Don't warn for compiler-generated gotos. These occur
1806 in Duff's devices, for example. */;
1807 else
1808 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1809 "statement will never be executed");
1814 /* A label entry that pairs label and a location. */
1815 struct label_entry
1817 tree label;
1818 location_t loc;
1821 /* Find LABEL in vector of label entries VEC. */
1823 static struct label_entry *
1824 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1826 unsigned int i;
1827 struct label_entry *l;
1829 FOR_EACH_VEC_ELT (*vec, i, l)
1830 if (l->label == label)
1831 return l;
1832 return NULL;
1835 /* Return true if LABEL, a LABEL_DECL, represents a case label
1836 in a vector of labels CASES. */
1838 static bool
1839 case_label_p (const vec<tree> *cases, tree label)
1841 unsigned int i;
1842 tree l;
1844 FOR_EACH_VEC_ELT (*cases, i, l)
1845 if (CASE_LABEL (l) == label)
1846 return true;
1847 return false;
1850 /* Find the last statement in a scope STMT. */
1852 static gimple *
1853 last_stmt_in_scope (gimple *stmt)
1855 if (!stmt)
1856 return NULL;
1858 switch (gimple_code (stmt))
1860 case GIMPLE_BIND:
1862 gbind *bind = as_a <gbind *> (stmt);
1863 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1864 return last_stmt_in_scope (stmt);
1867 case GIMPLE_TRY:
1869 gtry *try_stmt = as_a <gtry *> (stmt);
1870 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1871 gimple *last_eval = last_stmt_in_scope (stmt);
1872 if (gimple_stmt_may_fallthru (last_eval)
1873 && (last_eval == NULL
1874 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1875 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1877 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1878 return last_stmt_in_scope (stmt);
1880 else
1881 return last_eval;
1884 default:
1885 return stmt;
1889 /* Collect interesting labels in LABELS and return the statement preceding
1890 another case label, or a user-defined label. */
1892 static gimple *
1893 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1894 auto_vec <struct label_entry> *labels)
1896 gimple *prev = NULL;
1900 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
1902 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
1903 which starts on a GIMPLE_SWITCH and ends with a break label.
1904 Handle that as a single statement that can fall through. */
1905 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
1906 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
1907 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
1908 if (last
1909 && gimple_code (first) == GIMPLE_SWITCH
1910 && gimple_code (last) == GIMPLE_LABEL)
1912 tree label = gimple_label_label (as_a <glabel *> (last));
1913 if (SWITCH_BREAK_LABEL_P (label))
1915 prev = bind;
1916 gsi_next (gsi_p);
1917 continue;
1921 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1922 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1924 /* Nested scope. Only look at the last statement of
1925 the innermost scope. */
1926 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1927 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1928 if (last)
1930 prev = last;
1931 /* It might be a label without a location. Use the
1932 location of the scope then. */
1933 if (!gimple_has_location (prev))
1934 gimple_set_location (prev, bind_loc);
1936 gsi_next (gsi_p);
1937 continue;
1940 /* Ifs are tricky. */
1941 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1943 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1944 tree false_lab = gimple_cond_false_label (cond_stmt);
1945 location_t if_loc = gimple_location (cond_stmt);
1947 /* If we have e.g.
1948 if (i > 1) goto <D.2259>; else goto D;
1949 we can't do much with the else-branch. */
1950 if (!DECL_ARTIFICIAL (false_lab))
1951 break;
1953 /* Go on until the false label, then one step back. */
1954 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1956 gimple *stmt = gsi_stmt (*gsi_p);
1957 if (gimple_code (stmt) == GIMPLE_LABEL
1958 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1959 break;
1962 /* Not found? Oops. */
1963 if (gsi_end_p (*gsi_p))
1964 break;
1966 struct label_entry l = { false_lab, if_loc };
1967 labels->safe_push (l);
1969 /* Go to the last statement of the then branch. */
1970 gsi_prev (gsi_p);
1972 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1973 <D.1759>:
1974 <stmt>;
1975 goto <D.1761>;
1976 <D.1760>:
1978 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1979 && !gimple_has_location (gsi_stmt (*gsi_p)))
1981 /* Look at the statement before, it might be
1982 attribute fallthrough, in which case don't warn. */
1983 gsi_prev (gsi_p);
1984 bool fallthru_before_dest
1985 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1986 gsi_next (gsi_p);
1987 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1988 if (!fallthru_before_dest)
1990 struct label_entry l = { goto_dest, if_loc };
1991 labels->safe_push (l);
1994 /* And move back. */
1995 gsi_next (gsi_p);
1998 /* Remember the last statement. Skip labels that are of no interest
1999 to us. */
2000 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2002 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2003 if (find_label_entry (labels, label))
2004 prev = gsi_stmt (*gsi_p);
2006 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2008 else
2009 prev = gsi_stmt (*gsi_p);
2010 gsi_next (gsi_p);
2012 while (!gsi_end_p (*gsi_p)
2013 /* Stop if we find a case or a user-defined label. */
2014 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2015 || !gimple_has_location (gsi_stmt (*gsi_p))));
2017 return prev;
2020 /* Return true if the switch fallthough warning should occur. LABEL is
2021 the label statement that we're falling through to. */
2023 static bool
2024 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2026 gimple_stmt_iterator gsi = *gsi_p;
2028 /* Don't warn if the label is marked with a "falls through" comment. */
2029 if (FALLTHROUGH_LABEL_P (label))
2030 return false;
2032 /* Don't warn for non-case labels followed by a statement:
2033 case 0:
2034 foo ();
2035 label:
2036 bar ();
2037 as these are likely intentional. */
2038 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2040 tree l;
2041 while (!gsi_end_p (gsi)
2042 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2043 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2044 && !case_label_p (&gimplify_ctxp->case_labels, l))
2045 gsi_next (&gsi);
2046 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2047 return false;
2050 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2051 immediately breaks. */
2052 gsi = *gsi_p;
2054 /* Skip all immediately following labels. */
2055 while (!gsi_end_p (gsi)
2056 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2057 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2058 gsi_next (&gsi);
2060 /* { ... something; default:; } */
2061 if (gsi_end_p (gsi)
2062 /* { ... something; default: break; } or
2063 { ... something; default: goto L; } */
2064 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2065 /* { ... something; default: return; } */
2066 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2067 return false;
2069 return true;
2072 /* Callback for walk_gimple_seq. */
2074 static tree
2075 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2076 struct walk_stmt_info *)
2078 gimple *stmt = gsi_stmt (*gsi_p);
2080 *handled_ops_p = true;
2081 switch (gimple_code (stmt))
2083 case GIMPLE_TRY:
2084 case GIMPLE_BIND:
2085 case GIMPLE_CATCH:
2086 case GIMPLE_EH_FILTER:
2087 case GIMPLE_TRANSACTION:
2088 /* Walk the sub-statements. */
2089 *handled_ops_p = false;
2090 break;
2092 /* Find a sequence of form:
2094 GIMPLE_LABEL
2095 [...]
2096 <may fallthru stmt>
2097 GIMPLE_LABEL
2099 and possibly warn. */
2100 case GIMPLE_LABEL:
2102 /* Found a label. Skip all immediately following labels. */
2103 while (!gsi_end_p (*gsi_p)
2104 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2105 gsi_next (gsi_p);
2107 /* There might be no more statements. */
2108 if (gsi_end_p (*gsi_p))
2109 return integer_zero_node;
2111 /* Vector of labels that fall through. */
2112 auto_vec <struct label_entry> labels;
2113 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2115 /* There might be no more statements. */
2116 if (gsi_end_p (*gsi_p))
2117 return integer_zero_node;
2119 gimple *next = gsi_stmt (*gsi_p);
2120 tree label;
2121 /* If what follows is a label, then we may have a fallthrough. */
2122 if (gimple_code (next) == GIMPLE_LABEL
2123 && gimple_has_location (next)
2124 && (label = gimple_label_label (as_a <glabel *> (next)))
2125 && prev != NULL)
2127 struct label_entry *l;
2128 bool warned_p = false;
2129 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2130 /* Quiet. */;
2131 else if (gimple_code (prev) == GIMPLE_LABEL
2132 && (label = gimple_label_label (as_a <glabel *> (prev)))
2133 && (l = find_label_entry (&labels, label)))
2134 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2135 "this statement may fall through");
2136 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2137 /* Try to be clever and don't warn when the statement
2138 can't actually fall through. */
2139 && gimple_stmt_may_fallthru (prev)
2140 && gimple_has_location (prev))
2141 warned_p = warning_at (gimple_location (prev),
2142 OPT_Wimplicit_fallthrough_,
2143 "this statement may fall through");
2144 if (warned_p)
2145 inform (gimple_location (next), "here");
2147 /* Mark this label as processed so as to prevent multiple
2148 warnings in nested switches. */
2149 FALLTHROUGH_LABEL_P (label) = true;
2151 /* So that next warn_implicit_fallthrough_r will start looking for
2152 a new sequence starting with this label. */
2153 gsi_prev (gsi_p);
2156 break;
2157 default:
2158 break;
2160 return NULL_TREE;
2163 /* Warn when a switch case falls through. */
2165 static void
2166 maybe_warn_implicit_fallthrough (gimple_seq seq)
2168 if (!warn_implicit_fallthrough)
2169 return;
2171 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2172 if (!(lang_GNU_C ()
2173 || lang_GNU_CXX ()
2174 || lang_GNU_OBJC ()))
2175 return;
2177 struct walk_stmt_info wi;
2178 memset (&wi, 0, sizeof (wi));
2179 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2182 /* Callback for walk_gimple_seq. */
2184 static tree
2185 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2186 struct walk_stmt_info *)
2188 gimple *stmt = gsi_stmt (*gsi_p);
2190 *handled_ops_p = true;
2191 switch (gimple_code (stmt))
2193 case GIMPLE_TRY:
2194 case GIMPLE_BIND:
2195 case GIMPLE_CATCH:
2196 case GIMPLE_EH_FILTER:
2197 case GIMPLE_TRANSACTION:
2198 /* Walk the sub-statements. */
2199 *handled_ops_p = false;
2200 break;
2201 case GIMPLE_CALL:
2202 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2204 gsi_remove (gsi_p, true);
2205 if (gsi_end_p (*gsi_p))
2206 return integer_zero_node;
2208 bool found = false;
2209 location_t loc = gimple_location (stmt);
2211 gimple_stmt_iterator gsi2 = *gsi_p;
2212 stmt = gsi_stmt (gsi2);
2213 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2215 /* Go on until the artificial label. */
2216 tree goto_dest = gimple_goto_dest (stmt);
2217 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2219 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2220 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2221 == goto_dest)
2222 break;
2225 /* Not found? Stop. */
2226 if (gsi_end_p (gsi2))
2227 break;
2229 /* Look one past it. */
2230 gsi_next (&gsi2);
2233 /* We're looking for a case label or default label here. */
2234 while (!gsi_end_p (gsi2))
2236 stmt = gsi_stmt (gsi2);
2237 if (gimple_code (stmt) == GIMPLE_LABEL)
2239 tree label = gimple_label_label (as_a <glabel *> (stmt));
2240 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2242 found = true;
2243 break;
2246 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2248 else
2249 /* Something other is not expected. */
2250 break;
2251 gsi_next (&gsi2);
2253 if (!found)
2254 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2255 "a case label or default label");
2257 break;
2258 default:
2259 break;
2261 return NULL_TREE;
2264 /* Expand all FALLTHROUGH () calls in SEQ. */
2266 static void
2267 expand_FALLTHROUGH (gimple_seq *seq_p)
2269 struct walk_stmt_info wi;
2270 memset (&wi, 0, sizeof (wi));
2271 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2275 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2276 branch to. */
2278 static enum gimplify_status
2279 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2281 tree switch_expr = *expr_p;
2282 gimple_seq switch_body_seq = NULL;
2283 enum gimplify_status ret;
2284 tree index_type = TREE_TYPE (switch_expr);
2285 if (index_type == NULL_TREE)
2286 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2288 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2289 fb_rvalue);
2290 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2291 return ret;
2293 if (SWITCH_BODY (switch_expr))
2295 vec<tree> labels;
2296 vec<tree> saved_labels;
2297 hash_set<tree> *saved_live_switch_vars = NULL;
2298 tree default_case = NULL_TREE;
2299 gswitch *switch_stmt;
2301 /* Save old labels, get new ones from body, then restore the old
2302 labels. Save all the things from the switch body to append after. */
2303 saved_labels = gimplify_ctxp->case_labels;
2304 gimplify_ctxp->case_labels.create (8);
2306 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2307 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2308 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2309 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2310 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2311 else
2312 gimplify_ctxp->live_switch_vars = NULL;
2314 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2315 gimplify_ctxp->in_switch_expr = true;
2317 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2319 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2320 maybe_warn_switch_unreachable (switch_body_seq);
2321 maybe_warn_implicit_fallthrough (switch_body_seq);
2322 /* Only do this for the outermost GIMPLE_SWITCH. */
2323 if (!gimplify_ctxp->in_switch_expr)
2324 expand_FALLTHROUGH (&switch_body_seq);
2326 labels = gimplify_ctxp->case_labels;
2327 gimplify_ctxp->case_labels = saved_labels;
2329 if (gimplify_ctxp->live_switch_vars)
2331 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2332 delete gimplify_ctxp->live_switch_vars;
2334 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2336 preprocess_case_label_vec_for_gimple (labels, index_type,
2337 &default_case);
2339 bool add_bind = false;
2340 if (!default_case)
2342 glabel *new_default;
2344 default_case
2345 = build_case_label (NULL_TREE, NULL_TREE,
2346 create_artificial_label (UNKNOWN_LOCATION));
2347 if (old_in_switch_expr)
2349 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2350 add_bind = true;
2352 new_default = gimple_build_label (CASE_LABEL (default_case));
2353 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2355 else if (old_in_switch_expr)
2357 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2358 if (last && gimple_code (last) == GIMPLE_LABEL)
2360 tree label = gimple_label_label (as_a <glabel *> (last));
2361 if (SWITCH_BREAK_LABEL_P (label))
2362 add_bind = true;
2366 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2367 default_case, labels);
2368 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2369 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2370 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2371 so that we can easily find the start and end of the switch
2372 statement. */
2373 if (add_bind)
2375 gimple_seq bind_body = NULL;
2376 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2377 gimple_seq_add_seq (&bind_body, switch_body_seq);
2378 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2379 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2380 gimplify_seq_add_stmt (pre_p, bind);
2382 else
2384 gimplify_seq_add_stmt (pre_p, switch_stmt);
2385 gimplify_seq_add_seq (pre_p, switch_body_seq);
2387 labels.release ();
2389 else
2390 gcc_unreachable ();
2392 return GS_ALL_DONE;
2395 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2397 static enum gimplify_status
2398 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2400 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2401 == current_function_decl);
2403 tree label = LABEL_EXPR_LABEL (*expr_p);
2404 glabel *label_stmt = gimple_build_label (label);
2405 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2406 gimplify_seq_add_stmt (pre_p, label_stmt);
2408 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2409 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2410 NOT_TAKEN));
2411 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2412 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2413 TAKEN));
2415 return GS_ALL_DONE;
2418 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2420 static enum gimplify_status
2421 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2423 struct gimplify_ctx *ctxp;
2424 glabel *label_stmt;
2426 /* Invalid programs can play Duff's Device type games with, for example,
2427 #pragma omp parallel. At least in the C front end, we don't
2428 detect such invalid branches until after gimplification, in the
2429 diagnose_omp_blocks pass. */
2430 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2431 if (ctxp->case_labels.exists ())
2432 break;
2434 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2435 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2436 ctxp->case_labels.safe_push (*expr_p);
2437 gimplify_seq_add_stmt (pre_p, label_stmt);
2439 return GS_ALL_DONE;
2442 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2443 if necessary. */
2445 tree
2446 build_and_jump (tree *label_p)
2448 if (label_p == NULL)
2449 /* If there's nowhere to jump, just fall through. */
2450 return NULL_TREE;
2452 if (*label_p == NULL_TREE)
2454 tree label = create_artificial_label (UNKNOWN_LOCATION);
2455 *label_p = label;
2458 return build1 (GOTO_EXPR, void_type_node, *label_p);
2461 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2462 This also involves building a label to jump to and communicating it to
2463 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2465 static enum gimplify_status
2466 gimplify_exit_expr (tree *expr_p)
2468 tree cond = TREE_OPERAND (*expr_p, 0);
2469 tree expr;
2471 expr = build_and_jump (&gimplify_ctxp->exit_label);
2472 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2473 *expr_p = expr;
2475 return GS_OK;
2478 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2479 different from its canonical type, wrap the whole thing inside a
2480 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2481 type.
2483 The canonical type of a COMPONENT_REF is the type of the field being
2484 referenced--unless the field is a bit-field which can be read directly
2485 in a smaller mode, in which case the canonical type is the
2486 sign-appropriate type corresponding to that mode. */
2488 static void
2489 canonicalize_component_ref (tree *expr_p)
2491 tree expr = *expr_p;
2492 tree type;
2494 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2496 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2497 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2498 else
2499 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2501 /* One could argue that all the stuff below is not necessary for
2502 the non-bitfield case and declare it a FE error if type
2503 adjustment would be needed. */
2504 if (TREE_TYPE (expr) != type)
2506 #ifdef ENABLE_TYPES_CHECKING
2507 tree old_type = TREE_TYPE (expr);
2508 #endif
2509 int type_quals;
2511 /* We need to preserve qualifiers and propagate them from
2512 operand 0. */
2513 type_quals = TYPE_QUALS (type)
2514 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2515 if (TYPE_QUALS (type) != type_quals)
2516 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2518 /* Set the type of the COMPONENT_REF to the underlying type. */
2519 TREE_TYPE (expr) = type;
2521 #ifdef ENABLE_TYPES_CHECKING
2522 /* It is now a FE error, if the conversion from the canonical
2523 type to the original expression type is not useless. */
2524 gcc_assert (useless_type_conversion_p (old_type, type));
2525 #endif
2529 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2530 to foo, embed that change in the ADDR_EXPR by converting
2531 T array[U];
2532 (T *)&array
2534 &array[L]
2535 where L is the lower bound. For simplicity, only do this for constant
2536 lower bound.
2537 The constraint is that the type of &array[L] is trivially convertible
2538 to T *. */
2540 static void
2541 canonicalize_addr_expr (tree *expr_p)
2543 tree expr = *expr_p;
2544 tree addr_expr = TREE_OPERAND (expr, 0);
2545 tree datype, ddatype, pddatype;
2547 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2548 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2549 || TREE_CODE (addr_expr) != ADDR_EXPR)
2550 return;
2552 /* The addr_expr type should be a pointer to an array. */
2553 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2554 if (TREE_CODE (datype) != ARRAY_TYPE)
2555 return;
2557 /* The pointer to element type shall be trivially convertible to
2558 the expression pointer type. */
2559 ddatype = TREE_TYPE (datype);
2560 pddatype = build_pointer_type (ddatype);
2561 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2562 pddatype))
2563 return;
2565 /* The lower bound and element sizes must be constant. */
2566 if (!TYPE_SIZE_UNIT (ddatype)
2567 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2568 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2569 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2570 return;
2572 /* All checks succeeded. Build a new node to merge the cast. */
2573 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2574 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2575 NULL_TREE, NULL_TREE);
2576 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2578 /* We can have stripped a required restrict qualifier above. */
2579 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2580 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2583 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2584 underneath as appropriate. */
2586 static enum gimplify_status
2587 gimplify_conversion (tree *expr_p)
2589 location_t loc = EXPR_LOCATION (*expr_p);
2590 gcc_assert (CONVERT_EXPR_P (*expr_p));
2592 /* Then strip away all but the outermost conversion. */
2593 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2595 /* And remove the outermost conversion if it's useless. */
2596 if (tree_ssa_useless_type_conversion (*expr_p))
2597 *expr_p = TREE_OPERAND (*expr_p, 0);
2599 /* If we still have a conversion at the toplevel,
2600 then canonicalize some constructs. */
2601 if (CONVERT_EXPR_P (*expr_p))
2603 tree sub = TREE_OPERAND (*expr_p, 0);
2605 /* If a NOP conversion is changing the type of a COMPONENT_REF
2606 expression, then canonicalize its type now in order to expose more
2607 redundant conversions. */
2608 if (TREE_CODE (sub) == COMPONENT_REF)
2609 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2611 /* If a NOP conversion is changing a pointer to array of foo
2612 to a pointer to foo, embed that change in the ADDR_EXPR. */
2613 else if (TREE_CODE (sub) == ADDR_EXPR)
2614 canonicalize_addr_expr (expr_p);
2617 /* If we have a conversion to a non-register type force the
2618 use of a VIEW_CONVERT_EXPR instead. */
2619 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2620 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2621 TREE_OPERAND (*expr_p, 0));
2623 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2624 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2625 TREE_SET_CODE (*expr_p, NOP_EXPR);
2627 return GS_OK;
2630 /* Nonlocal VLAs seen in the current function. */
2631 static hash_set<tree> *nonlocal_vlas;
2633 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2634 static tree nonlocal_vla_vars;
2636 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2637 DECL_VALUE_EXPR, and it's worth re-examining things. */
2639 static enum gimplify_status
2640 gimplify_var_or_parm_decl (tree *expr_p)
2642 tree decl = *expr_p;
2644 /* ??? If this is a local variable, and it has not been seen in any
2645 outer BIND_EXPR, then it's probably the result of a duplicate
2646 declaration, for which we've already issued an error. It would
2647 be really nice if the front end wouldn't leak these at all.
2648 Currently the only known culprit is C++ destructors, as seen
2649 in g++.old-deja/g++.jason/binding.C. */
2650 if (VAR_P (decl)
2651 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2652 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2653 && decl_function_context (decl) == current_function_decl)
2655 gcc_assert (seen_error ());
2656 return GS_ERROR;
2659 /* When within an OMP context, notice uses of variables. */
2660 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2661 return GS_ALL_DONE;
2663 /* If the decl is an alias for another expression, substitute it now. */
2664 if (DECL_HAS_VALUE_EXPR_P (decl))
2666 tree value_expr = DECL_VALUE_EXPR (decl);
2668 /* For referenced nonlocal VLAs add a decl for debugging purposes
2669 to the current function. */
2670 if (VAR_P (decl)
2671 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2672 && nonlocal_vlas != NULL
2673 && TREE_CODE (value_expr) == INDIRECT_REF
2674 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2675 && decl_function_context (decl) != current_function_decl)
2677 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2678 while (ctx
2679 && (ctx->region_type == ORT_WORKSHARE
2680 || ctx->region_type == ORT_SIMD
2681 || ctx->region_type == ORT_ACC))
2682 ctx = ctx->outer_context;
2683 if (!ctx && !nonlocal_vlas->add (decl))
2685 tree copy = copy_node (decl);
2687 lang_hooks.dup_lang_specific_decl (copy);
2688 SET_DECL_RTL (copy, 0);
2689 TREE_USED (copy) = 1;
2690 DECL_CHAIN (copy) = nonlocal_vla_vars;
2691 nonlocal_vla_vars = copy;
2692 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2693 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2697 *expr_p = unshare_expr (value_expr);
2698 return GS_OK;
2701 return GS_ALL_DONE;
2704 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2706 static void
2707 recalculate_side_effects (tree t)
2709 enum tree_code code = TREE_CODE (t);
2710 int len = TREE_OPERAND_LENGTH (t);
2711 int i;
2713 switch (TREE_CODE_CLASS (code))
2715 case tcc_expression:
2716 switch (code)
2718 case INIT_EXPR:
2719 case MODIFY_EXPR:
2720 case VA_ARG_EXPR:
2721 case PREDECREMENT_EXPR:
2722 case PREINCREMENT_EXPR:
2723 case POSTDECREMENT_EXPR:
2724 case POSTINCREMENT_EXPR:
2725 /* All of these have side-effects, no matter what their
2726 operands are. */
2727 return;
2729 default:
2730 break;
2732 /* Fall through. */
2734 case tcc_comparison: /* a comparison expression */
2735 case tcc_unary: /* a unary arithmetic expression */
2736 case tcc_binary: /* a binary arithmetic expression */
2737 case tcc_reference: /* a reference */
2738 case tcc_vl_exp: /* a function call */
2739 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2740 for (i = 0; i < len; ++i)
2742 tree op = TREE_OPERAND (t, i);
2743 if (op && TREE_SIDE_EFFECTS (op))
2744 TREE_SIDE_EFFECTS (t) = 1;
2746 break;
2748 case tcc_constant:
2749 /* No side-effects. */
2750 return;
2752 default:
2753 gcc_unreachable ();
2757 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2758 node *EXPR_P.
2760 compound_lval
2761 : min_lval '[' val ']'
2762 | min_lval '.' ID
2763 | compound_lval '[' val ']'
2764 | compound_lval '.' ID
2766 This is not part of the original SIMPLE definition, which separates
2767 array and member references, but it seems reasonable to handle them
2768 together. Also, this way we don't run into problems with union
2769 aliasing; gcc requires that for accesses through a union to alias, the
2770 union reference must be explicit, which was not always the case when we
2771 were splitting up array and member refs.
2773 PRE_P points to the sequence where side effects that must happen before
2774 *EXPR_P should be stored.
2776 POST_P points to the sequence where side effects that must happen after
2777 *EXPR_P should be stored. */
2779 static enum gimplify_status
2780 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2781 fallback_t fallback)
2783 tree *p;
2784 enum gimplify_status ret = GS_ALL_DONE, tret;
2785 int i;
2786 location_t loc = EXPR_LOCATION (*expr_p);
2787 tree expr = *expr_p;
2789 /* Create a stack of the subexpressions so later we can walk them in
2790 order from inner to outer. */
2791 auto_vec<tree, 10> expr_stack;
2793 /* We can handle anything that get_inner_reference can deal with. */
2794 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2796 restart:
2797 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2798 if (TREE_CODE (*p) == INDIRECT_REF)
2799 *p = fold_indirect_ref_loc (loc, *p);
2801 if (handled_component_p (*p))
2803 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2804 additional COMPONENT_REFs. */
2805 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2806 && gimplify_var_or_parm_decl (p) == GS_OK)
2807 goto restart;
2808 else
2809 break;
2811 expr_stack.safe_push (*p);
2814 gcc_assert (expr_stack.length ());
2816 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2817 walked through and P points to the innermost expression.
2819 Java requires that we elaborated nodes in source order. That
2820 means we must gimplify the inner expression followed by each of
2821 the indices, in order. But we can't gimplify the inner
2822 expression until we deal with any variable bounds, sizes, or
2823 positions in order to deal with PLACEHOLDER_EXPRs.
2825 So we do this in three steps. First we deal with the annotations
2826 for any variables in the components, then we gimplify the base,
2827 then we gimplify any indices, from left to right. */
2828 for (i = expr_stack.length () - 1; i >= 0; i--)
2830 tree t = expr_stack[i];
2832 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2834 /* Gimplify the low bound and element type size and put them into
2835 the ARRAY_REF. If these values are set, they have already been
2836 gimplified. */
2837 if (TREE_OPERAND (t, 2) == NULL_TREE)
2839 tree low = unshare_expr (array_ref_low_bound (t));
2840 if (!is_gimple_min_invariant (low))
2842 TREE_OPERAND (t, 2) = low;
2843 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2844 post_p, is_gimple_reg,
2845 fb_rvalue);
2846 ret = MIN (ret, tret);
2849 else
2851 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2852 is_gimple_reg, fb_rvalue);
2853 ret = MIN (ret, tret);
2856 if (TREE_OPERAND (t, 3) == NULL_TREE)
2858 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2859 tree elmt_size = unshare_expr (array_ref_element_size (t));
2860 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2862 /* Divide the element size by the alignment of the element
2863 type (above). */
2864 elmt_size
2865 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2867 if (!is_gimple_min_invariant (elmt_size))
2869 TREE_OPERAND (t, 3) = elmt_size;
2870 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2871 post_p, is_gimple_reg,
2872 fb_rvalue);
2873 ret = MIN (ret, tret);
2876 else
2878 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2879 is_gimple_reg, fb_rvalue);
2880 ret = MIN (ret, tret);
2883 else if (TREE_CODE (t) == COMPONENT_REF)
2885 /* Set the field offset into T and gimplify it. */
2886 if (TREE_OPERAND (t, 2) == NULL_TREE)
2888 tree offset = unshare_expr (component_ref_field_offset (t));
2889 tree field = TREE_OPERAND (t, 1);
2890 tree factor
2891 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2893 /* Divide the offset by its alignment. */
2894 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2896 if (!is_gimple_min_invariant (offset))
2898 TREE_OPERAND (t, 2) = offset;
2899 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2900 post_p, is_gimple_reg,
2901 fb_rvalue);
2902 ret = MIN (ret, tret);
2905 else
2907 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2908 is_gimple_reg, fb_rvalue);
2909 ret = MIN (ret, tret);
2914 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2915 so as to match the min_lval predicate. Failure to do so may result
2916 in the creation of large aggregate temporaries. */
2917 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2918 fallback | fb_lvalue);
2919 ret = MIN (ret, tret);
2921 /* And finally, the indices and operands of ARRAY_REF. During this
2922 loop we also remove any useless conversions. */
2923 for (; expr_stack.length () > 0; )
2925 tree t = expr_stack.pop ();
2927 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2929 /* Gimplify the dimension. */
2930 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2932 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2933 is_gimple_val, fb_rvalue);
2934 ret = MIN (ret, tret);
2938 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2940 /* The innermost expression P may have originally had
2941 TREE_SIDE_EFFECTS set which would have caused all the outer
2942 expressions in *EXPR_P leading to P to also have had
2943 TREE_SIDE_EFFECTS set. */
2944 recalculate_side_effects (t);
2947 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2948 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2950 canonicalize_component_ref (expr_p);
2953 expr_stack.release ();
2955 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2957 return ret;
2960 /* Gimplify the self modifying expression pointed to by EXPR_P
2961 (++, --, +=, -=).
2963 PRE_P points to the list where side effects that must happen before
2964 *EXPR_P should be stored.
2966 POST_P points to the list where side effects that must happen after
2967 *EXPR_P should be stored.
2969 WANT_VALUE is nonzero iff we want to use the value of this expression
2970 in another expression.
2972 ARITH_TYPE is the type the computation should be performed in. */
2974 enum gimplify_status
2975 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2976 bool want_value, tree arith_type)
2978 enum tree_code code;
2979 tree lhs, lvalue, rhs, t1;
2980 gimple_seq post = NULL, *orig_post_p = post_p;
2981 bool postfix;
2982 enum tree_code arith_code;
2983 enum gimplify_status ret;
2984 location_t loc = EXPR_LOCATION (*expr_p);
2986 code = TREE_CODE (*expr_p);
2988 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2989 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2991 /* Prefix or postfix? */
2992 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2993 /* Faster to treat as prefix if result is not used. */
2994 postfix = want_value;
2995 else
2996 postfix = false;
2998 /* For postfix, make sure the inner expression's post side effects
2999 are executed after side effects from this expression. */
3000 if (postfix)
3001 post_p = &post;
3003 /* Add or subtract? */
3004 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3005 arith_code = PLUS_EXPR;
3006 else
3007 arith_code = MINUS_EXPR;
3009 /* Gimplify the LHS into a GIMPLE lvalue. */
3010 lvalue = TREE_OPERAND (*expr_p, 0);
3011 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3012 if (ret == GS_ERROR)
3013 return ret;
3015 /* Extract the operands to the arithmetic operation. */
3016 lhs = lvalue;
3017 rhs = TREE_OPERAND (*expr_p, 1);
3019 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3020 that as the result value and in the postqueue operation. */
3021 if (postfix)
3023 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3024 if (ret == GS_ERROR)
3025 return ret;
3027 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3030 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3031 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3033 rhs = convert_to_ptrofftype_loc (loc, rhs);
3034 if (arith_code == MINUS_EXPR)
3035 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3036 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3038 else
3039 t1 = fold_convert (TREE_TYPE (*expr_p),
3040 fold_build2 (arith_code, arith_type,
3041 fold_convert (arith_type, lhs),
3042 fold_convert (arith_type, rhs)));
3044 if (postfix)
3046 gimplify_assign (lvalue, t1, pre_p);
3047 gimplify_seq_add_seq (orig_post_p, post);
3048 *expr_p = lhs;
3049 return GS_ALL_DONE;
3051 else
3053 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3054 return GS_OK;
3058 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3060 static void
3061 maybe_with_size_expr (tree *expr_p)
3063 tree expr = *expr_p;
3064 tree type = TREE_TYPE (expr);
3065 tree size;
3067 /* If we've already wrapped this or the type is error_mark_node, we can't do
3068 anything. */
3069 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3070 || type == error_mark_node)
3071 return;
3073 /* If the size isn't known or is a constant, we have nothing to do. */
3074 size = TYPE_SIZE_UNIT (type);
3075 if (!size || TREE_CODE (size) == INTEGER_CST)
3076 return;
3078 /* Otherwise, make a WITH_SIZE_EXPR. */
3079 size = unshare_expr (size);
3080 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3081 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3084 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3085 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3086 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3087 gimplified to an SSA name. */
3089 enum gimplify_status
3090 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3091 bool allow_ssa)
3093 bool (*test) (tree);
3094 fallback_t fb;
3096 /* In general, we allow lvalues for function arguments to avoid
3097 extra overhead of copying large aggregates out of even larger
3098 aggregates into temporaries only to copy the temporaries to
3099 the argument list. Make optimizers happy by pulling out to
3100 temporaries those types that fit in registers. */
3101 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3102 test = is_gimple_val, fb = fb_rvalue;
3103 else
3105 test = is_gimple_lvalue, fb = fb_either;
3106 /* Also strip a TARGET_EXPR that would force an extra copy. */
3107 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3109 tree init = TARGET_EXPR_INITIAL (*arg_p);
3110 if (init
3111 && !VOID_TYPE_P (TREE_TYPE (init)))
3112 *arg_p = init;
3116 /* If this is a variable sized type, we must remember the size. */
3117 maybe_with_size_expr (arg_p);
3119 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3120 /* Make sure arguments have the same location as the function call
3121 itself. */
3122 protected_set_expr_location (*arg_p, call_location);
3124 /* There is a sequence point before a function call. Side effects in
3125 the argument list must occur before the actual call. So, when
3126 gimplifying arguments, force gimplify_expr to use an internal
3127 post queue which is then appended to the end of PRE_P. */
3128 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3131 /* Don't fold inside offloading or taskreg regions: it can break code by
3132 adding decl references that weren't in the source. We'll do it during
3133 omplower pass instead. */
3135 static bool
3136 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3138 struct gimplify_omp_ctx *ctx;
3139 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3140 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3141 return false;
3142 return fold_stmt (gsi);
3145 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3146 WANT_VALUE is true if the result of the call is desired. */
3148 static enum gimplify_status
3149 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3151 tree fndecl, parms, p, fnptrtype;
3152 enum gimplify_status ret;
3153 int i, nargs;
3154 gcall *call;
3155 bool builtin_va_start_p = false;
3156 location_t loc = EXPR_LOCATION (*expr_p);
3158 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3160 /* For reliable diagnostics during inlining, it is necessary that
3161 every call_expr be annotated with file and line. */
3162 if (! EXPR_HAS_LOCATION (*expr_p))
3163 SET_EXPR_LOCATION (*expr_p, input_location);
3165 /* Gimplify internal functions created in the FEs. */
3166 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3168 if (want_value)
3169 return GS_ALL_DONE;
3171 nargs = call_expr_nargs (*expr_p);
3172 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3173 auto_vec<tree> vargs (nargs);
3175 for (i = 0; i < nargs; i++)
3177 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3178 EXPR_LOCATION (*expr_p));
3179 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3182 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3183 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3184 gimplify_seq_add_stmt (pre_p, call);
3185 return GS_ALL_DONE;
3188 /* This may be a call to a builtin function.
3190 Builtin function calls may be transformed into different
3191 (and more efficient) builtin function calls under certain
3192 circumstances. Unfortunately, gimplification can muck things
3193 up enough that the builtin expanders are not aware that certain
3194 transformations are still valid.
3196 So we attempt transformation/gimplification of the call before
3197 we gimplify the CALL_EXPR. At this time we do not manage to
3198 transform all calls in the same manner as the expanders do, but
3199 we do transform most of them. */
3200 fndecl = get_callee_fndecl (*expr_p);
3201 if (fndecl
3202 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3203 switch (DECL_FUNCTION_CODE (fndecl))
3205 CASE_BUILT_IN_ALLOCA:
3206 /* If the call has been built for a variable-sized object, then we
3207 want to restore the stack level when the enclosing BIND_EXPR is
3208 exited to reclaim the allocated space; otherwise, we precisely
3209 need to do the opposite and preserve the latest stack level. */
3210 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3211 gimplify_ctxp->save_stack = true;
3212 else
3213 gimplify_ctxp->keep_stack = true;
3214 break;
3216 case BUILT_IN_VA_START:
3218 builtin_va_start_p = TRUE;
3219 if (call_expr_nargs (*expr_p) < 2)
3221 error ("too few arguments to function %<va_start%>");
3222 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3223 return GS_OK;
3226 if (fold_builtin_next_arg (*expr_p, true))
3228 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3229 return GS_OK;
3231 break;
3234 default:
3237 if (fndecl && DECL_BUILT_IN (fndecl))
3239 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3240 if (new_tree && new_tree != *expr_p)
3242 /* There was a transformation of this call which computes the
3243 same value, but in a more efficient way. Return and try
3244 again. */
3245 *expr_p = new_tree;
3246 return GS_OK;
3250 /* Remember the original function pointer type. */
3251 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3253 /* There is a sequence point before the call, so any side effects in
3254 the calling expression must occur before the actual call. Force
3255 gimplify_expr to use an internal post queue. */
3256 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3257 is_gimple_call_addr, fb_rvalue);
3259 nargs = call_expr_nargs (*expr_p);
3261 /* Get argument types for verification. */
3262 fndecl = get_callee_fndecl (*expr_p);
3263 parms = NULL_TREE;
3264 if (fndecl)
3265 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3266 else
3267 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3269 if (fndecl && DECL_ARGUMENTS (fndecl))
3270 p = DECL_ARGUMENTS (fndecl);
3271 else if (parms)
3272 p = parms;
3273 else
3274 p = NULL_TREE;
3275 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3278 /* If the last argument is __builtin_va_arg_pack () and it is not
3279 passed as a named argument, decrease the number of CALL_EXPR
3280 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3281 if (!p
3282 && i < nargs
3283 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3285 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3286 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3288 if (last_arg_fndecl
3289 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3290 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3291 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3293 tree call = *expr_p;
3295 --nargs;
3296 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3297 CALL_EXPR_FN (call),
3298 nargs, CALL_EXPR_ARGP (call));
3300 /* Copy all CALL_EXPR flags, location and block, except
3301 CALL_EXPR_VA_ARG_PACK flag. */
3302 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3303 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3304 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3305 = CALL_EXPR_RETURN_SLOT_OPT (call);
3306 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3307 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3309 /* Set CALL_EXPR_VA_ARG_PACK. */
3310 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3314 /* If the call returns twice then after building the CFG the call
3315 argument computations will no longer dominate the call because
3316 we add an abnormal incoming edge to the call. So do not use SSA
3317 vars there. */
3318 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3320 /* Gimplify the function arguments. */
3321 if (nargs > 0)
3323 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3324 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3325 PUSH_ARGS_REVERSED ? i-- : i++)
3327 enum gimplify_status t;
3329 /* Avoid gimplifying the second argument to va_start, which needs to
3330 be the plain PARM_DECL. */
3331 if ((i != 1) || !builtin_va_start_p)
3333 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3334 EXPR_LOCATION (*expr_p), ! returns_twice);
3336 if (t == GS_ERROR)
3337 ret = GS_ERROR;
3342 /* Gimplify the static chain. */
3343 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3345 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3346 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3347 else
3349 enum gimplify_status t;
3350 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3351 EXPR_LOCATION (*expr_p), ! returns_twice);
3352 if (t == GS_ERROR)
3353 ret = GS_ERROR;
3357 /* Verify the function result. */
3358 if (want_value && fndecl
3359 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3361 error_at (loc, "using result of function returning %<void%>");
3362 ret = GS_ERROR;
3365 /* Try this again in case gimplification exposed something. */
3366 if (ret != GS_ERROR)
3368 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3370 if (new_tree && new_tree != *expr_p)
3372 /* There was a transformation of this call which computes the
3373 same value, but in a more efficient way. Return and try
3374 again. */
3375 *expr_p = new_tree;
3376 return GS_OK;
3379 else
3381 *expr_p = error_mark_node;
3382 return GS_ERROR;
3385 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3386 decl. This allows us to eliminate redundant or useless
3387 calls to "const" functions. */
3388 if (TREE_CODE (*expr_p) == CALL_EXPR)
3390 int flags = call_expr_flags (*expr_p);
3391 if (flags & (ECF_CONST | ECF_PURE)
3392 /* An infinite loop is considered a side effect. */
3393 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3394 TREE_SIDE_EFFECTS (*expr_p) = 0;
3397 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3398 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3399 form and delegate the creation of a GIMPLE_CALL to
3400 gimplify_modify_expr. This is always possible because when
3401 WANT_VALUE is true, the caller wants the result of this call into
3402 a temporary, which means that we will emit an INIT_EXPR in
3403 internal_get_tmp_var which will then be handled by
3404 gimplify_modify_expr. */
3405 if (!want_value)
3407 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3408 have to do is replicate it as a GIMPLE_CALL tuple. */
3409 gimple_stmt_iterator gsi;
3410 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3411 notice_special_calls (call);
3412 gimplify_seq_add_stmt (pre_p, call);
3413 gsi = gsi_last (*pre_p);
3414 maybe_fold_stmt (&gsi);
3415 *expr_p = NULL_TREE;
3417 else
3418 /* Remember the original function type. */
3419 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3420 CALL_EXPR_FN (*expr_p));
3422 return ret;
3425 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3426 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3428 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3429 condition is true or false, respectively. If null, we should generate
3430 our own to skip over the evaluation of this specific expression.
3432 LOCUS is the source location of the COND_EXPR.
3434 This function is the tree equivalent of do_jump.
3436 shortcut_cond_r should only be called by shortcut_cond_expr. */
3438 static tree
3439 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3440 location_t locus)
3442 tree local_label = NULL_TREE;
3443 tree t, expr = NULL;
3445 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3446 retain the shortcut semantics. Just insert the gotos here;
3447 shortcut_cond_expr will append the real blocks later. */
3448 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3450 location_t new_locus;
3452 /* Turn if (a && b) into
3454 if (a); else goto no;
3455 if (b) goto yes; else goto no;
3456 (no:) */
3458 if (false_label_p == NULL)
3459 false_label_p = &local_label;
3461 /* Keep the original source location on the first 'if'. */
3462 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3463 append_to_statement_list (t, &expr);
3465 /* Set the source location of the && on the second 'if'. */
3466 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3467 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3468 new_locus);
3469 append_to_statement_list (t, &expr);
3471 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3473 location_t new_locus;
3475 /* Turn if (a || b) into
3477 if (a) goto yes;
3478 if (b) goto yes; else goto no;
3479 (yes:) */
3481 if (true_label_p == NULL)
3482 true_label_p = &local_label;
3484 /* Keep the original source location on the first 'if'. */
3485 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3486 append_to_statement_list (t, &expr);
3488 /* Set the source location of the || on the second 'if'. */
3489 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3490 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3491 new_locus);
3492 append_to_statement_list (t, &expr);
3494 else if (TREE_CODE (pred) == COND_EXPR
3495 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3496 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3498 location_t new_locus;
3500 /* As long as we're messing with gotos, turn if (a ? b : c) into
3501 if (a)
3502 if (b) goto yes; else goto no;
3503 else
3504 if (c) goto yes; else goto no;
3506 Don't do this if one of the arms has void type, which can happen
3507 in C++ when the arm is throw. */
3509 /* Keep the original source location on the first 'if'. Set the source
3510 location of the ? on the second 'if'. */
3511 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3512 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3513 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3514 false_label_p, locus),
3515 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3516 false_label_p, new_locus));
3518 else
3520 expr = build3 (COND_EXPR, void_type_node, pred,
3521 build_and_jump (true_label_p),
3522 build_and_jump (false_label_p));
3523 SET_EXPR_LOCATION (expr, locus);
3526 if (local_label)
3528 t = build1 (LABEL_EXPR, void_type_node, local_label);
3529 append_to_statement_list (t, &expr);
3532 return expr;
3535 /* Given a conditional expression EXPR with short-circuit boolean
3536 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3537 predicate apart into the equivalent sequence of conditionals. */
3539 static tree
3540 shortcut_cond_expr (tree expr)
3542 tree pred = TREE_OPERAND (expr, 0);
3543 tree then_ = TREE_OPERAND (expr, 1);
3544 tree else_ = TREE_OPERAND (expr, 2);
3545 tree true_label, false_label, end_label, t;
3546 tree *true_label_p;
3547 tree *false_label_p;
3548 bool emit_end, emit_false, jump_over_else;
3549 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3550 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3552 /* First do simple transformations. */
3553 if (!else_se)
3555 /* If there is no 'else', turn
3556 if (a && b) then c
3557 into
3558 if (a) if (b) then c. */
3559 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3561 /* Keep the original source location on the first 'if'. */
3562 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3563 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3564 /* Set the source location of the && on the second 'if'. */
3565 if (EXPR_HAS_LOCATION (pred))
3566 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3567 then_ = shortcut_cond_expr (expr);
3568 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3569 pred = TREE_OPERAND (pred, 0);
3570 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3571 SET_EXPR_LOCATION (expr, locus);
3575 if (!then_se)
3577 /* If there is no 'then', turn
3578 if (a || b); else d
3579 into
3580 if (a); else if (b); else d. */
3581 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3583 /* Keep the original source location on the first 'if'. */
3584 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3585 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3586 /* Set the source location of the || on the second 'if'. */
3587 if (EXPR_HAS_LOCATION (pred))
3588 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3589 else_ = shortcut_cond_expr (expr);
3590 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3591 pred = TREE_OPERAND (pred, 0);
3592 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3593 SET_EXPR_LOCATION (expr, locus);
3597 /* If we're done, great. */
3598 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3599 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3600 return expr;
3602 /* Otherwise we need to mess with gotos. Change
3603 if (a) c; else d;
3605 if (a); else goto no;
3606 c; goto end;
3607 no: d; end:
3608 and recursively gimplify the condition. */
3610 true_label = false_label = end_label = NULL_TREE;
3612 /* If our arms just jump somewhere, hijack those labels so we don't
3613 generate jumps to jumps. */
3615 if (then_
3616 && TREE_CODE (then_) == GOTO_EXPR
3617 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3619 true_label = GOTO_DESTINATION (then_);
3620 then_ = NULL;
3621 then_se = false;
3624 if (else_
3625 && TREE_CODE (else_) == GOTO_EXPR
3626 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3628 false_label = GOTO_DESTINATION (else_);
3629 else_ = NULL;
3630 else_se = false;
3633 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3634 if (true_label)
3635 true_label_p = &true_label;
3636 else
3637 true_label_p = NULL;
3639 /* The 'else' branch also needs a label if it contains interesting code. */
3640 if (false_label || else_se)
3641 false_label_p = &false_label;
3642 else
3643 false_label_p = NULL;
3645 /* If there was nothing else in our arms, just forward the label(s). */
3646 if (!then_se && !else_se)
3647 return shortcut_cond_r (pred, true_label_p, false_label_p,
3648 EXPR_LOC_OR_LOC (expr, input_location));
3650 /* If our last subexpression already has a terminal label, reuse it. */
3651 if (else_se)
3652 t = expr_last (else_);
3653 else if (then_se)
3654 t = expr_last (then_);
3655 else
3656 t = NULL;
3657 if (t && TREE_CODE (t) == LABEL_EXPR)
3658 end_label = LABEL_EXPR_LABEL (t);
3660 /* If we don't care about jumping to the 'else' branch, jump to the end
3661 if the condition is false. */
3662 if (!false_label_p)
3663 false_label_p = &end_label;
3665 /* We only want to emit these labels if we aren't hijacking them. */
3666 emit_end = (end_label == NULL_TREE);
3667 emit_false = (false_label == NULL_TREE);
3669 /* We only emit the jump over the else clause if we have to--if the
3670 then clause may fall through. Otherwise we can wind up with a
3671 useless jump and a useless label at the end of gimplified code,
3672 which will cause us to think that this conditional as a whole
3673 falls through even if it doesn't. If we then inline a function
3674 which ends with such a condition, that can cause us to issue an
3675 inappropriate warning about control reaching the end of a
3676 non-void function. */
3677 jump_over_else = block_may_fallthru (then_);
3679 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3680 EXPR_LOC_OR_LOC (expr, input_location));
3682 expr = NULL;
3683 append_to_statement_list (pred, &expr);
3685 append_to_statement_list (then_, &expr);
3686 if (else_se)
3688 if (jump_over_else)
3690 tree last = expr_last (expr);
3691 t = build_and_jump (&end_label);
3692 if (EXPR_HAS_LOCATION (last))
3693 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3694 append_to_statement_list (t, &expr);
3696 if (emit_false)
3698 t = build1 (LABEL_EXPR, void_type_node, false_label);
3699 append_to_statement_list (t, &expr);
3701 append_to_statement_list (else_, &expr);
3703 if (emit_end && end_label)
3705 t = build1 (LABEL_EXPR, void_type_node, end_label);
3706 append_to_statement_list (t, &expr);
3709 return expr;
3712 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3714 tree
3715 gimple_boolify (tree expr)
3717 tree type = TREE_TYPE (expr);
3718 location_t loc = EXPR_LOCATION (expr);
3720 if (TREE_CODE (expr) == NE_EXPR
3721 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3722 && integer_zerop (TREE_OPERAND (expr, 1)))
3724 tree call = TREE_OPERAND (expr, 0);
3725 tree fn = get_callee_fndecl (call);
3727 /* For __builtin_expect ((long) (x), y) recurse into x as well
3728 if x is truth_value_p. */
3729 if (fn
3730 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3731 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3732 && call_expr_nargs (call) == 2)
3734 tree arg = CALL_EXPR_ARG (call, 0);
3735 if (arg)
3737 if (TREE_CODE (arg) == NOP_EXPR
3738 && TREE_TYPE (arg) == TREE_TYPE (call))
3739 arg = TREE_OPERAND (arg, 0);
3740 if (truth_value_p (TREE_CODE (arg)))
3742 arg = gimple_boolify (arg);
3743 CALL_EXPR_ARG (call, 0)
3744 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3750 switch (TREE_CODE (expr))
3752 case TRUTH_AND_EXPR:
3753 case TRUTH_OR_EXPR:
3754 case TRUTH_XOR_EXPR:
3755 case TRUTH_ANDIF_EXPR:
3756 case TRUTH_ORIF_EXPR:
3757 /* Also boolify the arguments of truth exprs. */
3758 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3759 /* FALLTHRU */
3761 case TRUTH_NOT_EXPR:
3762 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3764 /* These expressions always produce boolean results. */
3765 if (TREE_CODE (type) != BOOLEAN_TYPE)
3766 TREE_TYPE (expr) = boolean_type_node;
3767 return expr;
3769 case ANNOTATE_EXPR:
3770 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3772 case annot_expr_ivdep_kind:
3773 case annot_expr_unroll_kind:
3774 case annot_expr_no_vector_kind:
3775 case annot_expr_vector_kind:
3776 case annot_expr_parallel_kind:
3777 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3778 if (TREE_CODE (type) != BOOLEAN_TYPE)
3779 TREE_TYPE (expr) = boolean_type_node;
3780 return expr;
3781 default:
3782 gcc_unreachable ();
3785 default:
3786 if (COMPARISON_CLASS_P (expr))
3788 /* There expressions always prduce boolean results. */
3789 if (TREE_CODE (type) != BOOLEAN_TYPE)
3790 TREE_TYPE (expr) = boolean_type_node;
3791 return expr;
3793 /* Other expressions that get here must have boolean values, but
3794 might need to be converted to the appropriate mode. */
3795 if (TREE_CODE (type) == BOOLEAN_TYPE)
3796 return expr;
3797 return fold_convert_loc (loc, boolean_type_node, expr);
3801 /* Given a conditional expression *EXPR_P without side effects, gimplify
3802 its operands. New statements are inserted to PRE_P. */
3804 static enum gimplify_status
3805 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3807 tree expr = *expr_p, cond;
3808 enum gimplify_status ret, tret;
3809 enum tree_code code;
3811 cond = gimple_boolify (COND_EXPR_COND (expr));
3813 /* We need to handle && and || specially, as their gimplification
3814 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3815 code = TREE_CODE (cond);
3816 if (code == TRUTH_ANDIF_EXPR)
3817 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3818 else if (code == TRUTH_ORIF_EXPR)
3819 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3820 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3821 COND_EXPR_COND (*expr_p) = cond;
3823 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3824 is_gimple_val, fb_rvalue);
3825 ret = MIN (ret, tret);
3826 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3827 is_gimple_val, fb_rvalue);
3829 return MIN (ret, tret);
3832 /* Return true if evaluating EXPR could trap.
3833 EXPR is GENERIC, while tree_could_trap_p can be called
3834 only on GIMPLE. */
3836 static bool
3837 generic_expr_could_trap_p (tree expr)
3839 unsigned i, n;
3841 if (!expr || is_gimple_val (expr))
3842 return false;
3844 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3845 return true;
3847 n = TREE_OPERAND_LENGTH (expr);
3848 for (i = 0; i < n; i++)
3849 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3850 return true;
3852 return false;
3855 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3856 into
3858 if (p) if (p)
3859 t1 = a; a;
3860 else or else
3861 t1 = b; b;
3864 The second form is used when *EXPR_P is of type void.
3866 PRE_P points to the list where side effects that must happen before
3867 *EXPR_P should be stored. */
3869 static enum gimplify_status
3870 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3872 tree expr = *expr_p;
3873 tree type = TREE_TYPE (expr);
3874 location_t loc = EXPR_LOCATION (expr);
3875 tree tmp, arm1, arm2;
3876 enum gimplify_status ret;
3877 tree label_true, label_false, label_cont;
3878 bool have_then_clause_p, have_else_clause_p;
3879 gcond *cond_stmt;
3880 enum tree_code pred_code;
3881 gimple_seq seq = NULL;
3883 /* If this COND_EXPR has a value, copy the values into a temporary within
3884 the arms. */
3885 if (!VOID_TYPE_P (type))
3887 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3888 tree result;
3890 /* If either an rvalue is ok or we do not require an lvalue, create the
3891 temporary. But we cannot do that if the type is addressable. */
3892 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3893 && !TREE_ADDRESSABLE (type))
3895 if (gimplify_ctxp->allow_rhs_cond_expr
3896 /* If either branch has side effects or could trap, it can't be
3897 evaluated unconditionally. */
3898 && !TREE_SIDE_EFFECTS (then_)
3899 && !generic_expr_could_trap_p (then_)
3900 && !TREE_SIDE_EFFECTS (else_)
3901 && !generic_expr_could_trap_p (else_))
3902 return gimplify_pure_cond_expr (expr_p, pre_p);
3904 tmp = create_tmp_var (type, "iftmp");
3905 result = tmp;
3908 /* Otherwise, only create and copy references to the values. */
3909 else
3911 type = build_pointer_type (type);
3913 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3914 then_ = build_fold_addr_expr_loc (loc, then_);
3916 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3917 else_ = build_fold_addr_expr_loc (loc, else_);
3919 expr
3920 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3922 tmp = create_tmp_var (type, "iftmp");
3923 result = build_simple_mem_ref_loc (loc, tmp);
3926 /* Build the new then clause, `tmp = then_;'. But don't build the
3927 assignment if the value is void; in C++ it can be if it's a throw. */
3928 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3929 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3931 /* Similarly, build the new else clause, `tmp = else_;'. */
3932 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3933 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3935 TREE_TYPE (expr) = void_type_node;
3936 recalculate_side_effects (expr);
3938 /* Move the COND_EXPR to the prequeue. */
3939 gimplify_stmt (&expr, pre_p);
3941 *expr_p = result;
3942 return GS_ALL_DONE;
3945 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3946 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3947 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3948 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3950 /* Make sure the condition has BOOLEAN_TYPE. */
3951 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3953 /* Break apart && and || conditions. */
3954 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3955 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3957 expr = shortcut_cond_expr (expr);
3959 if (expr != *expr_p)
3961 *expr_p = expr;
3963 /* We can't rely on gimplify_expr to re-gimplify the expanded
3964 form properly, as cleanups might cause the target labels to be
3965 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3966 set up a conditional context. */
3967 gimple_push_condition ();
3968 gimplify_stmt (expr_p, &seq);
3969 gimple_pop_condition (pre_p);
3970 gimple_seq_add_seq (pre_p, seq);
3972 return GS_ALL_DONE;
3976 /* Now do the normal gimplification. */
3978 /* Gimplify condition. */
3979 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3980 fb_rvalue);
3981 if (ret == GS_ERROR)
3982 return GS_ERROR;
3983 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3985 gimple_push_condition ();
3987 have_then_clause_p = have_else_clause_p = false;
3988 if (TREE_OPERAND (expr, 1) != NULL
3989 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3990 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3991 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3992 == current_function_decl)
3993 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3994 have different locations, otherwise we end up with incorrect
3995 location information on the branches. */
3996 && (optimize
3997 || !EXPR_HAS_LOCATION (expr)
3998 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3999 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
4001 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
4002 have_then_clause_p = true;
4004 else
4005 label_true = create_artificial_label (UNKNOWN_LOCATION);
4006 if (TREE_OPERAND (expr, 2) != NULL
4007 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
4008 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
4009 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
4010 == current_function_decl)
4011 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4012 have different locations, otherwise we end up with incorrect
4013 location information on the branches. */
4014 && (optimize
4015 || !EXPR_HAS_LOCATION (expr)
4016 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
4017 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
4019 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
4020 have_else_clause_p = true;
4022 else
4023 label_false = create_artificial_label (UNKNOWN_LOCATION);
4025 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4026 &arm2);
4027 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4028 label_false);
4029 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4030 gimplify_seq_add_stmt (&seq, cond_stmt);
4031 gimple_stmt_iterator gsi = gsi_last (seq);
4032 maybe_fold_stmt (&gsi);
4034 label_cont = NULL_TREE;
4035 if (!have_then_clause_p)
4037 /* For if (...) {} else { code; } put label_true after
4038 the else block. */
4039 if (TREE_OPERAND (expr, 1) == NULL_TREE
4040 && !have_else_clause_p
4041 && TREE_OPERAND (expr, 2) != NULL_TREE)
4042 label_cont = label_true;
4043 else
4045 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4046 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4047 /* For if (...) { code; } else {} or
4048 if (...) { code; } else goto label; or
4049 if (...) { code; return; } else { ... }
4050 label_cont isn't needed. */
4051 if (!have_else_clause_p
4052 && TREE_OPERAND (expr, 2) != NULL_TREE
4053 && gimple_seq_may_fallthru (seq))
4055 gimple *g;
4056 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4058 g = gimple_build_goto (label_cont);
4060 /* GIMPLE_COND's are very low level; they have embedded
4061 gotos. This particular embedded goto should not be marked
4062 with the location of the original COND_EXPR, as it would
4063 correspond to the COND_EXPR's condition, not the ELSE or the
4064 THEN arms. To avoid marking it with the wrong location, flag
4065 it as "no location". */
4066 gimple_set_do_not_emit_location (g);
4068 gimplify_seq_add_stmt (&seq, g);
4072 if (!have_else_clause_p)
4074 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4075 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4077 if (label_cont)
4078 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4080 gimple_pop_condition (pre_p);
4081 gimple_seq_add_seq (pre_p, seq);
4083 if (ret == GS_ERROR)
4084 ; /* Do nothing. */
4085 else if (have_then_clause_p || have_else_clause_p)
4086 ret = GS_ALL_DONE;
4087 else
4089 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4090 expr = TREE_OPERAND (expr, 0);
4091 gimplify_stmt (&expr, pre_p);
4094 *expr_p = NULL;
4095 return ret;
4098 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4099 to be marked addressable.
4101 We cannot rely on such an expression being directly markable if a temporary
4102 has been created by the gimplification. In this case, we create another
4103 temporary and initialize it with a copy, which will become a store after we
4104 mark it addressable. This can happen if the front-end passed us something
4105 that it could not mark addressable yet, like a Fortran pass-by-reference
4106 parameter (int) floatvar. */
4108 static void
4109 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4111 while (handled_component_p (*expr_p))
4112 expr_p = &TREE_OPERAND (*expr_p, 0);
4113 if (is_gimple_reg (*expr_p))
4115 /* Do not allow an SSA name as the temporary. */
4116 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4117 DECL_GIMPLE_REG_P (var) = 0;
4118 *expr_p = var;
4122 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4123 a call to __builtin_memcpy. */
4125 static enum gimplify_status
4126 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4127 gimple_seq *seq_p)
4129 tree t, to, to_ptr, from, from_ptr;
4130 gcall *gs;
4131 location_t loc = EXPR_LOCATION (*expr_p);
4133 to = TREE_OPERAND (*expr_p, 0);
4134 from = TREE_OPERAND (*expr_p, 1);
4136 /* Mark the RHS addressable. Beware that it may not be possible to do so
4137 directly if a temporary has been created by the gimplification. */
4138 prepare_gimple_addressable (&from, seq_p);
4140 mark_addressable (from);
4141 from_ptr = build_fold_addr_expr_loc (loc, from);
4142 gimplify_arg (&from_ptr, seq_p, loc);
4144 mark_addressable (to);
4145 to_ptr = build_fold_addr_expr_loc (loc, to);
4146 gimplify_arg (&to_ptr, seq_p, loc);
4148 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4150 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4152 if (want_value)
4154 /* tmp = memcpy() */
4155 t = create_tmp_var (TREE_TYPE (to_ptr));
4156 gimple_call_set_lhs (gs, t);
4157 gimplify_seq_add_stmt (seq_p, gs);
4159 *expr_p = build_simple_mem_ref (t);
4160 return GS_ALL_DONE;
4163 gimplify_seq_add_stmt (seq_p, gs);
4164 *expr_p = NULL;
4165 return GS_ALL_DONE;
4168 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4169 a call to __builtin_memset. In this case we know that the RHS is
4170 a CONSTRUCTOR with an empty element list. */
4172 static enum gimplify_status
4173 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4174 gimple_seq *seq_p)
4176 tree t, from, to, to_ptr;
4177 gcall *gs;
4178 location_t loc = EXPR_LOCATION (*expr_p);
4180 /* Assert our assumptions, to abort instead of producing wrong code
4181 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4182 not be immediately exposed. */
4183 from = TREE_OPERAND (*expr_p, 1);
4184 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4185 from = TREE_OPERAND (from, 0);
4187 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4188 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4190 /* Now proceed. */
4191 to = TREE_OPERAND (*expr_p, 0);
4193 to_ptr = build_fold_addr_expr_loc (loc, to);
4194 gimplify_arg (&to_ptr, seq_p, loc);
4195 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4197 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4199 if (want_value)
4201 /* tmp = memset() */
4202 t = create_tmp_var (TREE_TYPE (to_ptr));
4203 gimple_call_set_lhs (gs, t);
4204 gimplify_seq_add_stmt (seq_p, gs);
4206 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4207 return GS_ALL_DONE;
4210 gimplify_seq_add_stmt (seq_p, gs);
4211 *expr_p = NULL;
4212 return GS_ALL_DONE;
4215 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4216 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4217 assignment. Return non-null if we detect a potential overlap. */
4219 struct gimplify_init_ctor_preeval_data
4221 /* The base decl of the lhs object. May be NULL, in which case we
4222 have to assume the lhs is indirect. */
4223 tree lhs_base_decl;
4225 /* The alias set of the lhs object. */
4226 alias_set_type lhs_alias_set;
4229 static tree
4230 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4232 struct gimplify_init_ctor_preeval_data *data
4233 = (struct gimplify_init_ctor_preeval_data *) xdata;
4234 tree t = *tp;
4236 /* If we find the base object, obviously we have overlap. */
4237 if (data->lhs_base_decl == t)
4238 return t;
4240 /* If the constructor component is indirect, determine if we have a
4241 potential overlap with the lhs. The only bits of information we
4242 have to go on at this point are addressability and alias sets. */
4243 if ((INDIRECT_REF_P (t)
4244 || TREE_CODE (t) == MEM_REF)
4245 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4246 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4247 return t;
4249 /* If the constructor component is a call, determine if it can hide a
4250 potential overlap with the lhs through an INDIRECT_REF like above.
4251 ??? Ugh - this is completely broken. In fact this whole analysis
4252 doesn't look conservative. */
4253 if (TREE_CODE (t) == CALL_EXPR)
4255 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4257 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4258 if (POINTER_TYPE_P (TREE_VALUE (type))
4259 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4260 && alias_sets_conflict_p (data->lhs_alias_set,
4261 get_alias_set
4262 (TREE_TYPE (TREE_VALUE (type)))))
4263 return t;
4266 if (IS_TYPE_OR_DECL_P (t))
4267 *walk_subtrees = 0;
4268 return NULL;
4271 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4272 force values that overlap with the lhs (as described by *DATA)
4273 into temporaries. */
4275 static void
4276 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4277 struct gimplify_init_ctor_preeval_data *data)
4279 enum gimplify_status one;
4281 /* If the value is constant, then there's nothing to pre-evaluate. */
4282 if (TREE_CONSTANT (*expr_p))
4284 /* Ensure it does not have side effects, it might contain a reference to
4285 the object we're initializing. */
4286 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4287 return;
4290 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4291 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4292 return;
4294 /* Recurse for nested constructors. */
4295 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4297 unsigned HOST_WIDE_INT ix;
4298 constructor_elt *ce;
4299 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4301 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4302 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4304 return;
4307 /* If this is a variable sized type, we must remember the size. */
4308 maybe_with_size_expr (expr_p);
4310 /* Gimplify the constructor element to something appropriate for the rhs
4311 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4312 the gimplifier will consider this a store to memory. Doing this
4313 gimplification now means that we won't have to deal with complicated
4314 language-specific trees, nor trees like SAVE_EXPR that can induce
4315 exponential search behavior. */
4316 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4317 if (one == GS_ERROR)
4319 *expr_p = NULL;
4320 return;
4323 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4324 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4325 always be true for all scalars, since is_gimple_mem_rhs insists on a
4326 temporary variable for them. */
4327 if (DECL_P (*expr_p))
4328 return;
4330 /* If this is of variable size, we have no choice but to assume it doesn't
4331 overlap since we can't make a temporary for it. */
4332 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4333 return;
4335 /* Otherwise, we must search for overlap ... */
4336 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4337 return;
4339 /* ... and if found, force the value into a temporary. */
4340 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4343 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4344 a RANGE_EXPR in a CONSTRUCTOR for an array.
4346 var = lower;
4347 loop_entry:
4348 object[var] = value;
4349 if (var == upper)
4350 goto loop_exit;
4351 var = var + 1;
4352 goto loop_entry;
4353 loop_exit:
4355 We increment var _after_ the loop exit check because we might otherwise
4356 fail if upper == TYPE_MAX_VALUE (type for upper).
4358 Note that we never have to deal with SAVE_EXPRs here, because this has
4359 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4361 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4362 gimple_seq *, bool);
4364 static void
4365 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4366 tree value, tree array_elt_type,
4367 gimple_seq *pre_p, bool cleared)
4369 tree loop_entry_label, loop_exit_label, fall_thru_label;
4370 tree var, var_type, cref, tmp;
4372 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4373 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4374 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4376 /* Create and initialize the index variable. */
4377 var_type = TREE_TYPE (upper);
4378 var = create_tmp_var (var_type);
4379 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4381 /* Add the loop entry label. */
4382 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4384 /* Build the reference. */
4385 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4386 var, NULL_TREE, NULL_TREE);
4388 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4389 the store. Otherwise just assign value to the reference. */
4391 if (TREE_CODE (value) == CONSTRUCTOR)
4392 /* NB we might have to call ourself recursively through
4393 gimplify_init_ctor_eval if the value is a constructor. */
4394 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4395 pre_p, cleared);
4396 else
4397 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4399 /* We exit the loop when the index var is equal to the upper bound. */
4400 gimplify_seq_add_stmt (pre_p,
4401 gimple_build_cond (EQ_EXPR, var, upper,
4402 loop_exit_label, fall_thru_label));
4404 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4406 /* Otherwise, increment the index var... */
4407 tmp = build2 (PLUS_EXPR, var_type, var,
4408 fold_convert (var_type, integer_one_node));
4409 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4411 /* ...and jump back to the loop entry. */
4412 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4414 /* Add the loop exit label. */
4415 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4418 /* Return true if FDECL is accessing a field that is zero sized. */
4420 static bool
4421 zero_sized_field_decl (const_tree fdecl)
4423 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4424 && integer_zerop (DECL_SIZE (fdecl)))
4425 return true;
4426 return false;
4429 /* Return true if TYPE is zero sized. */
4431 static bool
4432 zero_sized_type (const_tree type)
4434 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4435 && integer_zerop (TYPE_SIZE (type)))
4436 return true;
4437 return false;
4440 /* A subroutine of gimplify_init_constructor. Generate individual
4441 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4442 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4443 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4444 zeroed first. */
4446 static void
4447 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4448 gimple_seq *pre_p, bool cleared)
4450 tree array_elt_type = NULL;
4451 unsigned HOST_WIDE_INT ix;
4452 tree purpose, value;
4454 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4455 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4457 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4459 tree cref;
4461 /* NULL values are created above for gimplification errors. */
4462 if (value == NULL)
4463 continue;
4465 if (cleared && initializer_zerop (value))
4466 continue;
4468 /* ??? Here's to hoping the front end fills in all of the indices,
4469 so we don't have to figure out what's missing ourselves. */
4470 gcc_assert (purpose);
4472 /* Skip zero-sized fields, unless value has side-effects. This can
4473 happen with calls to functions returning a zero-sized type, which
4474 we shouldn't discard. As a number of downstream passes don't
4475 expect sets of zero-sized fields, we rely on the gimplification of
4476 the MODIFY_EXPR we make below to drop the assignment statement. */
4477 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4478 continue;
4480 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4481 whole range. */
4482 if (TREE_CODE (purpose) == RANGE_EXPR)
4484 tree lower = TREE_OPERAND (purpose, 0);
4485 tree upper = TREE_OPERAND (purpose, 1);
4487 /* If the lower bound is equal to upper, just treat it as if
4488 upper was the index. */
4489 if (simple_cst_equal (lower, upper))
4490 purpose = upper;
4491 else
4493 gimplify_init_ctor_eval_range (object, lower, upper, value,
4494 array_elt_type, pre_p, cleared);
4495 continue;
4499 if (array_elt_type)
4501 /* Do not use bitsizetype for ARRAY_REF indices. */
4502 if (TYPE_DOMAIN (TREE_TYPE (object)))
4503 purpose
4504 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4505 purpose);
4506 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4507 purpose, NULL_TREE, NULL_TREE);
4509 else
4511 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4512 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4513 unshare_expr (object), purpose, NULL_TREE);
4516 if (TREE_CODE (value) == CONSTRUCTOR
4517 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4518 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4519 pre_p, cleared);
4520 else
4522 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4523 gimplify_and_add (init, pre_p);
4524 ggc_free (init);
4529 /* Return the appropriate RHS predicate for this LHS. */
4531 gimple_predicate
4532 rhs_predicate_for (tree lhs)
4534 if (is_gimple_reg (lhs))
4535 return is_gimple_reg_rhs_or_call;
4536 else
4537 return is_gimple_mem_rhs_or_call;
4540 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4541 before the LHS has been gimplified. */
4543 static gimple_predicate
4544 initial_rhs_predicate_for (tree lhs)
4546 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4547 return is_gimple_reg_rhs_or_call;
4548 else
4549 return is_gimple_mem_rhs_or_call;
4552 /* Gimplify a C99 compound literal expression. This just means adding
4553 the DECL_EXPR before the current statement and using its anonymous
4554 decl instead. */
4556 static enum gimplify_status
4557 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4558 bool (*gimple_test_f) (tree),
4559 fallback_t fallback)
4561 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4562 tree decl = DECL_EXPR_DECL (decl_s);
4563 tree init = DECL_INITIAL (decl);
4564 /* Mark the decl as addressable if the compound literal
4565 expression is addressable now, otherwise it is marked too late
4566 after we gimplify the initialization expression. */
4567 if (TREE_ADDRESSABLE (*expr_p))
4568 TREE_ADDRESSABLE (decl) = 1;
4569 /* Otherwise, if we don't need an lvalue and have a literal directly
4570 substitute it. Check if it matches the gimple predicate, as
4571 otherwise we'd generate a new temporary, and we can as well just
4572 use the decl we already have. */
4573 else if (!TREE_ADDRESSABLE (decl)
4574 && init
4575 && (fallback & fb_lvalue) == 0
4576 && gimple_test_f (init))
4578 *expr_p = init;
4579 return GS_OK;
4582 /* Preliminarily mark non-addressed complex variables as eligible
4583 for promotion to gimple registers. We'll transform their uses
4584 as we find them. */
4585 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4586 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4587 && !TREE_THIS_VOLATILE (decl)
4588 && !needs_to_live_in_memory (decl))
4589 DECL_GIMPLE_REG_P (decl) = 1;
4591 /* If the decl is not addressable, then it is being used in some
4592 expression or on the right hand side of a statement, and it can
4593 be put into a readonly data section. */
4594 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4595 TREE_READONLY (decl) = 1;
4597 /* This decl isn't mentioned in the enclosing block, so add it to the
4598 list of temps. FIXME it seems a bit of a kludge to say that
4599 anonymous artificial vars aren't pushed, but everything else is. */
4600 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4601 gimple_add_tmp_var (decl);
4603 gimplify_and_add (decl_s, pre_p);
4604 *expr_p = decl;
4605 return GS_OK;
4608 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4609 return a new CONSTRUCTOR if something changed. */
4611 static tree
4612 optimize_compound_literals_in_ctor (tree orig_ctor)
4614 tree ctor = orig_ctor;
4615 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4616 unsigned int idx, num = vec_safe_length (elts);
4618 for (idx = 0; idx < num; idx++)
4620 tree value = (*elts)[idx].value;
4621 tree newval = value;
4622 if (TREE_CODE (value) == CONSTRUCTOR)
4623 newval = optimize_compound_literals_in_ctor (value);
4624 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4626 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4627 tree decl = DECL_EXPR_DECL (decl_s);
4628 tree init = DECL_INITIAL (decl);
4630 if (!TREE_ADDRESSABLE (value)
4631 && !TREE_ADDRESSABLE (decl)
4632 && init
4633 && TREE_CODE (init) == CONSTRUCTOR)
4634 newval = optimize_compound_literals_in_ctor (init);
4636 if (newval == value)
4637 continue;
4639 if (ctor == orig_ctor)
4641 ctor = copy_node (orig_ctor);
4642 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4643 elts = CONSTRUCTOR_ELTS (ctor);
4645 (*elts)[idx].value = newval;
4647 return ctor;
4650 /* A subroutine of gimplify_modify_expr. Break out elements of a
4651 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4653 Note that we still need to clear any elements that don't have explicit
4654 initializers, so if not all elements are initialized we keep the
4655 original MODIFY_EXPR, we just remove all of the constructor elements.
4657 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4658 GS_ERROR if we would have to create a temporary when gimplifying
4659 this constructor. Otherwise, return GS_OK.
4661 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4663 static enum gimplify_status
4664 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4665 bool want_value, bool notify_temp_creation)
4667 tree object, ctor, type;
4668 enum gimplify_status ret;
4669 vec<constructor_elt, va_gc> *elts;
4671 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4673 if (!notify_temp_creation)
4675 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4676 is_gimple_lvalue, fb_lvalue);
4677 if (ret == GS_ERROR)
4678 return ret;
4681 object = TREE_OPERAND (*expr_p, 0);
4682 ctor = TREE_OPERAND (*expr_p, 1)
4683 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4684 type = TREE_TYPE (ctor);
4685 elts = CONSTRUCTOR_ELTS (ctor);
4686 ret = GS_ALL_DONE;
4688 switch (TREE_CODE (type))
4690 case RECORD_TYPE:
4691 case UNION_TYPE:
4692 case QUAL_UNION_TYPE:
4693 case ARRAY_TYPE:
4695 struct gimplify_init_ctor_preeval_data preeval_data;
4696 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4697 bool cleared, complete_p, valid_const_initializer;
4699 /* Aggregate types must lower constructors to initialization of
4700 individual elements. The exception is that a CONSTRUCTOR node
4701 with no elements indicates zero-initialization of the whole. */
4702 if (vec_safe_is_empty (elts))
4704 if (notify_temp_creation)
4705 return GS_OK;
4706 break;
4709 /* Fetch information about the constructor to direct later processing.
4710 We might want to make static versions of it in various cases, and
4711 can only do so if it known to be a valid constant initializer. */
4712 valid_const_initializer
4713 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4714 &num_ctor_elements, &complete_p);
4716 /* If a const aggregate variable is being initialized, then it
4717 should never be a lose to promote the variable to be static. */
4718 if (valid_const_initializer
4719 && num_nonzero_elements > 1
4720 && TREE_READONLY (object)
4721 && VAR_P (object)
4722 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4724 if (notify_temp_creation)
4725 return GS_ERROR;
4726 DECL_INITIAL (object) = ctor;
4727 TREE_STATIC (object) = 1;
4728 if (!DECL_NAME (object))
4729 DECL_NAME (object) = create_tmp_var_name ("C");
4730 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4732 /* ??? C++ doesn't automatically append a .<number> to the
4733 assembler name, and even when it does, it looks at FE private
4734 data structures to figure out what that number should be,
4735 which are not set for this variable. I suppose this is
4736 important for local statics for inline functions, which aren't
4737 "local" in the object file sense. So in order to get a unique
4738 TU-local symbol, we must invoke the lhd version now. */
4739 lhd_set_decl_assembler_name (object);
4741 *expr_p = NULL_TREE;
4742 break;
4745 /* If there are "lots" of initialized elements, even discounting
4746 those that are not address constants (and thus *must* be
4747 computed at runtime), then partition the constructor into
4748 constant and non-constant parts. Block copy the constant
4749 parts in, then generate code for the non-constant parts. */
4750 /* TODO. There's code in cp/typeck.c to do this. */
4752 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4753 /* store_constructor will ignore the clearing of variable-sized
4754 objects. Initializers for such objects must explicitly set
4755 every field that needs to be set. */
4756 cleared = false;
4757 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4758 /* If the constructor isn't complete, clear the whole object
4759 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4761 ??? This ought not to be needed. For any element not present
4762 in the initializer, we should simply set them to zero. Except
4763 we'd need to *find* the elements that are not present, and that
4764 requires trickery to avoid quadratic compile-time behavior in
4765 large cases or excessive memory use in small cases. */
4766 cleared = true;
4767 else if (num_ctor_elements - num_nonzero_elements
4768 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4769 && num_nonzero_elements < num_ctor_elements / 4)
4770 /* If there are "lots" of zeros, it's more efficient to clear
4771 the memory and then set the nonzero elements. */
4772 cleared = true;
4773 else
4774 cleared = false;
4776 /* If there are "lots" of initialized elements, and all of them
4777 are valid address constants, then the entire initializer can
4778 be dropped to memory, and then memcpy'd out. Don't do this
4779 for sparse arrays, though, as it's more efficient to follow
4780 the standard CONSTRUCTOR behavior of memset followed by
4781 individual element initialization. Also don't do this for small
4782 all-zero initializers (which aren't big enough to merit
4783 clearing), and don't try to make bitwise copies of
4784 TREE_ADDRESSABLE types.
4786 We cannot apply such transformation when compiling chkp static
4787 initializer because creation of initializer image in the memory
4788 will require static initialization of bounds for it. It should
4789 result in another gimplification of similar initializer and we
4790 may fall into infinite loop. */
4791 if (valid_const_initializer
4792 && !(cleared || num_nonzero_elements == 0)
4793 && !TREE_ADDRESSABLE (type)
4794 && (!current_function_decl
4795 || !lookup_attribute ("chkp ctor",
4796 DECL_ATTRIBUTES (current_function_decl))))
4798 HOST_WIDE_INT size = int_size_in_bytes (type);
4799 unsigned int align;
4801 /* ??? We can still get unbounded array types, at least
4802 from the C++ front end. This seems wrong, but attempt
4803 to work around it for now. */
4804 if (size < 0)
4806 size = int_size_in_bytes (TREE_TYPE (object));
4807 if (size >= 0)
4808 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4811 /* Find the maximum alignment we can assume for the object. */
4812 /* ??? Make use of DECL_OFFSET_ALIGN. */
4813 if (DECL_P (object))
4814 align = DECL_ALIGN (object);
4815 else
4816 align = TYPE_ALIGN (type);
4818 /* Do a block move either if the size is so small as to make
4819 each individual move a sub-unit move on average, or if it
4820 is so large as to make individual moves inefficient. */
4821 if (size > 0
4822 && num_nonzero_elements > 1
4823 && (size < num_nonzero_elements
4824 || !can_move_by_pieces (size, align)))
4826 if (notify_temp_creation)
4827 return GS_ERROR;
4829 walk_tree (&ctor, force_labels_r, NULL, NULL);
4830 ctor = tree_output_constant_def (ctor);
4831 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4832 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4833 TREE_OPERAND (*expr_p, 1) = ctor;
4835 /* This is no longer an assignment of a CONSTRUCTOR, but
4836 we still may have processing to do on the LHS. So
4837 pretend we didn't do anything here to let that happen. */
4838 return GS_UNHANDLED;
4842 /* If the target is volatile, we have non-zero elements and more than
4843 one field to assign, initialize the target from a temporary. */
4844 if (TREE_THIS_VOLATILE (object)
4845 && !TREE_ADDRESSABLE (type)
4846 && num_nonzero_elements > 0
4847 && vec_safe_length (elts) > 1)
4849 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4850 TREE_OPERAND (*expr_p, 0) = temp;
4851 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4852 *expr_p,
4853 build2 (MODIFY_EXPR, void_type_node,
4854 object, temp));
4855 return GS_OK;
4858 if (notify_temp_creation)
4859 return GS_OK;
4861 /* If there are nonzero elements and if needed, pre-evaluate to capture
4862 elements overlapping with the lhs into temporaries. We must do this
4863 before clearing to fetch the values before they are zeroed-out. */
4864 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4866 preeval_data.lhs_base_decl = get_base_address (object);
4867 if (!DECL_P (preeval_data.lhs_base_decl))
4868 preeval_data.lhs_base_decl = NULL;
4869 preeval_data.lhs_alias_set = get_alias_set (object);
4871 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4872 pre_p, post_p, &preeval_data);
4875 bool ctor_has_side_effects_p
4876 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4878 if (cleared)
4880 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4881 Note that we still have to gimplify, in order to handle the
4882 case of variable sized types. Avoid shared tree structures. */
4883 CONSTRUCTOR_ELTS (ctor) = NULL;
4884 TREE_SIDE_EFFECTS (ctor) = 0;
4885 object = unshare_expr (object);
4886 gimplify_stmt (expr_p, pre_p);
4889 /* If we have not block cleared the object, or if there are nonzero
4890 elements in the constructor, or if the constructor has side effects,
4891 add assignments to the individual scalar fields of the object. */
4892 if (!cleared
4893 || num_nonzero_elements > 0
4894 || ctor_has_side_effects_p)
4895 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4897 *expr_p = NULL_TREE;
4899 break;
4901 case COMPLEX_TYPE:
4903 tree r, i;
4905 if (notify_temp_creation)
4906 return GS_OK;
4908 /* Extract the real and imaginary parts out of the ctor. */
4909 gcc_assert (elts->length () == 2);
4910 r = (*elts)[0].value;
4911 i = (*elts)[1].value;
4912 if (r == NULL || i == NULL)
4914 tree zero = build_zero_cst (TREE_TYPE (type));
4915 if (r == NULL)
4916 r = zero;
4917 if (i == NULL)
4918 i = zero;
4921 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4922 represent creation of a complex value. */
4923 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4925 ctor = build_complex (type, r, i);
4926 TREE_OPERAND (*expr_p, 1) = ctor;
4928 else
4930 ctor = build2 (COMPLEX_EXPR, type, r, i);
4931 TREE_OPERAND (*expr_p, 1) = ctor;
4932 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4933 pre_p,
4934 post_p,
4935 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4936 fb_rvalue);
4939 break;
4941 case VECTOR_TYPE:
4943 unsigned HOST_WIDE_INT ix;
4944 constructor_elt *ce;
4946 if (notify_temp_creation)
4947 return GS_OK;
4949 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4950 if (TREE_CONSTANT (ctor))
4952 bool constant_p = true;
4953 tree value;
4955 /* Even when ctor is constant, it might contain non-*_CST
4956 elements, such as addresses or trapping values like
4957 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4958 in VECTOR_CST nodes. */
4959 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4960 if (!CONSTANT_CLASS_P (value))
4962 constant_p = false;
4963 break;
4966 if (constant_p)
4968 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4969 break;
4972 TREE_CONSTANT (ctor) = 0;
4975 /* Vector types use CONSTRUCTOR all the way through gimple
4976 compilation as a general initializer. */
4977 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4979 enum gimplify_status tret;
4980 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4981 fb_rvalue);
4982 if (tret == GS_ERROR)
4983 ret = GS_ERROR;
4984 else if (TREE_STATIC (ctor)
4985 && !initializer_constant_valid_p (ce->value,
4986 TREE_TYPE (ce->value)))
4987 TREE_STATIC (ctor) = 0;
4989 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4990 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4992 break;
4994 default:
4995 /* So how did we get a CONSTRUCTOR for a scalar type? */
4996 gcc_unreachable ();
4999 if (ret == GS_ERROR)
5000 return GS_ERROR;
5001 /* If we have gimplified both sides of the initializer but have
5002 not emitted an assignment, do so now. */
5003 if (*expr_p)
5005 tree lhs = TREE_OPERAND (*expr_p, 0);
5006 tree rhs = TREE_OPERAND (*expr_p, 1);
5007 if (want_value && object == lhs)
5008 lhs = unshare_expr (lhs);
5009 gassign *init = gimple_build_assign (lhs, rhs);
5010 gimplify_seq_add_stmt (pre_p, init);
5012 if (want_value)
5014 *expr_p = object;
5015 return GS_OK;
5017 else
5019 *expr_p = NULL;
5020 return GS_ALL_DONE;
5024 /* Given a pointer value OP0, return a simplified version of an
5025 indirection through OP0, or NULL_TREE if no simplification is
5026 possible. This may only be applied to a rhs of an expression.
5027 Note that the resulting type may be different from the type pointed
5028 to in the sense that it is still compatible from the langhooks
5029 point of view. */
5031 static tree
5032 gimple_fold_indirect_ref_rhs (tree t)
5034 return gimple_fold_indirect_ref (t);
5037 /* Subroutine of gimplify_modify_expr to do simplifications of
5038 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5039 something changes. */
5041 static enum gimplify_status
5042 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5043 gimple_seq *pre_p, gimple_seq *post_p,
5044 bool want_value)
5046 enum gimplify_status ret = GS_UNHANDLED;
5047 bool changed;
5051 changed = false;
5052 switch (TREE_CODE (*from_p))
5054 case VAR_DECL:
5055 /* If we're assigning from a read-only variable initialized with
5056 a constructor, do the direct assignment from the constructor,
5057 but only if neither source nor target are volatile since this
5058 latter assignment might end up being done on a per-field basis. */
5059 if (DECL_INITIAL (*from_p)
5060 && TREE_READONLY (*from_p)
5061 && !TREE_THIS_VOLATILE (*from_p)
5062 && !TREE_THIS_VOLATILE (*to_p)
5063 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5065 tree old_from = *from_p;
5066 enum gimplify_status subret;
5068 /* Move the constructor into the RHS. */
5069 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5071 /* Let's see if gimplify_init_constructor will need to put
5072 it in memory. */
5073 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5074 false, true);
5075 if (subret == GS_ERROR)
5077 /* If so, revert the change. */
5078 *from_p = old_from;
5080 else
5082 ret = GS_OK;
5083 changed = true;
5086 break;
5087 case INDIRECT_REF:
5089 /* If we have code like
5091 *(const A*)(A*)&x
5093 where the type of "x" is a (possibly cv-qualified variant
5094 of "A"), treat the entire expression as identical to "x".
5095 This kind of code arises in C++ when an object is bound
5096 to a const reference, and if "x" is a TARGET_EXPR we want
5097 to take advantage of the optimization below. */
5098 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5099 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5100 if (t)
5102 if (TREE_THIS_VOLATILE (t) != volatile_p)
5104 if (DECL_P (t))
5105 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5106 build_fold_addr_expr (t));
5107 if (REFERENCE_CLASS_P (t))
5108 TREE_THIS_VOLATILE (t) = volatile_p;
5110 *from_p = t;
5111 ret = GS_OK;
5112 changed = true;
5114 break;
5117 case TARGET_EXPR:
5119 /* If we are initializing something from a TARGET_EXPR, strip the
5120 TARGET_EXPR and initialize it directly, if possible. This can't
5121 be done if the initializer is void, since that implies that the
5122 temporary is set in some non-trivial way.
5124 ??? What about code that pulls out the temp and uses it
5125 elsewhere? I think that such code never uses the TARGET_EXPR as
5126 an initializer. If I'm wrong, we'll die because the temp won't
5127 have any RTL. In that case, I guess we'll need to replace
5128 references somehow. */
5129 tree init = TARGET_EXPR_INITIAL (*from_p);
5131 if (init
5132 && !VOID_TYPE_P (TREE_TYPE (init)))
5134 *from_p = init;
5135 ret = GS_OK;
5136 changed = true;
5139 break;
5141 case COMPOUND_EXPR:
5142 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5143 caught. */
5144 gimplify_compound_expr (from_p, pre_p, true);
5145 ret = GS_OK;
5146 changed = true;
5147 break;
5149 case CONSTRUCTOR:
5150 /* If we already made some changes, let the front end have a
5151 crack at this before we break it down. */
5152 if (ret != GS_UNHANDLED)
5153 break;
5154 /* If we're initializing from a CONSTRUCTOR, break this into
5155 individual MODIFY_EXPRs. */
5156 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5157 false);
5159 case COND_EXPR:
5160 /* If we're assigning to a non-register type, push the assignment
5161 down into the branches. This is mandatory for ADDRESSABLE types,
5162 since we cannot generate temporaries for such, but it saves a
5163 copy in other cases as well. */
5164 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5166 /* This code should mirror the code in gimplify_cond_expr. */
5167 enum tree_code code = TREE_CODE (*expr_p);
5168 tree cond = *from_p;
5169 tree result = *to_p;
5171 ret = gimplify_expr (&result, pre_p, post_p,
5172 is_gimple_lvalue, fb_lvalue);
5173 if (ret != GS_ERROR)
5174 ret = GS_OK;
5176 /* If we are going to write RESULT more than once, clear
5177 TREE_READONLY flag, otherwise we might incorrectly promote
5178 the variable to static const and initialize it at compile
5179 time in one of the branches. */
5180 if (VAR_P (result)
5181 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5182 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5183 TREE_READONLY (result) = 0;
5184 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5185 TREE_OPERAND (cond, 1)
5186 = build2 (code, void_type_node, result,
5187 TREE_OPERAND (cond, 1));
5188 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5189 TREE_OPERAND (cond, 2)
5190 = build2 (code, void_type_node, unshare_expr (result),
5191 TREE_OPERAND (cond, 2));
5193 TREE_TYPE (cond) = void_type_node;
5194 recalculate_side_effects (cond);
5196 if (want_value)
5198 gimplify_and_add (cond, pre_p);
5199 *expr_p = unshare_expr (result);
5201 else
5202 *expr_p = cond;
5203 return ret;
5205 break;
5207 case CALL_EXPR:
5208 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5209 return slot so that we don't generate a temporary. */
5210 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5211 && aggregate_value_p (*from_p, *from_p))
5213 bool use_target;
5215 if (!(rhs_predicate_for (*to_p))(*from_p))
5216 /* If we need a temporary, *to_p isn't accurate. */
5217 use_target = false;
5218 /* It's OK to use the return slot directly unless it's an NRV. */
5219 else if (TREE_CODE (*to_p) == RESULT_DECL
5220 && DECL_NAME (*to_p) == NULL_TREE
5221 && needs_to_live_in_memory (*to_p))
5222 use_target = true;
5223 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5224 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5225 /* Don't force regs into memory. */
5226 use_target = false;
5227 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5228 /* It's OK to use the target directly if it's being
5229 initialized. */
5230 use_target = true;
5231 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5232 != INTEGER_CST)
5233 /* Always use the target and thus RSO for variable-sized types.
5234 GIMPLE cannot deal with a variable-sized assignment
5235 embedded in a call statement. */
5236 use_target = true;
5237 else if (TREE_CODE (*to_p) != SSA_NAME
5238 && (!is_gimple_variable (*to_p)
5239 || needs_to_live_in_memory (*to_p)))
5240 /* Don't use the original target if it's already addressable;
5241 if its address escapes, and the called function uses the
5242 NRV optimization, a conforming program could see *to_p
5243 change before the called function returns; see c++/19317.
5244 When optimizing, the return_slot pass marks more functions
5245 as safe after we have escape info. */
5246 use_target = false;
5247 else
5248 use_target = true;
5250 if (use_target)
5252 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5253 mark_addressable (*to_p);
5256 break;
5258 case WITH_SIZE_EXPR:
5259 /* Likewise for calls that return an aggregate of non-constant size,
5260 since we would not be able to generate a temporary at all. */
5261 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5263 *from_p = TREE_OPERAND (*from_p, 0);
5264 /* We don't change ret in this case because the
5265 WITH_SIZE_EXPR might have been added in
5266 gimplify_modify_expr, so returning GS_OK would lead to an
5267 infinite loop. */
5268 changed = true;
5270 break;
5272 /* If we're initializing from a container, push the initialization
5273 inside it. */
5274 case CLEANUP_POINT_EXPR:
5275 case BIND_EXPR:
5276 case STATEMENT_LIST:
5278 tree wrap = *from_p;
5279 tree t;
5281 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5282 fb_lvalue);
5283 if (ret != GS_ERROR)
5284 ret = GS_OK;
5286 t = voidify_wrapper_expr (wrap, *expr_p);
5287 gcc_assert (t == *expr_p);
5289 if (want_value)
5291 gimplify_and_add (wrap, pre_p);
5292 *expr_p = unshare_expr (*to_p);
5294 else
5295 *expr_p = wrap;
5296 return GS_OK;
5299 case COMPOUND_LITERAL_EXPR:
5301 tree complit = TREE_OPERAND (*expr_p, 1);
5302 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5303 tree decl = DECL_EXPR_DECL (decl_s);
5304 tree init = DECL_INITIAL (decl);
5306 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5307 into struct T x = { 0, 1, 2 } if the address of the
5308 compound literal has never been taken. */
5309 if (!TREE_ADDRESSABLE (complit)
5310 && !TREE_ADDRESSABLE (decl)
5311 && init)
5313 *expr_p = copy_node (*expr_p);
5314 TREE_OPERAND (*expr_p, 1) = init;
5315 return GS_OK;
5319 default:
5320 break;
5323 while (changed);
5325 return ret;
5329 /* Return true if T looks like a valid GIMPLE statement. */
5331 static bool
5332 is_gimple_stmt (tree t)
5334 const enum tree_code code = TREE_CODE (t);
5336 switch (code)
5338 case NOP_EXPR:
5339 /* The only valid NOP_EXPR is the empty statement. */
5340 return IS_EMPTY_STMT (t);
5342 case BIND_EXPR:
5343 case COND_EXPR:
5344 /* These are only valid if they're void. */
5345 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5347 case SWITCH_EXPR:
5348 case GOTO_EXPR:
5349 case RETURN_EXPR:
5350 case LABEL_EXPR:
5351 case CASE_LABEL_EXPR:
5352 case TRY_CATCH_EXPR:
5353 case TRY_FINALLY_EXPR:
5354 case EH_FILTER_EXPR:
5355 case CATCH_EXPR:
5356 case ASM_EXPR:
5357 case STATEMENT_LIST:
5358 case OACC_PARALLEL:
5359 case OACC_KERNELS:
5360 case OACC_DATA:
5361 case OACC_HOST_DATA:
5362 case OACC_DECLARE:
5363 case OACC_UPDATE:
5364 case OACC_ENTER_DATA:
5365 case OACC_EXIT_DATA:
5366 case OACC_CACHE:
5367 case OMP_PARALLEL:
5368 case OMP_FOR:
5369 case OMP_SIMD:
5370 case OMP_DISTRIBUTE:
5371 case OACC_LOOP:
5372 case OMP_SECTIONS:
5373 case OMP_SECTION:
5374 case OMP_SINGLE:
5375 case OMP_MASTER:
5376 case OMP_TASKGROUP:
5377 case OMP_ORDERED:
5378 case OMP_CRITICAL:
5379 case OMP_TASK:
5380 case OMP_TARGET:
5381 case OMP_TARGET_DATA:
5382 case OMP_TARGET_UPDATE:
5383 case OMP_TARGET_ENTER_DATA:
5384 case OMP_TARGET_EXIT_DATA:
5385 case OMP_TASKLOOP:
5386 case OMP_TEAMS:
5387 /* These are always void. */
5388 return true;
5390 case CALL_EXPR:
5391 case MODIFY_EXPR:
5392 case PREDICT_EXPR:
5393 /* These are valid regardless of their type. */
5394 return true;
5396 default:
5397 return false;
5402 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5403 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5404 DECL_GIMPLE_REG_P set.
5406 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5407 other, unmodified part of the complex object just before the total store.
5408 As a consequence, if the object is still uninitialized, an undefined value
5409 will be loaded into a register, which may result in a spurious exception
5410 if the register is floating-point and the value happens to be a signaling
5411 NaN for example. Then the fully-fledged complex operations lowering pass
5412 followed by a DCE pass are necessary in order to fix things up. */
5414 static enum gimplify_status
5415 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5416 bool want_value)
5418 enum tree_code code, ocode;
5419 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5421 lhs = TREE_OPERAND (*expr_p, 0);
5422 rhs = TREE_OPERAND (*expr_p, 1);
5423 code = TREE_CODE (lhs);
5424 lhs = TREE_OPERAND (lhs, 0);
5426 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5427 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5428 TREE_NO_WARNING (other) = 1;
5429 other = get_formal_tmp_var (other, pre_p);
5431 realpart = code == REALPART_EXPR ? rhs : other;
5432 imagpart = code == REALPART_EXPR ? other : rhs;
5434 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5435 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5436 else
5437 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5439 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5440 *expr_p = (want_value) ? rhs : NULL_TREE;
5442 return GS_ALL_DONE;
5445 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5447 modify_expr
5448 : varname '=' rhs
5449 | '*' ID '=' rhs
5451 PRE_P points to the list where side effects that must happen before
5452 *EXPR_P should be stored.
5454 POST_P points to the list where side effects that must happen after
5455 *EXPR_P should be stored.
5457 WANT_VALUE is nonzero iff we want to use the value of this expression
5458 in another expression. */
5460 static enum gimplify_status
5461 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5462 bool want_value)
5464 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5465 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5466 enum gimplify_status ret = GS_UNHANDLED;
5467 gimple *assign;
5468 location_t loc = EXPR_LOCATION (*expr_p);
5469 gimple_stmt_iterator gsi;
5471 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5472 || TREE_CODE (*expr_p) == INIT_EXPR);
5474 /* Trying to simplify a clobber using normal logic doesn't work,
5475 so handle it here. */
5476 if (TREE_CLOBBER_P (*from_p))
5478 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5479 if (ret == GS_ERROR)
5480 return ret;
5481 gcc_assert (!want_value
5482 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5483 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5484 *expr_p = NULL;
5485 return GS_ALL_DONE;
5488 /* Insert pointer conversions required by the middle-end that are not
5489 required by the frontend. This fixes middle-end type checking for
5490 for example gcc.dg/redecl-6.c. */
5491 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5493 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5494 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5495 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5498 /* See if any simplifications can be done based on what the RHS is. */
5499 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5500 want_value);
5501 if (ret != GS_UNHANDLED)
5502 return ret;
5504 /* For zero sized types only gimplify the left hand side and right hand
5505 side as statements and throw away the assignment. Do this after
5506 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5507 types properly. */
5508 if (zero_sized_type (TREE_TYPE (*from_p))
5509 && !want_value
5510 /* Don't do this for calls that return addressable types, expand_call
5511 relies on those having a lhs. */
5512 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5513 && TREE_CODE (*from_p) == CALL_EXPR))
5515 gimplify_stmt (from_p, pre_p);
5516 gimplify_stmt (to_p, pre_p);
5517 *expr_p = NULL_TREE;
5518 return GS_ALL_DONE;
5521 /* If the value being copied is of variable width, compute the length
5522 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5523 before gimplifying any of the operands so that we can resolve any
5524 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5525 the size of the expression to be copied, not of the destination, so
5526 that is what we must do here. */
5527 maybe_with_size_expr (from_p);
5529 /* As a special case, we have to temporarily allow for assignments
5530 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5531 a toplevel statement, when gimplifying the GENERIC expression
5532 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5533 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5535 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5536 prevent gimplify_expr from trying to create a new temporary for
5537 foo's LHS, we tell it that it should only gimplify until it
5538 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5539 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5540 and all we need to do here is set 'a' to be its LHS. */
5542 /* Gimplify the RHS first for C++17 and bug 71104. */
5543 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5544 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5545 if (ret == GS_ERROR)
5546 return ret;
5548 /* Then gimplify the LHS. */
5549 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5550 twice we have to make sure to gimplify into non-SSA as otherwise
5551 the abnormal edge added later will make those defs not dominate
5552 their uses.
5553 ??? Technically this applies only to the registers used in the
5554 resulting non-register *TO_P. */
5555 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5556 if (saved_into_ssa
5557 && TREE_CODE (*from_p) == CALL_EXPR
5558 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5559 gimplify_ctxp->into_ssa = false;
5560 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5561 gimplify_ctxp->into_ssa = saved_into_ssa;
5562 if (ret == GS_ERROR)
5563 return ret;
5565 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5566 guess for the predicate was wrong. */
5567 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5568 if (final_pred != initial_pred)
5570 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5571 if (ret == GS_ERROR)
5572 return ret;
5575 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5576 size as argument to the call. */
5577 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5579 tree call = TREE_OPERAND (*from_p, 0);
5580 tree vlasize = TREE_OPERAND (*from_p, 1);
5582 if (TREE_CODE (call) == CALL_EXPR
5583 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5585 int nargs = call_expr_nargs (call);
5586 tree type = TREE_TYPE (call);
5587 tree ap = CALL_EXPR_ARG (call, 0);
5588 tree tag = CALL_EXPR_ARG (call, 1);
5589 tree aptag = CALL_EXPR_ARG (call, 2);
5590 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5591 IFN_VA_ARG, type,
5592 nargs + 1, ap, tag,
5593 aptag, vlasize);
5594 TREE_OPERAND (*from_p, 0) = newcall;
5598 /* Now see if the above changed *from_p to something we handle specially. */
5599 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5600 want_value);
5601 if (ret != GS_UNHANDLED)
5602 return ret;
5604 /* If we've got a variable sized assignment between two lvalues (i.e. does
5605 not involve a call), then we can make things a bit more straightforward
5606 by converting the assignment to memcpy or memset. */
5607 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5609 tree from = TREE_OPERAND (*from_p, 0);
5610 tree size = TREE_OPERAND (*from_p, 1);
5612 if (TREE_CODE (from) == CONSTRUCTOR)
5613 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5615 if (is_gimple_addressable (from))
5617 *from_p = from;
5618 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5619 pre_p);
5623 /* Transform partial stores to non-addressable complex variables into
5624 total stores. This allows us to use real instead of virtual operands
5625 for these variables, which improves optimization. */
5626 if ((TREE_CODE (*to_p) == REALPART_EXPR
5627 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5628 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5629 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5631 /* Try to alleviate the effects of the gimplification creating artificial
5632 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5633 make sure not to create DECL_DEBUG_EXPR links across functions. */
5634 if (!gimplify_ctxp->into_ssa
5635 && VAR_P (*from_p)
5636 && DECL_IGNORED_P (*from_p)
5637 && DECL_P (*to_p)
5638 && !DECL_IGNORED_P (*to_p)
5639 && decl_function_context (*to_p) == current_function_decl
5640 && decl_function_context (*from_p) == current_function_decl)
5642 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5643 DECL_NAME (*from_p)
5644 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5645 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5646 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5649 if (want_value && TREE_THIS_VOLATILE (*to_p))
5650 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5652 if (TREE_CODE (*from_p) == CALL_EXPR)
5654 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5655 instead of a GIMPLE_ASSIGN. */
5656 gcall *call_stmt;
5657 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5659 /* Gimplify internal functions created in the FEs. */
5660 int nargs = call_expr_nargs (*from_p), i;
5661 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5662 auto_vec<tree> vargs (nargs);
5664 for (i = 0; i < nargs; i++)
5666 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5667 EXPR_LOCATION (*from_p));
5668 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5670 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5671 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5672 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5674 else
5676 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5677 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5678 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5679 tree fndecl = get_callee_fndecl (*from_p);
5680 if (fndecl
5681 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5682 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5683 && call_expr_nargs (*from_p) == 3)
5684 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5685 CALL_EXPR_ARG (*from_p, 0),
5686 CALL_EXPR_ARG (*from_p, 1),
5687 CALL_EXPR_ARG (*from_p, 2));
5688 else
5690 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5693 notice_special_calls (call_stmt);
5694 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5695 gimple_call_set_lhs (call_stmt, *to_p);
5696 else if (TREE_CODE (*to_p) == SSA_NAME)
5697 /* The above is somewhat premature, avoid ICEing later for a
5698 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5699 ??? This doesn't make it a default-def. */
5700 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5702 assign = call_stmt;
5704 else
5706 assign = gimple_build_assign (*to_p, *from_p);
5707 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5708 if (COMPARISON_CLASS_P (*from_p))
5709 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5712 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5714 /* We should have got an SSA name from the start. */
5715 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5716 || ! gimple_in_ssa_p (cfun));
5719 gimplify_seq_add_stmt (pre_p, assign);
5720 gsi = gsi_last (*pre_p);
5721 maybe_fold_stmt (&gsi);
5723 if (want_value)
5725 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5726 return GS_OK;
5728 else
5729 *expr_p = NULL;
5731 return GS_ALL_DONE;
5734 /* Gimplify a comparison between two variable-sized objects. Do this
5735 with a call to BUILT_IN_MEMCMP. */
5737 static enum gimplify_status
5738 gimplify_variable_sized_compare (tree *expr_p)
5740 location_t loc = EXPR_LOCATION (*expr_p);
5741 tree op0 = TREE_OPERAND (*expr_p, 0);
5742 tree op1 = TREE_OPERAND (*expr_p, 1);
5743 tree t, arg, dest, src, expr;
5745 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5746 arg = unshare_expr (arg);
5747 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5748 src = build_fold_addr_expr_loc (loc, op1);
5749 dest = build_fold_addr_expr_loc (loc, op0);
5750 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5751 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5753 expr
5754 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5755 SET_EXPR_LOCATION (expr, loc);
5756 *expr_p = expr;
5758 return GS_OK;
5761 /* Gimplify a comparison between two aggregate objects of integral scalar
5762 mode as a comparison between the bitwise equivalent scalar values. */
5764 static enum gimplify_status
5765 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5767 location_t loc = EXPR_LOCATION (*expr_p);
5768 tree op0 = TREE_OPERAND (*expr_p, 0);
5769 tree op1 = TREE_OPERAND (*expr_p, 1);
5771 tree type = TREE_TYPE (op0);
5772 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5774 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5775 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5777 *expr_p
5778 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5780 return GS_OK;
5783 /* Gimplify an expression sequence. This function gimplifies each
5784 expression and rewrites the original expression with the last
5785 expression of the sequence in GIMPLE form.
5787 PRE_P points to the list where the side effects for all the
5788 expressions in the sequence will be emitted.
5790 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5792 static enum gimplify_status
5793 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5795 tree t = *expr_p;
5799 tree *sub_p = &TREE_OPERAND (t, 0);
5801 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5802 gimplify_compound_expr (sub_p, pre_p, false);
5803 else
5804 gimplify_stmt (sub_p, pre_p);
5806 t = TREE_OPERAND (t, 1);
5808 while (TREE_CODE (t) == COMPOUND_EXPR);
5810 *expr_p = t;
5811 if (want_value)
5812 return GS_OK;
5813 else
5815 gimplify_stmt (expr_p, pre_p);
5816 return GS_ALL_DONE;
5820 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5821 gimplify. After gimplification, EXPR_P will point to a new temporary
5822 that holds the original value of the SAVE_EXPR node.
5824 PRE_P points to the list where side effects that must happen before
5825 *EXPR_P should be stored. */
5827 static enum gimplify_status
5828 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5830 enum gimplify_status ret = GS_ALL_DONE;
5831 tree val;
5833 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5834 val = TREE_OPERAND (*expr_p, 0);
5836 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5837 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5839 /* The operand may be a void-valued expression. It is
5840 being executed only for its side-effects. */
5841 if (TREE_TYPE (val) == void_type_node)
5843 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5844 is_gimple_stmt, fb_none);
5845 val = NULL;
5847 else
5848 /* The temporary may not be an SSA name as later abnormal and EH
5849 control flow may invalidate use/def domination. */
5850 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5852 TREE_OPERAND (*expr_p, 0) = val;
5853 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5856 *expr_p = val;
5858 return ret;
5861 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5863 unary_expr
5864 : ...
5865 | '&' varname
5868 PRE_P points to the list where side effects that must happen before
5869 *EXPR_P should be stored.
5871 POST_P points to the list where side effects that must happen after
5872 *EXPR_P should be stored. */
5874 static enum gimplify_status
5875 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5877 tree expr = *expr_p;
5878 tree op0 = TREE_OPERAND (expr, 0);
5879 enum gimplify_status ret;
5880 location_t loc = EXPR_LOCATION (*expr_p);
5882 switch (TREE_CODE (op0))
5884 case INDIRECT_REF:
5885 do_indirect_ref:
5886 /* Check if we are dealing with an expression of the form '&*ptr'.
5887 While the front end folds away '&*ptr' into 'ptr', these
5888 expressions may be generated internally by the compiler (e.g.,
5889 builtins like __builtin_va_end). */
5890 /* Caution: the silent array decomposition semantics we allow for
5891 ADDR_EXPR means we can't always discard the pair. */
5892 /* Gimplification of the ADDR_EXPR operand may drop
5893 cv-qualification conversions, so make sure we add them if
5894 needed. */
5896 tree op00 = TREE_OPERAND (op0, 0);
5897 tree t_expr = TREE_TYPE (expr);
5898 tree t_op00 = TREE_TYPE (op00);
5900 if (!useless_type_conversion_p (t_expr, t_op00))
5901 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5902 *expr_p = op00;
5903 ret = GS_OK;
5905 break;
5907 case VIEW_CONVERT_EXPR:
5908 /* Take the address of our operand and then convert it to the type of
5909 this ADDR_EXPR.
5911 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5912 all clear. The impact of this transformation is even less clear. */
5914 /* If the operand is a useless conversion, look through it. Doing so
5915 guarantees that the ADDR_EXPR and its operand will remain of the
5916 same type. */
5917 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5918 op0 = TREE_OPERAND (op0, 0);
5920 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5921 build_fold_addr_expr_loc (loc,
5922 TREE_OPERAND (op0, 0)));
5923 ret = GS_OK;
5924 break;
5926 case MEM_REF:
5927 if (integer_zerop (TREE_OPERAND (op0, 1)))
5928 goto do_indirect_ref;
5930 /* fall through */
5932 default:
5933 /* If we see a call to a declared builtin or see its address
5934 being taken (we can unify those cases here) then we can mark
5935 the builtin for implicit generation by GCC. */
5936 if (TREE_CODE (op0) == FUNCTION_DECL
5937 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5938 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5939 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5941 /* We use fb_either here because the C frontend sometimes takes
5942 the address of a call that returns a struct; see
5943 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5944 the implied temporary explicit. */
5946 /* Make the operand addressable. */
5947 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5948 is_gimple_addressable, fb_either);
5949 if (ret == GS_ERROR)
5950 break;
5952 /* Then mark it. Beware that it may not be possible to do so directly
5953 if a temporary has been created by the gimplification. */
5954 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5956 op0 = TREE_OPERAND (expr, 0);
5958 /* For various reasons, the gimplification of the expression
5959 may have made a new INDIRECT_REF. */
5960 if (TREE_CODE (op0) == INDIRECT_REF)
5961 goto do_indirect_ref;
5963 mark_addressable (TREE_OPERAND (expr, 0));
5965 /* The FEs may end up building ADDR_EXPRs early on a decl with
5966 an incomplete type. Re-build ADDR_EXPRs in canonical form
5967 here. */
5968 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5969 *expr_p = build_fold_addr_expr (op0);
5971 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5972 recompute_tree_invariant_for_addr_expr (*expr_p);
5974 /* If we re-built the ADDR_EXPR add a conversion to the original type
5975 if required. */
5976 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5977 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5979 break;
5982 return ret;
5985 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5986 value; output operands should be a gimple lvalue. */
5988 static enum gimplify_status
5989 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5991 tree expr;
5992 int noutputs;
5993 const char **oconstraints;
5994 int i;
5995 tree link;
5996 const char *constraint;
5997 bool allows_mem, allows_reg, is_inout;
5998 enum gimplify_status ret, tret;
5999 gasm *stmt;
6000 vec<tree, va_gc> *inputs;
6001 vec<tree, va_gc> *outputs;
6002 vec<tree, va_gc> *clobbers;
6003 vec<tree, va_gc> *labels;
6004 tree link_next;
6006 expr = *expr_p;
6007 noutputs = list_length (ASM_OUTPUTS (expr));
6008 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6010 inputs = NULL;
6011 outputs = NULL;
6012 clobbers = NULL;
6013 labels = NULL;
6015 ret = GS_ALL_DONE;
6016 link_next = NULL_TREE;
6017 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6019 bool ok;
6020 size_t constraint_len;
6022 link_next = TREE_CHAIN (link);
6024 oconstraints[i]
6025 = constraint
6026 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6027 constraint_len = strlen (constraint);
6028 if (constraint_len == 0)
6029 continue;
6031 ok = parse_output_constraint (&constraint, i, 0, 0,
6032 &allows_mem, &allows_reg, &is_inout);
6033 if (!ok)
6035 ret = GS_ERROR;
6036 is_inout = false;
6039 if (!allows_reg && allows_mem)
6040 mark_addressable (TREE_VALUE (link));
6042 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6043 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6044 fb_lvalue | fb_mayfail);
6045 if (tret == GS_ERROR)
6047 error ("invalid lvalue in asm output %d", i);
6048 ret = tret;
6051 /* If the constraint does not allow memory make sure we gimplify
6052 it to a register if it is not already but its base is. This
6053 happens for complex and vector components. */
6054 if (!allows_mem)
6056 tree op = TREE_VALUE (link);
6057 if (! is_gimple_val (op)
6058 && is_gimple_reg_type (TREE_TYPE (op))
6059 && is_gimple_reg (get_base_address (op)))
6061 tree tem = create_tmp_reg (TREE_TYPE (op));
6062 tree ass;
6063 if (is_inout)
6065 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6066 tem, unshare_expr (op));
6067 gimplify_and_add (ass, pre_p);
6069 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6070 gimplify_and_add (ass, post_p);
6072 TREE_VALUE (link) = tem;
6073 tret = GS_OK;
6077 vec_safe_push (outputs, link);
6078 TREE_CHAIN (link) = NULL_TREE;
6080 if (is_inout)
6082 /* An input/output operand. To give the optimizers more
6083 flexibility, split it into separate input and output
6084 operands. */
6085 tree input;
6086 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6087 char buf[11];
6089 /* Turn the in/out constraint into an output constraint. */
6090 char *p = xstrdup (constraint);
6091 p[0] = '=';
6092 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6094 /* And add a matching input constraint. */
6095 if (allows_reg)
6097 sprintf (buf, "%u", i);
6099 /* If there are multiple alternatives in the constraint,
6100 handle each of them individually. Those that allow register
6101 will be replaced with operand number, the others will stay
6102 unchanged. */
6103 if (strchr (p, ',') != NULL)
6105 size_t len = 0, buflen = strlen (buf);
6106 char *beg, *end, *str, *dst;
6108 for (beg = p + 1;;)
6110 end = strchr (beg, ',');
6111 if (end == NULL)
6112 end = strchr (beg, '\0');
6113 if ((size_t) (end - beg) < buflen)
6114 len += buflen + 1;
6115 else
6116 len += end - beg + 1;
6117 if (*end)
6118 beg = end + 1;
6119 else
6120 break;
6123 str = (char *) alloca (len);
6124 for (beg = p + 1, dst = str;;)
6126 const char *tem;
6127 bool mem_p, reg_p, inout_p;
6129 end = strchr (beg, ',');
6130 if (end)
6131 *end = '\0';
6132 beg[-1] = '=';
6133 tem = beg - 1;
6134 parse_output_constraint (&tem, i, 0, 0,
6135 &mem_p, &reg_p, &inout_p);
6136 if (dst != str)
6137 *dst++ = ',';
6138 if (reg_p)
6140 memcpy (dst, buf, buflen);
6141 dst += buflen;
6143 else
6145 if (end)
6146 len = end - beg;
6147 else
6148 len = strlen (beg);
6149 memcpy (dst, beg, len);
6150 dst += len;
6152 if (end)
6153 beg = end + 1;
6154 else
6155 break;
6157 *dst = '\0';
6158 input = build_string (dst - str, str);
6160 else
6161 input = build_string (strlen (buf), buf);
6163 else
6164 input = build_string (constraint_len - 1, constraint + 1);
6166 free (p);
6168 input = build_tree_list (build_tree_list (NULL_TREE, input),
6169 unshare_expr (TREE_VALUE (link)));
6170 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6174 link_next = NULL_TREE;
6175 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6177 link_next = TREE_CHAIN (link);
6178 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6179 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6180 oconstraints, &allows_mem, &allows_reg);
6182 /* If we can't make copies, we can only accept memory. */
6183 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6185 if (allows_mem)
6186 allows_reg = 0;
6187 else
6189 error ("impossible constraint in %<asm%>");
6190 error ("non-memory input %d must stay in memory", i);
6191 return GS_ERROR;
6195 /* If the operand is a memory input, it should be an lvalue. */
6196 if (!allows_reg && allows_mem)
6198 tree inputv = TREE_VALUE (link);
6199 STRIP_NOPS (inputv);
6200 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6201 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6202 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6203 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6204 || TREE_CODE (inputv) == MODIFY_EXPR)
6205 TREE_VALUE (link) = error_mark_node;
6206 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6207 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6208 if (tret != GS_ERROR)
6210 /* Unlike output operands, memory inputs are not guaranteed
6211 to be lvalues by the FE, and while the expressions are
6212 marked addressable there, if it is e.g. a statement
6213 expression, temporaries in it might not end up being
6214 addressable. They might be already used in the IL and thus
6215 it is too late to make them addressable now though. */
6216 tree x = TREE_VALUE (link);
6217 while (handled_component_p (x))
6218 x = TREE_OPERAND (x, 0);
6219 if (TREE_CODE (x) == MEM_REF
6220 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6221 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6222 if ((VAR_P (x)
6223 || TREE_CODE (x) == PARM_DECL
6224 || TREE_CODE (x) == RESULT_DECL)
6225 && !TREE_ADDRESSABLE (x)
6226 && is_gimple_reg (x))
6228 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6229 input_location), 0,
6230 "memory input %d is not directly addressable",
6232 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6235 mark_addressable (TREE_VALUE (link));
6236 if (tret == GS_ERROR)
6238 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6239 "memory input %d is not directly addressable", i);
6240 ret = tret;
6243 else
6245 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6246 is_gimple_asm_val, fb_rvalue);
6247 if (tret == GS_ERROR)
6248 ret = tret;
6251 TREE_CHAIN (link) = NULL_TREE;
6252 vec_safe_push (inputs, link);
6255 link_next = NULL_TREE;
6256 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6258 link_next = TREE_CHAIN (link);
6259 TREE_CHAIN (link) = NULL_TREE;
6260 vec_safe_push (clobbers, link);
6263 link_next = NULL_TREE;
6264 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6266 link_next = TREE_CHAIN (link);
6267 TREE_CHAIN (link) = NULL_TREE;
6268 vec_safe_push (labels, link);
6271 /* Do not add ASMs with errors to the gimple IL stream. */
6272 if (ret != GS_ERROR)
6274 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6275 inputs, outputs, clobbers, labels);
6277 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6278 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6280 gimplify_seq_add_stmt (pre_p, stmt);
6283 return ret;
6286 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6287 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6288 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6289 return to this function.
6291 FIXME should we complexify the prequeue handling instead? Or use flags
6292 for all the cleanups and let the optimizer tighten them up? The current
6293 code seems pretty fragile; it will break on a cleanup within any
6294 non-conditional nesting. But any such nesting would be broken, anyway;
6295 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6296 and continues out of it. We can do that at the RTL level, though, so
6297 having an optimizer to tighten up try/finally regions would be a Good
6298 Thing. */
6300 static enum gimplify_status
6301 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6303 gimple_stmt_iterator iter;
6304 gimple_seq body_sequence = NULL;
6306 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6308 /* We only care about the number of conditions between the innermost
6309 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6310 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6311 int old_conds = gimplify_ctxp->conditions;
6312 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6313 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6314 gimplify_ctxp->conditions = 0;
6315 gimplify_ctxp->conditional_cleanups = NULL;
6316 gimplify_ctxp->in_cleanup_point_expr = true;
6318 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6320 gimplify_ctxp->conditions = old_conds;
6321 gimplify_ctxp->conditional_cleanups = old_cleanups;
6322 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6324 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6326 gimple *wce = gsi_stmt (iter);
6328 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6330 if (gsi_one_before_end_p (iter))
6332 /* Note that gsi_insert_seq_before and gsi_remove do not
6333 scan operands, unlike some other sequence mutators. */
6334 if (!gimple_wce_cleanup_eh_only (wce))
6335 gsi_insert_seq_before_without_update (&iter,
6336 gimple_wce_cleanup (wce),
6337 GSI_SAME_STMT);
6338 gsi_remove (&iter, true);
6339 break;
6341 else
6343 gtry *gtry;
6344 gimple_seq seq;
6345 enum gimple_try_flags kind;
6347 if (gimple_wce_cleanup_eh_only (wce))
6348 kind = GIMPLE_TRY_CATCH;
6349 else
6350 kind = GIMPLE_TRY_FINALLY;
6351 seq = gsi_split_seq_after (iter);
6353 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6354 /* Do not use gsi_replace here, as it may scan operands.
6355 We want to do a simple structural modification only. */
6356 gsi_set_stmt (&iter, gtry);
6357 iter = gsi_start (gtry->eval);
6360 else
6361 gsi_next (&iter);
6364 gimplify_seq_add_seq (pre_p, body_sequence);
6365 if (temp)
6367 *expr_p = temp;
6368 return GS_OK;
6370 else
6372 *expr_p = NULL;
6373 return GS_ALL_DONE;
6377 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6378 is the cleanup action required. EH_ONLY is true if the cleanup should
6379 only be executed if an exception is thrown, not on normal exit.
6380 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6381 only valid for clobbers. */
6383 static void
6384 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6385 bool force_uncond = false)
6387 gimple *wce;
6388 gimple_seq cleanup_stmts = NULL;
6390 /* Errors can result in improperly nested cleanups. Which results in
6391 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6392 if (seen_error ())
6393 return;
6395 if (gimple_conditional_context ())
6397 /* If we're in a conditional context, this is more complex. We only
6398 want to run the cleanup if we actually ran the initialization that
6399 necessitates it, but we want to run it after the end of the
6400 conditional context. So we wrap the try/finally around the
6401 condition and use a flag to determine whether or not to actually
6402 run the destructor. Thus
6404 test ? f(A()) : 0
6406 becomes (approximately)
6408 flag = 0;
6409 try {
6410 if (test) { A::A(temp); flag = 1; val = f(temp); }
6411 else { val = 0; }
6412 } finally {
6413 if (flag) A::~A(temp);
6417 if (force_uncond)
6419 gimplify_stmt (&cleanup, &cleanup_stmts);
6420 wce = gimple_build_wce (cleanup_stmts);
6421 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6423 else
6425 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6426 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6427 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6429 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6430 gimplify_stmt (&cleanup, &cleanup_stmts);
6431 wce = gimple_build_wce (cleanup_stmts);
6433 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6434 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6435 gimplify_seq_add_stmt (pre_p, ftrue);
6437 /* Because of this manipulation, and the EH edges that jump
6438 threading cannot redirect, the temporary (VAR) will appear
6439 to be used uninitialized. Don't warn. */
6440 TREE_NO_WARNING (var) = 1;
6443 else
6445 gimplify_stmt (&cleanup, &cleanup_stmts);
6446 wce = gimple_build_wce (cleanup_stmts);
6447 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6448 gimplify_seq_add_stmt (pre_p, wce);
6452 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6454 static enum gimplify_status
6455 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6457 tree targ = *expr_p;
6458 tree temp = TARGET_EXPR_SLOT (targ);
6459 tree init = TARGET_EXPR_INITIAL (targ);
6460 enum gimplify_status ret;
6462 bool unpoison_empty_seq = false;
6463 gimple_stmt_iterator unpoison_it;
6465 if (init)
6467 tree cleanup = NULL_TREE;
6469 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6470 to the temps list. Handle also variable length TARGET_EXPRs. */
6471 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6473 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6474 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6475 gimplify_vla_decl (temp, pre_p);
6477 else
6479 /* Save location where we need to place unpoisoning. It's possible
6480 that a variable will be converted to needs_to_live_in_memory. */
6481 unpoison_it = gsi_last (*pre_p);
6482 unpoison_empty_seq = gsi_end_p (unpoison_it);
6484 gimple_add_tmp_var (temp);
6487 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6488 expression is supposed to initialize the slot. */
6489 if (VOID_TYPE_P (TREE_TYPE (init)))
6490 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6491 else
6493 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6494 init = init_expr;
6495 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6496 init = NULL;
6497 ggc_free (init_expr);
6499 if (ret == GS_ERROR)
6501 /* PR c++/28266 Make sure this is expanded only once. */
6502 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6503 return GS_ERROR;
6505 if (init)
6506 gimplify_and_add (init, pre_p);
6508 /* If needed, push the cleanup for the temp. */
6509 if (TARGET_EXPR_CLEANUP (targ))
6511 if (CLEANUP_EH_ONLY (targ))
6512 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6513 CLEANUP_EH_ONLY (targ), pre_p);
6514 else
6515 cleanup = TARGET_EXPR_CLEANUP (targ);
6518 /* Add a clobber for the temporary going out of scope, like
6519 gimplify_bind_expr. */
6520 if (gimplify_ctxp->in_cleanup_point_expr
6521 && needs_to_live_in_memory (temp))
6523 if (flag_stack_reuse == SR_ALL)
6525 tree clobber = build_constructor (TREE_TYPE (temp),
6526 NULL);
6527 TREE_THIS_VOLATILE (clobber) = true;
6528 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6529 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6531 if (asan_poisoned_variables
6532 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6533 && dbg_cnt (asan_use_after_scope))
6535 tree asan_cleanup = build_asan_poison_call_expr (temp);
6536 if (asan_cleanup)
6538 if (unpoison_empty_seq)
6539 unpoison_it = gsi_start (*pre_p);
6541 asan_poison_variable (temp, false, &unpoison_it,
6542 unpoison_empty_seq);
6543 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6547 if (cleanup)
6548 gimple_push_cleanup (temp, cleanup, false, pre_p);
6550 /* Only expand this once. */
6551 TREE_OPERAND (targ, 3) = init;
6552 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6554 else
6555 /* We should have expanded this before. */
6556 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6558 *expr_p = temp;
6559 return GS_OK;
6562 /* Gimplification of expression trees. */
6564 /* Gimplify an expression which appears at statement context. The
6565 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6566 NULL, a new sequence is allocated.
6568 Return true if we actually added a statement to the queue. */
6570 bool
6571 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6573 gimple_seq_node last;
6575 last = gimple_seq_last (*seq_p);
6576 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6577 return last != gimple_seq_last (*seq_p);
6580 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6581 to CTX. If entries already exist, force them to be some flavor of private.
6582 If there is no enclosing parallel, do nothing. */
6584 void
6585 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6587 splay_tree_node n;
6589 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6590 return;
6594 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6595 if (n != NULL)
6597 if (n->value & GOVD_SHARED)
6598 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6599 else if (n->value & GOVD_MAP)
6600 n->value |= GOVD_MAP_TO_ONLY;
6601 else
6602 return;
6604 else if ((ctx->region_type & ORT_TARGET) != 0)
6606 if (ctx->target_map_scalars_firstprivate)
6607 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6608 else
6609 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6611 else if (ctx->region_type != ORT_WORKSHARE
6612 && ctx->region_type != ORT_SIMD
6613 && ctx->region_type != ORT_ACC
6614 && !(ctx->region_type & ORT_TARGET_DATA))
6615 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6617 ctx = ctx->outer_context;
6619 while (ctx);
6622 /* Similarly for each of the type sizes of TYPE. */
6624 static void
6625 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6627 if (type == NULL || type == error_mark_node)
6628 return;
6629 type = TYPE_MAIN_VARIANT (type);
6631 if (ctx->privatized_types->add (type))
6632 return;
6634 switch (TREE_CODE (type))
6636 case INTEGER_TYPE:
6637 case ENUMERAL_TYPE:
6638 case BOOLEAN_TYPE:
6639 case REAL_TYPE:
6640 case FIXED_POINT_TYPE:
6641 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6642 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6643 break;
6645 case ARRAY_TYPE:
6646 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6647 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6648 break;
6650 case RECORD_TYPE:
6651 case UNION_TYPE:
6652 case QUAL_UNION_TYPE:
6654 tree field;
6655 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6656 if (TREE_CODE (field) == FIELD_DECL)
6658 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6659 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6662 break;
6664 case POINTER_TYPE:
6665 case REFERENCE_TYPE:
6666 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6667 break;
6669 default:
6670 break;
6673 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6674 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6675 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6678 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6680 static void
6681 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6683 splay_tree_node n;
6684 unsigned int nflags;
6685 tree t;
6687 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6688 return;
6690 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6691 there are constructors involved somewhere. Exception is a shared clause,
6692 there is nothing privatized in that case. */
6693 if ((flags & GOVD_SHARED) == 0
6694 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6695 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6696 flags |= GOVD_SEEN;
6698 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6699 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6701 /* We shouldn't be re-adding the decl with the same data
6702 sharing class. */
6703 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6704 nflags = n->value | flags;
6705 /* The only combination of data sharing classes we should see is
6706 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6707 reduction variables to be used in data sharing clauses. */
6708 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6709 || ((nflags & GOVD_DATA_SHARE_CLASS)
6710 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6711 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6712 n->value = nflags;
6713 return;
6716 /* When adding a variable-sized variable, we have to handle all sorts
6717 of additional bits of data: the pointer replacement variable, and
6718 the parameters of the type. */
6719 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6721 /* Add the pointer replacement variable as PRIVATE if the variable
6722 replacement is private, else FIRSTPRIVATE since we'll need the
6723 address of the original variable either for SHARED, or for the
6724 copy into or out of the context. */
6725 if (!(flags & GOVD_LOCAL))
6727 if (flags & GOVD_MAP)
6728 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6729 else if (flags & GOVD_PRIVATE)
6730 nflags = GOVD_PRIVATE;
6731 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6732 && (flags & GOVD_FIRSTPRIVATE))
6733 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6734 else
6735 nflags = GOVD_FIRSTPRIVATE;
6736 nflags |= flags & GOVD_SEEN;
6737 t = DECL_VALUE_EXPR (decl);
6738 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6739 t = TREE_OPERAND (t, 0);
6740 gcc_assert (DECL_P (t));
6741 omp_add_variable (ctx, t, nflags);
6744 /* Add all of the variable and type parameters (which should have
6745 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6746 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6747 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6748 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6750 /* The variable-sized variable itself is never SHARED, only some form
6751 of PRIVATE. The sharing would take place via the pointer variable
6752 which we remapped above. */
6753 if (flags & GOVD_SHARED)
6754 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6755 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6757 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6758 alloca statement we generate for the variable, so make sure it
6759 is available. This isn't automatically needed for the SHARED
6760 case, since we won't be allocating local storage then.
6761 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6762 in this case omp_notice_variable will be called later
6763 on when it is gimplified. */
6764 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6765 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6766 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6768 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6769 && lang_hooks.decls.omp_privatize_by_reference (decl))
6771 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6773 /* Similar to the direct variable sized case above, we'll need the
6774 size of references being privatized. */
6775 if ((flags & GOVD_SHARED) == 0)
6777 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6778 if (DECL_P (t))
6779 omp_notice_variable (ctx, t, true);
6783 if (n != NULL)
6784 n->value |= flags;
6785 else
6786 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6788 /* For reductions clauses in OpenACC loop directives, by default create a
6789 copy clause on the enclosing parallel construct for carrying back the
6790 results. */
6791 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6793 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6794 while (outer_ctx)
6796 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6797 if (n != NULL)
6799 /* Ignore local variables and explicitly declared clauses. */
6800 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6801 break;
6802 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6804 /* According to the OpenACC spec, such a reduction variable
6805 should already have a copy map on a kernels construct,
6806 verify that here. */
6807 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6808 && (n->value & GOVD_MAP));
6810 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6812 /* Remove firstprivate and make it a copy map. */
6813 n->value &= ~GOVD_FIRSTPRIVATE;
6814 n->value |= GOVD_MAP;
6817 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6819 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6820 GOVD_MAP | GOVD_SEEN);
6821 break;
6823 outer_ctx = outer_ctx->outer_context;
6828 /* Notice a threadprivate variable DECL used in OMP context CTX.
6829 This just prints out diagnostics about threadprivate variable uses
6830 in untied tasks. If DECL2 is non-NULL, prevent this warning
6831 on that variable. */
6833 static bool
6834 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6835 tree decl2)
6837 splay_tree_node n;
6838 struct gimplify_omp_ctx *octx;
6840 for (octx = ctx; octx; octx = octx->outer_context)
6841 if ((octx->region_type & ORT_TARGET) != 0)
6843 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6844 if (n == NULL)
6846 error ("threadprivate variable %qE used in target region",
6847 DECL_NAME (decl));
6848 error_at (octx->location, "enclosing target region");
6849 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6851 if (decl2)
6852 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6855 if (ctx->region_type != ORT_UNTIED_TASK)
6856 return false;
6857 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6858 if (n == NULL)
6860 error ("threadprivate variable %qE used in untied task",
6861 DECL_NAME (decl));
6862 error_at (ctx->location, "enclosing task");
6863 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6865 if (decl2)
6866 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6867 return false;
6870 /* Return true if global var DECL is device resident. */
6872 static bool
6873 device_resident_p (tree decl)
6875 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6877 if (!attr)
6878 return false;
6880 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6882 tree c = TREE_VALUE (t);
6883 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6884 return true;
6887 return false;
6890 /* Return true if DECL has an ACC DECLARE attribute. */
6892 static bool
6893 is_oacc_declared (tree decl)
6895 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6896 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6897 return declared != NULL_TREE;
6900 /* Determine outer default flags for DECL mentioned in an OMP region
6901 but not declared in an enclosing clause.
6903 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6904 remapped firstprivate instead of shared. To some extent this is
6905 addressed in omp_firstprivatize_type_sizes, but not
6906 effectively. */
6908 static unsigned
6909 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6910 bool in_code, unsigned flags)
6912 enum omp_clause_default_kind default_kind = ctx->default_kind;
6913 enum omp_clause_default_kind kind;
6915 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6916 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6917 default_kind = kind;
6919 switch (default_kind)
6921 case OMP_CLAUSE_DEFAULT_NONE:
6923 const char *rtype;
6925 if (ctx->region_type & ORT_PARALLEL)
6926 rtype = "parallel";
6927 else if (ctx->region_type & ORT_TASK)
6928 rtype = "task";
6929 else if (ctx->region_type & ORT_TEAMS)
6930 rtype = "teams";
6931 else
6932 gcc_unreachable ();
6934 error ("%qE not specified in enclosing %qs",
6935 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6936 error_at (ctx->location, "enclosing %qs", rtype);
6938 /* FALLTHRU */
6939 case OMP_CLAUSE_DEFAULT_SHARED:
6940 flags |= GOVD_SHARED;
6941 break;
6942 case OMP_CLAUSE_DEFAULT_PRIVATE:
6943 flags |= GOVD_PRIVATE;
6944 break;
6945 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6946 flags |= GOVD_FIRSTPRIVATE;
6947 break;
6948 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6949 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6950 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6951 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6953 omp_notice_variable (octx, decl, in_code);
6954 for (; octx; octx = octx->outer_context)
6956 splay_tree_node n2;
6958 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6959 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6960 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6961 continue;
6962 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6964 flags |= GOVD_FIRSTPRIVATE;
6965 goto found_outer;
6967 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6969 flags |= GOVD_SHARED;
6970 goto found_outer;
6975 if (TREE_CODE (decl) == PARM_DECL
6976 || (!is_global_var (decl)
6977 && DECL_CONTEXT (decl) == current_function_decl))
6978 flags |= GOVD_FIRSTPRIVATE;
6979 else
6980 flags |= GOVD_SHARED;
6981 found_outer:
6982 break;
6984 default:
6985 gcc_unreachable ();
6988 return flags;
6992 /* Determine outer default flags for DECL mentioned in an OACC region
6993 but not declared in an enclosing clause. */
6995 static unsigned
6996 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6998 const char *rkind;
6999 bool on_device = false;
7000 bool declared = is_oacc_declared (decl);
7001 tree type = TREE_TYPE (decl);
7003 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7004 type = TREE_TYPE (type);
7006 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7007 && is_global_var (decl)
7008 && device_resident_p (decl))
7010 on_device = true;
7011 flags |= GOVD_MAP_TO_ONLY;
7014 switch (ctx->region_type)
7016 case ORT_ACC_KERNELS:
7017 rkind = "kernels";
7019 if (AGGREGATE_TYPE_P (type))
7021 /* Aggregates default to 'present_or_copy', or 'present'. */
7022 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7023 flags |= GOVD_MAP;
7024 else
7025 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7027 else
7028 /* Scalars default to 'copy'. */
7029 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7031 break;
7033 case ORT_ACC_PARALLEL:
7034 rkind = "parallel";
7036 if (on_device || declared)
7037 flags |= GOVD_MAP;
7038 else if (AGGREGATE_TYPE_P (type))
7040 /* Aggregates default to 'present_or_copy', or 'present'. */
7041 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7042 flags |= GOVD_MAP;
7043 else
7044 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7046 else
7047 /* Scalars default to 'firstprivate'. */
7048 flags |= GOVD_FIRSTPRIVATE;
7050 break;
7052 default:
7053 gcc_unreachable ();
7056 if (DECL_ARTIFICIAL (decl))
7057 ; /* We can get compiler-generated decls, and should not complain
7058 about them. */
7059 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7061 error ("%qE not specified in enclosing OpenACC %qs construct",
7062 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7063 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7065 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7066 ; /* Handled above. */
7067 else
7068 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7070 return flags;
7073 /* Record the fact that DECL was used within the OMP context CTX.
7074 IN_CODE is true when real code uses DECL, and false when we should
7075 merely emit default(none) errors. Return true if DECL is going to
7076 be remapped and thus DECL shouldn't be gimplified into its
7077 DECL_VALUE_EXPR (if any). */
7079 static bool
7080 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7082 splay_tree_node n;
7083 unsigned flags = in_code ? GOVD_SEEN : 0;
7084 bool ret = false, shared;
7086 if (error_operand_p (decl))
7087 return false;
7089 if (ctx->region_type == ORT_NONE)
7090 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7092 if (is_global_var (decl))
7094 /* Threadprivate variables are predetermined. */
7095 if (DECL_THREAD_LOCAL_P (decl))
7096 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7098 if (DECL_HAS_VALUE_EXPR_P (decl))
7100 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7102 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7103 return omp_notice_threadprivate_variable (ctx, decl, value);
7106 if (gimplify_omp_ctxp->outer_context == NULL
7107 && VAR_P (decl)
7108 && oacc_get_fn_attrib (current_function_decl))
7110 location_t loc = DECL_SOURCE_LOCATION (decl);
7112 if (lookup_attribute ("omp declare target link",
7113 DECL_ATTRIBUTES (decl)))
7115 error_at (loc,
7116 "%qE with %<link%> clause used in %<routine%> function",
7117 DECL_NAME (decl));
7118 return false;
7120 else if (!lookup_attribute ("omp declare target",
7121 DECL_ATTRIBUTES (decl)))
7123 error_at (loc,
7124 "%qE requires a %<declare%> directive for use "
7125 "in a %<routine%> function", DECL_NAME (decl));
7126 return false;
7131 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7132 if ((ctx->region_type & ORT_TARGET) != 0)
7134 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7135 if (n == NULL)
7137 unsigned nflags = flags;
7138 if (ctx->target_map_pointers_as_0len_arrays
7139 || ctx->target_map_scalars_firstprivate)
7141 bool is_declare_target = false;
7142 bool is_scalar = false;
7143 if (is_global_var (decl)
7144 && varpool_node::get_create (decl)->offloadable)
7146 struct gimplify_omp_ctx *octx;
7147 for (octx = ctx->outer_context;
7148 octx; octx = octx->outer_context)
7150 n = splay_tree_lookup (octx->variables,
7151 (splay_tree_key)decl);
7152 if (n
7153 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7154 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7155 break;
7157 is_declare_target = octx == NULL;
7159 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7160 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7161 if (is_declare_target)
7163 else if (ctx->target_map_pointers_as_0len_arrays
7164 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7165 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7166 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7167 == POINTER_TYPE)))
7168 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7169 else if (is_scalar)
7170 nflags |= GOVD_FIRSTPRIVATE;
7173 struct gimplify_omp_ctx *octx = ctx->outer_context;
7174 if ((ctx->region_type & ORT_ACC) && octx)
7176 /* Look in outer OpenACC contexts, to see if there's a
7177 data attribute for this variable. */
7178 omp_notice_variable (octx, decl, in_code);
7180 for (; octx; octx = octx->outer_context)
7182 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7183 break;
7184 splay_tree_node n2
7185 = splay_tree_lookup (octx->variables,
7186 (splay_tree_key) decl);
7187 if (n2)
7189 if (octx->region_type == ORT_ACC_HOST_DATA)
7190 error ("variable %qE declared in enclosing "
7191 "%<host_data%> region", DECL_NAME (decl));
7192 nflags |= GOVD_MAP;
7193 if (octx->region_type == ORT_ACC_DATA
7194 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7195 nflags |= GOVD_MAP_0LEN_ARRAY;
7196 goto found_outer;
7202 tree type = TREE_TYPE (decl);
7204 if (nflags == flags
7205 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7206 && lang_hooks.decls.omp_privatize_by_reference (decl))
7207 type = TREE_TYPE (type);
7208 if (nflags == flags
7209 && !lang_hooks.types.omp_mappable_type (type))
7211 error ("%qD referenced in target region does not have "
7212 "a mappable type", decl);
7213 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7215 else if (nflags == flags)
7217 if ((ctx->region_type & ORT_ACC) != 0)
7218 nflags = oacc_default_clause (ctx, decl, flags);
7219 else
7220 nflags |= GOVD_MAP;
7223 found_outer:
7224 omp_add_variable (ctx, decl, nflags);
7226 else
7228 /* If nothing changed, there's nothing left to do. */
7229 if ((n->value & flags) == flags)
7230 return ret;
7231 flags |= n->value;
7232 n->value = flags;
7234 goto do_outer;
7237 if (n == NULL)
7239 if (ctx->region_type == ORT_WORKSHARE
7240 || ctx->region_type == ORT_SIMD
7241 || ctx->region_type == ORT_ACC
7242 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7243 goto do_outer;
7245 flags = omp_default_clause (ctx, decl, in_code, flags);
7247 if ((flags & GOVD_PRIVATE)
7248 && lang_hooks.decls.omp_private_outer_ref (decl))
7249 flags |= GOVD_PRIVATE_OUTER_REF;
7251 omp_add_variable (ctx, decl, flags);
7253 shared = (flags & GOVD_SHARED) != 0;
7254 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7255 goto do_outer;
7258 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7259 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7260 && DECL_SIZE (decl))
7262 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7264 splay_tree_node n2;
7265 tree t = DECL_VALUE_EXPR (decl);
7266 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7267 t = TREE_OPERAND (t, 0);
7268 gcc_assert (DECL_P (t));
7269 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7270 n2->value |= GOVD_SEEN;
7272 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7273 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7274 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7275 != INTEGER_CST))
7277 splay_tree_node n2;
7278 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7279 gcc_assert (DECL_P (t));
7280 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7281 if (n2)
7282 omp_notice_variable (ctx, t, true);
7286 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7287 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7289 /* If nothing changed, there's nothing left to do. */
7290 if ((n->value & flags) == flags)
7291 return ret;
7292 flags |= n->value;
7293 n->value = flags;
7295 do_outer:
7296 /* If the variable is private in the current context, then we don't
7297 need to propagate anything to an outer context. */
7298 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7299 return ret;
7300 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7301 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7302 return ret;
7303 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7304 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7305 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7306 return ret;
7307 if (ctx->outer_context
7308 && omp_notice_variable (ctx->outer_context, decl, in_code))
7309 return true;
7310 return ret;
7313 /* Verify that DECL is private within CTX. If there's specific information
7314 to the contrary in the innermost scope, generate an error. */
7316 static bool
7317 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7319 splay_tree_node n;
7321 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7322 if (n != NULL)
7324 if (n->value & GOVD_SHARED)
7326 if (ctx == gimplify_omp_ctxp)
7328 if (simd)
7329 error ("iteration variable %qE is predetermined linear",
7330 DECL_NAME (decl));
7331 else
7332 error ("iteration variable %qE should be private",
7333 DECL_NAME (decl));
7334 n->value = GOVD_PRIVATE;
7335 return true;
7337 else
7338 return false;
7340 else if ((n->value & GOVD_EXPLICIT) != 0
7341 && (ctx == gimplify_omp_ctxp
7342 || (ctx->region_type == ORT_COMBINED_PARALLEL
7343 && gimplify_omp_ctxp->outer_context == ctx)))
7345 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7346 error ("iteration variable %qE should not be firstprivate",
7347 DECL_NAME (decl));
7348 else if ((n->value & GOVD_REDUCTION) != 0)
7349 error ("iteration variable %qE should not be reduction",
7350 DECL_NAME (decl));
7351 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7352 error ("iteration variable %qE should not be linear",
7353 DECL_NAME (decl));
7354 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7355 error ("iteration variable %qE should not be lastprivate",
7356 DECL_NAME (decl));
7357 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7358 error ("iteration variable %qE should not be private",
7359 DECL_NAME (decl));
7360 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7361 error ("iteration variable %qE is predetermined linear",
7362 DECL_NAME (decl));
7364 return (ctx == gimplify_omp_ctxp
7365 || (ctx->region_type == ORT_COMBINED_PARALLEL
7366 && gimplify_omp_ctxp->outer_context == ctx));
7369 if (ctx->region_type != ORT_WORKSHARE
7370 && ctx->region_type != ORT_SIMD
7371 && ctx->region_type != ORT_ACC)
7372 return false;
7373 else if (ctx->outer_context)
7374 return omp_is_private (ctx->outer_context, decl, simd);
7375 return false;
7378 /* Return true if DECL is private within a parallel region
7379 that binds to the current construct's context or in parallel
7380 region's REDUCTION clause. */
7382 static bool
7383 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7385 splay_tree_node n;
7389 ctx = ctx->outer_context;
7390 if (ctx == NULL)
7392 if (is_global_var (decl))
7393 return false;
7395 /* References might be private, but might be shared too,
7396 when checking for copyprivate, assume they might be
7397 private, otherwise assume they might be shared. */
7398 if (copyprivate)
7399 return true;
7401 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7402 return false;
7404 /* Treat C++ privatized non-static data members outside
7405 of the privatization the same. */
7406 if (omp_member_access_dummy_var (decl))
7407 return false;
7409 return true;
7412 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7414 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7415 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7416 continue;
7418 if (n != NULL)
7420 if ((n->value & GOVD_LOCAL) != 0
7421 && omp_member_access_dummy_var (decl))
7422 return false;
7423 return (n->value & GOVD_SHARED) == 0;
7426 while (ctx->region_type == ORT_WORKSHARE
7427 || ctx->region_type == ORT_SIMD
7428 || ctx->region_type == ORT_ACC);
7429 return false;
7432 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7434 static tree
7435 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7437 tree t = *tp;
7439 /* If this node has been visited, unmark it and keep looking. */
7440 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7441 return t;
7443 if (IS_TYPE_OR_DECL_P (t))
7444 *walk_subtrees = 0;
7445 return NULL_TREE;
7448 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7449 and previous omp contexts. */
7451 static void
7452 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7453 enum omp_region_type region_type,
7454 enum tree_code code)
7456 struct gimplify_omp_ctx *ctx, *outer_ctx;
7457 tree c;
7458 hash_map<tree, tree> *struct_map_to_clause = NULL;
7459 tree *prev_list_p = NULL;
7461 ctx = new_omp_context (region_type);
7462 outer_ctx = ctx->outer_context;
7463 if (code == OMP_TARGET)
7465 if (!lang_GNU_Fortran ())
7466 ctx->target_map_pointers_as_0len_arrays = true;
7467 ctx->target_map_scalars_firstprivate = true;
7469 if (!lang_GNU_Fortran ())
7470 switch (code)
7472 case OMP_TARGET:
7473 case OMP_TARGET_DATA:
7474 case OMP_TARGET_ENTER_DATA:
7475 case OMP_TARGET_EXIT_DATA:
7476 case OACC_DECLARE:
7477 case OACC_HOST_DATA:
7478 ctx->target_firstprivatize_array_bases = true;
7479 default:
7480 break;
7483 while ((c = *list_p) != NULL)
7485 bool remove = false;
7486 bool notice_outer = true;
7487 const char *check_non_private = NULL;
7488 unsigned int flags;
7489 tree decl;
7491 switch (OMP_CLAUSE_CODE (c))
7493 case OMP_CLAUSE_PRIVATE:
7494 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7495 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7497 flags |= GOVD_PRIVATE_OUTER_REF;
7498 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7500 else
7501 notice_outer = false;
7502 goto do_add;
7503 case OMP_CLAUSE_SHARED:
7504 flags = GOVD_SHARED | GOVD_EXPLICIT;
7505 goto do_add;
7506 case OMP_CLAUSE_FIRSTPRIVATE:
7507 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7508 check_non_private = "firstprivate";
7509 goto do_add;
7510 case OMP_CLAUSE_LASTPRIVATE:
7511 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7512 check_non_private = "lastprivate";
7513 decl = OMP_CLAUSE_DECL (c);
7514 if (error_operand_p (decl))
7515 goto do_add;
7516 else if (outer_ctx
7517 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7518 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7519 && splay_tree_lookup (outer_ctx->variables,
7520 (splay_tree_key) decl) == NULL)
7522 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7523 if (outer_ctx->outer_context)
7524 omp_notice_variable (outer_ctx->outer_context, decl, true);
7526 else if (outer_ctx
7527 && (outer_ctx->region_type & ORT_TASK) != 0
7528 && outer_ctx->combined_loop
7529 && splay_tree_lookup (outer_ctx->variables,
7530 (splay_tree_key) decl) == NULL)
7532 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7533 if (outer_ctx->outer_context)
7534 omp_notice_variable (outer_ctx->outer_context, decl, true);
7536 else if (outer_ctx
7537 && (outer_ctx->region_type == ORT_WORKSHARE
7538 || outer_ctx->region_type == ORT_ACC)
7539 && outer_ctx->combined_loop
7540 && splay_tree_lookup (outer_ctx->variables,
7541 (splay_tree_key) decl) == NULL
7542 && !omp_check_private (outer_ctx, decl, false))
7544 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7545 if (outer_ctx->outer_context
7546 && (outer_ctx->outer_context->region_type
7547 == ORT_COMBINED_PARALLEL)
7548 && splay_tree_lookup (outer_ctx->outer_context->variables,
7549 (splay_tree_key) decl) == NULL)
7551 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7552 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7553 if (octx->outer_context)
7555 octx = octx->outer_context;
7556 if (octx->region_type == ORT_WORKSHARE
7557 && octx->combined_loop
7558 && splay_tree_lookup (octx->variables,
7559 (splay_tree_key) decl) == NULL
7560 && !omp_check_private (octx, decl, false))
7562 omp_add_variable (octx, decl,
7563 GOVD_LASTPRIVATE | GOVD_SEEN);
7564 octx = octx->outer_context;
7565 if (octx
7566 && octx->region_type == ORT_COMBINED_TEAMS
7567 && (splay_tree_lookup (octx->variables,
7568 (splay_tree_key) decl)
7569 == NULL))
7571 omp_add_variable (octx, decl,
7572 GOVD_SHARED | GOVD_SEEN);
7573 octx = octx->outer_context;
7576 if (octx)
7577 omp_notice_variable (octx, decl, true);
7580 else if (outer_ctx->outer_context)
7581 omp_notice_variable (outer_ctx->outer_context, decl, true);
7583 goto do_add;
7584 case OMP_CLAUSE_REDUCTION:
7585 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7586 /* OpenACC permits reductions on private variables. */
7587 if (!(region_type & ORT_ACC))
7588 check_non_private = "reduction";
7589 decl = OMP_CLAUSE_DECL (c);
7590 if (TREE_CODE (decl) == MEM_REF)
7592 tree type = TREE_TYPE (decl);
7593 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7594 NULL, is_gimple_val, fb_rvalue, false)
7595 == GS_ERROR)
7597 remove = true;
7598 break;
7600 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7601 if (DECL_P (v))
7603 omp_firstprivatize_variable (ctx, v);
7604 omp_notice_variable (ctx, v, true);
7606 decl = TREE_OPERAND (decl, 0);
7607 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7609 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7610 NULL, is_gimple_val, fb_rvalue, false)
7611 == GS_ERROR)
7613 remove = true;
7614 break;
7616 v = TREE_OPERAND (decl, 1);
7617 if (DECL_P (v))
7619 omp_firstprivatize_variable (ctx, v);
7620 omp_notice_variable (ctx, v, true);
7622 decl = TREE_OPERAND (decl, 0);
7624 if (TREE_CODE (decl) == ADDR_EXPR
7625 || TREE_CODE (decl) == INDIRECT_REF)
7626 decl = TREE_OPERAND (decl, 0);
7628 goto do_add_decl;
7629 case OMP_CLAUSE_LINEAR:
7630 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7631 is_gimple_val, fb_rvalue) == GS_ERROR)
7633 remove = true;
7634 break;
7636 else
7638 if (code == OMP_SIMD
7639 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7641 struct gimplify_omp_ctx *octx = outer_ctx;
7642 if (octx
7643 && octx->region_type == ORT_WORKSHARE
7644 && octx->combined_loop
7645 && !octx->distribute)
7647 if (octx->outer_context
7648 && (octx->outer_context->region_type
7649 == ORT_COMBINED_PARALLEL))
7650 octx = octx->outer_context->outer_context;
7651 else
7652 octx = octx->outer_context;
7654 if (octx
7655 && octx->region_type == ORT_WORKSHARE
7656 && octx->combined_loop
7657 && octx->distribute)
7659 error_at (OMP_CLAUSE_LOCATION (c),
7660 "%<linear%> clause for variable other than "
7661 "loop iterator specified on construct "
7662 "combined with %<distribute%>");
7663 remove = true;
7664 break;
7667 /* For combined #pragma omp parallel for simd, need to put
7668 lastprivate and perhaps firstprivate too on the
7669 parallel. Similarly for #pragma omp for simd. */
7670 struct gimplify_omp_ctx *octx = outer_ctx;
7671 decl = NULL_TREE;
7674 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7675 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7676 break;
7677 decl = OMP_CLAUSE_DECL (c);
7678 if (error_operand_p (decl))
7680 decl = NULL_TREE;
7681 break;
7683 flags = GOVD_SEEN;
7684 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7685 flags |= GOVD_FIRSTPRIVATE;
7686 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7687 flags |= GOVD_LASTPRIVATE;
7688 if (octx
7689 && octx->region_type == ORT_WORKSHARE
7690 && octx->combined_loop)
7692 if (octx->outer_context
7693 && (octx->outer_context->region_type
7694 == ORT_COMBINED_PARALLEL))
7695 octx = octx->outer_context;
7696 else if (omp_check_private (octx, decl, false))
7697 break;
7699 else if (octx
7700 && (octx->region_type & ORT_TASK) != 0
7701 && octx->combined_loop)
7703 else if (octx
7704 && octx->region_type == ORT_COMBINED_PARALLEL
7705 && ctx->region_type == ORT_WORKSHARE
7706 && octx == outer_ctx)
7707 flags = GOVD_SEEN | GOVD_SHARED;
7708 else if (octx
7709 && octx->region_type == ORT_COMBINED_TEAMS)
7710 flags = GOVD_SEEN | GOVD_SHARED;
7711 else if (octx
7712 && octx->region_type == ORT_COMBINED_TARGET)
7714 flags &= ~GOVD_LASTPRIVATE;
7715 if (flags == GOVD_SEEN)
7716 break;
7718 else
7719 break;
7720 splay_tree_node on
7721 = splay_tree_lookup (octx->variables,
7722 (splay_tree_key) decl);
7723 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7725 octx = NULL;
7726 break;
7728 omp_add_variable (octx, decl, flags);
7729 if (octx->outer_context == NULL)
7730 break;
7731 octx = octx->outer_context;
7733 while (1);
7734 if (octx
7735 && decl
7736 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7737 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7738 omp_notice_variable (octx, decl, true);
7740 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7741 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7742 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7744 notice_outer = false;
7745 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7747 goto do_add;
7749 case OMP_CLAUSE_MAP:
7750 decl = OMP_CLAUSE_DECL (c);
7751 if (error_operand_p (decl))
7752 remove = true;
7753 switch (code)
7755 case OMP_TARGET:
7756 break;
7757 case OACC_DATA:
7758 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7759 break;
7760 /* FALLTHRU */
7761 case OMP_TARGET_DATA:
7762 case OMP_TARGET_ENTER_DATA:
7763 case OMP_TARGET_EXIT_DATA:
7764 case OACC_ENTER_DATA:
7765 case OACC_EXIT_DATA:
7766 case OACC_HOST_DATA:
7767 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7768 || (OMP_CLAUSE_MAP_KIND (c)
7769 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7770 /* For target {,enter ,exit }data only the array slice is
7771 mapped, but not the pointer to it. */
7772 remove = true;
7773 break;
7774 default:
7775 break;
7777 if (remove)
7778 break;
7779 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7781 struct gimplify_omp_ctx *octx;
7782 for (octx = outer_ctx; octx; octx = octx->outer_context)
7784 if (octx->region_type != ORT_ACC_HOST_DATA)
7785 break;
7786 splay_tree_node n2
7787 = splay_tree_lookup (octx->variables,
7788 (splay_tree_key) decl);
7789 if (n2)
7790 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7791 "declared in enclosing %<host_data%> region",
7792 DECL_NAME (decl));
7795 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7796 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7797 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7798 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7799 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7801 remove = true;
7802 break;
7804 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7805 || (OMP_CLAUSE_MAP_KIND (c)
7806 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7807 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7809 OMP_CLAUSE_SIZE (c)
7810 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7811 false);
7812 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7813 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7815 if (!DECL_P (decl))
7817 tree d = decl, *pd;
7818 if (TREE_CODE (d) == ARRAY_REF)
7820 while (TREE_CODE (d) == ARRAY_REF)
7821 d = TREE_OPERAND (d, 0);
7822 if (TREE_CODE (d) == COMPONENT_REF
7823 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7824 decl = d;
7826 pd = &OMP_CLAUSE_DECL (c);
7827 if (d == decl
7828 && TREE_CODE (decl) == INDIRECT_REF
7829 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7830 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7831 == REFERENCE_TYPE))
7833 pd = &TREE_OPERAND (decl, 0);
7834 decl = TREE_OPERAND (decl, 0);
7836 if (TREE_CODE (decl) == COMPONENT_REF)
7838 while (TREE_CODE (decl) == COMPONENT_REF)
7839 decl = TREE_OPERAND (decl, 0);
7840 if (TREE_CODE (decl) == INDIRECT_REF
7841 && DECL_P (TREE_OPERAND (decl, 0))
7842 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7843 == REFERENCE_TYPE))
7844 decl = TREE_OPERAND (decl, 0);
7846 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7847 == GS_ERROR)
7849 remove = true;
7850 break;
7852 if (DECL_P (decl))
7854 if (error_operand_p (decl))
7856 remove = true;
7857 break;
7860 tree stype = TREE_TYPE (decl);
7861 if (TREE_CODE (stype) == REFERENCE_TYPE)
7862 stype = TREE_TYPE (stype);
7863 if (TYPE_SIZE_UNIT (stype) == NULL
7864 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7866 error_at (OMP_CLAUSE_LOCATION (c),
7867 "mapping field %qE of variable length "
7868 "structure", OMP_CLAUSE_DECL (c));
7869 remove = true;
7870 break;
7873 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7875 /* Error recovery. */
7876 if (prev_list_p == NULL)
7878 remove = true;
7879 break;
7881 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7883 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7884 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7886 remove = true;
7887 break;
7892 tree offset;
7893 HOST_WIDE_INT bitsize, bitpos;
7894 machine_mode mode;
7895 int unsignedp, reversep, volatilep = 0;
7896 tree base = OMP_CLAUSE_DECL (c);
7897 while (TREE_CODE (base) == ARRAY_REF)
7898 base = TREE_OPERAND (base, 0);
7899 if (TREE_CODE (base) == INDIRECT_REF)
7900 base = TREE_OPERAND (base, 0);
7901 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7902 &mode, &unsignedp, &reversep,
7903 &volatilep);
7904 tree orig_base = base;
7905 if ((TREE_CODE (base) == INDIRECT_REF
7906 || (TREE_CODE (base) == MEM_REF
7907 && integer_zerop (TREE_OPERAND (base, 1))))
7908 && DECL_P (TREE_OPERAND (base, 0))
7909 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7910 == REFERENCE_TYPE))
7911 base = TREE_OPERAND (base, 0);
7912 gcc_assert (base == decl
7913 && (offset == NULL_TREE
7914 || TREE_CODE (offset) == INTEGER_CST));
7916 splay_tree_node n
7917 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7918 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7919 == GOMP_MAP_ALWAYS_POINTER);
7920 if (n == NULL || (n->value & GOVD_MAP) == 0)
7922 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7923 OMP_CLAUSE_MAP);
7924 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7925 if (orig_base != base)
7926 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7927 else
7928 OMP_CLAUSE_DECL (l) = decl;
7929 OMP_CLAUSE_SIZE (l) = size_int (1);
7930 if (struct_map_to_clause == NULL)
7931 struct_map_to_clause = new hash_map<tree, tree>;
7932 struct_map_to_clause->put (decl, l);
7933 if (ptr)
7935 enum gomp_map_kind mkind
7936 = code == OMP_TARGET_EXIT_DATA
7937 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7938 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7939 OMP_CLAUSE_MAP);
7940 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7941 OMP_CLAUSE_DECL (c2)
7942 = unshare_expr (OMP_CLAUSE_DECL (c));
7943 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7944 OMP_CLAUSE_SIZE (c2)
7945 = TYPE_SIZE_UNIT (ptr_type_node);
7946 OMP_CLAUSE_CHAIN (l) = c2;
7947 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7949 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7950 tree c3
7951 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7952 OMP_CLAUSE_MAP);
7953 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7954 OMP_CLAUSE_DECL (c3)
7955 = unshare_expr (OMP_CLAUSE_DECL (c4));
7956 OMP_CLAUSE_SIZE (c3)
7957 = TYPE_SIZE_UNIT (ptr_type_node);
7958 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7959 OMP_CLAUSE_CHAIN (c2) = c3;
7961 *prev_list_p = l;
7962 prev_list_p = NULL;
7964 else
7966 OMP_CLAUSE_CHAIN (l) = c;
7967 *list_p = l;
7968 list_p = &OMP_CLAUSE_CHAIN (l);
7970 if (orig_base != base && code == OMP_TARGET)
7972 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7973 OMP_CLAUSE_MAP);
7974 enum gomp_map_kind mkind
7975 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7976 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7977 OMP_CLAUSE_DECL (c2) = decl;
7978 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7979 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7980 OMP_CLAUSE_CHAIN (l) = c2;
7982 flags = GOVD_MAP | GOVD_EXPLICIT;
7983 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7984 flags |= GOVD_SEEN;
7985 goto do_add_decl;
7987 else
7989 tree *osc = struct_map_to_clause->get (decl);
7990 tree *sc = NULL, *scp = NULL;
7991 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7992 n->value |= GOVD_SEEN;
7993 offset_int o1, o2;
7994 if (offset)
7995 o1 = wi::to_offset (offset);
7996 else
7997 o1 = 0;
7998 if (bitpos)
7999 o1 = o1 + bitpos / BITS_PER_UNIT;
8000 sc = &OMP_CLAUSE_CHAIN (*osc);
8001 if (*sc != c
8002 && (OMP_CLAUSE_MAP_KIND (*sc)
8003 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8004 sc = &OMP_CLAUSE_CHAIN (*sc);
8005 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8006 if (ptr && sc == prev_list_p)
8007 break;
8008 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8009 != COMPONENT_REF
8010 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8011 != INDIRECT_REF)
8012 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8013 != ARRAY_REF))
8014 break;
8015 else
8017 tree offset2;
8018 HOST_WIDE_INT bitsize2, bitpos2;
8019 base = OMP_CLAUSE_DECL (*sc);
8020 if (TREE_CODE (base) == ARRAY_REF)
8022 while (TREE_CODE (base) == ARRAY_REF)
8023 base = TREE_OPERAND (base, 0);
8024 if (TREE_CODE (base) != COMPONENT_REF
8025 || (TREE_CODE (TREE_TYPE (base))
8026 != ARRAY_TYPE))
8027 break;
8029 else if (TREE_CODE (base) == INDIRECT_REF
8030 && (TREE_CODE (TREE_OPERAND (base, 0))
8031 == COMPONENT_REF)
8032 && (TREE_CODE (TREE_TYPE
8033 (TREE_OPERAND (base, 0)))
8034 == REFERENCE_TYPE))
8035 base = TREE_OPERAND (base, 0);
8036 base = get_inner_reference (base, &bitsize2,
8037 &bitpos2, &offset2,
8038 &mode, &unsignedp,
8039 &reversep, &volatilep);
8040 if ((TREE_CODE (base) == INDIRECT_REF
8041 || (TREE_CODE (base) == MEM_REF
8042 && integer_zerop (TREE_OPERAND (base,
8043 1))))
8044 && DECL_P (TREE_OPERAND (base, 0))
8045 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8046 0)))
8047 == REFERENCE_TYPE))
8048 base = TREE_OPERAND (base, 0);
8049 if (base != decl)
8050 break;
8051 if (scp)
8052 continue;
8053 gcc_assert (offset == NULL_TREE
8054 || TREE_CODE (offset) == INTEGER_CST);
8055 tree d1 = OMP_CLAUSE_DECL (*sc);
8056 tree d2 = OMP_CLAUSE_DECL (c);
8057 while (TREE_CODE (d1) == ARRAY_REF)
8058 d1 = TREE_OPERAND (d1, 0);
8059 while (TREE_CODE (d2) == ARRAY_REF)
8060 d2 = TREE_OPERAND (d2, 0);
8061 if (TREE_CODE (d1) == INDIRECT_REF)
8062 d1 = TREE_OPERAND (d1, 0);
8063 if (TREE_CODE (d2) == INDIRECT_REF)
8064 d2 = TREE_OPERAND (d2, 0);
8065 while (TREE_CODE (d1) == COMPONENT_REF)
8066 if (TREE_CODE (d2) == COMPONENT_REF
8067 && TREE_OPERAND (d1, 1)
8068 == TREE_OPERAND (d2, 1))
8070 d1 = TREE_OPERAND (d1, 0);
8071 d2 = TREE_OPERAND (d2, 0);
8073 else
8074 break;
8075 if (d1 == d2)
8077 error_at (OMP_CLAUSE_LOCATION (c),
8078 "%qE appears more than once in map "
8079 "clauses", OMP_CLAUSE_DECL (c));
8080 remove = true;
8081 break;
8083 if (offset2)
8084 o2 = wi::to_offset (offset2);
8085 else
8086 o2 = 0;
8087 if (bitpos2)
8088 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8089 if (wi::ltu_p (o1, o2)
8090 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8092 if (ptr)
8093 scp = sc;
8094 else
8095 break;
8098 if (remove)
8099 break;
8100 OMP_CLAUSE_SIZE (*osc)
8101 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8102 size_one_node);
8103 if (ptr)
8105 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8106 OMP_CLAUSE_MAP);
8107 tree cl = NULL_TREE;
8108 enum gomp_map_kind mkind
8109 = code == OMP_TARGET_EXIT_DATA
8110 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8111 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8112 OMP_CLAUSE_DECL (c2)
8113 = unshare_expr (OMP_CLAUSE_DECL (c));
8114 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8115 OMP_CLAUSE_SIZE (c2)
8116 = TYPE_SIZE_UNIT (ptr_type_node);
8117 cl = scp ? *prev_list_p : c2;
8118 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8120 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8121 tree c3
8122 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8123 OMP_CLAUSE_MAP);
8124 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8125 OMP_CLAUSE_DECL (c3)
8126 = unshare_expr (OMP_CLAUSE_DECL (c4));
8127 OMP_CLAUSE_SIZE (c3)
8128 = TYPE_SIZE_UNIT (ptr_type_node);
8129 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8130 if (!scp)
8131 OMP_CLAUSE_CHAIN (c2) = c3;
8132 else
8133 cl = c3;
8135 if (scp)
8136 *scp = c2;
8137 if (sc == prev_list_p)
8139 *sc = cl;
8140 prev_list_p = NULL;
8142 else
8144 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8145 list_p = prev_list_p;
8146 prev_list_p = NULL;
8147 OMP_CLAUSE_CHAIN (c) = *sc;
8148 *sc = cl;
8149 continue;
8152 else if (*sc != c)
8154 *list_p = OMP_CLAUSE_CHAIN (c);
8155 OMP_CLAUSE_CHAIN (c) = *sc;
8156 *sc = c;
8157 continue;
8161 if (!remove
8162 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8163 && OMP_CLAUSE_CHAIN (c)
8164 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8165 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8166 == GOMP_MAP_ALWAYS_POINTER))
8167 prev_list_p = list_p;
8168 break;
8170 flags = GOVD_MAP | GOVD_EXPLICIT;
8171 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8172 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8173 flags |= GOVD_MAP_ALWAYS_TO;
8174 goto do_add;
8176 case OMP_CLAUSE_DEPEND:
8177 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8179 tree deps = OMP_CLAUSE_DECL (c);
8180 while (deps && TREE_CODE (deps) == TREE_LIST)
8182 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8183 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8184 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8185 pre_p, NULL, is_gimple_val, fb_rvalue);
8186 deps = TREE_CHAIN (deps);
8188 break;
8190 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8191 break;
8192 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8194 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8195 NULL, is_gimple_val, fb_rvalue);
8196 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8198 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8200 remove = true;
8201 break;
8203 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8204 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8205 is_gimple_val, fb_rvalue) == GS_ERROR)
8207 remove = true;
8208 break;
8210 break;
8212 case OMP_CLAUSE_TO:
8213 case OMP_CLAUSE_FROM:
8214 case OMP_CLAUSE__CACHE_:
8215 decl = OMP_CLAUSE_DECL (c);
8216 if (error_operand_p (decl))
8218 remove = true;
8219 break;
8221 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8222 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8223 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8224 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8225 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8227 remove = true;
8228 break;
8230 if (!DECL_P (decl))
8232 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8233 NULL, is_gimple_lvalue, fb_lvalue)
8234 == GS_ERROR)
8236 remove = true;
8237 break;
8239 break;
8241 goto do_notice;
8243 case OMP_CLAUSE_USE_DEVICE_PTR:
8244 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8245 goto do_add;
8246 case OMP_CLAUSE_IS_DEVICE_PTR:
8247 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8248 goto do_add;
8250 do_add:
8251 decl = OMP_CLAUSE_DECL (c);
8252 do_add_decl:
8253 if (error_operand_p (decl))
8255 remove = true;
8256 break;
8258 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8260 tree t = omp_member_access_dummy_var (decl);
8261 if (t)
8263 tree v = DECL_VALUE_EXPR (decl);
8264 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8265 if (outer_ctx)
8266 omp_notice_variable (outer_ctx, t, true);
8269 if (code == OACC_DATA
8270 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8271 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8272 flags |= GOVD_MAP_0LEN_ARRAY;
8273 omp_add_variable (ctx, decl, flags);
8274 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8275 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8277 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8278 GOVD_LOCAL | GOVD_SEEN);
8279 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8280 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8281 find_decl_expr,
8282 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8283 NULL) == NULL_TREE)
8284 omp_add_variable (ctx,
8285 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8286 GOVD_LOCAL | GOVD_SEEN);
8287 gimplify_omp_ctxp = ctx;
8288 push_gimplify_context ();
8290 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8291 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8293 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8294 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8295 pop_gimplify_context
8296 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8297 push_gimplify_context ();
8298 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8299 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8300 pop_gimplify_context
8301 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8302 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8303 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8305 gimplify_omp_ctxp = outer_ctx;
8307 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8308 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8310 gimplify_omp_ctxp = ctx;
8311 push_gimplify_context ();
8312 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8314 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8315 NULL, NULL);
8316 TREE_SIDE_EFFECTS (bind) = 1;
8317 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8318 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8320 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8321 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8322 pop_gimplify_context
8323 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8324 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8326 gimplify_omp_ctxp = outer_ctx;
8328 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8329 && OMP_CLAUSE_LINEAR_STMT (c))
8331 gimplify_omp_ctxp = ctx;
8332 push_gimplify_context ();
8333 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8335 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8336 NULL, NULL);
8337 TREE_SIDE_EFFECTS (bind) = 1;
8338 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8339 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8341 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8342 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8343 pop_gimplify_context
8344 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8345 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8347 gimplify_omp_ctxp = outer_ctx;
8349 if (notice_outer)
8350 goto do_notice;
8351 break;
8353 case OMP_CLAUSE_COPYIN:
8354 case OMP_CLAUSE_COPYPRIVATE:
8355 decl = OMP_CLAUSE_DECL (c);
8356 if (error_operand_p (decl))
8358 remove = true;
8359 break;
8361 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8362 && !remove
8363 && !omp_check_private (ctx, decl, true))
8365 remove = true;
8366 if (is_global_var (decl))
8368 if (DECL_THREAD_LOCAL_P (decl))
8369 remove = false;
8370 else if (DECL_HAS_VALUE_EXPR_P (decl))
8372 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8374 if (value
8375 && DECL_P (value)
8376 && DECL_THREAD_LOCAL_P (value))
8377 remove = false;
8380 if (remove)
8381 error_at (OMP_CLAUSE_LOCATION (c),
8382 "copyprivate variable %qE is not threadprivate"
8383 " or private in outer context", DECL_NAME (decl));
8385 do_notice:
8386 if (outer_ctx)
8387 omp_notice_variable (outer_ctx, decl, true);
8388 if (check_non_private
8389 && region_type == ORT_WORKSHARE
8390 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8391 || decl == OMP_CLAUSE_DECL (c)
8392 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8393 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8394 == ADDR_EXPR
8395 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8396 == POINTER_PLUS_EXPR
8397 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8398 (OMP_CLAUSE_DECL (c), 0), 0))
8399 == ADDR_EXPR)))))
8400 && omp_check_private (ctx, decl, false))
8402 error ("%s variable %qE is private in outer context",
8403 check_non_private, DECL_NAME (decl));
8404 remove = true;
8406 break;
8408 case OMP_CLAUSE_IF:
8409 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8410 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8412 const char *p[2];
8413 for (int i = 0; i < 2; i++)
8414 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8416 case OMP_PARALLEL: p[i] = "parallel"; break;
8417 case OMP_TASK: p[i] = "task"; break;
8418 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8419 case OMP_TARGET_DATA: p[i] = "target data"; break;
8420 case OMP_TARGET: p[i] = "target"; break;
8421 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8422 case OMP_TARGET_ENTER_DATA:
8423 p[i] = "target enter data"; break;
8424 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8425 default: gcc_unreachable ();
8427 error_at (OMP_CLAUSE_LOCATION (c),
8428 "expected %qs %<if%> clause modifier rather than %qs",
8429 p[0], p[1]);
8430 remove = true;
8432 /* Fall through. */
8434 case OMP_CLAUSE_FINAL:
8435 OMP_CLAUSE_OPERAND (c, 0)
8436 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8437 /* Fall through. */
8439 case OMP_CLAUSE_SCHEDULE:
8440 case OMP_CLAUSE_NUM_THREADS:
8441 case OMP_CLAUSE_NUM_TEAMS:
8442 case OMP_CLAUSE_THREAD_LIMIT:
8443 case OMP_CLAUSE_DIST_SCHEDULE:
8444 case OMP_CLAUSE_DEVICE:
8445 case OMP_CLAUSE_PRIORITY:
8446 case OMP_CLAUSE_GRAINSIZE:
8447 case OMP_CLAUSE_NUM_TASKS:
8448 case OMP_CLAUSE_HINT:
8449 case OMP_CLAUSE_ASYNC:
8450 case OMP_CLAUSE_WAIT:
8451 case OMP_CLAUSE_NUM_GANGS:
8452 case OMP_CLAUSE_NUM_WORKERS:
8453 case OMP_CLAUSE_VECTOR_LENGTH:
8454 case OMP_CLAUSE_WORKER:
8455 case OMP_CLAUSE_VECTOR:
8456 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8457 is_gimple_val, fb_rvalue) == GS_ERROR)
8458 remove = true;
8459 break;
8461 case OMP_CLAUSE_GANG:
8462 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8463 is_gimple_val, fb_rvalue) == GS_ERROR)
8464 remove = true;
8465 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8466 is_gimple_val, fb_rvalue) == GS_ERROR)
8467 remove = true;
8468 break;
8470 case OMP_CLAUSE_NOWAIT:
8471 case OMP_CLAUSE_ORDERED:
8472 case OMP_CLAUSE_UNTIED:
8473 case OMP_CLAUSE_COLLAPSE:
8474 case OMP_CLAUSE_TILE:
8475 case OMP_CLAUSE_AUTO:
8476 case OMP_CLAUSE_SEQ:
8477 case OMP_CLAUSE_INDEPENDENT:
8478 case OMP_CLAUSE_MERGEABLE:
8479 case OMP_CLAUSE_PROC_BIND:
8480 case OMP_CLAUSE_SAFELEN:
8481 case OMP_CLAUSE_SIMDLEN:
8482 case OMP_CLAUSE_NOGROUP:
8483 case OMP_CLAUSE_THREADS:
8484 case OMP_CLAUSE_SIMD:
8485 break;
8487 case OMP_CLAUSE_DEFAULTMAP:
8488 ctx->target_map_scalars_firstprivate = false;
8489 break;
8491 case OMP_CLAUSE_ALIGNED:
8492 decl = OMP_CLAUSE_DECL (c);
8493 if (error_operand_p (decl))
8495 remove = true;
8496 break;
8498 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8499 is_gimple_val, fb_rvalue) == GS_ERROR)
8501 remove = true;
8502 break;
8504 if (!is_global_var (decl)
8505 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8506 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8507 break;
8509 case OMP_CLAUSE_DEFAULT:
8510 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8511 break;
8513 default:
8514 gcc_unreachable ();
8517 if (code == OACC_DATA
8518 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8519 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8520 remove = true;
8521 if (remove)
8522 *list_p = OMP_CLAUSE_CHAIN (c);
8523 else
8524 list_p = &OMP_CLAUSE_CHAIN (c);
8527 gimplify_omp_ctxp = ctx;
8528 if (struct_map_to_clause)
8529 delete struct_map_to_clause;
8532 /* Return true if DECL is a candidate for shared to firstprivate
8533 optimization. We only consider non-addressable scalars, not
8534 too big, and not references. */
8536 static bool
8537 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8539 if (TREE_ADDRESSABLE (decl))
8540 return false;
8541 tree type = TREE_TYPE (decl);
8542 if (!is_gimple_reg_type (type)
8543 || TREE_CODE (type) == REFERENCE_TYPE
8544 || TREE_ADDRESSABLE (type))
8545 return false;
8546 /* Don't optimize too large decls, as each thread/task will have
8547 its own. */
8548 HOST_WIDE_INT len = int_size_in_bytes (type);
8549 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8550 return false;
8551 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8552 return false;
8553 return true;
8556 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8557 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8558 GOVD_WRITTEN in outer contexts. */
8560 static void
8561 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8563 for (; ctx; ctx = ctx->outer_context)
8565 splay_tree_node n = splay_tree_lookup (ctx->variables,
8566 (splay_tree_key) decl);
8567 if (n == NULL)
8568 continue;
8569 else if (n->value & GOVD_SHARED)
8571 n->value |= GOVD_WRITTEN;
8572 return;
8574 else if (n->value & GOVD_DATA_SHARE_CLASS)
8575 return;
8579 /* Helper callback for walk_gimple_seq to discover possible stores
8580 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8581 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8582 for those. */
8584 static tree
8585 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8587 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8589 *walk_subtrees = 0;
8590 if (!wi->is_lhs)
8591 return NULL_TREE;
8593 tree op = *tp;
8596 if (handled_component_p (op))
8597 op = TREE_OPERAND (op, 0);
8598 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8599 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8600 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8601 else
8602 break;
8604 while (1);
8605 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8606 return NULL_TREE;
8608 omp_mark_stores (gimplify_omp_ctxp, op);
8609 return NULL_TREE;
8612 /* Helper callback for walk_gimple_seq to discover possible stores
8613 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8614 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8615 for those. */
8617 static tree
8618 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8619 bool *handled_ops_p,
8620 struct walk_stmt_info *wi)
8622 gimple *stmt = gsi_stmt (*gsi_p);
8623 switch (gimple_code (stmt))
8625 /* Don't recurse on OpenMP constructs for which
8626 gimplify_adjust_omp_clauses already handled the bodies,
8627 except handle gimple_omp_for_pre_body. */
8628 case GIMPLE_OMP_FOR:
8629 *handled_ops_p = true;
8630 if (gimple_omp_for_pre_body (stmt))
8631 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8632 omp_find_stores_stmt, omp_find_stores_op, wi);
8633 break;
8634 case GIMPLE_OMP_PARALLEL:
8635 case GIMPLE_OMP_TASK:
8636 case GIMPLE_OMP_SECTIONS:
8637 case GIMPLE_OMP_SINGLE:
8638 case GIMPLE_OMP_TARGET:
8639 case GIMPLE_OMP_TEAMS:
8640 case GIMPLE_OMP_CRITICAL:
8641 *handled_ops_p = true;
8642 break;
8643 default:
8644 break;
8646 return NULL_TREE;
8649 struct gimplify_adjust_omp_clauses_data
8651 tree *list_p;
8652 gimple_seq *pre_p;
8655 /* For all variables that were not actually used within the context,
8656 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8658 static int
8659 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8661 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8662 gimple_seq *pre_p
8663 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8664 tree decl = (tree) n->key;
8665 unsigned flags = n->value;
8666 enum omp_clause_code code;
8667 tree clause;
8668 bool private_debug;
8670 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8671 return 0;
8672 if ((flags & GOVD_SEEN) == 0)
8673 return 0;
8674 if (flags & GOVD_DEBUG_PRIVATE)
8676 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8677 private_debug = true;
8679 else if (flags & GOVD_MAP)
8680 private_debug = false;
8681 else
8682 private_debug
8683 = lang_hooks.decls.omp_private_debug_clause (decl,
8684 !!(flags & GOVD_SHARED));
8685 if (private_debug)
8686 code = OMP_CLAUSE_PRIVATE;
8687 else if (flags & GOVD_MAP)
8689 code = OMP_CLAUSE_MAP;
8690 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8691 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8693 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8694 return 0;
8697 else if (flags & GOVD_SHARED)
8699 if (is_global_var (decl))
8701 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8702 while (ctx != NULL)
8704 splay_tree_node on
8705 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8706 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8707 | GOVD_PRIVATE | GOVD_REDUCTION
8708 | GOVD_LINEAR | GOVD_MAP)) != 0)
8709 break;
8710 ctx = ctx->outer_context;
8712 if (ctx == NULL)
8713 return 0;
8715 code = OMP_CLAUSE_SHARED;
8717 else if (flags & GOVD_PRIVATE)
8718 code = OMP_CLAUSE_PRIVATE;
8719 else if (flags & GOVD_FIRSTPRIVATE)
8721 code = OMP_CLAUSE_FIRSTPRIVATE;
8722 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8723 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8724 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8726 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8727 "%<target%> construct", decl);
8728 return 0;
8731 else if (flags & GOVD_LASTPRIVATE)
8732 code = OMP_CLAUSE_LASTPRIVATE;
8733 else if (flags & GOVD_ALIGNED)
8734 return 0;
8735 else
8736 gcc_unreachable ();
8738 if (((flags & GOVD_LASTPRIVATE)
8739 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8740 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8741 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8743 tree chain = *list_p;
8744 clause = build_omp_clause (input_location, code);
8745 OMP_CLAUSE_DECL (clause) = decl;
8746 OMP_CLAUSE_CHAIN (clause) = chain;
8747 if (private_debug)
8748 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8749 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8750 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8751 else if (code == OMP_CLAUSE_SHARED
8752 && (flags & GOVD_WRITTEN) == 0
8753 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8754 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8755 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8756 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8757 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8759 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8760 OMP_CLAUSE_DECL (nc) = decl;
8761 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8762 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8763 OMP_CLAUSE_DECL (clause)
8764 = build_simple_mem_ref_loc (input_location, decl);
8765 OMP_CLAUSE_DECL (clause)
8766 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8767 build_int_cst (build_pointer_type (char_type_node), 0));
8768 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8769 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8770 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8771 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8772 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8773 OMP_CLAUSE_CHAIN (nc) = chain;
8774 OMP_CLAUSE_CHAIN (clause) = nc;
8775 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8776 gimplify_omp_ctxp = ctx->outer_context;
8777 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8778 pre_p, NULL, is_gimple_val, fb_rvalue);
8779 gimplify_omp_ctxp = ctx;
8781 else if (code == OMP_CLAUSE_MAP)
8783 int kind;
8784 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8785 switch (flags & (GOVD_MAP_TO_ONLY
8786 | GOVD_MAP_FORCE
8787 | GOVD_MAP_FORCE_PRESENT))
8789 case 0:
8790 kind = GOMP_MAP_TOFROM;
8791 break;
8792 case GOVD_MAP_FORCE:
8793 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8794 break;
8795 case GOVD_MAP_TO_ONLY:
8796 kind = GOMP_MAP_TO;
8797 break;
8798 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8799 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8800 break;
8801 case GOVD_MAP_FORCE_PRESENT:
8802 kind = GOMP_MAP_FORCE_PRESENT;
8803 break;
8804 default:
8805 gcc_unreachable ();
8807 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8808 if (DECL_SIZE (decl)
8809 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8811 tree decl2 = DECL_VALUE_EXPR (decl);
8812 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8813 decl2 = TREE_OPERAND (decl2, 0);
8814 gcc_assert (DECL_P (decl2));
8815 tree mem = build_simple_mem_ref (decl2);
8816 OMP_CLAUSE_DECL (clause) = mem;
8817 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8818 if (gimplify_omp_ctxp->outer_context)
8820 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8821 omp_notice_variable (ctx, decl2, true);
8822 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8824 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8825 OMP_CLAUSE_MAP);
8826 OMP_CLAUSE_DECL (nc) = decl;
8827 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8828 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8829 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8830 else
8831 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8832 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8833 OMP_CLAUSE_CHAIN (clause) = nc;
8835 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8836 && lang_hooks.decls.omp_privatize_by_reference (decl))
8838 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8839 OMP_CLAUSE_SIZE (clause)
8840 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8841 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8842 gimplify_omp_ctxp = ctx->outer_context;
8843 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8844 pre_p, NULL, is_gimple_val, fb_rvalue);
8845 gimplify_omp_ctxp = ctx;
8846 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8847 OMP_CLAUSE_MAP);
8848 OMP_CLAUSE_DECL (nc) = decl;
8849 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8850 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8851 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8852 OMP_CLAUSE_CHAIN (clause) = nc;
8854 else
8855 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8857 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8859 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8860 OMP_CLAUSE_DECL (nc) = decl;
8861 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8862 OMP_CLAUSE_CHAIN (nc) = chain;
8863 OMP_CLAUSE_CHAIN (clause) = nc;
8864 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8865 gimplify_omp_ctxp = ctx->outer_context;
8866 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8867 gimplify_omp_ctxp = ctx;
8869 *list_p = clause;
8870 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8871 gimplify_omp_ctxp = ctx->outer_context;
8872 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8873 if (gimplify_omp_ctxp)
8874 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8875 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8876 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8877 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8878 true);
8879 gimplify_omp_ctxp = ctx;
8880 return 0;
8883 static void
8884 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8885 enum tree_code code)
8887 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8888 tree c, decl;
8890 if (body)
8892 struct gimplify_omp_ctx *octx;
8893 for (octx = ctx; octx; octx = octx->outer_context)
8894 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8895 break;
8896 if (octx)
8898 struct walk_stmt_info wi;
8899 memset (&wi, 0, sizeof (wi));
8900 walk_gimple_seq (body, omp_find_stores_stmt,
8901 omp_find_stores_op, &wi);
8904 while ((c = *list_p) != NULL)
8906 splay_tree_node n;
8907 bool remove = false;
8909 switch (OMP_CLAUSE_CODE (c))
8911 case OMP_CLAUSE_FIRSTPRIVATE:
8912 if ((ctx->region_type & ORT_TARGET)
8913 && (ctx->region_type & ORT_ACC) == 0
8914 && TYPE_ATOMIC (strip_array_types
8915 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8917 error_at (OMP_CLAUSE_LOCATION (c),
8918 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8919 "%<target%> construct", OMP_CLAUSE_DECL (c));
8920 remove = true;
8921 break;
8923 /* FALLTHRU */
8924 case OMP_CLAUSE_PRIVATE:
8925 case OMP_CLAUSE_SHARED:
8926 case OMP_CLAUSE_LINEAR:
8927 decl = OMP_CLAUSE_DECL (c);
8928 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8929 remove = !(n->value & GOVD_SEEN);
8930 if (! remove)
8932 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8933 if ((n->value & GOVD_DEBUG_PRIVATE)
8934 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8936 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8937 || ((n->value & GOVD_DATA_SHARE_CLASS)
8938 == GOVD_SHARED));
8939 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8940 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8942 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8943 && (n->value & GOVD_WRITTEN) == 0
8944 && DECL_P (decl)
8945 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8946 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8947 else if (DECL_P (decl)
8948 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8949 && (n->value & GOVD_WRITTEN) != 0)
8950 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8951 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8952 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8953 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8955 break;
8957 case OMP_CLAUSE_LASTPRIVATE:
8958 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8959 accurately reflect the presence of a FIRSTPRIVATE clause. */
8960 decl = OMP_CLAUSE_DECL (c);
8961 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8962 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8963 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8964 if (code == OMP_DISTRIBUTE
8965 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8967 remove = true;
8968 error_at (OMP_CLAUSE_LOCATION (c),
8969 "same variable used in %<firstprivate%> and "
8970 "%<lastprivate%> clauses on %<distribute%> "
8971 "construct");
8973 if (!remove
8974 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8975 && DECL_P (decl)
8976 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8977 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8978 break;
8980 case OMP_CLAUSE_ALIGNED:
8981 decl = OMP_CLAUSE_DECL (c);
8982 if (!is_global_var (decl))
8984 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8985 remove = n == NULL || !(n->value & GOVD_SEEN);
8986 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8988 struct gimplify_omp_ctx *octx;
8989 if (n != NULL
8990 && (n->value & (GOVD_DATA_SHARE_CLASS
8991 & ~GOVD_FIRSTPRIVATE)))
8992 remove = true;
8993 else
8994 for (octx = ctx->outer_context; octx;
8995 octx = octx->outer_context)
8997 n = splay_tree_lookup (octx->variables,
8998 (splay_tree_key) decl);
8999 if (n == NULL)
9000 continue;
9001 if (n->value & GOVD_LOCAL)
9002 break;
9003 /* We have to avoid assigning a shared variable
9004 to itself when trying to add
9005 __builtin_assume_aligned. */
9006 if (n->value & GOVD_SHARED)
9008 remove = true;
9009 break;
9014 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9016 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9017 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9018 remove = true;
9020 break;
9022 case OMP_CLAUSE_MAP:
9023 if (code == OMP_TARGET_EXIT_DATA
9024 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9026 remove = true;
9027 break;
9029 decl = OMP_CLAUSE_DECL (c);
9030 /* Data clauses associated with acc parallel reductions must be
9031 compatible with present_or_copy. Warn and adjust the clause
9032 if that is not the case. */
9033 if (ctx->region_type == ORT_ACC_PARALLEL)
9035 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9036 n = NULL;
9038 if (DECL_P (t))
9039 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9041 if (n && (n->value & GOVD_REDUCTION))
9043 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9045 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9046 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9047 && kind != GOMP_MAP_FORCE_PRESENT
9048 && kind != GOMP_MAP_POINTER)
9050 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9051 "incompatible data clause with reduction "
9052 "on %qE; promoting to present_or_copy",
9053 DECL_NAME (t));
9054 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9058 if (!DECL_P (decl))
9060 if ((ctx->region_type & ORT_TARGET) != 0
9061 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9063 if (TREE_CODE (decl) == INDIRECT_REF
9064 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9065 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9066 == REFERENCE_TYPE))
9067 decl = TREE_OPERAND (decl, 0);
9068 if (TREE_CODE (decl) == COMPONENT_REF)
9070 while (TREE_CODE (decl) == COMPONENT_REF)
9071 decl = TREE_OPERAND (decl, 0);
9072 if (DECL_P (decl))
9074 n = splay_tree_lookup (ctx->variables,
9075 (splay_tree_key) decl);
9076 if (!(n->value & GOVD_SEEN))
9077 remove = true;
9081 break;
9083 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9084 if ((ctx->region_type & ORT_TARGET) != 0
9085 && !(n->value & GOVD_SEEN)
9086 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9087 && (!is_global_var (decl)
9088 || !lookup_attribute ("omp declare target link",
9089 DECL_ATTRIBUTES (decl))))
9091 remove = true;
9092 /* For struct element mapping, if struct is never referenced
9093 in target block and none of the mapping has always modifier,
9094 remove all the struct element mappings, which immediately
9095 follow the GOMP_MAP_STRUCT map clause. */
9096 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9098 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9099 while (cnt--)
9100 OMP_CLAUSE_CHAIN (c)
9101 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9104 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9105 && code == OMP_TARGET_EXIT_DATA)
9106 remove = true;
9107 else if (DECL_SIZE (decl)
9108 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9109 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9110 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9111 && (OMP_CLAUSE_MAP_KIND (c)
9112 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9114 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9115 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9116 INTEGER_CST. */
9117 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9119 tree decl2 = DECL_VALUE_EXPR (decl);
9120 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9121 decl2 = TREE_OPERAND (decl2, 0);
9122 gcc_assert (DECL_P (decl2));
9123 tree mem = build_simple_mem_ref (decl2);
9124 OMP_CLAUSE_DECL (c) = mem;
9125 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9126 if (ctx->outer_context)
9128 omp_notice_variable (ctx->outer_context, decl2, true);
9129 omp_notice_variable (ctx->outer_context,
9130 OMP_CLAUSE_SIZE (c), true);
9132 if (((ctx->region_type & ORT_TARGET) != 0
9133 || !ctx->target_firstprivatize_array_bases)
9134 && ((n->value & GOVD_SEEN) == 0
9135 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9137 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9138 OMP_CLAUSE_MAP);
9139 OMP_CLAUSE_DECL (nc) = decl;
9140 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9141 if (ctx->target_firstprivatize_array_bases)
9142 OMP_CLAUSE_SET_MAP_KIND (nc,
9143 GOMP_MAP_FIRSTPRIVATE_POINTER);
9144 else
9145 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9146 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9147 OMP_CLAUSE_CHAIN (c) = nc;
9148 c = nc;
9151 else
9153 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9154 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9155 gcc_assert ((n->value & GOVD_SEEN) == 0
9156 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9157 == 0));
9159 break;
9161 case OMP_CLAUSE_TO:
9162 case OMP_CLAUSE_FROM:
9163 case OMP_CLAUSE__CACHE_:
9164 decl = OMP_CLAUSE_DECL (c);
9165 if (!DECL_P (decl))
9166 break;
9167 if (DECL_SIZE (decl)
9168 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9170 tree decl2 = DECL_VALUE_EXPR (decl);
9171 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9172 decl2 = TREE_OPERAND (decl2, 0);
9173 gcc_assert (DECL_P (decl2));
9174 tree mem = build_simple_mem_ref (decl2);
9175 OMP_CLAUSE_DECL (c) = mem;
9176 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9177 if (ctx->outer_context)
9179 omp_notice_variable (ctx->outer_context, decl2, true);
9180 omp_notice_variable (ctx->outer_context,
9181 OMP_CLAUSE_SIZE (c), true);
9184 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9185 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9186 break;
9188 case OMP_CLAUSE_REDUCTION:
9189 decl = OMP_CLAUSE_DECL (c);
9190 /* OpenACC reductions need a present_or_copy data clause.
9191 Add one if necessary. Error is the reduction is private. */
9192 if (ctx->region_type == ORT_ACC_PARALLEL)
9194 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9195 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9196 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9197 "reduction on %qE", DECL_NAME (decl));
9198 else if ((n->value & GOVD_MAP) == 0)
9200 tree next = OMP_CLAUSE_CHAIN (c);
9201 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9202 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9203 OMP_CLAUSE_DECL (nc) = decl;
9204 OMP_CLAUSE_CHAIN (c) = nc;
9205 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9206 while (1)
9208 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9209 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9210 break;
9211 nc = OMP_CLAUSE_CHAIN (nc);
9213 OMP_CLAUSE_CHAIN (nc) = next;
9214 n->value |= GOVD_MAP;
9217 if (DECL_P (decl)
9218 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9219 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9220 break;
9221 case OMP_CLAUSE_COPYIN:
9222 case OMP_CLAUSE_COPYPRIVATE:
9223 case OMP_CLAUSE_IF:
9224 case OMP_CLAUSE_NUM_THREADS:
9225 case OMP_CLAUSE_NUM_TEAMS:
9226 case OMP_CLAUSE_THREAD_LIMIT:
9227 case OMP_CLAUSE_DIST_SCHEDULE:
9228 case OMP_CLAUSE_DEVICE:
9229 case OMP_CLAUSE_SCHEDULE:
9230 case OMP_CLAUSE_NOWAIT:
9231 case OMP_CLAUSE_ORDERED:
9232 case OMP_CLAUSE_DEFAULT:
9233 case OMP_CLAUSE_UNTIED:
9234 case OMP_CLAUSE_COLLAPSE:
9235 case OMP_CLAUSE_FINAL:
9236 case OMP_CLAUSE_MERGEABLE:
9237 case OMP_CLAUSE_PROC_BIND:
9238 case OMP_CLAUSE_SAFELEN:
9239 case OMP_CLAUSE_SIMDLEN:
9240 case OMP_CLAUSE_DEPEND:
9241 case OMP_CLAUSE_PRIORITY:
9242 case OMP_CLAUSE_GRAINSIZE:
9243 case OMP_CLAUSE_NUM_TASKS:
9244 case OMP_CLAUSE_NOGROUP:
9245 case OMP_CLAUSE_THREADS:
9246 case OMP_CLAUSE_SIMD:
9247 case OMP_CLAUSE_HINT:
9248 case OMP_CLAUSE_DEFAULTMAP:
9249 case OMP_CLAUSE_USE_DEVICE_PTR:
9250 case OMP_CLAUSE_IS_DEVICE_PTR:
9251 case OMP_CLAUSE_ASYNC:
9252 case OMP_CLAUSE_WAIT:
9253 case OMP_CLAUSE_INDEPENDENT:
9254 case OMP_CLAUSE_NUM_GANGS:
9255 case OMP_CLAUSE_NUM_WORKERS:
9256 case OMP_CLAUSE_VECTOR_LENGTH:
9257 case OMP_CLAUSE_GANG:
9258 case OMP_CLAUSE_WORKER:
9259 case OMP_CLAUSE_VECTOR:
9260 case OMP_CLAUSE_AUTO:
9261 case OMP_CLAUSE_SEQ:
9262 case OMP_CLAUSE_TILE:
9263 break;
9265 default:
9266 gcc_unreachable ();
9269 if (remove)
9270 *list_p = OMP_CLAUSE_CHAIN (c);
9271 else
9272 list_p = &OMP_CLAUSE_CHAIN (c);
9275 /* Add in any implicit data sharing. */
9276 struct gimplify_adjust_omp_clauses_data data;
9277 data.list_p = list_p;
9278 data.pre_p = pre_p;
9279 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9281 gimplify_omp_ctxp = ctx->outer_context;
9282 delete_omp_context (ctx);
9285 /* Gimplify OACC_CACHE. */
9287 static void
9288 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9290 tree expr = *expr_p;
9292 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9293 OACC_CACHE);
9294 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9295 OACC_CACHE);
9297 /* TODO: Do something sensible with this information. */
9299 *expr_p = NULL_TREE;
9302 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9303 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9304 kind. The entry kind will replace the one in CLAUSE, while the exit
9305 kind will be used in a new omp_clause and returned to the caller. */
9307 static tree
9308 gimplify_oacc_declare_1 (tree clause)
9310 HOST_WIDE_INT kind, new_op;
9311 bool ret = false;
9312 tree c = NULL;
9314 kind = OMP_CLAUSE_MAP_KIND (clause);
9316 switch (kind)
9318 case GOMP_MAP_ALLOC:
9319 case GOMP_MAP_FORCE_ALLOC:
9320 case GOMP_MAP_FORCE_TO:
9321 new_op = GOMP_MAP_DELETE;
9322 ret = true;
9323 break;
9325 case GOMP_MAP_FORCE_FROM:
9326 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9327 new_op = GOMP_MAP_FORCE_FROM;
9328 ret = true;
9329 break;
9331 case GOMP_MAP_FORCE_TOFROM:
9332 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9333 new_op = GOMP_MAP_FORCE_FROM;
9334 ret = true;
9335 break;
9337 case GOMP_MAP_FROM:
9338 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9339 new_op = GOMP_MAP_FROM;
9340 ret = true;
9341 break;
9343 case GOMP_MAP_TOFROM:
9344 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9345 new_op = GOMP_MAP_FROM;
9346 ret = true;
9347 break;
9349 case GOMP_MAP_DEVICE_RESIDENT:
9350 case GOMP_MAP_FORCE_DEVICEPTR:
9351 case GOMP_MAP_FORCE_PRESENT:
9352 case GOMP_MAP_LINK:
9353 case GOMP_MAP_POINTER:
9354 case GOMP_MAP_TO:
9355 break;
9357 default:
9358 gcc_unreachable ();
9359 break;
9362 if (ret)
9364 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9365 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9366 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9369 return c;
9372 /* Gimplify OACC_DECLARE. */
9374 static void
9375 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9377 tree expr = *expr_p;
9378 gomp_target *stmt;
9379 tree clauses, t, decl;
9381 clauses = OACC_DECLARE_CLAUSES (expr);
9383 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9384 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9386 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9388 decl = OMP_CLAUSE_DECL (t);
9390 if (TREE_CODE (decl) == MEM_REF)
9391 decl = TREE_OPERAND (decl, 0);
9393 if (VAR_P (decl) && !is_oacc_declared (decl))
9395 tree attr = get_identifier ("oacc declare target");
9396 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9397 DECL_ATTRIBUTES (decl));
9400 if (VAR_P (decl)
9401 && !is_global_var (decl)
9402 && DECL_CONTEXT (decl) == current_function_decl)
9404 tree c = gimplify_oacc_declare_1 (t);
9405 if (c)
9407 if (oacc_declare_returns == NULL)
9408 oacc_declare_returns = new hash_map<tree, tree>;
9410 oacc_declare_returns->put (decl, c);
9414 if (gimplify_omp_ctxp)
9415 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9418 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9419 clauses);
9421 gimplify_seq_add_stmt (pre_p, stmt);
9423 *expr_p = NULL_TREE;
9426 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9427 gimplification of the body, as well as scanning the body for used
9428 variables. We need to do this scan now, because variable-sized
9429 decls will be decomposed during gimplification. */
9431 static void
9432 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9434 tree expr = *expr_p;
9435 gimple *g;
9436 gimple_seq body = NULL;
9438 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9439 OMP_PARALLEL_COMBINED (expr)
9440 ? ORT_COMBINED_PARALLEL
9441 : ORT_PARALLEL, OMP_PARALLEL);
9443 push_gimplify_context ();
9445 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9446 if (gimple_code (g) == GIMPLE_BIND)
9447 pop_gimplify_context (g);
9448 else
9449 pop_gimplify_context (NULL);
9451 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9452 OMP_PARALLEL);
9454 g = gimple_build_omp_parallel (body,
9455 OMP_PARALLEL_CLAUSES (expr),
9456 NULL_TREE, NULL_TREE);
9457 if (OMP_PARALLEL_COMBINED (expr))
9458 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9459 gimplify_seq_add_stmt (pre_p, g);
9460 *expr_p = NULL_TREE;
9463 /* Gimplify the contents of an OMP_TASK statement. This involves
9464 gimplification of the body, as well as scanning the body for used
9465 variables. We need to do this scan now, because variable-sized
9466 decls will be decomposed during gimplification. */
9468 static void
9469 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9471 tree expr = *expr_p;
9472 gimple *g;
9473 gimple_seq body = NULL;
9475 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9476 omp_find_clause (OMP_TASK_CLAUSES (expr),
9477 OMP_CLAUSE_UNTIED)
9478 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9480 push_gimplify_context ();
9482 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9483 if (gimple_code (g) == GIMPLE_BIND)
9484 pop_gimplify_context (g);
9485 else
9486 pop_gimplify_context (NULL);
9488 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9489 OMP_TASK);
9491 g = gimple_build_omp_task (body,
9492 OMP_TASK_CLAUSES (expr),
9493 NULL_TREE, NULL_TREE,
9494 NULL_TREE, NULL_TREE, NULL_TREE);
9495 gimplify_seq_add_stmt (pre_p, g);
9496 *expr_p = NULL_TREE;
9499 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9500 with non-NULL OMP_FOR_INIT. */
9502 static tree
9503 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9505 *walk_subtrees = 0;
9506 switch (TREE_CODE (*tp))
9508 case OMP_FOR:
9509 *walk_subtrees = 1;
9510 /* FALLTHRU */
9511 case OMP_SIMD:
9512 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9513 return *tp;
9514 break;
9515 case BIND_EXPR:
9516 case STATEMENT_LIST:
9517 case OMP_PARALLEL:
9518 *walk_subtrees = 1;
9519 break;
9520 default:
9521 break;
9523 return NULL_TREE;
9526 /* Gimplify the gross structure of an OMP_FOR statement. */
9528 static enum gimplify_status
9529 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9531 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9532 enum gimplify_status ret = GS_ALL_DONE;
9533 enum gimplify_status tret;
9534 gomp_for *gfor;
9535 gimple_seq for_body, for_pre_body;
9536 int i;
9537 bitmap has_decl_expr = NULL;
9538 enum omp_region_type ort = ORT_WORKSHARE;
9540 orig_for_stmt = for_stmt = *expr_p;
9542 switch (TREE_CODE (for_stmt))
9544 case OMP_FOR:
9545 case OMP_DISTRIBUTE:
9546 break;
9547 case OACC_LOOP:
9548 ort = ORT_ACC;
9549 break;
9550 case OMP_TASKLOOP:
9551 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9552 ort = ORT_UNTIED_TASK;
9553 else
9554 ort = ORT_TASK;
9555 break;
9556 case OMP_SIMD:
9557 ort = ORT_SIMD;
9558 break;
9559 default:
9560 gcc_unreachable ();
9563 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9564 clause for the IV. */
9565 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9567 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9568 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9569 decl = TREE_OPERAND (t, 0);
9570 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9571 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9572 && OMP_CLAUSE_DECL (c) == decl)
9574 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9575 break;
9579 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9581 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9582 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9583 find_combined_omp_for, NULL, NULL);
9584 if (inner_for_stmt == NULL_TREE)
9586 gcc_assert (seen_error ());
9587 *expr_p = NULL_TREE;
9588 return GS_ERROR;
9592 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9593 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9594 TREE_CODE (for_stmt));
9596 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9597 gimplify_omp_ctxp->distribute = true;
9599 /* Handle OMP_FOR_INIT. */
9600 for_pre_body = NULL;
9601 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9603 has_decl_expr = BITMAP_ALLOC (NULL);
9604 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9605 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9606 == VAR_DECL)
9608 t = OMP_FOR_PRE_BODY (for_stmt);
9609 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9611 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9613 tree_stmt_iterator si;
9614 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9615 tsi_next (&si))
9617 t = tsi_stmt (si);
9618 if (TREE_CODE (t) == DECL_EXPR
9619 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9620 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9624 if (OMP_FOR_PRE_BODY (for_stmt))
9626 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9627 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9628 else
9630 struct gimplify_omp_ctx ctx;
9631 memset (&ctx, 0, sizeof (ctx));
9632 ctx.region_type = ORT_NONE;
9633 gimplify_omp_ctxp = &ctx;
9634 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9635 gimplify_omp_ctxp = NULL;
9638 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9640 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9641 for_stmt = inner_for_stmt;
9643 /* For taskloop, need to gimplify the start, end and step before the
9644 taskloop, outside of the taskloop omp context. */
9645 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9647 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9649 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9650 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9652 TREE_OPERAND (t, 1)
9653 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9654 pre_p, NULL, false);
9655 tree c = build_omp_clause (input_location,
9656 OMP_CLAUSE_FIRSTPRIVATE);
9657 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9658 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9659 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9662 /* Handle OMP_FOR_COND. */
9663 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9664 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9666 TREE_OPERAND (t, 1)
9667 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9668 gimple_seq_empty_p (for_pre_body)
9669 ? pre_p : &for_pre_body, NULL,
9670 false);
9671 tree c = build_omp_clause (input_location,
9672 OMP_CLAUSE_FIRSTPRIVATE);
9673 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9674 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9675 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9678 /* Handle OMP_FOR_INCR. */
9679 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9680 if (TREE_CODE (t) == MODIFY_EXPR)
9682 decl = TREE_OPERAND (t, 0);
9683 t = TREE_OPERAND (t, 1);
9684 tree *tp = &TREE_OPERAND (t, 1);
9685 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9686 tp = &TREE_OPERAND (t, 0);
9688 if (!is_gimple_constant (*tp))
9690 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9691 ? pre_p : &for_pre_body;
9692 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9693 tree c = build_omp_clause (input_location,
9694 OMP_CLAUSE_FIRSTPRIVATE);
9695 OMP_CLAUSE_DECL (c) = *tp;
9696 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9697 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9702 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9703 OMP_TASKLOOP);
9706 if (orig_for_stmt != for_stmt)
9707 gimplify_omp_ctxp->combined_loop = true;
9709 for_body = NULL;
9710 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9711 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9712 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9713 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9715 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9716 bool is_doacross = false;
9717 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9719 is_doacross = true;
9720 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9721 (OMP_FOR_INIT (for_stmt))
9722 * 2);
9724 int collapse = 1, tile = 0;
9725 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9726 if (c)
9727 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9728 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9729 if (c)
9730 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9731 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9733 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9734 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9735 decl = TREE_OPERAND (t, 0);
9736 gcc_assert (DECL_P (decl));
9737 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9738 || POINTER_TYPE_P (TREE_TYPE (decl)));
9739 if (is_doacross)
9741 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9742 gimplify_omp_ctxp->loop_iter_var.quick_push
9743 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9744 else
9745 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9746 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9749 /* Make sure the iteration variable is private. */
9750 tree c = NULL_TREE;
9751 tree c2 = NULL_TREE;
9752 if (orig_for_stmt != for_stmt)
9753 /* Do this only on innermost construct for combined ones. */;
9754 else if (ort == ORT_SIMD)
9756 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9757 (splay_tree_key) decl);
9758 omp_is_private (gimplify_omp_ctxp, decl,
9759 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9760 != 1));
9761 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9762 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9763 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9765 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9766 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9767 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9768 if (has_decl_expr
9769 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9771 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9772 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9774 struct gimplify_omp_ctx *outer
9775 = gimplify_omp_ctxp->outer_context;
9776 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9778 if (outer->region_type == ORT_WORKSHARE
9779 && outer->combined_loop)
9781 n = splay_tree_lookup (outer->variables,
9782 (splay_tree_key)decl);
9783 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9785 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9786 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9788 else
9790 struct gimplify_omp_ctx *octx = outer->outer_context;
9791 if (octx
9792 && octx->region_type == ORT_COMBINED_PARALLEL
9793 && octx->outer_context
9794 && (octx->outer_context->region_type
9795 == ORT_WORKSHARE)
9796 && octx->outer_context->combined_loop)
9798 octx = octx->outer_context;
9799 n = splay_tree_lookup (octx->variables,
9800 (splay_tree_key)decl);
9801 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9803 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9804 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9811 OMP_CLAUSE_DECL (c) = decl;
9812 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9813 OMP_FOR_CLAUSES (for_stmt) = c;
9814 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9815 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9817 if (outer->region_type == ORT_WORKSHARE
9818 && outer->combined_loop)
9820 if (outer->outer_context
9821 && (outer->outer_context->region_type
9822 == ORT_COMBINED_PARALLEL))
9823 outer = outer->outer_context;
9824 else if (omp_check_private (outer, decl, false))
9825 outer = NULL;
9827 else if (((outer->region_type & ORT_TASK) != 0)
9828 && outer->combined_loop
9829 && !omp_check_private (gimplify_omp_ctxp,
9830 decl, false))
9832 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9834 omp_notice_variable (outer, decl, true);
9835 outer = NULL;
9837 if (outer)
9839 n = splay_tree_lookup (outer->variables,
9840 (splay_tree_key)decl);
9841 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9843 omp_add_variable (outer, decl,
9844 GOVD_LASTPRIVATE | GOVD_SEEN);
9845 if (outer->region_type == ORT_COMBINED_PARALLEL
9846 && outer->outer_context
9847 && (outer->outer_context->region_type
9848 == ORT_WORKSHARE)
9849 && outer->outer_context->combined_loop)
9851 outer = outer->outer_context;
9852 n = splay_tree_lookup (outer->variables,
9853 (splay_tree_key)decl);
9854 if (omp_check_private (outer, decl, false))
9855 outer = NULL;
9856 else if (n == NULL
9857 || ((n->value & GOVD_DATA_SHARE_CLASS)
9858 == 0))
9859 omp_add_variable (outer, decl,
9860 GOVD_LASTPRIVATE
9861 | GOVD_SEEN);
9862 else
9863 outer = NULL;
9865 if (outer && outer->outer_context
9866 && (outer->outer_context->region_type
9867 == ORT_COMBINED_TEAMS))
9869 outer = outer->outer_context;
9870 n = splay_tree_lookup (outer->variables,
9871 (splay_tree_key)decl);
9872 if (n == NULL
9873 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9874 omp_add_variable (outer, decl,
9875 GOVD_SHARED | GOVD_SEEN);
9876 else
9877 outer = NULL;
9879 if (outer && outer->outer_context)
9880 omp_notice_variable (outer->outer_context, decl,
9881 true);
9886 else
9888 bool lastprivate
9889 = (!has_decl_expr
9890 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9891 struct gimplify_omp_ctx *outer
9892 = gimplify_omp_ctxp->outer_context;
9893 if (outer && lastprivate)
9895 if (outer->region_type == ORT_WORKSHARE
9896 && outer->combined_loop)
9898 n = splay_tree_lookup (outer->variables,
9899 (splay_tree_key)decl);
9900 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9902 lastprivate = false;
9903 outer = NULL;
9905 else if (outer->outer_context
9906 && (outer->outer_context->region_type
9907 == ORT_COMBINED_PARALLEL))
9908 outer = outer->outer_context;
9909 else if (omp_check_private (outer, decl, false))
9910 outer = NULL;
9912 else if (((outer->region_type & ORT_TASK) != 0)
9913 && outer->combined_loop
9914 && !omp_check_private (gimplify_omp_ctxp,
9915 decl, false))
9917 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9919 omp_notice_variable (outer, decl, true);
9920 outer = NULL;
9922 if (outer)
9924 n = splay_tree_lookup (outer->variables,
9925 (splay_tree_key)decl);
9926 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9928 omp_add_variable (outer, decl,
9929 GOVD_LASTPRIVATE | GOVD_SEEN);
9930 if (outer->region_type == ORT_COMBINED_PARALLEL
9931 && outer->outer_context
9932 && (outer->outer_context->region_type
9933 == ORT_WORKSHARE)
9934 && outer->outer_context->combined_loop)
9936 outer = outer->outer_context;
9937 n = splay_tree_lookup (outer->variables,
9938 (splay_tree_key)decl);
9939 if (omp_check_private (outer, decl, false))
9940 outer = NULL;
9941 else if (n == NULL
9942 || ((n->value & GOVD_DATA_SHARE_CLASS)
9943 == 0))
9944 omp_add_variable (outer, decl,
9945 GOVD_LASTPRIVATE
9946 | GOVD_SEEN);
9947 else
9948 outer = NULL;
9950 if (outer && outer->outer_context
9951 && (outer->outer_context->region_type
9952 == ORT_COMBINED_TEAMS))
9954 outer = outer->outer_context;
9955 n = splay_tree_lookup (outer->variables,
9956 (splay_tree_key)decl);
9957 if (n == NULL
9958 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9959 omp_add_variable (outer, decl,
9960 GOVD_SHARED | GOVD_SEEN);
9961 else
9962 outer = NULL;
9964 if (outer && outer->outer_context)
9965 omp_notice_variable (outer->outer_context, decl,
9966 true);
9971 c = build_omp_clause (input_location,
9972 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9973 : OMP_CLAUSE_PRIVATE);
9974 OMP_CLAUSE_DECL (c) = decl;
9975 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9976 OMP_FOR_CLAUSES (for_stmt) = c;
9977 omp_add_variable (gimplify_omp_ctxp, decl,
9978 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9979 | GOVD_EXPLICIT | GOVD_SEEN);
9980 c = NULL_TREE;
9983 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9984 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9985 else
9986 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9988 /* If DECL is not a gimple register, create a temporary variable to act
9989 as an iteration counter. This is valid, since DECL cannot be
9990 modified in the body of the loop. Similarly for any iteration vars
9991 in simd with collapse > 1 where the iterator vars must be
9992 lastprivate. */
9993 if (orig_for_stmt != for_stmt)
9994 var = decl;
9995 else if (!is_gimple_reg (decl)
9996 || (ort == ORT_SIMD
9997 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9999 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10000 /* Make sure omp_add_variable is not called on it prematurely.
10001 We call it ourselves a few lines later. */
10002 gimplify_omp_ctxp = NULL;
10003 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10004 gimplify_omp_ctxp = ctx;
10005 TREE_OPERAND (t, 0) = var;
10007 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
10009 if (ort == ORT_SIMD
10010 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10012 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10013 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
10014 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
10015 OMP_CLAUSE_DECL (c2) = var;
10016 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
10017 OMP_FOR_CLAUSES (for_stmt) = c2;
10018 omp_add_variable (gimplify_omp_ctxp, var,
10019 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10020 if (c == NULL_TREE)
10022 c = c2;
10023 c2 = NULL_TREE;
10026 else
10027 omp_add_variable (gimplify_omp_ctxp, var,
10028 GOVD_PRIVATE | GOVD_SEEN);
10030 else
10031 var = decl;
10033 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10034 is_gimple_val, fb_rvalue, false);
10035 ret = MIN (ret, tret);
10036 if (ret == GS_ERROR)
10037 return ret;
10039 /* Handle OMP_FOR_COND. */
10040 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10041 gcc_assert (COMPARISON_CLASS_P (t));
10042 gcc_assert (TREE_OPERAND (t, 0) == decl);
10044 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10045 is_gimple_val, fb_rvalue, false);
10046 ret = MIN (ret, tret);
10048 /* Handle OMP_FOR_INCR. */
10049 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10050 switch (TREE_CODE (t))
10052 case PREINCREMENT_EXPR:
10053 case POSTINCREMENT_EXPR:
10055 tree decl = TREE_OPERAND (t, 0);
10056 /* c_omp_for_incr_canonicalize_ptr() should have been
10057 called to massage things appropriately. */
10058 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10060 if (orig_for_stmt != for_stmt)
10061 break;
10062 t = build_int_cst (TREE_TYPE (decl), 1);
10063 if (c)
10064 OMP_CLAUSE_LINEAR_STEP (c) = t;
10065 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10066 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10067 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10068 break;
10071 case PREDECREMENT_EXPR:
10072 case POSTDECREMENT_EXPR:
10073 /* c_omp_for_incr_canonicalize_ptr() should have been
10074 called to massage things appropriately. */
10075 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10076 if (orig_for_stmt != for_stmt)
10077 break;
10078 t = build_int_cst (TREE_TYPE (decl), -1);
10079 if (c)
10080 OMP_CLAUSE_LINEAR_STEP (c) = t;
10081 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10082 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10083 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10084 break;
10086 case MODIFY_EXPR:
10087 gcc_assert (TREE_OPERAND (t, 0) == decl);
10088 TREE_OPERAND (t, 0) = var;
10090 t = TREE_OPERAND (t, 1);
10091 switch (TREE_CODE (t))
10093 case PLUS_EXPR:
10094 if (TREE_OPERAND (t, 1) == decl)
10096 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10097 TREE_OPERAND (t, 0) = var;
10098 break;
10101 /* Fallthru. */
10102 case MINUS_EXPR:
10103 case POINTER_PLUS_EXPR:
10104 gcc_assert (TREE_OPERAND (t, 0) == decl);
10105 TREE_OPERAND (t, 0) = var;
10106 break;
10107 default:
10108 gcc_unreachable ();
10111 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10112 is_gimple_val, fb_rvalue, false);
10113 ret = MIN (ret, tret);
10114 if (c)
10116 tree step = TREE_OPERAND (t, 1);
10117 tree stept = TREE_TYPE (decl);
10118 if (POINTER_TYPE_P (stept))
10119 stept = sizetype;
10120 step = fold_convert (stept, step);
10121 if (TREE_CODE (t) == MINUS_EXPR)
10122 step = fold_build1 (NEGATE_EXPR, stept, step);
10123 OMP_CLAUSE_LINEAR_STEP (c) = step;
10124 if (step != TREE_OPERAND (t, 1))
10126 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10127 &for_pre_body, NULL,
10128 is_gimple_val, fb_rvalue, false);
10129 ret = MIN (ret, tret);
10132 break;
10134 default:
10135 gcc_unreachable ();
10138 if (c2)
10140 gcc_assert (c);
10141 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10144 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10146 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10147 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10148 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10149 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10150 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10151 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10152 && OMP_CLAUSE_DECL (c) == decl)
10154 if (is_doacross && (collapse == 1 || i >= collapse))
10155 t = var;
10156 else
10158 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10159 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10160 gcc_assert (TREE_OPERAND (t, 0) == var);
10161 t = TREE_OPERAND (t, 1);
10162 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10163 || TREE_CODE (t) == MINUS_EXPR
10164 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10165 gcc_assert (TREE_OPERAND (t, 0) == var);
10166 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10167 is_doacross ? var : decl,
10168 TREE_OPERAND (t, 1));
10170 gimple_seq *seq;
10171 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10172 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10173 else
10174 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10175 gimplify_assign (decl, t, seq);
10180 BITMAP_FREE (has_decl_expr);
10182 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10184 push_gimplify_context ();
10185 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10187 OMP_FOR_BODY (orig_for_stmt)
10188 = build3 (BIND_EXPR, void_type_node, NULL,
10189 OMP_FOR_BODY (orig_for_stmt), NULL);
10190 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10194 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10195 &for_body);
10197 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10199 if (gimple_code (g) == GIMPLE_BIND)
10200 pop_gimplify_context (g);
10201 else
10202 pop_gimplify_context (NULL);
10205 if (orig_for_stmt != for_stmt)
10206 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10208 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10209 decl = TREE_OPERAND (t, 0);
10210 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10211 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10212 gimplify_omp_ctxp = ctx->outer_context;
10213 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10214 gimplify_omp_ctxp = ctx;
10215 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10216 TREE_OPERAND (t, 0) = var;
10217 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10218 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10219 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10222 gimplify_adjust_omp_clauses (pre_p, for_body,
10223 &OMP_FOR_CLAUSES (orig_for_stmt),
10224 TREE_CODE (orig_for_stmt));
10226 int kind;
10227 switch (TREE_CODE (orig_for_stmt))
10229 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10230 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10231 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10232 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10233 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10234 default:
10235 gcc_unreachable ();
10237 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10238 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10239 for_pre_body);
10240 if (orig_for_stmt != for_stmt)
10241 gimple_omp_for_set_combined_p (gfor, true);
10242 if (gimplify_omp_ctxp
10243 && (gimplify_omp_ctxp->combined_loop
10244 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10245 && gimplify_omp_ctxp->outer_context
10246 && gimplify_omp_ctxp->outer_context->combined_loop)))
10248 gimple_omp_for_set_combined_into_p (gfor, true);
10249 if (gimplify_omp_ctxp->combined_loop)
10250 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10251 else
10252 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10255 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10257 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10258 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10259 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10260 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10261 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10262 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10263 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10264 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10267 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10268 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10269 The outer taskloop stands for computing the number of iterations,
10270 counts for collapsed loops and holding taskloop specific clauses.
10271 The task construct stands for the effect of data sharing on the
10272 explicit task it creates and the inner taskloop stands for expansion
10273 of the static loop inside of the explicit task construct. */
10274 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10276 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10277 tree task_clauses = NULL_TREE;
10278 tree c = *gfor_clauses_ptr;
10279 tree *gtask_clauses_ptr = &task_clauses;
10280 tree outer_for_clauses = NULL_TREE;
10281 tree *gforo_clauses_ptr = &outer_for_clauses;
10282 for (; c; c = OMP_CLAUSE_CHAIN (c))
10283 switch (OMP_CLAUSE_CODE (c))
10285 /* These clauses are allowed on task, move them there. */
10286 case OMP_CLAUSE_SHARED:
10287 case OMP_CLAUSE_FIRSTPRIVATE:
10288 case OMP_CLAUSE_DEFAULT:
10289 case OMP_CLAUSE_IF:
10290 case OMP_CLAUSE_UNTIED:
10291 case OMP_CLAUSE_FINAL:
10292 case OMP_CLAUSE_MERGEABLE:
10293 case OMP_CLAUSE_PRIORITY:
10294 *gtask_clauses_ptr = c;
10295 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10296 break;
10297 case OMP_CLAUSE_PRIVATE:
10298 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10300 /* We want private on outer for and firstprivate
10301 on task. */
10302 *gtask_clauses_ptr
10303 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10304 OMP_CLAUSE_FIRSTPRIVATE);
10305 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10306 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10307 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10308 *gforo_clauses_ptr = c;
10309 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10311 else
10313 *gtask_clauses_ptr = c;
10314 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10316 break;
10317 /* These clauses go into outer taskloop clauses. */
10318 case OMP_CLAUSE_GRAINSIZE:
10319 case OMP_CLAUSE_NUM_TASKS:
10320 case OMP_CLAUSE_NOGROUP:
10321 *gforo_clauses_ptr = c;
10322 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10323 break;
10324 /* Taskloop clause we duplicate on both taskloops. */
10325 case OMP_CLAUSE_COLLAPSE:
10326 *gfor_clauses_ptr = c;
10327 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10328 *gforo_clauses_ptr = copy_node (c);
10329 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10330 break;
10331 /* For lastprivate, keep the clause on inner taskloop, and add
10332 a shared clause on task. If the same decl is also firstprivate,
10333 add also firstprivate clause on the inner taskloop. */
10334 case OMP_CLAUSE_LASTPRIVATE:
10335 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10337 /* For taskloop C++ lastprivate IVs, we want:
10338 1) private on outer taskloop
10339 2) firstprivate and shared on task
10340 3) lastprivate on inner taskloop */
10341 *gtask_clauses_ptr
10342 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10343 OMP_CLAUSE_FIRSTPRIVATE);
10344 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10345 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10346 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10347 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10348 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10349 OMP_CLAUSE_PRIVATE);
10350 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10351 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10352 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10353 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10355 *gfor_clauses_ptr = c;
10356 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10357 *gtask_clauses_ptr
10358 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10359 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10360 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10361 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10362 gtask_clauses_ptr
10363 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10364 break;
10365 default:
10366 gcc_unreachable ();
10368 *gfor_clauses_ptr = NULL_TREE;
10369 *gtask_clauses_ptr = NULL_TREE;
10370 *gforo_clauses_ptr = NULL_TREE;
10371 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10372 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10373 NULL_TREE, NULL_TREE, NULL_TREE);
10374 gimple_omp_task_set_taskloop_p (g, true);
10375 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10376 gomp_for *gforo
10377 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10378 gimple_omp_for_collapse (gfor),
10379 gimple_omp_for_pre_body (gfor));
10380 gimple_omp_for_set_pre_body (gfor, NULL);
10381 gimple_omp_for_set_combined_p (gforo, true);
10382 gimple_omp_for_set_combined_into_p (gfor, true);
10383 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10385 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10386 tree v = create_tmp_var (type);
10387 gimple_omp_for_set_index (gforo, i, v);
10388 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10389 gimple_omp_for_set_initial (gforo, i, t);
10390 gimple_omp_for_set_cond (gforo, i,
10391 gimple_omp_for_cond (gfor, i));
10392 t = unshare_expr (gimple_omp_for_final (gfor, i));
10393 gimple_omp_for_set_final (gforo, i, t);
10394 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10395 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10396 TREE_OPERAND (t, 0) = v;
10397 gimple_omp_for_set_incr (gforo, i, t);
10398 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10399 OMP_CLAUSE_DECL (t) = v;
10400 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10401 gimple_omp_for_set_clauses (gforo, t);
10403 gimplify_seq_add_stmt (pre_p, gforo);
10405 else
10406 gimplify_seq_add_stmt (pre_p, gfor);
10407 if (ret != GS_ALL_DONE)
10408 return GS_ERROR;
10409 *expr_p = NULL_TREE;
10410 return GS_ALL_DONE;
10413 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10414 of OMP_TARGET's body. */
10416 static tree
10417 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10419 *walk_subtrees = 0;
10420 switch (TREE_CODE (*tp))
10422 case OMP_TEAMS:
10423 return *tp;
10424 case BIND_EXPR:
10425 case STATEMENT_LIST:
10426 *walk_subtrees = 1;
10427 break;
10428 default:
10429 break;
10431 return NULL_TREE;
10434 /* Helper function of optimize_target_teams, determine if the expression
10435 can be computed safely before the target construct on the host. */
10437 static tree
10438 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10440 splay_tree_node n;
10442 if (TYPE_P (*tp))
10444 *walk_subtrees = 0;
10445 return NULL_TREE;
10447 switch (TREE_CODE (*tp))
10449 case VAR_DECL:
10450 case PARM_DECL:
10451 case RESULT_DECL:
10452 *walk_subtrees = 0;
10453 if (error_operand_p (*tp)
10454 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10455 || DECL_HAS_VALUE_EXPR_P (*tp)
10456 || DECL_THREAD_LOCAL_P (*tp)
10457 || TREE_SIDE_EFFECTS (*tp)
10458 || TREE_THIS_VOLATILE (*tp))
10459 return *tp;
10460 if (is_global_var (*tp)
10461 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10462 || lookup_attribute ("omp declare target link",
10463 DECL_ATTRIBUTES (*tp))))
10464 return *tp;
10465 if (VAR_P (*tp)
10466 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10467 && !is_global_var (*tp)
10468 && decl_function_context (*tp) == current_function_decl)
10469 return *tp;
10470 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10471 (splay_tree_key) *tp);
10472 if (n == NULL)
10474 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10475 return NULL_TREE;
10476 return *tp;
10478 else if (n->value & GOVD_LOCAL)
10479 return *tp;
10480 else if (n->value & GOVD_FIRSTPRIVATE)
10481 return NULL_TREE;
10482 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10483 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10484 return NULL_TREE;
10485 return *tp;
10486 case INTEGER_CST:
10487 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10488 return *tp;
10489 return NULL_TREE;
10490 case TARGET_EXPR:
10491 if (TARGET_EXPR_INITIAL (*tp)
10492 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10493 return *tp;
10494 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10495 walk_subtrees, NULL);
10496 /* Allow some reasonable subset of integral arithmetics. */
10497 case PLUS_EXPR:
10498 case MINUS_EXPR:
10499 case MULT_EXPR:
10500 case TRUNC_DIV_EXPR:
10501 case CEIL_DIV_EXPR:
10502 case FLOOR_DIV_EXPR:
10503 case ROUND_DIV_EXPR:
10504 case TRUNC_MOD_EXPR:
10505 case CEIL_MOD_EXPR:
10506 case FLOOR_MOD_EXPR:
10507 case ROUND_MOD_EXPR:
10508 case RDIV_EXPR:
10509 case EXACT_DIV_EXPR:
10510 case MIN_EXPR:
10511 case MAX_EXPR:
10512 case LSHIFT_EXPR:
10513 case RSHIFT_EXPR:
10514 case BIT_IOR_EXPR:
10515 case BIT_XOR_EXPR:
10516 case BIT_AND_EXPR:
10517 case NEGATE_EXPR:
10518 case ABS_EXPR:
10519 case BIT_NOT_EXPR:
10520 case NON_LVALUE_EXPR:
10521 CASE_CONVERT:
10522 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10523 return *tp;
10524 return NULL_TREE;
10525 /* And disallow anything else, except for comparisons. */
10526 default:
10527 if (COMPARISON_CLASS_P (*tp))
10528 return NULL_TREE;
10529 return *tp;
10533 /* Try to determine if the num_teams and/or thread_limit expressions
10534 can have their values determined already before entering the
10535 target construct.
10536 INTEGER_CSTs trivially are,
10537 integral decls that are firstprivate (explicitly or implicitly)
10538 or explicitly map(always, to:) or map(always, tofrom:) on the target
10539 region too, and expressions involving simple arithmetics on those
10540 too, function calls are not ok, dereferencing something neither etc.
10541 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10542 EXPR based on what we find:
10543 0 stands for clause not specified at all, use implementation default
10544 -1 stands for value that can't be determined easily before entering
10545 the target construct.
10546 If teams construct is not present at all, use 1 for num_teams
10547 and 0 for thread_limit (only one team is involved, and the thread
10548 limit is implementation defined. */
10550 static void
10551 optimize_target_teams (tree target, gimple_seq *pre_p)
10553 tree body = OMP_BODY (target);
10554 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10555 tree num_teams = integer_zero_node;
10556 tree thread_limit = integer_zero_node;
10557 location_t num_teams_loc = EXPR_LOCATION (target);
10558 location_t thread_limit_loc = EXPR_LOCATION (target);
10559 tree c, *p, expr;
10560 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10562 if (teams == NULL_TREE)
10563 num_teams = integer_one_node;
10564 else
10565 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10567 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10569 p = &num_teams;
10570 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10572 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10574 p = &thread_limit;
10575 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10577 else
10578 continue;
10579 expr = OMP_CLAUSE_OPERAND (c, 0);
10580 if (TREE_CODE (expr) == INTEGER_CST)
10582 *p = expr;
10583 continue;
10585 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10587 *p = integer_minus_one_node;
10588 continue;
10590 *p = expr;
10591 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10592 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10593 == GS_ERROR)
10595 gimplify_omp_ctxp = target_ctx;
10596 *p = integer_minus_one_node;
10597 continue;
10599 gimplify_omp_ctxp = target_ctx;
10600 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10601 OMP_CLAUSE_OPERAND (c, 0) = *p;
10603 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10604 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10605 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10606 OMP_TARGET_CLAUSES (target) = c;
10607 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10608 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10609 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10610 OMP_TARGET_CLAUSES (target) = c;
10613 /* Gimplify the gross structure of several OMP constructs. */
10615 static void
10616 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10618 tree expr = *expr_p;
10619 gimple *stmt;
10620 gimple_seq body = NULL;
10621 enum omp_region_type ort;
10623 switch (TREE_CODE (expr))
10625 case OMP_SECTIONS:
10626 case OMP_SINGLE:
10627 ort = ORT_WORKSHARE;
10628 break;
10629 case OMP_TARGET:
10630 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10631 break;
10632 case OACC_KERNELS:
10633 ort = ORT_ACC_KERNELS;
10634 break;
10635 case OACC_PARALLEL:
10636 ort = ORT_ACC_PARALLEL;
10637 break;
10638 case OACC_DATA:
10639 ort = ORT_ACC_DATA;
10640 break;
10641 case OMP_TARGET_DATA:
10642 ort = ORT_TARGET_DATA;
10643 break;
10644 case OMP_TEAMS:
10645 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10646 break;
10647 case OACC_HOST_DATA:
10648 ort = ORT_ACC_HOST_DATA;
10649 break;
10650 default:
10651 gcc_unreachable ();
10653 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10654 TREE_CODE (expr));
10655 if (TREE_CODE (expr) == OMP_TARGET)
10656 optimize_target_teams (expr, pre_p);
10657 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10659 push_gimplify_context ();
10660 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10661 if (gimple_code (g) == GIMPLE_BIND)
10662 pop_gimplify_context (g);
10663 else
10664 pop_gimplify_context (NULL);
10665 if ((ort & ORT_TARGET_DATA) != 0)
10667 enum built_in_function end_ix;
10668 switch (TREE_CODE (expr))
10670 case OACC_DATA:
10671 case OACC_HOST_DATA:
10672 end_ix = BUILT_IN_GOACC_DATA_END;
10673 break;
10674 case OMP_TARGET_DATA:
10675 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10676 break;
10677 default:
10678 gcc_unreachable ();
10680 tree fn = builtin_decl_explicit (end_ix);
10681 g = gimple_build_call (fn, 0);
10682 gimple_seq cleanup = NULL;
10683 gimple_seq_add_stmt (&cleanup, g);
10684 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10685 body = NULL;
10686 gimple_seq_add_stmt (&body, g);
10689 else
10690 gimplify_and_add (OMP_BODY (expr), &body);
10691 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10692 TREE_CODE (expr));
10694 switch (TREE_CODE (expr))
10696 case OACC_DATA:
10697 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10698 OMP_CLAUSES (expr));
10699 break;
10700 case OACC_KERNELS:
10701 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10702 OMP_CLAUSES (expr));
10703 break;
10704 case OACC_HOST_DATA:
10705 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10706 OMP_CLAUSES (expr));
10707 break;
10708 case OACC_PARALLEL:
10709 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10710 OMP_CLAUSES (expr));
10711 break;
10712 case OMP_SECTIONS:
10713 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10714 break;
10715 case OMP_SINGLE:
10716 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10717 break;
10718 case OMP_TARGET:
10719 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10720 OMP_CLAUSES (expr));
10721 break;
10722 case OMP_TARGET_DATA:
10723 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10724 OMP_CLAUSES (expr));
10725 break;
10726 case OMP_TEAMS:
10727 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10728 break;
10729 default:
10730 gcc_unreachable ();
10733 gimplify_seq_add_stmt (pre_p, stmt);
10734 *expr_p = NULL_TREE;
10737 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10738 target update constructs. */
10740 static void
10741 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10743 tree expr = *expr_p;
10744 int kind;
10745 gomp_target *stmt;
10746 enum omp_region_type ort = ORT_WORKSHARE;
10748 switch (TREE_CODE (expr))
10750 case OACC_ENTER_DATA:
10751 case OACC_EXIT_DATA:
10752 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10753 ort = ORT_ACC;
10754 break;
10755 case OACC_UPDATE:
10756 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10757 ort = ORT_ACC;
10758 break;
10759 case OMP_TARGET_UPDATE:
10760 kind = GF_OMP_TARGET_KIND_UPDATE;
10761 break;
10762 case OMP_TARGET_ENTER_DATA:
10763 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10764 break;
10765 case OMP_TARGET_EXIT_DATA:
10766 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10767 break;
10768 default:
10769 gcc_unreachable ();
10771 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10772 ort, TREE_CODE (expr));
10773 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10774 TREE_CODE (expr));
10775 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10777 gimplify_seq_add_stmt (pre_p, stmt);
10778 *expr_p = NULL_TREE;
10781 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10782 stabilized the lhs of the atomic operation as *ADDR. Return true if
10783 EXPR is this stabilized form. */
10785 static bool
10786 goa_lhs_expr_p (tree expr, tree addr)
10788 /* Also include casts to other type variants. The C front end is fond
10789 of adding these for e.g. volatile variables. This is like
10790 STRIP_TYPE_NOPS but includes the main variant lookup. */
10791 STRIP_USELESS_TYPE_CONVERSION (expr);
10793 if (TREE_CODE (expr) == INDIRECT_REF)
10795 expr = TREE_OPERAND (expr, 0);
10796 while (expr != addr
10797 && (CONVERT_EXPR_P (expr)
10798 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10799 && TREE_CODE (expr) == TREE_CODE (addr)
10800 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10802 expr = TREE_OPERAND (expr, 0);
10803 addr = TREE_OPERAND (addr, 0);
10805 if (expr == addr)
10806 return true;
10807 return (TREE_CODE (addr) == ADDR_EXPR
10808 && TREE_CODE (expr) == ADDR_EXPR
10809 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10811 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10812 return true;
10813 return false;
10816 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10817 expression does not involve the lhs, evaluate it into a temporary.
10818 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10819 or -1 if an error was encountered. */
10821 static int
10822 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10823 tree lhs_var)
10825 tree expr = *expr_p;
10826 int saw_lhs;
10828 if (goa_lhs_expr_p (expr, lhs_addr))
10830 *expr_p = lhs_var;
10831 return 1;
10833 if (is_gimple_val (expr))
10834 return 0;
10836 saw_lhs = 0;
10837 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10839 case tcc_binary:
10840 case tcc_comparison:
10841 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10842 lhs_var);
10843 /* FALLTHRU */
10844 case tcc_unary:
10845 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10846 lhs_var);
10847 break;
10848 case tcc_expression:
10849 switch (TREE_CODE (expr))
10851 case TRUTH_ANDIF_EXPR:
10852 case TRUTH_ORIF_EXPR:
10853 case TRUTH_AND_EXPR:
10854 case TRUTH_OR_EXPR:
10855 case TRUTH_XOR_EXPR:
10856 case BIT_INSERT_EXPR:
10857 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10858 lhs_addr, lhs_var);
10859 /* FALLTHRU */
10860 case TRUTH_NOT_EXPR:
10861 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10862 lhs_addr, lhs_var);
10863 break;
10864 case COMPOUND_EXPR:
10865 /* Break out any preevaluations from cp_build_modify_expr. */
10866 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10867 expr = TREE_OPERAND (expr, 1))
10868 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10869 *expr_p = expr;
10870 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10871 default:
10872 break;
10874 break;
10875 case tcc_reference:
10876 if (TREE_CODE (expr) == BIT_FIELD_REF)
10877 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10878 lhs_addr, lhs_var);
10879 break;
10880 default:
10881 break;
10884 if (saw_lhs == 0)
10886 enum gimplify_status gs;
10887 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10888 if (gs != GS_ALL_DONE)
10889 saw_lhs = -1;
10892 return saw_lhs;
10895 /* Gimplify an OMP_ATOMIC statement. */
10897 static enum gimplify_status
10898 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10900 tree addr = TREE_OPERAND (*expr_p, 0);
10901 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10902 ? NULL : TREE_OPERAND (*expr_p, 1);
10903 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10904 tree tmp_load;
10905 gomp_atomic_load *loadstmt;
10906 gomp_atomic_store *storestmt;
10908 tmp_load = create_tmp_reg (type);
10909 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10910 return GS_ERROR;
10912 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10913 != GS_ALL_DONE)
10914 return GS_ERROR;
10916 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10917 gimplify_seq_add_stmt (pre_p, loadstmt);
10918 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10919 != GS_ALL_DONE)
10920 return GS_ERROR;
10922 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10923 rhs = tmp_load;
10924 storestmt = gimple_build_omp_atomic_store (rhs);
10925 gimplify_seq_add_stmt (pre_p, storestmt);
10926 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10928 gimple_omp_atomic_set_seq_cst (loadstmt);
10929 gimple_omp_atomic_set_seq_cst (storestmt);
10931 switch (TREE_CODE (*expr_p))
10933 case OMP_ATOMIC_READ:
10934 case OMP_ATOMIC_CAPTURE_OLD:
10935 *expr_p = tmp_load;
10936 gimple_omp_atomic_set_need_value (loadstmt);
10937 break;
10938 case OMP_ATOMIC_CAPTURE_NEW:
10939 *expr_p = rhs;
10940 gimple_omp_atomic_set_need_value (storestmt);
10941 break;
10942 default:
10943 *expr_p = NULL;
10944 break;
10947 return GS_ALL_DONE;
10950 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10951 body, and adding some EH bits. */
10953 static enum gimplify_status
10954 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10956 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10957 gimple *body_stmt;
10958 gtransaction *trans_stmt;
10959 gimple_seq body = NULL;
10960 int subcode = 0;
10962 /* Wrap the transaction body in a BIND_EXPR so we have a context
10963 where to put decls for OMP. */
10964 if (TREE_CODE (tbody) != BIND_EXPR)
10966 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10967 TREE_SIDE_EFFECTS (bind) = 1;
10968 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10969 TRANSACTION_EXPR_BODY (expr) = bind;
10972 push_gimplify_context ();
10973 temp = voidify_wrapper_expr (*expr_p, NULL);
10975 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10976 pop_gimplify_context (body_stmt);
10978 trans_stmt = gimple_build_transaction (body);
10979 if (TRANSACTION_EXPR_OUTER (expr))
10980 subcode = GTMA_IS_OUTER;
10981 else if (TRANSACTION_EXPR_RELAXED (expr))
10982 subcode = GTMA_IS_RELAXED;
10983 gimple_transaction_set_subcode (trans_stmt, subcode);
10985 gimplify_seq_add_stmt (pre_p, trans_stmt);
10987 if (temp)
10989 *expr_p = temp;
10990 return GS_OK;
10993 *expr_p = NULL_TREE;
10994 return GS_ALL_DONE;
10997 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10998 is the OMP_BODY of the original EXPR (which has already been
10999 gimplified so it's not present in the EXPR).
11001 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
11003 static gimple *
11004 gimplify_omp_ordered (tree expr, gimple_seq body)
11006 tree c, decls;
11007 int failures = 0;
11008 unsigned int i;
11009 tree source_c = NULL_TREE;
11010 tree sink_c = NULL_TREE;
11012 if (gimplify_omp_ctxp)
11014 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11015 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11016 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
11017 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
11018 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
11020 error_at (OMP_CLAUSE_LOCATION (c),
11021 "%<ordered%> construct with %<depend%> clause must be "
11022 "closely nested inside a loop with %<ordered%> clause "
11023 "with a parameter");
11024 failures++;
11026 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11027 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
11029 bool fail = false;
11030 for (decls = OMP_CLAUSE_DECL (c), i = 0;
11031 decls && TREE_CODE (decls) == TREE_LIST;
11032 decls = TREE_CHAIN (decls), ++i)
11033 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
11034 continue;
11035 else if (TREE_VALUE (decls)
11036 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11038 error_at (OMP_CLAUSE_LOCATION (c),
11039 "variable %qE is not an iteration "
11040 "of outermost loop %d, expected %qE",
11041 TREE_VALUE (decls), i + 1,
11042 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11043 fail = true;
11044 failures++;
11046 else
11047 TREE_VALUE (decls)
11048 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
11049 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11051 error_at (OMP_CLAUSE_LOCATION (c),
11052 "number of variables in %<depend(sink)%> "
11053 "clause does not match number of "
11054 "iteration variables");
11055 failures++;
11057 sink_c = c;
11059 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11060 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11062 if (source_c)
11064 error_at (OMP_CLAUSE_LOCATION (c),
11065 "more than one %<depend(source)%> clause on an "
11066 "%<ordered%> construct");
11067 failures++;
11069 else
11070 source_c = c;
11073 if (source_c && sink_c)
11075 error_at (OMP_CLAUSE_LOCATION (source_c),
11076 "%<depend(source)%> clause specified together with "
11077 "%<depend(sink:)%> clauses on the same construct");
11078 failures++;
11081 if (failures)
11082 return gimple_build_nop ();
11083 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11086 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11087 expression produces a value to be used as an operand inside a GIMPLE
11088 statement, the value will be stored back in *EXPR_P. This value will
11089 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11090 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11091 emitted in PRE_P and POST_P.
11093 Additionally, this process may overwrite parts of the input
11094 expression during gimplification. Ideally, it should be
11095 possible to do non-destructive gimplification.
11097 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11098 the expression needs to evaluate to a value to be used as
11099 an operand in a GIMPLE statement, this value will be stored in
11100 *EXPR_P on exit. This happens when the caller specifies one
11101 of fb_lvalue or fb_rvalue fallback flags.
11103 PRE_P will contain the sequence of GIMPLE statements corresponding
11104 to the evaluation of EXPR and all the side-effects that must
11105 be executed before the main expression. On exit, the last
11106 statement of PRE_P is the core statement being gimplified. For
11107 instance, when gimplifying 'if (++a)' the last statement in
11108 PRE_P will be 'if (t.1)' where t.1 is the result of
11109 pre-incrementing 'a'.
11111 POST_P will contain the sequence of GIMPLE statements corresponding
11112 to the evaluation of all the side-effects that must be executed
11113 after the main expression. If this is NULL, the post
11114 side-effects are stored at the end of PRE_P.
11116 The reason why the output is split in two is to handle post
11117 side-effects explicitly. In some cases, an expression may have
11118 inner and outer post side-effects which need to be emitted in
11119 an order different from the one given by the recursive
11120 traversal. For instance, for the expression (*p--)++ the post
11121 side-effects of '--' must actually occur *after* the post
11122 side-effects of '++'. However, gimplification will first visit
11123 the inner expression, so if a separate POST sequence was not
11124 used, the resulting sequence would be:
11126 1 t.1 = *p
11127 2 p = p - 1
11128 3 t.2 = t.1 + 1
11129 4 *p = t.2
11131 However, the post-decrement operation in line #2 must not be
11132 evaluated until after the store to *p at line #4, so the
11133 correct sequence should be:
11135 1 t.1 = *p
11136 2 t.2 = t.1 + 1
11137 3 *p = t.2
11138 4 p = p - 1
11140 So, by specifying a separate post queue, it is possible
11141 to emit the post side-effects in the correct order.
11142 If POST_P is NULL, an internal queue will be used. Before
11143 returning to the caller, the sequence POST_P is appended to
11144 the main output sequence PRE_P.
11146 GIMPLE_TEST_F points to a function that takes a tree T and
11147 returns nonzero if T is in the GIMPLE form requested by the
11148 caller. The GIMPLE predicates are in gimple.c.
11150 FALLBACK tells the function what sort of a temporary we want if
11151 gimplification cannot produce an expression that complies with
11152 GIMPLE_TEST_F.
11154 fb_none means that no temporary should be generated
11155 fb_rvalue means that an rvalue is OK to generate
11156 fb_lvalue means that an lvalue is OK to generate
11157 fb_either means that either is OK, but an lvalue is preferable.
11158 fb_mayfail means that gimplification may fail (in which case
11159 GS_ERROR will be returned)
11161 The return value is either GS_ERROR or GS_ALL_DONE, since this
11162 function iterates until EXPR is completely gimplified or an error
11163 occurs. */
11165 enum gimplify_status
11166 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11167 bool (*gimple_test_f) (tree), fallback_t fallback)
11169 tree tmp;
11170 gimple_seq internal_pre = NULL;
11171 gimple_seq internal_post = NULL;
11172 tree save_expr;
11173 bool is_statement;
11174 location_t saved_location;
11175 enum gimplify_status ret;
11176 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11177 tree label;
11179 save_expr = *expr_p;
11180 if (save_expr == NULL_TREE)
11181 return GS_ALL_DONE;
11183 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11184 is_statement = gimple_test_f == is_gimple_stmt;
11185 if (is_statement)
11186 gcc_assert (pre_p);
11188 /* Consistency checks. */
11189 if (gimple_test_f == is_gimple_reg)
11190 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11191 else if (gimple_test_f == is_gimple_val
11192 || gimple_test_f == is_gimple_call_addr
11193 || gimple_test_f == is_gimple_condexpr
11194 || gimple_test_f == is_gimple_mem_rhs
11195 || gimple_test_f == is_gimple_mem_rhs_or_call
11196 || gimple_test_f == is_gimple_reg_rhs
11197 || gimple_test_f == is_gimple_reg_rhs_or_call
11198 || gimple_test_f == is_gimple_asm_val
11199 || gimple_test_f == is_gimple_mem_ref_addr)
11200 gcc_assert (fallback & fb_rvalue);
11201 else if (gimple_test_f == is_gimple_min_lval
11202 || gimple_test_f == is_gimple_lvalue)
11203 gcc_assert (fallback & fb_lvalue);
11204 else if (gimple_test_f == is_gimple_addressable)
11205 gcc_assert (fallback & fb_either);
11206 else if (gimple_test_f == is_gimple_stmt)
11207 gcc_assert (fallback == fb_none);
11208 else
11210 /* We should have recognized the GIMPLE_TEST_F predicate to
11211 know what kind of fallback to use in case a temporary is
11212 needed to hold the value or address of *EXPR_P. */
11213 gcc_unreachable ();
11216 /* We used to check the predicate here and return immediately if it
11217 succeeds. This is wrong; the design is for gimplification to be
11218 idempotent, and for the predicates to only test for valid forms, not
11219 whether they are fully simplified. */
11220 if (pre_p == NULL)
11221 pre_p = &internal_pre;
11223 if (post_p == NULL)
11224 post_p = &internal_post;
11226 /* Remember the last statements added to PRE_P and POST_P. Every
11227 new statement added by the gimplification helpers needs to be
11228 annotated with location information. To centralize the
11229 responsibility, we remember the last statement that had been
11230 added to both queues before gimplifying *EXPR_P. If
11231 gimplification produces new statements in PRE_P and POST_P, those
11232 statements will be annotated with the same location information
11233 as *EXPR_P. */
11234 pre_last_gsi = gsi_last (*pre_p);
11235 post_last_gsi = gsi_last (*post_p);
11237 saved_location = input_location;
11238 if (save_expr != error_mark_node
11239 && EXPR_HAS_LOCATION (*expr_p))
11240 input_location = EXPR_LOCATION (*expr_p);
11242 /* Loop over the specific gimplifiers until the toplevel node
11243 remains the same. */
11246 /* Strip away as many useless type conversions as possible
11247 at the toplevel. */
11248 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11250 /* Remember the expr. */
11251 save_expr = *expr_p;
11253 /* Die, die, die, my darling. */
11254 if (save_expr == error_mark_node
11255 || (TREE_TYPE (save_expr)
11256 && TREE_TYPE (save_expr) == error_mark_node))
11258 ret = GS_ERROR;
11259 break;
11262 /* Do any language-specific gimplification. */
11263 ret = ((enum gimplify_status)
11264 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11265 if (ret == GS_OK)
11267 if (*expr_p == NULL_TREE)
11268 break;
11269 if (*expr_p != save_expr)
11270 continue;
11272 else if (ret != GS_UNHANDLED)
11273 break;
11275 /* Make sure that all the cases set 'ret' appropriately. */
11276 ret = GS_UNHANDLED;
11277 switch (TREE_CODE (*expr_p))
11279 /* First deal with the special cases. */
11281 case POSTINCREMENT_EXPR:
11282 case POSTDECREMENT_EXPR:
11283 case PREINCREMENT_EXPR:
11284 case PREDECREMENT_EXPR:
11285 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11286 fallback != fb_none,
11287 TREE_TYPE (*expr_p));
11288 break;
11290 case VIEW_CONVERT_EXPR:
11291 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11292 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11294 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11295 post_p, is_gimple_val, fb_rvalue);
11296 recalculate_side_effects (*expr_p);
11297 break;
11299 /* Fallthru. */
11301 case ARRAY_REF:
11302 case ARRAY_RANGE_REF:
11303 case REALPART_EXPR:
11304 case IMAGPART_EXPR:
11305 case COMPONENT_REF:
11306 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11307 fallback ? fallback : fb_rvalue);
11308 break;
11310 case COND_EXPR:
11311 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11313 /* C99 code may assign to an array in a structure value of a
11314 conditional expression, and this has undefined behavior
11315 only on execution, so create a temporary if an lvalue is
11316 required. */
11317 if (fallback == fb_lvalue)
11319 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11320 mark_addressable (*expr_p);
11321 ret = GS_OK;
11323 break;
11325 case CALL_EXPR:
11326 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11328 /* C99 code may assign to an array in a structure returned
11329 from a function, and this has undefined behavior only on
11330 execution, so create a temporary if an lvalue is
11331 required. */
11332 if (fallback == fb_lvalue)
11334 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11335 mark_addressable (*expr_p);
11336 ret = GS_OK;
11338 break;
11340 case TREE_LIST:
11341 gcc_unreachable ();
11343 case COMPOUND_EXPR:
11344 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11345 break;
11347 case COMPOUND_LITERAL_EXPR:
11348 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11349 gimple_test_f, fallback);
11350 break;
11352 case MODIFY_EXPR:
11353 case INIT_EXPR:
11354 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11355 fallback != fb_none);
11356 break;
11358 case TRUTH_ANDIF_EXPR:
11359 case TRUTH_ORIF_EXPR:
11361 /* Preserve the original type of the expression and the
11362 source location of the outer expression. */
11363 tree org_type = TREE_TYPE (*expr_p);
11364 *expr_p = gimple_boolify (*expr_p);
11365 *expr_p = build3_loc (input_location, COND_EXPR,
11366 org_type, *expr_p,
11367 fold_convert_loc
11368 (input_location,
11369 org_type, boolean_true_node),
11370 fold_convert_loc
11371 (input_location,
11372 org_type, boolean_false_node));
11373 ret = GS_OK;
11374 break;
11377 case TRUTH_NOT_EXPR:
11379 tree type = TREE_TYPE (*expr_p);
11380 /* The parsers are careful to generate TRUTH_NOT_EXPR
11381 only with operands that are always zero or one.
11382 We do not fold here but handle the only interesting case
11383 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11384 *expr_p = gimple_boolify (*expr_p);
11385 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11386 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11387 TREE_TYPE (*expr_p),
11388 TREE_OPERAND (*expr_p, 0));
11389 else
11390 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11391 TREE_TYPE (*expr_p),
11392 TREE_OPERAND (*expr_p, 0),
11393 build_int_cst (TREE_TYPE (*expr_p), 1));
11394 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11395 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11396 ret = GS_OK;
11397 break;
11400 case ADDR_EXPR:
11401 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11402 break;
11404 case ANNOTATE_EXPR:
11406 tree cond = TREE_OPERAND (*expr_p, 0);
11407 tree kind = TREE_OPERAND (*expr_p, 1);
11408 tree data = TREE_OPERAND (*expr_p, 2);
11409 tree type = TREE_TYPE (cond);
11410 if (!INTEGRAL_TYPE_P (type))
11412 *expr_p = cond;
11413 ret = GS_OK;
11414 break;
11416 tree tmp = create_tmp_var (type);
11417 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11418 gcall *call
11419 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
11420 gimple_call_set_lhs (call, tmp);
11421 gimplify_seq_add_stmt (pre_p, call);
11422 *expr_p = tmp;
11423 ret = GS_ALL_DONE;
11424 break;
11427 case VA_ARG_EXPR:
11428 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11429 break;
11431 CASE_CONVERT:
11432 if (IS_EMPTY_STMT (*expr_p))
11434 ret = GS_ALL_DONE;
11435 break;
11438 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11439 || fallback == fb_none)
11441 /* Just strip a conversion to void (or in void context) and
11442 try again. */
11443 *expr_p = TREE_OPERAND (*expr_p, 0);
11444 ret = GS_OK;
11445 break;
11448 ret = gimplify_conversion (expr_p);
11449 if (ret == GS_ERROR)
11450 break;
11451 if (*expr_p != save_expr)
11452 break;
11453 /* FALLTHRU */
11455 case FIX_TRUNC_EXPR:
11456 /* unary_expr: ... | '(' cast ')' val | ... */
11457 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11458 is_gimple_val, fb_rvalue);
11459 recalculate_side_effects (*expr_p);
11460 break;
11462 case INDIRECT_REF:
11464 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11465 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11466 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11468 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11469 if (*expr_p != save_expr)
11471 ret = GS_OK;
11472 break;
11475 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11476 is_gimple_reg, fb_rvalue);
11477 if (ret == GS_ERROR)
11478 break;
11480 recalculate_side_effects (*expr_p);
11481 *expr_p = fold_build2_loc (input_location, MEM_REF,
11482 TREE_TYPE (*expr_p),
11483 TREE_OPERAND (*expr_p, 0),
11484 build_int_cst (saved_ptr_type, 0));
11485 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11486 TREE_THIS_NOTRAP (*expr_p) = notrap;
11487 ret = GS_OK;
11488 break;
11491 /* We arrive here through the various re-gimplifcation paths. */
11492 case MEM_REF:
11493 /* First try re-folding the whole thing. */
11494 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11495 TREE_OPERAND (*expr_p, 0),
11496 TREE_OPERAND (*expr_p, 1));
11497 if (tmp)
11499 REF_REVERSE_STORAGE_ORDER (tmp)
11500 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11501 *expr_p = tmp;
11502 recalculate_side_effects (*expr_p);
11503 ret = GS_OK;
11504 break;
11506 /* Avoid re-gimplifying the address operand if it is already
11507 in suitable form. Re-gimplifying would mark the address
11508 operand addressable. Always gimplify when not in SSA form
11509 as we still may have to gimplify decls with value-exprs. */
11510 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11511 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11513 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11514 is_gimple_mem_ref_addr, fb_rvalue);
11515 if (ret == GS_ERROR)
11516 break;
11518 recalculate_side_effects (*expr_p);
11519 ret = GS_ALL_DONE;
11520 break;
11522 /* Constants need not be gimplified. */
11523 case INTEGER_CST:
11524 case REAL_CST:
11525 case FIXED_CST:
11526 case STRING_CST:
11527 case COMPLEX_CST:
11528 case VECTOR_CST:
11529 /* Drop the overflow flag on constants, we do not want
11530 that in the GIMPLE IL. */
11531 if (TREE_OVERFLOW_P (*expr_p))
11532 *expr_p = drop_tree_overflow (*expr_p);
11533 ret = GS_ALL_DONE;
11534 break;
11536 case CONST_DECL:
11537 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11538 CONST_DECL node. Otherwise the decl is replaceable by its
11539 value. */
11540 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11541 if (fallback & fb_lvalue)
11542 ret = GS_ALL_DONE;
11543 else
11545 *expr_p = DECL_INITIAL (*expr_p);
11546 ret = GS_OK;
11548 break;
11550 case DECL_EXPR:
11551 ret = gimplify_decl_expr (expr_p, pre_p);
11552 break;
11554 case BIND_EXPR:
11555 ret = gimplify_bind_expr (expr_p, pre_p);
11556 break;
11558 case LOOP_EXPR:
11559 ret = gimplify_loop_expr (expr_p, pre_p);
11560 break;
11562 case SWITCH_EXPR:
11563 ret = gimplify_switch_expr (expr_p, pre_p);
11564 break;
11566 case EXIT_EXPR:
11567 ret = gimplify_exit_expr (expr_p);
11568 break;
11570 case GOTO_EXPR:
11571 /* If the target is not LABEL, then it is a computed jump
11572 and the target needs to be gimplified. */
11573 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11575 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11576 NULL, is_gimple_val, fb_rvalue);
11577 if (ret == GS_ERROR)
11578 break;
11580 gimplify_seq_add_stmt (pre_p,
11581 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11582 ret = GS_ALL_DONE;
11583 break;
11585 case PREDICT_EXPR:
11586 gimplify_seq_add_stmt (pre_p,
11587 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11588 PREDICT_EXPR_OUTCOME (*expr_p)));
11589 ret = GS_ALL_DONE;
11590 break;
11592 case LABEL_EXPR:
11593 ret = gimplify_label_expr (expr_p, pre_p);
11594 label = LABEL_EXPR_LABEL (*expr_p);
11595 gcc_assert (decl_function_context (label) == current_function_decl);
11597 /* If the label is used in a goto statement, or address of the label
11598 is taken, we need to unpoison all variables that were seen so far.
11599 Doing so would prevent us from reporting a false positives. */
11600 if (asan_poisoned_variables
11601 && asan_used_labels != NULL
11602 && asan_used_labels->contains (label))
11603 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11604 break;
11606 case CASE_LABEL_EXPR:
11607 ret = gimplify_case_label_expr (expr_p, pre_p);
11609 if (gimplify_ctxp->live_switch_vars)
11610 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11611 pre_p);
11612 break;
11614 case RETURN_EXPR:
11615 ret = gimplify_return_expr (*expr_p, pre_p);
11616 break;
11618 case CONSTRUCTOR:
11619 /* Don't reduce this in place; let gimplify_init_constructor work its
11620 magic. Buf if we're just elaborating this for side effects, just
11621 gimplify any element that has side-effects. */
11622 if (fallback == fb_none)
11624 unsigned HOST_WIDE_INT ix;
11625 tree val;
11626 tree temp = NULL_TREE;
11627 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11628 if (TREE_SIDE_EFFECTS (val))
11629 append_to_statement_list (val, &temp);
11631 *expr_p = temp;
11632 ret = temp ? GS_OK : GS_ALL_DONE;
11634 /* C99 code may assign to an array in a constructed
11635 structure or union, and this has undefined behavior only
11636 on execution, so create a temporary if an lvalue is
11637 required. */
11638 else if (fallback == fb_lvalue)
11640 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11641 mark_addressable (*expr_p);
11642 ret = GS_OK;
11644 else
11645 ret = GS_ALL_DONE;
11646 break;
11648 /* The following are special cases that are not handled by the
11649 original GIMPLE grammar. */
11651 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11652 eliminated. */
11653 case SAVE_EXPR:
11654 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11655 break;
11657 case BIT_FIELD_REF:
11658 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11659 post_p, is_gimple_lvalue, fb_either);
11660 recalculate_side_effects (*expr_p);
11661 break;
11663 case TARGET_MEM_REF:
11665 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11667 if (TMR_BASE (*expr_p))
11668 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11669 post_p, is_gimple_mem_ref_addr, fb_either);
11670 if (TMR_INDEX (*expr_p))
11671 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11672 post_p, is_gimple_val, fb_rvalue);
11673 if (TMR_INDEX2 (*expr_p))
11674 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11675 post_p, is_gimple_val, fb_rvalue);
11676 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11677 ret = MIN (r0, r1);
11679 break;
11681 case NON_LVALUE_EXPR:
11682 /* This should have been stripped above. */
11683 gcc_unreachable ();
11685 case ASM_EXPR:
11686 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11687 break;
11689 case TRY_FINALLY_EXPR:
11690 case TRY_CATCH_EXPR:
11692 gimple_seq eval, cleanup;
11693 gtry *try_;
11695 /* Calls to destructors are generated automatically in FINALLY/CATCH
11696 block. They should have location as UNKNOWN_LOCATION. However,
11697 gimplify_call_expr will reset these call stmts to input_location
11698 if it finds stmt's location is unknown. To prevent resetting for
11699 destructors, we set the input_location to unknown.
11700 Note that this only affects the destructor calls in FINALLY/CATCH
11701 block, and will automatically reset to its original value by the
11702 end of gimplify_expr. */
11703 input_location = UNKNOWN_LOCATION;
11704 eval = cleanup = NULL;
11705 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11706 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11707 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11708 if (gimple_seq_empty_p (cleanup))
11710 gimple_seq_add_seq (pre_p, eval);
11711 ret = GS_ALL_DONE;
11712 break;
11714 try_ = gimple_build_try (eval, cleanup,
11715 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11716 ? GIMPLE_TRY_FINALLY
11717 : GIMPLE_TRY_CATCH);
11718 if (EXPR_HAS_LOCATION (save_expr))
11719 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11720 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11721 gimple_set_location (try_, saved_location);
11722 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11723 gimple_try_set_catch_is_cleanup (try_,
11724 TRY_CATCH_IS_CLEANUP (*expr_p));
11725 gimplify_seq_add_stmt (pre_p, try_);
11726 ret = GS_ALL_DONE;
11727 break;
11730 case CLEANUP_POINT_EXPR:
11731 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11732 break;
11734 case TARGET_EXPR:
11735 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11736 break;
11738 case CATCH_EXPR:
11740 gimple *c;
11741 gimple_seq handler = NULL;
11742 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11743 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11744 gimplify_seq_add_stmt (pre_p, c);
11745 ret = GS_ALL_DONE;
11746 break;
11749 case EH_FILTER_EXPR:
11751 gimple *ehf;
11752 gimple_seq failure = NULL;
11754 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11755 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11756 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11757 gimplify_seq_add_stmt (pre_p, ehf);
11758 ret = GS_ALL_DONE;
11759 break;
11762 case OBJ_TYPE_REF:
11764 enum gimplify_status r0, r1;
11765 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11766 post_p, is_gimple_val, fb_rvalue);
11767 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11768 post_p, is_gimple_val, fb_rvalue);
11769 TREE_SIDE_EFFECTS (*expr_p) = 0;
11770 ret = MIN (r0, r1);
11772 break;
11774 case LABEL_DECL:
11775 /* We get here when taking the address of a label. We mark
11776 the label as "forced"; meaning it can never be removed and
11777 it is a potential target for any computed goto. */
11778 FORCED_LABEL (*expr_p) = 1;
11779 ret = GS_ALL_DONE;
11780 break;
11782 case STATEMENT_LIST:
11783 ret = gimplify_statement_list (expr_p, pre_p);
11784 break;
11786 case WITH_SIZE_EXPR:
11788 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11789 post_p == &internal_post ? NULL : post_p,
11790 gimple_test_f, fallback);
11791 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11792 is_gimple_val, fb_rvalue);
11793 ret = GS_ALL_DONE;
11795 break;
11797 case VAR_DECL:
11798 case PARM_DECL:
11799 ret = gimplify_var_or_parm_decl (expr_p);
11800 break;
11802 case RESULT_DECL:
11803 /* When within an OMP context, notice uses of variables. */
11804 if (gimplify_omp_ctxp)
11805 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11806 ret = GS_ALL_DONE;
11807 break;
11809 case SSA_NAME:
11810 /* Allow callbacks into the gimplifier during optimization. */
11811 ret = GS_ALL_DONE;
11812 break;
11814 case OMP_PARALLEL:
11815 gimplify_omp_parallel (expr_p, pre_p);
11816 ret = GS_ALL_DONE;
11817 break;
11819 case OMP_TASK:
11820 gimplify_omp_task (expr_p, pre_p);
11821 ret = GS_ALL_DONE;
11822 break;
11824 case OMP_FOR:
11825 case OMP_SIMD:
11826 case OMP_DISTRIBUTE:
11827 case OMP_TASKLOOP:
11828 case OACC_LOOP:
11829 ret = gimplify_omp_for (expr_p, pre_p);
11830 break;
11832 case OACC_CACHE:
11833 gimplify_oacc_cache (expr_p, pre_p);
11834 ret = GS_ALL_DONE;
11835 break;
11837 case OACC_DECLARE:
11838 gimplify_oacc_declare (expr_p, pre_p);
11839 ret = GS_ALL_DONE;
11840 break;
11842 case OACC_HOST_DATA:
11843 case OACC_DATA:
11844 case OACC_KERNELS:
11845 case OACC_PARALLEL:
11846 case OMP_SECTIONS:
11847 case OMP_SINGLE:
11848 case OMP_TARGET:
11849 case OMP_TARGET_DATA:
11850 case OMP_TEAMS:
11851 gimplify_omp_workshare (expr_p, pre_p);
11852 ret = GS_ALL_DONE;
11853 break;
11855 case OACC_ENTER_DATA:
11856 case OACC_EXIT_DATA:
11857 case OACC_UPDATE:
11858 case OMP_TARGET_UPDATE:
11859 case OMP_TARGET_ENTER_DATA:
11860 case OMP_TARGET_EXIT_DATA:
11861 gimplify_omp_target_update (expr_p, pre_p);
11862 ret = GS_ALL_DONE;
11863 break;
11865 case OMP_SECTION:
11866 case OMP_MASTER:
11867 case OMP_TASKGROUP:
11868 case OMP_ORDERED:
11869 case OMP_CRITICAL:
11871 gimple_seq body = NULL;
11872 gimple *g;
11874 gimplify_and_add (OMP_BODY (*expr_p), &body);
11875 switch (TREE_CODE (*expr_p))
11877 case OMP_SECTION:
11878 g = gimple_build_omp_section (body);
11879 break;
11880 case OMP_MASTER:
11881 g = gimple_build_omp_master (body);
11882 break;
11883 case OMP_TASKGROUP:
11885 gimple_seq cleanup = NULL;
11886 tree fn
11887 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11888 g = gimple_build_call (fn, 0);
11889 gimple_seq_add_stmt (&cleanup, g);
11890 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11891 body = NULL;
11892 gimple_seq_add_stmt (&body, g);
11893 g = gimple_build_omp_taskgroup (body);
11895 break;
11896 case OMP_ORDERED:
11897 g = gimplify_omp_ordered (*expr_p, body);
11898 break;
11899 case OMP_CRITICAL:
11900 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11901 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11902 gimplify_adjust_omp_clauses (pre_p, body,
11903 &OMP_CRITICAL_CLAUSES (*expr_p),
11904 OMP_CRITICAL);
11905 g = gimple_build_omp_critical (body,
11906 OMP_CRITICAL_NAME (*expr_p),
11907 OMP_CRITICAL_CLAUSES (*expr_p));
11908 break;
11909 default:
11910 gcc_unreachable ();
11912 gimplify_seq_add_stmt (pre_p, g);
11913 ret = GS_ALL_DONE;
11914 break;
11917 case OMP_ATOMIC:
11918 case OMP_ATOMIC_READ:
11919 case OMP_ATOMIC_CAPTURE_OLD:
11920 case OMP_ATOMIC_CAPTURE_NEW:
11921 ret = gimplify_omp_atomic (expr_p, pre_p);
11922 break;
11924 case TRANSACTION_EXPR:
11925 ret = gimplify_transaction (expr_p, pre_p);
11926 break;
11928 case TRUTH_AND_EXPR:
11929 case TRUTH_OR_EXPR:
11930 case TRUTH_XOR_EXPR:
11932 tree orig_type = TREE_TYPE (*expr_p);
11933 tree new_type, xop0, xop1;
11934 *expr_p = gimple_boolify (*expr_p);
11935 new_type = TREE_TYPE (*expr_p);
11936 if (!useless_type_conversion_p (orig_type, new_type))
11938 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11939 ret = GS_OK;
11940 break;
11943 /* Boolified binary truth expressions are semantically equivalent
11944 to bitwise binary expressions. Canonicalize them to the
11945 bitwise variant. */
11946 switch (TREE_CODE (*expr_p))
11948 case TRUTH_AND_EXPR:
11949 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11950 break;
11951 case TRUTH_OR_EXPR:
11952 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11953 break;
11954 case TRUTH_XOR_EXPR:
11955 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11956 break;
11957 default:
11958 break;
11960 /* Now make sure that operands have compatible type to
11961 expression's new_type. */
11962 xop0 = TREE_OPERAND (*expr_p, 0);
11963 xop1 = TREE_OPERAND (*expr_p, 1);
11964 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11965 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11966 new_type,
11967 xop0);
11968 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11969 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11970 new_type,
11971 xop1);
11972 /* Continue classified as tcc_binary. */
11973 goto expr_2;
11976 case VEC_COND_EXPR:
11978 enum gimplify_status r0, r1, r2;
11980 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11981 post_p, is_gimple_condexpr, fb_rvalue);
11982 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11983 post_p, is_gimple_val, fb_rvalue);
11984 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11985 post_p, is_gimple_val, fb_rvalue);
11987 ret = MIN (MIN (r0, r1), r2);
11988 recalculate_side_effects (*expr_p);
11990 break;
11992 case FMA_EXPR:
11993 case VEC_PERM_EXPR:
11994 /* Classified as tcc_expression. */
11995 goto expr_3;
11997 case BIT_INSERT_EXPR:
11998 /* Argument 3 is a constant. */
11999 goto expr_2;
12001 case POINTER_PLUS_EXPR:
12003 enum gimplify_status r0, r1;
12004 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12005 post_p, is_gimple_val, fb_rvalue);
12006 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12007 post_p, is_gimple_val, fb_rvalue);
12008 recalculate_side_effects (*expr_p);
12009 ret = MIN (r0, r1);
12010 break;
12013 default:
12014 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12016 case tcc_comparison:
12017 /* Handle comparison of objects of non scalar mode aggregates
12018 with a call to memcmp. It would be nice to only have to do
12019 this for variable-sized objects, but then we'd have to allow
12020 the same nest of reference nodes we allow for MODIFY_EXPR and
12021 that's too complex.
12023 Compare scalar mode aggregates as scalar mode values. Using
12024 memcmp for them would be very inefficient at best, and is
12025 plain wrong if bitfields are involved. */
12027 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12029 /* Vector comparisons need no boolification. */
12030 if (TREE_CODE (type) == VECTOR_TYPE)
12031 goto expr_2;
12032 else if (!AGGREGATE_TYPE_P (type))
12034 tree org_type = TREE_TYPE (*expr_p);
12035 *expr_p = gimple_boolify (*expr_p);
12036 if (!useless_type_conversion_p (org_type,
12037 TREE_TYPE (*expr_p)))
12039 *expr_p = fold_convert_loc (input_location,
12040 org_type, *expr_p);
12041 ret = GS_OK;
12043 else
12044 goto expr_2;
12046 else if (TYPE_MODE (type) != BLKmode)
12047 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12048 else
12049 ret = gimplify_variable_sized_compare (expr_p);
12051 break;
12054 /* If *EXPR_P does not need to be special-cased, handle it
12055 according to its class. */
12056 case tcc_unary:
12057 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12058 post_p, is_gimple_val, fb_rvalue);
12059 break;
12061 case tcc_binary:
12062 expr_2:
12064 enum gimplify_status r0, r1;
12066 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12067 post_p, is_gimple_val, fb_rvalue);
12068 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12069 post_p, is_gimple_val, fb_rvalue);
12071 ret = MIN (r0, r1);
12072 break;
12075 expr_3:
12077 enum gimplify_status r0, r1, r2;
12079 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12080 post_p, is_gimple_val, fb_rvalue);
12081 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12082 post_p, is_gimple_val, fb_rvalue);
12083 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12084 post_p, is_gimple_val, fb_rvalue);
12086 ret = MIN (MIN (r0, r1), r2);
12087 break;
12090 case tcc_declaration:
12091 case tcc_constant:
12092 ret = GS_ALL_DONE;
12093 goto dont_recalculate;
12095 default:
12096 gcc_unreachable ();
12099 recalculate_side_effects (*expr_p);
12101 dont_recalculate:
12102 break;
12105 gcc_assert (*expr_p || ret != GS_OK);
12107 while (ret == GS_OK);
12109 /* If we encountered an error_mark somewhere nested inside, either
12110 stub out the statement or propagate the error back out. */
12111 if (ret == GS_ERROR)
12113 if (is_statement)
12114 *expr_p = NULL;
12115 goto out;
12118 /* This was only valid as a return value from the langhook, which
12119 we handled. Make sure it doesn't escape from any other context. */
12120 gcc_assert (ret != GS_UNHANDLED);
12122 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12124 /* We aren't looking for a value, and we don't have a valid
12125 statement. If it doesn't have side-effects, throw it away.
12126 We can also get here with code such as "*&&L;", where L is
12127 a LABEL_DECL that is marked as FORCED_LABEL. */
12128 if (TREE_CODE (*expr_p) == LABEL_DECL
12129 || !TREE_SIDE_EFFECTS (*expr_p))
12130 *expr_p = NULL;
12131 else if (!TREE_THIS_VOLATILE (*expr_p))
12133 /* This is probably a _REF that contains something nested that
12134 has side effects. Recurse through the operands to find it. */
12135 enum tree_code code = TREE_CODE (*expr_p);
12137 switch (code)
12139 case COMPONENT_REF:
12140 case REALPART_EXPR:
12141 case IMAGPART_EXPR:
12142 case VIEW_CONVERT_EXPR:
12143 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12144 gimple_test_f, fallback);
12145 break;
12147 case ARRAY_REF:
12148 case ARRAY_RANGE_REF:
12149 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12150 gimple_test_f, fallback);
12151 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12152 gimple_test_f, fallback);
12153 break;
12155 default:
12156 /* Anything else with side-effects must be converted to
12157 a valid statement before we get here. */
12158 gcc_unreachable ();
12161 *expr_p = NULL;
12163 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12164 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12166 /* Historically, the compiler has treated a bare reference
12167 to a non-BLKmode volatile lvalue as forcing a load. */
12168 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12170 /* Normally, we do not want to create a temporary for a
12171 TREE_ADDRESSABLE type because such a type should not be
12172 copied by bitwise-assignment. However, we make an
12173 exception here, as all we are doing here is ensuring that
12174 we read the bytes that make up the type. We use
12175 create_tmp_var_raw because create_tmp_var will abort when
12176 given a TREE_ADDRESSABLE type. */
12177 tree tmp = create_tmp_var_raw (type, "vol");
12178 gimple_add_tmp_var (tmp);
12179 gimplify_assign (tmp, *expr_p, pre_p);
12180 *expr_p = NULL;
12182 else
12183 /* We can't do anything useful with a volatile reference to
12184 an incomplete type, so just throw it away. Likewise for
12185 a BLKmode type, since any implicit inner load should
12186 already have been turned into an explicit one by the
12187 gimplification process. */
12188 *expr_p = NULL;
12191 /* If we are gimplifying at the statement level, we're done. Tack
12192 everything together and return. */
12193 if (fallback == fb_none || is_statement)
12195 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12196 it out for GC to reclaim it. */
12197 *expr_p = NULL_TREE;
12199 if (!gimple_seq_empty_p (internal_pre)
12200 || !gimple_seq_empty_p (internal_post))
12202 gimplify_seq_add_seq (&internal_pre, internal_post);
12203 gimplify_seq_add_seq (pre_p, internal_pre);
12206 /* The result of gimplifying *EXPR_P is going to be the last few
12207 statements in *PRE_P and *POST_P. Add location information
12208 to all the statements that were added by the gimplification
12209 helpers. */
12210 if (!gimple_seq_empty_p (*pre_p))
12211 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12213 if (!gimple_seq_empty_p (*post_p))
12214 annotate_all_with_location_after (*post_p, post_last_gsi,
12215 input_location);
12217 goto out;
12220 #ifdef ENABLE_GIMPLE_CHECKING
12221 if (*expr_p)
12223 enum tree_code code = TREE_CODE (*expr_p);
12224 /* These expressions should already be in gimple IR form. */
12225 gcc_assert (code != MODIFY_EXPR
12226 && code != ASM_EXPR
12227 && code != BIND_EXPR
12228 && code != CATCH_EXPR
12229 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12230 && code != EH_FILTER_EXPR
12231 && code != GOTO_EXPR
12232 && code != LABEL_EXPR
12233 && code != LOOP_EXPR
12234 && code != SWITCH_EXPR
12235 && code != TRY_FINALLY_EXPR
12236 && code != OACC_PARALLEL
12237 && code != OACC_KERNELS
12238 && code != OACC_DATA
12239 && code != OACC_HOST_DATA
12240 && code != OACC_DECLARE
12241 && code != OACC_UPDATE
12242 && code != OACC_ENTER_DATA
12243 && code != OACC_EXIT_DATA
12244 && code != OACC_CACHE
12245 && code != OMP_CRITICAL
12246 && code != OMP_FOR
12247 && code != OACC_LOOP
12248 && code != OMP_MASTER
12249 && code != OMP_TASKGROUP
12250 && code != OMP_ORDERED
12251 && code != OMP_PARALLEL
12252 && code != OMP_SECTIONS
12253 && code != OMP_SECTION
12254 && code != OMP_SINGLE);
12256 #endif
12258 /* Otherwise we're gimplifying a subexpression, so the resulting
12259 value is interesting. If it's a valid operand that matches
12260 GIMPLE_TEST_F, we're done. Unless we are handling some
12261 post-effects internally; if that's the case, we need to copy into
12262 a temporary before adding the post-effects to POST_P. */
12263 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12264 goto out;
12266 /* Otherwise, we need to create a new temporary for the gimplified
12267 expression. */
12269 /* We can't return an lvalue if we have an internal postqueue. The
12270 object the lvalue refers to would (probably) be modified by the
12271 postqueue; we need to copy the value out first, which means an
12272 rvalue. */
12273 if ((fallback & fb_lvalue)
12274 && gimple_seq_empty_p (internal_post)
12275 && is_gimple_addressable (*expr_p))
12277 /* An lvalue will do. Take the address of the expression, store it
12278 in a temporary, and replace the expression with an INDIRECT_REF of
12279 that temporary. */
12280 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12281 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12282 *expr_p = build_simple_mem_ref (tmp);
12284 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12286 /* An rvalue will do. Assign the gimplified expression into a
12287 new temporary TMP and replace the original expression with
12288 TMP. First, make sure that the expression has a type so that
12289 it can be assigned into a temporary. */
12290 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12291 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12293 else
12295 #ifdef ENABLE_GIMPLE_CHECKING
12296 if (!(fallback & fb_mayfail))
12298 fprintf (stderr, "gimplification failed:\n");
12299 print_generic_expr (stderr, *expr_p);
12300 debug_tree (*expr_p);
12301 internal_error ("gimplification failed");
12303 #endif
12304 gcc_assert (fallback & fb_mayfail);
12306 /* If this is an asm statement, and the user asked for the
12307 impossible, don't die. Fail and let gimplify_asm_expr
12308 issue an error. */
12309 ret = GS_ERROR;
12310 goto out;
12313 /* Make sure the temporary matches our predicate. */
12314 gcc_assert ((*gimple_test_f) (*expr_p));
12316 if (!gimple_seq_empty_p (internal_post))
12318 annotate_all_with_location (internal_post, input_location);
12319 gimplify_seq_add_seq (pre_p, internal_post);
12322 out:
12323 input_location = saved_location;
12324 return ret;
12327 /* Like gimplify_expr but make sure the gimplified result is not itself
12328 a SSA name (but a decl if it were). Temporaries required by
12329 evaluating *EXPR_P may be still SSA names. */
12331 static enum gimplify_status
12332 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12333 bool (*gimple_test_f) (tree), fallback_t fallback,
12334 bool allow_ssa)
12336 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12337 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12338 gimple_test_f, fallback);
12339 if (! allow_ssa
12340 && TREE_CODE (*expr_p) == SSA_NAME)
12342 tree name = *expr_p;
12343 if (was_ssa_name_p)
12344 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12345 else
12347 /* Avoid the extra copy if possible. */
12348 *expr_p = create_tmp_reg (TREE_TYPE (name));
12349 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12350 release_ssa_name (name);
12353 return ret;
12356 /* Look through TYPE for variable-sized objects and gimplify each such
12357 size that we find. Add to LIST_P any statements generated. */
12359 void
12360 gimplify_type_sizes (tree type, gimple_seq *list_p)
12362 tree field, t;
12364 if (type == NULL || type == error_mark_node)
12365 return;
12367 /* We first do the main variant, then copy into any other variants. */
12368 type = TYPE_MAIN_VARIANT (type);
12370 /* Avoid infinite recursion. */
12371 if (TYPE_SIZES_GIMPLIFIED (type))
12372 return;
12374 TYPE_SIZES_GIMPLIFIED (type) = 1;
12376 switch (TREE_CODE (type))
12378 case INTEGER_TYPE:
12379 case ENUMERAL_TYPE:
12380 case BOOLEAN_TYPE:
12381 case REAL_TYPE:
12382 case FIXED_POINT_TYPE:
12383 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12384 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12386 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12388 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12389 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12391 break;
12393 case ARRAY_TYPE:
12394 /* These types may not have declarations, so handle them here. */
12395 gimplify_type_sizes (TREE_TYPE (type), list_p);
12396 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12397 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12398 with assigned stack slots, for -O1+ -g they should be tracked
12399 by VTA. */
12400 if (!(TYPE_NAME (type)
12401 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12402 && DECL_IGNORED_P (TYPE_NAME (type)))
12403 && TYPE_DOMAIN (type)
12404 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12406 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12407 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12408 DECL_IGNORED_P (t) = 0;
12409 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12410 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12411 DECL_IGNORED_P (t) = 0;
12413 break;
12415 case RECORD_TYPE:
12416 case UNION_TYPE:
12417 case QUAL_UNION_TYPE:
12418 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12419 if (TREE_CODE (field) == FIELD_DECL)
12421 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12422 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12423 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12424 gimplify_type_sizes (TREE_TYPE (field), list_p);
12426 break;
12428 case POINTER_TYPE:
12429 case REFERENCE_TYPE:
12430 /* We used to recurse on the pointed-to type here, which turned out to
12431 be incorrect because its definition might refer to variables not
12432 yet initialized at this point if a forward declaration is involved.
12434 It was actually useful for anonymous pointed-to types to ensure
12435 that the sizes evaluation dominates every possible later use of the
12436 values. Restricting to such types here would be safe since there
12437 is no possible forward declaration around, but would introduce an
12438 undesirable middle-end semantic to anonymity. We then defer to
12439 front-ends the responsibility of ensuring that the sizes are
12440 evaluated both early and late enough, e.g. by attaching artificial
12441 type declarations to the tree. */
12442 break;
12444 default:
12445 break;
12448 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12449 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12451 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12453 TYPE_SIZE (t) = TYPE_SIZE (type);
12454 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12455 TYPE_SIZES_GIMPLIFIED (t) = 1;
12459 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12460 a size or position, has had all of its SAVE_EXPRs evaluated.
12461 We add any required statements to *STMT_P. */
12463 void
12464 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12466 tree expr = *expr_p;
12468 /* We don't do anything if the value isn't there, is constant, or contains
12469 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12470 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12471 will want to replace it with a new variable, but that will cause problems
12472 if this type is from outside the function. It's OK to have that here. */
12473 if (is_gimple_sizepos (expr))
12474 return;
12476 *expr_p = unshare_expr (expr);
12478 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12479 if the def vanishes. */
12480 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12483 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12484 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12485 is true, also gimplify the parameters. */
12487 gbind *
12488 gimplify_body (tree fndecl, bool do_parms)
12490 location_t saved_location = input_location;
12491 gimple_seq parm_stmts, seq;
12492 gimple *outer_stmt;
12493 gbind *outer_bind;
12494 struct cgraph_node *cgn;
12496 timevar_push (TV_TREE_GIMPLIFY);
12498 init_tree_ssa (cfun);
12500 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12501 gimplification. */
12502 default_rtl_profile ();
12504 gcc_assert (gimplify_ctxp == NULL);
12505 push_gimplify_context (true);
12507 if (flag_openacc || flag_openmp)
12509 gcc_assert (gimplify_omp_ctxp == NULL);
12510 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12511 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12514 /* Unshare most shared trees in the body and in that of any nested functions.
12515 It would seem we don't have to do this for nested functions because
12516 they are supposed to be output and then the outer function gimplified
12517 first, but the g++ front end doesn't always do it that way. */
12518 unshare_body (fndecl);
12519 unvisit_body (fndecl);
12521 cgn = cgraph_node::get (fndecl);
12522 if (cgn && cgn->origin)
12523 nonlocal_vlas = new hash_set<tree>;
12525 /* Make sure input_location isn't set to something weird. */
12526 input_location = DECL_SOURCE_LOCATION (fndecl);
12528 /* Resolve callee-copies. This has to be done before processing
12529 the body so that DECL_VALUE_EXPR gets processed correctly. */
12530 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12532 /* Gimplify the function's body. */
12533 seq = NULL;
12534 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12535 outer_stmt = gimple_seq_first_stmt (seq);
12536 if (!outer_stmt)
12538 outer_stmt = gimple_build_nop ();
12539 gimplify_seq_add_stmt (&seq, outer_stmt);
12542 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12543 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12544 if (gimple_code (outer_stmt) == GIMPLE_BIND
12545 && gimple_seq_first (seq) == gimple_seq_last (seq))
12546 outer_bind = as_a <gbind *> (outer_stmt);
12547 else
12548 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12550 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12552 /* If we had callee-copies statements, insert them at the beginning
12553 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12554 if (!gimple_seq_empty_p (parm_stmts))
12556 tree parm;
12558 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12559 gimple_bind_set_body (outer_bind, parm_stmts);
12561 for (parm = DECL_ARGUMENTS (current_function_decl);
12562 parm; parm = DECL_CHAIN (parm))
12563 if (DECL_HAS_VALUE_EXPR_P (parm))
12565 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12566 DECL_IGNORED_P (parm) = 0;
12570 if (nonlocal_vlas)
12572 if (nonlocal_vla_vars)
12574 /* tree-nested.c may later on call declare_vars (..., true);
12575 which relies on BLOCK_VARS chain to be the tail of the
12576 gimple_bind_vars chain. Ensure we don't violate that
12577 assumption. */
12578 if (gimple_bind_block (outer_bind)
12579 == DECL_INITIAL (current_function_decl))
12580 declare_vars (nonlocal_vla_vars, outer_bind, true);
12581 else
12582 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12583 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12584 nonlocal_vla_vars);
12585 nonlocal_vla_vars = NULL_TREE;
12587 delete nonlocal_vlas;
12588 nonlocal_vlas = NULL;
12591 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12592 && gimplify_omp_ctxp)
12594 delete_omp_context (gimplify_omp_ctxp);
12595 gimplify_omp_ctxp = NULL;
12598 pop_gimplify_context (outer_bind);
12599 gcc_assert (gimplify_ctxp == NULL);
12601 if (flag_checking && !seen_error ())
12602 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12604 timevar_pop (TV_TREE_GIMPLIFY);
12605 input_location = saved_location;
12607 return outer_bind;
12610 typedef char *char_p; /* For DEF_VEC_P. */
12612 /* Return whether we should exclude FNDECL from instrumentation. */
12614 static bool
12615 flag_instrument_functions_exclude_p (tree fndecl)
12617 vec<char_p> *v;
12619 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12620 if (v && v->length () > 0)
12622 const char *name;
12623 int i;
12624 char *s;
12626 name = lang_hooks.decl_printable_name (fndecl, 0);
12627 FOR_EACH_VEC_ELT (*v, i, s)
12628 if (strstr (name, s) != NULL)
12629 return true;
12632 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12633 if (v && v->length () > 0)
12635 const char *name;
12636 int i;
12637 char *s;
12639 name = DECL_SOURCE_FILE (fndecl);
12640 FOR_EACH_VEC_ELT (*v, i, s)
12641 if (strstr (name, s) != NULL)
12642 return true;
12645 return false;
12648 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12649 node for the function we want to gimplify.
12651 Return the sequence of GIMPLE statements corresponding to the body
12652 of FNDECL. */
12654 void
12655 gimplify_function_tree (tree fndecl)
12657 tree parm, ret;
12658 gimple_seq seq;
12659 gbind *bind;
12661 gcc_assert (!gimple_body (fndecl));
12663 if (DECL_STRUCT_FUNCTION (fndecl))
12664 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12665 else
12666 push_struct_function (fndecl);
12668 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12669 if necessary. */
12670 cfun->curr_properties |= PROP_gimple_lva;
12672 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12674 /* Preliminarily mark non-addressed complex variables as eligible
12675 for promotion to gimple registers. We'll transform their uses
12676 as we find them. */
12677 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12678 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12679 && !TREE_THIS_VOLATILE (parm)
12680 && !needs_to_live_in_memory (parm))
12681 DECL_GIMPLE_REG_P (parm) = 1;
12684 ret = DECL_RESULT (fndecl);
12685 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12686 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12687 && !needs_to_live_in_memory (ret))
12688 DECL_GIMPLE_REG_P (ret) = 1;
12690 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
12691 asan_poisoned_variables = new hash_set<tree> ();
12692 bind = gimplify_body (fndecl, true);
12693 if (asan_poisoned_variables)
12695 delete asan_poisoned_variables;
12696 asan_poisoned_variables = NULL;
12699 /* The tree body of the function is no longer needed, replace it
12700 with the new GIMPLE body. */
12701 seq = NULL;
12702 gimple_seq_add_stmt (&seq, bind);
12703 gimple_set_body (fndecl, seq);
12705 /* If we're instrumenting function entry/exit, then prepend the call to
12706 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12707 catch the exit hook. */
12708 /* ??? Add some way to ignore exceptions for this TFE. */
12709 if (flag_instrument_function_entry_exit
12710 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12711 /* Do not instrument extern inline functions. */
12712 && !(DECL_DECLARED_INLINE_P (fndecl)
12713 && DECL_EXTERNAL (fndecl)
12714 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12715 && !flag_instrument_functions_exclude_p (fndecl))
12717 tree x;
12718 gbind *new_bind;
12719 gimple *tf;
12720 gimple_seq cleanup = NULL, body = NULL;
12721 tree tmp_var;
12722 gcall *call;
12724 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12725 call = gimple_build_call (x, 1, integer_zero_node);
12726 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12727 gimple_call_set_lhs (call, tmp_var);
12728 gimplify_seq_add_stmt (&cleanup, call);
12729 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12730 call = gimple_build_call (x, 2,
12731 build_fold_addr_expr (current_function_decl),
12732 tmp_var);
12733 gimplify_seq_add_stmt (&cleanup, call);
12734 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12736 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12737 call = gimple_build_call (x, 1, integer_zero_node);
12738 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12739 gimple_call_set_lhs (call, tmp_var);
12740 gimplify_seq_add_stmt (&body, call);
12741 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12742 call = gimple_build_call (x, 2,
12743 build_fold_addr_expr (current_function_decl),
12744 tmp_var);
12745 gimplify_seq_add_stmt (&body, call);
12746 gimplify_seq_add_stmt (&body, tf);
12747 new_bind = gimple_build_bind (NULL, body, NULL);
12749 /* Replace the current function body with the body
12750 wrapped in the try/finally TF. */
12751 seq = NULL;
12752 gimple_seq_add_stmt (&seq, new_bind);
12753 gimple_set_body (fndecl, seq);
12754 bind = new_bind;
12757 if (sanitize_flags_p (SANITIZE_THREAD))
12759 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12760 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12761 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12762 /* Replace the current function body with the body
12763 wrapped in the try/finally TF. */
12764 seq = NULL;
12765 gimple_seq_add_stmt (&seq, new_bind);
12766 gimple_set_body (fndecl, seq);
12769 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12770 cfun->curr_properties |= PROP_gimple_any;
12772 pop_cfun ();
12774 dump_function (TDI_gimple, fndecl);
12777 /* Return a dummy expression of type TYPE in order to keep going after an
12778 error. */
12780 static tree
12781 dummy_object (tree type)
12783 tree t = build_int_cst (build_pointer_type (type), 0);
12784 return build2 (MEM_REF, type, t, t);
12787 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12788 builtin function, but a very special sort of operator. */
12790 enum gimplify_status
12791 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12792 gimple_seq *post_p ATTRIBUTE_UNUSED)
12794 tree promoted_type, have_va_type;
12795 tree valist = TREE_OPERAND (*expr_p, 0);
12796 tree type = TREE_TYPE (*expr_p);
12797 tree t, tag, aptag;
12798 location_t loc = EXPR_LOCATION (*expr_p);
12800 /* Verify that valist is of the proper type. */
12801 have_va_type = TREE_TYPE (valist);
12802 if (have_va_type == error_mark_node)
12803 return GS_ERROR;
12804 have_va_type = targetm.canonical_va_list_type (have_va_type);
12805 if (have_va_type == NULL_TREE
12806 && POINTER_TYPE_P (TREE_TYPE (valist)))
12807 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12808 have_va_type
12809 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12810 gcc_assert (have_va_type != NULL_TREE);
12812 /* Generate a diagnostic for requesting data of a type that cannot
12813 be passed through `...' due to type promotion at the call site. */
12814 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12815 != type)
12817 static bool gave_help;
12818 bool warned;
12819 /* Use the expansion point to handle cases such as passing bool (defined
12820 in a system header) through `...'. */
12821 source_location xloc
12822 = expansion_point_location_if_in_system_header (loc);
12824 /* Unfortunately, this is merely undefined, rather than a constraint
12825 violation, so we cannot make this an error. If this call is never
12826 executed, the program is still strictly conforming. */
12827 warned = warning_at (xloc, 0,
12828 "%qT is promoted to %qT when passed through %<...%>",
12829 type, promoted_type);
12830 if (!gave_help && warned)
12832 gave_help = true;
12833 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12834 promoted_type, type);
12837 /* We can, however, treat "undefined" any way we please.
12838 Call abort to encourage the user to fix the program. */
12839 if (warned)
12840 inform (xloc, "if this code is reached, the program will abort");
12841 /* Before the abort, allow the evaluation of the va_list
12842 expression to exit or longjmp. */
12843 gimplify_and_add (valist, pre_p);
12844 t = build_call_expr_loc (loc,
12845 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12846 gimplify_and_add (t, pre_p);
12848 /* This is dead code, but go ahead and finish so that the
12849 mode of the result comes out right. */
12850 *expr_p = dummy_object (type);
12851 return GS_ALL_DONE;
12854 tag = build_int_cst (build_pointer_type (type), 0);
12855 aptag = build_int_cst (TREE_TYPE (valist), 0);
12857 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12858 valist, tag, aptag);
12860 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12861 needs to be expanded. */
12862 cfun->curr_properties &= ~PROP_gimple_lva;
12864 return GS_OK;
12867 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12869 DST/SRC are the destination and source respectively. You can pass
12870 ungimplified trees in DST or SRC, in which case they will be
12871 converted to a gimple operand if necessary.
12873 This function returns the newly created GIMPLE_ASSIGN tuple. */
12875 gimple *
12876 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12878 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12879 gimplify_and_add (t, seq_p);
12880 ggc_free (t);
12881 return gimple_seq_last_stmt (*seq_p);
12884 inline hashval_t
12885 gimplify_hasher::hash (const elt_t *p)
12887 tree t = p->val;
12888 return iterative_hash_expr (t, 0);
12891 inline bool
12892 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12894 tree t1 = p1->val;
12895 tree t2 = p2->val;
12896 enum tree_code code = TREE_CODE (t1);
12898 if (TREE_CODE (t2) != code
12899 || TREE_TYPE (t1) != TREE_TYPE (t2))
12900 return false;
12902 if (!operand_equal_p (t1, t2, 0))
12903 return false;
12905 /* Only allow them to compare equal if they also hash equal; otherwise
12906 results are nondeterminate, and we fail bootstrap comparison. */
12907 gcc_checking_assert (hash (p1) == hash (p2));
12909 return true;