Daily bump.
[official-gcc.git] / gcc / gimplify.c
blob2c2abd76c0896afc9305cb0f1d321beb76f6aa47
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
69 /* Hash set of poisoned variables in a bind expr. */
70 static hash_set<tree> *asan_poisoned_variables = NULL;
72 enum gimplify_omp_var_data
74 GOVD_SEEN = 1,
75 GOVD_EXPLICIT = 2,
76 GOVD_SHARED = 4,
77 GOVD_PRIVATE = 8,
78 GOVD_FIRSTPRIVATE = 16,
79 GOVD_LASTPRIVATE = 32,
80 GOVD_REDUCTION = 64,
81 GOVD_LOCAL = 128,
82 GOVD_MAP = 256,
83 GOVD_DEBUG_PRIVATE = 512,
84 GOVD_PRIVATE_OUTER_REF = 1024,
85 GOVD_LINEAR = 2048,
86 GOVD_ALIGNED = 4096,
88 /* Flag for GOVD_MAP: don't copy back. */
89 GOVD_MAP_TO_ONLY = 8192,
91 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
92 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
94 GOVD_MAP_0LEN_ARRAY = 32768,
96 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
97 GOVD_MAP_ALWAYS_TO = 65536,
99 /* Flag for shared vars that are or might be stored to in the region. */
100 GOVD_WRITTEN = 131072,
102 /* Flag for GOVD_MAP, if it is a forced mapping. */
103 GOVD_MAP_FORCE = 262144,
105 /* Flag for GOVD_MAP: must be present already. */
106 GOVD_MAP_FORCE_PRESENT = 524288,
108 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
109 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
110 | GOVD_LOCAL)
114 enum omp_region_type
116 ORT_WORKSHARE = 0x00,
117 ORT_SIMD = 0x01,
119 ORT_PARALLEL = 0x02,
120 ORT_COMBINED_PARALLEL = 0x03,
122 ORT_TASK = 0x04,
123 ORT_UNTIED_TASK = 0x05,
125 ORT_TEAMS = 0x08,
126 ORT_COMBINED_TEAMS = 0x09,
128 /* Data region. */
129 ORT_TARGET_DATA = 0x10,
131 /* Data region with offloading. */
132 ORT_TARGET = 0x20,
133 ORT_COMBINED_TARGET = 0x21,
135 /* OpenACC variants. */
136 ORT_ACC = 0x40, /* A generic OpenACC region. */
137 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
138 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
139 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
140 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
142 /* Dummy OpenMP region, used to disable expansion of
143 DECL_VALUE_EXPRs in taskloop pre body. */
144 ORT_NONE = 0x100
147 /* Gimplify hashtable helper. */
149 struct gimplify_hasher : free_ptr_hash <elt_t>
151 static inline hashval_t hash (const elt_t *);
152 static inline bool equal (const elt_t *, const elt_t *);
155 struct gimplify_ctx
157 struct gimplify_ctx *prev_context;
159 vec<gbind *> bind_expr_stack;
160 tree temps;
161 gimple_seq conditional_cleanups;
162 tree exit_label;
163 tree return_temp;
165 vec<tree> case_labels;
166 hash_set<tree> *live_switch_vars;
167 /* The formal temporary table. Should this be persistent? */
168 hash_table<gimplify_hasher> *temp_htab;
170 int conditions;
171 unsigned into_ssa : 1;
172 unsigned allow_rhs_cond_expr : 1;
173 unsigned in_cleanup_point_expr : 1;
174 unsigned keep_stack : 1;
175 unsigned save_stack : 1;
176 unsigned in_switch_expr : 1;
179 struct gimplify_omp_ctx
181 struct gimplify_omp_ctx *outer_context;
182 splay_tree variables;
183 hash_set<tree> *privatized_types;
184 /* Iteration variables in an OMP_FOR. */
185 vec<tree> loop_iter_var;
186 location_t location;
187 enum omp_clause_default_kind default_kind;
188 enum omp_region_type region_type;
189 bool combined_loop;
190 bool distribute;
191 bool target_map_scalars_firstprivate;
192 bool target_map_pointers_as_0len_arrays;
193 bool target_firstprivatize_array_bases;
196 static struct gimplify_ctx *gimplify_ctxp;
197 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
199 /* Forward declaration. */
200 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
201 static hash_map<tree, tree> *oacc_declare_returns;
202 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
203 bool (*) (tree), fallback_t, bool);
205 /* Shorter alias name for the above function for use in gimplify.c
206 only. */
208 static inline void
209 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
211 gimple_seq_add_stmt_without_update (seq_p, gs);
214 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
215 NULL, a new sequence is allocated. This function is
216 similar to gimple_seq_add_seq, but does not scan the operands.
217 During gimplification, we need to manipulate statement sequences
218 before the def/use vectors have been constructed. */
220 static void
221 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
223 gimple_stmt_iterator si;
225 if (src == NULL)
226 return;
228 si = gsi_last (*dst_p);
229 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
233 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
234 and popping gimplify contexts. */
236 static struct gimplify_ctx *ctx_pool = NULL;
238 /* Return a gimplify context struct from the pool. */
240 static inline struct gimplify_ctx *
241 ctx_alloc (void)
243 struct gimplify_ctx * c = ctx_pool;
245 if (c)
246 ctx_pool = c->prev_context;
247 else
248 c = XNEW (struct gimplify_ctx);
250 memset (c, '\0', sizeof (*c));
251 return c;
254 /* Put gimplify context C back into the pool. */
256 static inline void
257 ctx_free (struct gimplify_ctx *c)
259 c->prev_context = ctx_pool;
260 ctx_pool = c;
263 /* Free allocated ctx stack memory. */
265 void
266 free_gimplify_stack (void)
268 struct gimplify_ctx *c;
270 while ((c = ctx_pool))
272 ctx_pool = c->prev_context;
273 free (c);
278 /* Set up a context for the gimplifier. */
280 void
281 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
283 struct gimplify_ctx *c = ctx_alloc ();
285 c->prev_context = gimplify_ctxp;
286 gimplify_ctxp = c;
287 gimplify_ctxp->into_ssa = in_ssa;
288 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
291 /* Tear down a context for the gimplifier. If BODY is non-null, then
292 put the temporaries into the outer BIND_EXPR. Otherwise, put them
293 in the local_decls.
295 BODY is not a sequence, but the first tuple in a sequence. */
297 void
298 pop_gimplify_context (gimple *body)
300 struct gimplify_ctx *c = gimplify_ctxp;
302 gcc_assert (c
303 && (!c->bind_expr_stack.exists ()
304 || c->bind_expr_stack.is_empty ()));
305 c->bind_expr_stack.release ();
306 gimplify_ctxp = c->prev_context;
308 if (body)
309 declare_vars (c->temps, body, false);
310 else
311 record_vars (c->temps);
313 delete c->temp_htab;
314 c->temp_htab = NULL;
315 ctx_free (c);
318 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
320 static void
321 gimple_push_bind_expr (gbind *bind_stmt)
323 gimplify_ctxp->bind_expr_stack.reserve (8);
324 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
327 /* Pop the first element off the stack of bindings. */
329 static void
330 gimple_pop_bind_expr (void)
332 gimplify_ctxp->bind_expr_stack.pop ();
335 /* Return the first element of the stack of bindings. */
337 gbind *
338 gimple_current_bind_expr (void)
340 return gimplify_ctxp->bind_expr_stack.last ();
343 /* Return the stack of bindings created during gimplification. */
345 vec<gbind *>
346 gimple_bind_expr_stack (void)
348 return gimplify_ctxp->bind_expr_stack;
351 /* Return true iff there is a COND_EXPR between us and the innermost
352 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
354 static bool
355 gimple_conditional_context (void)
357 return gimplify_ctxp->conditions > 0;
360 /* Note that we've entered a COND_EXPR. */
362 static void
363 gimple_push_condition (void)
365 #ifdef ENABLE_GIMPLE_CHECKING
366 if (gimplify_ctxp->conditions == 0)
367 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
368 #endif
369 ++(gimplify_ctxp->conditions);
372 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
373 now, add any conditional cleanups we've seen to the prequeue. */
375 static void
376 gimple_pop_condition (gimple_seq *pre_p)
378 int conds = --(gimplify_ctxp->conditions);
380 gcc_assert (conds >= 0);
381 if (conds == 0)
383 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
384 gimplify_ctxp->conditional_cleanups = NULL;
388 /* A stable comparison routine for use with splay trees and DECLs. */
390 static int
391 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
393 tree a = (tree) xa;
394 tree b = (tree) xb;
396 return DECL_UID (a) - DECL_UID (b);
399 /* Create a new omp construct that deals with variable remapping. */
401 static struct gimplify_omp_ctx *
402 new_omp_context (enum omp_region_type region_type)
404 struct gimplify_omp_ctx *c;
406 c = XCNEW (struct gimplify_omp_ctx);
407 c->outer_context = gimplify_omp_ctxp;
408 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
409 c->privatized_types = new hash_set<tree>;
410 c->location = input_location;
411 c->region_type = region_type;
412 if ((region_type & ORT_TASK) == 0)
413 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
414 else
415 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
417 return c;
420 /* Destroy an omp construct that deals with variable remapping. */
422 static void
423 delete_omp_context (struct gimplify_omp_ctx *c)
425 splay_tree_delete (c->variables);
426 delete c->privatized_types;
427 c->loop_iter_var.release ();
428 XDELETE (c);
431 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
432 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
434 /* Both gimplify the statement T and append it to *SEQ_P. This function
435 behaves exactly as gimplify_stmt, but you don't have to pass T as a
436 reference. */
438 void
439 gimplify_and_add (tree t, gimple_seq *seq_p)
441 gimplify_stmt (&t, seq_p);
444 /* Gimplify statement T into sequence *SEQ_P, and return the first
445 tuple in the sequence of generated tuples for this statement.
446 Return NULL if gimplifying T produced no tuples. */
448 static gimple *
449 gimplify_and_return_first (tree t, gimple_seq *seq_p)
451 gimple_stmt_iterator last = gsi_last (*seq_p);
453 gimplify_and_add (t, seq_p);
455 if (!gsi_end_p (last))
457 gsi_next (&last);
458 return gsi_stmt (last);
460 else
461 return gimple_seq_first_stmt (*seq_p);
464 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
465 LHS, or for a call argument. */
467 static bool
468 is_gimple_mem_rhs (tree t)
470 /* If we're dealing with a renamable type, either source or dest must be
471 a renamed variable. */
472 if (is_gimple_reg_type (TREE_TYPE (t)))
473 return is_gimple_val (t);
474 else
475 return is_gimple_val (t) || is_gimple_lvalue (t);
478 /* Return true if T is a CALL_EXPR or an expression that can be
479 assigned to a temporary. Note that this predicate should only be
480 used during gimplification. See the rationale for this in
481 gimplify_modify_expr. */
483 static bool
484 is_gimple_reg_rhs_or_call (tree t)
486 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
487 || TREE_CODE (t) == CALL_EXPR);
490 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
491 this predicate should only be used during gimplification. See the
492 rationale for this in gimplify_modify_expr. */
494 static bool
495 is_gimple_mem_rhs_or_call (tree t)
497 /* If we're dealing with a renamable type, either source or dest must be
498 a renamed variable. */
499 if (is_gimple_reg_type (TREE_TYPE (t)))
500 return is_gimple_val (t);
501 else
502 return (is_gimple_val (t)
503 || is_gimple_lvalue (t)
504 || TREE_CLOBBER_P (t)
505 || TREE_CODE (t) == CALL_EXPR);
508 /* Create a temporary with a name derived from VAL. Subroutine of
509 lookup_tmp_var; nobody else should call this function. */
511 static inline tree
512 create_tmp_from_val (tree val)
514 /* Drop all qualifiers and address-space information from the value type. */
515 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
516 tree var = create_tmp_var (type, get_name (val));
517 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
518 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
519 DECL_GIMPLE_REG_P (var) = 1;
520 return var;
523 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
524 an existing expression temporary. */
526 static tree
527 lookup_tmp_var (tree val, bool is_formal)
529 tree ret;
531 /* If not optimizing, never really reuse a temporary. local-alloc
532 won't allocate any variable that is used in more than one basic
533 block, which means it will go into memory, causing much extra
534 work in reload and final and poorer code generation, outweighing
535 the extra memory allocation here. */
536 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
537 ret = create_tmp_from_val (val);
538 else
540 elt_t elt, *elt_p;
541 elt_t **slot;
543 elt.val = val;
544 if (!gimplify_ctxp->temp_htab)
545 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
546 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
547 if (*slot == NULL)
549 elt_p = XNEW (elt_t);
550 elt_p->val = val;
551 elt_p->temp = ret = create_tmp_from_val (val);
552 *slot = elt_p;
554 else
556 elt_p = *slot;
557 ret = elt_p->temp;
561 return ret;
564 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
566 static tree
567 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
568 bool is_formal, bool allow_ssa)
570 tree t, mod;
572 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
573 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
574 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
575 fb_rvalue);
577 if (allow_ssa
578 && gimplify_ctxp->into_ssa
579 && is_gimple_reg_type (TREE_TYPE (val)))
581 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
582 if (! gimple_in_ssa_p (cfun))
584 const char *name = get_name (val);
585 if (name)
586 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
589 else
590 t = lookup_tmp_var (val, is_formal);
592 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
594 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
596 /* gimplify_modify_expr might want to reduce this further. */
597 gimplify_and_add (mod, pre_p);
598 ggc_free (mod);
600 return t;
603 /* Return a formal temporary variable initialized with VAL. PRE_P is as
604 in gimplify_expr. Only use this function if:
606 1) The value of the unfactored expression represented by VAL will not
607 change between the initialization and use of the temporary, and
608 2) The temporary will not be otherwise modified.
610 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
611 and #2 means it is inappropriate for && temps.
613 For other cases, use get_initialized_tmp_var instead. */
615 tree
616 get_formal_tmp_var (tree val, gimple_seq *pre_p)
618 return internal_get_tmp_var (val, pre_p, NULL, true, true);
621 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
622 are as in gimplify_expr. */
624 tree
625 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
626 bool allow_ssa)
628 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
631 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
632 generate debug info for them; otherwise don't. */
634 void
635 declare_vars (tree vars, gimple *gs, bool debug_info)
637 tree last = vars;
638 if (last)
640 tree temps, block;
642 gbind *scope = as_a <gbind *> (gs);
644 temps = nreverse (last);
646 block = gimple_bind_block (scope);
647 gcc_assert (!block || TREE_CODE (block) == BLOCK);
648 if (!block || !debug_info)
650 DECL_CHAIN (last) = gimple_bind_vars (scope);
651 gimple_bind_set_vars (scope, temps);
653 else
655 /* We need to attach the nodes both to the BIND_EXPR and to its
656 associated BLOCK for debugging purposes. The key point here
657 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
658 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
659 if (BLOCK_VARS (block))
660 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
661 else
663 gimple_bind_set_vars (scope,
664 chainon (gimple_bind_vars (scope), temps));
665 BLOCK_VARS (block) = temps;
671 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
672 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
673 no such upper bound can be obtained. */
675 static void
676 force_constant_size (tree var)
678 /* The only attempt we make is by querying the maximum size of objects
679 of the variable's type. */
681 HOST_WIDE_INT max_size;
683 gcc_assert (VAR_P (var));
685 max_size = max_int_size_in_bytes (TREE_TYPE (var));
687 gcc_assert (max_size >= 0);
689 DECL_SIZE_UNIT (var)
690 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
691 DECL_SIZE (var)
692 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
695 /* Push the temporary variable TMP into the current binding. */
697 void
698 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
700 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
702 /* Later processing assumes that the object size is constant, which might
703 not be true at this point. Force the use of a constant upper bound in
704 this case. */
705 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
706 force_constant_size (tmp);
708 DECL_CONTEXT (tmp) = fn->decl;
709 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
711 record_vars_into (tmp, fn->decl);
714 /* Push the temporary variable TMP into the current binding. */
716 void
717 gimple_add_tmp_var (tree tmp)
719 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
721 /* Later processing assumes that the object size is constant, which might
722 not be true at this point. Force the use of a constant upper bound in
723 this case. */
724 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
725 force_constant_size (tmp);
727 DECL_CONTEXT (tmp) = current_function_decl;
728 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
730 if (gimplify_ctxp)
732 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
733 gimplify_ctxp->temps = tmp;
735 /* Mark temporaries local within the nearest enclosing parallel. */
736 if (gimplify_omp_ctxp)
738 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
739 while (ctx
740 && (ctx->region_type == ORT_WORKSHARE
741 || ctx->region_type == ORT_SIMD
742 || ctx->region_type == ORT_ACC))
743 ctx = ctx->outer_context;
744 if (ctx)
745 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
748 else if (cfun)
749 record_vars (tmp);
750 else
752 gimple_seq body_seq;
754 /* This case is for nested functions. We need to expose the locals
755 they create. */
756 body_seq = gimple_body (current_function_decl);
757 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
763 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
764 nodes that are referenced more than once in GENERIC functions. This is
765 necessary because gimplification (translation into GIMPLE) is performed
766 by modifying tree nodes in-place, so gimplication of a shared node in a
767 first context could generate an invalid GIMPLE form in a second context.
769 This is achieved with a simple mark/copy/unmark algorithm that walks the
770 GENERIC representation top-down, marks nodes with TREE_VISITED the first
771 time it encounters them, duplicates them if they already have TREE_VISITED
772 set, and finally removes the TREE_VISITED marks it has set.
774 The algorithm works only at the function level, i.e. it generates a GENERIC
775 representation of a function with no nodes shared within the function when
776 passed a GENERIC function (except for nodes that are allowed to be shared).
778 At the global level, it is also necessary to unshare tree nodes that are
779 referenced in more than one function, for the same aforementioned reason.
780 This requires some cooperation from the front-end. There are 2 strategies:
782 1. Manual unsharing. The front-end needs to call unshare_expr on every
783 expression that might end up being shared across functions.
785 2. Deep unsharing. This is an extension of regular unsharing. Instead
786 of calling unshare_expr on expressions that might be shared across
787 functions, the front-end pre-marks them with TREE_VISITED. This will
788 ensure that they are unshared on the first reference within functions
789 when the regular unsharing algorithm runs. The counterpart is that
790 this algorithm must look deeper than for manual unsharing, which is
791 specified by LANG_HOOKS_DEEP_UNSHARING.
793 If there are only few specific cases of node sharing across functions, it is
794 probably easier for a front-end to unshare the expressions manually. On the
795 contrary, if the expressions generated at the global level are as widespread
796 as expressions generated within functions, deep unsharing is very likely the
797 way to go. */
799 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
800 These nodes model computations that must be done once. If we were to
801 unshare something like SAVE_EXPR(i++), the gimplification process would
802 create wrong code. However, if DATA is non-null, it must hold a pointer
803 set that is used to unshare the subtrees of these nodes. */
805 static tree
806 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
808 tree t = *tp;
809 enum tree_code code = TREE_CODE (t);
811 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
812 copy their subtrees if we can make sure to do it only once. */
813 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
815 if (data && !((hash_set<tree> *)data)->add (t))
817 else
818 *walk_subtrees = 0;
821 /* Stop at types, decls, constants like copy_tree_r. */
822 else if (TREE_CODE_CLASS (code) == tcc_type
823 || TREE_CODE_CLASS (code) == tcc_declaration
824 || TREE_CODE_CLASS (code) == tcc_constant)
825 *walk_subtrees = 0;
827 /* Cope with the statement expression extension. */
828 else if (code == STATEMENT_LIST)
831 /* Leave the bulk of the work to copy_tree_r itself. */
832 else
833 copy_tree_r (tp, walk_subtrees, NULL);
835 return NULL_TREE;
838 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
839 If *TP has been visited already, then *TP is deeply copied by calling
840 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
842 static tree
843 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
845 tree t = *tp;
846 enum tree_code code = TREE_CODE (t);
848 /* Skip types, decls, and constants. But we do want to look at their
849 types and the bounds of types. Mark them as visited so we properly
850 unmark their subtrees on the unmark pass. If we've already seen them,
851 don't look down further. */
852 if (TREE_CODE_CLASS (code) == tcc_type
853 || TREE_CODE_CLASS (code) == tcc_declaration
854 || TREE_CODE_CLASS (code) == tcc_constant)
856 if (TREE_VISITED (t))
857 *walk_subtrees = 0;
858 else
859 TREE_VISITED (t) = 1;
862 /* If this node has been visited already, unshare it and don't look
863 any deeper. */
864 else if (TREE_VISITED (t))
866 walk_tree (tp, mostly_copy_tree_r, data, NULL);
867 *walk_subtrees = 0;
870 /* Otherwise, mark the node as visited and keep looking. */
871 else
872 TREE_VISITED (t) = 1;
874 return NULL_TREE;
877 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
878 copy_if_shared_r callback unmodified. */
880 static inline void
881 copy_if_shared (tree *tp, void *data)
883 walk_tree (tp, copy_if_shared_r, data, NULL);
886 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
887 any nested functions. */
889 static void
890 unshare_body (tree fndecl)
892 struct cgraph_node *cgn = cgraph_node::get (fndecl);
893 /* If the language requires deep unsharing, we need a pointer set to make
894 sure we don't repeatedly unshare subtrees of unshareable nodes. */
895 hash_set<tree> *visited
896 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
898 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
899 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
900 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
902 delete visited;
904 if (cgn)
905 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
906 unshare_body (cgn->decl);
909 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
910 Subtrees are walked until the first unvisited node is encountered. */
912 static tree
913 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
915 tree t = *tp;
917 /* If this node has been visited, unmark it and keep looking. */
918 if (TREE_VISITED (t))
919 TREE_VISITED (t) = 0;
921 /* Otherwise, don't look any deeper. */
922 else
923 *walk_subtrees = 0;
925 return NULL_TREE;
928 /* Unmark the visited trees rooted at *TP. */
930 static inline void
931 unmark_visited (tree *tp)
933 walk_tree (tp, unmark_visited_r, NULL, NULL);
936 /* Likewise, but mark all trees as not visited. */
938 static void
939 unvisit_body (tree fndecl)
941 struct cgraph_node *cgn = cgraph_node::get (fndecl);
943 unmark_visited (&DECL_SAVED_TREE (fndecl));
944 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
945 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
947 if (cgn)
948 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
949 unvisit_body (cgn->decl);
952 /* Unconditionally make an unshared copy of EXPR. This is used when using
953 stored expressions which span multiple functions, such as BINFO_VTABLE,
954 as the normal unsharing process can't tell that they're shared. */
956 tree
957 unshare_expr (tree expr)
959 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
960 return expr;
963 /* Worker for unshare_expr_without_location. */
965 static tree
966 prune_expr_location (tree *tp, int *walk_subtrees, void *)
968 if (EXPR_P (*tp))
969 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
970 else
971 *walk_subtrees = 0;
972 return NULL_TREE;
975 /* Similar to unshare_expr but also prune all expression locations
976 from EXPR. */
978 tree
979 unshare_expr_without_location (tree expr)
981 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
982 if (EXPR_P (expr))
983 walk_tree (&expr, prune_expr_location, NULL, NULL);
984 return expr;
987 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
988 contain statements and have a value. Assign its value to a temporary
989 and give it void_type_node. Return the temporary, or NULL_TREE if
990 WRAPPER was already void. */
992 tree
993 voidify_wrapper_expr (tree wrapper, tree temp)
995 tree type = TREE_TYPE (wrapper);
996 if (type && !VOID_TYPE_P (type))
998 tree *p;
1000 /* Set p to point to the body of the wrapper. Loop until we find
1001 something that isn't a wrapper. */
1002 for (p = &wrapper; p && *p; )
1004 switch (TREE_CODE (*p))
1006 case BIND_EXPR:
1007 TREE_SIDE_EFFECTS (*p) = 1;
1008 TREE_TYPE (*p) = void_type_node;
1009 /* For a BIND_EXPR, the body is operand 1. */
1010 p = &BIND_EXPR_BODY (*p);
1011 break;
1013 case CLEANUP_POINT_EXPR:
1014 case TRY_FINALLY_EXPR:
1015 case TRY_CATCH_EXPR:
1016 TREE_SIDE_EFFECTS (*p) = 1;
1017 TREE_TYPE (*p) = void_type_node;
1018 p = &TREE_OPERAND (*p, 0);
1019 break;
1021 case STATEMENT_LIST:
1023 tree_stmt_iterator i = tsi_last (*p);
1024 TREE_SIDE_EFFECTS (*p) = 1;
1025 TREE_TYPE (*p) = void_type_node;
1026 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1028 break;
1030 case COMPOUND_EXPR:
1031 /* Advance to the last statement. Set all container types to
1032 void. */
1033 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1035 TREE_SIDE_EFFECTS (*p) = 1;
1036 TREE_TYPE (*p) = void_type_node;
1038 break;
1040 case TRANSACTION_EXPR:
1041 TREE_SIDE_EFFECTS (*p) = 1;
1042 TREE_TYPE (*p) = void_type_node;
1043 p = &TRANSACTION_EXPR_BODY (*p);
1044 break;
1046 default:
1047 /* Assume that any tree upon which voidify_wrapper_expr is
1048 directly called is a wrapper, and that its body is op0. */
1049 if (p == &wrapper)
1051 TREE_SIDE_EFFECTS (*p) = 1;
1052 TREE_TYPE (*p) = void_type_node;
1053 p = &TREE_OPERAND (*p, 0);
1054 break;
1056 goto out;
1060 out:
1061 if (p == NULL || IS_EMPTY_STMT (*p))
1062 temp = NULL_TREE;
1063 else if (temp)
1065 /* The wrapper is on the RHS of an assignment that we're pushing
1066 down. */
1067 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1068 || TREE_CODE (temp) == MODIFY_EXPR);
1069 TREE_OPERAND (temp, 1) = *p;
1070 *p = temp;
1072 else
1074 temp = create_tmp_var (type, "retval");
1075 *p = build2 (INIT_EXPR, type, temp, *p);
1078 return temp;
1081 return NULL_TREE;
1084 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1085 a temporary through which they communicate. */
1087 static void
1088 build_stack_save_restore (gcall **save, gcall **restore)
1090 tree tmp_var;
1092 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1093 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1094 gimple_call_set_lhs (*save, tmp_var);
1096 *restore
1097 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1098 1, tmp_var);
1101 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1103 static tree
1104 build_asan_poison_call_expr (tree decl)
1106 /* Do not poison variables that have size equal to zero. */
1107 tree unit_size = DECL_SIZE_UNIT (decl);
1108 if (zerop (unit_size))
1109 return NULL_TREE;
1111 tree base = build_fold_addr_expr (decl);
1113 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1114 void_type_node, 3,
1115 build_int_cst (integer_type_node,
1116 ASAN_MARK_POISON),
1117 base, unit_size);
1120 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1121 on POISON flag, shadow memory of a DECL variable. The call will be
1122 put on location identified by IT iterator, where BEFORE flag drives
1123 position where the stmt will be put. */
1125 static void
1126 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1127 bool before)
1129 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1130 if (gimplify_omp_ctxp)
1131 return;
1133 tree unit_size = DECL_SIZE_UNIT (decl);
1134 tree base = build_fold_addr_expr (decl);
1136 /* Do not poison variables that have size equal to zero. */
1137 if (zerop (unit_size))
1138 return;
1140 /* It's necessary to have all stack variables aligned to ASAN granularity
1141 bytes. */
1142 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1143 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1145 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1147 gimple *g
1148 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1149 build_int_cst (integer_type_node, flags),
1150 base, unit_size);
1152 if (before)
1153 gsi_insert_before (it, g, GSI_NEW_STMT);
1154 else
1155 gsi_insert_after (it, g, GSI_NEW_STMT);
1158 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1159 either poisons or unpoisons a DECL. Created statement is appended
1160 to SEQ_P gimple sequence. */
1162 static void
1163 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1165 gimple_stmt_iterator it = gsi_last (*seq_p);
1166 bool before = false;
1168 if (gsi_end_p (it))
1169 before = true;
1171 asan_poison_variable (decl, poison, &it, before);
1174 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1176 static int
1177 sort_by_decl_uid (const void *a, const void *b)
1179 const tree *t1 = (const tree *)a;
1180 const tree *t2 = (const tree *)b;
1182 int uid1 = DECL_UID (*t1);
1183 int uid2 = DECL_UID (*t2);
1185 if (uid1 < uid2)
1186 return -1;
1187 else if (uid1 > uid2)
1188 return 1;
1189 else
1190 return 0;
1193 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1194 depending on POISON flag. Created statement is appended
1195 to SEQ_P gimple sequence. */
1197 static void
1198 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1200 unsigned c = variables->elements ();
1201 if (c == 0)
1202 return;
1204 auto_vec<tree> sorted_variables (c);
1206 for (hash_set<tree>::iterator it = variables->begin ();
1207 it != variables->end (); ++it)
1208 sorted_variables.safe_push (*it);
1210 sorted_variables.qsort (sort_by_decl_uid);
1212 unsigned i;
1213 tree var;
1214 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1216 asan_poison_variable (var, poison, seq_p);
1218 /* Add use_after_scope_memory attribute for the variable in order
1219 to prevent re-written into SSA. */
1220 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1221 DECL_ATTRIBUTES (var)))
1222 DECL_ATTRIBUTES (var)
1223 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1224 integer_one_node,
1225 DECL_ATTRIBUTES (var));
1229 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1231 static enum gimplify_status
1232 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1234 tree bind_expr = *expr_p;
1235 bool old_keep_stack = gimplify_ctxp->keep_stack;
1236 bool old_save_stack = gimplify_ctxp->save_stack;
1237 tree t;
1238 gbind *bind_stmt;
1239 gimple_seq body, cleanup;
1240 gcall *stack_save;
1241 location_t start_locus = 0, end_locus = 0;
1242 tree ret_clauses = NULL;
1244 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1246 /* Mark variables seen in this bind expr. */
1247 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1249 if (VAR_P (t))
1251 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1253 /* Mark variable as local. */
1254 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1255 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1256 || splay_tree_lookup (ctx->variables,
1257 (splay_tree_key) t) == NULL))
1259 if (ctx->region_type == ORT_SIMD
1260 && TREE_ADDRESSABLE (t)
1261 && !TREE_STATIC (t))
1262 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1263 else
1264 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1267 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1269 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1270 cfun->has_local_explicit_reg_vars = true;
1273 /* Preliminarily mark non-addressed complex variables as eligible
1274 for promotion to gimple registers. We'll transform their uses
1275 as we find them. */
1276 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1277 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1278 && !TREE_THIS_VOLATILE (t)
1279 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1280 && !needs_to_live_in_memory (t))
1281 DECL_GIMPLE_REG_P (t) = 1;
1284 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1285 BIND_EXPR_BLOCK (bind_expr));
1286 gimple_push_bind_expr (bind_stmt);
1288 gimplify_ctxp->keep_stack = false;
1289 gimplify_ctxp->save_stack = false;
1291 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1292 body = NULL;
1293 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1294 gimple_bind_set_body (bind_stmt, body);
1296 /* Source location wise, the cleanup code (stack_restore and clobbers)
1297 belongs to the end of the block, so propagate what we have. The
1298 stack_save operation belongs to the beginning of block, which we can
1299 infer from the bind_expr directly if the block has no explicit
1300 assignment. */
1301 if (BIND_EXPR_BLOCK (bind_expr))
1303 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1304 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1306 if (start_locus == 0)
1307 start_locus = EXPR_LOCATION (bind_expr);
1309 cleanup = NULL;
1310 stack_save = NULL;
1312 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1313 the stack space allocated to the VLAs. */
1314 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1316 gcall *stack_restore;
1318 /* Save stack on entry and restore it on exit. Add a try_finally
1319 block to achieve this. */
1320 build_stack_save_restore (&stack_save, &stack_restore);
1322 gimple_set_location (stack_save, start_locus);
1323 gimple_set_location (stack_restore, end_locus);
1325 gimplify_seq_add_stmt (&cleanup, stack_restore);
1328 /* Add clobbers for all variables that go out of scope. */
1329 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1331 if (VAR_P (t)
1332 && !is_global_var (t)
1333 && DECL_CONTEXT (t) == current_function_decl)
1335 if (!DECL_HARD_REGISTER (t)
1336 && !TREE_THIS_VOLATILE (t)
1337 && !DECL_HAS_VALUE_EXPR_P (t)
1338 /* Only care for variables that have to be in memory. Others
1339 will be rewritten into SSA names, hence moved to the
1340 top-level. */
1341 && !is_gimple_reg (t)
1342 && flag_stack_reuse != SR_NONE)
1344 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1345 gimple *clobber_stmt;
1346 TREE_THIS_VOLATILE (clobber) = 1;
1347 clobber_stmt = gimple_build_assign (t, clobber);
1348 gimple_set_location (clobber_stmt, end_locus);
1349 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1352 if (flag_openacc && oacc_declare_returns != NULL)
1354 tree *c = oacc_declare_returns->get (t);
1355 if (c != NULL)
1357 if (ret_clauses)
1358 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1360 ret_clauses = *c;
1362 oacc_declare_returns->remove (t);
1364 if (oacc_declare_returns->elements () == 0)
1366 delete oacc_declare_returns;
1367 oacc_declare_returns = NULL;
1373 if (asan_poisoned_variables != NULL
1374 && asan_poisoned_variables->contains (t))
1376 asan_poisoned_variables->remove (t);
1377 asan_poison_variable (t, true, &cleanup);
1380 if (gimplify_ctxp->live_switch_vars != NULL
1381 && gimplify_ctxp->live_switch_vars->contains (t))
1382 gimplify_ctxp->live_switch_vars->remove (t);
1385 if (ret_clauses)
1387 gomp_target *stmt;
1388 gimple_stmt_iterator si = gsi_start (cleanup);
1390 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1391 ret_clauses);
1392 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1395 if (cleanup)
1397 gtry *gs;
1398 gimple_seq new_body;
1400 new_body = NULL;
1401 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1402 GIMPLE_TRY_FINALLY);
1404 if (stack_save)
1405 gimplify_seq_add_stmt (&new_body, stack_save);
1406 gimplify_seq_add_stmt (&new_body, gs);
1407 gimple_bind_set_body (bind_stmt, new_body);
1410 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1411 if (!gimplify_ctxp->keep_stack)
1412 gimplify_ctxp->keep_stack = old_keep_stack;
1413 gimplify_ctxp->save_stack = old_save_stack;
1415 gimple_pop_bind_expr ();
1417 gimplify_seq_add_stmt (pre_p, bind_stmt);
1419 if (temp)
1421 *expr_p = temp;
1422 return GS_OK;
1425 *expr_p = NULL_TREE;
1426 return GS_ALL_DONE;
1429 /* Maybe add early return predict statement to PRE_P sequence. */
1431 static void
1432 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1434 /* If we are not in a conditional context, add PREDICT statement. */
1435 if (gimple_conditional_context ())
1437 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1438 NOT_TAKEN);
1439 gimplify_seq_add_stmt (pre_p, predict);
1443 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1444 GIMPLE value, it is assigned to a new temporary and the statement is
1445 re-written to return the temporary.
1447 PRE_P points to the sequence where side effects that must happen before
1448 STMT should be stored. */
1450 static enum gimplify_status
1451 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1453 greturn *ret;
1454 tree ret_expr = TREE_OPERAND (stmt, 0);
1455 tree result_decl, result;
1457 if (ret_expr == error_mark_node)
1458 return GS_ERROR;
1460 if (!ret_expr
1461 || TREE_CODE (ret_expr) == RESULT_DECL
1462 || ret_expr == error_mark_node)
1464 maybe_add_early_return_predict_stmt (pre_p);
1465 greturn *ret = gimple_build_return (ret_expr);
1466 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1467 gimplify_seq_add_stmt (pre_p, ret);
1468 return GS_ALL_DONE;
1471 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1472 result_decl = NULL_TREE;
1473 else
1475 result_decl = TREE_OPERAND (ret_expr, 0);
1477 /* See through a return by reference. */
1478 if (TREE_CODE (result_decl) == INDIRECT_REF)
1479 result_decl = TREE_OPERAND (result_decl, 0);
1481 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1482 || TREE_CODE (ret_expr) == INIT_EXPR)
1483 && TREE_CODE (result_decl) == RESULT_DECL);
1486 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1487 Recall that aggregate_value_p is FALSE for any aggregate type that is
1488 returned in registers. If we're returning values in registers, then
1489 we don't want to extend the lifetime of the RESULT_DECL, particularly
1490 across another call. In addition, for those aggregates for which
1491 hard_function_value generates a PARALLEL, we'll die during normal
1492 expansion of structure assignments; there's special code in expand_return
1493 to handle this case that does not exist in expand_expr. */
1494 if (!result_decl)
1495 result = NULL_TREE;
1496 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1498 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1500 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1501 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1502 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1503 should be effectively allocated by the caller, i.e. all calls to
1504 this function must be subject to the Return Slot Optimization. */
1505 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1506 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1508 result = result_decl;
1510 else if (gimplify_ctxp->return_temp)
1511 result = gimplify_ctxp->return_temp;
1512 else
1514 result = create_tmp_reg (TREE_TYPE (result_decl));
1516 /* ??? With complex control flow (usually involving abnormal edges),
1517 we can wind up warning about an uninitialized value for this. Due
1518 to how this variable is constructed and initialized, this is never
1519 true. Give up and never warn. */
1520 TREE_NO_WARNING (result) = 1;
1522 gimplify_ctxp->return_temp = result;
1525 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1526 Then gimplify the whole thing. */
1527 if (result != result_decl)
1528 TREE_OPERAND (ret_expr, 0) = result;
1530 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1532 maybe_add_early_return_predict_stmt (pre_p);
1533 ret = gimple_build_return (result);
1534 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1535 gimplify_seq_add_stmt (pre_p, ret);
1537 return GS_ALL_DONE;
1540 /* Gimplify a variable-length array DECL. */
1542 static void
1543 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1545 /* This is a variable-sized decl. Simplify its size and mark it
1546 for deferred expansion. */
1547 tree t, addr, ptr_type;
1549 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1550 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1552 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1553 if (DECL_HAS_VALUE_EXPR_P (decl))
1554 return;
1556 /* All occurrences of this decl in final gimplified code will be
1557 replaced by indirection. Setting DECL_VALUE_EXPR does two
1558 things: First, it lets the rest of the gimplifier know what
1559 replacement to use. Second, it lets the debug info know
1560 where to find the value. */
1561 ptr_type = build_pointer_type (TREE_TYPE (decl));
1562 addr = create_tmp_var (ptr_type, get_name (decl));
1563 DECL_IGNORED_P (addr) = 0;
1564 t = build_fold_indirect_ref (addr);
1565 TREE_THIS_NOTRAP (t) = 1;
1566 SET_DECL_VALUE_EXPR (decl, t);
1567 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1569 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1570 max_int_size_in_bytes (TREE_TYPE (decl)));
1571 /* The call has been built for a variable-sized object. */
1572 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1573 t = fold_convert (ptr_type, t);
1574 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1576 gimplify_and_add (t, seq_p);
1579 /* A helper function to be called via walk_tree. Mark all labels under *TP
1580 as being forced. To be called for DECL_INITIAL of static variables. */
1582 static tree
1583 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1585 if (TYPE_P (*tp))
1586 *walk_subtrees = 0;
1587 if (TREE_CODE (*tp) == LABEL_DECL)
1589 FORCED_LABEL (*tp) = 1;
1590 cfun->has_forced_label_in_static = 1;
1593 return NULL_TREE;
1596 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1597 and initialization explicit. */
1599 static enum gimplify_status
1600 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1602 tree stmt = *stmt_p;
1603 tree decl = DECL_EXPR_DECL (stmt);
1605 *stmt_p = NULL_TREE;
1607 if (TREE_TYPE (decl) == error_mark_node)
1608 return GS_ERROR;
1610 if ((TREE_CODE (decl) == TYPE_DECL
1611 || VAR_P (decl))
1612 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1614 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1615 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1616 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1619 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1620 in case its size expressions contain problematic nodes like CALL_EXPR. */
1621 if (TREE_CODE (decl) == TYPE_DECL
1622 && DECL_ORIGINAL_TYPE (decl)
1623 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1625 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1626 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1627 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1630 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1632 tree init = DECL_INITIAL (decl);
1633 bool is_vla = false;
1635 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1636 || (!TREE_STATIC (decl)
1637 && flag_stack_check == GENERIC_STACK_CHECK
1638 && compare_tree_int (DECL_SIZE_UNIT (decl),
1639 STACK_CHECK_MAX_VAR_SIZE) > 0))
1641 gimplify_vla_decl (decl, seq_p);
1642 is_vla = true;
1645 if (asan_poisoned_variables
1646 && !is_vla
1647 && TREE_ADDRESSABLE (decl)
1648 && !TREE_STATIC (decl)
1649 && !DECL_HAS_VALUE_EXPR_P (decl)
1650 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1651 && dbg_cnt (asan_use_after_scope))
1653 asan_poisoned_variables->add (decl);
1654 asan_poison_variable (decl, false, seq_p);
1655 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1656 gimplify_ctxp->live_switch_vars->add (decl);
1659 /* Some front ends do not explicitly declare all anonymous
1660 artificial variables. We compensate here by declaring the
1661 variables, though it would be better if the front ends would
1662 explicitly declare them. */
1663 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1664 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1665 gimple_add_tmp_var (decl);
1667 if (init && init != error_mark_node)
1669 if (!TREE_STATIC (decl))
1671 DECL_INITIAL (decl) = NULL_TREE;
1672 init = build2 (INIT_EXPR, void_type_node, decl, init);
1673 gimplify_and_add (init, seq_p);
1674 ggc_free (init);
1676 else
1677 /* We must still examine initializers for static variables
1678 as they may contain a label address. */
1679 walk_tree (&init, force_labels_r, NULL, NULL);
1683 return GS_ALL_DONE;
1686 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1687 and replacing the LOOP_EXPR with goto, but if the loop contains an
1688 EXIT_EXPR, we need to append a label for it to jump to. */
1690 static enum gimplify_status
1691 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1693 tree saved_label = gimplify_ctxp->exit_label;
1694 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1696 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1698 gimplify_ctxp->exit_label = NULL_TREE;
1700 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1702 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1704 if (gimplify_ctxp->exit_label)
1705 gimplify_seq_add_stmt (pre_p,
1706 gimple_build_label (gimplify_ctxp->exit_label));
1708 gimplify_ctxp->exit_label = saved_label;
1710 *expr_p = NULL;
1711 return GS_ALL_DONE;
1714 /* Gimplify a statement list onto a sequence. These may be created either
1715 by an enlightened front-end, or by shortcut_cond_expr. */
1717 static enum gimplify_status
1718 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1720 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1722 tree_stmt_iterator i = tsi_start (*expr_p);
1724 while (!tsi_end_p (i))
1726 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1727 tsi_delink (&i);
1730 if (temp)
1732 *expr_p = temp;
1733 return GS_OK;
1736 return GS_ALL_DONE;
1739 /* Callback for walk_gimple_seq. */
1741 static tree
1742 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1743 struct walk_stmt_info *wi)
1745 gimple *stmt = gsi_stmt (*gsi_p);
1747 *handled_ops_p = true;
1748 switch (gimple_code (stmt))
1750 case GIMPLE_TRY:
1751 /* A compiler-generated cleanup or a user-written try block.
1752 If it's empty, don't dive into it--that would result in
1753 worse location info. */
1754 if (gimple_try_eval (stmt) == NULL)
1756 wi->info = stmt;
1757 return integer_zero_node;
1759 /* Fall through. */
1760 case GIMPLE_BIND:
1761 case GIMPLE_CATCH:
1762 case GIMPLE_EH_FILTER:
1763 case GIMPLE_TRANSACTION:
1764 /* Walk the sub-statements. */
1765 *handled_ops_p = false;
1766 break;
1767 case GIMPLE_CALL:
1768 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1770 *handled_ops_p = false;
1771 break;
1773 /* Fall through. */
1774 default:
1775 /* Save the first "real" statement (not a decl/lexical scope/...). */
1776 wi->info = stmt;
1777 return integer_zero_node;
1779 return NULL_TREE;
1782 /* Possibly warn about unreachable statements between switch's controlling
1783 expression and the first case. SEQ is the body of a switch expression. */
1785 static void
1786 maybe_warn_switch_unreachable (gimple_seq seq)
1788 if (!warn_switch_unreachable
1789 /* This warning doesn't play well with Fortran when optimizations
1790 are on. */
1791 || lang_GNU_Fortran ()
1792 || seq == NULL)
1793 return;
1795 struct walk_stmt_info wi;
1796 memset (&wi, 0, sizeof (wi));
1797 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1798 gimple *stmt = (gimple *) wi.info;
1800 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1802 if (gimple_code (stmt) == GIMPLE_GOTO
1803 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1804 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1805 /* Don't warn for compiler-generated gotos. These occur
1806 in Duff's devices, for example. */;
1807 else
1808 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1809 "statement will never be executed");
1814 /* A label entry that pairs label and a location. */
1815 struct label_entry
1817 tree label;
1818 location_t loc;
1821 /* Find LABEL in vector of label entries VEC. */
1823 static struct label_entry *
1824 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1826 unsigned int i;
1827 struct label_entry *l;
1829 FOR_EACH_VEC_ELT (*vec, i, l)
1830 if (l->label == label)
1831 return l;
1832 return NULL;
1835 /* Return true if LABEL, a LABEL_DECL, represents a case label
1836 in a vector of labels CASES. */
1838 static bool
1839 case_label_p (const vec<tree> *cases, tree label)
1841 unsigned int i;
1842 tree l;
1844 FOR_EACH_VEC_ELT (*cases, i, l)
1845 if (CASE_LABEL (l) == label)
1846 return true;
1847 return false;
1850 /* Find the last statement in a scope STMT. */
1852 static gimple *
1853 last_stmt_in_scope (gimple *stmt)
1855 if (!stmt)
1856 return NULL;
1858 switch (gimple_code (stmt))
1860 case GIMPLE_BIND:
1862 gbind *bind = as_a <gbind *> (stmt);
1863 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1864 return last_stmt_in_scope (stmt);
1867 case GIMPLE_TRY:
1869 gtry *try_stmt = as_a <gtry *> (stmt);
1870 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1871 gimple *last_eval = last_stmt_in_scope (stmt);
1872 if (gimple_stmt_may_fallthru (last_eval)
1873 && (last_eval == NULL
1874 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1875 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1877 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1878 return last_stmt_in_scope (stmt);
1880 else
1881 return last_eval;
1884 default:
1885 return stmt;
1889 /* Collect interesting labels in LABELS and return the statement preceding
1890 another case label, or a user-defined label. */
1892 static gimple *
1893 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1894 auto_vec <struct label_entry> *labels)
1896 gimple *prev = NULL;
1900 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1901 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1903 /* Nested scope. Only look at the last statement of
1904 the innermost scope. */
1905 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1906 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1907 if (last)
1909 prev = last;
1910 /* It might be a label without a location. Use the
1911 location of the scope then. */
1912 if (!gimple_has_location (prev))
1913 gimple_set_location (prev, bind_loc);
1915 gsi_next (gsi_p);
1916 continue;
1919 /* Ifs are tricky. */
1920 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1922 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1923 tree false_lab = gimple_cond_false_label (cond_stmt);
1924 location_t if_loc = gimple_location (cond_stmt);
1926 /* If we have e.g.
1927 if (i > 1) goto <D.2259>; else goto D;
1928 we can't do much with the else-branch. */
1929 if (!DECL_ARTIFICIAL (false_lab))
1930 break;
1932 /* Go on until the false label, then one step back. */
1933 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1935 gimple *stmt = gsi_stmt (*gsi_p);
1936 if (gimple_code (stmt) == GIMPLE_LABEL
1937 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1938 break;
1941 /* Not found? Oops. */
1942 if (gsi_end_p (*gsi_p))
1943 break;
1945 struct label_entry l = { false_lab, if_loc };
1946 labels->safe_push (l);
1948 /* Go to the last statement of the then branch. */
1949 gsi_prev (gsi_p);
1951 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1952 <D.1759>:
1953 <stmt>;
1954 goto <D.1761>;
1955 <D.1760>:
1957 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1958 && !gimple_has_location (gsi_stmt (*gsi_p)))
1960 /* Look at the statement before, it might be
1961 attribute fallthrough, in which case don't warn. */
1962 gsi_prev (gsi_p);
1963 bool fallthru_before_dest
1964 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1965 gsi_next (gsi_p);
1966 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1967 if (!fallthru_before_dest)
1969 struct label_entry l = { goto_dest, if_loc };
1970 labels->safe_push (l);
1973 /* And move back. */
1974 gsi_next (gsi_p);
1977 /* Remember the last statement. Skip labels that are of no interest
1978 to us. */
1979 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1981 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1982 if (find_label_entry (labels, label))
1983 prev = gsi_stmt (*gsi_p);
1985 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1987 else
1988 prev = gsi_stmt (*gsi_p);
1989 gsi_next (gsi_p);
1991 while (!gsi_end_p (*gsi_p)
1992 /* Stop if we find a case or a user-defined label. */
1993 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
1994 || !gimple_has_location (gsi_stmt (*gsi_p))));
1996 return prev;
1999 /* Return true if the switch fallthough warning should occur. LABEL is
2000 the label statement that we're falling through to. */
2002 static bool
2003 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2005 gimple_stmt_iterator gsi = *gsi_p;
2007 /* Don't warn if the label is marked with a "falls through" comment. */
2008 if (FALLTHROUGH_LABEL_P (label))
2009 return false;
2011 /* Don't warn for non-case labels followed by a statement:
2012 case 0:
2013 foo ();
2014 label:
2015 bar ();
2016 as these are likely intentional. */
2017 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2019 tree l;
2020 while (!gsi_end_p (gsi)
2021 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2022 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2023 && !case_label_p (&gimplify_ctxp->case_labels, l))
2024 gsi_next (&gsi);
2025 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2026 return false;
2029 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2030 immediately breaks. */
2031 gsi = *gsi_p;
2033 /* Skip all immediately following labels. */
2034 while (!gsi_end_p (gsi)
2035 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2036 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2037 gsi_next (&gsi);
2039 /* { ... something; default:; } */
2040 if (gsi_end_p (gsi)
2041 /* { ... something; default: break; } or
2042 { ... something; default: goto L; } */
2043 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2044 /* { ... something; default: return; } */
2045 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2046 return false;
2048 return true;
2051 /* Callback for walk_gimple_seq. */
2053 static tree
2054 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2055 struct walk_stmt_info *)
2057 gimple *stmt = gsi_stmt (*gsi_p);
2059 *handled_ops_p = true;
2060 switch (gimple_code (stmt))
2062 case GIMPLE_TRY:
2063 case GIMPLE_BIND:
2064 case GIMPLE_CATCH:
2065 case GIMPLE_EH_FILTER:
2066 case GIMPLE_TRANSACTION:
2067 /* Walk the sub-statements. */
2068 *handled_ops_p = false;
2069 break;
2071 /* Find a sequence of form:
2073 GIMPLE_LABEL
2074 [...]
2075 <may fallthru stmt>
2076 GIMPLE_LABEL
2078 and possibly warn. */
2079 case GIMPLE_LABEL:
2081 /* Found a label. Skip all immediately following labels. */
2082 while (!gsi_end_p (*gsi_p)
2083 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2084 gsi_next (gsi_p);
2086 /* There might be no more statements. */
2087 if (gsi_end_p (*gsi_p))
2088 return integer_zero_node;
2090 /* Vector of labels that fall through. */
2091 auto_vec <struct label_entry> labels;
2092 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2094 /* There might be no more statements. */
2095 if (gsi_end_p (*gsi_p))
2096 return integer_zero_node;
2098 gimple *next = gsi_stmt (*gsi_p);
2099 tree label;
2100 /* If what follows is a label, then we may have a fallthrough. */
2101 if (gimple_code (next) == GIMPLE_LABEL
2102 && gimple_has_location (next)
2103 && (label = gimple_label_label (as_a <glabel *> (next)))
2104 && prev != NULL)
2106 struct label_entry *l;
2107 bool warned_p = false;
2108 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2109 /* Quiet. */;
2110 else if (gimple_code (prev) == GIMPLE_LABEL
2111 && (label = gimple_label_label (as_a <glabel *> (prev)))
2112 && (l = find_label_entry (&labels, label)))
2113 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2114 "this statement may fall through");
2115 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2116 /* Try to be clever and don't warn when the statement
2117 can't actually fall through. */
2118 && gimple_stmt_may_fallthru (prev)
2119 && gimple_has_location (prev))
2120 warned_p = warning_at (gimple_location (prev),
2121 OPT_Wimplicit_fallthrough_,
2122 "this statement may fall through");
2123 if (warned_p)
2124 inform (gimple_location (next), "here");
2126 /* Mark this label as processed so as to prevent multiple
2127 warnings in nested switches. */
2128 FALLTHROUGH_LABEL_P (label) = true;
2130 /* So that next warn_implicit_fallthrough_r will start looking for
2131 a new sequence starting with this label. */
2132 gsi_prev (gsi_p);
2135 break;
2136 default:
2137 break;
2139 return NULL_TREE;
2142 /* Warn when a switch case falls through. */
2144 static void
2145 maybe_warn_implicit_fallthrough (gimple_seq seq)
2147 if (!warn_implicit_fallthrough)
2148 return;
2150 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2151 if (!(lang_GNU_C ()
2152 || lang_GNU_CXX ()
2153 || lang_GNU_OBJC ()))
2154 return;
2156 struct walk_stmt_info wi;
2157 memset (&wi, 0, sizeof (wi));
2158 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2161 /* Callback for walk_gimple_seq. */
2163 static tree
2164 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2165 struct walk_stmt_info *)
2167 gimple *stmt = gsi_stmt (*gsi_p);
2169 *handled_ops_p = true;
2170 switch (gimple_code (stmt))
2172 case GIMPLE_TRY:
2173 case GIMPLE_BIND:
2174 case GIMPLE_CATCH:
2175 case GIMPLE_EH_FILTER:
2176 case GIMPLE_TRANSACTION:
2177 /* Walk the sub-statements. */
2178 *handled_ops_p = false;
2179 break;
2180 case GIMPLE_CALL:
2181 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2183 gsi_remove (gsi_p, true);
2184 if (gsi_end_p (*gsi_p))
2185 return integer_zero_node;
2187 bool found = false;
2188 location_t loc = gimple_location (stmt);
2190 gimple_stmt_iterator gsi2 = *gsi_p;
2191 stmt = gsi_stmt (gsi2);
2192 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2194 /* Go on until the artificial label. */
2195 tree goto_dest = gimple_goto_dest (stmt);
2196 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2198 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2199 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2200 == goto_dest)
2201 break;
2204 /* Not found? Stop. */
2205 if (gsi_end_p (gsi2))
2206 break;
2208 /* Look one past it. */
2209 gsi_next (&gsi2);
2212 /* We're looking for a case label or default label here. */
2213 while (!gsi_end_p (gsi2))
2215 stmt = gsi_stmt (gsi2);
2216 if (gimple_code (stmt) == GIMPLE_LABEL)
2218 tree label = gimple_label_label (as_a <glabel *> (stmt));
2219 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2221 found = true;
2222 break;
2225 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2227 else
2228 /* Something other is not expected. */
2229 break;
2230 gsi_next (&gsi2);
2232 if (!found)
2233 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2234 "a case label or default label");
2236 break;
2237 default:
2238 break;
2240 return NULL_TREE;
2243 /* Expand all FALLTHROUGH () calls in SEQ. */
2245 static void
2246 expand_FALLTHROUGH (gimple_seq *seq_p)
2248 struct walk_stmt_info wi;
2249 memset (&wi, 0, sizeof (wi));
2250 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2254 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2255 branch to. */
2257 static enum gimplify_status
2258 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2260 tree switch_expr = *expr_p;
2261 gimple_seq switch_body_seq = NULL;
2262 enum gimplify_status ret;
2263 tree index_type = TREE_TYPE (switch_expr);
2264 if (index_type == NULL_TREE)
2265 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2267 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2268 fb_rvalue);
2269 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2270 return ret;
2272 if (SWITCH_BODY (switch_expr))
2274 vec<tree> labels;
2275 vec<tree> saved_labels;
2276 hash_set<tree> *saved_live_switch_vars = NULL;
2277 tree default_case = NULL_TREE;
2278 gswitch *switch_stmt;
2280 /* Save old labels, get new ones from body, then restore the old
2281 labels. Save all the things from the switch body to append after. */
2282 saved_labels = gimplify_ctxp->case_labels;
2283 gimplify_ctxp->case_labels.create (8);
2285 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2286 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2287 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2288 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2289 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2290 else
2291 gimplify_ctxp->live_switch_vars = NULL;
2293 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2294 gimplify_ctxp->in_switch_expr = true;
2296 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2298 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2299 maybe_warn_switch_unreachable (switch_body_seq);
2300 maybe_warn_implicit_fallthrough (switch_body_seq);
2301 /* Only do this for the outermost GIMPLE_SWITCH. */
2302 if (!gimplify_ctxp->in_switch_expr)
2303 expand_FALLTHROUGH (&switch_body_seq);
2305 labels = gimplify_ctxp->case_labels;
2306 gimplify_ctxp->case_labels = saved_labels;
2308 if (gimplify_ctxp->live_switch_vars)
2310 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2311 delete gimplify_ctxp->live_switch_vars;
2313 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2315 preprocess_case_label_vec_for_gimple (labels, index_type,
2316 &default_case);
2318 if (!default_case)
2320 glabel *new_default;
2322 default_case
2323 = build_case_label (NULL_TREE, NULL_TREE,
2324 create_artificial_label (UNKNOWN_LOCATION));
2325 new_default = gimple_build_label (CASE_LABEL (default_case));
2326 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2329 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2330 default_case, labels);
2331 gimplify_seq_add_stmt (pre_p, switch_stmt);
2332 gimplify_seq_add_seq (pre_p, switch_body_seq);
2333 labels.release ();
2335 else
2336 gcc_unreachable ();
2338 return GS_ALL_DONE;
2341 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2343 static enum gimplify_status
2344 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2346 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2347 == current_function_decl);
2349 tree label = LABEL_EXPR_LABEL (*expr_p);
2350 glabel *label_stmt = gimple_build_label (label);
2351 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2352 gimplify_seq_add_stmt (pre_p, label_stmt);
2354 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2355 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2356 NOT_TAKEN));
2357 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2358 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2359 TAKEN));
2361 return GS_ALL_DONE;
2364 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2366 static enum gimplify_status
2367 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2369 struct gimplify_ctx *ctxp;
2370 glabel *label_stmt;
2372 /* Invalid programs can play Duff's Device type games with, for example,
2373 #pragma omp parallel. At least in the C front end, we don't
2374 detect such invalid branches until after gimplification, in the
2375 diagnose_omp_blocks pass. */
2376 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2377 if (ctxp->case_labels.exists ())
2378 break;
2380 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2381 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2382 ctxp->case_labels.safe_push (*expr_p);
2383 gimplify_seq_add_stmt (pre_p, label_stmt);
2385 return GS_ALL_DONE;
2388 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2389 if necessary. */
2391 tree
2392 build_and_jump (tree *label_p)
2394 if (label_p == NULL)
2395 /* If there's nowhere to jump, just fall through. */
2396 return NULL_TREE;
2398 if (*label_p == NULL_TREE)
2400 tree label = create_artificial_label (UNKNOWN_LOCATION);
2401 *label_p = label;
2404 return build1 (GOTO_EXPR, void_type_node, *label_p);
2407 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2408 This also involves building a label to jump to and communicating it to
2409 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2411 static enum gimplify_status
2412 gimplify_exit_expr (tree *expr_p)
2414 tree cond = TREE_OPERAND (*expr_p, 0);
2415 tree expr;
2417 expr = build_and_jump (&gimplify_ctxp->exit_label);
2418 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2419 *expr_p = expr;
2421 return GS_OK;
2424 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2425 different from its canonical type, wrap the whole thing inside a
2426 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2427 type.
2429 The canonical type of a COMPONENT_REF is the type of the field being
2430 referenced--unless the field is a bit-field which can be read directly
2431 in a smaller mode, in which case the canonical type is the
2432 sign-appropriate type corresponding to that mode. */
2434 static void
2435 canonicalize_component_ref (tree *expr_p)
2437 tree expr = *expr_p;
2438 tree type;
2440 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2442 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2443 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2444 else
2445 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2447 /* One could argue that all the stuff below is not necessary for
2448 the non-bitfield case and declare it a FE error if type
2449 adjustment would be needed. */
2450 if (TREE_TYPE (expr) != type)
2452 #ifdef ENABLE_TYPES_CHECKING
2453 tree old_type = TREE_TYPE (expr);
2454 #endif
2455 int type_quals;
2457 /* We need to preserve qualifiers and propagate them from
2458 operand 0. */
2459 type_quals = TYPE_QUALS (type)
2460 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2461 if (TYPE_QUALS (type) != type_quals)
2462 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2464 /* Set the type of the COMPONENT_REF to the underlying type. */
2465 TREE_TYPE (expr) = type;
2467 #ifdef ENABLE_TYPES_CHECKING
2468 /* It is now a FE error, if the conversion from the canonical
2469 type to the original expression type is not useless. */
2470 gcc_assert (useless_type_conversion_p (old_type, type));
2471 #endif
2475 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2476 to foo, embed that change in the ADDR_EXPR by converting
2477 T array[U];
2478 (T *)&array
2480 &array[L]
2481 where L is the lower bound. For simplicity, only do this for constant
2482 lower bound.
2483 The constraint is that the type of &array[L] is trivially convertible
2484 to T *. */
2486 static void
2487 canonicalize_addr_expr (tree *expr_p)
2489 tree expr = *expr_p;
2490 tree addr_expr = TREE_OPERAND (expr, 0);
2491 tree datype, ddatype, pddatype;
2493 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2494 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2495 || TREE_CODE (addr_expr) != ADDR_EXPR)
2496 return;
2498 /* The addr_expr type should be a pointer to an array. */
2499 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2500 if (TREE_CODE (datype) != ARRAY_TYPE)
2501 return;
2503 /* The pointer to element type shall be trivially convertible to
2504 the expression pointer type. */
2505 ddatype = TREE_TYPE (datype);
2506 pddatype = build_pointer_type (ddatype);
2507 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2508 pddatype))
2509 return;
2511 /* The lower bound and element sizes must be constant. */
2512 if (!TYPE_SIZE_UNIT (ddatype)
2513 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2514 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2515 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2516 return;
2518 /* All checks succeeded. Build a new node to merge the cast. */
2519 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2520 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2521 NULL_TREE, NULL_TREE);
2522 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2524 /* We can have stripped a required restrict qualifier above. */
2525 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2526 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2529 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2530 underneath as appropriate. */
2532 static enum gimplify_status
2533 gimplify_conversion (tree *expr_p)
2535 location_t loc = EXPR_LOCATION (*expr_p);
2536 gcc_assert (CONVERT_EXPR_P (*expr_p));
2538 /* Then strip away all but the outermost conversion. */
2539 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2541 /* And remove the outermost conversion if it's useless. */
2542 if (tree_ssa_useless_type_conversion (*expr_p))
2543 *expr_p = TREE_OPERAND (*expr_p, 0);
2545 /* If we still have a conversion at the toplevel,
2546 then canonicalize some constructs. */
2547 if (CONVERT_EXPR_P (*expr_p))
2549 tree sub = TREE_OPERAND (*expr_p, 0);
2551 /* If a NOP conversion is changing the type of a COMPONENT_REF
2552 expression, then canonicalize its type now in order to expose more
2553 redundant conversions. */
2554 if (TREE_CODE (sub) == COMPONENT_REF)
2555 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2557 /* If a NOP conversion is changing a pointer to array of foo
2558 to a pointer to foo, embed that change in the ADDR_EXPR. */
2559 else if (TREE_CODE (sub) == ADDR_EXPR)
2560 canonicalize_addr_expr (expr_p);
2563 /* If we have a conversion to a non-register type force the
2564 use of a VIEW_CONVERT_EXPR instead. */
2565 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2566 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2567 TREE_OPERAND (*expr_p, 0));
2569 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2570 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2571 TREE_SET_CODE (*expr_p, NOP_EXPR);
2573 return GS_OK;
2576 /* Nonlocal VLAs seen in the current function. */
2577 static hash_set<tree> *nonlocal_vlas;
2579 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2580 static tree nonlocal_vla_vars;
2582 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2583 DECL_VALUE_EXPR, and it's worth re-examining things. */
2585 static enum gimplify_status
2586 gimplify_var_or_parm_decl (tree *expr_p)
2588 tree decl = *expr_p;
2590 /* ??? If this is a local variable, and it has not been seen in any
2591 outer BIND_EXPR, then it's probably the result of a duplicate
2592 declaration, for which we've already issued an error. It would
2593 be really nice if the front end wouldn't leak these at all.
2594 Currently the only known culprit is C++ destructors, as seen
2595 in g++.old-deja/g++.jason/binding.C. */
2596 if (VAR_P (decl)
2597 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2598 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2599 && decl_function_context (decl) == current_function_decl)
2601 gcc_assert (seen_error ());
2602 return GS_ERROR;
2605 /* When within an OMP context, notice uses of variables. */
2606 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2607 return GS_ALL_DONE;
2609 /* If the decl is an alias for another expression, substitute it now. */
2610 if (DECL_HAS_VALUE_EXPR_P (decl))
2612 tree value_expr = DECL_VALUE_EXPR (decl);
2614 /* For referenced nonlocal VLAs add a decl for debugging purposes
2615 to the current function. */
2616 if (VAR_P (decl)
2617 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2618 && nonlocal_vlas != NULL
2619 && TREE_CODE (value_expr) == INDIRECT_REF
2620 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2621 && decl_function_context (decl) != current_function_decl)
2623 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2624 while (ctx
2625 && (ctx->region_type == ORT_WORKSHARE
2626 || ctx->region_type == ORT_SIMD
2627 || ctx->region_type == ORT_ACC))
2628 ctx = ctx->outer_context;
2629 if (!ctx && !nonlocal_vlas->add (decl))
2631 tree copy = copy_node (decl);
2633 lang_hooks.dup_lang_specific_decl (copy);
2634 SET_DECL_RTL (copy, 0);
2635 TREE_USED (copy) = 1;
2636 DECL_CHAIN (copy) = nonlocal_vla_vars;
2637 nonlocal_vla_vars = copy;
2638 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2639 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2643 *expr_p = unshare_expr (value_expr);
2644 return GS_OK;
2647 return GS_ALL_DONE;
2650 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2652 static void
2653 recalculate_side_effects (tree t)
2655 enum tree_code code = TREE_CODE (t);
2656 int len = TREE_OPERAND_LENGTH (t);
2657 int i;
2659 switch (TREE_CODE_CLASS (code))
2661 case tcc_expression:
2662 switch (code)
2664 case INIT_EXPR:
2665 case MODIFY_EXPR:
2666 case VA_ARG_EXPR:
2667 case PREDECREMENT_EXPR:
2668 case PREINCREMENT_EXPR:
2669 case POSTDECREMENT_EXPR:
2670 case POSTINCREMENT_EXPR:
2671 /* All of these have side-effects, no matter what their
2672 operands are. */
2673 return;
2675 default:
2676 break;
2678 /* Fall through. */
2680 case tcc_comparison: /* a comparison expression */
2681 case tcc_unary: /* a unary arithmetic expression */
2682 case tcc_binary: /* a binary arithmetic expression */
2683 case tcc_reference: /* a reference */
2684 case tcc_vl_exp: /* a function call */
2685 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2686 for (i = 0; i < len; ++i)
2688 tree op = TREE_OPERAND (t, i);
2689 if (op && TREE_SIDE_EFFECTS (op))
2690 TREE_SIDE_EFFECTS (t) = 1;
2692 break;
2694 case tcc_constant:
2695 /* No side-effects. */
2696 return;
2698 default:
2699 gcc_unreachable ();
2703 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2704 node *EXPR_P.
2706 compound_lval
2707 : min_lval '[' val ']'
2708 | min_lval '.' ID
2709 | compound_lval '[' val ']'
2710 | compound_lval '.' ID
2712 This is not part of the original SIMPLE definition, which separates
2713 array and member references, but it seems reasonable to handle them
2714 together. Also, this way we don't run into problems with union
2715 aliasing; gcc requires that for accesses through a union to alias, the
2716 union reference must be explicit, which was not always the case when we
2717 were splitting up array and member refs.
2719 PRE_P points to the sequence where side effects that must happen before
2720 *EXPR_P should be stored.
2722 POST_P points to the sequence where side effects that must happen after
2723 *EXPR_P should be stored. */
2725 static enum gimplify_status
2726 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2727 fallback_t fallback)
2729 tree *p;
2730 enum gimplify_status ret = GS_ALL_DONE, tret;
2731 int i;
2732 location_t loc = EXPR_LOCATION (*expr_p);
2733 tree expr = *expr_p;
2735 /* Create a stack of the subexpressions so later we can walk them in
2736 order from inner to outer. */
2737 auto_vec<tree, 10> expr_stack;
2739 /* We can handle anything that get_inner_reference can deal with. */
2740 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2742 restart:
2743 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2744 if (TREE_CODE (*p) == INDIRECT_REF)
2745 *p = fold_indirect_ref_loc (loc, *p);
2747 if (handled_component_p (*p))
2749 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2750 additional COMPONENT_REFs. */
2751 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2752 && gimplify_var_or_parm_decl (p) == GS_OK)
2753 goto restart;
2754 else
2755 break;
2757 expr_stack.safe_push (*p);
2760 gcc_assert (expr_stack.length ());
2762 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2763 walked through and P points to the innermost expression.
2765 Java requires that we elaborated nodes in source order. That
2766 means we must gimplify the inner expression followed by each of
2767 the indices, in order. But we can't gimplify the inner
2768 expression until we deal with any variable bounds, sizes, or
2769 positions in order to deal with PLACEHOLDER_EXPRs.
2771 So we do this in three steps. First we deal with the annotations
2772 for any variables in the components, then we gimplify the base,
2773 then we gimplify any indices, from left to right. */
2774 for (i = expr_stack.length () - 1; i >= 0; i--)
2776 tree t = expr_stack[i];
2778 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2780 /* Gimplify the low bound and element type size and put them into
2781 the ARRAY_REF. If these values are set, they have already been
2782 gimplified. */
2783 if (TREE_OPERAND (t, 2) == NULL_TREE)
2785 tree low = unshare_expr (array_ref_low_bound (t));
2786 if (!is_gimple_min_invariant (low))
2788 TREE_OPERAND (t, 2) = low;
2789 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2790 post_p, is_gimple_reg,
2791 fb_rvalue);
2792 ret = MIN (ret, tret);
2795 else
2797 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2798 is_gimple_reg, fb_rvalue);
2799 ret = MIN (ret, tret);
2802 if (TREE_OPERAND (t, 3) == NULL_TREE)
2804 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2805 tree elmt_size = unshare_expr (array_ref_element_size (t));
2806 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2808 /* Divide the element size by the alignment of the element
2809 type (above). */
2810 elmt_size
2811 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2813 if (!is_gimple_min_invariant (elmt_size))
2815 TREE_OPERAND (t, 3) = elmt_size;
2816 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2817 post_p, is_gimple_reg,
2818 fb_rvalue);
2819 ret = MIN (ret, tret);
2822 else
2824 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2825 is_gimple_reg, fb_rvalue);
2826 ret = MIN (ret, tret);
2829 else if (TREE_CODE (t) == COMPONENT_REF)
2831 /* Set the field offset into T and gimplify it. */
2832 if (TREE_OPERAND (t, 2) == NULL_TREE)
2834 tree offset = unshare_expr (component_ref_field_offset (t));
2835 tree field = TREE_OPERAND (t, 1);
2836 tree factor
2837 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2839 /* Divide the offset by its alignment. */
2840 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2842 if (!is_gimple_min_invariant (offset))
2844 TREE_OPERAND (t, 2) = offset;
2845 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2846 post_p, is_gimple_reg,
2847 fb_rvalue);
2848 ret = MIN (ret, tret);
2851 else
2853 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2854 is_gimple_reg, fb_rvalue);
2855 ret = MIN (ret, tret);
2860 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2861 so as to match the min_lval predicate. Failure to do so may result
2862 in the creation of large aggregate temporaries. */
2863 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2864 fallback | fb_lvalue);
2865 ret = MIN (ret, tret);
2867 /* And finally, the indices and operands of ARRAY_REF. During this
2868 loop we also remove any useless conversions. */
2869 for (; expr_stack.length () > 0; )
2871 tree t = expr_stack.pop ();
2873 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2875 /* Gimplify the dimension. */
2876 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2878 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2879 is_gimple_val, fb_rvalue);
2880 ret = MIN (ret, tret);
2884 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2886 /* The innermost expression P may have originally had
2887 TREE_SIDE_EFFECTS set which would have caused all the outer
2888 expressions in *EXPR_P leading to P to also have had
2889 TREE_SIDE_EFFECTS set. */
2890 recalculate_side_effects (t);
2893 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2894 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2896 canonicalize_component_ref (expr_p);
2899 expr_stack.release ();
2901 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2903 return ret;
2906 /* Gimplify the self modifying expression pointed to by EXPR_P
2907 (++, --, +=, -=).
2909 PRE_P points to the list where side effects that must happen before
2910 *EXPR_P should be stored.
2912 POST_P points to the list where side effects that must happen after
2913 *EXPR_P should be stored.
2915 WANT_VALUE is nonzero iff we want to use the value of this expression
2916 in another expression.
2918 ARITH_TYPE is the type the computation should be performed in. */
2920 enum gimplify_status
2921 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2922 bool want_value, tree arith_type)
2924 enum tree_code code;
2925 tree lhs, lvalue, rhs, t1;
2926 gimple_seq post = NULL, *orig_post_p = post_p;
2927 bool postfix;
2928 enum tree_code arith_code;
2929 enum gimplify_status ret;
2930 location_t loc = EXPR_LOCATION (*expr_p);
2932 code = TREE_CODE (*expr_p);
2934 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2935 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2937 /* Prefix or postfix? */
2938 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2939 /* Faster to treat as prefix if result is not used. */
2940 postfix = want_value;
2941 else
2942 postfix = false;
2944 /* For postfix, make sure the inner expression's post side effects
2945 are executed after side effects from this expression. */
2946 if (postfix)
2947 post_p = &post;
2949 /* Add or subtract? */
2950 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2951 arith_code = PLUS_EXPR;
2952 else
2953 arith_code = MINUS_EXPR;
2955 /* Gimplify the LHS into a GIMPLE lvalue. */
2956 lvalue = TREE_OPERAND (*expr_p, 0);
2957 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2958 if (ret == GS_ERROR)
2959 return ret;
2961 /* Extract the operands to the arithmetic operation. */
2962 lhs = lvalue;
2963 rhs = TREE_OPERAND (*expr_p, 1);
2965 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2966 that as the result value and in the postqueue operation. */
2967 if (postfix)
2969 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2970 if (ret == GS_ERROR)
2971 return ret;
2973 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2976 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2977 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2979 rhs = convert_to_ptrofftype_loc (loc, rhs);
2980 if (arith_code == MINUS_EXPR)
2981 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2982 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2984 else
2985 t1 = fold_convert (TREE_TYPE (*expr_p),
2986 fold_build2 (arith_code, arith_type,
2987 fold_convert (arith_type, lhs),
2988 fold_convert (arith_type, rhs)));
2990 if (postfix)
2992 gimplify_assign (lvalue, t1, pre_p);
2993 gimplify_seq_add_seq (orig_post_p, post);
2994 *expr_p = lhs;
2995 return GS_ALL_DONE;
2997 else
2999 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3000 return GS_OK;
3004 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3006 static void
3007 maybe_with_size_expr (tree *expr_p)
3009 tree expr = *expr_p;
3010 tree type = TREE_TYPE (expr);
3011 tree size;
3013 /* If we've already wrapped this or the type is error_mark_node, we can't do
3014 anything. */
3015 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3016 || type == error_mark_node)
3017 return;
3019 /* If the size isn't known or is a constant, we have nothing to do. */
3020 size = TYPE_SIZE_UNIT (type);
3021 if (!size || TREE_CODE (size) == INTEGER_CST)
3022 return;
3024 /* Otherwise, make a WITH_SIZE_EXPR. */
3025 size = unshare_expr (size);
3026 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3027 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3030 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3031 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3032 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3033 gimplified to an SSA name. */
3035 enum gimplify_status
3036 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3037 bool allow_ssa)
3039 bool (*test) (tree);
3040 fallback_t fb;
3042 /* In general, we allow lvalues for function arguments to avoid
3043 extra overhead of copying large aggregates out of even larger
3044 aggregates into temporaries only to copy the temporaries to
3045 the argument list. Make optimizers happy by pulling out to
3046 temporaries those types that fit in registers. */
3047 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3048 test = is_gimple_val, fb = fb_rvalue;
3049 else
3051 test = is_gimple_lvalue, fb = fb_either;
3052 /* Also strip a TARGET_EXPR that would force an extra copy. */
3053 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3055 tree init = TARGET_EXPR_INITIAL (*arg_p);
3056 if (init
3057 && !VOID_TYPE_P (TREE_TYPE (init)))
3058 *arg_p = init;
3062 /* If this is a variable sized type, we must remember the size. */
3063 maybe_with_size_expr (arg_p);
3065 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3066 /* Make sure arguments have the same location as the function call
3067 itself. */
3068 protected_set_expr_location (*arg_p, call_location);
3070 /* There is a sequence point before a function call. Side effects in
3071 the argument list must occur before the actual call. So, when
3072 gimplifying arguments, force gimplify_expr to use an internal
3073 post queue which is then appended to the end of PRE_P. */
3074 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3077 /* Don't fold inside offloading or taskreg regions: it can break code by
3078 adding decl references that weren't in the source. We'll do it during
3079 omplower pass instead. */
3081 static bool
3082 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3084 struct gimplify_omp_ctx *ctx;
3085 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3086 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3087 return false;
3088 return fold_stmt (gsi);
3091 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3092 WANT_VALUE is true if the result of the call is desired. */
3094 static enum gimplify_status
3095 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3097 tree fndecl, parms, p, fnptrtype;
3098 enum gimplify_status ret;
3099 int i, nargs;
3100 gcall *call;
3101 bool builtin_va_start_p = false;
3102 location_t loc = EXPR_LOCATION (*expr_p);
3104 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3106 /* For reliable diagnostics during inlining, it is necessary that
3107 every call_expr be annotated with file and line. */
3108 if (! EXPR_HAS_LOCATION (*expr_p))
3109 SET_EXPR_LOCATION (*expr_p, input_location);
3111 /* Gimplify internal functions created in the FEs. */
3112 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3114 if (want_value)
3115 return GS_ALL_DONE;
3117 nargs = call_expr_nargs (*expr_p);
3118 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3119 auto_vec<tree> vargs (nargs);
3121 for (i = 0; i < nargs; i++)
3123 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3124 EXPR_LOCATION (*expr_p));
3125 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3128 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3129 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3130 gimplify_seq_add_stmt (pre_p, call);
3131 return GS_ALL_DONE;
3134 /* This may be a call to a builtin function.
3136 Builtin function calls may be transformed into different
3137 (and more efficient) builtin function calls under certain
3138 circumstances. Unfortunately, gimplification can muck things
3139 up enough that the builtin expanders are not aware that certain
3140 transformations are still valid.
3142 So we attempt transformation/gimplification of the call before
3143 we gimplify the CALL_EXPR. At this time we do not manage to
3144 transform all calls in the same manner as the expanders do, but
3145 we do transform most of them. */
3146 fndecl = get_callee_fndecl (*expr_p);
3147 if (fndecl
3148 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3149 switch (DECL_FUNCTION_CODE (fndecl))
3151 CASE_BUILT_IN_ALLOCA:
3152 /* If the call has been built for a variable-sized object, then we
3153 want to restore the stack level when the enclosing BIND_EXPR is
3154 exited to reclaim the allocated space; otherwise, we precisely
3155 need to do the opposite and preserve the latest stack level. */
3156 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3157 gimplify_ctxp->save_stack = true;
3158 else
3159 gimplify_ctxp->keep_stack = true;
3160 break;
3162 case BUILT_IN_VA_START:
3164 builtin_va_start_p = TRUE;
3165 if (call_expr_nargs (*expr_p) < 2)
3167 error ("too few arguments to function %<va_start%>");
3168 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3169 return GS_OK;
3172 if (fold_builtin_next_arg (*expr_p, true))
3174 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3175 return GS_OK;
3177 break;
3180 default:
3183 if (fndecl && DECL_BUILT_IN (fndecl))
3185 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3186 if (new_tree && new_tree != *expr_p)
3188 /* There was a transformation of this call which computes the
3189 same value, but in a more efficient way. Return and try
3190 again. */
3191 *expr_p = new_tree;
3192 return GS_OK;
3196 /* Remember the original function pointer type. */
3197 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3199 /* There is a sequence point before the call, so any side effects in
3200 the calling expression must occur before the actual call. Force
3201 gimplify_expr to use an internal post queue. */
3202 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3203 is_gimple_call_addr, fb_rvalue);
3205 nargs = call_expr_nargs (*expr_p);
3207 /* Get argument types for verification. */
3208 fndecl = get_callee_fndecl (*expr_p);
3209 parms = NULL_TREE;
3210 if (fndecl)
3211 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3212 else
3213 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3215 if (fndecl && DECL_ARGUMENTS (fndecl))
3216 p = DECL_ARGUMENTS (fndecl);
3217 else if (parms)
3218 p = parms;
3219 else
3220 p = NULL_TREE;
3221 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3224 /* If the last argument is __builtin_va_arg_pack () and it is not
3225 passed as a named argument, decrease the number of CALL_EXPR
3226 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3227 if (!p
3228 && i < nargs
3229 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3231 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3232 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3234 if (last_arg_fndecl
3235 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3236 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3237 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3239 tree call = *expr_p;
3241 --nargs;
3242 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3243 CALL_EXPR_FN (call),
3244 nargs, CALL_EXPR_ARGP (call));
3246 /* Copy all CALL_EXPR flags, location and block, except
3247 CALL_EXPR_VA_ARG_PACK flag. */
3248 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3249 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3250 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3251 = CALL_EXPR_RETURN_SLOT_OPT (call);
3252 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3253 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3255 /* Set CALL_EXPR_VA_ARG_PACK. */
3256 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3260 /* If the call returns twice then after building the CFG the call
3261 argument computations will no longer dominate the call because
3262 we add an abnormal incoming edge to the call. So do not use SSA
3263 vars there. */
3264 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3266 /* Gimplify the function arguments. */
3267 if (nargs > 0)
3269 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3270 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3271 PUSH_ARGS_REVERSED ? i-- : i++)
3273 enum gimplify_status t;
3275 /* Avoid gimplifying the second argument to va_start, which needs to
3276 be the plain PARM_DECL. */
3277 if ((i != 1) || !builtin_va_start_p)
3279 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3280 EXPR_LOCATION (*expr_p), ! returns_twice);
3282 if (t == GS_ERROR)
3283 ret = GS_ERROR;
3288 /* Gimplify the static chain. */
3289 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3291 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3292 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3293 else
3295 enum gimplify_status t;
3296 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3297 EXPR_LOCATION (*expr_p), ! returns_twice);
3298 if (t == GS_ERROR)
3299 ret = GS_ERROR;
3303 /* Verify the function result. */
3304 if (want_value && fndecl
3305 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3307 error_at (loc, "using result of function returning %<void%>");
3308 ret = GS_ERROR;
3311 /* Try this again in case gimplification exposed something. */
3312 if (ret != GS_ERROR)
3314 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3316 if (new_tree && new_tree != *expr_p)
3318 /* There was a transformation of this call which computes the
3319 same value, but in a more efficient way. Return and try
3320 again. */
3321 *expr_p = new_tree;
3322 return GS_OK;
3325 else
3327 *expr_p = error_mark_node;
3328 return GS_ERROR;
3331 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3332 decl. This allows us to eliminate redundant or useless
3333 calls to "const" functions. */
3334 if (TREE_CODE (*expr_p) == CALL_EXPR)
3336 int flags = call_expr_flags (*expr_p);
3337 if (flags & (ECF_CONST | ECF_PURE)
3338 /* An infinite loop is considered a side effect. */
3339 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3340 TREE_SIDE_EFFECTS (*expr_p) = 0;
3343 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3344 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3345 form and delegate the creation of a GIMPLE_CALL to
3346 gimplify_modify_expr. This is always possible because when
3347 WANT_VALUE is true, the caller wants the result of this call into
3348 a temporary, which means that we will emit an INIT_EXPR in
3349 internal_get_tmp_var which will then be handled by
3350 gimplify_modify_expr. */
3351 if (!want_value)
3353 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3354 have to do is replicate it as a GIMPLE_CALL tuple. */
3355 gimple_stmt_iterator gsi;
3356 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3357 notice_special_calls (call);
3358 gimplify_seq_add_stmt (pre_p, call);
3359 gsi = gsi_last (*pre_p);
3360 maybe_fold_stmt (&gsi);
3361 *expr_p = NULL_TREE;
3363 else
3364 /* Remember the original function type. */
3365 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3366 CALL_EXPR_FN (*expr_p));
3368 return ret;
3371 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3372 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3374 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3375 condition is true or false, respectively. If null, we should generate
3376 our own to skip over the evaluation of this specific expression.
3378 LOCUS is the source location of the COND_EXPR.
3380 This function is the tree equivalent of do_jump.
3382 shortcut_cond_r should only be called by shortcut_cond_expr. */
3384 static tree
3385 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3386 location_t locus)
3388 tree local_label = NULL_TREE;
3389 tree t, expr = NULL;
3391 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3392 retain the shortcut semantics. Just insert the gotos here;
3393 shortcut_cond_expr will append the real blocks later. */
3394 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3396 location_t new_locus;
3398 /* Turn if (a && b) into
3400 if (a); else goto no;
3401 if (b) goto yes; else goto no;
3402 (no:) */
3404 if (false_label_p == NULL)
3405 false_label_p = &local_label;
3407 /* Keep the original source location on the first 'if'. */
3408 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3409 append_to_statement_list (t, &expr);
3411 /* Set the source location of the && on the second 'if'. */
3412 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3413 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3414 new_locus);
3415 append_to_statement_list (t, &expr);
3417 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3419 location_t new_locus;
3421 /* Turn if (a || b) into
3423 if (a) goto yes;
3424 if (b) goto yes; else goto no;
3425 (yes:) */
3427 if (true_label_p == NULL)
3428 true_label_p = &local_label;
3430 /* Keep the original source location on the first 'if'. */
3431 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3432 append_to_statement_list (t, &expr);
3434 /* Set the source location of the || on the second 'if'. */
3435 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3436 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3437 new_locus);
3438 append_to_statement_list (t, &expr);
3440 else if (TREE_CODE (pred) == COND_EXPR
3441 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3442 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3444 location_t new_locus;
3446 /* As long as we're messing with gotos, turn if (a ? b : c) into
3447 if (a)
3448 if (b) goto yes; else goto no;
3449 else
3450 if (c) goto yes; else goto no;
3452 Don't do this if one of the arms has void type, which can happen
3453 in C++ when the arm is throw. */
3455 /* Keep the original source location on the first 'if'. Set the source
3456 location of the ? on the second 'if'. */
3457 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3458 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3459 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3460 false_label_p, locus),
3461 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3462 false_label_p, new_locus));
3464 else
3466 expr = build3 (COND_EXPR, void_type_node, pred,
3467 build_and_jump (true_label_p),
3468 build_and_jump (false_label_p));
3469 SET_EXPR_LOCATION (expr, locus);
3472 if (local_label)
3474 t = build1 (LABEL_EXPR, void_type_node, local_label);
3475 append_to_statement_list (t, &expr);
3478 return expr;
3481 /* Given a conditional expression EXPR with short-circuit boolean
3482 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3483 predicate apart into the equivalent sequence of conditionals. */
3485 static tree
3486 shortcut_cond_expr (tree expr)
3488 tree pred = TREE_OPERAND (expr, 0);
3489 tree then_ = TREE_OPERAND (expr, 1);
3490 tree else_ = TREE_OPERAND (expr, 2);
3491 tree true_label, false_label, end_label, t;
3492 tree *true_label_p;
3493 tree *false_label_p;
3494 bool emit_end, emit_false, jump_over_else;
3495 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3496 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3498 /* First do simple transformations. */
3499 if (!else_se)
3501 /* If there is no 'else', turn
3502 if (a && b) then c
3503 into
3504 if (a) if (b) then c. */
3505 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3507 /* Keep the original source location on the first 'if'. */
3508 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3509 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3510 /* Set the source location of the && on the second 'if'. */
3511 if (EXPR_HAS_LOCATION (pred))
3512 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3513 then_ = shortcut_cond_expr (expr);
3514 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3515 pred = TREE_OPERAND (pred, 0);
3516 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3517 SET_EXPR_LOCATION (expr, locus);
3521 if (!then_se)
3523 /* If there is no 'then', turn
3524 if (a || b); else d
3525 into
3526 if (a); else if (b); else d. */
3527 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3529 /* Keep the original source location on the first 'if'. */
3530 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3531 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3532 /* Set the source location of the || on the second 'if'. */
3533 if (EXPR_HAS_LOCATION (pred))
3534 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3535 else_ = shortcut_cond_expr (expr);
3536 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3537 pred = TREE_OPERAND (pred, 0);
3538 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3539 SET_EXPR_LOCATION (expr, locus);
3543 /* If we're done, great. */
3544 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3545 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3546 return expr;
3548 /* Otherwise we need to mess with gotos. Change
3549 if (a) c; else d;
3551 if (a); else goto no;
3552 c; goto end;
3553 no: d; end:
3554 and recursively gimplify the condition. */
3556 true_label = false_label = end_label = NULL_TREE;
3558 /* If our arms just jump somewhere, hijack those labels so we don't
3559 generate jumps to jumps. */
3561 if (then_
3562 && TREE_CODE (then_) == GOTO_EXPR
3563 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3565 true_label = GOTO_DESTINATION (then_);
3566 then_ = NULL;
3567 then_se = false;
3570 if (else_
3571 && TREE_CODE (else_) == GOTO_EXPR
3572 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3574 false_label = GOTO_DESTINATION (else_);
3575 else_ = NULL;
3576 else_se = false;
3579 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3580 if (true_label)
3581 true_label_p = &true_label;
3582 else
3583 true_label_p = NULL;
3585 /* The 'else' branch also needs a label if it contains interesting code. */
3586 if (false_label || else_se)
3587 false_label_p = &false_label;
3588 else
3589 false_label_p = NULL;
3591 /* If there was nothing else in our arms, just forward the label(s). */
3592 if (!then_se && !else_se)
3593 return shortcut_cond_r (pred, true_label_p, false_label_p,
3594 EXPR_LOC_OR_LOC (expr, input_location));
3596 /* If our last subexpression already has a terminal label, reuse it. */
3597 if (else_se)
3598 t = expr_last (else_);
3599 else if (then_se)
3600 t = expr_last (then_);
3601 else
3602 t = NULL;
3603 if (t && TREE_CODE (t) == LABEL_EXPR)
3604 end_label = LABEL_EXPR_LABEL (t);
3606 /* If we don't care about jumping to the 'else' branch, jump to the end
3607 if the condition is false. */
3608 if (!false_label_p)
3609 false_label_p = &end_label;
3611 /* We only want to emit these labels if we aren't hijacking them. */
3612 emit_end = (end_label == NULL_TREE);
3613 emit_false = (false_label == NULL_TREE);
3615 /* We only emit the jump over the else clause if we have to--if the
3616 then clause may fall through. Otherwise we can wind up with a
3617 useless jump and a useless label at the end of gimplified code,
3618 which will cause us to think that this conditional as a whole
3619 falls through even if it doesn't. If we then inline a function
3620 which ends with such a condition, that can cause us to issue an
3621 inappropriate warning about control reaching the end of a
3622 non-void function. */
3623 jump_over_else = block_may_fallthru (then_);
3625 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3626 EXPR_LOC_OR_LOC (expr, input_location));
3628 expr = NULL;
3629 append_to_statement_list (pred, &expr);
3631 append_to_statement_list (then_, &expr);
3632 if (else_se)
3634 if (jump_over_else)
3636 tree last = expr_last (expr);
3637 t = build_and_jump (&end_label);
3638 if (EXPR_HAS_LOCATION (last))
3639 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3640 append_to_statement_list (t, &expr);
3642 if (emit_false)
3644 t = build1 (LABEL_EXPR, void_type_node, false_label);
3645 append_to_statement_list (t, &expr);
3647 append_to_statement_list (else_, &expr);
3649 if (emit_end && end_label)
3651 t = build1 (LABEL_EXPR, void_type_node, end_label);
3652 append_to_statement_list (t, &expr);
3655 return expr;
3658 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3660 tree
3661 gimple_boolify (tree expr)
3663 tree type = TREE_TYPE (expr);
3664 location_t loc = EXPR_LOCATION (expr);
3666 if (TREE_CODE (expr) == NE_EXPR
3667 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3668 && integer_zerop (TREE_OPERAND (expr, 1)))
3670 tree call = TREE_OPERAND (expr, 0);
3671 tree fn = get_callee_fndecl (call);
3673 /* For __builtin_expect ((long) (x), y) recurse into x as well
3674 if x is truth_value_p. */
3675 if (fn
3676 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3677 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3678 && call_expr_nargs (call) == 2)
3680 tree arg = CALL_EXPR_ARG (call, 0);
3681 if (arg)
3683 if (TREE_CODE (arg) == NOP_EXPR
3684 && TREE_TYPE (arg) == TREE_TYPE (call))
3685 arg = TREE_OPERAND (arg, 0);
3686 if (truth_value_p (TREE_CODE (arg)))
3688 arg = gimple_boolify (arg);
3689 CALL_EXPR_ARG (call, 0)
3690 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3696 switch (TREE_CODE (expr))
3698 case TRUTH_AND_EXPR:
3699 case TRUTH_OR_EXPR:
3700 case TRUTH_XOR_EXPR:
3701 case TRUTH_ANDIF_EXPR:
3702 case TRUTH_ORIF_EXPR:
3703 /* Also boolify the arguments of truth exprs. */
3704 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3705 /* FALLTHRU */
3707 case TRUTH_NOT_EXPR:
3708 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3710 /* These expressions always produce boolean results. */
3711 if (TREE_CODE (type) != BOOLEAN_TYPE)
3712 TREE_TYPE (expr) = boolean_type_node;
3713 return expr;
3715 case ANNOTATE_EXPR:
3716 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3718 case annot_expr_ivdep_kind:
3719 case annot_expr_unroll_kind:
3720 case annot_expr_no_vector_kind:
3721 case annot_expr_vector_kind:
3722 case annot_expr_parallel_kind:
3723 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3724 if (TREE_CODE (type) != BOOLEAN_TYPE)
3725 TREE_TYPE (expr) = boolean_type_node;
3726 return expr;
3727 default:
3728 gcc_unreachable ();
3731 default:
3732 if (COMPARISON_CLASS_P (expr))
3734 /* There expressions always prduce boolean results. */
3735 if (TREE_CODE (type) != BOOLEAN_TYPE)
3736 TREE_TYPE (expr) = boolean_type_node;
3737 return expr;
3739 /* Other expressions that get here must have boolean values, but
3740 might need to be converted to the appropriate mode. */
3741 if (TREE_CODE (type) == BOOLEAN_TYPE)
3742 return expr;
3743 return fold_convert_loc (loc, boolean_type_node, expr);
3747 /* Given a conditional expression *EXPR_P without side effects, gimplify
3748 its operands. New statements are inserted to PRE_P. */
3750 static enum gimplify_status
3751 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3753 tree expr = *expr_p, cond;
3754 enum gimplify_status ret, tret;
3755 enum tree_code code;
3757 cond = gimple_boolify (COND_EXPR_COND (expr));
3759 /* We need to handle && and || specially, as their gimplification
3760 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3761 code = TREE_CODE (cond);
3762 if (code == TRUTH_ANDIF_EXPR)
3763 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3764 else if (code == TRUTH_ORIF_EXPR)
3765 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3766 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3767 COND_EXPR_COND (*expr_p) = cond;
3769 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3770 is_gimple_val, fb_rvalue);
3771 ret = MIN (ret, tret);
3772 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3773 is_gimple_val, fb_rvalue);
3775 return MIN (ret, tret);
3778 /* Return true if evaluating EXPR could trap.
3779 EXPR is GENERIC, while tree_could_trap_p can be called
3780 only on GIMPLE. */
3782 static bool
3783 generic_expr_could_trap_p (tree expr)
3785 unsigned i, n;
3787 if (!expr || is_gimple_val (expr))
3788 return false;
3790 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3791 return true;
3793 n = TREE_OPERAND_LENGTH (expr);
3794 for (i = 0; i < n; i++)
3795 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3796 return true;
3798 return false;
3801 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3802 into
3804 if (p) if (p)
3805 t1 = a; a;
3806 else or else
3807 t1 = b; b;
3810 The second form is used when *EXPR_P is of type void.
3812 PRE_P points to the list where side effects that must happen before
3813 *EXPR_P should be stored. */
3815 static enum gimplify_status
3816 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3818 tree expr = *expr_p;
3819 tree type = TREE_TYPE (expr);
3820 location_t loc = EXPR_LOCATION (expr);
3821 tree tmp, arm1, arm2;
3822 enum gimplify_status ret;
3823 tree label_true, label_false, label_cont;
3824 bool have_then_clause_p, have_else_clause_p;
3825 gcond *cond_stmt;
3826 enum tree_code pred_code;
3827 gimple_seq seq = NULL;
3829 /* If this COND_EXPR has a value, copy the values into a temporary within
3830 the arms. */
3831 if (!VOID_TYPE_P (type))
3833 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3834 tree result;
3836 /* If either an rvalue is ok or we do not require an lvalue, create the
3837 temporary. But we cannot do that if the type is addressable. */
3838 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3839 && !TREE_ADDRESSABLE (type))
3841 if (gimplify_ctxp->allow_rhs_cond_expr
3842 /* If either branch has side effects or could trap, it can't be
3843 evaluated unconditionally. */
3844 && !TREE_SIDE_EFFECTS (then_)
3845 && !generic_expr_could_trap_p (then_)
3846 && !TREE_SIDE_EFFECTS (else_)
3847 && !generic_expr_could_trap_p (else_))
3848 return gimplify_pure_cond_expr (expr_p, pre_p);
3850 tmp = create_tmp_var (type, "iftmp");
3851 result = tmp;
3854 /* Otherwise, only create and copy references to the values. */
3855 else
3857 type = build_pointer_type (type);
3859 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3860 then_ = build_fold_addr_expr_loc (loc, then_);
3862 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3863 else_ = build_fold_addr_expr_loc (loc, else_);
3865 expr
3866 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3868 tmp = create_tmp_var (type, "iftmp");
3869 result = build_simple_mem_ref_loc (loc, tmp);
3872 /* Build the new then clause, `tmp = then_;'. But don't build the
3873 assignment if the value is void; in C++ it can be if it's a throw. */
3874 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3875 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3877 /* Similarly, build the new else clause, `tmp = else_;'. */
3878 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3879 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3881 TREE_TYPE (expr) = void_type_node;
3882 recalculate_side_effects (expr);
3884 /* Move the COND_EXPR to the prequeue. */
3885 gimplify_stmt (&expr, pre_p);
3887 *expr_p = result;
3888 return GS_ALL_DONE;
3891 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3892 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3893 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3894 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3896 /* Make sure the condition has BOOLEAN_TYPE. */
3897 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3899 /* Break apart && and || conditions. */
3900 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3901 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3903 expr = shortcut_cond_expr (expr);
3905 if (expr != *expr_p)
3907 *expr_p = expr;
3909 /* We can't rely on gimplify_expr to re-gimplify the expanded
3910 form properly, as cleanups might cause the target labels to be
3911 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3912 set up a conditional context. */
3913 gimple_push_condition ();
3914 gimplify_stmt (expr_p, &seq);
3915 gimple_pop_condition (pre_p);
3916 gimple_seq_add_seq (pre_p, seq);
3918 return GS_ALL_DONE;
3922 /* Now do the normal gimplification. */
3924 /* Gimplify condition. */
3925 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3926 fb_rvalue);
3927 if (ret == GS_ERROR)
3928 return GS_ERROR;
3929 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3931 gimple_push_condition ();
3933 have_then_clause_p = have_else_clause_p = false;
3934 if (TREE_OPERAND (expr, 1) != NULL
3935 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3936 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3937 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3938 == current_function_decl)
3939 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3940 have different locations, otherwise we end up with incorrect
3941 location information on the branches. */
3942 && (optimize
3943 || !EXPR_HAS_LOCATION (expr)
3944 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3945 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3947 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3948 have_then_clause_p = true;
3950 else
3951 label_true = create_artificial_label (UNKNOWN_LOCATION);
3952 if (TREE_OPERAND (expr, 2) != NULL
3953 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3954 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3955 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3956 == current_function_decl)
3957 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3958 have different locations, otherwise we end up with incorrect
3959 location information on the branches. */
3960 && (optimize
3961 || !EXPR_HAS_LOCATION (expr)
3962 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3963 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3965 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3966 have_else_clause_p = true;
3968 else
3969 label_false = create_artificial_label (UNKNOWN_LOCATION);
3971 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3972 &arm2);
3973 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3974 label_false);
3975 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
3976 gimplify_seq_add_stmt (&seq, cond_stmt);
3977 gimple_stmt_iterator gsi = gsi_last (seq);
3978 maybe_fold_stmt (&gsi);
3980 label_cont = NULL_TREE;
3981 if (!have_then_clause_p)
3983 /* For if (...) {} else { code; } put label_true after
3984 the else block. */
3985 if (TREE_OPERAND (expr, 1) == NULL_TREE
3986 && !have_else_clause_p
3987 && TREE_OPERAND (expr, 2) != NULL_TREE)
3988 label_cont = label_true;
3989 else
3991 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3992 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3993 /* For if (...) { code; } else {} or
3994 if (...) { code; } else goto label; or
3995 if (...) { code; return; } else { ... }
3996 label_cont isn't needed. */
3997 if (!have_else_clause_p
3998 && TREE_OPERAND (expr, 2) != NULL_TREE
3999 && gimple_seq_may_fallthru (seq))
4001 gimple *g;
4002 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4004 g = gimple_build_goto (label_cont);
4006 /* GIMPLE_COND's are very low level; they have embedded
4007 gotos. This particular embedded goto should not be marked
4008 with the location of the original COND_EXPR, as it would
4009 correspond to the COND_EXPR's condition, not the ELSE or the
4010 THEN arms. To avoid marking it with the wrong location, flag
4011 it as "no location". */
4012 gimple_set_do_not_emit_location (g);
4014 gimplify_seq_add_stmt (&seq, g);
4018 if (!have_else_clause_p)
4020 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4021 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4023 if (label_cont)
4024 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4026 gimple_pop_condition (pre_p);
4027 gimple_seq_add_seq (pre_p, seq);
4029 if (ret == GS_ERROR)
4030 ; /* Do nothing. */
4031 else if (have_then_clause_p || have_else_clause_p)
4032 ret = GS_ALL_DONE;
4033 else
4035 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4036 expr = TREE_OPERAND (expr, 0);
4037 gimplify_stmt (&expr, pre_p);
4040 *expr_p = NULL;
4041 return ret;
4044 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4045 to be marked addressable.
4047 We cannot rely on such an expression being directly markable if a temporary
4048 has been created by the gimplification. In this case, we create another
4049 temporary and initialize it with a copy, which will become a store after we
4050 mark it addressable. This can happen if the front-end passed us something
4051 that it could not mark addressable yet, like a Fortran pass-by-reference
4052 parameter (int) floatvar. */
4054 static void
4055 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4057 while (handled_component_p (*expr_p))
4058 expr_p = &TREE_OPERAND (*expr_p, 0);
4059 if (is_gimple_reg (*expr_p))
4061 /* Do not allow an SSA name as the temporary. */
4062 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4063 DECL_GIMPLE_REG_P (var) = 0;
4064 *expr_p = var;
4068 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4069 a call to __builtin_memcpy. */
4071 static enum gimplify_status
4072 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4073 gimple_seq *seq_p)
4075 tree t, to, to_ptr, from, from_ptr;
4076 gcall *gs;
4077 location_t loc = EXPR_LOCATION (*expr_p);
4079 to = TREE_OPERAND (*expr_p, 0);
4080 from = TREE_OPERAND (*expr_p, 1);
4082 /* Mark the RHS addressable. Beware that it may not be possible to do so
4083 directly if a temporary has been created by the gimplification. */
4084 prepare_gimple_addressable (&from, seq_p);
4086 mark_addressable (from);
4087 from_ptr = build_fold_addr_expr_loc (loc, from);
4088 gimplify_arg (&from_ptr, seq_p, loc);
4090 mark_addressable (to);
4091 to_ptr = build_fold_addr_expr_loc (loc, to);
4092 gimplify_arg (&to_ptr, seq_p, loc);
4094 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4096 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4098 if (want_value)
4100 /* tmp = memcpy() */
4101 t = create_tmp_var (TREE_TYPE (to_ptr));
4102 gimple_call_set_lhs (gs, t);
4103 gimplify_seq_add_stmt (seq_p, gs);
4105 *expr_p = build_simple_mem_ref (t);
4106 return GS_ALL_DONE;
4109 gimplify_seq_add_stmt (seq_p, gs);
4110 *expr_p = NULL;
4111 return GS_ALL_DONE;
4114 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4115 a call to __builtin_memset. In this case we know that the RHS is
4116 a CONSTRUCTOR with an empty element list. */
4118 static enum gimplify_status
4119 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4120 gimple_seq *seq_p)
4122 tree t, from, to, to_ptr;
4123 gcall *gs;
4124 location_t loc = EXPR_LOCATION (*expr_p);
4126 /* Assert our assumptions, to abort instead of producing wrong code
4127 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4128 not be immediately exposed. */
4129 from = TREE_OPERAND (*expr_p, 1);
4130 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4131 from = TREE_OPERAND (from, 0);
4133 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4134 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4136 /* Now proceed. */
4137 to = TREE_OPERAND (*expr_p, 0);
4139 to_ptr = build_fold_addr_expr_loc (loc, to);
4140 gimplify_arg (&to_ptr, seq_p, loc);
4141 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4143 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4145 if (want_value)
4147 /* tmp = memset() */
4148 t = create_tmp_var (TREE_TYPE (to_ptr));
4149 gimple_call_set_lhs (gs, t);
4150 gimplify_seq_add_stmt (seq_p, gs);
4152 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4153 return GS_ALL_DONE;
4156 gimplify_seq_add_stmt (seq_p, gs);
4157 *expr_p = NULL;
4158 return GS_ALL_DONE;
4161 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4162 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4163 assignment. Return non-null if we detect a potential overlap. */
4165 struct gimplify_init_ctor_preeval_data
4167 /* The base decl of the lhs object. May be NULL, in which case we
4168 have to assume the lhs is indirect. */
4169 tree lhs_base_decl;
4171 /* The alias set of the lhs object. */
4172 alias_set_type lhs_alias_set;
4175 static tree
4176 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4178 struct gimplify_init_ctor_preeval_data *data
4179 = (struct gimplify_init_ctor_preeval_data *) xdata;
4180 tree t = *tp;
4182 /* If we find the base object, obviously we have overlap. */
4183 if (data->lhs_base_decl == t)
4184 return t;
4186 /* If the constructor component is indirect, determine if we have a
4187 potential overlap with the lhs. The only bits of information we
4188 have to go on at this point are addressability and alias sets. */
4189 if ((INDIRECT_REF_P (t)
4190 || TREE_CODE (t) == MEM_REF)
4191 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4192 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4193 return t;
4195 /* If the constructor component is a call, determine if it can hide a
4196 potential overlap with the lhs through an INDIRECT_REF like above.
4197 ??? Ugh - this is completely broken. In fact this whole analysis
4198 doesn't look conservative. */
4199 if (TREE_CODE (t) == CALL_EXPR)
4201 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4203 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4204 if (POINTER_TYPE_P (TREE_VALUE (type))
4205 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4206 && alias_sets_conflict_p (data->lhs_alias_set,
4207 get_alias_set
4208 (TREE_TYPE (TREE_VALUE (type)))))
4209 return t;
4212 if (IS_TYPE_OR_DECL_P (t))
4213 *walk_subtrees = 0;
4214 return NULL;
4217 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4218 force values that overlap with the lhs (as described by *DATA)
4219 into temporaries. */
4221 static void
4222 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4223 struct gimplify_init_ctor_preeval_data *data)
4225 enum gimplify_status one;
4227 /* If the value is constant, then there's nothing to pre-evaluate. */
4228 if (TREE_CONSTANT (*expr_p))
4230 /* Ensure it does not have side effects, it might contain a reference to
4231 the object we're initializing. */
4232 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4233 return;
4236 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4237 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4238 return;
4240 /* Recurse for nested constructors. */
4241 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4243 unsigned HOST_WIDE_INT ix;
4244 constructor_elt *ce;
4245 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4247 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4248 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4250 return;
4253 /* If this is a variable sized type, we must remember the size. */
4254 maybe_with_size_expr (expr_p);
4256 /* Gimplify the constructor element to something appropriate for the rhs
4257 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4258 the gimplifier will consider this a store to memory. Doing this
4259 gimplification now means that we won't have to deal with complicated
4260 language-specific trees, nor trees like SAVE_EXPR that can induce
4261 exponential search behavior. */
4262 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4263 if (one == GS_ERROR)
4265 *expr_p = NULL;
4266 return;
4269 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4270 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4271 always be true for all scalars, since is_gimple_mem_rhs insists on a
4272 temporary variable for them. */
4273 if (DECL_P (*expr_p))
4274 return;
4276 /* If this is of variable size, we have no choice but to assume it doesn't
4277 overlap since we can't make a temporary for it. */
4278 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4279 return;
4281 /* Otherwise, we must search for overlap ... */
4282 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4283 return;
4285 /* ... and if found, force the value into a temporary. */
4286 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4289 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4290 a RANGE_EXPR in a CONSTRUCTOR for an array.
4292 var = lower;
4293 loop_entry:
4294 object[var] = value;
4295 if (var == upper)
4296 goto loop_exit;
4297 var = var + 1;
4298 goto loop_entry;
4299 loop_exit:
4301 We increment var _after_ the loop exit check because we might otherwise
4302 fail if upper == TYPE_MAX_VALUE (type for upper).
4304 Note that we never have to deal with SAVE_EXPRs here, because this has
4305 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4307 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4308 gimple_seq *, bool);
4310 static void
4311 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4312 tree value, tree array_elt_type,
4313 gimple_seq *pre_p, bool cleared)
4315 tree loop_entry_label, loop_exit_label, fall_thru_label;
4316 tree var, var_type, cref, tmp;
4318 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4319 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4320 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4322 /* Create and initialize the index variable. */
4323 var_type = TREE_TYPE (upper);
4324 var = create_tmp_var (var_type);
4325 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4327 /* Add the loop entry label. */
4328 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4330 /* Build the reference. */
4331 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4332 var, NULL_TREE, NULL_TREE);
4334 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4335 the store. Otherwise just assign value to the reference. */
4337 if (TREE_CODE (value) == CONSTRUCTOR)
4338 /* NB we might have to call ourself recursively through
4339 gimplify_init_ctor_eval if the value is a constructor. */
4340 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4341 pre_p, cleared);
4342 else
4343 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4345 /* We exit the loop when the index var is equal to the upper bound. */
4346 gimplify_seq_add_stmt (pre_p,
4347 gimple_build_cond (EQ_EXPR, var, upper,
4348 loop_exit_label, fall_thru_label));
4350 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4352 /* Otherwise, increment the index var... */
4353 tmp = build2 (PLUS_EXPR, var_type, var,
4354 fold_convert (var_type, integer_one_node));
4355 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4357 /* ...and jump back to the loop entry. */
4358 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4360 /* Add the loop exit label. */
4361 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4364 /* Return true if FDECL is accessing a field that is zero sized. */
4366 static bool
4367 zero_sized_field_decl (const_tree fdecl)
4369 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4370 && integer_zerop (DECL_SIZE (fdecl)))
4371 return true;
4372 return false;
4375 /* Return true if TYPE is zero sized. */
4377 static bool
4378 zero_sized_type (const_tree type)
4380 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4381 && integer_zerop (TYPE_SIZE (type)))
4382 return true;
4383 return false;
4386 /* A subroutine of gimplify_init_constructor. Generate individual
4387 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4388 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4389 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4390 zeroed first. */
4392 static void
4393 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4394 gimple_seq *pre_p, bool cleared)
4396 tree array_elt_type = NULL;
4397 unsigned HOST_WIDE_INT ix;
4398 tree purpose, value;
4400 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4401 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4403 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4405 tree cref;
4407 /* NULL values are created above for gimplification errors. */
4408 if (value == NULL)
4409 continue;
4411 if (cleared && initializer_zerop (value))
4412 continue;
4414 /* ??? Here's to hoping the front end fills in all of the indices,
4415 so we don't have to figure out what's missing ourselves. */
4416 gcc_assert (purpose);
4418 /* Skip zero-sized fields, unless value has side-effects. This can
4419 happen with calls to functions returning a zero-sized type, which
4420 we shouldn't discard. As a number of downstream passes don't
4421 expect sets of zero-sized fields, we rely on the gimplification of
4422 the MODIFY_EXPR we make below to drop the assignment statement. */
4423 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4424 continue;
4426 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4427 whole range. */
4428 if (TREE_CODE (purpose) == RANGE_EXPR)
4430 tree lower = TREE_OPERAND (purpose, 0);
4431 tree upper = TREE_OPERAND (purpose, 1);
4433 /* If the lower bound is equal to upper, just treat it as if
4434 upper was the index. */
4435 if (simple_cst_equal (lower, upper))
4436 purpose = upper;
4437 else
4439 gimplify_init_ctor_eval_range (object, lower, upper, value,
4440 array_elt_type, pre_p, cleared);
4441 continue;
4445 if (array_elt_type)
4447 /* Do not use bitsizetype for ARRAY_REF indices. */
4448 if (TYPE_DOMAIN (TREE_TYPE (object)))
4449 purpose
4450 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4451 purpose);
4452 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4453 purpose, NULL_TREE, NULL_TREE);
4455 else
4457 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4458 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4459 unshare_expr (object), purpose, NULL_TREE);
4462 if (TREE_CODE (value) == CONSTRUCTOR
4463 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4464 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4465 pre_p, cleared);
4466 else
4468 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4469 gimplify_and_add (init, pre_p);
4470 ggc_free (init);
4475 /* Return the appropriate RHS predicate for this LHS. */
4477 gimple_predicate
4478 rhs_predicate_for (tree lhs)
4480 if (is_gimple_reg (lhs))
4481 return is_gimple_reg_rhs_or_call;
4482 else
4483 return is_gimple_mem_rhs_or_call;
4486 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4487 before the LHS has been gimplified. */
4489 static gimple_predicate
4490 initial_rhs_predicate_for (tree lhs)
4492 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4493 return is_gimple_reg_rhs_or_call;
4494 else
4495 return is_gimple_mem_rhs_or_call;
4498 /* Gimplify a C99 compound literal expression. This just means adding
4499 the DECL_EXPR before the current statement and using its anonymous
4500 decl instead. */
4502 static enum gimplify_status
4503 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4504 bool (*gimple_test_f) (tree),
4505 fallback_t fallback)
4507 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4508 tree decl = DECL_EXPR_DECL (decl_s);
4509 tree init = DECL_INITIAL (decl);
4510 /* Mark the decl as addressable if the compound literal
4511 expression is addressable now, otherwise it is marked too late
4512 after we gimplify the initialization expression. */
4513 if (TREE_ADDRESSABLE (*expr_p))
4514 TREE_ADDRESSABLE (decl) = 1;
4515 /* Otherwise, if we don't need an lvalue and have a literal directly
4516 substitute it. Check if it matches the gimple predicate, as
4517 otherwise we'd generate a new temporary, and we can as well just
4518 use the decl we already have. */
4519 else if (!TREE_ADDRESSABLE (decl)
4520 && init
4521 && (fallback & fb_lvalue) == 0
4522 && gimple_test_f (init))
4524 *expr_p = init;
4525 return GS_OK;
4528 /* Preliminarily mark non-addressed complex variables as eligible
4529 for promotion to gimple registers. We'll transform their uses
4530 as we find them. */
4531 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4532 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4533 && !TREE_THIS_VOLATILE (decl)
4534 && !needs_to_live_in_memory (decl))
4535 DECL_GIMPLE_REG_P (decl) = 1;
4537 /* If the decl is not addressable, then it is being used in some
4538 expression or on the right hand side of a statement, and it can
4539 be put into a readonly data section. */
4540 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4541 TREE_READONLY (decl) = 1;
4543 /* This decl isn't mentioned in the enclosing block, so add it to the
4544 list of temps. FIXME it seems a bit of a kludge to say that
4545 anonymous artificial vars aren't pushed, but everything else is. */
4546 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4547 gimple_add_tmp_var (decl);
4549 gimplify_and_add (decl_s, pre_p);
4550 *expr_p = decl;
4551 return GS_OK;
4554 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4555 return a new CONSTRUCTOR if something changed. */
4557 static tree
4558 optimize_compound_literals_in_ctor (tree orig_ctor)
4560 tree ctor = orig_ctor;
4561 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4562 unsigned int idx, num = vec_safe_length (elts);
4564 for (idx = 0; idx < num; idx++)
4566 tree value = (*elts)[idx].value;
4567 tree newval = value;
4568 if (TREE_CODE (value) == CONSTRUCTOR)
4569 newval = optimize_compound_literals_in_ctor (value);
4570 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4572 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4573 tree decl = DECL_EXPR_DECL (decl_s);
4574 tree init = DECL_INITIAL (decl);
4576 if (!TREE_ADDRESSABLE (value)
4577 && !TREE_ADDRESSABLE (decl)
4578 && init
4579 && TREE_CODE (init) == CONSTRUCTOR)
4580 newval = optimize_compound_literals_in_ctor (init);
4582 if (newval == value)
4583 continue;
4585 if (ctor == orig_ctor)
4587 ctor = copy_node (orig_ctor);
4588 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4589 elts = CONSTRUCTOR_ELTS (ctor);
4591 (*elts)[idx].value = newval;
4593 return ctor;
4596 /* A subroutine of gimplify_modify_expr. Break out elements of a
4597 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4599 Note that we still need to clear any elements that don't have explicit
4600 initializers, so if not all elements are initialized we keep the
4601 original MODIFY_EXPR, we just remove all of the constructor elements.
4603 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4604 GS_ERROR if we would have to create a temporary when gimplifying
4605 this constructor. Otherwise, return GS_OK.
4607 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4609 static enum gimplify_status
4610 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4611 bool want_value, bool notify_temp_creation)
4613 tree object, ctor, type;
4614 enum gimplify_status ret;
4615 vec<constructor_elt, va_gc> *elts;
4617 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4619 if (!notify_temp_creation)
4621 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4622 is_gimple_lvalue, fb_lvalue);
4623 if (ret == GS_ERROR)
4624 return ret;
4627 object = TREE_OPERAND (*expr_p, 0);
4628 ctor = TREE_OPERAND (*expr_p, 1)
4629 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4630 type = TREE_TYPE (ctor);
4631 elts = CONSTRUCTOR_ELTS (ctor);
4632 ret = GS_ALL_DONE;
4634 switch (TREE_CODE (type))
4636 case RECORD_TYPE:
4637 case UNION_TYPE:
4638 case QUAL_UNION_TYPE:
4639 case ARRAY_TYPE:
4641 struct gimplify_init_ctor_preeval_data preeval_data;
4642 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4643 bool cleared, complete_p, valid_const_initializer;
4645 /* Aggregate types must lower constructors to initialization of
4646 individual elements. The exception is that a CONSTRUCTOR node
4647 with no elements indicates zero-initialization of the whole. */
4648 if (vec_safe_is_empty (elts))
4650 if (notify_temp_creation)
4651 return GS_OK;
4652 break;
4655 /* Fetch information about the constructor to direct later processing.
4656 We might want to make static versions of it in various cases, and
4657 can only do so if it known to be a valid constant initializer. */
4658 valid_const_initializer
4659 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4660 &num_ctor_elements, &complete_p);
4662 /* If a const aggregate variable is being initialized, then it
4663 should never be a lose to promote the variable to be static. */
4664 if (valid_const_initializer
4665 && num_nonzero_elements > 1
4666 && TREE_READONLY (object)
4667 && VAR_P (object)
4668 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4670 if (notify_temp_creation)
4671 return GS_ERROR;
4672 DECL_INITIAL (object) = ctor;
4673 TREE_STATIC (object) = 1;
4674 if (!DECL_NAME (object))
4675 DECL_NAME (object) = create_tmp_var_name ("C");
4676 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4678 /* ??? C++ doesn't automatically append a .<number> to the
4679 assembler name, and even when it does, it looks at FE private
4680 data structures to figure out what that number should be,
4681 which are not set for this variable. I suppose this is
4682 important for local statics for inline functions, which aren't
4683 "local" in the object file sense. So in order to get a unique
4684 TU-local symbol, we must invoke the lhd version now. */
4685 lhd_set_decl_assembler_name (object);
4687 *expr_p = NULL_TREE;
4688 break;
4691 /* If there are "lots" of initialized elements, even discounting
4692 those that are not address constants (and thus *must* be
4693 computed at runtime), then partition the constructor into
4694 constant and non-constant parts. Block copy the constant
4695 parts in, then generate code for the non-constant parts. */
4696 /* TODO. There's code in cp/typeck.c to do this. */
4698 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4699 /* store_constructor will ignore the clearing of variable-sized
4700 objects. Initializers for such objects must explicitly set
4701 every field that needs to be set. */
4702 cleared = false;
4703 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4704 /* If the constructor isn't complete, clear the whole object
4705 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4707 ??? This ought not to be needed. For any element not present
4708 in the initializer, we should simply set them to zero. Except
4709 we'd need to *find* the elements that are not present, and that
4710 requires trickery to avoid quadratic compile-time behavior in
4711 large cases or excessive memory use in small cases. */
4712 cleared = true;
4713 else if (num_ctor_elements - num_nonzero_elements
4714 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4715 && num_nonzero_elements < num_ctor_elements / 4)
4716 /* If there are "lots" of zeros, it's more efficient to clear
4717 the memory and then set the nonzero elements. */
4718 cleared = true;
4719 else
4720 cleared = false;
4722 /* If there are "lots" of initialized elements, and all of them
4723 are valid address constants, then the entire initializer can
4724 be dropped to memory, and then memcpy'd out. Don't do this
4725 for sparse arrays, though, as it's more efficient to follow
4726 the standard CONSTRUCTOR behavior of memset followed by
4727 individual element initialization. Also don't do this for small
4728 all-zero initializers (which aren't big enough to merit
4729 clearing), and don't try to make bitwise copies of
4730 TREE_ADDRESSABLE types.
4732 We cannot apply such transformation when compiling chkp static
4733 initializer because creation of initializer image in the memory
4734 will require static initialization of bounds for it. It should
4735 result in another gimplification of similar initializer and we
4736 may fall into infinite loop. */
4737 if (valid_const_initializer
4738 && !(cleared || num_nonzero_elements == 0)
4739 && !TREE_ADDRESSABLE (type)
4740 && (!current_function_decl
4741 || !lookup_attribute ("chkp ctor",
4742 DECL_ATTRIBUTES (current_function_decl))))
4744 HOST_WIDE_INT size = int_size_in_bytes (type);
4745 unsigned int align;
4747 /* ??? We can still get unbounded array types, at least
4748 from the C++ front end. This seems wrong, but attempt
4749 to work around it for now. */
4750 if (size < 0)
4752 size = int_size_in_bytes (TREE_TYPE (object));
4753 if (size >= 0)
4754 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4757 /* Find the maximum alignment we can assume for the object. */
4758 /* ??? Make use of DECL_OFFSET_ALIGN. */
4759 if (DECL_P (object))
4760 align = DECL_ALIGN (object);
4761 else
4762 align = TYPE_ALIGN (type);
4764 /* Do a block move either if the size is so small as to make
4765 each individual move a sub-unit move on average, or if it
4766 is so large as to make individual moves inefficient. */
4767 if (size > 0
4768 && num_nonzero_elements > 1
4769 && (size < num_nonzero_elements
4770 || !can_move_by_pieces (size, align)))
4772 if (notify_temp_creation)
4773 return GS_ERROR;
4775 walk_tree (&ctor, force_labels_r, NULL, NULL);
4776 ctor = tree_output_constant_def (ctor);
4777 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4778 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4779 TREE_OPERAND (*expr_p, 1) = ctor;
4781 /* This is no longer an assignment of a CONSTRUCTOR, but
4782 we still may have processing to do on the LHS. So
4783 pretend we didn't do anything here to let that happen. */
4784 return GS_UNHANDLED;
4788 /* If the target is volatile, we have non-zero elements and more than
4789 one field to assign, initialize the target from a temporary. */
4790 if (TREE_THIS_VOLATILE (object)
4791 && !TREE_ADDRESSABLE (type)
4792 && num_nonzero_elements > 0
4793 && vec_safe_length (elts) > 1)
4795 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4796 TREE_OPERAND (*expr_p, 0) = temp;
4797 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4798 *expr_p,
4799 build2 (MODIFY_EXPR, void_type_node,
4800 object, temp));
4801 return GS_OK;
4804 if (notify_temp_creation)
4805 return GS_OK;
4807 /* If there are nonzero elements and if needed, pre-evaluate to capture
4808 elements overlapping with the lhs into temporaries. We must do this
4809 before clearing to fetch the values before they are zeroed-out. */
4810 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4812 preeval_data.lhs_base_decl = get_base_address (object);
4813 if (!DECL_P (preeval_data.lhs_base_decl))
4814 preeval_data.lhs_base_decl = NULL;
4815 preeval_data.lhs_alias_set = get_alias_set (object);
4817 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4818 pre_p, post_p, &preeval_data);
4821 bool ctor_has_side_effects_p
4822 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4824 if (cleared)
4826 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4827 Note that we still have to gimplify, in order to handle the
4828 case of variable sized types. Avoid shared tree structures. */
4829 CONSTRUCTOR_ELTS (ctor) = NULL;
4830 TREE_SIDE_EFFECTS (ctor) = 0;
4831 object = unshare_expr (object);
4832 gimplify_stmt (expr_p, pre_p);
4835 /* If we have not block cleared the object, or if there are nonzero
4836 elements in the constructor, or if the constructor has side effects,
4837 add assignments to the individual scalar fields of the object. */
4838 if (!cleared
4839 || num_nonzero_elements > 0
4840 || ctor_has_side_effects_p)
4841 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4843 *expr_p = NULL_TREE;
4845 break;
4847 case COMPLEX_TYPE:
4849 tree r, i;
4851 if (notify_temp_creation)
4852 return GS_OK;
4854 /* Extract the real and imaginary parts out of the ctor. */
4855 gcc_assert (elts->length () == 2);
4856 r = (*elts)[0].value;
4857 i = (*elts)[1].value;
4858 if (r == NULL || i == NULL)
4860 tree zero = build_zero_cst (TREE_TYPE (type));
4861 if (r == NULL)
4862 r = zero;
4863 if (i == NULL)
4864 i = zero;
4867 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4868 represent creation of a complex value. */
4869 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4871 ctor = build_complex (type, r, i);
4872 TREE_OPERAND (*expr_p, 1) = ctor;
4874 else
4876 ctor = build2 (COMPLEX_EXPR, type, r, i);
4877 TREE_OPERAND (*expr_p, 1) = ctor;
4878 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4879 pre_p,
4880 post_p,
4881 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4882 fb_rvalue);
4885 break;
4887 case VECTOR_TYPE:
4889 unsigned HOST_WIDE_INT ix;
4890 constructor_elt *ce;
4892 if (notify_temp_creation)
4893 return GS_OK;
4895 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4896 if (TREE_CONSTANT (ctor))
4898 bool constant_p = true;
4899 tree value;
4901 /* Even when ctor is constant, it might contain non-*_CST
4902 elements, such as addresses or trapping values like
4903 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4904 in VECTOR_CST nodes. */
4905 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4906 if (!CONSTANT_CLASS_P (value))
4908 constant_p = false;
4909 break;
4912 if (constant_p)
4914 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4915 break;
4918 TREE_CONSTANT (ctor) = 0;
4921 /* Vector types use CONSTRUCTOR all the way through gimple
4922 compilation as a general initializer. */
4923 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4925 enum gimplify_status tret;
4926 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4927 fb_rvalue);
4928 if (tret == GS_ERROR)
4929 ret = GS_ERROR;
4930 else if (TREE_STATIC (ctor)
4931 && !initializer_constant_valid_p (ce->value,
4932 TREE_TYPE (ce->value)))
4933 TREE_STATIC (ctor) = 0;
4935 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4936 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4938 break;
4940 default:
4941 /* So how did we get a CONSTRUCTOR for a scalar type? */
4942 gcc_unreachable ();
4945 if (ret == GS_ERROR)
4946 return GS_ERROR;
4947 /* If we have gimplified both sides of the initializer but have
4948 not emitted an assignment, do so now. */
4949 if (*expr_p)
4951 tree lhs = TREE_OPERAND (*expr_p, 0);
4952 tree rhs = TREE_OPERAND (*expr_p, 1);
4953 if (want_value && object == lhs)
4954 lhs = unshare_expr (lhs);
4955 gassign *init = gimple_build_assign (lhs, rhs);
4956 gimplify_seq_add_stmt (pre_p, init);
4958 if (want_value)
4960 *expr_p = object;
4961 return GS_OK;
4963 else
4965 *expr_p = NULL;
4966 return GS_ALL_DONE;
4970 /* Given a pointer value OP0, return a simplified version of an
4971 indirection through OP0, or NULL_TREE if no simplification is
4972 possible. This may only be applied to a rhs of an expression.
4973 Note that the resulting type may be different from the type pointed
4974 to in the sense that it is still compatible from the langhooks
4975 point of view. */
4977 static tree
4978 gimple_fold_indirect_ref_rhs (tree t)
4980 return gimple_fold_indirect_ref (t);
4983 /* Subroutine of gimplify_modify_expr to do simplifications of
4984 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4985 something changes. */
4987 static enum gimplify_status
4988 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4989 gimple_seq *pre_p, gimple_seq *post_p,
4990 bool want_value)
4992 enum gimplify_status ret = GS_UNHANDLED;
4993 bool changed;
4997 changed = false;
4998 switch (TREE_CODE (*from_p))
5000 case VAR_DECL:
5001 /* If we're assigning from a read-only variable initialized with
5002 a constructor, do the direct assignment from the constructor,
5003 but only if neither source nor target are volatile since this
5004 latter assignment might end up being done on a per-field basis. */
5005 if (DECL_INITIAL (*from_p)
5006 && TREE_READONLY (*from_p)
5007 && !TREE_THIS_VOLATILE (*from_p)
5008 && !TREE_THIS_VOLATILE (*to_p)
5009 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5011 tree old_from = *from_p;
5012 enum gimplify_status subret;
5014 /* Move the constructor into the RHS. */
5015 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5017 /* Let's see if gimplify_init_constructor will need to put
5018 it in memory. */
5019 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5020 false, true);
5021 if (subret == GS_ERROR)
5023 /* If so, revert the change. */
5024 *from_p = old_from;
5026 else
5028 ret = GS_OK;
5029 changed = true;
5032 break;
5033 case INDIRECT_REF:
5035 /* If we have code like
5037 *(const A*)(A*)&x
5039 where the type of "x" is a (possibly cv-qualified variant
5040 of "A"), treat the entire expression as identical to "x".
5041 This kind of code arises in C++ when an object is bound
5042 to a const reference, and if "x" is a TARGET_EXPR we want
5043 to take advantage of the optimization below. */
5044 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5045 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5046 if (t)
5048 if (TREE_THIS_VOLATILE (t) != volatile_p)
5050 if (DECL_P (t))
5051 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5052 build_fold_addr_expr (t));
5053 if (REFERENCE_CLASS_P (t))
5054 TREE_THIS_VOLATILE (t) = volatile_p;
5056 *from_p = t;
5057 ret = GS_OK;
5058 changed = true;
5060 break;
5063 case TARGET_EXPR:
5065 /* If we are initializing something from a TARGET_EXPR, strip the
5066 TARGET_EXPR and initialize it directly, if possible. This can't
5067 be done if the initializer is void, since that implies that the
5068 temporary is set in some non-trivial way.
5070 ??? What about code that pulls out the temp and uses it
5071 elsewhere? I think that such code never uses the TARGET_EXPR as
5072 an initializer. If I'm wrong, we'll die because the temp won't
5073 have any RTL. In that case, I guess we'll need to replace
5074 references somehow. */
5075 tree init = TARGET_EXPR_INITIAL (*from_p);
5077 if (init
5078 && !VOID_TYPE_P (TREE_TYPE (init)))
5080 *from_p = init;
5081 ret = GS_OK;
5082 changed = true;
5085 break;
5087 case COMPOUND_EXPR:
5088 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5089 caught. */
5090 gimplify_compound_expr (from_p, pre_p, true);
5091 ret = GS_OK;
5092 changed = true;
5093 break;
5095 case CONSTRUCTOR:
5096 /* If we already made some changes, let the front end have a
5097 crack at this before we break it down. */
5098 if (ret != GS_UNHANDLED)
5099 break;
5100 /* If we're initializing from a CONSTRUCTOR, break this into
5101 individual MODIFY_EXPRs. */
5102 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5103 false);
5105 case COND_EXPR:
5106 /* If we're assigning to a non-register type, push the assignment
5107 down into the branches. This is mandatory for ADDRESSABLE types,
5108 since we cannot generate temporaries for such, but it saves a
5109 copy in other cases as well. */
5110 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5112 /* This code should mirror the code in gimplify_cond_expr. */
5113 enum tree_code code = TREE_CODE (*expr_p);
5114 tree cond = *from_p;
5115 tree result = *to_p;
5117 ret = gimplify_expr (&result, pre_p, post_p,
5118 is_gimple_lvalue, fb_lvalue);
5119 if (ret != GS_ERROR)
5120 ret = GS_OK;
5122 /* If we are going to write RESULT more than once, clear
5123 TREE_READONLY flag, otherwise we might incorrectly promote
5124 the variable to static const and initialize it at compile
5125 time in one of the branches. */
5126 if (VAR_P (result)
5127 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5128 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5129 TREE_READONLY (result) = 0;
5130 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5131 TREE_OPERAND (cond, 1)
5132 = build2 (code, void_type_node, result,
5133 TREE_OPERAND (cond, 1));
5134 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5135 TREE_OPERAND (cond, 2)
5136 = build2 (code, void_type_node, unshare_expr (result),
5137 TREE_OPERAND (cond, 2));
5139 TREE_TYPE (cond) = void_type_node;
5140 recalculate_side_effects (cond);
5142 if (want_value)
5144 gimplify_and_add (cond, pre_p);
5145 *expr_p = unshare_expr (result);
5147 else
5148 *expr_p = cond;
5149 return ret;
5151 break;
5153 case CALL_EXPR:
5154 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5155 return slot so that we don't generate a temporary. */
5156 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5157 && aggregate_value_p (*from_p, *from_p))
5159 bool use_target;
5161 if (!(rhs_predicate_for (*to_p))(*from_p))
5162 /* If we need a temporary, *to_p isn't accurate. */
5163 use_target = false;
5164 /* It's OK to use the return slot directly unless it's an NRV. */
5165 else if (TREE_CODE (*to_p) == RESULT_DECL
5166 && DECL_NAME (*to_p) == NULL_TREE
5167 && needs_to_live_in_memory (*to_p))
5168 use_target = true;
5169 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5170 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5171 /* Don't force regs into memory. */
5172 use_target = false;
5173 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5174 /* It's OK to use the target directly if it's being
5175 initialized. */
5176 use_target = true;
5177 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5178 != INTEGER_CST)
5179 /* Always use the target and thus RSO for variable-sized types.
5180 GIMPLE cannot deal with a variable-sized assignment
5181 embedded in a call statement. */
5182 use_target = true;
5183 else if (TREE_CODE (*to_p) != SSA_NAME
5184 && (!is_gimple_variable (*to_p)
5185 || needs_to_live_in_memory (*to_p)))
5186 /* Don't use the original target if it's already addressable;
5187 if its address escapes, and the called function uses the
5188 NRV optimization, a conforming program could see *to_p
5189 change before the called function returns; see c++/19317.
5190 When optimizing, the return_slot pass marks more functions
5191 as safe after we have escape info. */
5192 use_target = false;
5193 else
5194 use_target = true;
5196 if (use_target)
5198 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5199 mark_addressable (*to_p);
5202 break;
5204 case WITH_SIZE_EXPR:
5205 /* Likewise for calls that return an aggregate of non-constant size,
5206 since we would not be able to generate a temporary at all. */
5207 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5209 *from_p = TREE_OPERAND (*from_p, 0);
5210 /* We don't change ret in this case because the
5211 WITH_SIZE_EXPR might have been added in
5212 gimplify_modify_expr, so returning GS_OK would lead to an
5213 infinite loop. */
5214 changed = true;
5216 break;
5218 /* If we're initializing from a container, push the initialization
5219 inside it. */
5220 case CLEANUP_POINT_EXPR:
5221 case BIND_EXPR:
5222 case STATEMENT_LIST:
5224 tree wrap = *from_p;
5225 tree t;
5227 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5228 fb_lvalue);
5229 if (ret != GS_ERROR)
5230 ret = GS_OK;
5232 t = voidify_wrapper_expr (wrap, *expr_p);
5233 gcc_assert (t == *expr_p);
5235 if (want_value)
5237 gimplify_and_add (wrap, pre_p);
5238 *expr_p = unshare_expr (*to_p);
5240 else
5241 *expr_p = wrap;
5242 return GS_OK;
5245 case COMPOUND_LITERAL_EXPR:
5247 tree complit = TREE_OPERAND (*expr_p, 1);
5248 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5249 tree decl = DECL_EXPR_DECL (decl_s);
5250 tree init = DECL_INITIAL (decl);
5252 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5253 into struct T x = { 0, 1, 2 } if the address of the
5254 compound literal has never been taken. */
5255 if (!TREE_ADDRESSABLE (complit)
5256 && !TREE_ADDRESSABLE (decl)
5257 && init)
5259 *expr_p = copy_node (*expr_p);
5260 TREE_OPERAND (*expr_p, 1) = init;
5261 return GS_OK;
5265 default:
5266 break;
5269 while (changed);
5271 return ret;
5275 /* Return true if T looks like a valid GIMPLE statement. */
5277 static bool
5278 is_gimple_stmt (tree t)
5280 const enum tree_code code = TREE_CODE (t);
5282 switch (code)
5284 case NOP_EXPR:
5285 /* The only valid NOP_EXPR is the empty statement. */
5286 return IS_EMPTY_STMT (t);
5288 case BIND_EXPR:
5289 case COND_EXPR:
5290 /* These are only valid if they're void. */
5291 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5293 case SWITCH_EXPR:
5294 case GOTO_EXPR:
5295 case RETURN_EXPR:
5296 case LABEL_EXPR:
5297 case CASE_LABEL_EXPR:
5298 case TRY_CATCH_EXPR:
5299 case TRY_FINALLY_EXPR:
5300 case EH_FILTER_EXPR:
5301 case CATCH_EXPR:
5302 case ASM_EXPR:
5303 case STATEMENT_LIST:
5304 case OACC_PARALLEL:
5305 case OACC_KERNELS:
5306 case OACC_DATA:
5307 case OACC_HOST_DATA:
5308 case OACC_DECLARE:
5309 case OACC_UPDATE:
5310 case OACC_ENTER_DATA:
5311 case OACC_EXIT_DATA:
5312 case OACC_CACHE:
5313 case OMP_PARALLEL:
5314 case OMP_FOR:
5315 case OMP_SIMD:
5316 case OMP_DISTRIBUTE:
5317 case OACC_LOOP:
5318 case OMP_SECTIONS:
5319 case OMP_SECTION:
5320 case OMP_SINGLE:
5321 case OMP_MASTER:
5322 case OMP_TASKGROUP:
5323 case OMP_ORDERED:
5324 case OMP_CRITICAL:
5325 case OMP_TASK:
5326 case OMP_TARGET:
5327 case OMP_TARGET_DATA:
5328 case OMP_TARGET_UPDATE:
5329 case OMP_TARGET_ENTER_DATA:
5330 case OMP_TARGET_EXIT_DATA:
5331 case OMP_TASKLOOP:
5332 case OMP_TEAMS:
5333 /* These are always void. */
5334 return true;
5336 case CALL_EXPR:
5337 case MODIFY_EXPR:
5338 case PREDICT_EXPR:
5339 /* These are valid regardless of their type. */
5340 return true;
5342 default:
5343 return false;
5348 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5349 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5350 DECL_GIMPLE_REG_P set.
5352 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5353 other, unmodified part of the complex object just before the total store.
5354 As a consequence, if the object is still uninitialized, an undefined value
5355 will be loaded into a register, which may result in a spurious exception
5356 if the register is floating-point and the value happens to be a signaling
5357 NaN for example. Then the fully-fledged complex operations lowering pass
5358 followed by a DCE pass are necessary in order to fix things up. */
5360 static enum gimplify_status
5361 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5362 bool want_value)
5364 enum tree_code code, ocode;
5365 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5367 lhs = TREE_OPERAND (*expr_p, 0);
5368 rhs = TREE_OPERAND (*expr_p, 1);
5369 code = TREE_CODE (lhs);
5370 lhs = TREE_OPERAND (lhs, 0);
5372 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5373 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5374 TREE_NO_WARNING (other) = 1;
5375 other = get_formal_tmp_var (other, pre_p);
5377 realpart = code == REALPART_EXPR ? rhs : other;
5378 imagpart = code == REALPART_EXPR ? other : rhs;
5380 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5381 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5382 else
5383 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5385 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5386 *expr_p = (want_value) ? rhs : NULL_TREE;
5388 return GS_ALL_DONE;
5391 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5393 modify_expr
5394 : varname '=' rhs
5395 | '*' ID '=' rhs
5397 PRE_P points to the list where side effects that must happen before
5398 *EXPR_P should be stored.
5400 POST_P points to the list where side effects that must happen after
5401 *EXPR_P should be stored.
5403 WANT_VALUE is nonzero iff we want to use the value of this expression
5404 in another expression. */
5406 static enum gimplify_status
5407 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5408 bool want_value)
5410 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5411 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5412 enum gimplify_status ret = GS_UNHANDLED;
5413 gimple *assign;
5414 location_t loc = EXPR_LOCATION (*expr_p);
5415 gimple_stmt_iterator gsi;
5417 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5418 || TREE_CODE (*expr_p) == INIT_EXPR);
5420 /* Trying to simplify a clobber using normal logic doesn't work,
5421 so handle it here. */
5422 if (TREE_CLOBBER_P (*from_p))
5424 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5425 if (ret == GS_ERROR)
5426 return ret;
5427 gcc_assert (!want_value
5428 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5429 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5430 *expr_p = NULL;
5431 return GS_ALL_DONE;
5434 /* Insert pointer conversions required by the middle-end that are not
5435 required by the frontend. This fixes middle-end type checking for
5436 for example gcc.dg/redecl-6.c. */
5437 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5439 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5440 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5441 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5444 /* See if any simplifications can be done based on what the RHS is. */
5445 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5446 want_value);
5447 if (ret != GS_UNHANDLED)
5448 return ret;
5450 /* For zero sized types only gimplify the left hand side and right hand
5451 side as statements and throw away the assignment. Do this after
5452 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5453 types properly. */
5454 if (zero_sized_type (TREE_TYPE (*from_p))
5455 && !want_value
5456 /* Don't do this for calls that return addressable types, expand_call
5457 relies on those having a lhs. */
5458 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5459 && TREE_CODE (*from_p) == CALL_EXPR))
5461 gimplify_stmt (from_p, pre_p);
5462 gimplify_stmt (to_p, pre_p);
5463 *expr_p = NULL_TREE;
5464 return GS_ALL_DONE;
5467 /* If the value being copied is of variable width, compute the length
5468 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5469 before gimplifying any of the operands so that we can resolve any
5470 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5471 the size of the expression to be copied, not of the destination, so
5472 that is what we must do here. */
5473 maybe_with_size_expr (from_p);
5475 /* As a special case, we have to temporarily allow for assignments
5476 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5477 a toplevel statement, when gimplifying the GENERIC expression
5478 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5479 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5481 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5482 prevent gimplify_expr from trying to create a new temporary for
5483 foo's LHS, we tell it that it should only gimplify until it
5484 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5485 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5486 and all we need to do here is set 'a' to be its LHS. */
5488 /* Gimplify the RHS first for C++17 and bug 71104. */
5489 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5490 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5491 if (ret == GS_ERROR)
5492 return ret;
5494 /* Then gimplify the LHS. */
5495 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5496 twice we have to make sure to gimplify into non-SSA as otherwise
5497 the abnormal edge added later will make those defs not dominate
5498 their uses.
5499 ??? Technically this applies only to the registers used in the
5500 resulting non-register *TO_P. */
5501 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5502 if (saved_into_ssa
5503 && TREE_CODE (*from_p) == CALL_EXPR
5504 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5505 gimplify_ctxp->into_ssa = false;
5506 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5507 gimplify_ctxp->into_ssa = saved_into_ssa;
5508 if (ret == GS_ERROR)
5509 return ret;
5511 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5512 guess for the predicate was wrong. */
5513 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5514 if (final_pred != initial_pred)
5516 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5517 if (ret == GS_ERROR)
5518 return ret;
5521 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5522 size as argument to the call. */
5523 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5525 tree call = TREE_OPERAND (*from_p, 0);
5526 tree vlasize = TREE_OPERAND (*from_p, 1);
5528 if (TREE_CODE (call) == CALL_EXPR
5529 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5531 int nargs = call_expr_nargs (call);
5532 tree type = TREE_TYPE (call);
5533 tree ap = CALL_EXPR_ARG (call, 0);
5534 tree tag = CALL_EXPR_ARG (call, 1);
5535 tree aptag = CALL_EXPR_ARG (call, 2);
5536 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5537 IFN_VA_ARG, type,
5538 nargs + 1, ap, tag,
5539 aptag, vlasize);
5540 TREE_OPERAND (*from_p, 0) = newcall;
5544 /* Now see if the above changed *from_p to something we handle specially. */
5545 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5546 want_value);
5547 if (ret != GS_UNHANDLED)
5548 return ret;
5550 /* If we've got a variable sized assignment between two lvalues (i.e. does
5551 not involve a call), then we can make things a bit more straightforward
5552 by converting the assignment to memcpy or memset. */
5553 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5555 tree from = TREE_OPERAND (*from_p, 0);
5556 tree size = TREE_OPERAND (*from_p, 1);
5558 if (TREE_CODE (from) == CONSTRUCTOR)
5559 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5561 if (is_gimple_addressable (from))
5563 *from_p = from;
5564 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5565 pre_p);
5569 /* Transform partial stores to non-addressable complex variables into
5570 total stores. This allows us to use real instead of virtual operands
5571 for these variables, which improves optimization. */
5572 if ((TREE_CODE (*to_p) == REALPART_EXPR
5573 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5574 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5575 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5577 /* Try to alleviate the effects of the gimplification creating artificial
5578 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5579 make sure not to create DECL_DEBUG_EXPR links across functions. */
5580 if (!gimplify_ctxp->into_ssa
5581 && VAR_P (*from_p)
5582 && DECL_IGNORED_P (*from_p)
5583 && DECL_P (*to_p)
5584 && !DECL_IGNORED_P (*to_p)
5585 && decl_function_context (*to_p) == current_function_decl
5586 && decl_function_context (*from_p) == current_function_decl)
5588 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5589 DECL_NAME (*from_p)
5590 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5591 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5592 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5595 if (want_value && TREE_THIS_VOLATILE (*to_p))
5596 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5598 if (TREE_CODE (*from_p) == CALL_EXPR)
5600 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5601 instead of a GIMPLE_ASSIGN. */
5602 gcall *call_stmt;
5603 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5605 /* Gimplify internal functions created in the FEs. */
5606 int nargs = call_expr_nargs (*from_p), i;
5607 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5608 auto_vec<tree> vargs (nargs);
5610 for (i = 0; i < nargs; i++)
5612 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5613 EXPR_LOCATION (*from_p));
5614 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5616 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5617 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5618 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5620 else
5622 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5623 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5624 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5625 tree fndecl = get_callee_fndecl (*from_p);
5626 if (fndecl
5627 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5628 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5629 && call_expr_nargs (*from_p) == 3)
5630 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5631 CALL_EXPR_ARG (*from_p, 0),
5632 CALL_EXPR_ARG (*from_p, 1),
5633 CALL_EXPR_ARG (*from_p, 2));
5634 else
5636 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5639 notice_special_calls (call_stmt);
5640 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5641 gimple_call_set_lhs (call_stmt, *to_p);
5642 else if (TREE_CODE (*to_p) == SSA_NAME)
5643 /* The above is somewhat premature, avoid ICEing later for a
5644 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5645 ??? This doesn't make it a default-def. */
5646 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5648 assign = call_stmt;
5650 else
5652 assign = gimple_build_assign (*to_p, *from_p);
5653 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5654 if (COMPARISON_CLASS_P (*from_p))
5655 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5658 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5660 /* We should have got an SSA name from the start. */
5661 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5662 || ! gimple_in_ssa_p (cfun));
5665 gimplify_seq_add_stmt (pre_p, assign);
5666 gsi = gsi_last (*pre_p);
5667 maybe_fold_stmt (&gsi);
5669 if (want_value)
5671 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5672 return GS_OK;
5674 else
5675 *expr_p = NULL;
5677 return GS_ALL_DONE;
5680 /* Gimplify a comparison between two variable-sized objects. Do this
5681 with a call to BUILT_IN_MEMCMP. */
5683 static enum gimplify_status
5684 gimplify_variable_sized_compare (tree *expr_p)
5686 location_t loc = EXPR_LOCATION (*expr_p);
5687 tree op0 = TREE_OPERAND (*expr_p, 0);
5688 tree op1 = TREE_OPERAND (*expr_p, 1);
5689 tree t, arg, dest, src, expr;
5691 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5692 arg = unshare_expr (arg);
5693 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5694 src = build_fold_addr_expr_loc (loc, op1);
5695 dest = build_fold_addr_expr_loc (loc, op0);
5696 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5697 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5699 expr
5700 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5701 SET_EXPR_LOCATION (expr, loc);
5702 *expr_p = expr;
5704 return GS_OK;
5707 /* Gimplify a comparison between two aggregate objects of integral scalar
5708 mode as a comparison between the bitwise equivalent scalar values. */
5710 static enum gimplify_status
5711 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5713 location_t loc = EXPR_LOCATION (*expr_p);
5714 tree op0 = TREE_OPERAND (*expr_p, 0);
5715 tree op1 = TREE_OPERAND (*expr_p, 1);
5717 tree type = TREE_TYPE (op0);
5718 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5720 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5721 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5723 *expr_p
5724 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5726 return GS_OK;
5729 /* Gimplify an expression sequence. This function gimplifies each
5730 expression and rewrites the original expression with the last
5731 expression of the sequence in GIMPLE form.
5733 PRE_P points to the list where the side effects for all the
5734 expressions in the sequence will be emitted.
5736 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5738 static enum gimplify_status
5739 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5741 tree t = *expr_p;
5745 tree *sub_p = &TREE_OPERAND (t, 0);
5747 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5748 gimplify_compound_expr (sub_p, pre_p, false);
5749 else
5750 gimplify_stmt (sub_p, pre_p);
5752 t = TREE_OPERAND (t, 1);
5754 while (TREE_CODE (t) == COMPOUND_EXPR);
5756 *expr_p = t;
5757 if (want_value)
5758 return GS_OK;
5759 else
5761 gimplify_stmt (expr_p, pre_p);
5762 return GS_ALL_DONE;
5766 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5767 gimplify. After gimplification, EXPR_P will point to a new temporary
5768 that holds the original value of the SAVE_EXPR node.
5770 PRE_P points to the list where side effects that must happen before
5771 *EXPR_P should be stored. */
5773 static enum gimplify_status
5774 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5776 enum gimplify_status ret = GS_ALL_DONE;
5777 tree val;
5779 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5780 val = TREE_OPERAND (*expr_p, 0);
5782 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5783 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5785 /* The operand may be a void-valued expression. It is
5786 being executed only for its side-effects. */
5787 if (TREE_TYPE (val) == void_type_node)
5789 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5790 is_gimple_stmt, fb_none);
5791 val = NULL;
5793 else
5794 /* The temporary may not be an SSA name as later abnormal and EH
5795 control flow may invalidate use/def domination. */
5796 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5798 TREE_OPERAND (*expr_p, 0) = val;
5799 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5802 *expr_p = val;
5804 return ret;
5807 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5809 unary_expr
5810 : ...
5811 | '&' varname
5814 PRE_P points to the list where side effects that must happen before
5815 *EXPR_P should be stored.
5817 POST_P points to the list where side effects that must happen after
5818 *EXPR_P should be stored. */
5820 static enum gimplify_status
5821 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5823 tree expr = *expr_p;
5824 tree op0 = TREE_OPERAND (expr, 0);
5825 enum gimplify_status ret;
5826 location_t loc = EXPR_LOCATION (*expr_p);
5828 switch (TREE_CODE (op0))
5830 case INDIRECT_REF:
5831 do_indirect_ref:
5832 /* Check if we are dealing with an expression of the form '&*ptr'.
5833 While the front end folds away '&*ptr' into 'ptr', these
5834 expressions may be generated internally by the compiler (e.g.,
5835 builtins like __builtin_va_end). */
5836 /* Caution: the silent array decomposition semantics we allow for
5837 ADDR_EXPR means we can't always discard the pair. */
5838 /* Gimplification of the ADDR_EXPR operand may drop
5839 cv-qualification conversions, so make sure we add them if
5840 needed. */
5842 tree op00 = TREE_OPERAND (op0, 0);
5843 tree t_expr = TREE_TYPE (expr);
5844 tree t_op00 = TREE_TYPE (op00);
5846 if (!useless_type_conversion_p (t_expr, t_op00))
5847 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5848 *expr_p = op00;
5849 ret = GS_OK;
5851 break;
5853 case VIEW_CONVERT_EXPR:
5854 /* Take the address of our operand and then convert it to the type of
5855 this ADDR_EXPR.
5857 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5858 all clear. The impact of this transformation is even less clear. */
5860 /* If the operand is a useless conversion, look through it. Doing so
5861 guarantees that the ADDR_EXPR and its operand will remain of the
5862 same type. */
5863 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5864 op0 = TREE_OPERAND (op0, 0);
5866 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5867 build_fold_addr_expr_loc (loc,
5868 TREE_OPERAND (op0, 0)));
5869 ret = GS_OK;
5870 break;
5872 case MEM_REF:
5873 if (integer_zerop (TREE_OPERAND (op0, 1)))
5874 goto do_indirect_ref;
5876 /* fall through */
5878 default:
5879 /* If we see a call to a declared builtin or see its address
5880 being taken (we can unify those cases here) then we can mark
5881 the builtin for implicit generation by GCC. */
5882 if (TREE_CODE (op0) == FUNCTION_DECL
5883 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5884 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5885 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5887 /* We use fb_either here because the C frontend sometimes takes
5888 the address of a call that returns a struct; see
5889 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5890 the implied temporary explicit. */
5892 /* Make the operand addressable. */
5893 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5894 is_gimple_addressable, fb_either);
5895 if (ret == GS_ERROR)
5896 break;
5898 /* Then mark it. Beware that it may not be possible to do so directly
5899 if a temporary has been created by the gimplification. */
5900 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5902 op0 = TREE_OPERAND (expr, 0);
5904 /* For various reasons, the gimplification of the expression
5905 may have made a new INDIRECT_REF. */
5906 if (TREE_CODE (op0) == INDIRECT_REF)
5907 goto do_indirect_ref;
5909 mark_addressable (TREE_OPERAND (expr, 0));
5911 /* The FEs may end up building ADDR_EXPRs early on a decl with
5912 an incomplete type. Re-build ADDR_EXPRs in canonical form
5913 here. */
5914 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5915 *expr_p = build_fold_addr_expr (op0);
5917 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5918 recompute_tree_invariant_for_addr_expr (*expr_p);
5920 /* If we re-built the ADDR_EXPR add a conversion to the original type
5921 if required. */
5922 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5923 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5925 break;
5928 return ret;
5931 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5932 value; output operands should be a gimple lvalue. */
5934 static enum gimplify_status
5935 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5937 tree expr;
5938 int noutputs;
5939 const char **oconstraints;
5940 int i;
5941 tree link;
5942 const char *constraint;
5943 bool allows_mem, allows_reg, is_inout;
5944 enum gimplify_status ret, tret;
5945 gasm *stmt;
5946 vec<tree, va_gc> *inputs;
5947 vec<tree, va_gc> *outputs;
5948 vec<tree, va_gc> *clobbers;
5949 vec<tree, va_gc> *labels;
5950 tree link_next;
5952 expr = *expr_p;
5953 noutputs = list_length (ASM_OUTPUTS (expr));
5954 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5956 inputs = NULL;
5957 outputs = NULL;
5958 clobbers = NULL;
5959 labels = NULL;
5961 ret = GS_ALL_DONE;
5962 link_next = NULL_TREE;
5963 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5965 bool ok;
5966 size_t constraint_len;
5968 link_next = TREE_CHAIN (link);
5970 oconstraints[i]
5971 = constraint
5972 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5973 constraint_len = strlen (constraint);
5974 if (constraint_len == 0)
5975 continue;
5977 ok = parse_output_constraint (&constraint, i, 0, 0,
5978 &allows_mem, &allows_reg, &is_inout);
5979 if (!ok)
5981 ret = GS_ERROR;
5982 is_inout = false;
5985 if (!allows_reg && allows_mem)
5986 mark_addressable (TREE_VALUE (link));
5988 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5989 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5990 fb_lvalue | fb_mayfail);
5991 if (tret == GS_ERROR)
5993 error ("invalid lvalue in asm output %d", i);
5994 ret = tret;
5997 /* If the constraint does not allow memory make sure we gimplify
5998 it to a register if it is not already but its base is. This
5999 happens for complex and vector components. */
6000 if (!allows_mem)
6002 tree op = TREE_VALUE (link);
6003 if (! is_gimple_val (op)
6004 && is_gimple_reg_type (TREE_TYPE (op))
6005 && is_gimple_reg (get_base_address (op)))
6007 tree tem = create_tmp_reg (TREE_TYPE (op));
6008 tree ass;
6009 if (is_inout)
6011 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6012 tem, unshare_expr (op));
6013 gimplify_and_add (ass, pre_p);
6015 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6016 gimplify_and_add (ass, post_p);
6018 TREE_VALUE (link) = tem;
6019 tret = GS_OK;
6023 vec_safe_push (outputs, link);
6024 TREE_CHAIN (link) = NULL_TREE;
6026 if (is_inout)
6028 /* An input/output operand. To give the optimizers more
6029 flexibility, split it into separate input and output
6030 operands. */
6031 tree input;
6032 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6033 char buf[11];
6035 /* Turn the in/out constraint into an output constraint. */
6036 char *p = xstrdup (constraint);
6037 p[0] = '=';
6038 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6040 /* And add a matching input constraint. */
6041 if (allows_reg)
6043 sprintf (buf, "%u", i);
6045 /* If there are multiple alternatives in the constraint,
6046 handle each of them individually. Those that allow register
6047 will be replaced with operand number, the others will stay
6048 unchanged. */
6049 if (strchr (p, ',') != NULL)
6051 size_t len = 0, buflen = strlen (buf);
6052 char *beg, *end, *str, *dst;
6054 for (beg = p + 1;;)
6056 end = strchr (beg, ',');
6057 if (end == NULL)
6058 end = strchr (beg, '\0');
6059 if ((size_t) (end - beg) < buflen)
6060 len += buflen + 1;
6061 else
6062 len += end - beg + 1;
6063 if (*end)
6064 beg = end + 1;
6065 else
6066 break;
6069 str = (char *) alloca (len);
6070 for (beg = p + 1, dst = str;;)
6072 const char *tem;
6073 bool mem_p, reg_p, inout_p;
6075 end = strchr (beg, ',');
6076 if (end)
6077 *end = '\0';
6078 beg[-1] = '=';
6079 tem = beg - 1;
6080 parse_output_constraint (&tem, i, 0, 0,
6081 &mem_p, &reg_p, &inout_p);
6082 if (dst != str)
6083 *dst++ = ',';
6084 if (reg_p)
6086 memcpy (dst, buf, buflen);
6087 dst += buflen;
6089 else
6091 if (end)
6092 len = end - beg;
6093 else
6094 len = strlen (beg);
6095 memcpy (dst, beg, len);
6096 dst += len;
6098 if (end)
6099 beg = end + 1;
6100 else
6101 break;
6103 *dst = '\0';
6104 input = build_string (dst - str, str);
6106 else
6107 input = build_string (strlen (buf), buf);
6109 else
6110 input = build_string (constraint_len - 1, constraint + 1);
6112 free (p);
6114 input = build_tree_list (build_tree_list (NULL_TREE, input),
6115 unshare_expr (TREE_VALUE (link)));
6116 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6120 link_next = NULL_TREE;
6121 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6123 link_next = TREE_CHAIN (link);
6124 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6125 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6126 oconstraints, &allows_mem, &allows_reg);
6128 /* If we can't make copies, we can only accept memory. */
6129 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6131 if (allows_mem)
6132 allows_reg = 0;
6133 else
6135 error ("impossible constraint in %<asm%>");
6136 error ("non-memory input %d must stay in memory", i);
6137 return GS_ERROR;
6141 /* If the operand is a memory input, it should be an lvalue. */
6142 if (!allows_reg && allows_mem)
6144 tree inputv = TREE_VALUE (link);
6145 STRIP_NOPS (inputv);
6146 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6147 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6148 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6149 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6150 || TREE_CODE (inputv) == MODIFY_EXPR)
6151 TREE_VALUE (link) = error_mark_node;
6152 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6153 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6154 if (tret != GS_ERROR)
6156 /* Unlike output operands, memory inputs are not guaranteed
6157 to be lvalues by the FE, and while the expressions are
6158 marked addressable there, if it is e.g. a statement
6159 expression, temporaries in it might not end up being
6160 addressable. They might be already used in the IL and thus
6161 it is too late to make them addressable now though. */
6162 tree x = TREE_VALUE (link);
6163 while (handled_component_p (x))
6164 x = TREE_OPERAND (x, 0);
6165 if (TREE_CODE (x) == MEM_REF
6166 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6167 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6168 if ((VAR_P (x)
6169 || TREE_CODE (x) == PARM_DECL
6170 || TREE_CODE (x) == RESULT_DECL)
6171 && !TREE_ADDRESSABLE (x)
6172 && is_gimple_reg (x))
6174 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6175 input_location), 0,
6176 "memory input %d is not directly addressable",
6178 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6181 mark_addressable (TREE_VALUE (link));
6182 if (tret == GS_ERROR)
6184 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6185 "memory input %d is not directly addressable", i);
6186 ret = tret;
6189 else
6191 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6192 is_gimple_asm_val, fb_rvalue);
6193 if (tret == GS_ERROR)
6194 ret = tret;
6197 TREE_CHAIN (link) = NULL_TREE;
6198 vec_safe_push (inputs, link);
6201 link_next = NULL_TREE;
6202 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6204 link_next = TREE_CHAIN (link);
6205 TREE_CHAIN (link) = NULL_TREE;
6206 vec_safe_push (clobbers, link);
6209 link_next = NULL_TREE;
6210 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6212 link_next = TREE_CHAIN (link);
6213 TREE_CHAIN (link) = NULL_TREE;
6214 vec_safe_push (labels, link);
6217 /* Do not add ASMs with errors to the gimple IL stream. */
6218 if (ret != GS_ERROR)
6220 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6221 inputs, outputs, clobbers, labels);
6223 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6224 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6226 gimplify_seq_add_stmt (pre_p, stmt);
6229 return ret;
6232 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6233 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6234 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6235 return to this function.
6237 FIXME should we complexify the prequeue handling instead? Or use flags
6238 for all the cleanups and let the optimizer tighten them up? The current
6239 code seems pretty fragile; it will break on a cleanup within any
6240 non-conditional nesting. But any such nesting would be broken, anyway;
6241 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6242 and continues out of it. We can do that at the RTL level, though, so
6243 having an optimizer to tighten up try/finally regions would be a Good
6244 Thing. */
6246 static enum gimplify_status
6247 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6249 gimple_stmt_iterator iter;
6250 gimple_seq body_sequence = NULL;
6252 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6254 /* We only care about the number of conditions between the innermost
6255 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6256 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6257 int old_conds = gimplify_ctxp->conditions;
6258 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6259 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6260 gimplify_ctxp->conditions = 0;
6261 gimplify_ctxp->conditional_cleanups = NULL;
6262 gimplify_ctxp->in_cleanup_point_expr = true;
6264 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6266 gimplify_ctxp->conditions = old_conds;
6267 gimplify_ctxp->conditional_cleanups = old_cleanups;
6268 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6270 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6272 gimple *wce = gsi_stmt (iter);
6274 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6276 if (gsi_one_before_end_p (iter))
6278 /* Note that gsi_insert_seq_before and gsi_remove do not
6279 scan operands, unlike some other sequence mutators. */
6280 if (!gimple_wce_cleanup_eh_only (wce))
6281 gsi_insert_seq_before_without_update (&iter,
6282 gimple_wce_cleanup (wce),
6283 GSI_SAME_STMT);
6284 gsi_remove (&iter, true);
6285 break;
6287 else
6289 gtry *gtry;
6290 gimple_seq seq;
6291 enum gimple_try_flags kind;
6293 if (gimple_wce_cleanup_eh_only (wce))
6294 kind = GIMPLE_TRY_CATCH;
6295 else
6296 kind = GIMPLE_TRY_FINALLY;
6297 seq = gsi_split_seq_after (iter);
6299 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6300 /* Do not use gsi_replace here, as it may scan operands.
6301 We want to do a simple structural modification only. */
6302 gsi_set_stmt (&iter, gtry);
6303 iter = gsi_start (gtry->eval);
6306 else
6307 gsi_next (&iter);
6310 gimplify_seq_add_seq (pre_p, body_sequence);
6311 if (temp)
6313 *expr_p = temp;
6314 return GS_OK;
6316 else
6318 *expr_p = NULL;
6319 return GS_ALL_DONE;
6323 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6324 is the cleanup action required. EH_ONLY is true if the cleanup should
6325 only be executed if an exception is thrown, not on normal exit.
6326 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6327 only valid for clobbers. */
6329 static void
6330 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6331 bool force_uncond = false)
6333 gimple *wce;
6334 gimple_seq cleanup_stmts = NULL;
6336 /* Errors can result in improperly nested cleanups. Which results in
6337 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6338 if (seen_error ())
6339 return;
6341 if (gimple_conditional_context ())
6343 /* If we're in a conditional context, this is more complex. We only
6344 want to run the cleanup if we actually ran the initialization that
6345 necessitates it, but we want to run it after the end of the
6346 conditional context. So we wrap the try/finally around the
6347 condition and use a flag to determine whether or not to actually
6348 run the destructor. Thus
6350 test ? f(A()) : 0
6352 becomes (approximately)
6354 flag = 0;
6355 try {
6356 if (test) { A::A(temp); flag = 1; val = f(temp); }
6357 else { val = 0; }
6358 } finally {
6359 if (flag) A::~A(temp);
6363 if (force_uncond)
6365 gimplify_stmt (&cleanup, &cleanup_stmts);
6366 wce = gimple_build_wce (cleanup_stmts);
6367 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6369 else
6371 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6372 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6373 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6375 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6376 gimplify_stmt (&cleanup, &cleanup_stmts);
6377 wce = gimple_build_wce (cleanup_stmts);
6379 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6380 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6381 gimplify_seq_add_stmt (pre_p, ftrue);
6383 /* Because of this manipulation, and the EH edges that jump
6384 threading cannot redirect, the temporary (VAR) will appear
6385 to be used uninitialized. Don't warn. */
6386 TREE_NO_WARNING (var) = 1;
6389 else
6391 gimplify_stmt (&cleanup, &cleanup_stmts);
6392 wce = gimple_build_wce (cleanup_stmts);
6393 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6394 gimplify_seq_add_stmt (pre_p, wce);
6398 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6400 static enum gimplify_status
6401 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6403 tree targ = *expr_p;
6404 tree temp = TARGET_EXPR_SLOT (targ);
6405 tree init = TARGET_EXPR_INITIAL (targ);
6406 enum gimplify_status ret;
6408 bool unpoison_empty_seq = false;
6409 gimple_stmt_iterator unpoison_it;
6411 if (init)
6413 tree cleanup = NULL_TREE;
6415 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6416 to the temps list. Handle also variable length TARGET_EXPRs. */
6417 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6419 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6420 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6421 gimplify_vla_decl (temp, pre_p);
6423 else
6425 /* Save location where we need to place unpoisoning. It's possible
6426 that a variable will be converted to needs_to_live_in_memory. */
6427 unpoison_it = gsi_last (*pre_p);
6428 unpoison_empty_seq = gsi_end_p (unpoison_it);
6430 gimple_add_tmp_var (temp);
6433 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6434 expression is supposed to initialize the slot. */
6435 if (VOID_TYPE_P (TREE_TYPE (init)))
6436 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6437 else
6439 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6440 init = init_expr;
6441 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6442 init = NULL;
6443 ggc_free (init_expr);
6445 if (ret == GS_ERROR)
6447 /* PR c++/28266 Make sure this is expanded only once. */
6448 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6449 return GS_ERROR;
6451 if (init)
6452 gimplify_and_add (init, pre_p);
6454 /* If needed, push the cleanup for the temp. */
6455 if (TARGET_EXPR_CLEANUP (targ))
6457 if (CLEANUP_EH_ONLY (targ))
6458 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6459 CLEANUP_EH_ONLY (targ), pre_p);
6460 else
6461 cleanup = TARGET_EXPR_CLEANUP (targ);
6464 /* Add a clobber for the temporary going out of scope, like
6465 gimplify_bind_expr. */
6466 if (gimplify_ctxp->in_cleanup_point_expr
6467 && needs_to_live_in_memory (temp))
6469 if (flag_stack_reuse == SR_ALL)
6471 tree clobber = build_constructor (TREE_TYPE (temp),
6472 NULL);
6473 TREE_THIS_VOLATILE (clobber) = true;
6474 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6475 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6477 if (asan_poisoned_variables
6478 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6479 && dbg_cnt (asan_use_after_scope))
6481 tree asan_cleanup = build_asan_poison_call_expr (temp);
6482 if (asan_cleanup)
6484 if (unpoison_empty_seq)
6485 unpoison_it = gsi_start (*pre_p);
6487 asan_poison_variable (temp, false, &unpoison_it,
6488 unpoison_empty_seq);
6489 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6493 if (cleanup)
6494 gimple_push_cleanup (temp, cleanup, false, pre_p);
6496 /* Only expand this once. */
6497 TREE_OPERAND (targ, 3) = init;
6498 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6500 else
6501 /* We should have expanded this before. */
6502 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6504 *expr_p = temp;
6505 return GS_OK;
6508 /* Gimplification of expression trees. */
6510 /* Gimplify an expression which appears at statement context. The
6511 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6512 NULL, a new sequence is allocated.
6514 Return true if we actually added a statement to the queue. */
6516 bool
6517 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6519 gimple_seq_node last;
6521 last = gimple_seq_last (*seq_p);
6522 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6523 return last != gimple_seq_last (*seq_p);
6526 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6527 to CTX. If entries already exist, force them to be some flavor of private.
6528 If there is no enclosing parallel, do nothing. */
6530 void
6531 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6533 splay_tree_node n;
6535 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6536 return;
6540 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6541 if (n != NULL)
6543 if (n->value & GOVD_SHARED)
6544 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6545 else if (n->value & GOVD_MAP)
6546 n->value |= GOVD_MAP_TO_ONLY;
6547 else
6548 return;
6550 else if ((ctx->region_type & ORT_TARGET) != 0)
6552 if (ctx->target_map_scalars_firstprivate)
6553 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6554 else
6555 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6557 else if (ctx->region_type != ORT_WORKSHARE
6558 && ctx->region_type != ORT_SIMD
6559 && ctx->region_type != ORT_ACC
6560 && !(ctx->region_type & ORT_TARGET_DATA))
6561 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6563 ctx = ctx->outer_context;
6565 while (ctx);
6568 /* Similarly for each of the type sizes of TYPE. */
6570 static void
6571 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6573 if (type == NULL || type == error_mark_node)
6574 return;
6575 type = TYPE_MAIN_VARIANT (type);
6577 if (ctx->privatized_types->add (type))
6578 return;
6580 switch (TREE_CODE (type))
6582 case INTEGER_TYPE:
6583 case ENUMERAL_TYPE:
6584 case BOOLEAN_TYPE:
6585 case REAL_TYPE:
6586 case FIXED_POINT_TYPE:
6587 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6588 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6589 break;
6591 case ARRAY_TYPE:
6592 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6593 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6594 break;
6596 case RECORD_TYPE:
6597 case UNION_TYPE:
6598 case QUAL_UNION_TYPE:
6600 tree field;
6601 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6602 if (TREE_CODE (field) == FIELD_DECL)
6604 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6605 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6608 break;
6610 case POINTER_TYPE:
6611 case REFERENCE_TYPE:
6612 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6613 break;
6615 default:
6616 break;
6619 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6620 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6621 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6624 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6626 static void
6627 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6629 splay_tree_node n;
6630 unsigned int nflags;
6631 tree t;
6633 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6634 return;
6636 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6637 there are constructors involved somewhere. Exception is a shared clause,
6638 there is nothing privatized in that case. */
6639 if ((flags & GOVD_SHARED) == 0
6640 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6641 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6642 flags |= GOVD_SEEN;
6644 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6645 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6647 /* We shouldn't be re-adding the decl with the same data
6648 sharing class. */
6649 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6650 nflags = n->value | flags;
6651 /* The only combination of data sharing classes we should see is
6652 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6653 reduction variables to be used in data sharing clauses. */
6654 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6655 || ((nflags & GOVD_DATA_SHARE_CLASS)
6656 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6657 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6658 n->value = nflags;
6659 return;
6662 /* When adding a variable-sized variable, we have to handle all sorts
6663 of additional bits of data: the pointer replacement variable, and
6664 the parameters of the type. */
6665 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6667 /* Add the pointer replacement variable as PRIVATE if the variable
6668 replacement is private, else FIRSTPRIVATE since we'll need the
6669 address of the original variable either for SHARED, or for the
6670 copy into or out of the context. */
6671 if (!(flags & GOVD_LOCAL))
6673 if (flags & GOVD_MAP)
6674 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6675 else if (flags & GOVD_PRIVATE)
6676 nflags = GOVD_PRIVATE;
6677 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6678 && (flags & GOVD_FIRSTPRIVATE))
6679 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6680 else
6681 nflags = GOVD_FIRSTPRIVATE;
6682 nflags |= flags & GOVD_SEEN;
6683 t = DECL_VALUE_EXPR (decl);
6684 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6685 t = TREE_OPERAND (t, 0);
6686 gcc_assert (DECL_P (t));
6687 omp_add_variable (ctx, t, nflags);
6690 /* Add all of the variable and type parameters (which should have
6691 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6692 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6693 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6694 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6696 /* The variable-sized variable itself is never SHARED, only some form
6697 of PRIVATE. The sharing would take place via the pointer variable
6698 which we remapped above. */
6699 if (flags & GOVD_SHARED)
6700 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6701 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6703 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6704 alloca statement we generate for the variable, so make sure it
6705 is available. This isn't automatically needed for the SHARED
6706 case, since we won't be allocating local storage then.
6707 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6708 in this case omp_notice_variable will be called later
6709 on when it is gimplified. */
6710 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6711 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6712 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6714 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6715 && lang_hooks.decls.omp_privatize_by_reference (decl))
6717 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6719 /* Similar to the direct variable sized case above, we'll need the
6720 size of references being privatized. */
6721 if ((flags & GOVD_SHARED) == 0)
6723 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6724 if (DECL_P (t))
6725 omp_notice_variable (ctx, t, true);
6729 if (n != NULL)
6730 n->value |= flags;
6731 else
6732 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6734 /* For reductions clauses in OpenACC loop directives, by default create a
6735 copy clause on the enclosing parallel construct for carrying back the
6736 results. */
6737 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6739 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6740 while (outer_ctx)
6742 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6743 if (n != NULL)
6745 /* Ignore local variables and explicitly declared clauses. */
6746 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6747 break;
6748 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6750 /* According to the OpenACC spec, such a reduction variable
6751 should already have a copy map on a kernels construct,
6752 verify that here. */
6753 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6754 && (n->value & GOVD_MAP));
6756 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6758 /* Remove firstprivate and make it a copy map. */
6759 n->value &= ~GOVD_FIRSTPRIVATE;
6760 n->value |= GOVD_MAP;
6763 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6765 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6766 GOVD_MAP | GOVD_SEEN);
6767 break;
6769 outer_ctx = outer_ctx->outer_context;
6774 /* Notice a threadprivate variable DECL used in OMP context CTX.
6775 This just prints out diagnostics about threadprivate variable uses
6776 in untied tasks. If DECL2 is non-NULL, prevent this warning
6777 on that variable. */
6779 static bool
6780 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6781 tree decl2)
6783 splay_tree_node n;
6784 struct gimplify_omp_ctx *octx;
6786 for (octx = ctx; octx; octx = octx->outer_context)
6787 if ((octx->region_type & ORT_TARGET) != 0)
6789 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6790 if (n == NULL)
6792 error ("threadprivate variable %qE used in target region",
6793 DECL_NAME (decl));
6794 error_at (octx->location, "enclosing target region");
6795 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6797 if (decl2)
6798 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6801 if (ctx->region_type != ORT_UNTIED_TASK)
6802 return false;
6803 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6804 if (n == NULL)
6806 error ("threadprivate variable %qE used in untied task",
6807 DECL_NAME (decl));
6808 error_at (ctx->location, "enclosing task");
6809 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6811 if (decl2)
6812 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6813 return false;
6816 /* Return true if global var DECL is device resident. */
6818 static bool
6819 device_resident_p (tree decl)
6821 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6823 if (!attr)
6824 return false;
6826 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6828 tree c = TREE_VALUE (t);
6829 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6830 return true;
6833 return false;
6836 /* Return true if DECL has an ACC DECLARE attribute. */
6838 static bool
6839 is_oacc_declared (tree decl)
6841 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6842 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6843 return declared != NULL_TREE;
6846 /* Determine outer default flags for DECL mentioned in an OMP region
6847 but not declared in an enclosing clause.
6849 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6850 remapped firstprivate instead of shared. To some extent this is
6851 addressed in omp_firstprivatize_type_sizes, but not
6852 effectively. */
6854 static unsigned
6855 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6856 bool in_code, unsigned flags)
6858 enum omp_clause_default_kind default_kind = ctx->default_kind;
6859 enum omp_clause_default_kind kind;
6861 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6862 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6863 default_kind = kind;
6865 switch (default_kind)
6867 case OMP_CLAUSE_DEFAULT_NONE:
6869 const char *rtype;
6871 if (ctx->region_type & ORT_PARALLEL)
6872 rtype = "parallel";
6873 else if (ctx->region_type & ORT_TASK)
6874 rtype = "task";
6875 else if (ctx->region_type & ORT_TEAMS)
6876 rtype = "teams";
6877 else
6878 gcc_unreachable ();
6880 error ("%qE not specified in enclosing %qs",
6881 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6882 error_at (ctx->location, "enclosing %qs", rtype);
6884 /* FALLTHRU */
6885 case OMP_CLAUSE_DEFAULT_SHARED:
6886 flags |= GOVD_SHARED;
6887 break;
6888 case OMP_CLAUSE_DEFAULT_PRIVATE:
6889 flags |= GOVD_PRIVATE;
6890 break;
6891 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6892 flags |= GOVD_FIRSTPRIVATE;
6893 break;
6894 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6895 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6896 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6897 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6899 omp_notice_variable (octx, decl, in_code);
6900 for (; octx; octx = octx->outer_context)
6902 splay_tree_node n2;
6904 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6905 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6906 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6907 continue;
6908 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6910 flags |= GOVD_FIRSTPRIVATE;
6911 goto found_outer;
6913 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6915 flags |= GOVD_SHARED;
6916 goto found_outer;
6921 if (TREE_CODE (decl) == PARM_DECL
6922 || (!is_global_var (decl)
6923 && DECL_CONTEXT (decl) == current_function_decl))
6924 flags |= GOVD_FIRSTPRIVATE;
6925 else
6926 flags |= GOVD_SHARED;
6927 found_outer:
6928 break;
6930 default:
6931 gcc_unreachable ();
6934 return flags;
6938 /* Determine outer default flags for DECL mentioned in an OACC region
6939 but not declared in an enclosing clause. */
6941 static unsigned
6942 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6944 const char *rkind;
6945 bool on_device = false;
6946 bool declared = is_oacc_declared (decl);
6947 tree type = TREE_TYPE (decl);
6949 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6950 type = TREE_TYPE (type);
6952 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6953 && is_global_var (decl)
6954 && device_resident_p (decl))
6956 on_device = true;
6957 flags |= GOVD_MAP_TO_ONLY;
6960 switch (ctx->region_type)
6962 case ORT_ACC_KERNELS:
6963 rkind = "kernels";
6965 if (AGGREGATE_TYPE_P (type))
6967 /* Aggregates default to 'present_or_copy', or 'present'. */
6968 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6969 flags |= GOVD_MAP;
6970 else
6971 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6973 else
6974 /* Scalars default to 'copy'. */
6975 flags |= GOVD_MAP | GOVD_MAP_FORCE;
6977 break;
6979 case ORT_ACC_PARALLEL:
6980 rkind = "parallel";
6982 if (on_device || declared)
6983 flags |= GOVD_MAP;
6984 else if (AGGREGATE_TYPE_P (type))
6986 /* Aggregates default to 'present_or_copy', or 'present'. */
6987 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6988 flags |= GOVD_MAP;
6989 else
6990 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6992 else
6993 /* Scalars default to 'firstprivate'. */
6994 flags |= GOVD_FIRSTPRIVATE;
6996 break;
6998 default:
6999 gcc_unreachable ();
7002 if (DECL_ARTIFICIAL (decl))
7003 ; /* We can get compiler-generated decls, and should not complain
7004 about them. */
7005 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7007 error ("%qE not specified in enclosing OpenACC %qs construct",
7008 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7009 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7011 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7012 ; /* Handled above. */
7013 else
7014 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7016 return flags;
7019 /* Record the fact that DECL was used within the OMP context CTX.
7020 IN_CODE is true when real code uses DECL, and false when we should
7021 merely emit default(none) errors. Return true if DECL is going to
7022 be remapped and thus DECL shouldn't be gimplified into its
7023 DECL_VALUE_EXPR (if any). */
7025 static bool
7026 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7028 splay_tree_node n;
7029 unsigned flags = in_code ? GOVD_SEEN : 0;
7030 bool ret = false, shared;
7032 if (error_operand_p (decl))
7033 return false;
7035 if (ctx->region_type == ORT_NONE)
7036 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7038 if (is_global_var (decl))
7040 /* Threadprivate variables are predetermined. */
7041 if (DECL_THREAD_LOCAL_P (decl))
7042 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7044 if (DECL_HAS_VALUE_EXPR_P (decl))
7046 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7048 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7049 return omp_notice_threadprivate_variable (ctx, decl, value);
7052 if (gimplify_omp_ctxp->outer_context == NULL
7053 && VAR_P (decl)
7054 && oacc_get_fn_attrib (current_function_decl))
7056 location_t loc = DECL_SOURCE_LOCATION (decl);
7058 if (lookup_attribute ("omp declare target link",
7059 DECL_ATTRIBUTES (decl)))
7061 error_at (loc,
7062 "%qE with %<link%> clause used in %<routine%> function",
7063 DECL_NAME (decl));
7064 return false;
7066 else if (!lookup_attribute ("omp declare target",
7067 DECL_ATTRIBUTES (decl)))
7069 error_at (loc,
7070 "%qE requires a %<declare%> directive for use "
7071 "in a %<routine%> function", DECL_NAME (decl));
7072 return false;
7077 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7078 if ((ctx->region_type & ORT_TARGET) != 0)
7080 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7081 if (n == NULL)
7083 unsigned nflags = flags;
7084 if (ctx->target_map_pointers_as_0len_arrays
7085 || ctx->target_map_scalars_firstprivate)
7087 bool is_declare_target = false;
7088 bool is_scalar = false;
7089 if (is_global_var (decl)
7090 && varpool_node::get_create (decl)->offloadable)
7092 struct gimplify_omp_ctx *octx;
7093 for (octx = ctx->outer_context;
7094 octx; octx = octx->outer_context)
7096 n = splay_tree_lookup (octx->variables,
7097 (splay_tree_key)decl);
7098 if (n
7099 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7100 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7101 break;
7103 is_declare_target = octx == NULL;
7105 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7106 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7107 if (is_declare_target)
7109 else if (ctx->target_map_pointers_as_0len_arrays
7110 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7111 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7112 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7113 == POINTER_TYPE)))
7114 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7115 else if (is_scalar)
7116 nflags |= GOVD_FIRSTPRIVATE;
7119 struct gimplify_omp_ctx *octx = ctx->outer_context;
7120 if ((ctx->region_type & ORT_ACC) && octx)
7122 /* Look in outer OpenACC contexts, to see if there's a
7123 data attribute for this variable. */
7124 omp_notice_variable (octx, decl, in_code);
7126 for (; octx; octx = octx->outer_context)
7128 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7129 break;
7130 splay_tree_node n2
7131 = splay_tree_lookup (octx->variables,
7132 (splay_tree_key) decl);
7133 if (n2)
7135 if (octx->region_type == ORT_ACC_HOST_DATA)
7136 error ("variable %qE declared in enclosing "
7137 "%<host_data%> region", DECL_NAME (decl));
7138 nflags |= GOVD_MAP;
7139 if (octx->region_type == ORT_ACC_DATA
7140 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7141 nflags |= GOVD_MAP_0LEN_ARRAY;
7142 goto found_outer;
7148 tree type = TREE_TYPE (decl);
7150 if (nflags == flags
7151 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7152 && lang_hooks.decls.omp_privatize_by_reference (decl))
7153 type = TREE_TYPE (type);
7154 if (nflags == flags
7155 && !lang_hooks.types.omp_mappable_type (type))
7157 error ("%qD referenced in target region does not have "
7158 "a mappable type", decl);
7159 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7161 else if (nflags == flags)
7163 if ((ctx->region_type & ORT_ACC) != 0)
7164 nflags = oacc_default_clause (ctx, decl, flags);
7165 else
7166 nflags |= GOVD_MAP;
7169 found_outer:
7170 omp_add_variable (ctx, decl, nflags);
7172 else
7174 /* If nothing changed, there's nothing left to do. */
7175 if ((n->value & flags) == flags)
7176 return ret;
7177 flags |= n->value;
7178 n->value = flags;
7180 goto do_outer;
7183 if (n == NULL)
7185 if (ctx->region_type == ORT_WORKSHARE
7186 || ctx->region_type == ORT_SIMD
7187 || ctx->region_type == ORT_ACC
7188 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7189 goto do_outer;
7191 flags = omp_default_clause (ctx, decl, in_code, flags);
7193 if ((flags & GOVD_PRIVATE)
7194 && lang_hooks.decls.omp_private_outer_ref (decl))
7195 flags |= GOVD_PRIVATE_OUTER_REF;
7197 omp_add_variable (ctx, decl, flags);
7199 shared = (flags & GOVD_SHARED) != 0;
7200 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7201 goto do_outer;
7204 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7205 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7206 && DECL_SIZE (decl))
7208 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7210 splay_tree_node n2;
7211 tree t = DECL_VALUE_EXPR (decl);
7212 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7213 t = TREE_OPERAND (t, 0);
7214 gcc_assert (DECL_P (t));
7215 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7216 n2->value |= GOVD_SEEN;
7218 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7219 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7220 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7221 != INTEGER_CST))
7223 splay_tree_node n2;
7224 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7225 gcc_assert (DECL_P (t));
7226 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7227 if (n2)
7228 omp_notice_variable (ctx, t, true);
7232 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7233 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7235 /* If nothing changed, there's nothing left to do. */
7236 if ((n->value & flags) == flags)
7237 return ret;
7238 flags |= n->value;
7239 n->value = flags;
7241 do_outer:
7242 /* If the variable is private in the current context, then we don't
7243 need to propagate anything to an outer context. */
7244 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7245 return ret;
7246 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7247 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7248 return ret;
7249 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7250 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7251 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7252 return ret;
7253 if (ctx->outer_context
7254 && omp_notice_variable (ctx->outer_context, decl, in_code))
7255 return true;
7256 return ret;
7259 /* Verify that DECL is private within CTX. If there's specific information
7260 to the contrary in the innermost scope, generate an error. */
7262 static bool
7263 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7265 splay_tree_node n;
7267 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7268 if (n != NULL)
7270 if (n->value & GOVD_SHARED)
7272 if (ctx == gimplify_omp_ctxp)
7274 if (simd)
7275 error ("iteration variable %qE is predetermined linear",
7276 DECL_NAME (decl));
7277 else
7278 error ("iteration variable %qE should be private",
7279 DECL_NAME (decl));
7280 n->value = GOVD_PRIVATE;
7281 return true;
7283 else
7284 return false;
7286 else if ((n->value & GOVD_EXPLICIT) != 0
7287 && (ctx == gimplify_omp_ctxp
7288 || (ctx->region_type == ORT_COMBINED_PARALLEL
7289 && gimplify_omp_ctxp->outer_context == ctx)))
7291 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7292 error ("iteration variable %qE should not be firstprivate",
7293 DECL_NAME (decl));
7294 else if ((n->value & GOVD_REDUCTION) != 0)
7295 error ("iteration variable %qE should not be reduction",
7296 DECL_NAME (decl));
7297 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7298 error ("iteration variable %qE should not be linear",
7299 DECL_NAME (decl));
7300 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7301 error ("iteration variable %qE should not be lastprivate",
7302 DECL_NAME (decl));
7303 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7304 error ("iteration variable %qE should not be private",
7305 DECL_NAME (decl));
7306 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7307 error ("iteration variable %qE is predetermined linear",
7308 DECL_NAME (decl));
7310 return (ctx == gimplify_omp_ctxp
7311 || (ctx->region_type == ORT_COMBINED_PARALLEL
7312 && gimplify_omp_ctxp->outer_context == ctx));
7315 if (ctx->region_type != ORT_WORKSHARE
7316 && ctx->region_type != ORT_SIMD
7317 && ctx->region_type != ORT_ACC)
7318 return false;
7319 else if (ctx->outer_context)
7320 return omp_is_private (ctx->outer_context, decl, simd);
7321 return false;
7324 /* Return true if DECL is private within a parallel region
7325 that binds to the current construct's context or in parallel
7326 region's REDUCTION clause. */
7328 static bool
7329 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7331 splay_tree_node n;
7335 ctx = ctx->outer_context;
7336 if (ctx == NULL)
7338 if (is_global_var (decl))
7339 return false;
7341 /* References might be private, but might be shared too,
7342 when checking for copyprivate, assume they might be
7343 private, otherwise assume they might be shared. */
7344 if (copyprivate)
7345 return true;
7347 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7348 return false;
7350 /* Treat C++ privatized non-static data members outside
7351 of the privatization the same. */
7352 if (omp_member_access_dummy_var (decl))
7353 return false;
7355 return true;
7358 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7360 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7361 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7362 continue;
7364 if (n != NULL)
7366 if ((n->value & GOVD_LOCAL) != 0
7367 && omp_member_access_dummy_var (decl))
7368 return false;
7369 return (n->value & GOVD_SHARED) == 0;
7372 while (ctx->region_type == ORT_WORKSHARE
7373 || ctx->region_type == ORT_SIMD
7374 || ctx->region_type == ORT_ACC);
7375 return false;
7378 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7380 static tree
7381 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7383 tree t = *tp;
7385 /* If this node has been visited, unmark it and keep looking. */
7386 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7387 return t;
7389 if (IS_TYPE_OR_DECL_P (t))
7390 *walk_subtrees = 0;
7391 return NULL_TREE;
7394 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7395 and previous omp contexts. */
7397 static void
7398 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7399 enum omp_region_type region_type,
7400 enum tree_code code)
7402 struct gimplify_omp_ctx *ctx, *outer_ctx;
7403 tree c;
7404 hash_map<tree, tree> *struct_map_to_clause = NULL;
7405 tree *prev_list_p = NULL;
7407 ctx = new_omp_context (region_type);
7408 outer_ctx = ctx->outer_context;
7409 if (code == OMP_TARGET)
7411 if (!lang_GNU_Fortran ())
7412 ctx->target_map_pointers_as_0len_arrays = true;
7413 ctx->target_map_scalars_firstprivate = true;
7415 if (!lang_GNU_Fortran ())
7416 switch (code)
7418 case OMP_TARGET:
7419 case OMP_TARGET_DATA:
7420 case OMP_TARGET_ENTER_DATA:
7421 case OMP_TARGET_EXIT_DATA:
7422 case OACC_DECLARE:
7423 case OACC_HOST_DATA:
7424 ctx->target_firstprivatize_array_bases = true;
7425 default:
7426 break;
7429 while ((c = *list_p) != NULL)
7431 bool remove = false;
7432 bool notice_outer = true;
7433 const char *check_non_private = NULL;
7434 unsigned int flags;
7435 tree decl;
7437 switch (OMP_CLAUSE_CODE (c))
7439 case OMP_CLAUSE_PRIVATE:
7440 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7441 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7443 flags |= GOVD_PRIVATE_OUTER_REF;
7444 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7446 else
7447 notice_outer = false;
7448 goto do_add;
7449 case OMP_CLAUSE_SHARED:
7450 flags = GOVD_SHARED | GOVD_EXPLICIT;
7451 goto do_add;
7452 case OMP_CLAUSE_FIRSTPRIVATE:
7453 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7454 check_non_private = "firstprivate";
7455 goto do_add;
7456 case OMP_CLAUSE_LASTPRIVATE:
7457 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7458 check_non_private = "lastprivate";
7459 decl = OMP_CLAUSE_DECL (c);
7460 if (error_operand_p (decl))
7461 goto do_add;
7462 else if (outer_ctx
7463 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7464 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7465 && splay_tree_lookup (outer_ctx->variables,
7466 (splay_tree_key) decl) == NULL)
7468 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7469 if (outer_ctx->outer_context)
7470 omp_notice_variable (outer_ctx->outer_context, decl, true);
7472 else if (outer_ctx
7473 && (outer_ctx->region_type & ORT_TASK) != 0
7474 && outer_ctx->combined_loop
7475 && splay_tree_lookup (outer_ctx->variables,
7476 (splay_tree_key) decl) == NULL)
7478 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7479 if (outer_ctx->outer_context)
7480 omp_notice_variable (outer_ctx->outer_context, decl, true);
7482 else if (outer_ctx
7483 && (outer_ctx->region_type == ORT_WORKSHARE
7484 || outer_ctx->region_type == ORT_ACC)
7485 && outer_ctx->combined_loop
7486 && splay_tree_lookup (outer_ctx->variables,
7487 (splay_tree_key) decl) == NULL
7488 && !omp_check_private (outer_ctx, decl, false))
7490 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7491 if (outer_ctx->outer_context
7492 && (outer_ctx->outer_context->region_type
7493 == ORT_COMBINED_PARALLEL)
7494 && splay_tree_lookup (outer_ctx->outer_context->variables,
7495 (splay_tree_key) decl) == NULL)
7497 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7498 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7499 if (octx->outer_context)
7501 octx = octx->outer_context;
7502 if (octx->region_type == ORT_WORKSHARE
7503 && octx->combined_loop
7504 && splay_tree_lookup (octx->variables,
7505 (splay_tree_key) decl) == NULL
7506 && !omp_check_private (octx, decl, false))
7508 omp_add_variable (octx, decl,
7509 GOVD_LASTPRIVATE | GOVD_SEEN);
7510 octx = octx->outer_context;
7511 if (octx
7512 && octx->region_type == ORT_COMBINED_TEAMS
7513 && (splay_tree_lookup (octx->variables,
7514 (splay_tree_key) decl)
7515 == NULL))
7517 omp_add_variable (octx, decl,
7518 GOVD_SHARED | GOVD_SEEN);
7519 octx = octx->outer_context;
7522 if (octx)
7523 omp_notice_variable (octx, decl, true);
7526 else if (outer_ctx->outer_context)
7527 omp_notice_variable (outer_ctx->outer_context, decl, true);
7529 goto do_add;
7530 case OMP_CLAUSE_REDUCTION:
7531 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7532 /* OpenACC permits reductions on private variables. */
7533 if (!(region_type & ORT_ACC))
7534 check_non_private = "reduction";
7535 decl = OMP_CLAUSE_DECL (c);
7536 if (TREE_CODE (decl) == MEM_REF)
7538 tree type = TREE_TYPE (decl);
7539 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7540 NULL, is_gimple_val, fb_rvalue, false)
7541 == GS_ERROR)
7543 remove = true;
7544 break;
7546 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7547 if (DECL_P (v))
7549 omp_firstprivatize_variable (ctx, v);
7550 omp_notice_variable (ctx, v, true);
7552 decl = TREE_OPERAND (decl, 0);
7553 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7555 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7556 NULL, is_gimple_val, fb_rvalue, false)
7557 == GS_ERROR)
7559 remove = true;
7560 break;
7562 v = TREE_OPERAND (decl, 1);
7563 if (DECL_P (v))
7565 omp_firstprivatize_variable (ctx, v);
7566 omp_notice_variable (ctx, v, true);
7568 decl = TREE_OPERAND (decl, 0);
7570 if (TREE_CODE (decl) == ADDR_EXPR
7571 || TREE_CODE (decl) == INDIRECT_REF)
7572 decl = TREE_OPERAND (decl, 0);
7574 goto do_add_decl;
7575 case OMP_CLAUSE_LINEAR:
7576 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7577 is_gimple_val, fb_rvalue) == GS_ERROR)
7579 remove = true;
7580 break;
7582 else
7584 if (code == OMP_SIMD
7585 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7587 struct gimplify_omp_ctx *octx = outer_ctx;
7588 if (octx
7589 && octx->region_type == ORT_WORKSHARE
7590 && octx->combined_loop
7591 && !octx->distribute)
7593 if (octx->outer_context
7594 && (octx->outer_context->region_type
7595 == ORT_COMBINED_PARALLEL))
7596 octx = octx->outer_context->outer_context;
7597 else
7598 octx = octx->outer_context;
7600 if (octx
7601 && octx->region_type == ORT_WORKSHARE
7602 && octx->combined_loop
7603 && octx->distribute)
7605 error_at (OMP_CLAUSE_LOCATION (c),
7606 "%<linear%> clause for variable other than "
7607 "loop iterator specified on construct "
7608 "combined with %<distribute%>");
7609 remove = true;
7610 break;
7613 /* For combined #pragma omp parallel for simd, need to put
7614 lastprivate and perhaps firstprivate too on the
7615 parallel. Similarly for #pragma omp for simd. */
7616 struct gimplify_omp_ctx *octx = outer_ctx;
7617 decl = NULL_TREE;
7620 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7621 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7622 break;
7623 decl = OMP_CLAUSE_DECL (c);
7624 if (error_operand_p (decl))
7626 decl = NULL_TREE;
7627 break;
7629 flags = GOVD_SEEN;
7630 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7631 flags |= GOVD_FIRSTPRIVATE;
7632 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7633 flags |= GOVD_LASTPRIVATE;
7634 if (octx
7635 && octx->region_type == ORT_WORKSHARE
7636 && octx->combined_loop)
7638 if (octx->outer_context
7639 && (octx->outer_context->region_type
7640 == ORT_COMBINED_PARALLEL))
7641 octx = octx->outer_context;
7642 else if (omp_check_private (octx, decl, false))
7643 break;
7645 else if (octx
7646 && (octx->region_type & ORT_TASK) != 0
7647 && octx->combined_loop)
7649 else if (octx
7650 && octx->region_type == ORT_COMBINED_PARALLEL
7651 && ctx->region_type == ORT_WORKSHARE
7652 && octx == outer_ctx)
7653 flags = GOVD_SEEN | GOVD_SHARED;
7654 else if (octx
7655 && octx->region_type == ORT_COMBINED_TEAMS)
7656 flags = GOVD_SEEN | GOVD_SHARED;
7657 else if (octx
7658 && octx->region_type == ORT_COMBINED_TARGET)
7660 flags &= ~GOVD_LASTPRIVATE;
7661 if (flags == GOVD_SEEN)
7662 break;
7664 else
7665 break;
7666 splay_tree_node on
7667 = splay_tree_lookup (octx->variables,
7668 (splay_tree_key) decl);
7669 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7671 octx = NULL;
7672 break;
7674 omp_add_variable (octx, decl, flags);
7675 if (octx->outer_context == NULL)
7676 break;
7677 octx = octx->outer_context;
7679 while (1);
7680 if (octx
7681 && decl
7682 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7683 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7684 omp_notice_variable (octx, decl, true);
7686 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7687 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7688 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7690 notice_outer = false;
7691 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7693 goto do_add;
7695 case OMP_CLAUSE_MAP:
7696 decl = OMP_CLAUSE_DECL (c);
7697 if (error_operand_p (decl))
7698 remove = true;
7699 switch (code)
7701 case OMP_TARGET:
7702 break;
7703 case OACC_DATA:
7704 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7705 break;
7706 /* FALLTHRU */
7707 case OMP_TARGET_DATA:
7708 case OMP_TARGET_ENTER_DATA:
7709 case OMP_TARGET_EXIT_DATA:
7710 case OACC_ENTER_DATA:
7711 case OACC_EXIT_DATA:
7712 case OACC_HOST_DATA:
7713 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7714 || (OMP_CLAUSE_MAP_KIND (c)
7715 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7716 /* For target {,enter ,exit }data only the array slice is
7717 mapped, but not the pointer to it. */
7718 remove = true;
7719 break;
7720 default:
7721 break;
7723 if (remove)
7724 break;
7725 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7727 struct gimplify_omp_ctx *octx;
7728 for (octx = outer_ctx; octx; octx = octx->outer_context)
7730 if (octx->region_type != ORT_ACC_HOST_DATA)
7731 break;
7732 splay_tree_node n2
7733 = splay_tree_lookup (octx->variables,
7734 (splay_tree_key) decl);
7735 if (n2)
7736 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7737 "declared in enclosing %<host_data%> region",
7738 DECL_NAME (decl));
7741 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7742 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7743 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7744 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7745 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7747 remove = true;
7748 break;
7750 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7751 || (OMP_CLAUSE_MAP_KIND (c)
7752 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7753 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7755 OMP_CLAUSE_SIZE (c)
7756 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7757 false);
7758 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7759 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7761 if (!DECL_P (decl))
7763 tree d = decl, *pd;
7764 if (TREE_CODE (d) == ARRAY_REF)
7766 while (TREE_CODE (d) == ARRAY_REF)
7767 d = TREE_OPERAND (d, 0);
7768 if (TREE_CODE (d) == COMPONENT_REF
7769 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7770 decl = d;
7772 pd = &OMP_CLAUSE_DECL (c);
7773 if (d == decl
7774 && TREE_CODE (decl) == INDIRECT_REF
7775 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7776 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7777 == REFERENCE_TYPE))
7779 pd = &TREE_OPERAND (decl, 0);
7780 decl = TREE_OPERAND (decl, 0);
7782 if (TREE_CODE (decl) == COMPONENT_REF)
7784 while (TREE_CODE (decl) == COMPONENT_REF)
7785 decl = TREE_OPERAND (decl, 0);
7786 if (TREE_CODE (decl) == INDIRECT_REF
7787 && DECL_P (TREE_OPERAND (decl, 0))
7788 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7789 == REFERENCE_TYPE))
7790 decl = TREE_OPERAND (decl, 0);
7792 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7793 == GS_ERROR)
7795 remove = true;
7796 break;
7798 if (DECL_P (decl))
7800 if (error_operand_p (decl))
7802 remove = true;
7803 break;
7806 tree stype = TREE_TYPE (decl);
7807 if (TREE_CODE (stype) == REFERENCE_TYPE)
7808 stype = TREE_TYPE (stype);
7809 if (TYPE_SIZE_UNIT (stype) == NULL
7810 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7812 error_at (OMP_CLAUSE_LOCATION (c),
7813 "mapping field %qE of variable length "
7814 "structure", OMP_CLAUSE_DECL (c));
7815 remove = true;
7816 break;
7819 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7821 /* Error recovery. */
7822 if (prev_list_p == NULL)
7824 remove = true;
7825 break;
7827 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7829 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7830 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7832 remove = true;
7833 break;
7838 tree offset;
7839 HOST_WIDE_INT bitsize, bitpos;
7840 machine_mode mode;
7841 int unsignedp, reversep, volatilep = 0;
7842 tree base = OMP_CLAUSE_DECL (c);
7843 while (TREE_CODE (base) == ARRAY_REF)
7844 base = TREE_OPERAND (base, 0);
7845 if (TREE_CODE (base) == INDIRECT_REF)
7846 base = TREE_OPERAND (base, 0);
7847 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7848 &mode, &unsignedp, &reversep,
7849 &volatilep);
7850 tree orig_base = base;
7851 if ((TREE_CODE (base) == INDIRECT_REF
7852 || (TREE_CODE (base) == MEM_REF
7853 && integer_zerop (TREE_OPERAND (base, 1))))
7854 && DECL_P (TREE_OPERAND (base, 0))
7855 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7856 == REFERENCE_TYPE))
7857 base = TREE_OPERAND (base, 0);
7858 gcc_assert (base == decl
7859 && (offset == NULL_TREE
7860 || TREE_CODE (offset) == INTEGER_CST));
7862 splay_tree_node n
7863 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7864 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7865 == GOMP_MAP_ALWAYS_POINTER);
7866 if (n == NULL || (n->value & GOVD_MAP) == 0)
7868 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7869 OMP_CLAUSE_MAP);
7870 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7871 if (orig_base != base)
7872 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7873 else
7874 OMP_CLAUSE_DECL (l) = decl;
7875 OMP_CLAUSE_SIZE (l) = size_int (1);
7876 if (struct_map_to_clause == NULL)
7877 struct_map_to_clause = new hash_map<tree, tree>;
7878 struct_map_to_clause->put (decl, l);
7879 if (ptr)
7881 enum gomp_map_kind mkind
7882 = code == OMP_TARGET_EXIT_DATA
7883 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7884 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7885 OMP_CLAUSE_MAP);
7886 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7887 OMP_CLAUSE_DECL (c2)
7888 = unshare_expr (OMP_CLAUSE_DECL (c));
7889 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7890 OMP_CLAUSE_SIZE (c2)
7891 = TYPE_SIZE_UNIT (ptr_type_node);
7892 OMP_CLAUSE_CHAIN (l) = c2;
7893 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7895 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7896 tree c3
7897 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7898 OMP_CLAUSE_MAP);
7899 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7900 OMP_CLAUSE_DECL (c3)
7901 = unshare_expr (OMP_CLAUSE_DECL (c4));
7902 OMP_CLAUSE_SIZE (c3)
7903 = TYPE_SIZE_UNIT (ptr_type_node);
7904 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7905 OMP_CLAUSE_CHAIN (c2) = c3;
7907 *prev_list_p = l;
7908 prev_list_p = NULL;
7910 else
7912 OMP_CLAUSE_CHAIN (l) = c;
7913 *list_p = l;
7914 list_p = &OMP_CLAUSE_CHAIN (l);
7916 if (orig_base != base && code == OMP_TARGET)
7918 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7919 OMP_CLAUSE_MAP);
7920 enum gomp_map_kind mkind
7921 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7922 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7923 OMP_CLAUSE_DECL (c2) = decl;
7924 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7925 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7926 OMP_CLAUSE_CHAIN (l) = c2;
7928 flags = GOVD_MAP | GOVD_EXPLICIT;
7929 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7930 flags |= GOVD_SEEN;
7931 goto do_add_decl;
7933 else
7935 tree *osc = struct_map_to_clause->get (decl);
7936 tree *sc = NULL, *scp = NULL;
7937 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7938 n->value |= GOVD_SEEN;
7939 offset_int o1, o2;
7940 if (offset)
7941 o1 = wi::to_offset (offset);
7942 else
7943 o1 = 0;
7944 if (bitpos)
7945 o1 = o1 + bitpos / BITS_PER_UNIT;
7946 sc = &OMP_CLAUSE_CHAIN (*osc);
7947 if (*sc != c
7948 && (OMP_CLAUSE_MAP_KIND (*sc)
7949 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7950 sc = &OMP_CLAUSE_CHAIN (*sc);
7951 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7952 if (ptr && sc == prev_list_p)
7953 break;
7954 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7955 != COMPONENT_REF
7956 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7957 != INDIRECT_REF)
7958 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7959 != ARRAY_REF))
7960 break;
7961 else
7963 tree offset2;
7964 HOST_WIDE_INT bitsize2, bitpos2;
7965 base = OMP_CLAUSE_DECL (*sc);
7966 if (TREE_CODE (base) == ARRAY_REF)
7968 while (TREE_CODE (base) == ARRAY_REF)
7969 base = TREE_OPERAND (base, 0);
7970 if (TREE_CODE (base) != COMPONENT_REF
7971 || (TREE_CODE (TREE_TYPE (base))
7972 != ARRAY_TYPE))
7973 break;
7975 else if (TREE_CODE (base) == INDIRECT_REF
7976 && (TREE_CODE (TREE_OPERAND (base, 0))
7977 == COMPONENT_REF)
7978 && (TREE_CODE (TREE_TYPE
7979 (TREE_OPERAND (base, 0)))
7980 == REFERENCE_TYPE))
7981 base = TREE_OPERAND (base, 0);
7982 base = get_inner_reference (base, &bitsize2,
7983 &bitpos2, &offset2,
7984 &mode, &unsignedp,
7985 &reversep, &volatilep);
7986 if ((TREE_CODE (base) == INDIRECT_REF
7987 || (TREE_CODE (base) == MEM_REF
7988 && integer_zerop (TREE_OPERAND (base,
7989 1))))
7990 && DECL_P (TREE_OPERAND (base, 0))
7991 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7992 0)))
7993 == REFERENCE_TYPE))
7994 base = TREE_OPERAND (base, 0);
7995 if (base != decl)
7996 break;
7997 if (scp)
7998 continue;
7999 gcc_assert (offset == NULL_TREE
8000 || TREE_CODE (offset) == INTEGER_CST);
8001 tree d1 = OMP_CLAUSE_DECL (*sc);
8002 tree d2 = OMP_CLAUSE_DECL (c);
8003 while (TREE_CODE (d1) == ARRAY_REF)
8004 d1 = TREE_OPERAND (d1, 0);
8005 while (TREE_CODE (d2) == ARRAY_REF)
8006 d2 = TREE_OPERAND (d2, 0);
8007 if (TREE_CODE (d1) == INDIRECT_REF)
8008 d1 = TREE_OPERAND (d1, 0);
8009 if (TREE_CODE (d2) == INDIRECT_REF)
8010 d2 = TREE_OPERAND (d2, 0);
8011 while (TREE_CODE (d1) == COMPONENT_REF)
8012 if (TREE_CODE (d2) == COMPONENT_REF
8013 && TREE_OPERAND (d1, 1)
8014 == TREE_OPERAND (d2, 1))
8016 d1 = TREE_OPERAND (d1, 0);
8017 d2 = TREE_OPERAND (d2, 0);
8019 else
8020 break;
8021 if (d1 == d2)
8023 error_at (OMP_CLAUSE_LOCATION (c),
8024 "%qE appears more than once in map "
8025 "clauses", OMP_CLAUSE_DECL (c));
8026 remove = true;
8027 break;
8029 if (offset2)
8030 o2 = wi::to_offset (offset2);
8031 else
8032 o2 = 0;
8033 if (bitpos2)
8034 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8035 if (wi::ltu_p (o1, o2)
8036 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8038 if (ptr)
8039 scp = sc;
8040 else
8041 break;
8044 if (remove)
8045 break;
8046 OMP_CLAUSE_SIZE (*osc)
8047 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8048 size_one_node);
8049 if (ptr)
8051 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8052 OMP_CLAUSE_MAP);
8053 tree cl = NULL_TREE;
8054 enum gomp_map_kind mkind
8055 = code == OMP_TARGET_EXIT_DATA
8056 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8057 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8058 OMP_CLAUSE_DECL (c2)
8059 = unshare_expr (OMP_CLAUSE_DECL (c));
8060 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8061 OMP_CLAUSE_SIZE (c2)
8062 = TYPE_SIZE_UNIT (ptr_type_node);
8063 cl = scp ? *prev_list_p : c2;
8064 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8066 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8067 tree c3
8068 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8069 OMP_CLAUSE_MAP);
8070 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8071 OMP_CLAUSE_DECL (c3)
8072 = unshare_expr (OMP_CLAUSE_DECL (c4));
8073 OMP_CLAUSE_SIZE (c3)
8074 = TYPE_SIZE_UNIT (ptr_type_node);
8075 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8076 if (!scp)
8077 OMP_CLAUSE_CHAIN (c2) = c3;
8078 else
8079 cl = c3;
8081 if (scp)
8082 *scp = c2;
8083 if (sc == prev_list_p)
8085 *sc = cl;
8086 prev_list_p = NULL;
8088 else
8090 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8091 list_p = prev_list_p;
8092 prev_list_p = NULL;
8093 OMP_CLAUSE_CHAIN (c) = *sc;
8094 *sc = cl;
8095 continue;
8098 else if (*sc != c)
8100 *list_p = OMP_CLAUSE_CHAIN (c);
8101 OMP_CLAUSE_CHAIN (c) = *sc;
8102 *sc = c;
8103 continue;
8107 if (!remove
8108 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8109 && OMP_CLAUSE_CHAIN (c)
8110 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8111 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8112 == GOMP_MAP_ALWAYS_POINTER))
8113 prev_list_p = list_p;
8114 break;
8116 flags = GOVD_MAP | GOVD_EXPLICIT;
8117 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8118 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8119 flags |= GOVD_MAP_ALWAYS_TO;
8120 goto do_add;
8122 case OMP_CLAUSE_DEPEND:
8123 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8125 tree deps = OMP_CLAUSE_DECL (c);
8126 while (deps && TREE_CODE (deps) == TREE_LIST)
8128 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8129 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8130 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8131 pre_p, NULL, is_gimple_val, fb_rvalue);
8132 deps = TREE_CHAIN (deps);
8134 break;
8136 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8137 break;
8138 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8140 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8141 NULL, is_gimple_val, fb_rvalue);
8142 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8144 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8146 remove = true;
8147 break;
8149 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8150 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8151 is_gimple_val, fb_rvalue) == GS_ERROR)
8153 remove = true;
8154 break;
8156 break;
8158 case OMP_CLAUSE_TO:
8159 case OMP_CLAUSE_FROM:
8160 case OMP_CLAUSE__CACHE_:
8161 decl = OMP_CLAUSE_DECL (c);
8162 if (error_operand_p (decl))
8164 remove = true;
8165 break;
8167 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8168 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8169 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8170 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8171 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8173 remove = true;
8174 break;
8176 if (!DECL_P (decl))
8178 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8179 NULL, is_gimple_lvalue, fb_lvalue)
8180 == GS_ERROR)
8182 remove = true;
8183 break;
8185 break;
8187 goto do_notice;
8189 case OMP_CLAUSE_USE_DEVICE_PTR:
8190 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8191 goto do_add;
8192 case OMP_CLAUSE_IS_DEVICE_PTR:
8193 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8194 goto do_add;
8196 do_add:
8197 decl = OMP_CLAUSE_DECL (c);
8198 do_add_decl:
8199 if (error_operand_p (decl))
8201 remove = true;
8202 break;
8204 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8206 tree t = omp_member_access_dummy_var (decl);
8207 if (t)
8209 tree v = DECL_VALUE_EXPR (decl);
8210 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8211 if (outer_ctx)
8212 omp_notice_variable (outer_ctx, t, true);
8215 if (code == OACC_DATA
8216 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8217 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8218 flags |= GOVD_MAP_0LEN_ARRAY;
8219 omp_add_variable (ctx, decl, flags);
8220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8221 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8223 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8224 GOVD_LOCAL | GOVD_SEEN);
8225 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8226 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8227 find_decl_expr,
8228 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8229 NULL) == NULL_TREE)
8230 omp_add_variable (ctx,
8231 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8232 GOVD_LOCAL | GOVD_SEEN);
8233 gimplify_omp_ctxp = ctx;
8234 push_gimplify_context ();
8236 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8237 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8239 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8240 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8241 pop_gimplify_context
8242 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8243 push_gimplify_context ();
8244 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8245 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8246 pop_gimplify_context
8247 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8248 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8249 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8251 gimplify_omp_ctxp = outer_ctx;
8253 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8254 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8256 gimplify_omp_ctxp = ctx;
8257 push_gimplify_context ();
8258 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8260 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8261 NULL, NULL);
8262 TREE_SIDE_EFFECTS (bind) = 1;
8263 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8264 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8266 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8267 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8268 pop_gimplify_context
8269 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8270 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8272 gimplify_omp_ctxp = outer_ctx;
8274 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8275 && OMP_CLAUSE_LINEAR_STMT (c))
8277 gimplify_omp_ctxp = ctx;
8278 push_gimplify_context ();
8279 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8281 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8282 NULL, NULL);
8283 TREE_SIDE_EFFECTS (bind) = 1;
8284 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8285 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8287 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8288 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8289 pop_gimplify_context
8290 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8291 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8293 gimplify_omp_ctxp = outer_ctx;
8295 if (notice_outer)
8296 goto do_notice;
8297 break;
8299 case OMP_CLAUSE_COPYIN:
8300 case OMP_CLAUSE_COPYPRIVATE:
8301 decl = OMP_CLAUSE_DECL (c);
8302 if (error_operand_p (decl))
8304 remove = true;
8305 break;
8307 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8308 && !remove
8309 && !omp_check_private (ctx, decl, true))
8311 remove = true;
8312 if (is_global_var (decl))
8314 if (DECL_THREAD_LOCAL_P (decl))
8315 remove = false;
8316 else if (DECL_HAS_VALUE_EXPR_P (decl))
8318 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8320 if (value
8321 && DECL_P (value)
8322 && DECL_THREAD_LOCAL_P (value))
8323 remove = false;
8326 if (remove)
8327 error_at (OMP_CLAUSE_LOCATION (c),
8328 "copyprivate variable %qE is not threadprivate"
8329 " or private in outer context", DECL_NAME (decl));
8331 do_notice:
8332 if (outer_ctx)
8333 omp_notice_variable (outer_ctx, decl, true);
8334 if (check_non_private
8335 && region_type == ORT_WORKSHARE
8336 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8337 || decl == OMP_CLAUSE_DECL (c)
8338 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8339 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8340 == ADDR_EXPR
8341 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8342 == POINTER_PLUS_EXPR
8343 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8344 (OMP_CLAUSE_DECL (c), 0), 0))
8345 == ADDR_EXPR)))))
8346 && omp_check_private (ctx, decl, false))
8348 error ("%s variable %qE is private in outer context",
8349 check_non_private, DECL_NAME (decl));
8350 remove = true;
8352 break;
8354 case OMP_CLAUSE_IF:
8355 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8356 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8358 const char *p[2];
8359 for (int i = 0; i < 2; i++)
8360 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8362 case OMP_PARALLEL: p[i] = "parallel"; break;
8363 case OMP_TASK: p[i] = "task"; break;
8364 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8365 case OMP_TARGET_DATA: p[i] = "target data"; break;
8366 case OMP_TARGET: p[i] = "target"; break;
8367 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8368 case OMP_TARGET_ENTER_DATA:
8369 p[i] = "target enter data"; break;
8370 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8371 default: gcc_unreachable ();
8373 error_at (OMP_CLAUSE_LOCATION (c),
8374 "expected %qs %<if%> clause modifier rather than %qs",
8375 p[0], p[1]);
8376 remove = true;
8378 /* Fall through. */
8380 case OMP_CLAUSE_FINAL:
8381 OMP_CLAUSE_OPERAND (c, 0)
8382 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8383 /* Fall through. */
8385 case OMP_CLAUSE_SCHEDULE:
8386 case OMP_CLAUSE_NUM_THREADS:
8387 case OMP_CLAUSE_NUM_TEAMS:
8388 case OMP_CLAUSE_THREAD_LIMIT:
8389 case OMP_CLAUSE_DIST_SCHEDULE:
8390 case OMP_CLAUSE_DEVICE:
8391 case OMP_CLAUSE_PRIORITY:
8392 case OMP_CLAUSE_GRAINSIZE:
8393 case OMP_CLAUSE_NUM_TASKS:
8394 case OMP_CLAUSE_HINT:
8395 case OMP_CLAUSE_ASYNC:
8396 case OMP_CLAUSE_WAIT:
8397 case OMP_CLAUSE_NUM_GANGS:
8398 case OMP_CLAUSE_NUM_WORKERS:
8399 case OMP_CLAUSE_VECTOR_LENGTH:
8400 case OMP_CLAUSE_WORKER:
8401 case OMP_CLAUSE_VECTOR:
8402 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8403 is_gimple_val, fb_rvalue) == GS_ERROR)
8404 remove = true;
8405 break;
8407 case OMP_CLAUSE_GANG:
8408 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8409 is_gimple_val, fb_rvalue) == GS_ERROR)
8410 remove = true;
8411 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8412 is_gimple_val, fb_rvalue) == GS_ERROR)
8413 remove = true;
8414 break;
8416 case OMP_CLAUSE_NOWAIT:
8417 case OMP_CLAUSE_ORDERED:
8418 case OMP_CLAUSE_UNTIED:
8419 case OMP_CLAUSE_COLLAPSE:
8420 case OMP_CLAUSE_TILE:
8421 case OMP_CLAUSE_AUTO:
8422 case OMP_CLAUSE_SEQ:
8423 case OMP_CLAUSE_INDEPENDENT:
8424 case OMP_CLAUSE_MERGEABLE:
8425 case OMP_CLAUSE_PROC_BIND:
8426 case OMP_CLAUSE_SAFELEN:
8427 case OMP_CLAUSE_SIMDLEN:
8428 case OMP_CLAUSE_NOGROUP:
8429 case OMP_CLAUSE_THREADS:
8430 case OMP_CLAUSE_SIMD:
8431 break;
8433 case OMP_CLAUSE_DEFAULTMAP:
8434 ctx->target_map_scalars_firstprivate = false;
8435 break;
8437 case OMP_CLAUSE_ALIGNED:
8438 decl = OMP_CLAUSE_DECL (c);
8439 if (error_operand_p (decl))
8441 remove = true;
8442 break;
8444 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8445 is_gimple_val, fb_rvalue) == GS_ERROR)
8447 remove = true;
8448 break;
8450 if (!is_global_var (decl)
8451 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8452 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8453 break;
8455 case OMP_CLAUSE_DEFAULT:
8456 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8457 break;
8459 default:
8460 gcc_unreachable ();
8463 if (code == OACC_DATA
8464 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8465 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8466 remove = true;
8467 if (remove)
8468 *list_p = OMP_CLAUSE_CHAIN (c);
8469 else
8470 list_p = &OMP_CLAUSE_CHAIN (c);
8473 gimplify_omp_ctxp = ctx;
8474 if (struct_map_to_clause)
8475 delete struct_map_to_clause;
8478 /* Return true if DECL is a candidate for shared to firstprivate
8479 optimization. We only consider non-addressable scalars, not
8480 too big, and not references. */
8482 static bool
8483 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8485 if (TREE_ADDRESSABLE (decl))
8486 return false;
8487 tree type = TREE_TYPE (decl);
8488 if (!is_gimple_reg_type (type)
8489 || TREE_CODE (type) == REFERENCE_TYPE
8490 || TREE_ADDRESSABLE (type))
8491 return false;
8492 /* Don't optimize too large decls, as each thread/task will have
8493 its own. */
8494 HOST_WIDE_INT len = int_size_in_bytes (type);
8495 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8496 return false;
8497 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8498 return false;
8499 return true;
8502 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8503 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8504 GOVD_WRITTEN in outer contexts. */
8506 static void
8507 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8509 for (; ctx; ctx = ctx->outer_context)
8511 splay_tree_node n = splay_tree_lookup (ctx->variables,
8512 (splay_tree_key) decl);
8513 if (n == NULL)
8514 continue;
8515 else if (n->value & GOVD_SHARED)
8517 n->value |= GOVD_WRITTEN;
8518 return;
8520 else if (n->value & GOVD_DATA_SHARE_CLASS)
8521 return;
8525 /* Helper callback for walk_gimple_seq to discover possible stores
8526 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8527 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8528 for those. */
8530 static tree
8531 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8533 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8535 *walk_subtrees = 0;
8536 if (!wi->is_lhs)
8537 return NULL_TREE;
8539 tree op = *tp;
8542 if (handled_component_p (op))
8543 op = TREE_OPERAND (op, 0);
8544 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8545 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8546 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8547 else
8548 break;
8550 while (1);
8551 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8552 return NULL_TREE;
8554 omp_mark_stores (gimplify_omp_ctxp, op);
8555 return NULL_TREE;
8558 /* Helper callback for walk_gimple_seq to discover possible stores
8559 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8560 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8561 for those. */
8563 static tree
8564 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8565 bool *handled_ops_p,
8566 struct walk_stmt_info *wi)
8568 gimple *stmt = gsi_stmt (*gsi_p);
8569 switch (gimple_code (stmt))
8571 /* Don't recurse on OpenMP constructs for which
8572 gimplify_adjust_omp_clauses already handled the bodies,
8573 except handle gimple_omp_for_pre_body. */
8574 case GIMPLE_OMP_FOR:
8575 *handled_ops_p = true;
8576 if (gimple_omp_for_pre_body (stmt))
8577 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8578 omp_find_stores_stmt, omp_find_stores_op, wi);
8579 break;
8580 case GIMPLE_OMP_PARALLEL:
8581 case GIMPLE_OMP_TASK:
8582 case GIMPLE_OMP_SECTIONS:
8583 case GIMPLE_OMP_SINGLE:
8584 case GIMPLE_OMP_TARGET:
8585 case GIMPLE_OMP_TEAMS:
8586 case GIMPLE_OMP_CRITICAL:
8587 *handled_ops_p = true;
8588 break;
8589 default:
8590 break;
8592 return NULL_TREE;
8595 struct gimplify_adjust_omp_clauses_data
8597 tree *list_p;
8598 gimple_seq *pre_p;
8601 /* For all variables that were not actually used within the context,
8602 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8604 static int
8605 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8607 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8608 gimple_seq *pre_p
8609 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8610 tree decl = (tree) n->key;
8611 unsigned flags = n->value;
8612 enum omp_clause_code code;
8613 tree clause;
8614 bool private_debug;
8616 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8617 return 0;
8618 if ((flags & GOVD_SEEN) == 0)
8619 return 0;
8620 if (flags & GOVD_DEBUG_PRIVATE)
8622 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8623 private_debug = true;
8625 else if (flags & GOVD_MAP)
8626 private_debug = false;
8627 else
8628 private_debug
8629 = lang_hooks.decls.omp_private_debug_clause (decl,
8630 !!(flags & GOVD_SHARED));
8631 if (private_debug)
8632 code = OMP_CLAUSE_PRIVATE;
8633 else if (flags & GOVD_MAP)
8635 code = OMP_CLAUSE_MAP;
8636 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8637 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8639 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8640 return 0;
8643 else if (flags & GOVD_SHARED)
8645 if (is_global_var (decl))
8647 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8648 while (ctx != NULL)
8650 splay_tree_node on
8651 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8652 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8653 | GOVD_PRIVATE | GOVD_REDUCTION
8654 | GOVD_LINEAR | GOVD_MAP)) != 0)
8655 break;
8656 ctx = ctx->outer_context;
8658 if (ctx == NULL)
8659 return 0;
8661 code = OMP_CLAUSE_SHARED;
8663 else if (flags & GOVD_PRIVATE)
8664 code = OMP_CLAUSE_PRIVATE;
8665 else if (flags & GOVD_FIRSTPRIVATE)
8667 code = OMP_CLAUSE_FIRSTPRIVATE;
8668 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8669 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8670 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8672 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8673 "%<target%> construct", decl);
8674 return 0;
8677 else if (flags & GOVD_LASTPRIVATE)
8678 code = OMP_CLAUSE_LASTPRIVATE;
8679 else if (flags & GOVD_ALIGNED)
8680 return 0;
8681 else
8682 gcc_unreachable ();
8684 if (((flags & GOVD_LASTPRIVATE)
8685 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8686 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8687 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8689 tree chain = *list_p;
8690 clause = build_omp_clause (input_location, code);
8691 OMP_CLAUSE_DECL (clause) = decl;
8692 OMP_CLAUSE_CHAIN (clause) = chain;
8693 if (private_debug)
8694 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8695 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8696 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8697 else if (code == OMP_CLAUSE_SHARED
8698 && (flags & GOVD_WRITTEN) == 0
8699 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8700 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8701 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8702 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8703 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8705 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8706 OMP_CLAUSE_DECL (nc) = decl;
8707 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8708 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8709 OMP_CLAUSE_DECL (clause)
8710 = build_simple_mem_ref_loc (input_location, decl);
8711 OMP_CLAUSE_DECL (clause)
8712 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8713 build_int_cst (build_pointer_type (char_type_node), 0));
8714 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8715 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8716 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8717 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8718 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8719 OMP_CLAUSE_CHAIN (nc) = chain;
8720 OMP_CLAUSE_CHAIN (clause) = nc;
8721 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8722 gimplify_omp_ctxp = ctx->outer_context;
8723 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8724 pre_p, NULL, is_gimple_val, fb_rvalue);
8725 gimplify_omp_ctxp = ctx;
8727 else if (code == OMP_CLAUSE_MAP)
8729 int kind;
8730 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8731 switch (flags & (GOVD_MAP_TO_ONLY
8732 | GOVD_MAP_FORCE
8733 | GOVD_MAP_FORCE_PRESENT))
8735 case 0:
8736 kind = GOMP_MAP_TOFROM;
8737 break;
8738 case GOVD_MAP_FORCE:
8739 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8740 break;
8741 case GOVD_MAP_TO_ONLY:
8742 kind = GOMP_MAP_TO;
8743 break;
8744 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8745 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8746 break;
8747 case GOVD_MAP_FORCE_PRESENT:
8748 kind = GOMP_MAP_FORCE_PRESENT;
8749 break;
8750 default:
8751 gcc_unreachable ();
8753 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8754 if (DECL_SIZE (decl)
8755 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8757 tree decl2 = DECL_VALUE_EXPR (decl);
8758 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8759 decl2 = TREE_OPERAND (decl2, 0);
8760 gcc_assert (DECL_P (decl2));
8761 tree mem = build_simple_mem_ref (decl2);
8762 OMP_CLAUSE_DECL (clause) = mem;
8763 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8764 if (gimplify_omp_ctxp->outer_context)
8766 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8767 omp_notice_variable (ctx, decl2, true);
8768 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8770 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8771 OMP_CLAUSE_MAP);
8772 OMP_CLAUSE_DECL (nc) = decl;
8773 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8774 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8775 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8776 else
8777 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8778 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8779 OMP_CLAUSE_CHAIN (clause) = nc;
8781 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8782 && lang_hooks.decls.omp_privatize_by_reference (decl))
8784 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8785 OMP_CLAUSE_SIZE (clause)
8786 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8787 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8788 gimplify_omp_ctxp = ctx->outer_context;
8789 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8790 pre_p, NULL, is_gimple_val, fb_rvalue);
8791 gimplify_omp_ctxp = ctx;
8792 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8793 OMP_CLAUSE_MAP);
8794 OMP_CLAUSE_DECL (nc) = decl;
8795 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8796 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8797 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8798 OMP_CLAUSE_CHAIN (clause) = nc;
8800 else
8801 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8803 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8805 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8806 OMP_CLAUSE_DECL (nc) = decl;
8807 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8808 OMP_CLAUSE_CHAIN (nc) = chain;
8809 OMP_CLAUSE_CHAIN (clause) = nc;
8810 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8811 gimplify_omp_ctxp = ctx->outer_context;
8812 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8813 gimplify_omp_ctxp = ctx;
8815 *list_p = clause;
8816 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8817 gimplify_omp_ctxp = ctx->outer_context;
8818 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8819 if (gimplify_omp_ctxp)
8820 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8821 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8822 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8823 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8824 true);
8825 gimplify_omp_ctxp = ctx;
8826 return 0;
8829 static void
8830 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8831 enum tree_code code)
8833 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8834 tree c, decl;
8836 if (body)
8838 struct gimplify_omp_ctx *octx;
8839 for (octx = ctx; octx; octx = octx->outer_context)
8840 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8841 break;
8842 if (octx)
8844 struct walk_stmt_info wi;
8845 memset (&wi, 0, sizeof (wi));
8846 walk_gimple_seq (body, omp_find_stores_stmt,
8847 omp_find_stores_op, &wi);
8850 while ((c = *list_p) != NULL)
8852 splay_tree_node n;
8853 bool remove = false;
8855 switch (OMP_CLAUSE_CODE (c))
8857 case OMP_CLAUSE_FIRSTPRIVATE:
8858 if ((ctx->region_type & ORT_TARGET)
8859 && (ctx->region_type & ORT_ACC) == 0
8860 && TYPE_ATOMIC (strip_array_types
8861 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8863 error_at (OMP_CLAUSE_LOCATION (c),
8864 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8865 "%<target%> construct", OMP_CLAUSE_DECL (c));
8866 remove = true;
8867 break;
8869 /* FALLTHRU */
8870 case OMP_CLAUSE_PRIVATE:
8871 case OMP_CLAUSE_SHARED:
8872 case OMP_CLAUSE_LINEAR:
8873 decl = OMP_CLAUSE_DECL (c);
8874 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8875 remove = !(n->value & GOVD_SEEN);
8876 if (! remove)
8878 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8879 if ((n->value & GOVD_DEBUG_PRIVATE)
8880 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8882 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8883 || ((n->value & GOVD_DATA_SHARE_CLASS)
8884 == GOVD_SHARED));
8885 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8886 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8888 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8889 && (n->value & GOVD_WRITTEN) == 0
8890 && DECL_P (decl)
8891 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8892 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8893 else if (DECL_P (decl)
8894 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8895 && (n->value & GOVD_WRITTEN) != 0)
8896 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8897 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8898 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8899 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8901 break;
8903 case OMP_CLAUSE_LASTPRIVATE:
8904 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8905 accurately reflect the presence of a FIRSTPRIVATE clause. */
8906 decl = OMP_CLAUSE_DECL (c);
8907 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8908 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8909 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8910 if (code == OMP_DISTRIBUTE
8911 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8913 remove = true;
8914 error_at (OMP_CLAUSE_LOCATION (c),
8915 "same variable used in %<firstprivate%> and "
8916 "%<lastprivate%> clauses on %<distribute%> "
8917 "construct");
8919 if (!remove
8920 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8921 && DECL_P (decl)
8922 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8923 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8924 break;
8926 case OMP_CLAUSE_ALIGNED:
8927 decl = OMP_CLAUSE_DECL (c);
8928 if (!is_global_var (decl))
8930 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8931 remove = n == NULL || !(n->value & GOVD_SEEN);
8932 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8934 struct gimplify_omp_ctx *octx;
8935 if (n != NULL
8936 && (n->value & (GOVD_DATA_SHARE_CLASS
8937 & ~GOVD_FIRSTPRIVATE)))
8938 remove = true;
8939 else
8940 for (octx = ctx->outer_context; octx;
8941 octx = octx->outer_context)
8943 n = splay_tree_lookup (octx->variables,
8944 (splay_tree_key) decl);
8945 if (n == NULL)
8946 continue;
8947 if (n->value & GOVD_LOCAL)
8948 break;
8949 /* We have to avoid assigning a shared variable
8950 to itself when trying to add
8951 __builtin_assume_aligned. */
8952 if (n->value & GOVD_SHARED)
8954 remove = true;
8955 break;
8960 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8962 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8963 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8964 remove = true;
8966 break;
8968 case OMP_CLAUSE_MAP:
8969 if (code == OMP_TARGET_EXIT_DATA
8970 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8972 remove = true;
8973 break;
8975 decl = OMP_CLAUSE_DECL (c);
8976 /* Data clauses associated with acc parallel reductions must be
8977 compatible with present_or_copy. Warn and adjust the clause
8978 if that is not the case. */
8979 if (ctx->region_type == ORT_ACC_PARALLEL)
8981 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8982 n = NULL;
8984 if (DECL_P (t))
8985 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8987 if (n && (n->value & GOVD_REDUCTION))
8989 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8991 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8992 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8993 && kind != GOMP_MAP_FORCE_PRESENT
8994 && kind != GOMP_MAP_POINTER)
8996 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8997 "incompatible data clause with reduction "
8998 "on %qE; promoting to present_or_copy",
8999 DECL_NAME (t));
9000 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9004 if (!DECL_P (decl))
9006 if ((ctx->region_type & ORT_TARGET) != 0
9007 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9009 if (TREE_CODE (decl) == INDIRECT_REF
9010 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9011 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9012 == REFERENCE_TYPE))
9013 decl = TREE_OPERAND (decl, 0);
9014 if (TREE_CODE (decl) == COMPONENT_REF)
9016 while (TREE_CODE (decl) == COMPONENT_REF)
9017 decl = TREE_OPERAND (decl, 0);
9018 if (DECL_P (decl))
9020 n = splay_tree_lookup (ctx->variables,
9021 (splay_tree_key) decl);
9022 if (!(n->value & GOVD_SEEN))
9023 remove = true;
9027 break;
9029 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9030 if ((ctx->region_type & ORT_TARGET) != 0
9031 && !(n->value & GOVD_SEEN)
9032 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9033 && (!is_global_var (decl)
9034 || !lookup_attribute ("omp declare target link",
9035 DECL_ATTRIBUTES (decl))))
9037 remove = true;
9038 /* For struct element mapping, if struct is never referenced
9039 in target block and none of the mapping has always modifier,
9040 remove all the struct element mappings, which immediately
9041 follow the GOMP_MAP_STRUCT map clause. */
9042 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9044 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9045 while (cnt--)
9046 OMP_CLAUSE_CHAIN (c)
9047 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9050 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9051 && code == OMP_TARGET_EXIT_DATA)
9052 remove = true;
9053 else if (DECL_SIZE (decl)
9054 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9055 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9056 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9057 && (OMP_CLAUSE_MAP_KIND (c)
9058 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9060 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9061 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9062 INTEGER_CST. */
9063 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9065 tree decl2 = DECL_VALUE_EXPR (decl);
9066 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9067 decl2 = TREE_OPERAND (decl2, 0);
9068 gcc_assert (DECL_P (decl2));
9069 tree mem = build_simple_mem_ref (decl2);
9070 OMP_CLAUSE_DECL (c) = mem;
9071 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9072 if (ctx->outer_context)
9074 omp_notice_variable (ctx->outer_context, decl2, true);
9075 omp_notice_variable (ctx->outer_context,
9076 OMP_CLAUSE_SIZE (c), true);
9078 if (((ctx->region_type & ORT_TARGET) != 0
9079 || !ctx->target_firstprivatize_array_bases)
9080 && ((n->value & GOVD_SEEN) == 0
9081 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9083 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9084 OMP_CLAUSE_MAP);
9085 OMP_CLAUSE_DECL (nc) = decl;
9086 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9087 if (ctx->target_firstprivatize_array_bases)
9088 OMP_CLAUSE_SET_MAP_KIND (nc,
9089 GOMP_MAP_FIRSTPRIVATE_POINTER);
9090 else
9091 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9092 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9093 OMP_CLAUSE_CHAIN (c) = nc;
9094 c = nc;
9097 else
9099 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9100 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9101 gcc_assert ((n->value & GOVD_SEEN) == 0
9102 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9103 == 0));
9105 break;
9107 case OMP_CLAUSE_TO:
9108 case OMP_CLAUSE_FROM:
9109 case OMP_CLAUSE__CACHE_:
9110 decl = OMP_CLAUSE_DECL (c);
9111 if (!DECL_P (decl))
9112 break;
9113 if (DECL_SIZE (decl)
9114 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9116 tree decl2 = DECL_VALUE_EXPR (decl);
9117 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9118 decl2 = TREE_OPERAND (decl2, 0);
9119 gcc_assert (DECL_P (decl2));
9120 tree mem = build_simple_mem_ref (decl2);
9121 OMP_CLAUSE_DECL (c) = mem;
9122 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9123 if (ctx->outer_context)
9125 omp_notice_variable (ctx->outer_context, decl2, true);
9126 omp_notice_variable (ctx->outer_context,
9127 OMP_CLAUSE_SIZE (c), true);
9130 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9131 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9132 break;
9134 case OMP_CLAUSE_REDUCTION:
9135 decl = OMP_CLAUSE_DECL (c);
9136 /* OpenACC reductions need a present_or_copy data clause.
9137 Add one if necessary. Error is the reduction is private. */
9138 if (ctx->region_type == ORT_ACC_PARALLEL)
9140 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9141 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9142 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9143 "reduction on %qE", DECL_NAME (decl));
9144 else if ((n->value & GOVD_MAP) == 0)
9146 tree next = OMP_CLAUSE_CHAIN (c);
9147 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9148 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9149 OMP_CLAUSE_DECL (nc) = decl;
9150 OMP_CLAUSE_CHAIN (c) = nc;
9151 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9152 while (1)
9154 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9155 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9156 break;
9157 nc = OMP_CLAUSE_CHAIN (nc);
9159 OMP_CLAUSE_CHAIN (nc) = next;
9160 n->value |= GOVD_MAP;
9163 if (DECL_P (decl)
9164 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9165 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9166 break;
9167 case OMP_CLAUSE_COPYIN:
9168 case OMP_CLAUSE_COPYPRIVATE:
9169 case OMP_CLAUSE_IF:
9170 case OMP_CLAUSE_NUM_THREADS:
9171 case OMP_CLAUSE_NUM_TEAMS:
9172 case OMP_CLAUSE_THREAD_LIMIT:
9173 case OMP_CLAUSE_DIST_SCHEDULE:
9174 case OMP_CLAUSE_DEVICE:
9175 case OMP_CLAUSE_SCHEDULE:
9176 case OMP_CLAUSE_NOWAIT:
9177 case OMP_CLAUSE_ORDERED:
9178 case OMP_CLAUSE_DEFAULT:
9179 case OMP_CLAUSE_UNTIED:
9180 case OMP_CLAUSE_COLLAPSE:
9181 case OMP_CLAUSE_FINAL:
9182 case OMP_CLAUSE_MERGEABLE:
9183 case OMP_CLAUSE_PROC_BIND:
9184 case OMP_CLAUSE_SAFELEN:
9185 case OMP_CLAUSE_SIMDLEN:
9186 case OMP_CLAUSE_DEPEND:
9187 case OMP_CLAUSE_PRIORITY:
9188 case OMP_CLAUSE_GRAINSIZE:
9189 case OMP_CLAUSE_NUM_TASKS:
9190 case OMP_CLAUSE_NOGROUP:
9191 case OMP_CLAUSE_THREADS:
9192 case OMP_CLAUSE_SIMD:
9193 case OMP_CLAUSE_HINT:
9194 case OMP_CLAUSE_DEFAULTMAP:
9195 case OMP_CLAUSE_USE_DEVICE_PTR:
9196 case OMP_CLAUSE_IS_DEVICE_PTR:
9197 case OMP_CLAUSE_ASYNC:
9198 case OMP_CLAUSE_WAIT:
9199 case OMP_CLAUSE_INDEPENDENT:
9200 case OMP_CLAUSE_NUM_GANGS:
9201 case OMP_CLAUSE_NUM_WORKERS:
9202 case OMP_CLAUSE_VECTOR_LENGTH:
9203 case OMP_CLAUSE_GANG:
9204 case OMP_CLAUSE_WORKER:
9205 case OMP_CLAUSE_VECTOR:
9206 case OMP_CLAUSE_AUTO:
9207 case OMP_CLAUSE_SEQ:
9208 case OMP_CLAUSE_TILE:
9209 break;
9211 default:
9212 gcc_unreachable ();
9215 if (remove)
9216 *list_p = OMP_CLAUSE_CHAIN (c);
9217 else
9218 list_p = &OMP_CLAUSE_CHAIN (c);
9221 /* Add in any implicit data sharing. */
9222 struct gimplify_adjust_omp_clauses_data data;
9223 data.list_p = list_p;
9224 data.pre_p = pre_p;
9225 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9227 gimplify_omp_ctxp = ctx->outer_context;
9228 delete_omp_context (ctx);
9231 /* Gimplify OACC_CACHE. */
9233 static void
9234 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9236 tree expr = *expr_p;
9238 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9239 OACC_CACHE);
9240 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9241 OACC_CACHE);
9243 /* TODO: Do something sensible with this information. */
9245 *expr_p = NULL_TREE;
9248 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9249 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9250 kind. The entry kind will replace the one in CLAUSE, while the exit
9251 kind will be used in a new omp_clause and returned to the caller. */
9253 static tree
9254 gimplify_oacc_declare_1 (tree clause)
9256 HOST_WIDE_INT kind, new_op;
9257 bool ret = false;
9258 tree c = NULL;
9260 kind = OMP_CLAUSE_MAP_KIND (clause);
9262 switch (kind)
9264 case GOMP_MAP_ALLOC:
9265 case GOMP_MAP_FORCE_ALLOC:
9266 case GOMP_MAP_FORCE_TO:
9267 new_op = GOMP_MAP_DELETE;
9268 ret = true;
9269 break;
9271 case GOMP_MAP_FORCE_FROM:
9272 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9273 new_op = GOMP_MAP_FORCE_FROM;
9274 ret = true;
9275 break;
9277 case GOMP_MAP_FORCE_TOFROM:
9278 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9279 new_op = GOMP_MAP_FORCE_FROM;
9280 ret = true;
9281 break;
9283 case GOMP_MAP_FROM:
9284 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9285 new_op = GOMP_MAP_FROM;
9286 ret = true;
9287 break;
9289 case GOMP_MAP_TOFROM:
9290 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9291 new_op = GOMP_MAP_FROM;
9292 ret = true;
9293 break;
9295 case GOMP_MAP_DEVICE_RESIDENT:
9296 case GOMP_MAP_FORCE_DEVICEPTR:
9297 case GOMP_MAP_FORCE_PRESENT:
9298 case GOMP_MAP_LINK:
9299 case GOMP_MAP_POINTER:
9300 case GOMP_MAP_TO:
9301 break;
9303 default:
9304 gcc_unreachable ();
9305 break;
9308 if (ret)
9310 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9311 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9312 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9315 return c;
9318 /* Gimplify OACC_DECLARE. */
9320 static void
9321 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9323 tree expr = *expr_p;
9324 gomp_target *stmt;
9325 tree clauses, t, decl;
9327 clauses = OACC_DECLARE_CLAUSES (expr);
9329 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9330 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9332 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9334 decl = OMP_CLAUSE_DECL (t);
9336 if (TREE_CODE (decl) == MEM_REF)
9337 decl = TREE_OPERAND (decl, 0);
9339 if (VAR_P (decl) && !is_oacc_declared (decl))
9341 tree attr = get_identifier ("oacc declare target");
9342 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9343 DECL_ATTRIBUTES (decl));
9346 if (VAR_P (decl)
9347 && !is_global_var (decl)
9348 && DECL_CONTEXT (decl) == current_function_decl)
9350 tree c = gimplify_oacc_declare_1 (t);
9351 if (c)
9353 if (oacc_declare_returns == NULL)
9354 oacc_declare_returns = new hash_map<tree, tree>;
9356 oacc_declare_returns->put (decl, c);
9360 if (gimplify_omp_ctxp)
9361 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9364 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9365 clauses);
9367 gimplify_seq_add_stmt (pre_p, stmt);
9369 *expr_p = NULL_TREE;
9372 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9373 gimplification of the body, as well as scanning the body for used
9374 variables. We need to do this scan now, because variable-sized
9375 decls will be decomposed during gimplification. */
9377 static void
9378 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9380 tree expr = *expr_p;
9381 gimple *g;
9382 gimple_seq body = NULL;
9384 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9385 OMP_PARALLEL_COMBINED (expr)
9386 ? ORT_COMBINED_PARALLEL
9387 : ORT_PARALLEL, OMP_PARALLEL);
9389 push_gimplify_context ();
9391 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9392 if (gimple_code (g) == GIMPLE_BIND)
9393 pop_gimplify_context (g);
9394 else
9395 pop_gimplify_context (NULL);
9397 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9398 OMP_PARALLEL);
9400 g = gimple_build_omp_parallel (body,
9401 OMP_PARALLEL_CLAUSES (expr),
9402 NULL_TREE, NULL_TREE);
9403 if (OMP_PARALLEL_COMBINED (expr))
9404 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9405 gimplify_seq_add_stmt (pre_p, g);
9406 *expr_p = NULL_TREE;
9409 /* Gimplify the contents of an OMP_TASK statement. This involves
9410 gimplification of the body, as well as scanning the body for used
9411 variables. We need to do this scan now, because variable-sized
9412 decls will be decomposed during gimplification. */
9414 static void
9415 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9417 tree expr = *expr_p;
9418 gimple *g;
9419 gimple_seq body = NULL;
9421 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9422 omp_find_clause (OMP_TASK_CLAUSES (expr),
9423 OMP_CLAUSE_UNTIED)
9424 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9426 push_gimplify_context ();
9428 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9429 if (gimple_code (g) == GIMPLE_BIND)
9430 pop_gimplify_context (g);
9431 else
9432 pop_gimplify_context (NULL);
9434 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9435 OMP_TASK);
9437 g = gimple_build_omp_task (body,
9438 OMP_TASK_CLAUSES (expr),
9439 NULL_TREE, NULL_TREE,
9440 NULL_TREE, NULL_TREE, NULL_TREE);
9441 gimplify_seq_add_stmt (pre_p, g);
9442 *expr_p = NULL_TREE;
9445 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9446 with non-NULL OMP_FOR_INIT. */
9448 static tree
9449 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9451 *walk_subtrees = 0;
9452 switch (TREE_CODE (*tp))
9454 case OMP_FOR:
9455 *walk_subtrees = 1;
9456 /* FALLTHRU */
9457 case OMP_SIMD:
9458 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9459 return *tp;
9460 break;
9461 case BIND_EXPR:
9462 case STATEMENT_LIST:
9463 case OMP_PARALLEL:
9464 *walk_subtrees = 1;
9465 break;
9466 default:
9467 break;
9469 return NULL_TREE;
9472 /* Gimplify the gross structure of an OMP_FOR statement. */
9474 static enum gimplify_status
9475 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9477 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9478 enum gimplify_status ret = GS_ALL_DONE;
9479 enum gimplify_status tret;
9480 gomp_for *gfor;
9481 gimple_seq for_body, for_pre_body;
9482 int i;
9483 bitmap has_decl_expr = NULL;
9484 enum omp_region_type ort = ORT_WORKSHARE;
9486 orig_for_stmt = for_stmt = *expr_p;
9488 switch (TREE_CODE (for_stmt))
9490 case OMP_FOR:
9491 case OMP_DISTRIBUTE:
9492 break;
9493 case OACC_LOOP:
9494 ort = ORT_ACC;
9495 break;
9496 case OMP_TASKLOOP:
9497 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9498 ort = ORT_UNTIED_TASK;
9499 else
9500 ort = ORT_TASK;
9501 break;
9502 case OMP_SIMD:
9503 ort = ORT_SIMD;
9504 break;
9505 default:
9506 gcc_unreachable ();
9509 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9510 clause for the IV. */
9511 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9513 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9514 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9515 decl = TREE_OPERAND (t, 0);
9516 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9517 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9518 && OMP_CLAUSE_DECL (c) == decl)
9520 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9521 break;
9525 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9527 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9528 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9529 find_combined_omp_for, NULL, NULL);
9530 if (inner_for_stmt == NULL_TREE)
9532 gcc_assert (seen_error ());
9533 *expr_p = NULL_TREE;
9534 return GS_ERROR;
9538 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9539 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9540 TREE_CODE (for_stmt));
9542 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9543 gimplify_omp_ctxp->distribute = true;
9545 /* Handle OMP_FOR_INIT. */
9546 for_pre_body = NULL;
9547 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9549 has_decl_expr = BITMAP_ALLOC (NULL);
9550 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9551 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9552 == VAR_DECL)
9554 t = OMP_FOR_PRE_BODY (for_stmt);
9555 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9557 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9559 tree_stmt_iterator si;
9560 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9561 tsi_next (&si))
9563 t = tsi_stmt (si);
9564 if (TREE_CODE (t) == DECL_EXPR
9565 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9566 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9570 if (OMP_FOR_PRE_BODY (for_stmt))
9572 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9573 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9574 else
9576 struct gimplify_omp_ctx ctx;
9577 memset (&ctx, 0, sizeof (ctx));
9578 ctx.region_type = ORT_NONE;
9579 gimplify_omp_ctxp = &ctx;
9580 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9581 gimplify_omp_ctxp = NULL;
9584 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9586 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9587 for_stmt = inner_for_stmt;
9589 /* For taskloop, need to gimplify the start, end and step before the
9590 taskloop, outside of the taskloop omp context. */
9591 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9593 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9595 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9596 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9598 TREE_OPERAND (t, 1)
9599 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9600 pre_p, NULL, false);
9601 tree c = build_omp_clause (input_location,
9602 OMP_CLAUSE_FIRSTPRIVATE);
9603 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9604 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9605 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9608 /* Handle OMP_FOR_COND. */
9609 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9610 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9612 TREE_OPERAND (t, 1)
9613 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9614 gimple_seq_empty_p (for_pre_body)
9615 ? pre_p : &for_pre_body, NULL,
9616 false);
9617 tree c = build_omp_clause (input_location,
9618 OMP_CLAUSE_FIRSTPRIVATE);
9619 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9620 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9621 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9624 /* Handle OMP_FOR_INCR. */
9625 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9626 if (TREE_CODE (t) == MODIFY_EXPR)
9628 decl = TREE_OPERAND (t, 0);
9629 t = TREE_OPERAND (t, 1);
9630 tree *tp = &TREE_OPERAND (t, 1);
9631 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9632 tp = &TREE_OPERAND (t, 0);
9634 if (!is_gimple_constant (*tp))
9636 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9637 ? pre_p : &for_pre_body;
9638 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9639 tree c = build_omp_clause (input_location,
9640 OMP_CLAUSE_FIRSTPRIVATE);
9641 OMP_CLAUSE_DECL (c) = *tp;
9642 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9643 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9648 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9649 OMP_TASKLOOP);
9652 if (orig_for_stmt != for_stmt)
9653 gimplify_omp_ctxp->combined_loop = true;
9655 for_body = NULL;
9656 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9657 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9658 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9659 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9661 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9662 bool is_doacross = false;
9663 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9665 is_doacross = true;
9666 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9667 (OMP_FOR_INIT (for_stmt))
9668 * 2);
9670 int collapse = 1, tile = 0;
9671 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9672 if (c)
9673 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9674 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9675 if (c)
9676 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9677 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9679 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9680 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9681 decl = TREE_OPERAND (t, 0);
9682 gcc_assert (DECL_P (decl));
9683 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9684 || POINTER_TYPE_P (TREE_TYPE (decl)));
9685 if (is_doacross)
9687 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9688 gimplify_omp_ctxp->loop_iter_var.quick_push
9689 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9690 else
9691 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9692 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9695 /* Make sure the iteration variable is private. */
9696 tree c = NULL_TREE;
9697 tree c2 = NULL_TREE;
9698 if (orig_for_stmt != for_stmt)
9699 /* Do this only on innermost construct for combined ones. */;
9700 else if (ort == ORT_SIMD)
9702 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9703 (splay_tree_key) decl);
9704 omp_is_private (gimplify_omp_ctxp, decl,
9705 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9706 != 1));
9707 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9708 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9709 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9711 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9712 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9713 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9714 if (has_decl_expr
9715 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9717 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9718 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9720 struct gimplify_omp_ctx *outer
9721 = gimplify_omp_ctxp->outer_context;
9722 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9724 if (outer->region_type == ORT_WORKSHARE
9725 && outer->combined_loop)
9727 n = splay_tree_lookup (outer->variables,
9728 (splay_tree_key)decl);
9729 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9731 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9732 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9734 else
9736 struct gimplify_omp_ctx *octx = outer->outer_context;
9737 if (octx
9738 && octx->region_type == ORT_COMBINED_PARALLEL
9739 && octx->outer_context
9740 && (octx->outer_context->region_type
9741 == ORT_WORKSHARE)
9742 && octx->outer_context->combined_loop)
9744 octx = octx->outer_context;
9745 n = splay_tree_lookup (octx->variables,
9746 (splay_tree_key)decl);
9747 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9749 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9750 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9757 OMP_CLAUSE_DECL (c) = decl;
9758 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9759 OMP_FOR_CLAUSES (for_stmt) = c;
9760 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9761 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9763 if (outer->region_type == ORT_WORKSHARE
9764 && outer->combined_loop)
9766 if (outer->outer_context
9767 && (outer->outer_context->region_type
9768 == ORT_COMBINED_PARALLEL))
9769 outer = outer->outer_context;
9770 else if (omp_check_private (outer, decl, false))
9771 outer = NULL;
9773 else if (((outer->region_type & ORT_TASK) != 0)
9774 && outer->combined_loop
9775 && !omp_check_private (gimplify_omp_ctxp,
9776 decl, false))
9778 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9780 omp_notice_variable (outer, decl, true);
9781 outer = NULL;
9783 if (outer)
9785 n = splay_tree_lookup (outer->variables,
9786 (splay_tree_key)decl);
9787 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9789 omp_add_variable (outer, decl,
9790 GOVD_LASTPRIVATE | GOVD_SEEN);
9791 if (outer->region_type == ORT_COMBINED_PARALLEL
9792 && outer->outer_context
9793 && (outer->outer_context->region_type
9794 == ORT_WORKSHARE)
9795 && outer->outer_context->combined_loop)
9797 outer = outer->outer_context;
9798 n = splay_tree_lookup (outer->variables,
9799 (splay_tree_key)decl);
9800 if (omp_check_private (outer, decl, false))
9801 outer = NULL;
9802 else if (n == NULL
9803 || ((n->value & GOVD_DATA_SHARE_CLASS)
9804 == 0))
9805 omp_add_variable (outer, decl,
9806 GOVD_LASTPRIVATE
9807 | GOVD_SEEN);
9808 else
9809 outer = NULL;
9811 if (outer && outer->outer_context
9812 && (outer->outer_context->region_type
9813 == ORT_COMBINED_TEAMS))
9815 outer = outer->outer_context;
9816 n = splay_tree_lookup (outer->variables,
9817 (splay_tree_key)decl);
9818 if (n == NULL
9819 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9820 omp_add_variable (outer, decl,
9821 GOVD_SHARED | GOVD_SEEN);
9822 else
9823 outer = NULL;
9825 if (outer && outer->outer_context)
9826 omp_notice_variable (outer->outer_context, decl,
9827 true);
9832 else
9834 bool lastprivate
9835 = (!has_decl_expr
9836 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9837 struct gimplify_omp_ctx *outer
9838 = gimplify_omp_ctxp->outer_context;
9839 if (outer && lastprivate)
9841 if (outer->region_type == ORT_WORKSHARE
9842 && outer->combined_loop)
9844 n = splay_tree_lookup (outer->variables,
9845 (splay_tree_key)decl);
9846 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9848 lastprivate = false;
9849 outer = NULL;
9851 else if (outer->outer_context
9852 && (outer->outer_context->region_type
9853 == ORT_COMBINED_PARALLEL))
9854 outer = outer->outer_context;
9855 else if (omp_check_private (outer, decl, false))
9856 outer = NULL;
9858 else if (((outer->region_type & ORT_TASK) != 0)
9859 && outer->combined_loop
9860 && !omp_check_private (gimplify_omp_ctxp,
9861 decl, false))
9863 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9865 omp_notice_variable (outer, decl, true);
9866 outer = NULL;
9868 if (outer)
9870 n = splay_tree_lookup (outer->variables,
9871 (splay_tree_key)decl);
9872 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9874 omp_add_variable (outer, decl,
9875 GOVD_LASTPRIVATE | GOVD_SEEN);
9876 if (outer->region_type == ORT_COMBINED_PARALLEL
9877 && outer->outer_context
9878 && (outer->outer_context->region_type
9879 == ORT_WORKSHARE)
9880 && outer->outer_context->combined_loop)
9882 outer = outer->outer_context;
9883 n = splay_tree_lookup (outer->variables,
9884 (splay_tree_key)decl);
9885 if (omp_check_private (outer, decl, false))
9886 outer = NULL;
9887 else if (n == NULL
9888 || ((n->value & GOVD_DATA_SHARE_CLASS)
9889 == 0))
9890 omp_add_variable (outer, decl,
9891 GOVD_LASTPRIVATE
9892 | GOVD_SEEN);
9893 else
9894 outer = NULL;
9896 if (outer && outer->outer_context
9897 && (outer->outer_context->region_type
9898 == ORT_COMBINED_TEAMS))
9900 outer = outer->outer_context;
9901 n = splay_tree_lookup (outer->variables,
9902 (splay_tree_key)decl);
9903 if (n == NULL
9904 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9905 omp_add_variable (outer, decl,
9906 GOVD_SHARED | GOVD_SEEN);
9907 else
9908 outer = NULL;
9910 if (outer && outer->outer_context)
9911 omp_notice_variable (outer->outer_context, decl,
9912 true);
9917 c = build_omp_clause (input_location,
9918 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9919 : OMP_CLAUSE_PRIVATE);
9920 OMP_CLAUSE_DECL (c) = decl;
9921 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9922 OMP_FOR_CLAUSES (for_stmt) = c;
9923 omp_add_variable (gimplify_omp_ctxp, decl,
9924 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9925 | GOVD_EXPLICIT | GOVD_SEEN);
9926 c = NULL_TREE;
9929 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9930 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9931 else
9932 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9934 /* If DECL is not a gimple register, create a temporary variable to act
9935 as an iteration counter. This is valid, since DECL cannot be
9936 modified in the body of the loop. Similarly for any iteration vars
9937 in simd with collapse > 1 where the iterator vars must be
9938 lastprivate. */
9939 if (orig_for_stmt != for_stmt)
9940 var = decl;
9941 else if (!is_gimple_reg (decl)
9942 || (ort == ORT_SIMD
9943 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9945 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9946 /* Make sure omp_add_variable is not called on it prematurely.
9947 We call it ourselves a few lines later. */
9948 gimplify_omp_ctxp = NULL;
9949 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9950 gimplify_omp_ctxp = ctx;
9951 TREE_OPERAND (t, 0) = var;
9953 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9955 if (ort == ORT_SIMD
9956 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9958 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9959 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9960 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9961 OMP_CLAUSE_DECL (c2) = var;
9962 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9963 OMP_FOR_CLAUSES (for_stmt) = c2;
9964 omp_add_variable (gimplify_omp_ctxp, var,
9965 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9966 if (c == NULL_TREE)
9968 c = c2;
9969 c2 = NULL_TREE;
9972 else
9973 omp_add_variable (gimplify_omp_ctxp, var,
9974 GOVD_PRIVATE | GOVD_SEEN);
9976 else
9977 var = decl;
9979 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9980 is_gimple_val, fb_rvalue, false);
9981 ret = MIN (ret, tret);
9982 if (ret == GS_ERROR)
9983 return ret;
9985 /* Handle OMP_FOR_COND. */
9986 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9987 gcc_assert (COMPARISON_CLASS_P (t));
9988 gcc_assert (TREE_OPERAND (t, 0) == decl);
9990 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9991 is_gimple_val, fb_rvalue, false);
9992 ret = MIN (ret, tret);
9994 /* Handle OMP_FOR_INCR. */
9995 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9996 switch (TREE_CODE (t))
9998 case PREINCREMENT_EXPR:
9999 case POSTINCREMENT_EXPR:
10001 tree decl = TREE_OPERAND (t, 0);
10002 /* c_omp_for_incr_canonicalize_ptr() should have been
10003 called to massage things appropriately. */
10004 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10006 if (orig_for_stmt != for_stmt)
10007 break;
10008 t = build_int_cst (TREE_TYPE (decl), 1);
10009 if (c)
10010 OMP_CLAUSE_LINEAR_STEP (c) = t;
10011 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10012 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10013 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10014 break;
10017 case PREDECREMENT_EXPR:
10018 case POSTDECREMENT_EXPR:
10019 /* c_omp_for_incr_canonicalize_ptr() should have been
10020 called to massage things appropriately. */
10021 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10022 if (orig_for_stmt != for_stmt)
10023 break;
10024 t = build_int_cst (TREE_TYPE (decl), -1);
10025 if (c)
10026 OMP_CLAUSE_LINEAR_STEP (c) = t;
10027 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10028 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10029 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10030 break;
10032 case MODIFY_EXPR:
10033 gcc_assert (TREE_OPERAND (t, 0) == decl);
10034 TREE_OPERAND (t, 0) = var;
10036 t = TREE_OPERAND (t, 1);
10037 switch (TREE_CODE (t))
10039 case PLUS_EXPR:
10040 if (TREE_OPERAND (t, 1) == decl)
10042 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10043 TREE_OPERAND (t, 0) = var;
10044 break;
10047 /* Fallthru. */
10048 case MINUS_EXPR:
10049 case POINTER_PLUS_EXPR:
10050 gcc_assert (TREE_OPERAND (t, 0) == decl);
10051 TREE_OPERAND (t, 0) = var;
10052 break;
10053 default:
10054 gcc_unreachable ();
10057 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10058 is_gimple_val, fb_rvalue, false);
10059 ret = MIN (ret, tret);
10060 if (c)
10062 tree step = TREE_OPERAND (t, 1);
10063 tree stept = TREE_TYPE (decl);
10064 if (POINTER_TYPE_P (stept))
10065 stept = sizetype;
10066 step = fold_convert (stept, step);
10067 if (TREE_CODE (t) == MINUS_EXPR)
10068 step = fold_build1 (NEGATE_EXPR, stept, step);
10069 OMP_CLAUSE_LINEAR_STEP (c) = step;
10070 if (step != TREE_OPERAND (t, 1))
10072 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10073 &for_pre_body, NULL,
10074 is_gimple_val, fb_rvalue, false);
10075 ret = MIN (ret, tret);
10078 break;
10080 default:
10081 gcc_unreachable ();
10084 if (c2)
10086 gcc_assert (c);
10087 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10090 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10092 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10093 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10094 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10095 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10096 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10097 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10098 && OMP_CLAUSE_DECL (c) == decl)
10100 if (is_doacross && (collapse == 1 || i >= collapse))
10101 t = var;
10102 else
10104 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10105 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10106 gcc_assert (TREE_OPERAND (t, 0) == var);
10107 t = TREE_OPERAND (t, 1);
10108 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10109 || TREE_CODE (t) == MINUS_EXPR
10110 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10111 gcc_assert (TREE_OPERAND (t, 0) == var);
10112 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10113 is_doacross ? var : decl,
10114 TREE_OPERAND (t, 1));
10116 gimple_seq *seq;
10117 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10118 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10119 else
10120 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10121 gimplify_assign (decl, t, seq);
10126 BITMAP_FREE (has_decl_expr);
10128 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10130 push_gimplify_context ();
10131 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10133 OMP_FOR_BODY (orig_for_stmt)
10134 = build3 (BIND_EXPR, void_type_node, NULL,
10135 OMP_FOR_BODY (orig_for_stmt), NULL);
10136 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10140 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10141 &for_body);
10143 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10145 if (gimple_code (g) == GIMPLE_BIND)
10146 pop_gimplify_context (g);
10147 else
10148 pop_gimplify_context (NULL);
10151 if (orig_for_stmt != for_stmt)
10152 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10154 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10155 decl = TREE_OPERAND (t, 0);
10156 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10157 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10158 gimplify_omp_ctxp = ctx->outer_context;
10159 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10160 gimplify_omp_ctxp = ctx;
10161 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10162 TREE_OPERAND (t, 0) = var;
10163 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10164 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10165 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10168 gimplify_adjust_omp_clauses (pre_p, for_body,
10169 &OMP_FOR_CLAUSES (orig_for_stmt),
10170 TREE_CODE (orig_for_stmt));
10172 int kind;
10173 switch (TREE_CODE (orig_for_stmt))
10175 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10176 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10177 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10178 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10179 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10180 default:
10181 gcc_unreachable ();
10183 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10184 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10185 for_pre_body);
10186 if (orig_for_stmt != for_stmt)
10187 gimple_omp_for_set_combined_p (gfor, true);
10188 if (gimplify_omp_ctxp
10189 && (gimplify_omp_ctxp->combined_loop
10190 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10191 && gimplify_omp_ctxp->outer_context
10192 && gimplify_omp_ctxp->outer_context->combined_loop)))
10194 gimple_omp_for_set_combined_into_p (gfor, true);
10195 if (gimplify_omp_ctxp->combined_loop)
10196 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10197 else
10198 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10201 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10203 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10204 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10205 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10206 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10207 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10208 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10209 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10210 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10213 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10214 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10215 The outer taskloop stands for computing the number of iterations,
10216 counts for collapsed loops and holding taskloop specific clauses.
10217 The task construct stands for the effect of data sharing on the
10218 explicit task it creates and the inner taskloop stands for expansion
10219 of the static loop inside of the explicit task construct. */
10220 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10222 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10223 tree task_clauses = NULL_TREE;
10224 tree c = *gfor_clauses_ptr;
10225 tree *gtask_clauses_ptr = &task_clauses;
10226 tree outer_for_clauses = NULL_TREE;
10227 tree *gforo_clauses_ptr = &outer_for_clauses;
10228 for (; c; c = OMP_CLAUSE_CHAIN (c))
10229 switch (OMP_CLAUSE_CODE (c))
10231 /* These clauses are allowed on task, move them there. */
10232 case OMP_CLAUSE_SHARED:
10233 case OMP_CLAUSE_FIRSTPRIVATE:
10234 case OMP_CLAUSE_DEFAULT:
10235 case OMP_CLAUSE_IF:
10236 case OMP_CLAUSE_UNTIED:
10237 case OMP_CLAUSE_FINAL:
10238 case OMP_CLAUSE_MERGEABLE:
10239 case OMP_CLAUSE_PRIORITY:
10240 *gtask_clauses_ptr = c;
10241 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10242 break;
10243 case OMP_CLAUSE_PRIVATE:
10244 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10246 /* We want private on outer for and firstprivate
10247 on task. */
10248 *gtask_clauses_ptr
10249 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10250 OMP_CLAUSE_FIRSTPRIVATE);
10251 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10252 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10253 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10254 *gforo_clauses_ptr = c;
10255 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10257 else
10259 *gtask_clauses_ptr = c;
10260 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10262 break;
10263 /* These clauses go into outer taskloop clauses. */
10264 case OMP_CLAUSE_GRAINSIZE:
10265 case OMP_CLAUSE_NUM_TASKS:
10266 case OMP_CLAUSE_NOGROUP:
10267 *gforo_clauses_ptr = c;
10268 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10269 break;
10270 /* Taskloop clause we duplicate on both taskloops. */
10271 case OMP_CLAUSE_COLLAPSE:
10272 *gfor_clauses_ptr = c;
10273 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10274 *gforo_clauses_ptr = copy_node (c);
10275 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10276 break;
10277 /* For lastprivate, keep the clause on inner taskloop, and add
10278 a shared clause on task. If the same decl is also firstprivate,
10279 add also firstprivate clause on the inner taskloop. */
10280 case OMP_CLAUSE_LASTPRIVATE:
10281 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10283 /* For taskloop C++ lastprivate IVs, we want:
10284 1) private on outer taskloop
10285 2) firstprivate and shared on task
10286 3) lastprivate on inner taskloop */
10287 *gtask_clauses_ptr
10288 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10289 OMP_CLAUSE_FIRSTPRIVATE);
10290 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10291 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10292 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10293 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10294 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10295 OMP_CLAUSE_PRIVATE);
10296 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10297 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10298 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10299 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10301 *gfor_clauses_ptr = c;
10302 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10303 *gtask_clauses_ptr
10304 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10305 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10306 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10307 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10308 gtask_clauses_ptr
10309 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10310 break;
10311 default:
10312 gcc_unreachable ();
10314 *gfor_clauses_ptr = NULL_TREE;
10315 *gtask_clauses_ptr = NULL_TREE;
10316 *gforo_clauses_ptr = NULL_TREE;
10317 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10318 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10319 NULL_TREE, NULL_TREE, NULL_TREE);
10320 gimple_omp_task_set_taskloop_p (g, true);
10321 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10322 gomp_for *gforo
10323 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10324 gimple_omp_for_collapse (gfor),
10325 gimple_omp_for_pre_body (gfor));
10326 gimple_omp_for_set_pre_body (gfor, NULL);
10327 gimple_omp_for_set_combined_p (gforo, true);
10328 gimple_omp_for_set_combined_into_p (gfor, true);
10329 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10331 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10332 tree v = create_tmp_var (type);
10333 gimple_omp_for_set_index (gforo, i, v);
10334 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10335 gimple_omp_for_set_initial (gforo, i, t);
10336 gimple_omp_for_set_cond (gforo, i,
10337 gimple_omp_for_cond (gfor, i));
10338 t = unshare_expr (gimple_omp_for_final (gfor, i));
10339 gimple_omp_for_set_final (gforo, i, t);
10340 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10341 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10342 TREE_OPERAND (t, 0) = v;
10343 gimple_omp_for_set_incr (gforo, i, t);
10344 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10345 OMP_CLAUSE_DECL (t) = v;
10346 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10347 gimple_omp_for_set_clauses (gforo, t);
10349 gimplify_seq_add_stmt (pre_p, gforo);
10351 else
10352 gimplify_seq_add_stmt (pre_p, gfor);
10353 if (ret != GS_ALL_DONE)
10354 return GS_ERROR;
10355 *expr_p = NULL_TREE;
10356 return GS_ALL_DONE;
10359 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10360 of OMP_TARGET's body. */
10362 static tree
10363 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10365 *walk_subtrees = 0;
10366 switch (TREE_CODE (*tp))
10368 case OMP_TEAMS:
10369 return *tp;
10370 case BIND_EXPR:
10371 case STATEMENT_LIST:
10372 *walk_subtrees = 1;
10373 break;
10374 default:
10375 break;
10377 return NULL_TREE;
10380 /* Helper function of optimize_target_teams, determine if the expression
10381 can be computed safely before the target construct on the host. */
10383 static tree
10384 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10386 splay_tree_node n;
10388 if (TYPE_P (*tp))
10390 *walk_subtrees = 0;
10391 return NULL_TREE;
10393 switch (TREE_CODE (*tp))
10395 case VAR_DECL:
10396 case PARM_DECL:
10397 case RESULT_DECL:
10398 *walk_subtrees = 0;
10399 if (error_operand_p (*tp)
10400 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10401 || DECL_HAS_VALUE_EXPR_P (*tp)
10402 || DECL_THREAD_LOCAL_P (*tp)
10403 || TREE_SIDE_EFFECTS (*tp)
10404 || TREE_THIS_VOLATILE (*tp))
10405 return *tp;
10406 if (is_global_var (*tp)
10407 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10408 || lookup_attribute ("omp declare target link",
10409 DECL_ATTRIBUTES (*tp))))
10410 return *tp;
10411 if (VAR_P (*tp)
10412 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10413 && !is_global_var (*tp)
10414 && decl_function_context (*tp) == current_function_decl)
10415 return *tp;
10416 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10417 (splay_tree_key) *tp);
10418 if (n == NULL)
10420 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10421 return NULL_TREE;
10422 return *tp;
10424 else if (n->value & GOVD_LOCAL)
10425 return *tp;
10426 else if (n->value & GOVD_FIRSTPRIVATE)
10427 return NULL_TREE;
10428 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10429 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10430 return NULL_TREE;
10431 return *tp;
10432 case INTEGER_CST:
10433 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10434 return *tp;
10435 return NULL_TREE;
10436 case TARGET_EXPR:
10437 if (TARGET_EXPR_INITIAL (*tp)
10438 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10439 return *tp;
10440 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10441 walk_subtrees, NULL);
10442 /* Allow some reasonable subset of integral arithmetics. */
10443 case PLUS_EXPR:
10444 case MINUS_EXPR:
10445 case MULT_EXPR:
10446 case TRUNC_DIV_EXPR:
10447 case CEIL_DIV_EXPR:
10448 case FLOOR_DIV_EXPR:
10449 case ROUND_DIV_EXPR:
10450 case TRUNC_MOD_EXPR:
10451 case CEIL_MOD_EXPR:
10452 case FLOOR_MOD_EXPR:
10453 case ROUND_MOD_EXPR:
10454 case RDIV_EXPR:
10455 case EXACT_DIV_EXPR:
10456 case MIN_EXPR:
10457 case MAX_EXPR:
10458 case LSHIFT_EXPR:
10459 case RSHIFT_EXPR:
10460 case BIT_IOR_EXPR:
10461 case BIT_XOR_EXPR:
10462 case BIT_AND_EXPR:
10463 case NEGATE_EXPR:
10464 case ABS_EXPR:
10465 case BIT_NOT_EXPR:
10466 case NON_LVALUE_EXPR:
10467 CASE_CONVERT:
10468 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10469 return *tp;
10470 return NULL_TREE;
10471 /* And disallow anything else, except for comparisons. */
10472 default:
10473 if (COMPARISON_CLASS_P (*tp))
10474 return NULL_TREE;
10475 return *tp;
10479 /* Try to determine if the num_teams and/or thread_limit expressions
10480 can have their values determined already before entering the
10481 target construct.
10482 INTEGER_CSTs trivially are,
10483 integral decls that are firstprivate (explicitly or implicitly)
10484 or explicitly map(always, to:) or map(always, tofrom:) on the target
10485 region too, and expressions involving simple arithmetics on those
10486 too, function calls are not ok, dereferencing something neither etc.
10487 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10488 EXPR based on what we find:
10489 0 stands for clause not specified at all, use implementation default
10490 -1 stands for value that can't be determined easily before entering
10491 the target construct.
10492 If teams construct is not present at all, use 1 for num_teams
10493 and 0 for thread_limit (only one team is involved, and the thread
10494 limit is implementation defined. */
10496 static void
10497 optimize_target_teams (tree target, gimple_seq *pre_p)
10499 tree body = OMP_BODY (target);
10500 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10501 tree num_teams = integer_zero_node;
10502 tree thread_limit = integer_zero_node;
10503 location_t num_teams_loc = EXPR_LOCATION (target);
10504 location_t thread_limit_loc = EXPR_LOCATION (target);
10505 tree c, *p, expr;
10506 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10508 if (teams == NULL_TREE)
10509 num_teams = integer_one_node;
10510 else
10511 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10513 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10515 p = &num_teams;
10516 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10518 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10520 p = &thread_limit;
10521 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10523 else
10524 continue;
10525 expr = OMP_CLAUSE_OPERAND (c, 0);
10526 if (TREE_CODE (expr) == INTEGER_CST)
10528 *p = expr;
10529 continue;
10531 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10533 *p = integer_minus_one_node;
10534 continue;
10536 *p = expr;
10537 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10538 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10539 == GS_ERROR)
10541 gimplify_omp_ctxp = target_ctx;
10542 *p = integer_minus_one_node;
10543 continue;
10545 gimplify_omp_ctxp = target_ctx;
10546 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10547 OMP_CLAUSE_OPERAND (c, 0) = *p;
10549 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10550 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10551 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10552 OMP_TARGET_CLAUSES (target) = c;
10553 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10554 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10555 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10556 OMP_TARGET_CLAUSES (target) = c;
10559 /* Gimplify the gross structure of several OMP constructs. */
10561 static void
10562 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10564 tree expr = *expr_p;
10565 gimple *stmt;
10566 gimple_seq body = NULL;
10567 enum omp_region_type ort;
10569 switch (TREE_CODE (expr))
10571 case OMP_SECTIONS:
10572 case OMP_SINGLE:
10573 ort = ORT_WORKSHARE;
10574 break;
10575 case OMP_TARGET:
10576 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10577 break;
10578 case OACC_KERNELS:
10579 ort = ORT_ACC_KERNELS;
10580 break;
10581 case OACC_PARALLEL:
10582 ort = ORT_ACC_PARALLEL;
10583 break;
10584 case OACC_DATA:
10585 ort = ORT_ACC_DATA;
10586 break;
10587 case OMP_TARGET_DATA:
10588 ort = ORT_TARGET_DATA;
10589 break;
10590 case OMP_TEAMS:
10591 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10592 break;
10593 case OACC_HOST_DATA:
10594 ort = ORT_ACC_HOST_DATA;
10595 break;
10596 default:
10597 gcc_unreachable ();
10599 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10600 TREE_CODE (expr));
10601 if (TREE_CODE (expr) == OMP_TARGET)
10602 optimize_target_teams (expr, pre_p);
10603 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10605 push_gimplify_context ();
10606 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10607 if (gimple_code (g) == GIMPLE_BIND)
10608 pop_gimplify_context (g);
10609 else
10610 pop_gimplify_context (NULL);
10611 if ((ort & ORT_TARGET_DATA) != 0)
10613 enum built_in_function end_ix;
10614 switch (TREE_CODE (expr))
10616 case OACC_DATA:
10617 case OACC_HOST_DATA:
10618 end_ix = BUILT_IN_GOACC_DATA_END;
10619 break;
10620 case OMP_TARGET_DATA:
10621 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10622 break;
10623 default:
10624 gcc_unreachable ();
10626 tree fn = builtin_decl_explicit (end_ix);
10627 g = gimple_build_call (fn, 0);
10628 gimple_seq cleanup = NULL;
10629 gimple_seq_add_stmt (&cleanup, g);
10630 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10631 body = NULL;
10632 gimple_seq_add_stmt (&body, g);
10635 else
10636 gimplify_and_add (OMP_BODY (expr), &body);
10637 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10638 TREE_CODE (expr));
10640 switch (TREE_CODE (expr))
10642 case OACC_DATA:
10643 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10644 OMP_CLAUSES (expr));
10645 break;
10646 case OACC_KERNELS:
10647 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10648 OMP_CLAUSES (expr));
10649 break;
10650 case OACC_HOST_DATA:
10651 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10652 OMP_CLAUSES (expr));
10653 break;
10654 case OACC_PARALLEL:
10655 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10656 OMP_CLAUSES (expr));
10657 break;
10658 case OMP_SECTIONS:
10659 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10660 break;
10661 case OMP_SINGLE:
10662 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10663 break;
10664 case OMP_TARGET:
10665 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10666 OMP_CLAUSES (expr));
10667 break;
10668 case OMP_TARGET_DATA:
10669 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10670 OMP_CLAUSES (expr));
10671 break;
10672 case OMP_TEAMS:
10673 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10674 break;
10675 default:
10676 gcc_unreachable ();
10679 gimplify_seq_add_stmt (pre_p, stmt);
10680 *expr_p = NULL_TREE;
10683 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10684 target update constructs. */
10686 static void
10687 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10689 tree expr = *expr_p;
10690 int kind;
10691 gomp_target *stmt;
10692 enum omp_region_type ort = ORT_WORKSHARE;
10694 switch (TREE_CODE (expr))
10696 case OACC_ENTER_DATA:
10697 case OACC_EXIT_DATA:
10698 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10699 ort = ORT_ACC;
10700 break;
10701 case OACC_UPDATE:
10702 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10703 ort = ORT_ACC;
10704 break;
10705 case OMP_TARGET_UPDATE:
10706 kind = GF_OMP_TARGET_KIND_UPDATE;
10707 break;
10708 case OMP_TARGET_ENTER_DATA:
10709 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10710 break;
10711 case OMP_TARGET_EXIT_DATA:
10712 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10713 break;
10714 default:
10715 gcc_unreachable ();
10717 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10718 ort, TREE_CODE (expr));
10719 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10720 TREE_CODE (expr));
10721 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10723 gimplify_seq_add_stmt (pre_p, stmt);
10724 *expr_p = NULL_TREE;
10727 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10728 stabilized the lhs of the atomic operation as *ADDR. Return true if
10729 EXPR is this stabilized form. */
10731 static bool
10732 goa_lhs_expr_p (tree expr, tree addr)
10734 /* Also include casts to other type variants. The C front end is fond
10735 of adding these for e.g. volatile variables. This is like
10736 STRIP_TYPE_NOPS but includes the main variant lookup. */
10737 STRIP_USELESS_TYPE_CONVERSION (expr);
10739 if (TREE_CODE (expr) == INDIRECT_REF)
10741 expr = TREE_OPERAND (expr, 0);
10742 while (expr != addr
10743 && (CONVERT_EXPR_P (expr)
10744 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10745 && TREE_CODE (expr) == TREE_CODE (addr)
10746 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10748 expr = TREE_OPERAND (expr, 0);
10749 addr = TREE_OPERAND (addr, 0);
10751 if (expr == addr)
10752 return true;
10753 return (TREE_CODE (addr) == ADDR_EXPR
10754 && TREE_CODE (expr) == ADDR_EXPR
10755 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10757 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10758 return true;
10759 return false;
10762 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10763 expression does not involve the lhs, evaluate it into a temporary.
10764 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10765 or -1 if an error was encountered. */
10767 static int
10768 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10769 tree lhs_var)
10771 tree expr = *expr_p;
10772 int saw_lhs;
10774 if (goa_lhs_expr_p (expr, lhs_addr))
10776 *expr_p = lhs_var;
10777 return 1;
10779 if (is_gimple_val (expr))
10780 return 0;
10782 saw_lhs = 0;
10783 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10785 case tcc_binary:
10786 case tcc_comparison:
10787 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10788 lhs_var);
10789 /* FALLTHRU */
10790 case tcc_unary:
10791 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10792 lhs_var);
10793 break;
10794 case tcc_expression:
10795 switch (TREE_CODE (expr))
10797 case TRUTH_ANDIF_EXPR:
10798 case TRUTH_ORIF_EXPR:
10799 case TRUTH_AND_EXPR:
10800 case TRUTH_OR_EXPR:
10801 case TRUTH_XOR_EXPR:
10802 case BIT_INSERT_EXPR:
10803 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10804 lhs_addr, lhs_var);
10805 /* FALLTHRU */
10806 case TRUTH_NOT_EXPR:
10807 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10808 lhs_addr, lhs_var);
10809 break;
10810 case COMPOUND_EXPR:
10811 /* Break out any preevaluations from cp_build_modify_expr. */
10812 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10813 expr = TREE_OPERAND (expr, 1))
10814 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10815 *expr_p = expr;
10816 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10817 default:
10818 break;
10820 break;
10821 case tcc_reference:
10822 if (TREE_CODE (expr) == BIT_FIELD_REF)
10823 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10824 lhs_addr, lhs_var);
10825 break;
10826 default:
10827 break;
10830 if (saw_lhs == 0)
10832 enum gimplify_status gs;
10833 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10834 if (gs != GS_ALL_DONE)
10835 saw_lhs = -1;
10838 return saw_lhs;
10841 /* Gimplify an OMP_ATOMIC statement. */
10843 static enum gimplify_status
10844 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10846 tree addr = TREE_OPERAND (*expr_p, 0);
10847 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10848 ? NULL : TREE_OPERAND (*expr_p, 1);
10849 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10850 tree tmp_load;
10851 gomp_atomic_load *loadstmt;
10852 gomp_atomic_store *storestmt;
10854 tmp_load = create_tmp_reg (type);
10855 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10856 return GS_ERROR;
10858 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10859 != GS_ALL_DONE)
10860 return GS_ERROR;
10862 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10863 gimplify_seq_add_stmt (pre_p, loadstmt);
10864 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10865 != GS_ALL_DONE)
10866 return GS_ERROR;
10868 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10869 rhs = tmp_load;
10870 storestmt = gimple_build_omp_atomic_store (rhs);
10871 gimplify_seq_add_stmt (pre_p, storestmt);
10872 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10874 gimple_omp_atomic_set_seq_cst (loadstmt);
10875 gimple_omp_atomic_set_seq_cst (storestmt);
10877 switch (TREE_CODE (*expr_p))
10879 case OMP_ATOMIC_READ:
10880 case OMP_ATOMIC_CAPTURE_OLD:
10881 *expr_p = tmp_load;
10882 gimple_omp_atomic_set_need_value (loadstmt);
10883 break;
10884 case OMP_ATOMIC_CAPTURE_NEW:
10885 *expr_p = rhs;
10886 gimple_omp_atomic_set_need_value (storestmt);
10887 break;
10888 default:
10889 *expr_p = NULL;
10890 break;
10893 return GS_ALL_DONE;
10896 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10897 body, and adding some EH bits. */
10899 static enum gimplify_status
10900 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10902 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10903 gimple *body_stmt;
10904 gtransaction *trans_stmt;
10905 gimple_seq body = NULL;
10906 int subcode = 0;
10908 /* Wrap the transaction body in a BIND_EXPR so we have a context
10909 where to put decls for OMP. */
10910 if (TREE_CODE (tbody) != BIND_EXPR)
10912 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10913 TREE_SIDE_EFFECTS (bind) = 1;
10914 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10915 TRANSACTION_EXPR_BODY (expr) = bind;
10918 push_gimplify_context ();
10919 temp = voidify_wrapper_expr (*expr_p, NULL);
10921 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10922 pop_gimplify_context (body_stmt);
10924 trans_stmt = gimple_build_transaction (body);
10925 if (TRANSACTION_EXPR_OUTER (expr))
10926 subcode = GTMA_IS_OUTER;
10927 else if (TRANSACTION_EXPR_RELAXED (expr))
10928 subcode = GTMA_IS_RELAXED;
10929 gimple_transaction_set_subcode (trans_stmt, subcode);
10931 gimplify_seq_add_stmt (pre_p, trans_stmt);
10933 if (temp)
10935 *expr_p = temp;
10936 return GS_OK;
10939 *expr_p = NULL_TREE;
10940 return GS_ALL_DONE;
10943 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10944 is the OMP_BODY of the original EXPR (which has already been
10945 gimplified so it's not present in the EXPR).
10947 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10949 static gimple *
10950 gimplify_omp_ordered (tree expr, gimple_seq body)
10952 tree c, decls;
10953 int failures = 0;
10954 unsigned int i;
10955 tree source_c = NULL_TREE;
10956 tree sink_c = NULL_TREE;
10958 if (gimplify_omp_ctxp)
10960 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10961 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10962 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10963 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10964 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10966 error_at (OMP_CLAUSE_LOCATION (c),
10967 "%<ordered%> construct with %<depend%> clause must be "
10968 "closely nested inside a loop with %<ordered%> clause "
10969 "with a parameter");
10970 failures++;
10972 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10973 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10975 bool fail = false;
10976 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10977 decls && TREE_CODE (decls) == TREE_LIST;
10978 decls = TREE_CHAIN (decls), ++i)
10979 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10980 continue;
10981 else if (TREE_VALUE (decls)
10982 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10984 error_at (OMP_CLAUSE_LOCATION (c),
10985 "variable %qE is not an iteration "
10986 "of outermost loop %d, expected %qE",
10987 TREE_VALUE (decls), i + 1,
10988 gimplify_omp_ctxp->loop_iter_var[2 * i]);
10989 fail = true;
10990 failures++;
10992 else
10993 TREE_VALUE (decls)
10994 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
10995 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10997 error_at (OMP_CLAUSE_LOCATION (c),
10998 "number of variables in %<depend(sink)%> "
10999 "clause does not match number of "
11000 "iteration variables");
11001 failures++;
11003 sink_c = c;
11005 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11006 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11008 if (source_c)
11010 error_at (OMP_CLAUSE_LOCATION (c),
11011 "more than one %<depend(source)%> clause on an "
11012 "%<ordered%> construct");
11013 failures++;
11015 else
11016 source_c = c;
11019 if (source_c && sink_c)
11021 error_at (OMP_CLAUSE_LOCATION (source_c),
11022 "%<depend(source)%> clause specified together with "
11023 "%<depend(sink:)%> clauses on the same construct");
11024 failures++;
11027 if (failures)
11028 return gimple_build_nop ();
11029 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11032 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11033 expression produces a value to be used as an operand inside a GIMPLE
11034 statement, the value will be stored back in *EXPR_P. This value will
11035 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11036 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11037 emitted in PRE_P and POST_P.
11039 Additionally, this process may overwrite parts of the input
11040 expression during gimplification. Ideally, it should be
11041 possible to do non-destructive gimplification.
11043 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11044 the expression needs to evaluate to a value to be used as
11045 an operand in a GIMPLE statement, this value will be stored in
11046 *EXPR_P on exit. This happens when the caller specifies one
11047 of fb_lvalue or fb_rvalue fallback flags.
11049 PRE_P will contain the sequence of GIMPLE statements corresponding
11050 to the evaluation of EXPR and all the side-effects that must
11051 be executed before the main expression. On exit, the last
11052 statement of PRE_P is the core statement being gimplified. For
11053 instance, when gimplifying 'if (++a)' the last statement in
11054 PRE_P will be 'if (t.1)' where t.1 is the result of
11055 pre-incrementing 'a'.
11057 POST_P will contain the sequence of GIMPLE statements corresponding
11058 to the evaluation of all the side-effects that must be executed
11059 after the main expression. If this is NULL, the post
11060 side-effects are stored at the end of PRE_P.
11062 The reason why the output is split in two is to handle post
11063 side-effects explicitly. In some cases, an expression may have
11064 inner and outer post side-effects which need to be emitted in
11065 an order different from the one given by the recursive
11066 traversal. For instance, for the expression (*p--)++ the post
11067 side-effects of '--' must actually occur *after* the post
11068 side-effects of '++'. However, gimplification will first visit
11069 the inner expression, so if a separate POST sequence was not
11070 used, the resulting sequence would be:
11072 1 t.1 = *p
11073 2 p = p - 1
11074 3 t.2 = t.1 + 1
11075 4 *p = t.2
11077 However, the post-decrement operation in line #2 must not be
11078 evaluated until after the store to *p at line #4, so the
11079 correct sequence should be:
11081 1 t.1 = *p
11082 2 t.2 = t.1 + 1
11083 3 *p = t.2
11084 4 p = p - 1
11086 So, by specifying a separate post queue, it is possible
11087 to emit the post side-effects in the correct order.
11088 If POST_P is NULL, an internal queue will be used. Before
11089 returning to the caller, the sequence POST_P is appended to
11090 the main output sequence PRE_P.
11092 GIMPLE_TEST_F points to a function that takes a tree T and
11093 returns nonzero if T is in the GIMPLE form requested by the
11094 caller. The GIMPLE predicates are in gimple.c.
11096 FALLBACK tells the function what sort of a temporary we want if
11097 gimplification cannot produce an expression that complies with
11098 GIMPLE_TEST_F.
11100 fb_none means that no temporary should be generated
11101 fb_rvalue means that an rvalue is OK to generate
11102 fb_lvalue means that an lvalue is OK to generate
11103 fb_either means that either is OK, but an lvalue is preferable.
11104 fb_mayfail means that gimplification may fail (in which case
11105 GS_ERROR will be returned)
11107 The return value is either GS_ERROR or GS_ALL_DONE, since this
11108 function iterates until EXPR is completely gimplified or an error
11109 occurs. */
11111 enum gimplify_status
11112 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11113 bool (*gimple_test_f) (tree), fallback_t fallback)
11115 tree tmp;
11116 gimple_seq internal_pre = NULL;
11117 gimple_seq internal_post = NULL;
11118 tree save_expr;
11119 bool is_statement;
11120 location_t saved_location;
11121 enum gimplify_status ret;
11122 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11123 tree label;
11125 save_expr = *expr_p;
11126 if (save_expr == NULL_TREE)
11127 return GS_ALL_DONE;
11129 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11130 is_statement = gimple_test_f == is_gimple_stmt;
11131 if (is_statement)
11132 gcc_assert (pre_p);
11134 /* Consistency checks. */
11135 if (gimple_test_f == is_gimple_reg)
11136 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11137 else if (gimple_test_f == is_gimple_val
11138 || gimple_test_f == is_gimple_call_addr
11139 || gimple_test_f == is_gimple_condexpr
11140 || gimple_test_f == is_gimple_mem_rhs
11141 || gimple_test_f == is_gimple_mem_rhs_or_call
11142 || gimple_test_f == is_gimple_reg_rhs
11143 || gimple_test_f == is_gimple_reg_rhs_or_call
11144 || gimple_test_f == is_gimple_asm_val
11145 || gimple_test_f == is_gimple_mem_ref_addr)
11146 gcc_assert (fallback & fb_rvalue);
11147 else if (gimple_test_f == is_gimple_min_lval
11148 || gimple_test_f == is_gimple_lvalue)
11149 gcc_assert (fallback & fb_lvalue);
11150 else if (gimple_test_f == is_gimple_addressable)
11151 gcc_assert (fallback & fb_either);
11152 else if (gimple_test_f == is_gimple_stmt)
11153 gcc_assert (fallback == fb_none);
11154 else
11156 /* We should have recognized the GIMPLE_TEST_F predicate to
11157 know what kind of fallback to use in case a temporary is
11158 needed to hold the value or address of *EXPR_P. */
11159 gcc_unreachable ();
11162 /* We used to check the predicate here and return immediately if it
11163 succeeds. This is wrong; the design is for gimplification to be
11164 idempotent, and for the predicates to only test for valid forms, not
11165 whether they are fully simplified. */
11166 if (pre_p == NULL)
11167 pre_p = &internal_pre;
11169 if (post_p == NULL)
11170 post_p = &internal_post;
11172 /* Remember the last statements added to PRE_P and POST_P. Every
11173 new statement added by the gimplification helpers needs to be
11174 annotated with location information. To centralize the
11175 responsibility, we remember the last statement that had been
11176 added to both queues before gimplifying *EXPR_P. If
11177 gimplification produces new statements in PRE_P and POST_P, those
11178 statements will be annotated with the same location information
11179 as *EXPR_P. */
11180 pre_last_gsi = gsi_last (*pre_p);
11181 post_last_gsi = gsi_last (*post_p);
11183 saved_location = input_location;
11184 if (save_expr != error_mark_node
11185 && EXPR_HAS_LOCATION (*expr_p))
11186 input_location = EXPR_LOCATION (*expr_p);
11188 /* Loop over the specific gimplifiers until the toplevel node
11189 remains the same. */
11192 /* Strip away as many useless type conversions as possible
11193 at the toplevel. */
11194 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11196 /* Remember the expr. */
11197 save_expr = *expr_p;
11199 /* Die, die, die, my darling. */
11200 if (save_expr == error_mark_node
11201 || (TREE_TYPE (save_expr)
11202 && TREE_TYPE (save_expr) == error_mark_node))
11204 ret = GS_ERROR;
11205 break;
11208 /* Do any language-specific gimplification. */
11209 ret = ((enum gimplify_status)
11210 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11211 if (ret == GS_OK)
11213 if (*expr_p == NULL_TREE)
11214 break;
11215 if (*expr_p != save_expr)
11216 continue;
11218 else if (ret != GS_UNHANDLED)
11219 break;
11221 /* Make sure that all the cases set 'ret' appropriately. */
11222 ret = GS_UNHANDLED;
11223 switch (TREE_CODE (*expr_p))
11225 /* First deal with the special cases. */
11227 case POSTINCREMENT_EXPR:
11228 case POSTDECREMENT_EXPR:
11229 case PREINCREMENT_EXPR:
11230 case PREDECREMENT_EXPR:
11231 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11232 fallback != fb_none,
11233 TREE_TYPE (*expr_p));
11234 break;
11236 case VIEW_CONVERT_EXPR:
11237 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11238 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11240 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11241 post_p, is_gimple_val, fb_rvalue);
11242 recalculate_side_effects (*expr_p);
11243 break;
11245 /* Fallthru. */
11247 case ARRAY_REF:
11248 case ARRAY_RANGE_REF:
11249 case REALPART_EXPR:
11250 case IMAGPART_EXPR:
11251 case COMPONENT_REF:
11252 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11253 fallback ? fallback : fb_rvalue);
11254 break;
11256 case COND_EXPR:
11257 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11259 /* C99 code may assign to an array in a structure value of a
11260 conditional expression, and this has undefined behavior
11261 only on execution, so create a temporary if an lvalue is
11262 required. */
11263 if (fallback == fb_lvalue)
11265 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11266 mark_addressable (*expr_p);
11267 ret = GS_OK;
11269 break;
11271 case CALL_EXPR:
11272 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11274 /* C99 code may assign to an array in a structure returned
11275 from a function, and this has undefined behavior only on
11276 execution, so create a temporary if an lvalue is
11277 required. */
11278 if (fallback == fb_lvalue)
11280 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11281 mark_addressable (*expr_p);
11282 ret = GS_OK;
11284 break;
11286 case TREE_LIST:
11287 gcc_unreachable ();
11289 case COMPOUND_EXPR:
11290 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11291 break;
11293 case COMPOUND_LITERAL_EXPR:
11294 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11295 gimple_test_f, fallback);
11296 break;
11298 case MODIFY_EXPR:
11299 case INIT_EXPR:
11300 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11301 fallback != fb_none);
11302 break;
11304 case TRUTH_ANDIF_EXPR:
11305 case TRUTH_ORIF_EXPR:
11307 /* Preserve the original type of the expression and the
11308 source location of the outer expression. */
11309 tree org_type = TREE_TYPE (*expr_p);
11310 *expr_p = gimple_boolify (*expr_p);
11311 *expr_p = build3_loc (input_location, COND_EXPR,
11312 org_type, *expr_p,
11313 fold_convert_loc
11314 (input_location,
11315 org_type, boolean_true_node),
11316 fold_convert_loc
11317 (input_location,
11318 org_type, boolean_false_node));
11319 ret = GS_OK;
11320 break;
11323 case TRUTH_NOT_EXPR:
11325 tree type = TREE_TYPE (*expr_p);
11326 /* The parsers are careful to generate TRUTH_NOT_EXPR
11327 only with operands that are always zero or one.
11328 We do not fold here but handle the only interesting case
11329 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11330 *expr_p = gimple_boolify (*expr_p);
11331 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11332 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11333 TREE_TYPE (*expr_p),
11334 TREE_OPERAND (*expr_p, 0));
11335 else
11336 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11337 TREE_TYPE (*expr_p),
11338 TREE_OPERAND (*expr_p, 0),
11339 build_int_cst (TREE_TYPE (*expr_p), 1));
11340 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11341 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11342 ret = GS_OK;
11343 break;
11346 case ADDR_EXPR:
11347 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11348 break;
11350 case ANNOTATE_EXPR:
11352 tree cond = TREE_OPERAND (*expr_p, 0);
11353 tree kind = TREE_OPERAND (*expr_p, 1);
11354 tree data = TREE_OPERAND (*expr_p, 2);
11355 tree type = TREE_TYPE (cond);
11356 if (!INTEGRAL_TYPE_P (type))
11358 *expr_p = cond;
11359 ret = GS_OK;
11360 break;
11362 tree tmp = create_tmp_var (type);
11363 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11364 gcall *call
11365 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
11366 gimple_call_set_lhs (call, tmp);
11367 gimplify_seq_add_stmt (pre_p, call);
11368 *expr_p = tmp;
11369 ret = GS_ALL_DONE;
11370 break;
11373 case VA_ARG_EXPR:
11374 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11375 break;
11377 CASE_CONVERT:
11378 if (IS_EMPTY_STMT (*expr_p))
11380 ret = GS_ALL_DONE;
11381 break;
11384 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11385 || fallback == fb_none)
11387 /* Just strip a conversion to void (or in void context) and
11388 try again. */
11389 *expr_p = TREE_OPERAND (*expr_p, 0);
11390 ret = GS_OK;
11391 break;
11394 ret = gimplify_conversion (expr_p);
11395 if (ret == GS_ERROR)
11396 break;
11397 if (*expr_p != save_expr)
11398 break;
11399 /* FALLTHRU */
11401 case FIX_TRUNC_EXPR:
11402 /* unary_expr: ... | '(' cast ')' val | ... */
11403 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11404 is_gimple_val, fb_rvalue);
11405 recalculate_side_effects (*expr_p);
11406 break;
11408 case INDIRECT_REF:
11410 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11411 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11412 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11414 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11415 if (*expr_p != save_expr)
11417 ret = GS_OK;
11418 break;
11421 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11422 is_gimple_reg, fb_rvalue);
11423 if (ret == GS_ERROR)
11424 break;
11426 recalculate_side_effects (*expr_p);
11427 *expr_p = fold_build2_loc (input_location, MEM_REF,
11428 TREE_TYPE (*expr_p),
11429 TREE_OPERAND (*expr_p, 0),
11430 build_int_cst (saved_ptr_type, 0));
11431 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11432 TREE_THIS_NOTRAP (*expr_p) = notrap;
11433 ret = GS_OK;
11434 break;
11437 /* We arrive here through the various re-gimplifcation paths. */
11438 case MEM_REF:
11439 /* First try re-folding the whole thing. */
11440 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11441 TREE_OPERAND (*expr_p, 0),
11442 TREE_OPERAND (*expr_p, 1));
11443 if (tmp)
11445 REF_REVERSE_STORAGE_ORDER (tmp)
11446 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11447 *expr_p = tmp;
11448 recalculate_side_effects (*expr_p);
11449 ret = GS_OK;
11450 break;
11452 /* Avoid re-gimplifying the address operand if it is already
11453 in suitable form. Re-gimplifying would mark the address
11454 operand addressable. Always gimplify when not in SSA form
11455 as we still may have to gimplify decls with value-exprs. */
11456 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11457 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11459 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11460 is_gimple_mem_ref_addr, fb_rvalue);
11461 if (ret == GS_ERROR)
11462 break;
11464 recalculate_side_effects (*expr_p);
11465 ret = GS_ALL_DONE;
11466 break;
11468 /* Constants need not be gimplified. */
11469 case INTEGER_CST:
11470 case REAL_CST:
11471 case FIXED_CST:
11472 case STRING_CST:
11473 case COMPLEX_CST:
11474 case VECTOR_CST:
11475 /* Drop the overflow flag on constants, we do not want
11476 that in the GIMPLE IL. */
11477 if (TREE_OVERFLOW_P (*expr_p))
11478 *expr_p = drop_tree_overflow (*expr_p);
11479 ret = GS_ALL_DONE;
11480 break;
11482 case CONST_DECL:
11483 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11484 CONST_DECL node. Otherwise the decl is replaceable by its
11485 value. */
11486 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11487 if (fallback & fb_lvalue)
11488 ret = GS_ALL_DONE;
11489 else
11491 *expr_p = DECL_INITIAL (*expr_p);
11492 ret = GS_OK;
11494 break;
11496 case DECL_EXPR:
11497 ret = gimplify_decl_expr (expr_p, pre_p);
11498 break;
11500 case BIND_EXPR:
11501 ret = gimplify_bind_expr (expr_p, pre_p);
11502 break;
11504 case LOOP_EXPR:
11505 ret = gimplify_loop_expr (expr_p, pre_p);
11506 break;
11508 case SWITCH_EXPR:
11509 ret = gimplify_switch_expr (expr_p, pre_p);
11510 break;
11512 case EXIT_EXPR:
11513 ret = gimplify_exit_expr (expr_p);
11514 break;
11516 case GOTO_EXPR:
11517 /* If the target is not LABEL, then it is a computed jump
11518 and the target needs to be gimplified. */
11519 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11521 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11522 NULL, is_gimple_val, fb_rvalue);
11523 if (ret == GS_ERROR)
11524 break;
11526 gimplify_seq_add_stmt (pre_p,
11527 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11528 ret = GS_ALL_DONE;
11529 break;
11531 case PREDICT_EXPR:
11532 gimplify_seq_add_stmt (pre_p,
11533 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11534 PREDICT_EXPR_OUTCOME (*expr_p)));
11535 ret = GS_ALL_DONE;
11536 break;
11538 case LABEL_EXPR:
11539 ret = gimplify_label_expr (expr_p, pre_p);
11540 label = LABEL_EXPR_LABEL (*expr_p);
11541 gcc_assert (decl_function_context (label) == current_function_decl);
11543 /* If the label is used in a goto statement, or address of the label
11544 is taken, we need to unpoison all variables that were seen so far.
11545 Doing so would prevent us from reporting a false positives. */
11546 if (asan_poisoned_variables
11547 && asan_used_labels != NULL
11548 && asan_used_labels->contains (label))
11549 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11550 break;
11552 case CASE_LABEL_EXPR:
11553 ret = gimplify_case_label_expr (expr_p, pre_p);
11555 if (gimplify_ctxp->live_switch_vars)
11556 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11557 pre_p);
11558 break;
11560 case RETURN_EXPR:
11561 ret = gimplify_return_expr (*expr_p, pre_p);
11562 break;
11564 case CONSTRUCTOR:
11565 /* Don't reduce this in place; let gimplify_init_constructor work its
11566 magic. Buf if we're just elaborating this for side effects, just
11567 gimplify any element that has side-effects. */
11568 if (fallback == fb_none)
11570 unsigned HOST_WIDE_INT ix;
11571 tree val;
11572 tree temp = NULL_TREE;
11573 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11574 if (TREE_SIDE_EFFECTS (val))
11575 append_to_statement_list (val, &temp);
11577 *expr_p = temp;
11578 ret = temp ? GS_OK : GS_ALL_DONE;
11580 /* C99 code may assign to an array in a constructed
11581 structure or union, and this has undefined behavior only
11582 on execution, so create a temporary if an lvalue is
11583 required. */
11584 else if (fallback == fb_lvalue)
11586 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11587 mark_addressable (*expr_p);
11588 ret = GS_OK;
11590 else
11591 ret = GS_ALL_DONE;
11592 break;
11594 /* The following are special cases that are not handled by the
11595 original GIMPLE grammar. */
11597 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11598 eliminated. */
11599 case SAVE_EXPR:
11600 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11601 break;
11603 case BIT_FIELD_REF:
11604 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11605 post_p, is_gimple_lvalue, fb_either);
11606 recalculate_side_effects (*expr_p);
11607 break;
11609 case TARGET_MEM_REF:
11611 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11613 if (TMR_BASE (*expr_p))
11614 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11615 post_p, is_gimple_mem_ref_addr, fb_either);
11616 if (TMR_INDEX (*expr_p))
11617 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11618 post_p, is_gimple_val, fb_rvalue);
11619 if (TMR_INDEX2 (*expr_p))
11620 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11621 post_p, is_gimple_val, fb_rvalue);
11622 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11623 ret = MIN (r0, r1);
11625 break;
11627 case NON_LVALUE_EXPR:
11628 /* This should have been stripped above. */
11629 gcc_unreachable ();
11631 case ASM_EXPR:
11632 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11633 break;
11635 case TRY_FINALLY_EXPR:
11636 case TRY_CATCH_EXPR:
11638 gimple_seq eval, cleanup;
11639 gtry *try_;
11641 /* Calls to destructors are generated automatically in FINALLY/CATCH
11642 block. They should have location as UNKNOWN_LOCATION. However,
11643 gimplify_call_expr will reset these call stmts to input_location
11644 if it finds stmt's location is unknown. To prevent resetting for
11645 destructors, we set the input_location to unknown.
11646 Note that this only affects the destructor calls in FINALLY/CATCH
11647 block, and will automatically reset to its original value by the
11648 end of gimplify_expr. */
11649 input_location = UNKNOWN_LOCATION;
11650 eval = cleanup = NULL;
11651 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11652 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11653 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11654 if (gimple_seq_empty_p (cleanup))
11656 gimple_seq_add_seq (pre_p, eval);
11657 ret = GS_ALL_DONE;
11658 break;
11660 try_ = gimple_build_try (eval, cleanup,
11661 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11662 ? GIMPLE_TRY_FINALLY
11663 : GIMPLE_TRY_CATCH);
11664 if (EXPR_HAS_LOCATION (save_expr))
11665 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11666 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11667 gimple_set_location (try_, saved_location);
11668 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11669 gimple_try_set_catch_is_cleanup (try_,
11670 TRY_CATCH_IS_CLEANUP (*expr_p));
11671 gimplify_seq_add_stmt (pre_p, try_);
11672 ret = GS_ALL_DONE;
11673 break;
11676 case CLEANUP_POINT_EXPR:
11677 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11678 break;
11680 case TARGET_EXPR:
11681 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11682 break;
11684 case CATCH_EXPR:
11686 gimple *c;
11687 gimple_seq handler = NULL;
11688 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11689 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11690 gimplify_seq_add_stmt (pre_p, c);
11691 ret = GS_ALL_DONE;
11692 break;
11695 case EH_FILTER_EXPR:
11697 gimple *ehf;
11698 gimple_seq failure = NULL;
11700 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11701 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11702 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11703 gimplify_seq_add_stmt (pre_p, ehf);
11704 ret = GS_ALL_DONE;
11705 break;
11708 case OBJ_TYPE_REF:
11710 enum gimplify_status r0, r1;
11711 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11712 post_p, is_gimple_val, fb_rvalue);
11713 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11714 post_p, is_gimple_val, fb_rvalue);
11715 TREE_SIDE_EFFECTS (*expr_p) = 0;
11716 ret = MIN (r0, r1);
11718 break;
11720 case LABEL_DECL:
11721 /* We get here when taking the address of a label. We mark
11722 the label as "forced"; meaning it can never be removed and
11723 it is a potential target for any computed goto. */
11724 FORCED_LABEL (*expr_p) = 1;
11725 ret = GS_ALL_DONE;
11726 break;
11728 case STATEMENT_LIST:
11729 ret = gimplify_statement_list (expr_p, pre_p);
11730 break;
11732 case WITH_SIZE_EXPR:
11734 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11735 post_p == &internal_post ? NULL : post_p,
11736 gimple_test_f, fallback);
11737 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11738 is_gimple_val, fb_rvalue);
11739 ret = GS_ALL_DONE;
11741 break;
11743 case VAR_DECL:
11744 case PARM_DECL:
11745 ret = gimplify_var_or_parm_decl (expr_p);
11746 break;
11748 case RESULT_DECL:
11749 /* When within an OMP context, notice uses of variables. */
11750 if (gimplify_omp_ctxp)
11751 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11752 ret = GS_ALL_DONE;
11753 break;
11755 case SSA_NAME:
11756 /* Allow callbacks into the gimplifier during optimization. */
11757 ret = GS_ALL_DONE;
11758 break;
11760 case OMP_PARALLEL:
11761 gimplify_omp_parallel (expr_p, pre_p);
11762 ret = GS_ALL_DONE;
11763 break;
11765 case OMP_TASK:
11766 gimplify_omp_task (expr_p, pre_p);
11767 ret = GS_ALL_DONE;
11768 break;
11770 case OMP_FOR:
11771 case OMP_SIMD:
11772 case OMP_DISTRIBUTE:
11773 case OMP_TASKLOOP:
11774 case OACC_LOOP:
11775 ret = gimplify_omp_for (expr_p, pre_p);
11776 break;
11778 case OACC_CACHE:
11779 gimplify_oacc_cache (expr_p, pre_p);
11780 ret = GS_ALL_DONE;
11781 break;
11783 case OACC_DECLARE:
11784 gimplify_oacc_declare (expr_p, pre_p);
11785 ret = GS_ALL_DONE;
11786 break;
11788 case OACC_HOST_DATA:
11789 case OACC_DATA:
11790 case OACC_KERNELS:
11791 case OACC_PARALLEL:
11792 case OMP_SECTIONS:
11793 case OMP_SINGLE:
11794 case OMP_TARGET:
11795 case OMP_TARGET_DATA:
11796 case OMP_TEAMS:
11797 gimplify_omp_workshare (expr_p, pre_p);
11798 ret = GS_ALL_DONE;
11799 break;
11801 case OACC_ENTER_DATA:
11802 case OACC_EXIT_DATA:
11803 case OACC_UPDATE:
11804 case OMP_TARGET_UPDATE:
11805 case OMP_TARGET_ENTER_DATA:
11806 case OMP_TARGET_EXIT_DATA:
11807 gimplify_omp_target_update (expr_p, pre_p);
11808 ret = GS_ALL_DONE;
11809 break;
11811 case OMP_SECTION:
11812 case OMP_MASTER:
11813 case OMP_TASKGROUP:
11814 case OMP_ORDERED:
11815 case OMP_CRITICAL:
11817 gimple_seq body = NULL;
11818 gimple *g;
11820 gimplify_and_add (OMP_BODY (*expr_p), &body);
11821 switch (TREE_CODE (*expr_p))
11823 case OMP_SECTION:
11824 g = gimple_build_omp_section (body);
11825 break;
11826 case OMP_MASTER:
11827 g = gimple_build_omp_master (body);
11828 break;
11829 case OMP_TASKGROUP:
11831 gimple_seq cleanup = NULL;
11832 tree fn
11833 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11834 g = gimple_build_call (fn, 0);
11835 gimple_seq_add_stmt (&cleanup, g);
11836 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11837 body = NULL;
11838 gimple_seq_add_stmt (&body, g);
11839 g = gimple_build_omp_taskgroup (body);
11841 break;
11842 case OMP_ORDERED:
11843 g = gimplify_omp_ordered (*expr_p, body);
11844 break;
11845 case OMP_CRITICAL:
11846 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11847 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11848 gimplify_adjust_omp_clauses (pre_p, body,
11849 &OMP_CRITICAL_CLAUSES (*expr_p),
11850 OMP_CRITICAL);
11851 g = gimple_build_omp_critical (body,
11852 OMP_CRITICAL_NAME (*expr_p),
11853 OMP_CRITICAL_CLAUSES (*expr_p));
11854 break;
11855 default:
11856 gcc_unreachable ();
11858 gimplify_seq_add_stmt (pre_p, g);
11859 ret = GS_ALL_DONE;
11860 break;
11863 case OMP_ATOMIC:
11864 case OMP_ATOMIC_READ:
11865 case OMP_ATOMIC_CAPTURE_OLD:
11866 case OMP_ATOMIC_CAPTURE_NEW:
11867 ret = gimplify_omp_atomic (expr_p, pre_p);
11868 break;
11870 case TRANSACTION_EXPR:
11871 ret = gimplify_transaction (expr_p, pre_p);
11872 break;
11874 case TRUTH_AND_EXPR:
11875 case TRUTH_OR_EXPR:
11876 case TRUTH_XOR_EXPR:
11878 tree orig_type = TREE_TYPE (*expr_p);
11879 tree new_type, xop0, xop1;
11880 *expr_p = gimple_boolify (*expr_p);
11881 new_type = TREE_TYPE (*expr_p);
11882 if (!useless_type_conversion_p (orig_type, new_type))
11884 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11885 ret = GS_OK;
11886 break;
11889 /* Boolified binary truth expressions are semantically equivalent
11890 to bitwise binary expressions. Canonicalize them to the
11891 bitwise variant. */
11892 switch (TREE_CODE (*expr_p))
11894 case TRUTH_AND_EXPR:
11895 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11896 break;
11897 case TRUTH_OR_EXPR:
11898 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11899 break;
11900 case TRUTH_XOR_EXPR:
11901 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11902 break;
11903 default:
11904 break;
11906 /* Now make sure that operands have compatible type to
11907 expression's new_type. */
11908 xop0 = TREE_OPERAND (*expr_p, 0);
11909 xop1 = TREE_OPERAND (*expr_p, 1);
11910 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11911 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11912 new_type,
11913 xop0);
11914 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11915 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11916 new_type,
11917 xop1);
11918 /* Continue classified as tcc_binary. */
11919 goto expr_2;
11922 case VEC_COND_EXPR:
11924 enum gimplify_status r0, r1, r2;
11926 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11927 post_p, is_gimple_condexpr, fb_rvalue);
11928 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11929 post_p, is_gimple_val, fb_rvalue);
11930 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11931 post_p, is_gimple_val, fb_rvalue);
11933 ret = MIN (MIN (r0, r1), r2);
11934 recalculate_side_effects (*expr_p);
11936 break;
11938 case FMA_EXPR:
11939 case VEC_PERM_EXPR:
11940 /* Classified as tcc_expression. */
11941 goto expr_3;
11943 case BIT_INSERT_EXPR:
11944 /* Argument 3 is a constant. */
11945 goto expr_2;
11947 case POINTER_PLUS_EXPR:
11949 enum gimplify_status r0, r1;
11950 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11951 post_p, is_gimple_val, fb_rvalue);
11952 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11953 post_p, is_gimple_val, fb_rvalue);
11954 recalculate_side_effects (*expr_p);
11955 ret = MIN (r0, r1);
11956 break;
11959 default:
11960 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11962 case tcc_comparison:
11963 /* Handle comparison of objects of non scalar mode aggregates
11964 with a call to memcmp. It would be nice to only have to do
11965 this for variable-sized objects, but then we'd have to allow
11966 the same nest of reference nodes we allow for MODIFY_EXPR and
11967 that's too complex.
11969 Compare scalar mode aggregates as scalar mode values. Using
11970 memcmp for them would be very inefficient at best, and is
11971 plain wrong if bitfields are involved. */
11973 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11975 /* Vector comparisons need no boolification. */
11976 if (TREE_CODE (type) == VECTOR_TYPE)
11977 goto expr_2;
11978 else if (!AGGREGATE_TYPE_P (type))
11980 tree org_type = TREE_TYPE (*expr_p);
11981 *expr_p = gimple_boolify (*expr_p);
11982 if (!useless_type_conversion_p (org_type,
11983 TREE_TYPE (*expr_p)))
11985 *expr_p = fold_convert_loc (input_location,
11986 org_type, *expr_p);
11987 ret = GS_OK;
11989 else
11990 goto expr_2;
11992 else if (TYPE_MODE (type) != BLKmode)
11993 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11994 else
11995 ret = gimplify_variable_sized_compare (expr_p);
11997 break;
12000 /* If *EXPR_P does not need to be special-cased, handle it
12001 according to its class. */
12002 case tcc_unary:
12003 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12004 post_p, is_gimple_val, fb_rvalue);
12005 break;
12007 case tcc_binary:
12008 expr_2:
12010 enum gimplify_status r0, r1;
12012 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12013 post_p, is_gimple_val, fb_rvalue);
12014 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12015 post_p, is_gimple_val, fb_rvalue);
12017 ret = MIN (r0, r1);
12018 break;
12021 expr_3:
12023 enum gimplify_status r0, r1, r2;
12025 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12026 post_p, is_gimple_val, fb_rvalue);
12027 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12028 post_p, is_gimple_val, fb_rvalue);
12029 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12030 post_p, is_gimple_val, fb_rvalue);
12032 ret = MIN (MIN (r0, r1), r2);
12033 break;
12036 case tcc_declaration:
12037 case tcc_constant:
12038 ret = GS_ALL_DONE;
12039 goto dont_recalculate;
12041 default:
12042 gcc_unreachable ();
12045 recalculate_side_effects (*expr_p);
12047 dont_recalculate:
12048 break;
12051 gcc_assert (*expr_p || ret != GS_OK);
12053 while (ret == GS_OK);
12055 /* If we encountered an error_mark somewhere nested inside, either
12056 stub out the statement or propagate the error back out. */
12057 if (ret == GS_ERROR)
12059 if (is_statement)
12060 *expr_p = NULL;
12061 goto out;
12064 /* This was only valid as a return value from the langhook, which
12065 we handled. Make sure it doesn't escape from any other context. */
12066 gcc_assert (ret != GS_UNHANDLED);
12068 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12070 /* We aren't looking for a value, and we don't have a valid
12071 statement. If it doesn't have side-effects, throw it away.
12072 We can also get here with code such as "*&&L;", where L is
12073 a LABEL_DECL that is marked as FORCED_LABEL. */
12074 if (TREE_CODE (*expr_p) == LABEL_DECL
12075 || !TREE_SIDE_EFFECTS (*expr_p))
12076 *expr_p = NULL;
12077 else if (!TREE_THIS_VOLATILE (*expr_p))
12079 /* This is probably a _REF that contains something nested that
12080 has side effects. Recurse through the operands to find it. */
12081 enum tree_code code = TREE_CODE (*expr_p);
12083 switch (code)
12085 case COMPONENT_REF:
12086 case REALPART_EXPR:
12087 case IMAGPART_EXPR:
12088 case VIEW_CONVERT_EXPR:
12089 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12090 gimple_test_f, fallback);
12091 break;
12093 case ARRAY_REF:
12094 case ARRAY_RANGE_REF:
12095 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12096 gimple_test_f, fallback);
12097 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12098 gimple_test_f, fallback);
12099 break;
12101 default:
12102 /* Anything else with side-effects must be converted to
12103 a valid statement before we get here. */
12104 gcc_unreachable ();
12107 *expr_p = NULL;
12109 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12110 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12112 /* Historically, the compiler has treated a bare reference
12113 to a non-BLKmode volatile lvalue as forcing a load. */
12114 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12116 /* Normally, we do not want to create a temporary for a
12117 TREE_ADDRESSABLE type because such a type should not be
12118 copied by bitwise-assignment. However, we make an
12119 exception here, as all we are doing here is ensuring that
12120 we read the bytes that make up the type. We use
12121 create_tmp_var_raw because create_tmp_var will abort when
12122 given a TREE_ADDRESSABLE type. */
12123 tree tmp = create_tmp_var_raw (type, "vol");
12124 gimple_add_tmp_var (tmp);
12125 gimplify_assign (tmp, *expr_p, pre_p);
12126 *expr_p = NULL;
12128 else
12129 /* We can't do anything useful with a volatile reference to
12130 an incomplete type, so just throw it away. Likewise for
12131 a BLKmode type, since any implicit inner load should
12132 already have been turned into an explicit one by the
12133 gimplification process. */
12134 *expr_p = NULL;
12137 /* If we are gimplifying at the statement level, we're done. Tack
12138 everything together and return. */
12139 if (fallback == fb_none || is_statement)
12141 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12142 it out for GC to reclaim it. */
12143 *expr_p = NULL_TREE;
12145 if (!gimple_seq_empty_p (internal_pre)
12146 || !gimple_seq_empty_p (internal_post))
12148 gimplify_seq_add_seq (&internal_pre, internal_post);
12149 gimplify_seq_add_seq (pre_p, internal_pre);
12152 /* The result of gimplifying *EXPR_P is going to be the last few
12153 statements in *PRE_P and *POST_P. Add location information
12154 to all the statements that were added by the gimplification
12155 helpers. */
12156 if (!gimple_seq_empty_p (*pre_p))
12157 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12159 if (!gimple_seq_empty_p (*post_p))
12160 annotate_all_with_location_after (*post_p, post_last_gsi,
12161 input_location);
12163 goto out;
12166 #ifdef ENABLE_GIMPLE_CHECKING
12167 if (*expr_p)
12169 enum tree_code code = TREE_CODE (*expr_p);
12170 /* These expressions should already be in gimple IR form. */
12171 gcc_assert (code != MODIFY_EXPR
12172 && code != ASM_EXPR
12173 && code != BIND_EXPR
12174 && code != CATCH_EXPR
12175 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12176 && code != EH_FILTER_EXPR
12177 && code != GOTO_EXPR
12178 && code != LABEL_EXPR
12179 && code != LOOP_EXPR
12180 && code != SWITCH_EXPR
12181 && code != TRY_FINALLY_EXPR
12182 && code != OACC_PARALLEL
12183 && code != OACC_KERNELS
12184 && code != OACC_DATA
12185 && code != OACC_HOST_DATA
12186 && code != OACC_DECLARE
12187 && code != OACC_UPDATE
12188 && code != OACC_ENTER_DATA
12189 && code != OACC_EXIT_DATA
12190 && code != OACC_CACHE
12191 && code != OMP_CRITICAL
12192 && code != OMP_FOR
12193 && code != OACC_LOOP
12194 && code != OMP_MASTER
12195 && code != OMP_TASKGROUP
12196 && code != OMP_ORDERED
12197 && code != OMP_PARALLEL
12198 && code != OMP_SECTIONS
12199 && code != OMP_SECTION
12200 && code != OMP_SINGLE);
12202 #endif
12204 /* Otherwise we're gimplifying a subexpression, so the resulting
12205 value is interesting. If it's a valid operand that matches
12206 GIMPLE_TEST_F, we're done. Unless we are handling some
12207 post-effects internally; if that's the case, we need to copy into
12208 a temporary before adding the post-effects to POST_P. */
12209 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12210 goto out;
12212 /* Otherwise, we need to create a new temporary for the gimplified
12213 expression. */
12215 /* We can't return an lvalue if we have an internal postqueue. The
12216 object the lvalue refers to would (probably) be modified by the
12217 postqueue; we need to copy the value out first, which means an
12218 rvalue. */
12219 if ((fallback & fb_lvalue)
12220 && gimple_seq_empty_p (internal_post)
12221 && is_gimple_addressable (*expr_p))
12223 /* An lvalue will do. Take the address of the expression, store it
12224 in a temporary, and replace the expression with an INDIRECT_REF of
12225 that temporary. */
12226 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12227 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12228 *expr_p = build_simple_mem_ref (tmp);
12230 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12232 /* An rvalue will do. Assign the gimplified expression into a
12233 new temporary TMP and replace the original expression with
12234 TMP. First, make sure that the expression has a type so that
12235 it can be assigned into a temporary. */
12236 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12237 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12239 else
12241 #ifdef ENABLE_GIMPLE_CHECKING
12242 if (!(fallback & fb_mayfail))
12244 fprintf (stderr, "gimplification failed:\n");
12245 print_generic_expr (stderr, *expr_p);
12246 debug_tree (*expr_p);
12247 internal_error ("gimplification failed");
12249 #endif
12250 gcc_assert (fallback & fb_mayfail);
12252 /* If this is an asm statement, and the user asked for the
12253 impossible, don't die. Fail and let gimplify_asm_expr
12254 issue an error. */
12255 ret = GS_ERROR;
12256 goto out;
12259 /* Make sure the temporary matches our predicate. */
12260 gcc_assert ((*gimple_test_f) (*expr_p));
12262 if (!gimple_seq_empty_p (internal_post))
12264 annotate_all_with_location (internal_post, input_location);
12265 gimplify_seq_add_seq (pre_p, internal_post);
12268 out:
12269 input_location = saved_location;
12270 return ret;
12273 /* Like gimplify_expr but make sure the gimplified result is not itself
12274 a SSA name (but a decl if it were). Temporaries required by
12275 evaluating *EXPR_P may be still SSA names. */
12277 static enum gimplify_status
12278 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12279 bool (*gimple_test_f) (tree), fallback_t fallback,
12280 bool allow_ssa)
12282 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12283 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12284 gimple_test_f, fallback);
12285 if (! allow_ssa
12286 && TREE_CODE (*expr_p) == SSA_NAME)
12288 tree name = *expr_p;
12289 if (was_ssa_name_p)
12290 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12291 else
12293 /* Avoid the extra copy if possible. */
12294 *expr_p = create_tmp_reg (TREE_TYPE (name));
12295 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12296 release_ssa_name (name);
12299 return ret;
12302 /* Look through TYPE for variable-sized objects and gimplify each such
12303 size that we find. Add to LIST_P any statements generated. */
12305 void
12306 gimplify_type_sizes (tree type, gimple_seq *list_p)
12308 tree field, t;
12310 if (type == NULL || type == error_mark_node)
12311 return;
12313 /* We first do the main variant, then copy into any other variants. */
12314 type = TYPE_MAIN_VARIANT (type);
12316 /* Avoid infinite recursion. */
12317 if (TYPE_SIZES_GIMPLIFIED (type))
12318 return;
12320 TYPE_SIZES_GIMPLIFIED (type) = 1;
12322 switch (TREE_CODE (type))
12324 case INTEGER_TYPE:
12325 case ENUMERAL_TYPE:
12326 case BOOLEAN_TYPE:
12327 case REAL_TYPE:
12328 case FIXED_POINT_TYPE:
12329 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12330 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12332 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12334 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12335 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12337 break;
12339 case ARRAY_TYPE:
12340 /* These types may not have declarations, so handle them here. */
12341 gimplify_type_sizes (TREE_TYPE (type), list_p);
12342 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12343 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12344 with assigned stack slots, for -O1+ -g they should be tracked
12345 by VTA. */
12346 if (!(TYPE_NAME (type)
12347 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12348 && DECL_IGNORED_P (TYPE_NAME (type)))
12349 && TYPE_DOMAIN (type)
12350 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12352 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12353 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12354 DECL_IGNORED_P (t) = 0;
12355 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12356 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12357 DECL_IGNORED_P (t) = 0;
12359 break;
12361 case RECORD_TYPE:
12362 case UNION_TYPE:
12363 case QUAL_UNION_TYPE:
12364 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12365 if (TREE_CODE (field) == FIELD_DECL)
12367 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12368 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12369 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12370 gimplify_type_sizes (TREE_TYPE (field), list_p);
12372 break;
12374 case POINTER_TYPE:
12375 case REFERENCE_TYPE:
12376 /* We used to recurse on the pointed-to type here, which turned out to
12377 be incorrect because its definition might refer to variables not
12378 yet initialized at this point if a forward declaration is involved.
12380 It was actually useful for anonymous pointed-to types to ensure
12381 that the sizes evaluation dominates every possible later use of the
12382 values. Restricting to such types here would be safe since there
12383 is no possible forward declaration around, but would introduce an
12384 undesirable middle-end semantic to anonymity. We then defer to
12385 front-ends the responsibility of ensuring that the sizes are
12386 evaluated both early and late enough, e.g. by attaching artificial
12387 type declarations to the tree. */
12388 break;
12390 default:
12391 break;
12394 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12395 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12397 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12399 TYPE_SIZE (t) = TYPE_SIZE (type);
12400 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12401 TYPE_SIZES_GIMPLIFIED (t) = 1;
12405 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12406 a size or position, has had all of its SAVE_EXPRs evaluated.
12407 We add any required statements to *STMT_P. */
12409 void
12410 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12412 tree expr = *expr_p;
12414 /* We don't do anything if the value isn't there, is constant, or contains
12415 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12416 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12417 will want to replace it with a new variable, but that will cause problems
12418 if this type is from outside the function. It's OK to have that here. */
12419 if (is_gimple_sizepos (expr))
12420 return;
12422 *expr_p = unshare_expr (expr);
12424 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12425 if the def vanishes. */
12426 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12429 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12430 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12431 is true, also gimplify the parameters. */
12433 gbind *
12434 gimplify_body (tree fndecl, bool do_parms)
12436 location_t saved_location = input_location;
12437 gimple_seq parm_stmts, seq;
12438 gimple *outer_stmt;
12439 gbind *outer_bind;
12440 struct cgraph_node *cgn;
12442 timevar_push (TV_TREE_GIMPLIFY);
12444 init_tree_ssa (cfun);
12446 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12447 gimplification. */
12448 default_rtl_profile ();
12450 gcc_assert (gimplify_ctxp == NULL);
12451 push_gimplify_context (true);
12453 if (flag_openacc || flag_openmp)
12455 gcc_assert (gimplify_omp_ctxp == NULL);
12456 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12457 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12460 /* Unshare most shared trees in the body and in that of any nested functions.
12461 It would seem we don't have to do this for nested functions because
12462 they are supposed to be output and then the outer function gimplified
12463 first, but the g++ front end doesn't always do it that way. */
12464 unshare_body (fndecl);
12465 unvisit_body (fndecl);
12467 cgn = cgraph_node::get (fndecl);
12468 if (cgn && cgn->origin)
12469 nonlocal_vlas = new hash_set<tree>;
12471 /* Make sure input_location isn't set to something weird. */
12472 input_location = DECL_SOURCE_LOCATION (fndecl);
12474 /* Resolve callee-copies. This has to be done before processing
12475 the body so that DECL_VALUE_EXPR gets processed correctly. */
12476 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12478 /* Gimplify the function's body. */
12479 seq = NULL;
12480 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12481 outer_stmt = gimple_seq_first_stmt (seq);
12482 if (!outer_stmt)
12484 outer_stmt = gimple_build_nop ();
12485 gimplify_seq_add_stmt (&seq, outer_stmt);
12488 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12489 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12490 if (gimple_code (outer_stmt) == GIMPLE_BIND
12491 && gimple_seq_first (seq) == gimple_seq_last (seq))
12492 outer_bind = as_a <gbind *> (outer_stmt);
12493 else
12494 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12496 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12498 /* If we had callee-copies statements, insert them at the beginning
12499 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12500 if (!gimple_seq_empty_p (parm_stmts))
12502 tree parm;
12504 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12505 gimple_bind_set_body (outer_bind, parm_stmts);
12507 for (parm = DECL_ARGUMENTS (current_function_decl);
12508 parm; parm = DECL_CHAIN (parm))
12509 if (DECL_HAS_VALUE_EXPR_P (parm))
12511 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12512 DECL_IGNORED_P (parm) = 0;
12516 if (nonlocal_vlas)
12518 if (nonlocal_vla_vars)
12520 /* tree-nested.c may later on call declare_vars (..., true);
12521 which relies on BLOCK_VARS chain to be the tail of the
12522 gimple_bind_vars chain. Ensure we don't violate that
12523 assumption. */
12524 if (gimple_bind_block (outer_bind)
12525 == DECL_INITIAL (current_function_decl))
12526 declare_vars (nonlocal_vla_vars, outer_bind, true);
12527 else
12528 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12529 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12530 nonlocal_vla_vars);
12531 nonlocal_vla_vars = NULL_TREE;
12533 delete nonlocal_vlas;
12534 nonlocal_vlas = NULL;
12537 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12538 && gimplify_omp_ctxp)
12540 delete_omp_context (gimplify_omp_ctxp);
12541 gimplify_omp_ctxp = NULL;
12544 pop_gimplify_context (outer_bind);
12545 gcc_assert (gimplify_ctxp == NULL);
12547 if (flag_checking && !seen_error ())
12548 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12550 timevar_pop (TV_TREE_GIMPLIFY);
12551 input_location = saved_location;
12553 return outer_bind;
12556 typedef char *char_p; /* For DEF_VEC_P. */
12558 /* Return whether we should exclude FNDECL from instrumentation. */
12560 static bool
12561 flag_instrument_functions_exclude_p (tree fndecl)
12563 vec<char_p> *v;
12565 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12566 if (v && v->length () > 0)
12568 const char *name;
12569 int i;
12570 char *s;
12572 name = lang_hooks.decl_printable_name (fndecl, 0);
12573 FOR_EACH_VEC_ELT (*v, i, s)
12574 if (strstr (name, s) != NULL)
12575 return true;
12578 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12579 if (v && v->length () > 0)
12581 const char *name;
12582 int i;
12583 char *s;
12585 name = DECL_SOURCE_FILE (fndecl);
12586 FOR_EACH_VEC_ELT (*v, i, s)
12587 if (strstr (name, s) != NULL)
12588 return true;
12591 return false;
12594 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12595 node for the function we want to gimplify.
12597 Return the sequence of GIMPLE statements corresponding to the body
12598 of FNDECL. */
12600 void
12601 gimplify_function_tree (tree fndecl)
12603 tree parm, ret;
12604 gimple_seq seq;
12605 gbind *bind;
12607 gcc_assert (!gimple_body (fndecl));
12609 if (DECL_STRUCT_FUNCTION (fndecl))
12610 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12611 else
12612 push_struct_function (fndecl);
12614 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12615 if necessary. */
12616 cfun->curr_properties |= PROP_gimple_lva;
12618 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12620 /* Preliminarily mark non-addressed complex variables as eligible
12621 for promotion to gimple registers. We'll transform their uses
12622 as we find them. */
12623 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12624 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12625 && !TREE_THIS_VOLATILE (parm)
12626 && !needs_to_live_in_memory (parm))
12627 DECL_GIMPLE_REG_P (parm) = 1;
12630 ret = DECL_RESULT (fndecl);
12631 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12632 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12633 && !needs_to_live_in_memory (ret))
12634 DECL_GIMPLE_REG_P (ret) = 1;
12636 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
12637 asan_poisoned_variables = new hash_set<tree> ();
12638 bind = gimplify_body (fndecl, true);
12639 if (asan_poisoned_variables)
12641 delete asan_poisoned_variables;
12642 asan_poisoned_variables = NULL;
12645 /* The tree body of the function is no longer needed, replace it
12646 with the new GIMPLE body. */
12647 seq = NULL;
12648 gimple_seq_add_stmt (&seq, bind);
12649 gimple_set_body (fndecl, seq);
12651 /* If we're instrumenting function entry/exit, then prepend the call to
12652 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12653 catch the exit hook. */
12654 /* ??? Add some way to ignore exceptions for this TFE. */
12655 if (flag_instrument_function_entry_exit
12656 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12657 /* Do not instrument extern inline functions. */
12658 && !(DECL_DECLARED_INLINE_P (fndecl)
12659 && DECL_EXTERNAL (fndecl)
12660 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12661 && !flag_instrument_functions_exclude_p (fndecl))
12663 tree x;
12664 gbind *new_bind;
12665 gimple *tf;
12666 gimple_seq cleanup = NULL, body = NULL;
12667 tree tmp_var;
12668 gcall *call;
12670 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12671 call = gimple_build_call (x, 1, integer_zero_node);
12672 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12673 gimple_call_set_lhs (call, tmp_var);
12674 gimplify_seq_add_stmt (&cleanup, call);
12675 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12676 call = gimple_build_call (x, 2,
12677 build_fold_addr_expr (current_function_decl),
12678 tmp_var);
12679 gimplify_seq_add_stmt (&cleanup, call);
12680 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12682 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12683 call = gimple_build_call (x, 1, integer_zero_node);
12684 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12685 gimple_call_set_lhs (call, tmp_var);
12686 gimplify_seq_add_stmt (&body, call);
12687 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12688 call = gimple_build_call (x, 2,
12689 build_fold_addr_expr (current_function_decl),
12690 tmp_var);
12691 gimplify_seq_add_stmt (&body, call);
12692 gimplify_seq_add_stmt (&body, tf);
12693 new_bind = gimple_build_bind (NULL, body, NULL);
12695 /* Replace the current function body with the body
12696 wrapped in the try/finally TF. */
12697 seq = NULL;
12698 gimple_seq_add_stmt (&seq, new_bind);
12699 gimple_set_body (fndecl, seq);
12700 bind = new_bind;
12703 if (sanitize_flags_p (SANITIZE_THREAD))
12705 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12706 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12707 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12708 /* Replace the current function body with the body
12709 wrapped in the try/finally TF. */
12710 seq = NULL;
12711 gimple_seq_add_stmt (&seq, new_bind);
12712 gimple_set_body (fndecl, seq);
12715 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12716 cfun->curr_properties |= PROP_gimple_any;
12718 pop_cfun ();
12720 dump_function (TDI_gimple, fndecl);
12723 /* Return a dummy expression of type TYPE in order to keep going after an
12724 error. */
12726 static tree
12727 dummy_object (tree type)
12729 tree t = build_int_cst (build_pointer_type (type), 0);
12730 return build2 (MEM_REF, type, t, t);
12733 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12734 builtin function, but a very special sort of operator. */
12736 enum gimplify_status
12737 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12738 gimple_seq *post_p ATTRIBUTE_UNUSED)
12740 tree promoted_type, have_va_type;
12741 tree valist = TREE_OPERAND (*expr_p, 0);
12742 tree type = TREE_TYPE (*expr_p);
12743 tree t, tag, aptag;
12744 location_t loc = EXPR_LOCATION (*expr_p);
12746 /* Verify that valist is of the proper type. */
12747 have_va_type = TREE_TYPE (valist);
12748 if (have_va_type == error_mark_node)
12749 return GS_ERROR;
12750 have_va_type = targetm.canonical_va_list_type (have_va_type);
12751 if (have_va_type == NULL_TREE
12752 && POINTER_TYPE_P (TREE_TYPE (valist)))
12753 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12754 have_va_type
12755 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12756 gcc_assert (have_va_type != NULL_TREE);
12758 /* Generate a diagnostic for requesting data of a type that cannot
12759 be passed through `...' due to type promotion at the call site. */
12760 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12761 != type)
12763 static bool gave_help;
12764 bool warned;
12765 /* Use the expansion point to handle cases such as passing bool (defined
12766 in a system header) through `...'. */
12767 source_location xloc
12768 = expansion_point_location_if_in_system_header (loc);
12770 /* Unfortunately, this is merely undefined, rather than a constraint
12771 violation, so we cannot make this an error. If this call is never
12772 executed, the program is still strictly conforming. */
12773 warned = warning_at (xloc, 0,
12774 "%qT is promoted to %qT when passed through %<...%>",
12775 type, promoted_type);
12776 if (!gave_help && warned)
12778 gave_help = true;
12779 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12780 promoted_type, type);
12783 /* We can, however, treat "undefined" any way we please.
12784 Call abort to encourage the user to fix the program. */
12785 if (warned)
12786 inform (xloc, "if this code is reached, the program will abort");
12787 /* Before the abort, allow the evaluation of the va_list
12788 expression to exit or longjmp. */
12789 gimplify_and_add (valist, pre_p);
12790 t = build_call_expr_loc (loc,
12791 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12792 gimplify_and_add (t, pre_p);
12794 /* This is dead code, but go ahead and finish so that the
12795 mode of the result comes out right. */
12796 *expr_p = dummy_object (type);
12797 return GS_ALL_DONE;
12800 tag = build_int_cst (build_pointer_type (type), 0);
12801 aptag = build_int_cst (TREE_TYPE (valist), 0);
12803 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12804 valist, tag, aptag);
12806 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12807 needs to be expanded. */
12808 cfun->curr_properties &= ~PROP_gimple_lva;
12810 return GS_OK;
12813 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12815 DST/SRC are the destination and source respectively. You can pass
12816 ungimplified trees in DST or SRC, in which case they will be
12817 converted to a gimple operand if necessary.
12819 This function returns the newly created GIMPLE_ASSIGN tuple. */
12821 gimple *
12822 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12824 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12825 gimplify_and_add (t, seq_p);
12826 ggc_free (t);
12827 return gimple_seq_last_stmt (*seq_p);
12830 inline hashval_t
12831 gimplify_hasher::hash (const elt_t *p)
12833 tree t = p->val;
12834 return iterative_hash_expr (t, 0);
12837 inline bool
12838 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12840 tree t1 = p1->val;
12841 tree t2 = p2->val;
12842 enum tree_code code = TREE_CODE (t1);
12844 if (TREE_CODE (t2) != code
12845 || TREE_TYPE (t1) != TREE_TYPE (t2))
12846 return false;
12848 if (!operand_equal_p (t1, t2, 0))
12849 return false;
12851 /* Only allow them to compare equal if they also hash equal; otherwise
12852 results are nondeterminate, and we fail bootstrap comparison. */
12853 gcc_checking_assert (hash (p1) == hash (p2));
12855 return true;