Small ChangeLog tweak.
[official-gcc.git] / gcc / gimplify.c
blob455a6993e15feda1fbb7a758341d22b5ed8cb70d
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-general.h"
55 #include "omp-low.h"
56 #include "gimple-low.h"
57 #include "cilk.h"
58 #include "gomp-constants.h"
59 #include "splay-tree.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
62 #include "builtins.h"
63 #include "asan.h"
64 #include "dbgcnt.h"
66 /* Hash set of poisoned variables in a bind expr. */
67 static hash_set<tree> *asan_poisoned_variables = NULL;
69 enum gimplify_omp_var_data
71 GOVD_SEEN = 1,
72 GOVD_EXPLICIT = 2,
73 GOVD_SHARED = 4,
74 GOVD_PRIVATE = 8,
75 GOVD_FIRSTPRIVATE = 16,
76 GOVD_LASTPRIVATE = 32,
77 GOVD_REDUCTION = 64,
78 GOVD_LOCAL = 128,
79 GOVD_MAP = 256,
80 GOVD_DEBUG_PRIVATE = 512,
81 GOVD_PRIVATE_OUTER_REF = 1024,
82 GOVD_LINEAR = 2048,
83 GOVD_ALIGNED = 4096,
85 /* Flag for GOVD_MAP: don't copy back. */
86 GOVD_MAP_TO_ONLY = 8192,
88 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
89 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
91 GOVD_MAP_0LEN_ARRAY = 32768,
93 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
94 GOVD_MAP_ALWAYS_TO = 65536,
96 /* Flag for shared vars that are or might be stored to in the region. */
97 GOVD_WRITTEN = 131072,
99 /* Flag for GOVD_MAP, if it is a forced mapping. */
100 GOVD_MAP_FORCE = 262144,
102 /* Flag for GOVD_MAP: must be present already. */
103 GOVD_MAP_FORCE_PRESENT = 524288,
105 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
106 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
107 | GOVD_LOCAL)
111 enum omp_region_type
113 ORT_WORKSHARE = 0x00,
114 ORT_SIMD = 0x01,
116 ORT_PARALLEL = 0x02,
117 ORT_COMBINED_PARALLEL = 0x03,
119 ORT_TASK = 0x04,
120 ORT_UNTIED_TASK = 0x05,
122 ORT_TEAMS = 0x08,
123 ORT_COMBINED_TEAMS = 0x09,
125 /* Data region. */
126 ORT_TARGET_DATA = 0x10,
128 /* Data region with offloading. */
129 ORT_TARGET = 0x20,
130 ORT_COMBINED_TARGET = 0x21,
132 /* OpenACC variants. */
133 ORT_ACC = 0x40, /* A generic OpenACC region. */
134 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
135 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
136 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
137 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
139 /* Dummy OpenMP region, used to disable expansion of
140 DECL_VALUE_EXPRs in taskloop pre body. */
141 ORT_NONE = 0x100
144 /* Gimplify hashtable helper. */
146 struct gimplify_hasher : free_ptr_hash <elt_t>
148 static inline hashval_t hash (const elt_t *);
149 static inline bool equal (const elt_t *, const elt_t *);
152 struct gimplify_ctx
154 struct gimplify_ctx *prev_context;
156 vec<gbind *> bind_expr_stack;
157 tree temps;
158 gimple_seq conditional_cleanups;
159 tree exit_label;
160 tree return_temp;
162 vec<tree> case_labels;
163 hash_set<tree> *live_switch_vars;
164 /* The formal temporary table. Should this be persistent? */
165 hash_table<gimplify_hasher> *temp_htab;
167 int conditions;
168 unsigned into_ssa : 1;
169 unsigned allow_rhs_cond_expr : 1;
170 unsigned in_cleanup_point_expr : 1;
171 unsigned keep_stack : 1;
172 unsigned save_stack : 1;
173 unsigned in_switch_expr : 1;
176 struct gimplify_omp_ctx
178 struct gimplify_omp_ctx *outer_context;
179 splay_tree variables;
180 hash_set<tree> *privatized_types;
181 /* Iteration variables in an OMP_FOR. */
182 vec<tree> loop_iter_var;
183 location_t location;
184 enum omp_clause_default_kind default_kind;
185 enum omp_region_type region_type;
186 bool combined_loop;
187 bool distribute;
188 bool target_map_scalars_firstprivate;
189 bool target_map_pointers_as_0len_arrays;
190 bool target_firstprivatize_array_bases;
193 static struct gimplify_ctx *gimplify_ctxp;
194 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
196 /* Forward declaration. */
197 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
198 static hash_map<tree, tree> *oacc_declare_returns;
199 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
200 bool (*) (tree), fallback_t, bool);
202 /* Shorter alias name for the above function for use in gimplify.c
203 only. */
205 static inline void
206 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
208 gimple_seq_add_stmt_without_update (seq_p, gs);
211 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
212 NULL, a new sequence is allocated. This function is
213 similar to gimple_seq_add_seq, but does not scan the operands.
214 During gimplification, we need to manipulate statement sequences
215 before the def/use vectors have been constructed. */
217 static void
218 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
220 gimple_stmt_iterator si;
222 if (src == NULL)
223 return;
225 si = gsi_last (*dst_p);
226 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
230 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
231 and popping gimplify contexts. */
233 static struct gimplify_ctx *ctx_pool = NULL;
235 /* Return a gimplify context struct from the pool. */
237 static inline struct gimplify_ctx *
238 ctx_alloc (void)
240 struct gimplify_ctx * c = ctx_pool;
242 if (c)
243 ctx_pool = c->prev_context;
244 else
245 c = XNEW (struct gimplify_ctx);
247 memset (c, '\0', sizeof (*c));
248 return c;
251 /* Put gimplify context C back into the pool. */
253 static inline void
254 ctx_free (struct gimplify_ctx *c)
256 c->prev_context = ctx_pool;
257 ctx_pool = c;
260 /* Free allocated ctx stack memory. */
262 void
263 free_gimplify_stack (void)
265 struct gimplify_ctx *c;
267 while ((c = ctx_pool))
269 ctx_pool = c->prev_context;
270 free (c);
275 /* Set up a context for the gimplifier. */
277 void
278 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
280 struct gimplify_ctx *c = ctx_alloc ();
282 c->prev_context = gimplify_ctxp;
283 gimplify_ctxp = c;
284 gimplify_ctxp->into_ssa = in_ssa;
285 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
288 /* Tear down a context for the gimplifier. If BODY is non-null, then
289 put the temporaries into the outer BIND_EXPR. Otherwise, put them
290 in the local_decls.
292 BODY is not a sequence, but the first tuple in a sequence. */
294 void
295 pop_gimplify_context (gimple *body)
297 struct gimplify_ctx *c = gimplify_ctxp;
299 gcc_assert (c
300 && (!c->bind_expr_stack.exists ()
301 || c->bind_expr_stack.is_empty ()));
302 c->bind_expr_stack.release ();
303 gimplify_ctxp = c->prev_context;
305 if (body)
306 declare_vars (c->temps, body, false);
307 else
308 record_vars (c->temps);
310 delete c->temp_htab;
311 c->temp_htab = NULL;
312 ctx_free (c);
315 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
317 static void
318 gimple_push_bind_expr (gbind *bind_stmt)
320 gimplify_ctxp->bind_expr_stack.reserve (8);
321 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
324 /* Pop the first element off the stack of bindings. */
326 static void
327 gimple_pop_bind_expr (void)
329 gimplify_ctxp->bind_expr_stack.pop ();
332 /* Return the first element of the stack of bindings. */
334 gbind *
335 gimple_current_bind_expr (void)
337 return gimplify_ctxp->bind_expr_stack.last ();
340 /* Return the stack of bindings created during gimplification. */
342 vec<gbind *>
343 gimple_bind_expr_stack (void)
345 return gimplify_ctxp->bind_expr_stack;
348 /* Return true iff there is a COND_EXPR between us and the innermost
349 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
351 static bool
352 gimple_conditional_context (void)
354 return gimplify_ctxp->conditions > 0;
357 /* Note that we've entered a COND_EXPR. */
359 static void
360 gimple_push_condition (void)
362 #ifdef ENABLE_GIMPLE_CHECKING
363 if (gimplify_ctxp->conditions == 0)
364 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
365 #endif
366 ++(gimplify_ctxp->conditions);
369 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
370 now, add any conditional cleanups we've seen to the prequeue. */
372 static void
373 gimple_pop_condition (gimple_seq *pre_p)
375 int conds = --(gimplify_ctxp->conditions);
377 gcc_assert (conds >= 0);
378 if (conds == 0)
380 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
381 gimplify_ctxp->conditional_cleanups = NULL;
385 /* A stable comparison routine for use with splay trees and DECLs. */
387 static int
388 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
390 tree a = (tree) xa;
391 tree b = (tree) xb;
393 return DECL_UID (a) - DECL_UID (b);
396 /* Create a new omp construct that deals with variable remapping. */
398 static struct gimplify_omp_ctx *
399 new_omp_context (enum omp_region_type region_type)
401 struct gimplify_omp_ctx *c;
403 c = XCNEW (struct gimplify_omp_ctx);
404 c->outer_context = gimplify_omp_ctxp;
405 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
406 c->privatized_types = new hash_set<tree>;
407 c->location = input_location;
408 c->region_type = region_type;
409 if ((region_type & ORT_TASK) == 0)
410 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
411 else
412 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
414 return c;
417 /* Destroy an omp construct that deals with variable remapping. */
419 static void
420 delete_omp_context (struct gimplify_omp_ctx *c)
422 splay_tree_delete (c->variables);
423 delete c->privatized_types;
424 c->loop_iter_var.release ();
425 XDELETE (c);
428 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
429 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
431 /* Both gimplify the statement T and append it to *SEQ_P. This function
432 behaves exactly as gimplify_stmt, but you don't have to pass T as a
433 reference. */
435 void
436 gimplify_and_add (tree t, gimple_seq *seq_p)
438 gimplify_stmt (&t, seq_p);
441 /* Gimplify statement T into sequence *SEQ_P, and return the first
442 tuple in the sequence of generated tuples for this statement.
443 Return NULL if gimplifying T produced no tuples. */
445 static gimple *
446 gimplify_and_return_first (tree t, gimple_seq *seq_p)
448 gimple_stmt_iterator last = gsi_last (*seq_p);
450 gimplify_and_add (t, seq_p);
452 if (!gsi_end_p (last))
454 gsi_next (&last);
455 return gsi_stmt (last);
457 else
458 return gimple_seq_first_stmt (*seq_p);
461 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
462 LHS, or for a call argument. */
464 static bool
465 is_gimple_mem_rhs (tree t)
467 /* If we're dealing with a renamable type, either source or dest must be
468 a renamed variable. */
469 if (is_gimple_reg_type (TREE_TYPE (t)))
470 return is_gimple_val (t);
471 else
472 return is_gimple_val (t) || is_gimple_lvalue (t);
475 /* Return true if T is a CALL_EXPR or an expression that can be
476 assigned to a temporary. Note that this predicate should only be
477 used during gimplification. See the rationale for this in
478 gimplify_modify_expr. */
480 static bool
481 is_gimple_reg_rhs_or_call (tree t)
483 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
484 || TREE_CODE (t) == CALL_EXPR);
487 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
488 this predicate should only be used during gimplification. See the
489 rationale for this in gimplify_modify_expr. */
491 static bool
492 is_gimple_mem_rhs_or_call (tree t)
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return (is_gimple_val (t)
500 || is_gimple_lvalue (t)
501 || TREE_CLOBBER_P (t)
502 || TREE_CODE (t) == CALL_EXPR);
505 /* Create a temporary with a name derived from VAL. Subroutine of
506 lookup_tmp_var; nobody else should call this function. */
508 static inline tree
509 create_tmp_from_val (tree val)
511 /* Drop all qualifiers and address-space information from the value type. */
512 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
513 tree var = create_tmp_var (type, get_name (val));
514 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
515 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
516 DECL_GIMPLE_REG_P (var) = 1;
517 return var;
520 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
521 an existing expression temporary. */
523 static tree
524 lookup_tmp_var (tree val, bool is_formal)
526 tree ret;
528 /* If not optimizing, never really reuse a temporary. local-alloc
529 won't allocate any variable that is used in more than one basic
530 block, which means it will go into memory, causing much extra
531 work in reload and final and poorer code generation, outweighing
532 the extra memory allocation here. */
533 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
534 ret = create_tmp_from_val (val);
535 else
537 elt_t elt, *elt_p;
538 elt_t **slot;
540 elt.val = val;
541 if (!gimplify_ctxp->temp_htab)
542 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
543 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
544 if (*slot == NULL)
546 elt_p = XNEW (elt_t);
547 elt_p->val = val;
548 elt_p->temp = ret = create_tmp_from_val (val);
549 *slot = elt_p;
551 else
553 elt_p = *slot;
554 ret = elt_p->temp;
558 return ret;
561 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
563 static tree
564 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
565 bool is_formal, bool allow_ssa)
567 tree t, mod;
569 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
570 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
571 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
572 fb_rvalue);
574 if (allow_ssa
575 && gimplify_ctxp->into_ssa
576 && is_gimple_reg_type (TREE_TYPE (val)))
578 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
579 if (! gimple_in_ssa_p (cfun))
581 const char *name = get_name (val);
582 if (name)
583 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
586 else
587 t = lookup_tmp_var (val, is_formal);
589 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
591 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
593 /* gimplify_modify_expr might want to reduce this further. */
594 gimplify_and_add (mod, pre_p);
595 ggc_free (mod);
597 return t;
600 /* Return a formal temporary variable initialized with VAL. PRE_P is as
601 in gimplify_expr. Only use this function if:
603 1) The value of the unfactored expression represented by VAL will not
604 change between the initialization and use of the temporary, and
605 2) The temporary will not be otherwise modified.
607 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
608 and #2 means it is inappropriate for && temps.
610 For other cases, use get_initialized_tmp_var instead. */
612 tree
613 get_formal_tmp_var (tree val, gimple_seq *pre_p)
615 return internal_get_tmp_var (val, pre_p, NULL, true, true);
618 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
619 are as in gimplify_expr. */
621 tree
622 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
623 bool allow_ssa)
625 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
628 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
629 generate debug info for them; otherwise don't. */
631 void
632 declare_vars (tree vars, gimple *gs, bool debug_info)
634 tree last = vars;
635 if (last)
637 tree temps, block;
639 gbind *scope = as_a <gbind *> (gs);
641 temps = nreverse (last);
643 block = gimple_bind_block (scope);
644 gcc_assert (!block || TREE_CODE (block) == BLOCK);
645 if (!block || !debug_info)
647 DECL_CHAIN (last) = gimple_bind_vars (scope);
648 gimple_bind_set_vars (scope, temps);
650 else
652 /* We need to attach the nodes both to the BIND_EXPR and to its
653 associated BLOCK for debugging purposes. The key point here
654 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
655 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
656 if (BLOCK_VARS (block))
657 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
658 else
660 gimple_bind_set_vars (scope,
661 chainon (gimple_bind_vars (scope), temps));
662 BLOCK_VARS (block) = temps;
668 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
669 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
670 no such upper bound can be obtained. */
672 static void
673 force_constant_size (tree var)
675 /* The only attempt we make is by querying the maximum size of objects
676 of the variable's type. */
678 HOST_WIDE_INT max_size;
680 gcc_assert (VAR_P (var));
682 max_size = max_int_size_in_bytes (TREE_TYPE (var));
684 gcc_assert (max_size >= 0);
686 DECL_SIZE_UNIT (var)
687 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
688 DECL_SIZE (var)
689 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
692 /* Push the temporary variable TMP into the current binding. */
694 void
695 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
697 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
699 /* Later processing assumes that the object size is constant, which might
700 not be true at this point. Force the use of a constant upper bound in
701 this case. */
702 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
703 force_constant_size (tmp);
705 DECL_CONTEXT (tmp) = fn->decl;
706 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
708 record_vars_into (tmp, fn->decl);
711 /* Push the temporary variable TMP into the current binding. */
713 void
714 gimple_add_tmp_var (tree tmp)
716 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
718 /* Later processing assumes that the object size is constant, which might
719 not be true at this point. Force the use of a constant upper bound in
720 this case. */
721 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
722 force_constant_size (tmp);
724 DECL_CONTEXT (tmp) = current_function_decl;
725 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
727 if (gimplify_ctxp)
729 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
730 gimplify_ctxp->temps = tmp;
732 /* Mark temporaries local within the nearest enclosing parallel. */
733 if (gimplify_omp_ctxp)
735 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
736 while (ctx
737 && (ctx->region_type == ORT_WORKSHARE
738 || ctx->region_type == ORT_SIMD
739 || ctx->region_type == ORT_ACC))
740 ctx = ctx->outer_context;
741 if (ctx)
742 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
745 else if (cfun)
746 record_vars (tmp);
747 else
749 gimple_seq body_seq;
751 /* This case is for nested functions. We need to expose the locals
752 they create. */
753 body_seq = gimple_body (current_function_decl);
754 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
760 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
761 nodes that are referenced more than once in GENERIC functions. This is
762 necessary because gimplification (translation into GIMPLE) is performed
763 by modifying tree nodes in-place, so gimplication of a shared node in a
764 first context could generate an invalid GIMPLE form in a second context.
766 This is achieved with a simple mark/copy/unmark algorithm that walks the
767 GENERIC representation top-down, marks nodes with TREE_VISITED the first
768 time it encounters them, duplicates them if they already have TREE_VISITED
769 set, and finally removes the TREE_VISITED marks it has set.
771 The algorithm works only at the function level, i.e. it generates a GENERIC
772 representation of a function with no nodes shared within the function when
773 passed a GENERIC function (except for nodes that are allowed to be shared).
775 At the global level, it is also necessary to unshare tree nodes that are
776 referenced in more than one function, for the same aforementioned reason.
777 This requires some cooperation from the front-end. There are 2 strategies:
779 1. Manual unsharing. The front-end needs to call unshare_expr on every
780 expression that might end up being shared across functions.
782 2. Deep unsharing. This is an extension of regular unsharing. Instead
783 of calling unshare_expr on expressions that might be shared across
784 functions, the front-end pre-marks them with TREE_VISITED. This will
785 ensure that they are unshared on the first reference within functions
786 when the regular unsharing algorithm runs. The counterpart is that
787 this algorithm must look deeper than for manual unsharing, which is
788 specified by LANG_HOOKS_DEEP_UNSHARING.
790 If there are only few specific cases of node sharing across functions, it is
791 probably easier for a front-end to unshare the expressions manually. On the
792 contrary, if the expressions generated at the global level are as widespread
793 as expressions generated within functions, deep unsharing is very likely the
794 way to go. */
796 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
797 These nodes model computations that must be done once. If we were to
798 unshare something like SAVE_EXPR(i++), the gimplification process would
799 create wrong code. However, if DATA is non-null, it must hold a pointer
800 set that is used to unshare the subtrees of these nodes. */
802 static tree
803 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
805 tree t = *tp;
806 enum tree_code code = TREE_CODE (t);
808 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
809 copy their subtrees if we can make sure to do it only once. */
810 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
812 if (data && !((hash_set<tree> *)data)->add (t))
814 else
815 *walk_subtrees = 0;
818 /* Stop at types, decls, constants like copy_tree_r. */
819 else if (TREE_CODE_CLASS (code) == tcc_type
820 || TREE_CODE_CLASS (code) == tcc_declaration
821 || TREE_CODE_CLASS (code) == tcc_constant
822 /* We can't do anything sensible with a BLOCK used as an
823 expression, but we also can't just die when we see it
824 because of non-expression uses. So we avert our eyes
825 and cross our fingers. Silly Java. */
826 || code == BLOCK)
827 *walk_subtrees = 0;
829 /* Cope with the statement expression extension. */
830 else if (code == STATEMENT_LIST)
833 /* Leave the bulk of the work to copy_tree_r itself. */
834 else
835 copy_tree_r (tp, walk_subtrees, NULL);
837 return NULL_TREE;
840 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
841 If *TP has been visited already, then *TP is deeply copied by calling
842 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
844 static tree
845 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
847 tree t = *tp;
848 enum tree_code code = TREE_CODE (t);
850 /* Skip types, decls, and constants. But we do want to look at their
851 types and the bounds of types. Mark them as visited so we properly
852 unmark their subtrees on the unmark pass. If we've already seen them,
853 don't look down further. */
854 if (TREE_CODE_CLASS (code) == tcc_type
855 || TREE_CODE_CLASS (code) == tcc_declaration
856 || TREE_CODE_CLASS (code) == tcc_constant)
858 if (TREE_VISITED (t))
859 *walk_subtrees = 0;
860 else
861 TREE_VISITED (t) = 1;
864 /* If this node has been visited already, unshare it and don't look
865 any deeper. */
866 else if (TREE_VISITED (t))
868 walk_tree (tp, mostly_copy_tree_r, data, NULL);
869 *walk_subtrees = 0;
872 /* Otherwise, mark the node as visited and keep looking. */
873 else
874 TREE_VISITED (t) = 1;
876 return NULL_TREE;
879 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
880 copy_if_shared_r callback unmodified. */
882 static inline void
883 copy_if_shared (tree *tp, void *data)
885 walk_tree (tp, copy_if_shared_r, data, NULL);
888 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
889 any nested functions. */
891 static void
892 unshare_body (tree fndecl)
894 struct cgraph_node *cgn = cgraph_node::get (fndecl);
895 /* If the language requires deep unsharing, we need a pointer set to make
896 sure we don't repeatedly unshare subtrees of unshareable nodes. */
897 hash_set<tree> *visited
898 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
900 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
901 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
902 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
904 delete visited;
906 if (cgn)
907 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
908 unshare_body (cgn->decl);
911 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
912 Subtrees are walked until the first unvisited node is encountered. */
914 static tree
915 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
917 tree t = *tp;
919 /* If this node has been visited, unmark it and keep looking. */
920 if (TREE_VISITED (t))
921 TREE_VISITED (t) = 0;
923 /* Otherwise, don't look any deeper. */
924 else
925 *walk_subtrees = 0;
927 return NULL_TREE;
930 /* Unmark the visited trees rooted at *TP. */
932 static inline void
933 unmark_visited (tree *tp)
935 walk_tree (tp, unmark_visited_r, NULL, NULL);
938 /* Likewise, but mark all trees as not visited. */
940 static void
941 unvisit_body (tree fndecl)
943 struct cgraph_node *cgn = cgraph_node::get (fndecl);
945 unmark_visited (&DECL_SAVED_TREE (fndecl));
946 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
947 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
949 if (cgn)
950 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
951 unvisit_body (cgn->decl);
954 /* Unconditionally make an unshared copy of EXPR. This is used when using
955 stored expressions which span multiple functions, such as BINFO_VTABLE,
956 as the normal unsharing process can't tell that they're shared. */
958 tree
959 unshare_expr (tree expr)
961 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
962 return expr;
965 /* Worker for unshare_expr_without_location. */
967 static tree
968 prune_expr_location (tree *tp, int *walk_subtrees, void *)
970 if (EXPR_P (*tp))
971 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
972 else
973 *walk_subtrees = 0;
974 return NULL_TREE;
977 /* Similar to unshare_expr but also prune all expression locations
978 from EXPR. */
980 tree
981 unshare_expr_without_location (tree expr)
983 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
984 if (EXPR_P (expr))
985 walk_tree (&expr, prune_expr_location, NULL, NULL);
986 return expr;
989 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
990 contain statements and have a value. Assign its value to a temporary
991 and give it void_type_node. Return the temporary, or NULL_TREE if
992 WRAPPER was already void. */
994 tree
995 voidify_wrapper_expr (tree wrapper, tree temp)
997 tree type = TREE_TYPE (wrapper);
998 if (type && !VOID_TYPE_P (type))
1000 tree *p;
1002 /* Set p to point to the body of the wrapper. Loop until we find
1003 something that isn't a wrapper. */
1004 for (p = &wrapper; p && *p; )
1006 switch (TREE_CODE (*p))
1008 case BIND_EXPR:
1009 TREE_SIDE_EFFECTS (*p) = 1;
1010 TREE_TYPE (*p) = void_type_node;
1011 /* For a BIND_EXPR, the body is operand 1. */
1012 p = &BIND_EXPR_BODY (*p);
1013 break;
1015 case CLEANUP_POINT_EXPR:
1016 case TRY_FINALLY_EXPR:
1017 case TRY_CATCH_EXPR:
1018 TREE_SIDE_EFFECTS (*p) = 1;
1019 TREE_TYPE (*p) = void_type_node;
1020 p = &TREE_OPERAND (*p, 0);
1021 break;
1023 case STATEMENT_LIST:
1025 tree_stmt_iterator i = tsi_last (*p);
1026 TREE_SIDE_EFFECTS (*p) = 1;
1027 TREE_TYPE (*p) = void_type_node;
1028 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1030 break;
1032 case COMPOUND_EXPR:
1033 /* Advance to the last statement. Set all container types to
1034 void. */
1035 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1037 TREE_SIDE_EFFECTS (*p) = 1;
1038 TREE_TYPE (*p) = void_type_node;
1040 break;
1042 case TRANSACTION_EXPR:
1043 TREE_SIDE_EFFECTS (*p) = 1;
1044 TREE_TYPE (*p) = void_type_node;
1045 p = &TRANSACTION_EXPR_BODY (*p);
1046 break;
1048 default:
1049 /* Assume that any tree upon which voidify_wrapper_expr is
1050 directly called is a wrapper, and that its body is op0. */
1051 if (p == &wrapper)
1053 TREE_SIDE_EFFECTS (*p) = 1;
1054 TREE_TYPE (*p) = void_type_node;
1055 p = &TREE_OPERAND (*p, 0);
1056 break;
1058 goto out;
1062 out:
1063 if (p == NULL || IS_EMPTY_STMT (*p))
1064 temp = NULL_TREE;
1065 else if (temp)
1067 /* The wrapper is on the RHS of an assignment that we're pushing
1068 down. */
1069 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1070 || TREE_CODE (temp) == MODIFY_EXPR);
1071 TREE_OPERAND (temp, 1) = *p;
1072 *p = temp;
1074 else
1076 temp = create_tmp_var (type, "retval");
1077 *p = build2 (INIT_EXPR, type, temp, *p);
1080 return temp;
1083 return NULL_TREE;
1086 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1087 a temporary through which they communicate. */
1089 static void
1090 build_stack_save_restore (gcall **save, gcall **restore)
1092 tree tmp_var;
1094 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1095 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1096 gimple_call_set_lhs (*save, tmp_var);
1098 *restore
1099 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1100 1, tmp_var);
1103 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1105 static tree
1106 build_asan_poison_call_expr (tree decl)
1108 /* Do not poison variables that have size equal to zero. */
1109 tree unit_size = DECL_SIZE_UNIT (decl);
1110 if (zerop (unit_size))
1111 return NULL_TREE;
1113 tree base = build_fold_addr_expr (decl);
1115 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1116 void_type_node, 3,
1117 build_int_cst (integer_type_node,
1118 ASAN_MARK_POISON),
1119 base, unit_size);
1122 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1123 on POISON flag, shadow memory of a DECL variable. The call will be
1124 put on location identified by IT iterator, where BEFORE flag drives
1125 position where the stmt will be put. */
1127 static void
1128 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1129 bool before)
1131 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1132 if (gimplify_omp_ctxp)
1133 return;
1135 tree unit_size = DECL_SIZE_UNIT (decl);
1136 tree base = build_fold_addr_expr (decl);
1138 /* Do not poison variables that have size equal to zero. */
1139 if (zerop (unit_size))
1140 return;
1142 /* It's necessary to have all stack variables aligned to ASAN granularity
1143 bytes. */
1144 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1145 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1147 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1149 gimple *g
1150 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1151 build_int_cst (integer_type_node, flags),
1152 base, unit_size);
1154 if (before)
1155 gsi_insert_before (it, g, GSI_NEW_STMT);
1156 else
1157 gsi_insert_after (it, g, GSI_NEW_STMT);
1160 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1161 either poisons or unpoisons a DECL. Created statement is appended
1162 to SEQ_P gimple sequence. */
1164 static void
1165 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1167 gimple_stmt_iterator it = gsi_last (*seq_p);
1168 bool before = false;
1170 if (gsi_end_p (it))
1171 before = true;
1173 asan_poison_variable (decl, poison, &it, before);
1176 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1178 static int
1179 sort_by_decl_uid (const void *a, const void *b)
1181 const tree *t1 = (const tree *)a;
1182 const tree *t2 = (const tree *)b;
1184 int uid1 = DECL_UID (*t1);
1185 int uid2 = DECL_UID (*t2);
1187 if (uid1 < uid2)
1188 return -1;
1189 else if (uid1 > uid2)
1190 return 1;
1191 else
1192 return 0;
1195 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1196 depending on POISON flag. Created statement is appended
1197 to SEQ_P gimple sequence. */
1199 static void
1200 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1202 unsigned c = variables->elements ();
1203 if (c == 0)
1204 return;
1206 auto_vec<tree> sorted_variables (c);
1208 for (hash_set<tree>::iterator it = variables->begin ();
1209 it != variables->end (); ++it)
1210 sorted_variables.safe_push (*it);
1212 sorted_variables.qsort (sort_by_decl_uid);
1214 unsigned i;
1215 tree var;
1216 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1218 asan_poison_variable (var, poison, seq_p);
1220 /* Add use_after_scope_memory attribute for the variable in order
1221 to prevent re-written into SSA. */
1222 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1223 DECL_ATTRIBUTES (var)))
1224 DECL_ATTRIBUTES (var)
1225 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1226 integer_one_node,
1227 DECL_ATTRIBUTES (var));
1231 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1233 static enum gimplify_status
1234 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1236 tree bind_expr = *expr_p;
1237 bool old_keep_stack = gimplify_ctxp->keep_stack;
1238 bool old_save_stack = gimplify_ctxp->save_stack;
1239 tree t;
1240 gbind *bind_stmt;
1241 gimple_seq body, cleanup;
1242 gcall *stack_save;
1243 location_t start_locus = 0, end_locus = 0;
1244 tree ret_clauses = NULL;
1246 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1248 /* Mark variables seen in this bind expr. */
1249 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1251 if (VAR_P (t))
1253 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1255 /* Mark variable as local. */
1256 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1257 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1258 || splay_tree_lookup (ctx->variables,
1259 (splay_tree_key) t) == NULL))
1261 if (ctx->region_type == ORT_SIMD
1262 && TREE_ADDRESSABLE (t)
1263 && !TREE_STATIC (t))
1264 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1265 else
1266 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1269 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1271 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1272 cfun->has_local_explicit_reg_vars = true;
1275 /* Preliminarily mark non-addressed complex variables as eligible
1276 for promotion to gimple registers. We'll transform their uses
1277 as we find them. */
1278 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1279 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1280 && !TREE_THIS_VOLATILE (t)
1281 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1282 && !needs_to_live_in_memory (t))
1283 DECL_GIMPLE_REG_P (t) = 1;
1286 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1287 BIND_EXPR_BLOCK (bind_expr));
1288 gimple_push_bind_expr (bind_stmt);
1290 gimplify_ctxp->keep_stack = false;
1291 gimplify_ctxp->save_stack = false;
1293 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1294 body = NULL;
1295 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1296 gimple_bind_set_body (bind_stmt, body);
1298 /* Source location wise, the cleanup code (stack_restore and clobbers)
1299 belongs to the end of the block, so propagate what we have. The
1300 stack_save operation belongs to the beginning of block, which we can
1301 infer from the bind_expr directly if the block has no explicit
1302 assignment. */
1303 if (BIND_EXPR_BLOCK (bind_expr))
1305 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1306 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1308 if (start_locus == 0)
1309 start_locus = EXPR_LOCATION (bind_expr);
1311 cleanup = NULL;
1312 stack_save = NULL;
1314 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1315 the stack space allocated to the VLAs. */
1316 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1318 gcall *stack_restore;
1320 /* Save stack on entry and restore it on exit. Add a try_finally
1321 block to achieve this. */
1322 build_stack_save_restore (&stack_save, &stack_restore);
1324 gimple_set_location (stack_save, start_locus);
1325 gimple_set_location (stack_restore, end_locus);
1327 gimplify_seq_add_stmt (&cleanup, stack_restore);
1330 /* Add clobbers for all variables that go out of scope. */
1331 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1333 if (VAR_P (t)
1334 && !is_global_var (t)
1335 && DECL_CONTEXT (t) == current_function_decl)
1337 if (!DECL_HARD_REGISTER (t)
1338 && !TREE_THIS_VOLATILE (t)
1339 && !DECL_HAS_VALUE_EXPR_P (t)
1340 /* Only care for variables that have to be in memory. Others
1341 will be rewritten into SSA names, hence moved to the
1342 top-level. */
1343 && !is_gimple_reg (t)
1344 && flag_stack_reuse != SR_NONE)
1346 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1347 gimple *clobber_stmt;
1348 TREE_THIS_VOLATILE (clobber) = 1;
1349 clobber_stmt = gimple_build_assign (t, clobber);
1350 gimple_set_location (clobber_stmt, end_locus);
1351 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1354 if (flag_openacc && oacc_declare_returns != NULL)
1356 tree *c = oacc_declare_returns->get (t);
1357 if (c != NULL)
1359 if (ret_clauses)
1360 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1362 ret_clauses = *c;
1364 oacc_declare_returns->remove (t);
1366 if (oacc_declare_returns->elements () == 0)
1368 delete oacc_declare_returns;
1369 oacc_declare_returns = NULL;
1375 if (asan_poisoned_variables != NULL
1376 && asan_poisoned_variables->contains (t))
1378 asan_poisoned_variables->remove (t);
1379 asan_poison_variable (t, true, &cleanup);
1382 if (gimplify_ctxp->live_switch_vars != NULL
1383 && gimplify_ctxp->live_switch_vars->contains (t))
1384 gimplify_ctxp->live_switch_vars->remove (t);
1387 if (ret_clauses)
1389 gomp_target *stmt;
1390 gimple_stmt_iterator si = gsi_start (cleanup);
1392 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1393 ret_clauses);
1394 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1397 if (cleanup)
1399 gtry *gs;
1400 gimple_seq new_body;
1402 new_body = NULL;
1403 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1404 GIMPLE_TRY_FINALLY);
1406 if (stack_save)
1407 gimplify_seq_add_stmt (&new_body, stack_save);
1408 gimplify_seq_add_stmt (&new_body, gs);
1409 gimple_bind_set_body (bind_stmt, new_body);
1412 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1413 if (!gimplify_ctxp->keep_stack)
1414 gimplify_ctxp->keep_stack = old_keep_stack;
1415 gimplify_ctxp->save_stack = old_save_stack;
1417 gimple_pop_bind_expr ();
1419 gimplify_seq_add_stmt (pre_p, bind_stmt);
1421 if (temp)
1423 *expr_p = temp;
1424 return GS_OK;
1427 *expr_p = NULL_TREE;
1428 return GS_ALL_DONE;
1431 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1432 GIMPLE value, it is assigned to a new temporary and the statement is
1433 re-written to return the temporary.
1435 PRE_P points to the sequence where side effects that must happen before
1436 STMT should be stored. */
1438 static enum gimplify_status
1439 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1441 greturn *ret;
1442 tree ret_expr = TREE_OPERAND (stmt, 0);
1443 tree result_decl, result;
1445 if (ret_expr == error_mark_node)
1446 return GS_ERROR;
1448 /* Implicit _Cilk_sync must be inserted right before any return statement
1449 if there is a _Cilk_spawn in the function. If the user has provided a
1450 _Cilk_sync, the optimizer should remove this duplicate one. */
1451 if (fn_contains_cilk_spawn_p (cfun))
1453 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1454 gimplify_and_add (impl_sync, pre_p);
1457 if (!ret_expr
1458 || TREE_CODE (ret_expr) == RESULT_DECL
1459 || ret_expr == error_mark_node)
1461 greturn *ret = gimple_build_return (ret_expr);
1462 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1463 gimplify_seq_add_stmt (pre_p, ret);
1464 return GS_ALL_DONE;
1467 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1468 result_decl = NULL_TREE;
1469 else
1471 result_decl = TREE_OPERAND (ret_expr, 0);
1473 /* See through a return by reference. */
1474 if (TREE_CODE (result_decl) == INDIRECT_REF)
1475 result_decl = TREE_OPERAND (result_decl, 0);
1477 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1478 || TREE_CODE (ret_expr) == INIT_EXPR)
1479 && TREE_CODE (result_decl) == RESULT_DECL);
1482 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1483 Recall that aggregate_value_p is FALSE for any aggregate type that is
1484 returned in registers. If we're returning values in registers, then
1485 we don't want to extend the lifetime of the RESULT_DECL, particularly
1486 across another call. In addition, for those aggregates for which
1487 hard_function_value generates a PARALLEL, we'll die during normal
1488 expansion of structure assignments; there's special code in expand_return
1489 to handle this case that does not exist in expand_expr. */
1490 if (!result_decl)
1491 result = NULL_TREE;
1492 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1494 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1496 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1497 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1498 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1499 should be effectively allocated by the caller, i.e. all calls to
1500 this function must be subject to the Return Slot Optimization. */
1501 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1502 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1504 result = result_decl;
1506 else if (gimplify_ctxp->return_temp)
1507 result = gimplify_ctxp->return_temp;
1508 else
1510 result = create_tmp_reg (TREE_TYPE (result_decl));
1512 /* ??? With complex control flow (usually involving abnormal edges),
1513 we can wind up warning about an uninitialized value for this. Due
1514 to how this variable is constructed and initialized, this is never
1515 true. Give up and never warn. */
1516 TREE_NO_WARNING (result) = 1;
1518 gimplify_ctxp->return_temp = result;
1521 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1522 Then gimplify the whole thing. */
1523 if (result != result_decl)
1524 TREE_OPERAND (ret_expr, 0) = result;
1526 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1528 ret = gimple_build_return (result);
1529 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1530 gimplify_seq_add_stmt (pre_p, ret);
1532 return GS_ALL_DONE;
1535 /* Gimplify a variable-length array DECL. */
1537 static void
1538 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1540 /* This is a variable-sized decl. Simplify its size and mark it
1541 for deferred expansion. */
1542 tree t, addr, ptr_type;
1544 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1545 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1547 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1548 if (DECL_HAS_VALUE_EXPR_P (decl))
1549 return;
1551 /* All occurrences of this decl in final gimplified code will be
1552 replaced by indirection. Setting DECL_VALUE_EXPR does two
1553 things: First, it lets the rest of the gimplifier know what
1554 replacement to use. Second, it lets the debug info know
1555 where to find the value. */
1556 ptr_type = build_pointer_type (TREE_TYPE (decl));
1557 addr = create_tmp_var (ptr_type, get_name (decl));
1558 DECL_IGNORED_P (addr) = 0;
1559 t = build_fold_indirect_ref (addr);
1560 TREE_THIS_NOTRAP (t) = 1;
1561 SET_DECL_VALUE_EXPR (decl, t);
1562 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1564 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1565 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1566 size_int (DECL_ALIGN (decl)));
1567 /* The call has been built for a variable-sized object. */
1568 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1569 t = fold_convert (ptr_type, t);
1570 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1572 gimplify_and_add (t, seq_p);
1575 /* A helper function to be called via walk_tree. Mark all labels under *TP
1576 as being forced. To be called for DECL_INITIAL of static variables. */
1578 static tree
1579 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1581 if (TYPE_P (*tp))
1582 *walk_subtrees = 0;
1583 if (TREE_CODE (*tp) == LABEL_DECL)
1585 FORCED_LABEL (*tp) = 1;
1586 cfun->has_forced_label_in_static = 1;
1589 return NULL_TREE;
1592 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1593 and initialization explicit. */
1595 static enum gimplify_status
1596 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1598 tree stmt = *stmt_p;
1599 tree decl = DECL_EXPR_DECL (stmt);
1601 *stmt_p = NULL_TREE;
1603 if (TREE_TYPE (decl) == error_mark_node)
1604 return GS_ERROR;
1606 if ((TREE_CODE (decl) == TYPE_DECL
1607 || VAR_P (decl))
1608 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1610 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1611 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1612 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1615 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1616 in case its size expressions contain problematic nodes like CALL_EXPR. */
1617 if (TREE_CODE (decl) == TYPE_DECL
1618 && DECL_ORIGINAL_TYPE (decl)
1619 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1621 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1622 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1623 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1626 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1628 tree init = DECL_INITIAL (decl);
1629 bool is_vla = false;
1631 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1632 || (!TREE_STATIC (decl)
1633 && flag_stack_check == GENERIC_STACK_CHECK
1634 && compare_tree_int (DECL_SIZE_UNIT (decl),
1635 STACK_CHECK_MAX_VAR_SIZE) > 0))
1637 gimplify_vla_decl (decl, seq_p);
1638 is_vla = true;
1641 if (asan_poisoned_variables
1642 && !is_vla
1643 && TREE_ADDRESSABLE (decl)
1644 && !TREE_STATIC (decl)
1645 && !DECL_HAS_VALUE_EXPR_P (decl)
1646 && dbg_cnt (asan_use_after_scope))
1648 asan_poisoned_variables->add (decl);
1649 asan_poison_variable (decl, false, seq_p);
1650 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1651 gimplify_ctxp->live_switch_vars->add (decl);
1654 /* Some front ends do not explicitly declare all anonymous
1655 artificial variables. We compensate here by declaring the
1656 variables, though it would be better if the front ends would
1657 explicitly declare them. */
1658 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1659 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1660 gimple_add_tmp_var (decl);
1662 if (init && init != error_mark_node)
1664 if (!TREE_STATIC (decl))
1666 DECL_INITIAL (decl) = NULL_TREE;
1667 init = build2 (INIT_EXPR, void_type_node, decl, init);
1668 gimplify_and_add (init, seq_p);
1669 ggc_free (init);
1671 else
1672 /* We must still examine initializers for static variables
1673 as they may contain a label address. */
1674 walk_tree (&init, force_labels_r, NULL, NULL);
1678 return GS_ALL_DONE;
1681 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1682 and replacing the LOOP_EXPR with goto, but if the loop contains an
1683 EXIT_EXPR, we need to append a label for it to jump to. */
1685 static enum gimplify_status
1686 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1688 tree saved_label = gimplify_ctxp->exit_label;
1689 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1691 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1693 gimplify_ctxp->exit_label = NULL_TREE;
1695 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1697 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1699 if (gimplify_ctxp->exit_label)
1700 gimplify_seq_add_stmt (pre_p,
1701 gimple_build_label (gimplify_ctxp->exit_label));
1703 gimplify_ctxp->exit_label = saved_label;
1705 *expr_p = NULL;
1706 return GS_ALL_DONE;
1709 /* Gimplify a statement list onto a sequence. These may be created either
1710 by an enlightened front-end, or by shortcut_cond_expr. */
1712 static enum gimplify_status
1713 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1715 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1717 tree_stmt_iterator i = tsi_start (*expr_p);
1719 while (!tsi_end_p (i))
1721 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1722 tsi_delink (&i);
1725 if (temp)
1727 *expr_p = temp;
1728 return GS_OK;
1731 return GS_ALL_DONE;
1734 /* Callback for walk_gimple_seq. */
1736 static tree
1737 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1738 struct walk_stmt_info *wi)
1740 gimple *stmt = gsi_stmt (*gsi_p);
1742 *handled_ops_p = true;
1743 switch (gimple_code (stmt))
1745 case GIMPLE_TRY:
1746 /* A compiler-generated cleanup or a user-written try block.
1747 If it's empty, don't dive into it--that would result in
1748 worse location info. */
1749 if (gimple_try_eval (stmt) == NULL)
1751 wi->info = stmt;
1752 return integer_zero_node;
1754 /* Fall through. */
1755 case GIMPLE_BIND:
1756 case GIMPLE_CATCH:
1757 case GIMPLE_EH_FILTER:
1758 case GIMPLE_TRANSACTION:
1759 /* Walk the sub-statements. */
1760 *handled_ops_p = false;
1761 break;
1762 case GIMPLE_CALL:
1763 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1765 *handled_ops_p = false;
1766 break;
1768 /* Fall through. */
1769 default:
1770 /* Save the first "real" statement (not a decl/lexical scope/...). */
1771 wi->info = stmt;
1772 return integer_zero_node;
1774 return NULL_TREE;
1777 /* Possibly warn about unreachable statements between switch's controlling
1778 expression and the first case. SEQ is the body of a switch expression. */
1780 static void
1781 maybe_warn_switch_unreachable (gimple_seq seq)
1783 if (!warn_switch_unreachable
1784 /* This warning doesn't play well with Fortran when optimizations
1785 are on. */
1786 || lang_GNU_Fortran ()
1787 || seq == NULL)
1788 return;
1790 struct walk_stmt_info wi;
1791 memset (&wi, 0, sizeof (wi));
1792 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1793 gimple *stmt = (gimple *) wi.info;
1795 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1797 if (gimple_code (stmt) == GIMPLE_GOTO
1798 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1799 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1800 /* Don't warn for compiler-generated gotos. These occur
1801 in Duff's devices, for example. */;
1802 else
1803 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1804 "statement will never be executed");
1809 /* A label entry that pairs label and a location. */
1810 struct label_entry
1812 tree label;
1813 location_t loc;
1816 /* Find LABEL in vector of label entries VEC. */
1818 static struct label_entry *
1819 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1821 unsigned int i;
1822 struct label_entry *l;
1824 FOR_EACH_VEC_ELT (*vec, i, l)
1825 if (l->label == label)
1826 return l;
1827 return NULL;
1830 /* Return true if LABEL, a LABEL_DECL, represents a case label
1831 in a vector of labels CASES. */
1833 static bool
1834 case_label_p (const vec<tree> *cases, tree label)
1836 unsigned int i;
1837 tree l;
1839 FOR_EACH_VEC_ELT (*cases, i, l)
1840 if (CASE_LABEL (l) == label)
1841 return true;
1842 return false;
1845 /* Find the last statement in a scope STMT. */
1847 static gimple *
1848 last_stmt_in_scope (gimple *stmt)
1850 if (!stmt)
1851 return NULL;
1853 switch (gimple_code (stmt))
1855 case GIMPLE_BIND:
1857 gbind *bind = as_a <gbind *> (stmt);
1858 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1859 return last_stmt_in_scope (stmt);
1862 case GIMPLE_TRY:
1864 gtry *try_stmt = as_a <gtry *> (stmt);
1865 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1866 gimple *last_eval = last_stmt_in_scope (stmt);
1867 if (gimple_stmt_may_fallthru (last_eval)
1868 && (last_eval == NULL
1869 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1870 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1872 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1873 return last_stmt_in_scope (stmt);
1875 else
1876 return last_eval;
1879 default:
1880 return stmt;
1884 /* Collect interesting labels in LABELS and return the statement preceding
1885 another case label, or a user-defined label. */
1887 static gimple *
1888 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1889 auto_vec <struct label_entry> *labels)
1891 gimple *prev = NULL;
1895 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1896 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1898 /* Nested scope. Only look at the last statement of
1899 the innermost scope. */
1900 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1901 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1902 if (last)
1904 prev = last;
1905 /* It might be a label without a location. Use the
1906 location of the scope then. */
1907 if (!gimple_has_location (prev))
1908 gimple_set_location (prev, bind_loc);
1910 gsi_next (gsi_p);
1911 continue;
1914 /* Ifs are tricky. */
1915 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1917 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1918 tree false_lab = gimple_cond_false_label (cond_stmt);
1919 location_t if_loc = gimple_location (cond_stmt);
1921 /* If we have e.g.
1922 if (i > 1) goto <D.2259>; else goto D;
1923 we can't do much with the else-branch. */
1924 if (!DECL_ARTIFICIAL (false_lab))
1925 break;
1927 /* Go on until the false label, then one step back. */
1928 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1930 gimple *stmt = gsi_stmt (*gsi_p);
1931 if (gimple_code (stmt) == GIMPLE_LABEL
1932 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1933 break;
1936 /* Not found? Oops. */
1937 if (gsi_end_p (*gsi_p))
1938 break;
1940 struct label_entry l = { false_lab, if_loc };
1941 labels->safe_push (l);
1943 /* Go to the last statement of the then branch. */
1944 gsi_prev (gsi_p);
1946 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1947 <D.1759>:
1948 <stmt>;
1949 goto <D.1761>;
1950 <D.1760>:
1952 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1953 && !gimple_has_location (gsi_stmt (*gsi_p)))
1955 /* Look at the statement before, it might be
1956 attribute fallthrough, in which case don't warn. */
1957 gsi_prev (gsi_p);
1958 bool fallthru_before_dest
1959 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1960 gsi_next (gsi_p);
1961 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1962 if (!fallthru_before_dest)
1964 struct label_entry l = { goto_dest, if_loc };
1965 labels->safe_push (l);
1968 /* And move back. */
1969 gsi_next (gsi_p);
1972 /* Remember the last statement. Skip labels that are of no interest
1973 to us. */
1974 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1976 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1977 if (find_label_entry (labels, label))
1978 prev = gsi_stmt (*gsi_p);
1980 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1982 else
1983 prev = gsi_stmt (*gsi_p);
1984 gsi_next (gsi_p);
1986 while (!gsi_end_p (*gsi_p)
1987 /* Stop if we find a case or a user-defined label. */
1988 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
1989 || !gimple_has_location (gsi_stmt (*gsi_p))));
1991 return prev;
1994 /* Return true if the switch fallthough warning should occur. LABEL is
1995 the label statement that we're falling through to. */
1997 static bool
1998 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2000 gimple_stmt_iterator gsi = *gsi_p;
2002 /* Don't warn if the label is marked with a "falls through" comment. */
2003 if (FALLTHROUGH_LABEL_P (label))
2004 return false;
2006 /* Don't warn for non-case labels followed by a statement:
2007 case 0:
2008 foo ();
2009 label:
2010 bar ();
2011 as these are likely intentional. */
2012 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2014 tree l;
2015 while (!gsi_end_p (gsi)
2016 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2017 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2018 && !case_label_p (&gimplify_ctxp->case_labels, l))
2019 gsi_next (&gsi);
2020 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2021 return false;
2024 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2025 immediately breaks. */
2026 gsi = *gsi_p;
2028 /* Skip all immediately following labels. */
2029 while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
2030 gsi_next (&gsi);
2032 /* { ... something; default:; } */
2033 if (gsi_end_p (gsi)
2034 /* { ... something; default: break; } or
2035 { ... something; default: goto L; } */
2036 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2037 /* { ... something; default: return; } */
2038 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2039 return false;
2041 return true;
2044 /* Callback for walk_gimple_seq. */
2046 static tree
2047 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2048 struct walk_stmt_info *)
2050 gimple *stmt = gsi_stmt (*gsi_p);
2052 *handled_ops_p = true;
2053 switch (gimple_code (stmt))
2055 case GIMPLE_TRY:
2056 case GIMPLE_BIND:
2057 case GIMPLE_CATCH:
2058 case GIMPLE_EH_FILTER:
2059 case GIMPLE_TRANSACTION:
2060 /* Walk the sub-statements. */
2061 *handled_ops_p = false;
2062 break;
2064 /* Find a sequence of form:
2066 GIMPLE_LABEL
2067 [...]
2068 <may fallthru stmt>
2069 GIMPLE_LABEL
2071 and possibly warn. */
2072 case GIMPLE_LABEL:
2074 /* Found a label. Skip all immediately following labels. */
2075 while (!gsi_end_p (*gsi_p)
2076 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2077 gsi_next (gsi_p);
2079 /* There might be no more statements. */
2080 if (gsi_end_p (*gsi_p))
2081 return integer_zero_node;
2083 /* Vector of labels that fall through. */
2084 auto_vec <struct label_entry> labels;
2085 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2087 /* There might be no more statements. */
2088 if (gsi_end_p (*gsi_p))
2089 return integer_zero_node;
2091 gimple *next = gsi_stmt (*gsi_p);
2092 tree label;
2093 /* If what follows is a label, then we may have a fallthrough. */
2094 if (gimple_code (next) == GIMPLE_LABEL
2095 && gimple_has_location (next)
2096 && (label = gimple_label_label (as_a <glabel *> (next)))
2097 && prev != NULL)
2099 struct label_entry *l;
2100 bool warned_p = false;
2101 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2102 /* Quiet. */;
2103 else if (gimple_code (prev) == GIMPLE_LABEL
2104 && (label = gimple_label_label (as_a <glabel *> (prev)))
2105 && (l = find_label_entry (&labels, label)))
2106 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2107 "this statement may fall through");
2108 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2109 /* Try to be clever and don't warn when the statement
2110 can't actually fall through. */
2111 && gimple_stmt_may_fallthru (prev)
2112 && gimple_has_location (prev))
2113 warned_p = warning_at (gimple_location (prev),
2114 OPT_Wimplicit_fallthrough_,
2115 "this statement may fall through");
2116 if (warned_p)
2117 inform (gimple_location (next), "here");
2119 /* Mark this label as processed so as to prevent multiple
2120 warnings in nested switches. */
2121 FALLTHROUGH_LABEL_P (label) = true;
2123 /* So that next warn_implicit_fallthrough_r will start looking for
2124 a new sequence starting with this label. */
2125 gsi_prev (gsi_p);
2128 break;
2129 default:
2130 break;
2132 return NULL_TREE;
2135 /* Warn when a switch case falls through. */
2137 static void
2138 maybe_warn_implicit_fallthrough (gimple_seq seq)
2140 if (!warn_implicit_fallthrough)
2141 return;
2143 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2144 if (!(lang_GNU_C ()
2145 || lang_GNU_CXX ()
2146 || lang_GNU_OBJC ()))
2147 return;
2149 struct walk_stmt_info wi;
2150 memset (&wi, 0, sizeof (wi));
2151 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2154 /* Callback for walk_gimple_seq. */
2156 static tree
2157 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2158 struct walk_stmt_info *)
2160 gimple *stmt = gsi_stmt (*gsi_p);
2162 *handled_ops_p = true;
2163 switch (gimple_code (stmt))
2165 case GIMPLE_TRY:
2166 case GIMPLE_BIND:
2167 case GIMPLE_CATCH:
2168 case GIMPLE_EH_FILTER:
2169 case GIMPLE_TRANSACTION:
2170 /* Walk the sub-statements. */
2171 *handled_ops_p = false;
2172 break;
2173 case GIMPLE_CALL:
2174 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2176 gsi_remove (gsi_p, true);
2177 if (gsi_end_p (*gsi_p))
2178 return integer_zero_node;
2180 bool found = false;
2181 location_t loc = gimple_location (stmt);
2183 gimple_stmt_iterator gsi2 = *gsi_p;
2184 stmt = gsi_stmt (gsi2);
2185 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2187 /* Go on until the artificial label. */
2188 tree goto_dest = gimple_goto_dest (stmt);
2189 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2191 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2192 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2193 == goto_dest)
2194 break;
2197 /* Not found? Stop. */
2198 if (gsi_end_p (gsi2))
2199 break;
2201 /* Look one past it. */
2202 gsi_next (&gsi2);
2205 /* We're looking for a case label or default label here. */
2206 while (!gsi_end_p (gsi2))
2208 stmt = gsi_stmt (gsi2);
2209 if (gimple_code (stmt) == GIMPLE_LABEL)
2211 tree label = gimple_label_label (as_a <glabel *> (stmt));
2212 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2214 found = true;
2215 break;
2218 else
2219 /* Something other than a label. That's not expected. */
2220 break;
2221 gsi_next (&gsi2);
2223 if (!found)
2224 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2225 "a case label or default label");
2227 break;
2228 default:
2229 break;
2231 return NULL_TREE;
2234 /* Expand all FALLTHROUGH () calls in SEQ. */
2236 static void
2237 expand_FALLTHROUGH (gimple_seq *seq_p)
2239 struct walk_stmt_info wi;
2240 memset (&wi, 0, sizeof (wi));
2241 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2245 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2246 branch to. */
2248 static enum gimplify_status
2249 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2251 tree switch_expr = *expr_p;
2252 gimple_seq switch_body_seq = NULL;
2253 enum gimplify_status ret;
2254 tree index_type = TREE_TYPE (switch_expr);
2255 if (index_type == NULL_TREE)
2256 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2258 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2259 fb_rvalue);
2260 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2261 return ret;
2263 if (SWITCH_BODY (switch_expr))
2265 vec<tree> labels;
2266 vec<tree> saved_labels;
2267 hash_set<tree> *saved_live_switch_vars = NULL;
2268 tree default_case = NULL_TREE;
2269 gswitch *switch_stmt;
2271 /* If someone can be bothered to fill in the labels, they can
2272 be bothered to null out the body too. */
2273 gcc_assert (!SWITCH_LABELS (switch_expr));
2275 /* Save old labels, get new ones from body, then restore the old
2276 labels. Save all the things from the switch body to append after. */
2277 saved_labels = gimplify_ctxp->case_labels;
2278 gimplify_ctxp->case_labels.create (8);
2280 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2281 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2282 if (TREE_CODE (SWITCH_BODY (switch_expr)) == BIND_EXPR)
2283 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2284 else
2285 gimplify_ctxp->live_switch_vars = NULL;
2287 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2288 gimplify_ctxp->in_switch_expr = true;
2290 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2292 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2293 maybe_warn_switch_unreachable (switch_body_seq);
2294 maybe_warn_implicit_fallthrough (switch_body_seq);
2295 /* Only do this for the outermost GIMPLE_SWITCH. */
2296 if (!gimplify_ctxp->in_switch_expr)
2297 expand_FALLTHROUGH (&switch_body_seq);
2299 labels = gimplify_ctxp->case_labels;
2300 gimplify_ctxp->case_labels = saved_labels;
2302 if (gimplify_ctxp->live_switch_vars)
2304 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2305 delete gimplify_ctxp->live_switch_vars;
2307 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2309 preprocess_case_label_vec_for_gimple (labels, index_type,
2310 &default_case);
2312 if (!default_case)
2314 glabel *new_default;
2316 default_case
2317 = build_case_label (NULL_TREE, NULL_TREE,
2318 create_artificial_label (UNKNOWN_LOCATION));
2319 new_default = gimple_build_label (CASE_LABEL (default_case));
2320 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2323 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2324 default_case, labels);
2325 gimplify_seq_add_stmt (pre_p, switch_stmt);
2326 gimplify_seq_add_seq (pre_p, switch_body_seq);
2327 labels.release ();
2329 else
2330 gcc_assert (SWITCH_LABELS (switch_expr));
2332 return GS_ALL_DONE;
2335 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2337 static enum gimplify_status
2338 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2340 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2341 == current_function_decl);
2343 glabel *label_stmt = gimple_build_label (LABEL_EXPR_LABEL (*expr_p));
2344 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2345 gimplify_seq_add_stmt (pre_p, label_stmt);
2347 return GS_ALL_DONE;
2350 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2352 static enum gimplify_status
2353 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2355 struct gimplify_ctx *ctxp;
2356 glabel *label_stmt;
2358 /* Invalid programs can play Duff's Device type games with, for example,
2359 #pragma omp parallel. At least in the C front end, we don't
2360 detect such invalid branches until after gimplification, in the
2361 diagnose_omp_blocks pass. */
2362 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2363 if (ctxp->case_labels.exists ())
2364 break;
2366 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2367 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2368 ctxp->case_labels.safe_push (*expr_p);
2369 gimplify_seq_add_stmt (pre_p, label_stmt);
2371 return GS_ALL_DONE;
2374 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2375 if necessary. */
2377 tree
2378 build_and_jump (tree *label_p)
2380 if (label_p == NULL)
2381 /* If there's nowhere to jump, just fall through. */
2382 return NULL_TREE;
2384 if (*label_p == NULL_TREE)
2386 tree label = create_artificial_label (UNKNOWN_LOCATION);
2387 *label_p = label;
2390 return build1 (GOTO_EXPR, void_type_node, *label_p);
2393 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2394 This also involves building a label to jump to and communicating it to
2395 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2397 static enum gimplify_status
2398 gimplify_exit_expr (tree *expr_p)
2400 tree cond = TREE_OPERAND (*expr_p, 0);
2401 tree expr;
2403 expr = build_and_jump (&gimplify_ctxp->exit_label);
2404 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2405 *expr_p = expr;
2407 return GS_OK;
2410 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2411 different from its canonical type, wrap the whole thing inside a
2412 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2413 type.
2415 The canonical type of a COMPONENT_REF is the type of the field being
2416 referenced--unless the field is a bit-field which can be read directly
2417 in a smaller mode, in which case the canonical type is the
2418 sign-appropriate type corresponding to that mode. */
2420 static void
2421 canonicalize_component_ref (tree *expr_p)
2423 tree expr = *expr_p;
2424 tree type;
2426 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2428 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2429 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2430 else
2431 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2433 /* One could argue that all the stuff below is not necessary for
2434 the non-bitfield case and declare it a FE error if type
2435 adjustment would be needed. */
2436 if (TREE_TYPE (expr) != type)
2438 #ifdef ENABLE_TYPES_CHECKING
2439 tree old_type = TREE_TYPE (expr);
2440 #endif
2441 int type_quals;
2443 /* We need to preserve qualifiers and propagate them from
2444 operand 0. */
2445 type_quals = TYPE_QUALS (type)
2446 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2447 if (TYPE_QUALS (type) != type_quals)
2448 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2450 /* Set the type of the COMPONENT_REF to the underlying type. */
2451 TREE_TYPE (expr) = type;
2453 #ifdef ENABLE_TYPES_CHECKING
2454 /* It is now a FE error, if the conversion from the canonical
2455 type to the original expression type is not useless. */
2456 gcc_assert (useless_type_conversion_p (old_type, type));
2457 #endif
2461 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2462 to foo, embed that change in the ADDR_EXPR by converting
2463 T array[U];
2464 (T *)&array
2466 &array[L]
2467 where L is the lower bound. For simplicity, only do this for constant
2468 lower bound.
2469 The constraint is that the type of &array[L] is trivially convertible
2470 to T *. */
2472 static void
2473 canonicalize_addr_expr (tree *expr_p)
2475 tree expr = *expr_p;
2476 tree addr_expr = TREE_OPERAND (expr, 0);
2477 tree datype, ddatype, pddatype;
2479 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2480 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2481 || TREE_CODE (addr_expr) != ADDR_EXPR)
2482 return;
2484 /* The addr_expr type should be a pointer to an array. */
2485 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2486 if (TREE_CODE (datype) != ARRAY_TYPE)
2487 return;
2489 /* The pointer to element type shall be trivially convertible to
2490 the expression pointer type. */
2491 ddatype = TREE_TYPE (datype);
2492 pddatype = build_pointer_type (ddatype);
2493 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2494 pddatype))
2495 return;
2497 /* The lower bound and element sizes must be constant. */
2498 if (!TYPE_SIZE_UNIT (ddatype)
2499 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2500 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2501 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2502 return;
2504 /* All checks succeeded. Build a new node to merge the cast. */
2505 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2506 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2507 NULL_TREE, NULL_TREE);
2508 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2510 /* We can have stripped a required restrict qualifier above. */
2511 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2512 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2515 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2516 underneath as appropriate. */
2518 static enum gimplify_status
2519 gimplify_conversion (tree *expr_p)
2521 location_t loc = EXPR_LOCATION (*expr_p);
2522 gcc_assert (CONVERT_EXPR_P (*expr_p));
2524 /* Then strip away all but the outermost conversion. */
2525 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2527 /* And remove the outermost conversion if it's useless. */
2528 if (tree_ssa_useless_type_conversion (*expr_p))
2529 *expr_p = TREE_OPERAND (*expr_p, 0);
2531 /* If we still have a conversion at the toplevel,
2532 then canonicalize some constructs. */
2533 if (CONVERT_EXPR_P (*expr_p))
2535 tree sub = TREE_OPERAND (*expr_p, 0);
2537 /* If a NOP conversion is changing the type of a COMPONENT_REF
2538 expression, then canonicalize its type now in order to expose more
2539 redundant conversions. */
2540 if (TREE_CODE (sub) == COMPONENT_REF)
2541 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2543 /* If a NOP conversion is changing a pointer to array of foo
2544 to a pointer to foo, embed that change in the ADDR_EXPR. */
2545 else if (TREE_CODE (sub) == ADDR_EXPR)
2546 canonicalize_addr_expr (expr_p);
2549 /* If we have a conversion to a non-register type force the
2550 use of a VIEW_CONVERT_EXPR instead. */
2551 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2552 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2553 TREE_OPERAND (*expr_p, 0));
2555 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2556 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2557 TREE_SET_CODE (*expr_p, NOP_EXPR);
2559 return GS_OK;
2562 /* Nonlocal VLAs seen in the current function. */
2563 static hash_set<tree> *nonlocal_vlas;
2565 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2566 static tree nonlocal_vla_vars;
2568 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2569 DECL_VALUE_EXPR, and it's worth re-examining things. */
2571 static enum gimplify_status
2572 gimplify_var_or_parm_decl (tree *expr_p)
2574 tree decl = *expr_p;
2576 /* ??? If this is a local variable, and it has not been seen in any
2577 outer BIND_EXPR, then it's probably the result of a duplicate
2578 declaration, for which we've already issued an error. It would
2579 be really nice if the front end wouldn't leak these at all.
2580 Currently the only known culprit is C++ destructors, as seen
2581 in g++.old-deja/g++.jason/binding.C. */
2582 if (VAR_P (decl)
2583 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2584 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2585 && decl_function_context (decl) == current_function_decl)
2587 gcc_assert (seen_error ());
2588 return GS_ERROR;
2591 /* When within an OMP context, notice uses of variables. */
2592 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2593 return GS_ALL_DONE;
2595 /* If the decl is an alias for another expression, substitute it now. */
2596 if (DECL_HAS_VALUE_EXPR_P (decl))
2598 tree value_expr = DECL_VALUE_EXPR (decl);
2600 /* For referenced nonlocal VLAs add a decl for debugging purposes
2601 to the current function. */
2602 if (VAR_P (decl)
2603 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2604 && nonlocal_vlas != NULL
2605 && TREE_CODE (value_expr) == INDIRECT_REF
2606 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2607 && decl_function_context (decl) != current_function_decl)
2609 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2610 while (ctx
2611 && (ctx->region_type == ORT_WORKSHARE
2612 || ctx->region_type == ORT_SIMD
2613 || ctx->region_type == ORT_ACC))
2614 ctx = ctx->outer_context;
2615 if (!ctx && !nonlocal_vlas->add (decl))
2617 tree copy = copy_node (decl);
2619 lang_hooks.dup_lang_specific_decl (copy);
2620 SET_DECL_RTL (copy, 0);
2621 TREE_USED (copy) = 1;
2622 DECL_CHAIN (copy) = nonlocal_vla_vars;
2623 nonlocal_vla_vars = copy;
2624 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2625 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2629 *expr_p = unshare_expr (value_expr);
2630 return GS_OK;
2633 return GS_ALL_DONE;
2636 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2638 static void
2639 recalculate_side_effects (tree t)
2641 enum tree_code code = TREE_CODE (t);
2642 int len = TREE_OPERAND_LENGTH (t);
2643 int i;
2645 switch (TREE_CODE_CLASS (code))
2647 case tcc_expression:
2648 switch (code)
2650 case INIT_EXPR:
2651 case MODIFY_EXPR:
2652 case VA_ARG_EXPR:
2653 case PREDECREMENT_EXPR:
2654 case PREINCREMENT_EXPR:
2655 case POSTDECREMENT_EXPR:
2656 case POSTINCREMENT_EXPR:
2657 /* All of these have side-effects, no matter what their
2658 operands are. */
2659 return;
2661 default:
2662 break;
2664 /* Fall through. */
2666 case tcc_comparison: /* a comparison expression */
2667 case tcc_unary: /* a unary arithmetic expression */
2668 case tcc_binary: /* a binary arithmetic expression */
2669 case tcc_reference: /* a reference */
2670 case tcc_vl_exp: /* a function call */
2671 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2672 for (i = 0; i < len; ++i)
2674 tree op = TREE_OPERAND (t, i);
2675 if (op && TREE_SIDE_EFFECTS (op))
2676 TREE_SIDE_EFFECTS (t) = 1;
2678 break;
2680 case tcc_constant:
2681 /* No side-effects. */
2682 return;
2684 default:
2685 gcc_unreachable ();
2689 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2690 node *EXPR_P.
2692 compound_lval
2693 : min_lval '[' val ']'
2694 | min_lval '.' ID
2695 | compound_lval '[' val ']'
2696 | compound_lval '.' ID
2698 This is not part of the original SIMPLE definition, which separates
2699 array and member references, but it seems reasonable to handle them
2700 together. Also, this way we don't run into problems with union
2701 aliasing; gcc requires that for accesses through a union to alias, the
2702 union reference must be explicit, which was not always the case when we
2703 were splitting up array and member refs.
2705 PRE_P points to the sequence where side effects that must happen before
2706 *EXPR_P should be stored.
2708 POST_P points to the sequence where side effects that must happen after
2709 *EXPR_P should be stored. */
2711 static enum gimplify_status
2712 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2713 fallback_t fallback)
2715 tree *p;
2716 enum gimplify_status ret = GS_ALL_DONE, tret;
2717 int i;
2718 location_t loc = EXPR_LOCATION (*expr_p);
2719 tree expr = *expr_p;
2721 /* Create a stack of the subexpressions so later we can walk them in
2722 order from inner to outer. */
2723 auto_vec<tree, 10> expr_stack;
2725 /* We can handle anything that get_inner_reference can deal with. */
2726 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2728 restart:
2729 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2730 if (TREE_CODE (*p) == INDIRECT_REF)
2731 *p = fold_indirect_ref_loc (loc, *p);
2733 if (handled_component_p (*p))
2735 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2736 additional COMPONENT_REFs. */
2737 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2738 && gimplify_var_or_parm_decl (p) == GS_OK)
2739 goto restart;
2740 else
2741 break;
2743 expr_stack.safe_push (*p);
2746 gcc_assert (expr_stack.length ());
2748 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2749 walked through and P points to the innermost expression.
2751 Java requires that we elaborated nodes in source order. That
2752 means we must gimplify the inner expression followed by each of
2753 the indices, in order. But we can't gimplify the inner
2754 expression until we deal with any variable bounds, sizes, or
2755 positions in order to deal with PLACEHOLDER_EXPRs.
2757 So we do this in three steps. First we deal with the annotations
2758 for any variables in the components, then we gimplify the base,
2759 then we gimplify any indices, from left to right. */
2760 for (i = expr_stack.length () - 1; i >= 0; i--)
2762 tree t = expr_stack[i];
2764 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2766 /* Gimplify the low bound and element type size and put them into
2767 the ARRAY_REF. If these values are set, they have already been
2768 gimplified. */
2769 if (TREE_OPERAND (t, 2) == NULL_TREE)
2771 tree low = unshare_expr (array_ref_low_bound (t));
2772 if (!is_gimple_min_invariant (low))
2774 TREE_OPERAND (t, 2) = low;
2775 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2776 post_p, is_gimple_reg,
2777 fb_rvalue);
2778 ret = MIN (ret, tret);
2781 else
2783 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2784 is_gimple_reg, fb_rvalue);
2785 ret = MIN (ret, tret);
2788 if (TREE_OPERAND (t, 3) == NULL_TREE)
2790 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2791 tree elmt_size = unshare_expr (array_ref_element_size (t));
2792 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2794 /* Divide the element size by the alignment of the element
2795 type (above). */
2796 elmt_size
2797 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2799 if (!is_gimple_min_invariant (elmt_size))
2801 TREE_OPERAND (t, 3) = elmt_size;
2802 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2803 post_p, is_gimple_reg,
2804 fb_rvalue);
2805 ret = MIN (ret, tret);
2808 else
2810 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2811 is_gimple_reg, fb_rvalue);
2812 ret = MIN (ret, tret);
2815 else if (TREE_CODE (t) == COMPONENT_REF)
2817 /* Set the field offset into T and gimplify it. */
2818 if (TREE_OPERAND (t, 2) == NULL_TREE)
2820 tree offset = unshare_expr (component_ref_field_offset (t));
2821 tree field = TREE_OPERAND (t, 1);
2822 tree factor
2823 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2825 /* Divide the offset by its alignment. */
2826 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2828 if (!is_gimple_min_invariant (offset))
2830 TREE_OPERAND (t, 2) = offset;
2831 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2832 post_p, is_gimple_reg,
2833 fb_rvalue);
2834 ret = MIN (ret, tret);
2837 else
2839 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2840 is_gimple_reg, fb_rvalue);
2841 ret = MIN (ret, tret);
2846 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2847 so as to match the min_lval predicate. Failure to do so may result
2848 in the creation of large aggregate temporaries. */
2849 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2850 fallback | fb_lvalue);
2851 ret = MIN (ret, tret);
2853 /* And finally, the indices and operands of ARRAY_REF. During this
2854 loop we also remove any useless conversions. */
2855 for (; expr_stack.length () > 0; )
2857 tree t = expr_stack.pop ();
2859 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2861 /* Gimplify the dimension. */
2862 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2864 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2865 is_gimple_val, fb_rvalue);
2866 ret = MIN (ret, tret);
2870 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2872 /* The innermost expression P may have originally had
2873 TREE_SIDE_EFFECTS set which would have caused all the outer
2874 expressions in *EXPR_P leading to P to also have had
2875 TREE_SIDE_EFFECTS set. */
2876 recalculate_side_effects (t);
2879 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2880 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2882 canonicalize_component_ref (expr_p);
2885 expr_stack.release ();
2887 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2889 return ret;
2892 /* Gimplify the self modifying expression pointed to by EXPR_P
2893 (++, --, +=, -=).
2895 PRE_P points to the list where side effects that must happen before
2896 *EXPR_P should be stored.
2898 POST_P points to the list where side effects that must happen after
2899 *EXPR_P should be stored.
2901 WANT_VALUE is nonzero iff we want to use the value of this expression
2902 in another expression.
2904 ARITH_TYPE is the type the computation should be performed in. */
2906 enum gimplify_status
2907 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2908 bool want_value, tree arith_type)
2910 enum tree_code code;
2911 tree lhs, lvalue, rhs, t1;
2912 gimple_seq post = NULL, *orig_post_p = post_p;
2913 bool postfix;
2914 enum tree_code arith_code;
2915 enum gimplify_status ret;
2916 location_t loc = EXPR_LOCATION (*expr_p);
2918 code = TREE_CODE (*expr_p);
2920 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2921 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2923 /* Prefix or postfix? */
2924 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2925 /* Faster to treat as prefix if result is not used. */
2926 postfix = want_value;
2927 else
2928 postfix = false;
2930 /* For postfix, make sure the inner expression's post side effects
2931 are executed after side effects from this expression. */
2932 if (postfix)
2933 post_p = &post;
2935 /* Add or subtract? */
2936 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2937 arith_code = PLUS_EXPR;
2938 else
2939 arith_code = MINUS_EXPR;
2941 /* Gimplify the LHS into a GIMPLE lvalue. */
2942 lvalue = TREE_OPERAND (*expr_p, 0);
2943 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2944 if (ret == GS_ERROR)
2945 return ret;
2947 /* Extract the operands to the arithmetic operation. */
2948 lhs = lvalue;
2949 rhs = TREE_OPERAND (*expr_p, 1);
2951 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2952 that as the result value and in the postqueue operation. */
2953 if (postfix)
2955 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2956 if (ret == GS_ERROR)
2957 return ret;
2959 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2962 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2963 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2965 rhs = convert_to_ptrofftype_loc (loc, rhs);
2966 if (arith_code == MINUS_EXPR)
2967 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2968 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2970 else
2971 t1 = fold_convert (TREE_TYPE (*expr_p),
2972 fold_build2 (arith_code, arith_type,
2973 fold_convert (arith_type, lhs),
2974 fold_convert (arith_type, rhs)));
2976 if (postfix)
2978 gimplify_assign (lvalue, t1, pre_p);
2979 gimplify_seq_add_seq (orig_post_p, post);
2980 *expr_p = lhs;
2981 return GS_ALL_DONE;
2983 else
2985 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2986 return GS_OK;
2990 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2992 static void
2993 maybe_with_size_expr (tree *expr_p)
2995 tree expr = *expr_p;
2996 tree type = TREE_TYPE (expr);
2997 tree size;
2999 /* If we've already wrapped this or the type is error_mark_node, we can't do
3000 anything. */
3001 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3002 || type == error_mark_node)
3003 return;
3005 /* If the size isn't known or is a constant, we have nothing to do. */
3006 size = TYPE_SIZE_UNIT (type);
3007 if (!size || TREE_CODE (size) == INTEGER_CST)
3008 return;
3010 /* Otherwise, make a WITH_SIZE_EXPR. */
3011 size = unshare_expr (size);
3012 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3013 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3016 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3017 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3018 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3019 gimplified to an SSA name. */
3021 enum gimplify_status
3022 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3023 bool allow_ssa)
3025 bool (*test) (tree);
3026 fallback_t fb;
3028 /* In general, we allow lvalues for function arguments to avoid
3029 extra overhead of copying large aggregates out of even larger
3030 aggregates into temporaries only to copy the temporaries to
3031 the argument list. Make optimizers happy by pulling out to
3032 temporaries those types that fit in registers. */
3033 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3034 test = is_gimple_val, fb = fb_rvalue;
3035 else
3037 test = is_gimple_lvalue, fb = fb_either;
3038 /* Also strip a TARGET_EXPR that would force an extra copy. */
3039 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3041 tree init = TARGET_EXPR_INITIAL (*arg_p);
3042 if (init
3043 && !VOID_TYPE_P (TREE_TYPE (init)))
3044 *arg_p = init;
3048 /* If this is a variable sized type, we must remember the size. */
3049 maybe_with_size_expr (arg_p);
3051 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3052 /* Make sure arguments have the same location as the function call
3053 itself. */
3054 protected_set_expr_location (*arg_p, call_location);
3056 /* There is a sequence point before a function call. Side effects in
3057 the argument list must occur before the actual call. So, when
3058 gimplifying arguments, force gimplify_expr to use an internal
3059 post queue which is then appended to the end of PRE_P. */
3060 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3063 /* Don't fold inside offloading or taskreg regions: it can break code by
3064 adding decl references that weren't in the source. We'll do it during
3065 omplower pass instead. */
3067 static bool
3068 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3070 struct gimplify_omp_ctx *ctx;
3071 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3072 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3073 return false;
3074 return fold_stmt (gsi);
3077 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3078 with the pointer to the proper cilk frame. */
3079 static void
3080 gimplify_cilk_detach (gimple_seq *pre_p)
3082 tree frame = cfun->cilk_frame_decl;
3083 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3084 frame);
3085 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3086 ptrf);
3087 gimplify_seq_add_stmt(pre_p, detach);
3090 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3091 WANT_VALUE is true if the result of the call is desired. */
3093 static enum gimplify_status
3094 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3096 tree fndecl, parms, p, fnptrtype;
3097 enum gimplify_status ret;
3098 int i, nargs;
3099 gcall *call;
3100 bool builtin_va_start_p = false;
3101 location_t loc = EXPR_LOCATION (*expr_p);
3103 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3105 /* For reliable diagnostics during inlining, it is necessary that
3106 every call_expr be annotated with file and line. */
3107 if (! EXPR_HAS_LOCATION (*expr_p))
3108 SET_EXPR_LOCATION (*expr_p, input_location);
3110 /* Gimplify internal functions created in the FEs. */
3111 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3113 if (want_value)
3114 return GS_ALL_DONE;
3116 nargs = call_expr_nargs (*expr_p);
3117 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3118 auto_vec<tree> vargs (nargs);
3120 for (i = 0; i < nargs; i++)
3122 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3123 EXPR_LOCATION (*expr_p));
3124 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3127 if (EXPR_CILK_SPAWN (*expr_p))
3128 gimplify_cilk_detach (pre_p);
3129 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
3130 gimplify_seq_add_stmt (pre_p, call);
3131 return GS_ALL_DONE;
3134 /* This may be a call to a builtin function.
3136 Builtin function calls may be transformed into different
3137 (and more efficient) builtin function calls under certain
3138 circumstances. Unfortunately, gimplification can muck things
3139 up enough that the builtin expanders are not aware that certain
3140 transformations are still valid.
3142 So we attempt transformation/gimplification of the call before
3143 we gimplify the CALL_EXPR. At this time we do not manage to
3144 transform all calls in the same manner as the expanders do, but
3145 we do transform most of them. */
3146 fndecl = get_callee_fndecl (*expr_p);
3147 if (fndecl
3148 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3149 switch (DECL_FUNCTION_CODE (fndecl))
3151 case BUILT_IN_ALLOCA:
3152 case BUILT_IN_ALLOCA_WITH_ALIGN:
3153 /* If the call has been built for a variable-sized object, then we
3154 want to restore the stack level when the enclosing BIND_EXPR is
3155 exited to reclaim the allocated space; otherwise, we precisely
3156 need to do the opposite and preserve the latest stack level. */
3157 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3158 gimplify_ctxp->save_stack = true;
3159 else
3160 gimplify_ctxp->keep_stack = true;
3161 break;
3163 case BUILT_IN_VA_START:
3165 builtin_va_start_p = TRUE;
3166 if (call_expr_nargs (*expr_p) < 2)
3168 error ("too few arguments to function %<va_start%>");
3169 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3170 return GS_OK;
3173 if (fold_builtin_next_arg (*expr_p, true))
3175 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3176 return GS_OK;
3178 break;
3181 default:
3184 if (fndecl && DECL_BUILT_IN (fndecl))
3186 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3187 if (new_tree && new_tree != *expr_p)
3189 /* There was a transformation of this call which computes the
3190 same value, but in a more efficient way. Return and try
3191 again. */
3192 *expr_p = new_tree;
3193 return GS_OK;
3197 /* Remember the original function pointer type. */
3198 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3200 /* There is a sequence point before the call, so any side effects in
3201 the calling expression must occur before the actual call. Force
3202 gimplify_expr to use an internal post queue. */
3203 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3204 is_gimple_call_addr, fb_rvalue);
3206 nargs = call_expr_nargs (*expr_p);
3208 /* Get argument types for verification. */
3209 fndecl = get_callee_fndecl (*expr_p);
3210 parms = NULL_TREE;
3211 if (fndecl)
3212 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3213 else
3214 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3216 if (fndecl && DECL_ARGUMENTS (fndecl))
3217 p = DECL_ARGUMENTS (fndecl);
3218 else if (parms)
3219 p = parms;
3220 else
3221 p = NULL_TREE;
3222 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3225 /* If the last argument is __builtin_va_arg_pack () and it is not
3226 passed as a named argument, decrease the number of CALL_EXPR
3227 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3228 if (!p
3229 && i < nargs
3230 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3232 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3233 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3235 if (last_arg_fndecl
3236 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3237 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3238 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3240 tree call = *expr_p;
3242 --nargs;
3243 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3244 CALL_EXPR_FN (call),
3245 nargs, CALL_EXPR_ARGP (call));
3247 /* Copy all CALL_EXPR flags, location and block, except
3248 CALL_EXPR_VA_ARG_PACK flag. */
3249 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3250 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3251 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3252 = CALL_EXPR_RETURN_SLOT_OPT (call);
3253 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3254 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3256 /* Set CALL_EXPR_VA_ARG_PACK. */
3257 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3261 /* If the call returns twice then after building the CFG the call
3262 argument computations will no longer dominate the call because
3263 we add an abnormal incoming edge to the call. So do not use SSA
3264 vars there. */
3265 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3267 /* Gimplify the function arguments. */
3268 if (nargs > 0)
3270 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3271 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3272 PUSH_ARGS_REVERSED ? i-- : i++)
3274 enum gimplify_status t;
3276 /* Avoid gimplifying the second argument to va_start, which needs to
3277 be the plain PARM_DECL. */
3278 if ((i != 1) || !builtin_va_start_p)
3280 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3281 EXPR_LOCATION (*expr_p), ! returns_twice);
3283 if (t == GS_ERROR)
3284 ret = GS_ERROR;
3289 /* Gimplify the static chain. */
3290 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3292 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3293 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3294 else
3296 enum gimplify_status t;
3297 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3298 EXPR_LOCATION (*expr_p), ! returns_twice);
3299 if (t == GS_ERROR)
3300 ret = GS_ERROR;
3304 /* Verify the function result. */
3305 if (want_value && fndecl
3306 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3308 error_at (loc, "using result of function returning %<void%>");
3309 ret = GS_ERROR;
3312 /* Try this again in case gimplification exposed something. */
3313 if (ret != GS_ERROR)
3315 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3317 if (new_tree && new_tree != *expr_p)
3319 /* There was a transformation of this call which computes the
3320 same value, but in a more efficient way. Return and try
3321 again. */
3322 *expr_p = new_tree;
3323 return GS_OK;
3326 else
3328 *expr_p = error_mark_node;
3329 return GS_ERROR;
3332 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3333 decl. This allows us to eliminate redundant or useless
3334 calls to "const" functions. */
3335 if (TREE_CODE (*expr_p) == CALL_EXPR)
3337 int flags = call_expr_flags (*expr_p);
3338 if (flags & (ECF_CONST | ECF_PURE)
3339 /* An infinite loop is considered a side effect. */
3340 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3341 TREE_SIDE_EFFECTS (*expr_p) = 0;
3344 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3345 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3346 form and delegate the creation of a GIMPLE_CALL to
3347 gimplify_modify_expr. This is always possible because when
3348 WANT_VALUE is true, the caller wants the result of this call into
3349 a temporary, which means that we will emit an INIT_EXPR in
3350 internal_get_tmp_var which will then be handled by
3351 gimplify_modify_expr. */
3352 if (!want_value)
3354 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3355 have to do is replicate it as a GIMPLE_CALL tuple. */
3356 gimple_stmt_iterator gsi;
3357 call = gimple_build_call_from_tree (*expr_p);
3358 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
3359 notice_special_calls (call);
3360 if (EXPR_CILK_SPAWN (*expr_p))
3361 gimplify_cilk_detach (pre_p);
3362 gimplify_seq_add_stmt (pre_p, call);
3363 gsi = gsi_last (*pre_p);
3364 maybe_fold_stmt (&gsi);
3365 *expr_p = NULL_TREE;
3367 else
3368 /* Remember the original function type. */
3369 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3370 CALL_EXPR_FN (*expr_p));
3372 return ret;
3375 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3376 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3378 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3379 condition is true or false, respectively. If null, we should generate
3380 our own to skip over the evaluation of this specific expression.
3382 LOCUS is the source location of the COND_EXPR.
3384 This function is the tree equivalent of do_jump.
3386 shortcut_cond_r should only be called by shortcut_cond_expr. */
3388 static tree
3389 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3390 location_t locus)
3392 tree local_label = NULL_TREE;
3393 tree t, expr = NULL;
3395 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3396 retain the shortcut semantics. Just insert the gotos here;
3397 shortcut_cond_expr will append the real blocks later. */
3398 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3400 location_t new_locus;
3402 /* Turn if (a && b) into
3404 if (a); else goto no;
3405 if (b) goto yes; else goto no;
3406 (no:) */
3408 if (false_label_p == NULL)
3409 false_label_p = &local_label;
3411 /* Keep the original source location on the first 'if'. */
3412 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3413 append_to_statement_list (t, &expr);
3415 /* Set the source location of the && on the second 'if'. */
3416 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3417 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3418 new_locus);
3419 append_to_statement_list (t, &expr);
3421 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3423 location_t new_locus;
3425 /* Turn if (a || b) into
3427 if (a) goto yes;
3428 if (b) goto yes; else goto no;
3429 (yes:) */
3431 if (true_label_p == NULL)
3432 true_label_p = &local_label;
3434 /* Keep the original source location on the first 'if'. */
3435 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3436 append_to_statement_list (t, &expr);
3438 /* Set the source location of the || on the second 'if'. */
3439 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3440 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3441 new_locus);
3442 append_to_statement_list (t, &expr);
3444 else if (TREE_CODE (pred) == COND_EXPR
3445 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3446 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3448 location_t new_locus;
3450 /* As long as we're messing with gotos, turn if (a ? b : c) into
3451 if (a)
3452 if (b) goto yes; else goto no;
3453 else
3454 if (c) goto yes; else goto no;
3456 Don't do this if one of the arms has void type, which can happen
3457 in C++ when the arm is throw. */
3459 /* Keep the original source location on the first 'if'. Set the source
3460 location of the ? on the second 'if'. */
3461 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3462 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3463 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3464 false_label_p, locus),
3465 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3466 false_label_p, new_locus));
3468 else
3470 expr = build3 (COND_EXPR, void_type_node, pred,
3471 build_and_jump (true_label_p),
3472 build_and_jump (false_label_p));
3473 SET_EXPR_LOCATION (expr, locus);
3476 if (local_label)
3478 t = build1 (LABEL_EXPR, void_type_node, local_label);
3479 append_to_statement_list (t, &expr);
3482 return expr;
3485 /* Given a conditional expression EXPR with short-circuit boolean
3486 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3487 predicate apart into the equivalent sequence of conditionals. */
3489 static tree
3490 shortcut_cond_expr (tree expr)
3492 tree pred = TREE_OPERAND (expr, 0);
3493 tree then_ = TREE_OPERAND (expr, 1);
3494 tree else_ = TREE_OPERAND (expr, 2);
3495 tree true_label, false_label, end_label, t;
3496 tree *true_label_p;
3497 tree *false_label_p;
3498 bool emit_end, emit_false, jump_over_else;
3499 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3500 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3502 /* First do simple transformations. */
3503 if (!else_se)
3505 /* If there is no 'else', turn
3506 if (a && b) then c
3507 into
3508 if (a) if (b) then c. */
3509 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3511 /* Keep the original source location on the first 'if'. */
3512 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3513 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3514 /* Set the source location of the && on the second 'if'. */
3515 if (EXPR_HAS_LOCATION (pred))
3516 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3517 then_ = shortcut_cond_expr (expr);
3518 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3519 pred = TREE_OPERAND (pred, 0);
3520 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3521 SET_EXPR_LOCATION (expr, locus);
3525 if (!then_se)
3527 /* If there is no 'then', turn
3528 if (a || b); else d
3529 into
3530 if (a); else if (b); else d. */
3531 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3533 /* Keep the original source location on the first 'if'. */
3534 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3535 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3536 /* Set the source location of the || on the second 'if'. */
3537 if (EXPR_HAS_LOCATION (pred))
3538 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3539 else_ = shortcut_cond_expr (expr);
3540 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3541 pred = TREE_OPERAND (pred, 0);
3542 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3543 SET_EXPR_LOCATION (expr, locus);
3547 /* If we're done, great. */
3548 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3549 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3550 return expr;
3552 /* Otherwise we need to mess with gotos. Change
3553 if (a) c; else d;
3555 if (a); else goto no;
3556 c; goto end;
3557 no: d; end:
3558 and recursively gimplify the condition. */
3560 true_label = false_label = end_label = NULL_TREE;
3562 /* If our arms just jump somewhere, hijack those labels so we don't
3563 generate jumps to jumps. */
3565 if (then_
3566 && TREE_CODE (then_) == GOTO_EXPR
3567 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3569 true_label = GOTO_DESTINATION (then_);
3570 then_ = NULL;
3571 then_se = false;
3574 if (else_
3575 && TREE_CODE (else_) == GOTO_EXPR
3576 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3578 false_label = GOTO_DESTINATION (else_);
3579 else_ = NULL;
3580 else_se = false;
3583 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3584 if (true_label)
3585 true_label_p = &true_label;
3586 else
3587 true_label_p = NULL;
3589 /* The 'else' branch also needs a label if it contains interesting code. */
3590 if (false_label || else_se)
3591 false_label_p = &false_label;
3592 else
3593 false_label_p = NULL;
3595 /* If there was nothing else in our arms, just forward the label(s). */
3596 if (!then_se && !else_se)
3597 return shortcut_cond_r (pred, true_label_p, false_label_p,
3598 EXPR_LOC_OR_LOC (expr, input_location));
3600 /* If our last subexpression already has a terminal label, reuse it. */
3601 if (else_se)
3602 t = expr_last (else_);
3603 else if (then_se)
3604 t = expr_last (then_);
3605 else
3606 t = NULL;
3607 if (t && TREE_CODE (t) == LABEL_EXPR)
3608 end_label = LABEL_EXPR_LABEL (t);
3610 /* If we don't care about jumping to the 'else' branch, jump to the end
3611 if the condition is false. */
3612 if (!false_label_p)
3613 false_label_p = &end_label;
3615 /* We only want to emit these labels if we aren't hijacking them. */
3616 emit_end = (end_label == NULL_TREE);
3617 emit_false = (false_label == NULL_TREE);
3619 /* We only emit the jump over the else clause if we have to--if the
3620 then clause may fall through. Otherwise we can wind up with a
3621 useless jump and a useless label at the end of gimplified code,
3622 which will cause us to think that this conditional as a whole
3623 falls through even if it doesn't. If we then inline a function
3624 which ends with such a condition, that can cause us to issue an
3625 inappropriate warning about control reaching the end of a
3626 non-void function. */
3627 jump_over_else = block_may_fallthru (then_);
3629 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3630 EXPR_LOC_OR_LOC (expr, input_location));
3632 expr = NULL;
3633 append_to_statement_list (pred, &expr);
3635 append_to_statement_list (then_, &expr);
3636 if (else_se)
3638 if (jump_over_else)
3640 tree last = expr_last (expr);
3641 t = build_and_jump (&end_label);
3642 if (EXPR_HAS_LOCATION (last))
3643 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3644 append_to_statement_list (t, &expr);
3646 if (emit_false)
3648 t = build1 (LABEL_EXPR, void_type_node, false_label);
3649 append_to_statement_list (t, &expr);
3651 append_to_statement_list (else_, &expr);
3653 if (emit_end && end_label)
3655 t = build1 (LABEL_EXPR, void_type_node, end_label);
3656 append_to_statement_list (t, &expr);
3659 return expr;
3662 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3664 tree
3665 gimple_boolify (tree expr)
3667 tree type = TREE_TYPE (expr);
3668 location_t loc = EXPR_LOCATION (expr);
3670 if (TREE_CODE (expr) == NE_EXPR
3671 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3672 && integer_zerop (TREE_OPERAND (expr, 1)))
3674 tree call = TREE_OPERAND (expr, 0);
3675 tree fn = get_callee_fndecl (call);
3677 /* For __builtin_expect ((long) (x), y) recurse into x as well
3678 if x is truth_value_p. */
3679 if (fn
3680 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3681 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3682 && call_expr_nargs (call) == 2)
3684 tree arg = CALL_EXPR_ARG (call, 0);
3685 if (arg)
3687 if (TREE_CODE (arg) == NOP_EXPR
3688 && TREE_TYPE (arg) == TREE_TYPE (call))
3689 arg = TREE_OPERAND (arg, 0);
3690 if (truth_value_p (TREE_CODE (arg)))
3692 arg = gimple_boolify (arg);
3693 CALL_EXPR_ARG (call, 0)
3694 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3700 switch (TREE_CODE (expr))
3702 case TRUTH_AND_EXPR:
3703 case TRUTH_OR_EXPR:
3704 case TRUTH_XOR_EXPR:
3705 case TRUTH_ANDIF_EXPR:
3706 case TRUTH_ORIF_EXPR:
3707 /* Also boolify the arguments of truth exprs. */
3708 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3709 /* FALLTHRU */
3711 case TRUTH_NOT_EXPR:
3712 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3714 /* These expressions always produce boolean results. */
3715 if (TREE_CODE (type) != BOOLEAN_TYPE)
3716 TREE_TYPE (expr) = boolean_type_node;
3717 return expr;
3719 case ANNOTATE_EXPR:
3720 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3722 case annot_expr_ivdep_kind:
3723 case annot_expr_no_vector_kind:
3724 case annot_expr_vector_kind:
3725 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3726 if (TREE_CODE (type) != BOOLEAN_TYPE)
3727 TREE_TYPE (expr) = boolean_type_node;
3728 return expr;
3729 default:
3730 gcc_unreachable ();
3733 default:
3734 if (COMPARISON_CLASS_P (expr))
3736 /* There expressions always prduce boolean results. */
3737 if (TREE_CODE (type) != BOOLEAN_TYPE)
3738 TREE_TYPE (expr) = boolean_type_node;
3739 return expr;
3741 /* Other expressions that get here must have boolean values, but
3742 might need to be converted to the appropriate mode. */
3743 if (TREE_CODE (type) == BOOLEAN_TYPE)
3744 return expr;
3745 return fold_convert_loc (loc, boolean_type_node, expr);
3749 /* Given a conditional expression *EXPR_P without side effects, gimplify
3750 its operands. New statements are inserted to PRE_P. */
3752 static enum gimplify_status
3753 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3755 tree expr = *expr_p, cond;
3756 enum gimplify_status ret, tret;
3757 enum tree_code code;
3759 cond = gimple_boolify (COND_EXPR_COND (expr));
3761 /* We need to handle && and || specially, as their gimplification
3762 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3763 code = TREE_CODE (cond);
3764 if (code == TRUTH_ANDIF_EXPR)
3765 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3766 else if (code == TRUTH_ORIF_EXPR)
3767 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3768 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3769 COND_EXPR_COND (*expr_p) = cond;
3771 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3772 is_gimple_val, fb_rvalue);
3773 ret = MIN (ret, tret);
3774 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3775 is_gimple_val, fb_rvalue);
3777 return MIN (ret, tret);
3780 /* Return true if evaluating EXPR could trap.
3781 EXPR is GENERIC, while tree_could_trap_p can be called
3782 only on GIMPLE. */
3784 static bool
3785 generic_expr_could_trap_p (tree expr)
3787 unsigned i, n;
3789 if (!expr || is_gimple_val (expr))
3790 return false;
3792 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3793 return true;
3795 n = TREE_OPERAND_LENGTH (expr);
3796 for (i = 0; i < n; i++)
3797 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3798 return true;
3800 return false;
3803 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3804 into
3806 if (p) if (p)
3807 t1 = a; a;
3808 else or else
3809 t1 = b; b;
3812 The second form is used when *EXPR_P is of type void.
3814 PRE_P points to the list where side effects that must happen before
3815 *EXPR_P should be stored. */
3817 static enum gimplify_status
3818 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3820 tree expr = *expr_p;
3821 tree type = TREE_TYPE (expr);
3822 location_t loc = EXPR_LOCATION (expr);
3823 tree tmp, arm1, arm2;
3824 enum gimplify_status ret;
3825 tree label_true, label_false, label_cont;
3826 bool have_then_clause_p, have_else_clause_p;
3827 gcond *cond_stmt;
3828 enum tree_code pred_code;
3829 gimple_seq seq = NULL;
3831 /* If this COND_EXPR has a value, copy the values into a temporary within
3832 the arms. */
3833 if (!VOID_TYPE_P (type))
3835 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3836 tree result;
3838 /* If either an rvalue is ok or we do not require an lvalue, create the
3839 temporary. But we cannot do that if the type is addressable. */
3840 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3841 && !TREE_ADDRESSABLE (type))
3843 if (gimplify_ctxp->allow_rhs_cond_expr
3844 /* If either branch has side effects or could trap, it can't be
3845 evaluated unconditionally. */
3846 && !TREE_SIDE_EFFECTS (then_)
3847 && !generic_expr_could_trap_p (then_)
3848 && !TREE_SIDE_EFFECTS (else_)
3849 && !generic_expr_could_trap_p (else_))
3850 return gimplify_pure_cond_expr (expr_p, pre_p);
3852 tmp = create_tmp_var (type, "iftmp");
3853 result = tmp;
3856 /* Otherwise, only create and copy references to the values. */
3857 else
3859 type = build_pointer_type (type);
3861 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3862 then_ = build_fold_addr_expr_loc (loc, then_);
3864 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3865 else_ = build_fold_addr_expr_loc (loc, else_);
3867 expr
3868 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3870 tmp = create_tmp_var (type, "iftmp");
3871 result = build_simple_mem_ref_loc (loc, tmp);
3874 /* Build the new then clause, `tmp = then_;'. But don't build the
3875 assignment if the value is void; in C++ it can be if it's a throw. */
3876 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3877 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3879 /* Similarly, build the new else clause, `tmp = else_;'. */
3880 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3881 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3883 TREE_TYPE (expr) = void_type_node;
3884 recalculate_side_effects (expr);
3886 /* Move the COND_EXPR to the prequeue. */
3887 gimplify_stmt (&expr, pre_p);
3889 *expr_p = result;
3890 return GS_ALL_DONE;
3893 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3894 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3895 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3896 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3898 /* Make sure the condition has BOOLEAN_TYPE. */
3899 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3901 /* Break apart && and || conditions. */
3902 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3903 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3905 expr = shortcut_cond_expr (expr);
3907 if (expr != *expr_p)
3909 *expr_p = expr;
3911 /* We can't rely on gimplify_expr to re-gimplify the expanded
3912 form properly, as cleanups might cause the target labels to be
3913 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3914 set up a conditional context. */
3915 gimple_push_condition ();
3916 gimplify_stmt (expr_p, &seq);
3917 gimple_pop_condition (pre_p);
3918 gimple_seq_add_seq (pre_p, seq);
3920 return GS_ALL_DONE;
3924 /* Now do the normal gimplification. */
3926 /* Gimplify condition. */
3927 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3928 fb_rvalue);
3929 if (ret == GS_ERROR)
3930 return GS_ERROR;
3931 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3933 gimple_push_condition ();
3935 have_then_clause_p = have_else_clause_p = false;
3936 if (TREE_OPERAND (expr, 1) != NULL
3937 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3938 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3939 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3940 == current_function_decl)
3941 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3942 have different locations, otherwise we end up with incorrect
3943 location information on the branches. */
3944 && (optimize
3945 || !EXPR_HAS_LOCATION (expr)
3946 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3947 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3949 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3950 have_then_clause_p = true;
3952 else
3953 label_true = create_artificial_label (UNKNOWN_LOCATION);
3954 if (TREE_OPERAND (expr, 2) != NULL
3955 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3956 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3957 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3958 == current_function_decl)
3959 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3960 have different locations, otherwise we end up with incorrect
3961 location information on the branches. */
3962 && (optimize
3963 || !EXPR_HAS_LOCATION (expr)
3964 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3965 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3967 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3968 have_else_clause_p = true;
3970 else
3971 label_false = create_artificial_label (UNKNOWN_LOCATION);
3973 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3974 &arm2);
3975 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3976 label_false);
3977 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
3978 gimplify_seq_add_stmt (&seq, cond_stmt);
3979 gimple_stmt_iterator gsi = gsi_last (seq);
3980 maybe_fold_stmt (&gsi);
3982 label_cont = NULL_TREE;
3983 if (!have_then_clause_p)
3985 /* For if (...) {} else { code; } put label_true after
3986 the else block. */
3987 if (TREE_OPERAND (expr, 1) == NULL_TREE
3988 && !have_else_clause_p
3989 && TREE_OPERAND (expr, 2) != NULL_TREE)
3990 label_cont = label_true;
3991 else
3993 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3994 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3995 /* For if (...) { code; } else {} or
3996 if (...) { code; } else goto label; or
3997 if (...) { code; return; } else { ... }
3998 label_cont isn't needed. */
3999 if (!have_else_clause_p
4000 && TREE_OPERAND (expr, 2) != NULL_TREE
4001 && gimple_seq_may_fallthru (seq))
4003 gimple *g;
4004 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4006 g = gimple_build_goto (label_cont);
4008 /* GIMPLE_COND's are very low level; they have embedded
4009 gotos. This particular embedded goto should not be marked
4010 with the location of the original COND_EXPR, as it would
4011 correspond to the COND_EXPR's condition, not the ELSE or the
4012 THEN arms. To avoid marking it with the wrong location, flag
4013 it as "no location". */
4014 gimple_set_do_not_emit_location (g);
4016 gimplify_seq_add_stmt (&seq, g);
4020 if (!have_else_clause_p)
4022 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4023 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4025 if (label_cont)
4026 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4028 gimple_pop_condition (pre_p);
4029 gimple_seq_add_seq (pre_p, seq);
4031 if (ret == GS_ERROR)
4032 ; /* Do nothing. */
4033 else if (have_then_clause_p || have_else_clause_p)
4034 ret = GS_ALL_DONE;
4035 else
4037 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4038 expr = TREE_OPERAND (expr, 0);
4039 gimplify_stmt (&expr, pre_p);
4042 *expr_p = NULL;
4043 return ret;
4046 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4047 to be marked addressable.
4049 We cannot rely on such an expression being directly markable if a temporary
4050 has been created by the gimplification. In this case, we create another
4051 temporary and initialize it with a copy, which will become a store after we
4052 mark it addressable. This can happen if the front-end passed us something
4053 that it could not mark addressable yet, like a Fortran pass-by-reference
4054 parameter (int) floatvar. */
4056 static void
4057 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4059 while (handled_component_p (*expr_p))
4060 expr_p = &TREE_OPERAND (*expr_p, 0);
4061 if (is_gimple_reg (*expr_p))
4063 /* Do not allow an SSA name as the temporary. */
4064 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4065 DECL_GIMPLE_REG_P (var) = 0;
4066 *expr_p = var;
4070 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4071 a call to __builtin_memcpy. */
4073 static enum gimplify_status
4074 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4075 gimple_seq *seq_p)
4077 tree t, to, to_ptr, from, from_ptr;
4078 gcall *gs;
4079 location_t loc = EXPR_LOCATION (*expr_p);
4081 to = TREE_OPERAND (*expr_p, 0);
4082 from = TREE_OPERAND (*expr_p, 1);
4084 /* Mark the RHS addressable. Beware that it may not be possible to do so
4085 directly if a temporary has been created by the gimplification. */
4086 prepare_gimple_addressable (&from, seq_p);
4088 mark_addressable (from);
4089 from_ptr = build_fold_addr_expr_loc (loc, from);
4090 gimplify_arg (&from_ptr, seq_p, loc);
4092 mark_addressable (to);
4093 to_ptr = build_fold_addr_expr_loc (loc, to);
4094 gimplify_arg (&to_ptr, seq_p, loc);
4096 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4098 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4100 if (want_value)
4102 /* tmp = memcpy() */
4103 t = create_tmp_var (TREE_TYPE (to_ptr));
4104 gimple_call_set_lhs (gs, t);
4105 gimplify_seq_add_stmt (seq_p, gs);
4107 *expr_p = build_simple_mem_ref (t);
4108 return GS_ALL_DONE;
4111 gimplify_seq_add_stmt (seq_p, gs);
4112 *expr_p = NULL;
4113 return GS_ALL_DONE;
4116 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4117 a call to __builtin_memset. In this case we know that the RHS is
4118 a CONSTRUCTOR with an empty element list. */
4120 static enum gimplify_status
4121 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4122 gimple_seq *seq_p)
4124 tree t, from, to, to_ptr;
4125 gcall *gs;
4126 location_t loc = EXPR_LOCATION (*expr_p);
4128 /* Assert our assumptions, to abort instead of producing wrong code
4129 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4130 not be immediately exposed. */
4131 from = TREE_OPERAND (*expr_p, 1);
4132 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4133 from = TREE_OPERAND (from, 0);
4135 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4136 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4138 /* Now proceed. */
4139 to = TREE_OPERAND (*expr_p, 0);
4141 to_ptr = build_fold_addr_expr_loc (loc, to);
4142 gimplify_arg (&to_ptr, seq_p, loc);
4143 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4145 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4147 if (want_value)
4149 /* tmp = memset() */
4150 t = create_tmp_var (TREE_TYPE (to_ptr));
4151 gimple_call_set_lhs (gs, t);
4152 gimplify_seq_add_stmt (seq_p, gs);
4154 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4155 return GS_ALL_DONE;
4158 gimplify_seq_add_stmt (seq_p, gs);
4159 *expr_p = NULL;
4160 return GS_ALL_DONE;
4163 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4164 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4165 assignment. Return non-null if we detect a potential overlap. */
4167 struct gimplify_init_ctor_preeval_data
4169 /* The base decl of the lhs object. May be NULL, in which case we
4170 have to assume the lhs is indirect. */
4171 tree lhs_base_decl;
4173 /* The alias set of the lhs object. */
4174 alias_set_type lhs_alias_set;
4177 static tree
4178 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4180 struct gimplify_init_ctor_preeval_data *data
4181 = (struct gimplify_init_ctor_preeval_data *) xdata;
4182 tree t = *tp;
4184 /* If we find the base object, obviously we have overlap. */
4185 if (data->lhs_base_decl == t)
4186 return t;
4188 /* If the constructor component is indirect, determine if we have a
4189 potential overlap with the lhs. The only bits of information we
4190 have to go on at this point are addressability and alias sets. */
4191 if ((INDIRECT_REF_P (t)
4192 || TREE_CODE (t) == MEM_REF)
4193 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4194 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4195 return t;
4197 /* If the constructor component is a call, determine if it can hide a
4198 potential overlap with the lhs through an INDIRECT_REF like above.
4199 ??? Ugh - this is completely broken. In fact this whole analysis
4200 doesn't look conservative. */
4201 if (TREE_CODE (t) == CALL_EXPR)
4203 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4205 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4206 if (POINTER_TYPE_P (TREE_VALUE (type))
4207 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4208 && alias_sets_conflict_p (data->lhs_alias_set,
4209 get_alias_set
4210 (TREE_TYPE (TREE_VALUE (type)))))
4211 return t;
4214 if (IS_TYPE_OR_DECL_P (t))
4215 *walk_subtrees = 0;
4216 return NULL;
4219 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4220 force values that overlap with the lhs (as described by *DATA)
4221 into temporaries. */
4223 static void
4224 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4225 struct gimplify_init_ctor_preeval_data *data)
4227 enum gimplify_status one;
4229 /* If the value is constant, then there's nothing to pre-evaluate. */
4230 if (TREE_CONSTANT (*expr_p))
4232 /* Ensure it does not have side effects, it might contain a reference to
4233 the object we're initializing. */
4234 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4235 return;
4238 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4239 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4240 return;
4242 /* Recurse for nested constructors. */
4243 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4245 unsigned HOST_WIDE_INT ix;
4246 constructor_elt *ce;
4247 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4249 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4250 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4252 return;
4255 /* If this is a variable sized type, we must remember the size. */
4256 maybe_with_size_expr (expr_p);
4258 /* Gimplify the constructor element to something appropriate for the rhs
4259 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4260 the gimplifier will consider this a store to memory. Doing this
4261 gimplification now means that we won't have to deal with complicated
4262 language-specific trees, nor trees like SAVE_EXPR that can induce
4263 exponential search behavior. */
4264 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4265 if (one == GS_ERROR)
4267 *expr_p = NULL;
4268 return;
4271 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4272 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4273 always be true for all scalars, since is_gimple_mem_rhs insists on a
4274 temporary variable for them. */
4275 if (DECL_P (*expr_p))
4276 return;
4278 /* If this is of variable size, we have no choice but to assume it doesn't
4279 overlap since we can't make a temporary for it. */
4280 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4281 return;
4283 /* Otherwise, we must search for overlap ... */
4284 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4285 return;
4287 /* ... and if found, force the value into a temporary. */
4288 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4291 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4292 a RANGE_EXPR in a CONSTRUCTOR for an array.
4294 var = lower;
4295 loop_entry:
4296 object[var] = value;
4297 if (var == upper)
4298 goto loop_exit;
4299 var = var + 1;
4300 goto loop_entry;
4301 loop_exit:
4303 We increment var _after_ the loop exit check because we might otherwise
4304 fail if upper == TYPE_MAX_VALUE (type for upper).
4306 Note that we never have to deal with SAVE_EXPRs here, because this has
4307 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4309 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4310 gimple_seq *, bool);
4312 static void
4313 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4314 tree value, tree array_elt_type,
4315 gimple_seq *pre_p, bool cleared)
4317 tree loop_entry_label, loop_exit_label, fall_thru_label;
4318 tree var, var_type, cref, tmp;
4320 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4321 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4322 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4324 /* Create and initialize the index variable. */
4325 var_type = TREE_TYPE (upper);
4326 var = create_tmp_var (var_type);
4327 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4329 /* Add the loop entry label. */
4330 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4332 /* Build the reference. */
4333 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4334 var, NULL_TREE, NULL_TREE);
4336 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4337 the store. Otherwise just assign value to the reference. */
4339 if (TREE_CODE (value) == CONSTRUCTOR)
4340 /* NB we might have to call ourself recursively through
4341 gimplify_init_ctor_eval if the value is a constructor. */
4342 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4343 pre_p, cleared);
4344 else
4345 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4347 /* We exit the loop when the index var is equal to the upper bound. */
4348 gimplify_seq_add_stmt (pre_p,
4349 gimple_build_cond (EQ_EXPR, var, upper,
4350 loop_exit_label, fall_thru_label));
4352 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4354 /* Otherwise, increment the index var... */
4355 tmp = build2 (PLUS_EXPR, var_type, var,
4356 fold_convert (var_type, integer_one_node));
4357 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4359 /* ...and jump back to the loop entry. */
4360 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4362 /* Add the loop exit label. */
4363 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4366 /* Return true if FDECL is accessing a field that is zero sized. */
4368 static bool
4369 zero_sized_field_decl (const_tree fdecl)
4371 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4372 && integer_zerop (DECL_SIZE (fdecl)))
4373 return true;
4374 return false;
4377 /* Return true if TYPE is zero sized. */
4379 static bool
4380 zero_sized_type (const_tree type)
4382 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4383 && integer_zerop (TYPE_SIZE (type)))
4384 return true;
4385 return false;
4388 /* A subroutine of gimplify_init_constructor. Generate individual
4389 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4390 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4391 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4392 zeroed first. */
4394 static void
4395 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4396 gimple_seq *pre_p, bool cleared)
4398 tree array_elt_type = NULL;
4399 unsigned HOST_WIDE_INT ix;
4400 tree purpose, value;
4402 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4403 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4405 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4407 tree cref;
4409 /* NULL values are created above for gimplification errors. */
4410 if (value == NULL)
4411 continue;
4413 if (cleared && initializer_zerop (value))
4414 continue;
4416 /* ??? Here's to hoping the front end fills in all of the indices,
4417 so we don't have to figure out what's missing ourselves. */
4418 gcc_assert (purpose);
4420 /* Skip zero-sized fields, unless value has side-effects. This can
4421 happen with calls to functions returning a zero-sized type, which
4422 we shouldn't discard. As a number of downstream passes don't
4423 expect sets of zero-sized fields, we rely on the gimplification of
4424 the MODIFY_EXPR we make below to drop the assignment statement. */
4425 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4426 continue;
4428 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4429 whole range. */
4430 if (TREE_CODE (purpose) == RANGE_EXPR)
4432 tree lower = TREE_OPERAND (purpose, 0);
4433 tree upper = TREE_OPERAND (purpose, 1);
4435 /* If the lower bound is equal to upper, just treat it as if
4436 upper was the index. */
4437 if (simple_cst_equal (lower, upper))
4438 purpose = upper;
4439 else
4441 gimplify_init_ctor_eval_range (object, lower, upper, value,
4442 array_elt_type, pre_p, cleared);
4443 continue;
4447 if (array_elt_type)
4449 /* Do not use bitsizetype for ARRAY_REF indices. */
4450 if (TYPE_DOMAIN (TREE_TYPE (object)))
4451 purpose
4452 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4453 purpose);
4454 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4455 purpose, NULL_TREE, NULL_TREE);
4457 else
4459 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4460 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4461 unshare_expr (object), purpose, NULL_TREE);
4464 if (TREE_CODE (value) == CONSTRUCTOR
4465 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4466 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4467 pre_p, cleared);
4468 else
4470 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4471 gimplify_and_add (init, pre_p);
4472 ggc_free (init);
4477 /* Return the appropriate RHS predicate for this LHS. */
4479 gimple_predicate
4480 rhs_predicate_for (tree lhs)
4482 if (is_gimple_reg (lhs))
4483 return is_gimple_reg_rhs_or_call;
4484 else
4485 return is_gimple_mem_rhs_or_call;
4488 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4489 before the LHS has been gimplified. */
4491 static gimple_predicate
4492 initial_rhs_predicate_for (tree lhs)
4494 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4495 return is_gimple_reg_rhs_or_call;
4496 else
4497 return is_gimple_mem_rhs_or_call;
4500 /* Gimplify a C99 compound literal expression. This just means adding
4501 the DECL_EXPR before the current statement and using its anonymous
4502 decl instead. */
4504 static enum gimplify_status
4505 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4506 bool (*gimple_test_f) (tree),
4507 fallback_t fallback)
4509 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4510 tree decl = DECL_EXPR_DECL (decl_s);
4511 tree init = DECL_INITIAL (decl);
4512 /* Mark the decl as addressable if the compound literal
4513 expression is addressable now, otherwise it is marked too late
4514 after we gimplify the initialization expression. */
4515 if (TREE_ADDRESSABLE (*expr_p))
4516 TREE_ADDRESSABLE (decl) = 1;
4517 /* Otherwise, if we don't need an lvalue and have a literal directly
4518 substitute it. Check if it matches the gimple predicate, as
4519 otherwise we'd generate a new temporary, and we can as well just
4520 use the decl we already have. */
4521 else if (!TREE_ADDRESSABLE (decl)
4522 && init
4523 && (fallback & fb_lvalue) == 0
4524 && gimple_test_f (init))
4526 *expr_p = init;
4527 return GS_OK;
4530 /* Preliminarily mark non-addressed complex variables as eligible
4531 for promotion to gimple registers. We'll transform their uses
4532 as we find them. */
4533 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4534 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4535 && !TREE_THIS_VOLATILE (decl)
4536 && !needs_to_live_in_memory (decl))
4537 DECL_GIMPLE_REG_P (decl) = 1;
4539 /* If the decl is not addressable, then it is being used in some
4540 expression or on the right hand side of a statement, and it can
4541 be put into a readonly data section. */
4542 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4543 TREE_READONLY (decl) = 1;
4545 /* This decl isn't mentioned in the enclosing block, so add it to the
4546 list of temps. FIXME it seems a bit of a kludge to say that
4547 anonymous artificial vars aren't pushed, but everything else is. */
4548 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4549 gimple_add_tmp_var (decl);
4551 gimplify_and_add (decl_s, pre_p);
4552 *expr_p = decl;
4553 return GS_OK;
4556 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4557 return a new CONSTRUCTOR if something changed. */
4559 static tree
4560 optimize_compound_literals_in_ctor (tree orig_ctor)
4562 tree ctor = orig_ctor;
4563 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4564 unsigned int idx, num = vec_safe_length (elts);
4566 for (idx = 0; idx < num; idx++)
4568 tree value = (*elts)[idx].value;
4569 tree newval = value;
4570 if (TREE_CODE (value) == CONSTRUCTOR)
4571 newval = optimize_compound_literals_in_ctor (value);
4572 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4574 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4575 tree decl = DECL_EXPR_DECL (decl_s);
4576 tree init = DECL_INITIAL (decl);
4578 if (!TREE_ADDRESSABLE (value)
4579 && !TREE_ADDRESSABLE (decl)
4580 && init
4581 && TREE_CODE (init) == CONSTRUCTOR)
4582 newval = optimize_compound_literals_in_ctor (init);
4584 if (newval == value)
4585 continue;
4587 if (ctor == orig_ctor)
4589 ctor = copy_node (orig_ctor);
4590 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4591 elts = CONSTRUCTOR_ELTS (ctor);
4593 (*elts)[idx].value = newval;
4595 return ctor;
4598 /* A subroutine of gimplify_modify_expr. Break out elements of a
4599 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4601 Note that we still need to clear any elements that don't have explicit
4602 initializers, so if not all elements are initialized we keep the
4603 original MODIFY_EXPR, we just remove all of the constructor elements.
4605 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4606 GS_ERROR if we would have to create a temporary when gimplifying
4607 this constructor. Otherwise, return GS_OK.
4609 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4611 static enum gimplify_status
4612 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4613 bool want_value, bool notify_temp_creation)
4615 tree object, ctor, type;
4616 enum gimplify_status ret;
4617 vec<constructor_elt, va_gc> *elts;
4619 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4621 if (!notify_temp_creation)
4623 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4624 is_gimple_lvalue, fb_lvalue);
4625 if (ret == GS_ERROR)
4626 return ret;
4629 object = TREE_OPERAND (*expr_p, 0);
4630 ctor = TREE_OPERAND (*expr_p, 1)
4631 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4632 type = TREE_TYPE (ctor);
4633 elts = CONSTRUCTOR_ELTS (ctor);
4634 ret = GS_ALL_DONE;
4636 switch (TREE_CODE (type))
4638 case RECORD_TYPE:
4639 case UNION_TYPE:
4640 case QUAL_UNION_TYPE:
4641 case ARRAY_TYPE:
4643 struct gimplify_init_ctor_preeval_data preeval_data;
4644 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4645 bool cleared, complete_p, valid_const_initializer;
4647 /* Aggregate types must lower constructors to initialization of
4648 individual elements. The exception is that a CONSTRUCTOR node
4649 with no elements indicates zero-initialization of the whole. */
4650 if (vec_safe_is_empty (elts))
4652 if (notify_temp_creation)
4653 return GS_OK;
4654 break;
4657 /* Fetch information about the constructor to direct later processing.
4658 We might want to make static versions of it in various cases, and
4659 can only do so if it known to be a valid constant initializer. */
4660 valid_const_initializer
4661 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4662 &num_ctor_elements, &complete_p);
4664 /* If a const aggregate variable is being initialized, then it
4665 should never be a lose to promote the variable to be static. */
4666 if (valid_const_initializer
4667 && num_nonzero_elements > 1
4668 && TREE_READONLY (object)
4669 && VAR_P (object)
4670 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4672 if (notify_temp_creation)
4673 return GS_ERROR;
4674 DECL_INITIAL (object) = ctor;
4675 TREE_STATIC (object) = 1;
4676 if (!DECL_NAME (object))
4677 DECL_NAME (object) = create_tmp_var_name ("C");
4678 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4680 /* ??? C++ doesn't automatically append a .<number> to the
4681 assembler name, and even when it does, it looks at FE private
4682 data structures to figure out what that number should be,
4683 which are not set for this variable. I suppose this is
4684 important for local statics for inline functions, which aren't
4685 "local" in the object file sense. So in order to get a unique
4686 TU-local symbol, we must invoke the lhd version now. */
4687 lhd_set_decl_assembler_name (object);
4689 *expr_p = NULL_TREE;
4690 break;
4693 /* If there are "lots" of initialized elements, even discounting
4694 those that are not address constants (and thus *must* be
4695 computed at runtime), then partition the constructor into
4696 constant and non-constant parts. Block copy the constant
4697 parts in, then generate code for the non-constant parts. */
4698 /* TODO. There's code in cp/typeck.c to do this. */
4700 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4701 /* store_constructor will ignore the clearing of variable-sized
4702 objects. Initializers for such objects must explicitly set
4703 every field that needs to be set. */
4704 cleared = false;
4705 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4706 /* If the constructor isn't complete, clear the whole object
4707 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4709 ??? This ought not to be needed. For any element not present
4710 in the initializer, we should simply set them to zero. Except
4711 we'd need to *find* the elements that are not present, and that
4712 requires trickery to avoid quadratic compile-time behavior in
4713 large cases or excessive memory use in small cases. */
4714 cleared = true;
4715 else if (num_ctor_elements - num_nonzero_elements
4716 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4717 && num_nonzero_elements < num_ctor_elements / 4)
4718 /* If there are "lots" of zeros, it's more efficient to clear
4719 the memory and then set the nonzero elements. */
4720 cleared = true;
4721 else
4722 cleared = false;
4724 /* If there are "lots" of initialized elements, and all of them
4725 are valid address constants, then the entire initializer can
4726 be dropped to memory, and then memcpy'd out. Don't do this
4727 for sparse arrays, though, as it's more efficient to follow
4728 the standard CONSTRUCTOR behavior of memset followed by
4729 individual element initialization. Also don't do this for small
4730 all-zero initializers (which aren't big enough to merit
4731 clearing), and don't try to make bitwise copies of
4732 TREE_ADDRESSABLE types.
4734 We cannot apply such transformation when compiling chkp static
4735 initializer because creation of initializer image in the memory
4736 will require static initialization of bounds for it. It should
4737 result in another gimplification of similar initializer and we
4738 may fall into infinite loop. */
4739 if (valid_const_initializer
4740 && !(cleared || num_nonzero_elements == 0)
4741 && !TREE_ADDRESSABLE (type)
4742 && (!current_function_decl
4743 || !lookup_attribute ("chkp ctor",
4744 DECL_ATTRIBUTES (current_function_decl))))
4746 HOST_WIDE_INT size = int_size_in_bytes (type);
4747 unsigned int align;
4749 /* ??? We can still get unbounded array types, at least
4750 from the C++ front end. This seems wrong, but attempt
4751 to work around it for now. */
4752 if (size < 0)
4754 size = int_size_in_bytes (TREE_TYPE (object));
4755 if (size >= 0)
4756 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4759 /* Find the maximum alignment we can assume for the object. */
4760 /* ??? Make use of DECL_OFFSET_ALIGN. */
4761 if (DECL_P (object))
4762 align = DECL_ALIGN (object);
4763 else
4764 align = TYPE_ALIGN (type);
4766 /* Do a block move either if the size is so small as to make
4767 each individual move a sub-unit move on average, or if it
4768 is so large as to make individual moves inefficient. */
4769 if (size > 0
4770 && num_nonzero_elements > 1
4771 && (size < num_nonzero_elements
4772 || !can_move_by_pieces (size, align)))
4774 if (notify_temp_creation)
4775 return GS_ERROR;
4777 walk_tree (&ctor, force_labels_r, NULL, NULL);
4778 ctor = tree_output_constant_def (ctor);
4779 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4780 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4781 TREE_OPERAND (*expr_p, 1) = ctor;
4783 /* This is no longer an assignment of a CONSTRUCTOR, but
4784 we still may have processing to do on the LHS. So
4785 pretend we didn't do anything here to let that happen. */
4786 return GS_UNHANDLED;
4790 /* If the target is volatile, we have non-zero elements and more than
4791 one field to assign, initialize the target from a temporary. */
4792 if (TREE_THIS_VOLATILE (object)
4793 && !TREE_ADDRESSABLE (type)
4794 && num_nonzero_elements > 0
4795 && vec_safe_length (elts) > 1)
4797 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4798 TREE_OPERAND (*expr_p, 0) = temp;
4799 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4800 *expr_p,
4801 build2 (MODIFY_EXPR, void_type_node,
4802 object, temp));
4803 return GS_OK;
4806 if (notify_temp_creation)
4807 return GS_OK;
4809 /* If there are nonzero elements and if needed, pre-evaluate to capture
4810 elements overlapping with the lhs into temporaries. We must do this
4811 before clearing to fetch the values before they are zeroed-out. */
4812 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4814 preeval_data.lhs_base_decl = get_base_address (object);
4815 if (!DECL_P (preeval_data.lhs_base_decl))
4816 preeval_data.lhs_base_decl = NULL;
4817 preeval_data.lhs_alias_set = get_alias_set (object);
4819 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4820 pre_p, post_p, &preeval_data);
4823 bool ctor_has_side_effects_p
4824 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4826 if (cleared)
4828 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4829 Note that we still have to gimplify, in order to handle the
4830 case of variable sized types. Avoid shared tree structures. */
4831 CONSTRUCTOR_ELTS (ctor) = NULL;
4832 TREE_SIDE_EFFECTS (ctor) = 0;
4833 object = unshare_expr (object);
4834 gimplify_stmt (expr_p, pre_p);
4837 /* If we have not block cleared the object, or if there are nonzero
4838 elements in the constructor, or if the constructor has side effects,
4839 add assignments to the individual scalar fields of the object. */
4840 if (!cleared
4841 || num_nonzero_elements > 0
4842 || ctor_has_side_effects_p)
4843 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4845 *expr_p = NULL_TREE;
4847 break;
4849 case COMPLEX_TYPE:
4851 tree r, i;
4853 if (notify_temp_creation)
4854 return GS_OK;
4856 /* Extract the real and imaginary parts out of the ctor. */
4857 gcc_assert (elts->length () == 2);
4858 r = (*elts)[0].value;
4859 i = (*elts)[1].value;
4860 if (r == NULL || i == NULL)
4862 tree zero = build_zero_cst (TREE_TYPE (type));
4863 if (r == NULL)
4864 r = zero;
4865 if (i == NULL)
4866 i = zero;
4869 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4870 represent creation of a complex value. */
4871 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4873 ctor = build_complex (type, r, i);
4874 TREE_OPERAND (*expr_p, 1) = ctor;
4876 else
4878 ctor = build2 (COMPLEX_EXPR, type, r, i);
4879 TREE_OPERAND (*expr_p, 1) = ctor;
4880 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4881 pre_p,
4882 post_p,
4883 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4884 fb_rvalue);
4887 break;
4889 case VECTOR_TYPE:
4891 unsigned HOST_WIDE_INT ix;
4892 constructor_elt *ce;
4894 if (notify_temp_creation)
4895 return GS_OK;
4897 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4898 if (TREE_CONSTANT (ctor))
4900 bool constant_p = true;
4901 tree value;
4903 /* Even when ctor is constant, it might contain non-*_CST
4904 elements, such as addresses or trapping values like
4905 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4906 in VECTOR_CST nodes. */
4907 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4908 if (!CONSTANT_CLASS_P (value))
4910 constant_p = false;
4911 break;
4914 if (constant_p)
4916 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4917 break;
4920 TREE_CONSTANT (ctor) = 0;
4923 /* Vector types use CONSTRUCTOR all the way through gimple
4924 compilation as a general initializer. */
4925 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4927 enum gimplify_status tret;
4928 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4929 fb_rvalue);
4930 if (tret == GS_ERROR)
4931 ret = GS_ERROR;
4932 else if (TREE_STATIC (ctor)
4933 && !initializer_constant_valid_p (ce->value,
4934 TREE_TYPE (ce->value)))
4935 TREE_STATIC (ctor) = 0;
4937 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4938 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4940 break;
4942 default:
4943 /* So how did we get a CONSTRUCTOR for a scalar type? */
4944 gcc_unreachable ();
4947 if (ret == GS_ERROR)
4948 return GS_ERROR;
4949 /* If we have gimplified both sides of the initializer but have
4950 not emitted an assignment, do so now. */
4951 if (*expr_p)
4953 tree lhs = TREE_OPERAND (*expr_p, 0);
4954 tree rhs = TREE_OPERAND (*expr_p, 1);
4955 if (want_value && object == lhs)
4956 lhs = unshare_expr (lhs);
4957 gassign *init = gimple_build_assign (lhs, rhs);
4958 gimplify_seq_add_stmt (pre_p, init);
4960 if (want_value)
4962 *expr_p = object;
4963 return GS_OK;
4965 else
4967 *expr_p = NULL;
4968 return GS_ALL_DONE;
4972 /* Given a pointer value OP0, return a simplified version of an
4973 indirection through OP0, or NULL_TREE if no simplification is
4974 possible. This may only be applied to a rhs of an expression.
4975 Note that the resulting type may be different from the type pointed
4976 to in the sense that it is still compatible from the langhooks
4977 point of view. */
4979 static tree
4980 gimple_fold_indirect_ref_rhs (tree t)
4982 return gimple_fold_indirect_ref (t);
4985 /* Subroutine of gimplify_modify_expr to do simplifications of
4986 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4987 something changes. */
4989 static enum gimplify_status
4990 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4991 gimple_seq *pre_p, gimple_seq *post_p,
4992 bool want_value)
4994 enum gimplify_status ret = GS_UNHANDLED;
4995 bool changed;
4999 changed = false;
5000 switch (TREE_CODE (*from_p))
5002 case VAR_DECL:
5003 /* If we're assigning from a read-only variable initialized with
5004 a constructor, do the direct assignment from the constructor,
5005 but only if neither source nor target are volatile since this
5006 latter assignment might end up being done on a per-field basis. */
5007 if (DECL_INITIAL (*from_p)
5008 && TREE_READONLY (*from_p)
5009 && !TREE_THIS_VOLATILE (*from_p)
5010 && !TREE_THIS_VOLATILE (*to_p)
5011 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5013 tree old_from = *from_p;
5014 enum gimplify_status subret;
5016 /* Move the constructor into the RHS. */
5017 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5019 /* Let's see if gimplify_init_constructor will need to put
5020 it in memory. */
5021 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5022 false, true);
5023 if (subret == GS_ERROR)
5025 /* If so, revert the change. */
5026 *from_p = old_from;
5028 else
5030 ret = GS_OK;
5031 changed = true;
5034 break;
5035 case INDIRECT_REF:
5037 /* If we have code like
5039 *(const A*)(A*)&x
5041 where the type of "x" is a (possibly cv-qualified variant
5042 of "A"), treat the entire expression as identical to "x".
5043 This kind of code arises in C++ when an object is bound
5044 to a const reference, and if "x" is a TARGET_EXPR we want
5045 to take advantage of the optimization below. */
5046 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5047 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5048 if (t)
5050 if (TREE_THIS_VOLATILE (t) != volatile_p)
5052 if (DECL_P (t))
5053 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5054 build_fold_addr_expr (t));
5055 if (REFERENCE_CLASS_P (t))
5056 TREE_THIS_VOLATILE (t) = volatile_p;
5058 *from_p = t;
5059 ret = GS_OK;
5060 changed = true;
5062 break;
5065 case TARGET_EXPR:
5067 /* If we are initializing something from a TARGET_EXPR, strip the
5068 TARGET_EXPR and initialize it directly, if possible. This can't
5069 be done if the initializer is void, since that implies that the
5070 temporary is set in some non-trivial way.
5072 ??? What about code that pulls out the temp and uses it
5073 elsewhere? I think that such code never uses the TARGET_EXPR as
5074 an initializer. If I'm wrong, we'll die because the temp won't
5075 have any RTL. In that case, I guess we'll need to replace
5076 references somehow. */
5077 tree init = TARGET_EXPR_INITIAL (*from_p);
5079 if (init
5080 && !VOID_TYPE_P (TREE_TYPE (init)))
5082 *from_p = init;
5083 ret = GS_OK;
5084 changed = true;
5087 break;
5089 case COMPOUND_EXPR:
5090 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5091 caught. */
5092 gimplify_compound_expr (from_p, pre_p, true);
5093 ret = GS_OK;
5094 changed = true;
5095 break;
5097 case CONSTRUCTOR:
5098 /* If we already made some changes, let the front end have a
5099 crack at this before we break it down. */
5100 if (ret != GS_UNHANDLED)
5101 break;
5102 /* If we're initializing from a CONSTRUCTOR, break this into
5103 individual MODIFY_EXPRs. */
5104 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5105 false);
5107 case COND_EXPR:
5108 /* If we're assigning to a non-register type, push the assignment
5109 down into the branches. This is mandatory for ADDRESSABLE types,
5110 since we cannot generate temporaries for such, but it saves a
5111 copy in other cases as well. */
5112 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5114 /* This code should mirror the code in gimplify_cond_expr. */
5115 enum tree_code code = TREE_CODE (*expr_p);
5116 tree cond = *from_p;
5117 tree result = *to_p;
5119 ret = gimplify_expr (&result, pre_p, post_p,
5120 is_gimple_lvalue, fb_lvalue);
5121 if (ret != GS_ERROR)
5122 ret = GS_OK;
5124 /* If we are going to write RESULT more than once, clear
5125 TREE_READONLY flag, otherwise we might incorrectly promote
5126 the variable to static const and initialize it at compile
5127 time in one of the branches. */
5128 if (VAR_P (result)
5129 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5130 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5131 TREE_READONLY (result) = 0;
5132 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5133 TREE_OPERAND (cond, 1)
5134 = build2 (code, void_type_node, result,
5135 TREE_OPERAND (cond, 1));
5136 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5137 TREE_OPERAND (cond, 2)
5138 = build2 (code, void_type_node, unshare_expr (result),
5139 TREE_OPERAND (cond, 2));
5141 TREE_TYPE (cond) = void_type_node;
5142 recalculate_side_effects (cond);
5144 if (want_value)
5146 gimplify_and_add (cond, pre_p);
5147 *expr_p = unshare_expr (result);
5149 else
5150 *expr_p = cond;
5151 return ret;
5153 break;
5155 case CALL_EXPR:
5156 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5157 return slot so that we don't generate a temporary. */
5158 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5159 && aggregate_value_p (*from_p, *from_p))
5161 bool use_target;
5163 if (!(rhs_predicate_for (*to_p))(*from_p))
5164 /* If we need a temporary, *to_p isn't accurate. */
5165 use_target = false;
5166 /* It's OK to use the return slot directly unless it's an NRV. */
5167 else if (TREE_CODE (*to_p) == RESULT_DECL
5168 && DECL_NAME (*to_p) == NULL_TREE
5169 && needs_to_live_in_memory (*to_p))
5170 use_target = true;
5171 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5172 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5173 /* Don't force regs into memory. */
5174 use_target = false;
5175 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5176 /* It's OK to use the target directly if it's being
5177 initialized. */
5178 use_target = true;
5179 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5180 != INTEGER_CST)
5181 /* Always use the target and thus RSO for variable-sized types.
5182 GIMPLE cannot deal with a variable-sized assignment
5183 embedded in a call statement. */
5184 use_target = true;
5185 else if (TREE_CODE (*to_p) != SSA_NAME
5186 && (!is_gimple_variable (*to_p)
5187 || needs_to_live_in_memory (*to_p)))
5188 /* Don't use the original target if it's already addressable;
5189 if its address escapes, and the called function uses the
5190 NRV optimization, a conforming program could see *to_p
5191 change before the called function returns; see c++/19317.
5192 When optimizing, the return_slot pass marks more functions
5193 as safe after we have escape info. */
5194 use_target = false;
5195 else
5196 use_target = true;
5198 if (use_target)
5200 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5201 mark_addressable (*to_p);
5204 break;
5206 case WITH_SIZE_EXPR:
5207 /* Likewise for calls that return an aggregate of non-constant size,
5208 since we would not be able to generate a temporary at all. */
5209 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5211 *from_p = TREE_OPERAND (*from_p, 0);
5212 /* We don't change ret in this case because the
5213 WITH_SIZE_EXPR might have been added in
5214 gimplify_modify_expr, so returning GS_OK would lead to an
5215 infinite loop. */
5216 changed = true;
5218 break;
5220 /* If we're initializing from a container, push the initialization
5221 inside it. */
5222 case CLEANUP_POINT_EXPR:
5223 case BIND_EXPR:
5224 case STATEMENT_LIST:
5226 tree wrap = *from_p;
5227 tree t;
5229 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5230 fb_lvalue);
5231 if (ret != GS_ERROR)
5232 ret = GS_OK;
5234 t = voidify_wrapper_expr (wrap, *expr_p);
5235 gcc_assert (t == *expr_p);
5237 if (want_value)
5239 gimplify_and_add (wrap, pre_p);
5240 *expr_p = unshare_expr (*to_p);
5242 else
5243 *expr_p = wrap;
5244 return GS_OK;
5247 case COMPOUND_LITERAL_EXPR:
5249 tree complit = TREE_OPERAND (*expr_p, 1);
5250 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5251 tree decl = DECL_EXPR_DECL (decl_s);
5252 tree init = DECL_INITIAL (decl);
5254 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5255 into struct T x = { 0, 1, 2 } if the address of the
5256 compound literal has never been taken. */
5257 if (!TREE_ADDRESSABLE (complit)
5258 && !TREE_ADDRESSABLE (decl)
5259 && init)
5261 *expr_p = copy_node (*expr_p);
5262 TREE_OPERAND (*expr_p, 1) = init;
5263 return GS_OK;
5267 default:
5268 break;
5271 while (changed);
5273 return ret;
5277 /* Return true if T looks like a valid GIMPLE statement. */
5279 static bool
5280 is_gimple_stmt (tree t)
5282 const enum tree_code code = TREE_CODE (t);
5284 switch (code)
5286 case NOP_EXPR:
5287 /* The only valid NOP_EXPR is the empty statement. */
5288 return IS_EMPTY_STMT (t);
5290 case BIND_EXPR:
5291 case COND_EXPR:
5292 /* These are only valid if they're void. */
5293 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5295 case SWITCH_EXPR:
5296 case GOTO_EXPR:
5297 case RETURN_EXPR:
5298 case LABEL_EXPR:
5299 case CASE_LABEL_EXPR:
5300 case TRY_CATCH_EXPR:
5301 case TRY_FINALLY_EXPR:
5302 case EH_FILTER_EXPR:
5303 case CATCH_EXPR:
5304 case ASM_EXPR:
5305 case STATEMENT_LIST:
5306 case OACC_PARALLEL:
5307 case OACC_KERNELS:
5308 case OACC_DATA:
5309 case OACC_HOST_DATA:
5310 case OACC_DECLARE:
5311 case OACC_UPDATE:
5312 case OACC_ENTER_DATA:
5313 case OACC_EXIT_DATA:
5314 case OACC_CACHE:
5315 case OMP_PARALLEL:
5316 case OMP_FOR:
5317 case OMP_SIMD:
5318 case CILK_SIMD:
5319 case OMP_DISTRIBUTE:
5320 case OACC_LOOP:
5321 case OMP_SECTIONS:
5322 case OMP_SECTION:
5323 case OMP_SINGLE:
5324 case OMP_MASTER:
5325 case OMP_TASKGROUP:
5326 case OMP_ORDERED:
5327 case OMP_CRITICAL:
5328 case OMP_TASK:
5329 case OMP_TARGET:
5330 case OMP_TARGET_DATA:
5331 case OMP_TARGET_UPDATE:
5332 case OMP_TARGET_ENTER_DATA:
5333 case OMP_TARGET_EXIT_DATA:
5334 case OMP_TASKLOOP:
5335 case OMP_TEAMS:
5336 /* These are always void. */
5337 return true;
5339 case CALL_EXPR:
5340 case MODIFY_EXPR:
5341 case PREDICT_EXPR:
5342 /* These are valid regardless of their type. */
5343 return true;
5345 default:
5346 return false;
5351 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5352 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5353 DECL_GIMPLE_REG_P set.
5355 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5356 other, unmodified part of the complex object just before the total store.
5357 As a consequence, if the object is still uninitialized, an undefined value
5358 will be loaded into a register, which may result in a spurious exception
5359 if the register is floating-point and the value happens to be a signaling
5360 NaN for example. Then the fully-fledged complex operations lowering pass
5361 followed by a DCE pass are necessary in order to fix things up. */
5363 static enum gimplify_status
5364 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5365 bool want_value)
5367 enum tree_code code, ocode;
5368 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5370 lhs = TREE_OPERAND (*expr_p, 0);
5371 rhs = TREE_OPERAND (*expr_p, 1);
5372 code = TREE_CODE (lhs);
5373 lhs = TREE_OPERAND (lhs, 0);
5375 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5376 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5377 TREE_NO_WARNING (other) = 1;
5378 other = get_formal_tmp_var (other, pre_p);
5380 realpart = code == REALPART_EXPR ? rhs : other;
5381 imagpart = code == REALPART_EXPR ? other : rhs;
5383 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5384 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5385 else
5386 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5388 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5389 *expr_p = (want_value) ? rhs : NULL_TREE;
5391 return GS_ALL_DONE;
5394 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5396 modify_expr
5397 : varname '=' rhs
5398 | '*' ID '=' rhs
5400 PRE_P points to the list where side effects that must happen before
5401 *EXPR_P should be stored.
5403 POST_P points to the list where side effects that must happen after
5404 *EXPR_P should be stored.
5406 WANT_VALUE is nonzero iff we want to use the value of this expression
5407 in another expression. */
5409 static enum gimplify_status
5410 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5411 bool want_value)
5413 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5414 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5415 enum gimplify_status ret = GS_UNHANDLED;
5416 gimple *assign;
5417 location_t loc = EXPR_LOCATION (*expr_p);
5418 gimple_stmt_iterator gsi;
5420 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5421 || TREE_CODE (*expr_p) == INIT_EXPR);
5423 /* Trying to simplify a clobber using normal logic doesn't work,
5424 so handle it here. */
5425 if (TREE_CLOBBER_P (*from_p))
5427 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5428 if (ret == GS_ERROR)
5429 return ret;
5430 gcc_assert (!want_value
5431 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5432 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5433 *expr_p = NULL;
5434 return GS_ALL_DONE;
5437 /* Insert pointer conversions required by the middle-end that are not
5438 required by the frontend. This fixes middle-end type checking for
5439 for example gcc.dg/redecl-6.c. */
5440 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5442 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5443 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5444 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5447 /* See if any simplifications can be done based on what the RHS is. */
5448 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5449 want_value);
5450 if (ret != GS_UNHANDLED)
5451 return ret;
5453 /* For zero sized types only gimplify the left hand side and right hand
5454 side as statements and throw away the assignment. Do this after
5455 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5456 types properly. */
5457 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
5459 gimplify_stmt (from_p, pre_p);
5460 gimplify_stmt (to_p, pre_p);
5461 *expr_p = NULL_TREE;
5462 return GS_ALL_DONE;
5465 /* If the value being copied is of variable width, compute the length
5466 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5467 before gimplifying any of the operands so that we can resolve any
5468 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5469 the size of the expression to be copied, not of the destination, so
5470 that is what we must do here. */
5471 maybe_with_size_expr (from_p);
5473 /* As a special case, we have to temporarily allow for assignments
5474 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5475 a toplevel statement, when gimplifying the GENERIC expression
5476 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5477 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5479 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5480 prevent gimplify_expr from trying to create a new temporary for
5481 foo's LHS, we tell it that it should only gimplify until it
5482 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5483 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5484 and all we need to do here is set 'a' to be its LHS. */
5486 /* Gimplify the RHS first for C++17 and bug 71104. */
5487 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5488 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5489 if (ret == GS_ERROR)
5490 return ret;
5492 /* Then gimplify the LHS. */
5493 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5494 twice we have to make sure to gimplify into non-SSA as otherwise
5495 the abnormal edge added later will make those defs not dominate
5496 their uses.
5497 ??? Technically this applies only to the registers used in the
5498 resulting non-register *TO_P. */
5499 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5500 if (saved_into_ssa
5501 && TREE_CODE (*from_p) == CALL_EXPR
5502 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5503 gimplify_ctxp->into_ssa = false;
5504 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5505 gimplify_ctxp->into_ssa = saved_into_ssa;
5506 if (ret == GS_ERROR)
5507 return ret;
5509 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5510 guess for the predicate was wrong. */
5511 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5512 if (final_pred != initial_pred)
5514 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5515 if (ret == GS_ERROR)
5516 return ret;
5519 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5520 size as argument to the call. */
5521 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5523 tree call = TREE_OPERAND (*from_p, 0);
5524 tree vlasize = TREE_OPERAND (*from_p, 1);
5526 if (TREE_CODE (call) == CALL_EXPR
5527 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5529 int nargs = call_expr_nargs (call);
5530 tree type = TREE_TYPE (call);
5531 tree ap = CALL_EXPR_ARG (call, 0);
5532 tree tag = CALL_EXPR_ARG (call, 1);
5533 tree aptag = CALL_EXPR_ARG (call, 2);
5534 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5535 IFN_VA_ARG, type,
5536 nargs + 1, ap, tag,
5537 aptag, vlasize);
5538 TREE_OPERAND (*from_p, 0) = newcall;
5542 /* Now see if the above changed *from_p to something we handle specially. */
5543 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5544 want_value);
5545 if (ret != GS_UNHANDLED)
5546 return ret;
5548 /* If we've got a variable sized assignment between two lvalues (i.e. does
5549 not involve a call), then we can make things a bit more straightforward
5550 by converting the assignment to memcpy or memset. */
5551 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5553 tree from = TREE_OPERAND (*from_p, 0);
5554 tree size = TREE_OPERAND (*from_p, 1);
5556 if (TREE_CODE (from) == CONSTRUCTOR)
5557 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5559 if (is_gimple_addressable (from))
5561 *from_p = from;
5562 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5563 pre_p);
5567 /* Transform partial stores to non-addressable complex variables into
5568 total stores. This allows us to use real instead of virtual operands
5569 for these variables, which improves optimization. */
5570 if ((TREE_CODE (*to_p) == REALPART_EXPR
5571 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5572 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5573 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5575 /* Try to alleviate the effects of the gimplification creating artificial
5576 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5577 make sure not to create DECL_DEBUG_EXPR links across functions. */
5578 if (!gimplify_ctxp->into_ssa
5579 && VAR_P (*from_p)
5580 && DECL_IGNORED_P (*from_p)
5581 && DECL_P (*to_p)
5582 && !DECL_IGNORED_P (*to_p)
5583 && decl_function_context (*to_p) == current_function_decl)
5585 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5586 DECL_NAME (*from_p)
5587 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5588 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5589 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5592 if (want_value && TREE_THIS_VOLATILE (*to_p))
5593 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5595 if (TREE_CODE (*from_p) == CALL_EXPR)
5597 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5598 instead of a GIMPLE_ASSIGN. */
5599 gcall *call_stmt;
5600 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5602 /* Gimplify internal functions created in the FEs. */
5603 int nargs = call_expr_nargs (*from_p), i;
5604 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5605 auto_vec<tree> vargs (nargs);
5607 for (i = 0; i < nargs; i++)
5609 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5610 EXPR_LOCATION (*from_p));
5611 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5613 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5614 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5616 else
5618 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5619 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5620 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5621 tree fndecl = get_callee_fndecl (*from_p);
5622 if (fndecl
5623 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5624 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5625 && call_expr_nargs (*from_p) == 3)
5626 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5627 CALL_EXPR_ARG (*from_p, 0),
5628 CALL_EXPR_ARG (*from_p, 1),
5629 CALL_EXPR_ARG (*from_p, 2));
5630 else
5632 call_stmt = gimple_build_call_from_tree (*from_p);
5633 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
5636 notice_special_calls (call_stmt);
5637 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5638 gimple_call_set_lhs (call_stmt, *to_p);
5639 else if (TREE_CODE (*to_p) == SSA_NAME)
5640 /* The above is somewhat premature, avoid ICEing later for a
5641 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5642 ??? This doesn't make it a default-def. */
5643 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5645 if (EXPR_CILK_SPAWN (*from_p))
5646 gimplify_cilk_detach (pre_p);
5647 assign = call_stmt;
5649 else
5651 assign = gimple_build_assign (*to_p, *from_p);
5652 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5653 if (COMPARISON_CLASS_P (*from_p))
5654 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5657 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5659 /* We should have got an SSA name from the start. */
5660 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5661 || ! gimple_in_ssa_p (cfun));
5664 gimplify_seq_add_stmt (pre_p, assign);
5665 gsi = gsi_last (*pre_p);
5666 maybe_fold_stmt (&gsi);
5668 if (want_value)
5670 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5671 return GS_OK;
5673 else
5674 *expr_p = NULL;
5676 return GS_ALL_DONE;
5679 /* Gimplify a comparison between two variable-sized objects. Do this
5680 with a call to BUILT_IN_MEMCMP. */
5682 static enum gimplify_status
5683 gimplify_variable_sized_compare (tree *expr_p)
5685 location_t loc = EXPR_LOCATION (*expr_p);
5686 tree op0 = TREE_OPERAND (*expr_p, 0);
5687 tree op1 = TREE_OPERAND (*expr_p, 1);
5688 tree t, arg, dest, src, expr;
5690 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5691 arg = unshare_expr (arg);
5692 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5693 src = build_fold_addr_expr_loc (loc, op1);
5694 dest = build_fold_addr_expr_loc (loc, op0);
5695 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5696 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5698 expr
5699 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5700 SET_EXPR_LOCATION (expr, loc);
5701 *expr_p = expr;
5703 return GS_OK;
5706 /* Gimplify a comparison between two aggregate objects of integral scalar
5707 mode as a comparison between the bitwise equivalent scalar values. */
5709 static enum gimplify_status
5710 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5712 location_t loc = EXPR_LOCATION (*expr_p);
5713 tree op0 = TREE_OPERAND (*expr_p, 0);
5714 tree op1 = TREE_OPERAND (*expr_p, 1);
5716 tree type = TREE_TYPE (op0);
5717 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5719 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5720 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5722 *expr_p
5723 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5725 return GS_OK;
5728 /* Gimplify an expression sequence. This function gimplifies each
5729 expression and rewrites the original expression with the last
5730 expression of the sequence in GIMPLE form.
5732 PRE_P points to the list where the side effects for all the
5733 expressions in the sequence will be emitted.
5735 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5737 static enum gimplify_status
5738 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5740 tree t = *expr_p;
5744 tree *sub_p = &TREE_OPERAND (t, 0);
5746 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5747 gimplify_compound_expr (sub_p, pre_p, false);
5748 else
5749 gimplify_stmt (sub_p, pre_p);
5751 t = TREE_OPERAND (t, 1);
5753 while (TREE_CODE (t) == COMPOUND_EXPR);
5755 *expr_p = t;
5756 if (want_value)
5757 return GS_OK;
5758 else
5760 gimplify_stmt (expr_p, pre_p);
5761 return GS_ALL_DONE;
5765 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5766 gimplify. After gimplification, EXPR_P will point to a new temporary
5767 that holds the original value of the SAVE_EXPR node.
5769 PRE_P points to the list where side effects that must happen before
5770 *EXPR_P should be stored. */
5772 static enum gimplify_status
5773 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5775 enum gimplify_status ret = GS_ALL_DONE;
5776 tree val;
5778 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5779 val = TREE_OPERAND (*expr_p, 0);
5781 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5782 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5784 /* The operand may be a void-valued expression such as SAVE_EXPRs
5785 generated by the Java frontend for class initialization. It is
5786 being executed only for its side-effects. */
5787 if (TREE_TYPE (val) == void_type_node)
5789 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5790 is_gimple_stmt, fb_none);
5791 val = NULL;
5793 else
5794 /* The temporary may not be an SSA name as later abnormal and EH
5795 control flow may invalidate use/def domination. */
5796 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5798 TREE_OPERAND (*expr_p, 0) = val;
5799 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5802 *expr_p = val;
5804 return ret;
5807 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5809 unary_expr
5810 : ...
5811 | '&' varname
5814 PRE_P points to the list where side effects that must happen before
5815 *EXPR_P should be stored.
5817 POST_P points to the list where side effects that must happen after
5818 *EXPR_P should be stored. */
5820 static enum gimplify_status
5821 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5823 tree expr = *expr_p;
5824 tree op0 = TREE_OPERAND (expr, 0);
5825 enum gimplify_status ret;
5826 location_t loc = EXPR_LOCATION (*expr_p);
5828 switch (TREE_CODE (op0))
5830 case INDIRECT_REF:
5831 do_indirect_ref:
5832 /* Check if we are dealing with an expression of the form '&*ptr'.
5833 While the front end folds away '&*ptr' into 'ptr', these
5834 expressions may be generated internally by the compiler (e.g.,
5835 builtins like __builtin_va_end). */
5836 /* Caution: the silent array decomposition semantics we allow for
5837 ADDR_EXPR means we can't always discard the pair. */
5838 /* Gimplification of the ADDR_EXPR operand may drop
5839 cv-qualification conversions, so make sure we add them if
5840 needed. */
5842 tree op00 = TREE_OPERAND (op0, 0);
5843 tree t_expr = TREE_TYPE (expr);
5844 tree t_op00 = TREE_TYPE (op00);
5846 if (!useless_type_conversion_p (t_expr, t_op00))
5847 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5848 *expr_p = op00;
5849 ret = GS_OK;
5851 break;
5853 case VIEW_CONVERT_EXPR:
5854 /* Take the address of our operand and then convert it to the type of
5855 this ADDR_EXPR.
5857 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5858 all clear. The impact of this transformation is even less clear. */
5860 /* If the operand is a useless conversion, look through it. Doing so
5861 guarantees that the ADDR_EXPR and its operand will remain of the
5862 same type. */
5863 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5864 op0 = TREE_OPERAND (op0, 0);
5866 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5867 build_fold_addr_expr_loc (loc,
5868 TREE_OPERAND (op0, 0)));
5869 ret = GS_OK;
5870 break;
5872 case MEM_REF:
5873 if (integer_zerop (TREE_OPERAND (op0, 1)))
5874 goto do_indirect_ref;
5876 /* fall through */
5878 default:
5879 /* If we see a call to a declared builtin or see its address
5880 being taken (we can unify those cases here) then we can mark
5881 the builtin for implicit generation by GCC. */
5882 if (TREE_CODE (op0) == FUNCTION_DECL
5883 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5884 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5885 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5887 /* We use fb_either here because the C frontend sometimes takes
5888 the address of a call that returns a struct; see
5889 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5890 the implied temporary explicit. */
5892 /* Make the operand addressable. */
5893 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5894 is_gimple_addressable, fb_either);
5895 if (ret == GS_ERROR)
5896 break;
5898 /* Then mark it. Beware that it may not be possible to do so directly
5899 if a temporary has been created by the gimplification. */
5900 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5902 op0 = TREE_OPERAND (expr, 0);
5904 /* For various reasons, the gimplification of the expression
5905 may have made a new INDIRECT_REF. */
5906 if (TREE_CODE (op0) == INDIRECT_REF)
5907 goto do_indirect_ref;
5909 mark_addressable (TREE_OPERAND (expr, 0));
5911 /* The FEs may end up building ADDR_EXPRs early on a decl with
5912 an incomplete type. Re-build ADDR_EXPRs in canonical form
5913 here. */
5914 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5915 *expr_p = build_fold_addr_expr (op0);
5917 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5918 recompute_tree_invariant_for_addr_expr (*expr_p);
5920 /* If we re-built the ADDR_EXPR add a conversion to the original type
5921 if required. */
5922 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5923 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5925 break;
5928 return ret;
5931 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5932 value; output operands should be a gimple lvalue. */
5934 static enum gimplify_status
5935 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5937 tree expr;
5938 int noutputs;
5939 const char **oconstraints;
5940 int i;
5941 tree link;
5942 const char *constraint;
5943 bool allows_mem, allows_reg, is_inout;
5944 enum gimplify_status ret, tret;
5945 gasm *stmt;
5946 vec<tree, va_gc> *inputs;
5947 vec<tree, va_gc> *outputs;
5948 vec<tree, va_gc> *clobbers;
5949 vec<tree, va_gc> *labels;
5950 tree link_next;
5952 expr = *expr_p;
5953 noutputs = list_length (ASM_OUTPUTS (expr));
5954 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5956 inputs = NULL;
5957 outputs = NULL;
5958 clobbers = NULL;
5959 labels = NULL;
5961 ret = GS_ALL_DONE;
5962 link_next = NULL_TREE;
5963 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5965 bool ok;
5966 size_t constraint_len;
5968 link_next = TREE_CHAIN (link);
5970 oconstraints[i]
5971 = constraint
5972 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5973 constraint_len = strlen (constraint);
5974 if (constraint_len == 0)
5975 continue;
5977 ok = parse_output_constraint (&constraint, i, 0, 0,
5978 &allows_mem, &allows_reg, &is_inout);
5979 if (!ok)
5981 ret = GS_ERROR;
5982 is_inout = false;
5985 if (!allows_reg && allows_mem)
5986 mark_addressable (TREE_VALUE (link));
5988 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5989 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5990 fb_lvalue | fb_mayfail);
5991 if (tret == GS_ERROR)
5993 error ("invalid lvalue in asm output %d", i);
5994 ret = tret;
5997 /* If the constraint does not allow memory make sure we gimplify
5998 it to a register if it is not already but its base is. This
5999 happens for complex and vector components. */
6000 if (!allows_mem)
6002 tree op = TREE_VALUE (link);
6003 if (! is_gimple_val (op)
6004 && is_gimple_reg_type (TREE_TYPE (op))
6005 && is_gimple_reg (get_base_address (op)))
6007 tree tem = create_tmp_reg (TREE_TYPE (op));
6008 tree ass;
6009 if (is_inout)
6011 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6012 tem, unshare_expr (op));
6013 gimplify_and_add (ass, pre_p);
6015 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6016 gimplify_and_add (ass, post_p);
6018 TREE_VALUE (link) = tem;
6019 tret = GS_OK;
6023 vec_safe_push (outputs, link);
6024 TREE_CHAIN (link) = NULL_TREE;
6026 if (is_inout)
6028 /* An input/output operand. To give the optimizers more
6029 flexibility, split it into separate input and output
6030 operands. */
6031 tree input;
6032 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6033 char buf[11];
6035 /* Turn the in/out constraint into an output constraint. */
6036 char *p = xstrdup (constraint);
6037 p[0] = '=';
6038 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6040 /* And add a matching input constraint. */
6041 if (allows_reg)
6043 sprintf (buf, "%u", i);
6045 /* If there are multiple alternatives in the constraint,
6046 handle each of them individually. Those that allow register
6047 will be replaced with operand number, the others will stay
6048 unchanged. */
6049 if (strchr (p, ',') != NULL)
6051 size_t len = 0, buflen = strlen (buf);
6052 char *beg, *end, *str, *dst;
6054 for (beg = p + 1;;)
6056 end = strchr (beg, ',');
6057 if (end == NULL)
6058 end = strchr (beg, '\0');
6059 if ((size_t) (end - beg) < buflen)
6060 len += buflen + 1;
6061 else
6062 len += end - beg + 1;
6063 if (*end)
6064 beg = end + 1;
6065 else
6066 break;
6069 str = (char *) alloca (len);
6070 for (beg = p + 1, dst = str;;)
6072 const char *tem;
6073 bool mem_p, reg_p, inout_p;
6075 end = strchr (beg, ',');
6076 if (end)
6077 *end = '\0';
6078 beg[-1] = '=';
6079 tem = beg - 1;
6080 parse_output_constraint (&tem, i, 0, 0,
6081 &mem_p, &reg_p, &inout_p);
6082 if (dst != str)
6083 *dst++ = ',';
6084 if (reg_p)
6086 memcpy (dst, buf, buflen);
6087 dst += buflen;
6089 else
6091 if (end)
6092 len = end - beg;
6093 else
6094 len = strlen (beg);
6095 memcpy (dst, beg, len);
6096 dst += len;
6098 if (end)
6099 beg = end + 1;
6100 else
6101 break;
6103 *dst = '\0';
6104 input = build_string (dst - str, str);
6106 else
6107 input = build_string (strlen (buf), buf);
6109 else
6110 input = build_string (constraint_len - 1, constraint + 1);
6112 free (p);
6114 input = build_tree_list (build_tree_list (NULL_TREE, input),
6115 unshare_expr (TREE_VALUE (link)));
6116 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6120 link_next = NULL_TREE;
6121 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6123 link_next = TREE_CHAIN (link);
6124 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6125 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6126 oconstraints, &allows_mem, &allows_reg);
6128 /* If we can't make copies, we can only accept memory. */
6129 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6131 if (allows_mem)
6132 allows_reg = 0;
6133 else
6135 error ("impossible constraint in %<asm%>");
6136 error ("non-memory input %d must stay in memory", i);
6137 return GS_ERROR;
6141 /* If the operand is a memory input, it should be an lvalue. */
6142 if (!allows_reg && allows_mem)
6144 tree inputv = TREE_VALUE (link);
6145 STRIP_NOPS (inputv);
6146 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6147 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6148 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6149 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6150 || TREE_CODE (inputv) == MODIFY_EXPR)
6151 TREE_VALUE (link) = error_mark_node;
6152 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6153 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6154 if (tret != GS_ERROR)
6156 /* Unlike output operands, memory inputs are not guaranteed
6157 to be lvalues by the FE, and while the expressions are
6158 marked addressable there, if it is e.g. a statement
6159 expression, temporaries in it might not end up being
6160 addressable. They might be already used in the IL and thus
6161 it is too late to make them addressable now though. */
6162 tree x = TREE_VALUE (link);
6163 while (handled_component_p (x))
6164 x = TREE_OPERAND (x, 0);
6165 if (TREE_CODE (x) == MEM_REF
6166 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6167 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6168 if ((VAR_P (x)
6169 || TREE_CODE (x) == PARM_DECL
6170 || TREE_CODE (x) == RESULT_DECL)
6171 && !TREE_ADDRESSABLE (x)
6172 && is_gimple_reg (x))
6174 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6175 input_location), 0,
6176 "memory input %d is not directly addressable",
6178 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6181 mark_addressable (TREE_VALUE (link));
6182 if (tret == GS_ERROR)
6184 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6185 "memory input %d is not directly addressable", i);
6186 ret = tret;
6189 else
6191 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6192 is_gimple_asm_val, fb_rvalue);
6193 if (tret == GS_ERROR)
6194 ret = tret;
6197 TREE_CHAIN (link) = NULL_TREE;
6198 vec_safe_push (inputs, link);
6201 link_next = NULL_TREE;
6202 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6204 link_next = TREE_CHAIN (link);
6205 TREE_CHAIN (link) = NULL_TREE;
6206 vec_safe_push (clobbers, link);
6209 link_next = NULL_TREE;
6210 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6212 link_next = TREE_CHAIN (link);
6213 TREE_CHAIN (link) = NULL_TREE;
6214 vec_safe_push (labels, link);
6217 /* Do not add ASMs with errors to the gimple IL stream. */
6218 if (ret != GS_ERROR)
6220 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6221 inputs, outputs, clobbers, labels);
6223 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6224 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6226 gimplify_seq_add_stmt (pre_p, stmt);
6229 return ret;
6232 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6233 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6234 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6235 return to this function.
6237 FIXME should we complexify the prequeue handling instead? Or use flags
6238 for all the cleanups and let the optimizer tighten them up? The current
6239 code seems pretty fragile; it will break on a cleanup within any
6240 non-conditional nesting. But any such nesting would be broken, anyway;
6241 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6242 and continues out of it. We can do that at the RTL level, though, so
6243 having an optimizer to tighten up try/finally regions would be a Good
6244 Thing. */
6246 static enum gimplify_status
6247 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6249 gimple_stmt_iterator iter;
6250 gimple_seq body_sequence = NULL;
6252 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6254 /* We only care about the number of conditions between the innermost
6255 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6256 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6257 int old_conds = gimplify_ctxp->conditions;
6258 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6259 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6260 gimplify_ctxp->conditions = 0;
6261 gimplify_ctxp->conditional_cleanups = NULL;
6262 gimplify_ctxp->in_cleanup_point_expr = true;
6264 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6266 gimplify_ctxp->conditions = old_conds;
6267 gimplify_ctxp->conditional_cleanups = old_cleanups;
6268 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6270 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6272 gimple *wce = gsi_stmt (iter);
6274 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6276 if (gsi_one_before_end_p (iter))
6278 /* Note that gsi_insert_seq_before and gsi_remove do not
6279 scan operands, unlike some other sequence mutators. */
6280 if (!gimple_wce_cleanup_eh_only (wce))
6281 gsi_insert_seq_before_without_update (&iter,
6282 gimple_wce_cleanup (wce),
6283 GSI_SAME_STMT);
6284 gsi_remove (&iter, true);
6285 break;
6287 else
6289 gtry *gtry;
6290 gimple_seq seq;
6291 enum gimple_try_flags kind;
6293 if (gimple_wce_cleanup_eh_only (wce))
6294 kind = GIMPLE_TRY_CATCH;
6295 else
6296 kind = GIMPLE_TRY_FINALLY;
6297 seq = gsi_split_seq_after (iter);
6299 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6300 /* Do not use gsi_replace here, as it may scan operands.
6301 We want to do a simple structural modification only. */
6302 gsi_set_stmt (&iter, gtry);
6303 iter = gsi_start (gtry->eval);
6306 else
6307 gsi_next (&iter);
6310 gimplify_seq_add_seq (pre_p, body_sequence);
6311 if (temp)
6313 *expr_p = temp;
6314 return GS_OK;
6316 else
6318 *expr_p = NULL;
6319 return GS_ALL_DONE;
6323 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6324 is the cleanup action required. EH_ONLY is true if the cleanup should
6325 only be executed if an exception is thrown, not on normal exit.
6326 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6327 only valid for clobbers. */
6329 static void
6330 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6331 bool force_uncond = false)
6333 gimple *wce;
6334 gimple_seq cleanup_stmts = NULL;
6336 /* Errors can result in improperly nested cleanups. Which results in
6337 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6338 if (seen_error ())
6339 return;
6341 if (gimple_conditional_context ())
6343 /* If we're in a conditional context, this is more complex. We only
6344 want to run the cleanup if we actually ran the initialization that
6345 necessitates it, but we want to run it after the end of the
6346 conditional context. So we wrap the try/finally around the
6347 condition and use a flag to determine whether or not to actually
6348 run the destructor. Thus
6350 test ? f(A()) : 0
6352 becomes (approximately)
6354 flag = 0;
6355 try {
6356 if (test) { A::A(temp); flag = 1; val = f(temp); }
6357 else { val = 0; }
6358 } finally {
6359 if (flag) A::~A(temp);
6363 if (force_uncond)
6365 gimplify_stmt (&cleanup, &cleanup_stmts);
6366 wce = gimple_build_wce (cleanup_stmts);
6367 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6369 else
6371 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6372 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6373 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6375 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6376 gimplify_stmt (&cleanup, &cleanup_stmts);
6377 wce = gimple_build_wce (cleanup_stmts);
6379 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6380 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6381 gimplify_seq_add_stmt (pre_p, ftrue);
6383 /* Because of this manipulation, and the EH edges that jump
6384 threading cannot redirect, the temporary (VAR) will appear
6385 to be used uninitialized. Don't warn. */
6386 TREE_NO_WARNING (var) = 1;
6389 else
6391 gimplify_stmt (&cleanup, &cleanup_stmts);
6392 wce = gimple_build_wce (cleanup_stmts);
6393 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6394 gimplify_seq_add_stmt (pre_p, wce);
6398 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6400 static enum gimplify_status
6401 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6403 tree targ = *expr_p;
6404 tree temp = TARGET_EXPR_SLOT (targ);
6405 tree init = TARGET_EXPR_INITIAL (targ);
6406 enum gimplify_status ret;
6408 bool unpoison_empty_seq = false;
6409 gimple_stmt_iterator unpoison_it;
6411 if (init)
6413 tree cleanup = NULL_TREE;
6415 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6416 to the temps list. Handle also variable length TARGET_EXPRs. */
6417 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6419 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6420 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6421 gimplify_vla_decl (temp, pre_p);
6423 else
6425 /* Save location where we need to place unpoisoning. It's possible
6426 that a variable will be converted to needs_to_live_in_memory. */
6427 unpoison_it = gsi_last (*pre_p);
6428 unpoison_empty_seq = gsi_end_p (unpoison_it);
6430 gimple_add_tmp_var (temp);
6433 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6434 expression is supposed to initialize the slot. */
6435 if (VOID_TYPE_P (TREE_TYPE (init)))
6436 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6437 else
6439 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6440 init = init_expr;
6441 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6442 init = NULL;
6443 ggc_free (init_expr);
6445 if (ret == GS_ERROR)
6447 /* PR c++/28266 Make sure this is expanded only once. */
6448 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6449 return GS_ERROR;
6451 if (init)
6452 gimplify_and_add (init, pre_p);
6454 /* If needed, push the cleanup for the temp. */
6455 if (TARGET_EXPR_CLEANUP (targ))
6457 if (CLEANUP_EH_ONLY (targ))
6458 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6459 CLEANUP_EH_ONLY (targ), pre_p);
6460 else
6461 cleanup = TARGET_EXPR_CLEANUP (targ);
6464 /* Add a clobber for the temporary going out of scope, like
6465 gimplify_bind_expr. */
6466 if (gimplify_ctxp->in_cleanup_point_expr
6467 && needs_to_live_in_memory (temp))
6469 if (flag_stack_reuse == SR_ALL)
6471 tree clobber = build_constructor (TREE_TYPE (temp),
6472 NULL);
6473 TREE_THIS_VOLATILE (clobber) = true;
6474 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6475 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6477 if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope))
6479 tree asan_cleanup = build_asan_poison_call_expr (temp);
6480 if (asan_cleanup)
6482 if (unpoison_empty_seq)
6483 unpoison_it = gsi_start (*pre_p);
6485 asan_poison_variable (temp, false, &unpoison_it,
6486 unpoison_empty_seq);
6487 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6491 if (cleanup)
6492 gimple_push_cleanup (temp, cleanup, false, pre_p);
6494 /* Only expand this once. */
6495 TREE_OPERAND (targ, 3) = init;
6496 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6498 else
6499 /* We should have expanded this before. */
6500 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6502 *expr_p = temp;
6503 return GS_OK;
6506 /* Gimplification of expression trees. */
6508 /* Gimplify an expression which appears at statement context. The
6509 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6510 NULL, a new sequence is allocated.
6512 Return true if we actually added a statement to the queue. */
6514 bool
6515 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6517 gimple_seq_node last;
6519 last = gimple_seq_last (*seq_p);
6520 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6521 return last != gimple_seq_last (*seq_p);
6524 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6525 to CTX. If entries already exist, force them to be some flavor of private.
6526 If there is no enclosing parallel, do nothing. */
6528 void
6529 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6531 splay_tree_node n;
6533 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6534 return;
6538 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6539 if (n != NULL)
6541 if (n->value & GOVD_SHARED)
6542 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6543 else if (n->value & GOVD_MAP)
6544 n->value |= GOVD_MAP_TO_ONLY;
6545 else
6546 return;
6548 else if ((ctx->region_type & ORT_TARGET) != 0)
6550 if (ctx->target_map_scalars_firstprivate)
6551 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6552 else
6553 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6555 else if (ctx->region_type != ORT_WORKSHARE
6556 && ctx->region_type != ORT_SIMD
6557 && ctx->region_type != ORT_ACC
6558 && !(ctx->region_type & ORT_TARGET_DATA))
6559 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6561 ctx = ctx->outer_context;
6563 while (ctx);
6566 /* Similarly for each of the type sizes of TYPE. */
6568 static void
6569 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6571 if (type == NULL || type == error_mark_node)
6572 return;
6573 type = TYPE_MAIN_VARIANT (type);
6575 if (ctx->privatized_types->add (type))
6576 return;
6578 switch (TREE_CODE (type))
6580 case INTEGER_TYPE:
6581 case ENUMERAL_TYPE:
6582 case BOOLEAN_TYPE:
6583 case REAL_TYPE:
6584 case FIXED_POINT_TYPE:
6585 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6586 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6587 break;
6589 case ARRAY_TYPE:
6590 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6591 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6592 break;
6594 case RECORD_TYPE:
6595 case UNION_TYPE:
6596 case QUAL_UNION_TYPE:
6598 tree field;
6599 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6600 if (TREE_CODE (field) == FIELD_DECL)
6602 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6603 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6606 break;
6608 case POINTER_TYPE:
6609 case REFERENCE_TYPE:
6610 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6611 break;
6613 default:
6614 break;
6617 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6618 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6619 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6622 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6624 static void
6625 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6627 splay_tree_node n;
6628 unsigned int nflags;
6629 tree t;
6631 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6632 return;
6634 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6635 there are constructors involved somewhere. */
6636 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
6637 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
6638 flags |= GOVD_SEEN;
6640 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6641 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6643 /* We shouldn't be re-adding the decl with the same data
6644 sharing class. */
6645 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6646 nflags = n->value | flags;
6647 /* The only combination of data sharing classes we should see is
6648 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6649 reduction variables to be used in data sharing clauses. */
6650 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6651 || ((nflags & GOVD_DATA_SHARE_CLASS)
6652 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6653 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6654 n->value = nflags;
6655 return;
6658 /* When adding a variable-sized variable, we have to handle all sorts
6659 of additional bits of data: the pointer replacement variable, and
6660 the parameters of the type. */
6661 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6663 /* Add the pointer replacement variable as PRIVATE if the variable
6664 replacement is private, else FIRSTPRIVATE since we'll need the
6665 address of the original variable either for SHARED, or for the
6666 copy into or out of the context. */
6667 if (!(flags & GOVD_LOCAL))
6669 if (flags & GOVD_MAP)
6670 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6671 else if (flags & GOVD_PRIVATE)
6672 nflags = GOVD_PRIVATE;
6673 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6674 && (flags & GOVD_FIRSTPRIVATE))
6675 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6676 else
6677 nflags = GOVD_FIRSTPRIVATE;
6678 nflags |= flags & GOVD_SEEN;
6679 t = DECL_VALUE_EXPR (decl);
6680 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6681 t = TREE_OPERAND (t, 0);
6682 gcc_assert (DECL_P (t));
6683 omp_add_variable (ctx, t, nflags);
6686 /* Add all of the variable and type parameters (which should have
6687 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6688 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6689 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6690 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6692 /* The variable-sized variable itself is never SHARED, only some form
6693 of PRIVATE. The sharing would take place via the pointer variable
6694 which we remapped above. */
6695 if (flags & GOVD_SHARED)
6696 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6697 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6699 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6700 alloca statement we generate for the variable, so make sure it
6701 is available. This isn't automatically needed for the SHARED
6702 case, since we won't be allocating local storage then.
6703 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6704 in this case omp_notice_variable will be called later
6705 on when it is gimplified. */
6706 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6707 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6708 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6710 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6711 && lang_hooks.decls.omp_privatize_by_reference (decl))
6713 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6715 /* Similar to the direct variable sized case above, we'll need the
6716 size of references being privatized. */
6717 if ((flags & GOVD_SHARED) == 0)
6719 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6720 if (DECL_P (t))
6721 omp_notice_variable (ctx, t, true);
6725 if (n != NULL)
6726 n->value |= flags;
6727 else
6728 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6730 /* For reductions clauses in OpenACC loop directives, by default create a
6731 copy clause on the enclosing parallel construct for carrying back the
6732 results. */
6733 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6735 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6736 while (outer_ctx)
6738 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6739 if (n != NULL)
6741 /* Ignore local variables and explicitly declared clauses. */
6742 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6743 break;
6744 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6746 /* According to the OpenACC spec, such a reduction variable
6747 should already have a copy map on a kernels construct,
6748 verify that here. */
6749 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6750 && (n->value & GOVD_MAP));
6752 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6754 /* Remove firstprivate and make it a copy map. */
6755 n->value &= ~GOVD_FIRSTPRIVATE;
6756 n->value |= GOVD_MAP;
6759 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6761 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6762 GOVD_MAP | GOVD_SEEN);
6763 break;
6765 outer_ctx = outer_ctx->outer_context;
6770 /* Notice a threadprivate variable DECL used in OMP context CTX.
6771 This just prints out diagnostics about threadprivate variable uses
6772 in untied tasks. If DECL2 is non-NULL, prevent this warning
6773 on that variable. */
6775 static bool
6776 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6777 tree decl2)
6779 splay_tree_node n;
6780 struct gimplify_omp_ctx *octx;
6782 for (octx = ctx; octx; octx = octx->outer_context)
6783 if ((octx->region_type & ORT_TARGET) != 0)
6785 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6786 if (n == NULL)
6788 error ("threadprivate variable %qE used in target region",
6789 DECL_NAME (decl));
6790 error_at (octx->location, "enclosing target region");
6791 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6793 if (decl2)
6794 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6797 if (ctx->region_type != ORT_UNTIED_TASK)
6798 return false;
6799 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6800 if (n == NULL)
6802 error ("threadprivate variable %qE used in untied task",
6803 DECL_NAME (decl));
6804 error_at (ctx->location, "enclosing task");
6805 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6807 if (decl2)
6808 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6809 return false;
6812 /* Return true if global var DECL is device resident. */
6814 static bool
6815 device_resident_p (tree decl)
6817 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6819 if (!attr)
6820 return false;
6822 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6824 tree c = TREE_VALUE (t);
6825 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6826 return true;
6829 return false;
6832 /* Return true if DECL has an ACC DECLARE attribute. */
6834 static bool
6835 is_oacc_declared (tree decl)
6837 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6838 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6839 return declared != NULL_TREE;
6842 /* Determine outer default flags for DECL mentioned in an OMP region
6843 but not declared in an enclosing clause.
6845 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6846 remapped firstprivate instead of shared. To some extent this is
6847 addressed in omp_firstprivatize_type_sizes, but not
6848 effectively. */
6850 static unsigned
6851 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6852 bool in_code, unsigned flags)
6854 enum omp_clause_default_kind default_kind = ctx->default_kind;
6855 enum omp_clause_default_kind kind;
6857 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6858 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6859 default_kind = kind;
6861 switch (default_kind)
6863 case OMP_CLAUSE_DEFAULT_NONE:
6865 const char *rtype;
6867 if (ctx->region_type & ORT_PARALLEL)
6868 rtype = "parallel";
6869 else if (ctx->region_type & ORT_TASK)
6870 rtype = "task";
6871 else if (ctx->region_type & ORT_TEAMS)
6872 rtype = "teams";
6873 else
6874 gcc_unreachable ();
6876 error ("%qE not specified in enclosing %qs",
6877 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6878 error_at (ctx->location, "enclosing %qs", rtype);
6880 /* FALLTHRU */
6881 case OMP_CLAUSE_DEFAULT_SHARED:
6882 flags |= GOVD_SHARED;
6883 break;
6884 case OMP_CLAUSE_DEFAULT_PRIVATE:
6885 flags |= GOVD_PRIVATE;
6886 break;
6887 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6888 flags |= GOVD_FIRSTPRIVATE;
6889 break;
6890 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6891 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6892 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6893 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6895 omp_notice_variable (octx, decl, in_code);
6896 for (; octx; octx = octx->outer_context)
6898 splay_tree_node n2;
6900 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6901 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6902 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6903 continue;
6904 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6906 flags |= GOVD_FIRSTPRIVATE;
6907 goto found_outer;
6909 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6911 flags |= GOVD_SHARED;
6912 goto found_outer;
6917 if (TREE_CODE (decl) == PARM_DECL
6918 || (!is_global_var (decl)
6919 && DECL_CONTEXT (decl) == current_function_decl))
6920 flags |= GOVD_FIRSTPRIVATE;
6921 else
6922 flags |= GOVD_SHARED;
6923 found_outer:
6924 break;
6926 default:
6927 gcc_unreachable ();
6930 return flags;
6934 /* Determine outer default flags for DECL mentioned in an OACC region
6935 but not declared in an enclosing clause. */
6937 static unsigned
6938 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6940 const char *rkind;
6941 bool on_device = false;
6942 bool declared = is_oacc_declared (decl);
6943 tree type = TREE_TYPE (decl);
6945 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6946 type = TREE_TYPE (type);
6948 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6949 && is_global_var (decl)
6950 && device_resident_p (decl))
6952 on_device = true;
6953 flags |= GOVD_MAP_TO_ONLY;
6956 switch (ctx->region_type)
6958 case ORT_ACC_KERNELS:
6959 rkind = "kernels";
6961 if (AGGREGATE_TYPE_P (type))
6963 /* Aggregates default to 'present_or_copy', or 'present'. */
6964 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6965 flags |= GOVD_MAP;
6966 else
6967 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6969 else
6970 /* Scalars default to 'copy'. */
6971 flags |= GOVD_MAP | GOVD_MAP_FORCE;
6973 break;
6975 case ORT_ACC_PARALLEL:
6976 rkind = "parallel";
6978 if (on_device || declared)
6979 flags |= GOVD_MAP;
6980 else if (AGGREGATE_TYPE_P (type))
6982 /* Aggregates default to 'present_or_copy', or 'present'. */
6983 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6984 flags |= GOVD_MAP;
6985 else
6986 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6988 else
6989 /* Scalars default to 'firstprivate'. */
6990 flags |= GOVD_FIRSTPRIVATE;
6992 break;
6994 default:
6995 gcc_unreachable ();
6998 if (DECL_ARTIFICIAL (decl))
6999 ; /* We can get compiler-generated decls, and should not complain
7000 about them. */
7001 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7003 error ("%qE not specified in enclosing OpenACC %qs construct",
7004 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7005 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7007 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7008 ; /* Handled above. */
7009 else
7010 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7012 return flags;
7015 /* Record the fact that DECL was used within the OMP context CTX.
7016 IN_CODE is true when real code uses DECL, and false when we should
7017 merely emit default(none) errors. Return true if DECL is going to
7018 be remapped and thus DECL shouldn't be gimplified into its
7019 DECL_VALUE_EXPR (if any). */
7021 static bool
7022 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7024 splay_tree_node n;
7025 unsigned flags = in_code ? GOVD_SEEN : 0;
7026 bool ret = false, shared;
7028 if (error_operand_p (decl))
7029 return false;
7031 if (ctx->region_type == ORT_NONE)
7032 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7034 if (is_global_var (decl))
7036 /* Threadprivate variables are predetermined. */
7037 if (DECL_THREAD_LOCAL_P (decl))
7038 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7040 if (DECL_HAS_VALUE_EXPR_P (decl))
7042 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7044 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7045 return omp_notice_threadprivate_variable (ctx, decl, value);
7048 if (gimplify_omp_ctxp->outer_context == NULL
7049 && VAR_P (decl)
7050 && oacc_get_fn_attrib (current_function_decl))
7052 location_t loc = DECL_SOURCE_LOCATION (decl);
7054 if (lookup_attribute ("omp declare target link",
7055 DECL_ATTRIBUTES (decl)))
7057 error_at (loc,
7058 "%qE with %<link%> clause used in %<routine%> function",
7059 DECL_NAME (decl));
7060 return false;
7062 else if (!lookup_attribute ("omp declare target",
7063 DECL_ATTRIBUTES (decl)))
7065 error_at (loc,
7066 "%qE requires a %<declare%> directive for use "
7067 "in a %<routine%> function", DECL_NAME (decl));
7068 return false;
7073 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7074 if ((ctx->region_type & ORT_TARGET) != 0)
7076 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7077 if (n == NULL)
7079 unsigned nflags = flags;
7080 if (ctx->target_map_pointers_as_0len_arrays
7081 || ctx->target_map_scalars_firstprivate)
7083 bool is_declare_target = false;
7084 bool is_scalar = false;
7085 if (is_global_var (decl)
7086 && varpool_node::get_create (decl)->offloadable)
7088 struct gimplify_omp_ctx *octx;
7089 for (octx = ctx->outer_context;
7090 octx; octx = octx->outer_context)
7092 n = splay_tree_lookup (octx->variables,
7093 (splay_tree_key)decl);
7094 if (n
7095 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7096 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7097 break;
7099 is_declare_target = octx == NULL;
7101 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7102 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7103 if (is_declare_target)
7105 else if (ctx->target_map_pointers_as_0len_arrays
7106 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7107 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7108 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7109 == POINTER_TYPE)))
7110 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7111 else if (is_scalar)
7112 nflags |= GOVD_FIRSTPRIVATE;
7115 struct gimplify_omp_ctx *octx = ctx->outer_context;
7116 if ((ctx->region_type & ORT_ACC) && octx)
7118 /* Look in outer OpenACC contexts, to see if there's a
7119 data attribute for this variable. */
7120 omp_notice_variable (octx, decl, in_code);
7122 for (; octx; octx = octx->outer_context)
7124 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7125 break;
7126 splay_tree_node n2
7127 = splay_tree_lookup (octx->variables,
7128 (splay_tree_key) decl);
7129 if (n2)
7131 if (octx->region_type == ORT_ACC_HOST_DATA)
7132 error ("variable %qE declared in enclosing "
7133 "%<host_data%> region", DECL_NAME (decl));
7134 nflags |= GOVD_MAP;
7135 if (octx->region_type == ORT_ACC_DATA
7136 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7137 nflags |= GOVD_MAP_0LEN_ARRAY;
7138 goto found_outer;
7144 tree type = TREE_TYPE (decl);
7146 if (nflags == flags
7147 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7148 && lang_hooks.decls.omp_privatize_by_reference (decl))
7149 type = TREE_TYPE (type);
7150 if (nflags == flags
7151 && !lang_hooks.types.omp_mappable_type (type))
7153 error ("%qD referenced in target region does not have "
7154 "a mappable type", decl);
7155 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7157 else if (nflags == flags)
7159 if ((ctx->region_type & ORT_ACC) != 0)
7160 nflags = oacc_default_clause (ctx, decl, flags);
7161 else
7162 nflags |= GOVD_MAP;
7165 found_outer:
7166 omp_add_variable (ctx, decl, nflags);
7168 else
7170 /* If nothing changed, there's nothing left to do. */
7171 if ((n->value & flags) == flags)
7172 return ret;
7173 flags |= n->value;
7174 n->value = flags;
7176 goto do_outer;
7179 if (n == NULL)
7181 if (ctx->region_type == ORT_WORKSHARE
7182 || ctx->region_type == ORT_SIMD
7183 || ctx->region_type == ORT_ACC
7184 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7185 goto do_outer;
7187 flags = omp_default_clause (ctx, decl, in_code, flags);
7189 if ((flags & GOVD_PRIVATE)
7190 && lang_hooks.decls.omp_private_outer_ref (decl))
7191 flags |= GOVD_PRIVATE_OUTER_REF;
7193 omp_add_variable (ctx, decl, flags);
7195 shared = (flags & GOVD_SHARED) != 0;
7196 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7197 goto do_outer;
7200 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7201 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7202 && DECL_SIZE (decl))
7204 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7206 splay_tree_node n2;
7207 tree t = DECL_VALUE_EXPR (decl);
7208 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7209 t = TREE_OPERAND (t, 0);
7210 gcc_assert (DECL_P (t));
7211 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7212 n2->value |= GOVD_SEEN;
7214 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7215 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7216 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7217 != INTEGER_CST))
7219 splay_tree_node n2;
7220 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7221 gcc_assert (DECL_P (t));
7222 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7223 if (n2)
7224 omp_notice_variable (ctx, t, true);
7228 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7229 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7231 /* If nothing changed, there's nothing left to do. */
7232 if ((n->value & flags) == flags)
7233 return ret;
7234 flags |= n->value;
7235 n->value = flags;
7237 do_outer:
7238 /* If the variable is private in the current context, then we don't
7239 need to propagate anything to an outer context. */
7240 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7241 return ret;
7242 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7243 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7244 return ret;
7245 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7246 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7247 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7248 return ret;
7249 if (ctx->outer_context
7250 && omp_notice_variable (ctx->outer_context, decl, in_code))
7251 return true;
7252 return ret;
7255 /* Verify that DECL is private within CTX. If there's specific information
7256 to the contrary in the innermost scope, generate an error. */
7258 static bool
7259 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7261 splay_tree_node n;
7263 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7264 if (n != NULL)
7266 if (n->value & GOVD_SHARED)
7268 if (ctx == gimplify_omp_ctxp)
7270 if (simd)
7271 error ("iteration variable %qE is predetermined linear",
7272 DECL_NAME (decl));
7273 else
7274 error ("iteration variable %qE should be private",
7275 DECL_NAME (decl));
7276 n->value = GOVD_PRIVATE;
7277 return true;
7279 else
7280 return false;
7282 else if ((n->value & GOVD_EXPLICIT) != 0
7283 && (ctx == gimplify_omp_ctxp
7284 || (ctx->region_type == ORT_COMBINED_PARALLEL
7285 && gimplify_omp_ctxp->outer_context == ctx)))
7287 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7288 error ("iteration variable %qE should not be firstprivate",
7289 DECL_NAME (decl));
7290 else if ((n->value & GOVD_REDUCTION) != 0)
7291 error ("iteration variable %qE should not be reduction",
7292 DECL_NAME (decl));
7293 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7294 error ("iteration variable %qE should not be linear",
7295 DECL_NAME (decl));
7296 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7297 error ("iteration variable %qE should not be lastprivate",
7298 DECL_NAME (decl));
7299 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7300 error ("iteration variable %qE should not be private",
7301 DECL_NAME (decl));
7302 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7303 error ("iteration variable %qE is predetermined linear",
7304 DECL_NAME (decl));
7306 return (ctx == gimplify_omp_ctxp
7307 || (ctx->region_type == ORT_COMBINED_PARALLEL
7308 && gimplify_omp_ctxp->outer_context == ctx));
7311 if (ctx->region_type != ORT_WORKSHARE
7312 && ctx->region_type != ORT_SIMD
7313 && ctx->region_type != ORT_ACC)
7314 return false;
7315 else if (ctx->outer_context)
7316 return omp_is_private (ctx->outer_context, decl, simd);
7317 return false;
7320 /* Return true if DECL is private within a parallel region
7321 that binds to the current construct's context or in parallel
7322 region's REDUCTION clause. */
7324 static bool
7325 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7327 splay_tree_node n;
7331 ctx = ctx->outer_context;
7332 if (ctx == NULL)
7334 if (is_global_var (decl))
7335 return false;
7337 /* References might be private, but might be shared too,
7338 when checking for copyprivate, assume they might be
7339 private, otherwise assume they might be shared. */
7340 if (copyprivate)
7341 return true;
7343 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7344 return false;
7346 /* Treat C++ privatized non-static data members outside
7347 of the privatization the same. */
7348 if (omp_member_access_dummy_var (decl))
7349 return false;
7351 return true;
7354 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7356 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7357 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7358 continue;
7360 if (n != NULL)
7362 if ((n->value & GOVD_LOCAL) != 0
7363 && omp_member_access_dummy_var (decl))
7364 return false;
7365 return (n->value & GOVD_SHARED) == 0;
7368 while (ctx->region_type == ORT_WORKSHARE
7369 || ctx->region_type == ORT_SIMD
7370 || ctx->region_type == ORT_ACC);
7371 return false;
7374 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7376 static tree
7377 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7379 tree t = *tp;
7381 /* If this node has been visited, unmark it and keep looking. */
7382 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7383 return t;
7385 if (IS_TYPE_OR_DECL_P (t))
7386 *walk_subtrees = 0;
7387 return NULL_TREE;
7390 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7391 and previous omp contexts. */
7393 static void
7394 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7395 enum omp_region_type region_type,
7396 enum tree_code code)
7398 struct gimplify_omp_ctx *ctx, *outer_ctx;
7399 tree c;
7400 hash_map<tree, tree> *struct_map_to_clause = NULL;
7401 tree *prev_list_p = NULL;
7403 ctx = new_omp_context (region_type);
7404 outer_ctx = ctx->outer_context;
7405 if (code == OMP_TARGET)
7407 if (!lang_GNU_Fortran ())
7408 ctx->target_map_pointers_as_0len_arrays = true;
7409 ctx->target_map_scalars_firstprivate = true;
7411 if (!lang_GNU_Fortran ())
7412 switch (code)
7414 case OMP_TARGET:
7415 case OMP_TARGET_DATA:
7416 case OMP_TARGET_ENTER_DATA:
7417 case OMP_TARGET_EXIT_DATA:
7418 case OACC_DECLARE:
7419 case OACC_HOST_DATA:
7420 ctx->target_firstprivatize_array_bases = true;
7421 default:
7422 break;
7425 while ((c = *list_p) != NULL)
7427 bool remove = false;
7428 bool notice_outer = true;
7429 const char *check_non_private = NULL;
7430 unsigned int flags;
7431 tree decl;
7433 switch (OMP_CLAUSE_CODE (c))
7435 case OMP_CLAUSE_PRIVATE:
7436 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7437 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7439 flags |= GOVD_PRIVATE_OUTER_REF;
7440 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7442 else
7443 notice_outer = false;
7444 goto do_add;
7445 case OMP_CLAUSE_SHARED:
7446 flags = GOVD_SHARED | GOVD_EXPLICIT;
7447 goto do_add;
7448 case OMP_CLAUSE_FIRSTPRIVATE:
7449 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7450 check_non_private = "firstprivate";
7451 goto do_add;
7452 case OMP_CLAUSE_LASTPRIVATE:
7453 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7454 check_non_private = "lastprivate";
7455 decl = OMP_CLAUSE_DECL (c);
7456 if (error_operand_p (decl))
7457 goto do_add;
7458 else if (outer_ctx
7459 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7460 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7461 && splay_tree_lookup (outer_ctx->variables,
7462 (splay_tree_key) decl) == NULL)
7464 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7465 if (outer_ctx->outer_context)
7466 omp_notice_variable (outer_ctx->outer_context, decl, true);
7468 else if (outer_ctx
7469 && (outer_ctx->region_type & ORT_TASK) != 0
7470 && outer_ctx->combined_loop
7471 && splay_tree_lookup (outer_ctx->variables,
7472 (splay_tree_key) decl) == NULL)
7474 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7475 if (outer_ctx->outer_context)
7476 omp_notice_variable (outer_ctx->outer_context, decl, true);
7478 else if (outer_ctx
7479 && (outer_ctx->region_type == ORT_WORKSHARE
7480 || outer_ctx->region_type == ORT_ACC)
7481 && outer_ctx->combined_loop
7482 && splay_tree_lookup (outer_ctx->variables,
7483 (splay_tree_key) decl) == NULL
7484 && !omp_check_private (outer_ctx, decl, false))
7486 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7487 if (outer_ctx->outer_context
7488 && (outer_ctx->outer_context->region_type
7489 == ORT_COMBINED_PARALLEL)
7490 && splay_tree_lookup (outer_ctx->outer_context->variables,
7491 (splay_tree_key) decl) == NULL)
7493 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7494 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7495 if (octx->outer_context)
7497 octx = octx->outer_context;
7498 if (octx->region_type == ORT_WORKSHARE
7499 && octx->combined_loop
7500 && splay_tree_lookup (octx->variables,
7501 (splay_tree_key) decl) == NULL
7502 && !omp_check_private (octx, decl, false))
7504 omp_add_variable (octx, decl,
7505 GOVD_LASTPRIVATE | GOVD_SEEN);
7506 octx = octx->outer_context;
7507 if (octx
7508 && octx->region_type == ORT_COMBINED_TEAMS
7509 && (splay_tree_lookup (octx->variables,
7510 (splay_tree_key) decl)
7511 == NULL))
7513 omp_add_variable (octx, decl,
7514 GOVD_SHARED | GOVD_SEEN);
7515 octx = octx->outer_context;
7518 if (octx)
7519 omp_notice_variable (octx, decl, true);
7522 else if (outer_ctx->outer_context)
7523 omp_notice_variable (outer_ctx->outer_context, decl, true);
7525 goto do_add;
7526 case OMP_CLAUSE_REDUCTION:
7527 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7528 /* OpenACC permits reductions on private variables. */
7529 if (!(region_type & ORT_ACC))
7530 check_non_private = "reduction";
7531 decl = OMP_CLAUSE_DECL (c);
7532 if (TREE_CODE (decl) == MEM_REF)
7534 tree type = TREE_TYPE (decl);
7535 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7536 NULL, is_gimple_val, fb_rvalue, false)
7537 == GS_ERROR)
7539 remove = true;
7540 break;
7542 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7543 if (DECL_P (v))
7545 omp_firstprivatize_variable (ctx, v);
7546 omp_notice_variable (ctx, v, true);
7548 decl = TREE_OPERAND (decl, 0);
7549 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7551 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7552 NULL, is_gimple_val, fb_rvalue, false)
7553 == GS_ERROR)
7555 remove = true;
7556 break;
7558 v = TREE_OPERAND (decl, 1);
7559 if (DECL_P (v))
7561 omp_firstprivatize_variable (ctx, v);
7562 omp_notice_variable (ctx, v, true);
7564 decl = TREE_OPERAND (decl, 0);
7566 if (TREE_CODE (decl) == ADDR_EXPR
7567 || TREE_CODE (decl) == INDIRECT_REF)
7568 decl = TREE_OPERAND (decl, 0);
7570 goto do_add_decl;
7571 case OMP_CLAUSE_LINEAR:
7572 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7573 is_gimple_val, fb_rvalue) == GS_ERROR)
7575 remove = true;
7576 break;
7578 else
7580 if (code == OMP_SIMD
7581 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7583 struct gimplify_omp_ctx *octx = outer_ctx;
7584 if (octx
7585 && octx->region_type == ORT_WORKSHARE
7586 && octx->combined_loop
7587 && !octx->distribute)
7589 if (octx->outer_context
7590 && (octx->outer_context->region_type
7591 == ORT_COMBINED_PARALLEL))
7592 octx = octx->outer_context->outer_context;
7593 else
7594 octx = octx->outer_context;
7596 if (octx
7597 && octx->region_type == ORT_WORKSHARE
7598 && octx->combined_loop
7599 && octx->distribute)
7601 error_at (OMP_CLAUSE_LOCATION (c),
7602 "%<linear%> clause for variable other than "
7603 "loop iterator specified on construct "
7604 "combined with %<distribute%>");
7605 remove = true;
7606 break;
7609 /* For combined #pragma omp parallel for simd, need to put
7610 lastprivate and perhaps firstprivate too on the
7611 parallel. Similarly for #pragma omp for simd. */
7612 struct gimplify_omp_ctx *octx = outer_ctx;
7613 decl = NULL_TREE;
7616 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7617 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7618 break;
7619 decl = OMP_CLAUSE_DECL (c);
7620 if (error_operand_p (decl))
7622 decl = NULL_TREE;
7623 break;
7625 flags = GOVD_SEEN;
7626 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7627 flags |= GOVD_FIRSTPRIVATE;
7628 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7629 flags |= GOVD_LASTPRIVATE;
7630 if (octx
7631 && octx->region_type == ORT_WORKSHARE
7632 && octx->combined_loop)
7634 if (octx->outer_context
7635 && (octx->outer_context->region_type
7636 == ORT_COMBINED_PARALLEL))
7637 octx = octx->outer_context;
7638 else if (omp_check_private (octx, decl, false))
7639 break;
7641 else if (octx
7642 && (octx->region_type & ORT_TASK) != 0
7643 && octx->combined_loop)
7645 else if (octx
7646 && octx->region_type == ORT_COMBINED_PARALLEL
7647 && ctx->region_type == ORT_WORKSHARE
7648 && octx == outer_ctx)
7649 flags = GOVD_SEEN | GOVD_SHARED;
7650 else if (octx
7651 && octx->region_type == ORT_COMBINED_TEAMS)
7652 flags = GOVD_SEEN | GOVD_SHARED;
7653 else if (octx
7654 && octx->region_type == ORT_COMBINED_TARGET)
7656 flags &= ~GOVD_LASTPRIVATE;
7657 if (flags == GOVD_SEEN)
7658 break;
7660 else
7661 break;
7662 splay_tree_node on
7663 = splay_tree_lookup (octx->variables,
7664 (splay_tree_key) decl);
7665 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7667 octx = NULL;
7668 break;
7670 omp_add_variable (octx, decl, flags);
7671 if (octx->outer_context == NULL)
7672 break;
7673 octx = octx->outer_context;
7675 while (1);
7676 if (octx
7677 && decl
7678 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7679 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7680 omp_notice_variable (octx, decl, true);
7682 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7683 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7684 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7686 notice_outer = false;
7687 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7689 goto do_add;
7691 case OMP_CLAUSE_MAP:
7692 decl = OMP_CLAUSE_DECL (c);
7693 if (error_operand_p (decl))
7694 remove = true;
7695 switch (code)
7697 case OMP_TARGET:
7698 break;
7699 case OACC_DATA:
7700 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7701 break;
7702 /* FALLTHRU */
7703 case OMP_TARGET_DATA:
7704 case OMP_TARGET_ENTER_DATA:
7705 case OMP_TARGET_EXIT_DATA:
7706 case OACC_ENTER_DATA:
7707 case OACC_EXIT_DATA:
7708 case OACC_HOST_DATA:
7709 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7710 || (OMP_CLAUSE_MAP_KIND (c)
7711 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7712 /* For target {,enter ,exit }data only the array slice is
7713 mapped, but not the pointer to it. */
7714 remove = true;
7715 break;
7716 default:
7717 break;
7719 if (remove)
7720 break;
7721 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7723 struct gimplify_omp_ctx *octx;
7724 for (octx = outer_ctx; octx; octx = octx->outer_context)
7726 if (octx->region_type != ORT_ACC_HOST_DATA)
7727 break;
7728 splay_tree_node n2
7729 = splay_tree_lookup (octx->variables,
7730 (splay_tree_key) decl);
7731 if (n2)
7732 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7733 "declared in enclosing %<host_data%> region",
7734 DECL_NAME (decl));
7737 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7738 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7739 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7740 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7741 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7743 remove = true;
7744 break;
7746 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7747 || (OMP_CLAUSE_MAP_KIND (c)
7748 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7749 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7751 OMP_CLAUSE_SIZE (c)
7752 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7753 false);
7754 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7755 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7757 if (!DECL_P (decl))
7759 tree d = decl, *pd;
7760 if (TREE_CODE (d) == ARRAY_REF)
7762 while (TREE_CODE (d) == ARRAY_REF)
7763 d = TREE_OPERAND (d, 0);
7764 if (TREE_CODE (d) == COMPONENT_REF
7765 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7766 decl = d;
7768 pd = &OMP_CLAUSE_DECL (c);
7769 if (d == decl
7770 && TREE_CODE (decl) == INDIRECT_REF
7771 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7772 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7773 == REFERENCE_TYPE))
7775 pd = &TREE_OPERAND (decl, 0);
7776 decl = TREE_OPERAND (decl, 0);
7778 if (TREE_CODE (decl) == COMPONENT_REF)
7780 while (TREE_CODE (decl) == COMPONENT_REF)
7781 decl = TREE_OPERAND (decl, 0);
7782 if (TREE_CODE (decl) == INDIRECT_REF
7783 && DECL_P (TREE_OPERAND (decl, 0))
7784 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7785 == REFERENCE_TYPE))
7786 decl = TREE_OPERAND (decl, 0);
7788 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7789 == GS_ERROR)
7791 remove = true;
7792 break;
7794 if (DECL_P (decl))
7796 if (error_operand_p (decl))
7798 remove = true;
7799 break;
7802 tree stype = TREE_TYPE (decl);
7803 if (TREE_CODE (stype) == REFERENCE_TYPE)
7804 stype = TREE_TYPE (stype);
7805 if (TYPE_SIZE_UNIT (stype) == NULL
7806 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7808 error_at (OMP_CLAUSE_LOCATION (c),
7809 "mapping field %qE of variable length "
7810 "structure", OMP_CLAUSE_DECL (c));
7811 remove = true;
7812 break;
7815 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7817 /* Error recovery. */
7818 if (prev_list_p == NULL)
7820 remove = true;
7821 break;
7823 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7825 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7826 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7828 remove = true;
7829 break;
7834 tree offset;
7835 HOST_WIDE_INT bitsize, bitpos;
7836 machine_mode mode;
7837 int unsignedp, reversep, volatilep = 0;
7838 tree base = OMP_CLAUSE_DECL (c);
7839 while (TREE_CODE (base) == ARRAY_REF)
7840 base = TREE_OPERAND (base, 0);
7841 if (TREE_CODE (base) == INDIRECT_REF)
7842 base = TREE_OPERAND (base, 0);
7843 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7844 &mode, &unsignedp, &reversep,
7845 &volatilep);
7846 tree orig_base = base;
7847 if ((TREE_CODE (base) == INDIRECT_REF
7848 || (TREE_CODE (base) == MEM_REF
7849 && integer_zerop (TREE_OPERAND (base, 1))))
7850 && DECL_P (TREE_OPERAND (base, 0))
7851 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7852 == REFERENCE_TYPE))
7853 base = TREE_OPERAND (base, 0);
7854 gcc_assert (base == decl
7855 && (offset == NULL_TREE
7856 || TREE_CODE (offset) == INTEGER_CST));
7858 splay_tree_node n
7859 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7860 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7861 == GOMP_MAP_ALWAYS_POINTER);
7862 if (n == NULL || (n->value & GOVD_MAP) == 0)
7864 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7865 OMP_CLAUSE_MAP);
7866 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7867 if (orig_base != base)
7868 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7869 else
7870 OMP_CLAUSE_DECL (l) = decl;
7871 OMP_CLAUSE_SIZE (l) = size_int (1);
7872 if (struct_map_to_clause == NULL)
7873 struct_map_to_clause = new hash_map<tree, tree>;
7874 struct_map_to_clause->put (decl, l);
7875 if (ptr)
7877 enum gomp_map_kind mkind
7878 = code == OMP_TARGET_EXIT_DATA
7879 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7880 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7881 OMP_CLAUSE_MAP);
7882 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7883 OMP_CLAUSE_DECL (c2)
7884 = unshare_expr (OMP_CLAUSE_DECL (c));
7885 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7886 OMP_CLAUSE_SIZE (c2)
7887 = TYPE_SIZE_UNIT (ptr_type_node);
7888 OMP_CLAUSE_CHAIN (l) = c2;
7889 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7891 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7892 tree c3
7893 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7894 OMP_CLAUSE_MAP);
7895 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7896 OMP_CLAUSE_DECL (c3)
7897 = unshare_expr (OMP_CLAUSE_DECL (c4));
7898 OMP_CLAUSE_SIZE (c3)
7899 = TYPE_SIZE_UNIT (ptr_type_node);
7900 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7901 OMP_CLAUSE_CHAIN (c2) = c3;
7903 *prev_list_p = l;
7904 prev_list_p = NULL;
7906 else
7908 OMP_CLAUSE_CHAIN (l) = c;
7909 *list_p = l;
7910 list_p = &OMP_CLAUSE_CHAIN (l);
7912 if (orig_base != base && code == OMP_TARGET)
7914 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7915 OMP_CLAUSE_MAP);
7916 enum gomp_map_kind mkind
7917 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7918 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7919 OMP_CLAUSE_DECL (c2) = decl;
7920 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7921 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7922 OMP_CLAUSE_CHAIN (l) = c2;
7924 flags = GOVD_MAP | GOVD_EXPLICIT;
7925 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7926 flags |= GOVD_SEEN;
7927 goto do_add_decl;
7929 else
7931 tree *osc = struct_map_to_clause->get (decl);
7932 tree *sc = NULL, *scp = NULL;
7933 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7934 n->value |= GOVD_SEEN;
7935 offset_int o1, o2;
7936 if (offset)
7937 o1 = wi::to_offset (offset);
7938 else
7939 o1 = 0;
7940 if (bitpos)
7941 o1 = o1 + bitpos / BITS_PER_UNIT;
7942 sc = &OMP_CLAUSE_CHAIN (*osc);
7943 if (*sc != c
7944 && (OMP_CLAUSE_MAP_KIND (*sc)
7945 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7946 sc = &OMP_CLAUSE_CHAIN (*sc);
7947 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7948 if (ptr && sc == prev_list_p)
7949 break;
7950 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7951 != COMPONENT_REF
7952 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7953 != INDIRECT_REF)
7954 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7955 != ARRAY_REF))
7956 break;
7957 else
7959 tree offset2;
7960 HOST_WIDE_INT bitsize2, bitpos2;
7961 base = OMP_CLAUSE_DECL (*sc);
7962 if (TREE_CODE (base) == ARRAY_REF)
7964 while (TREE_CODE (base) == ARRAY_REF)
7965 base = TREE_OPERAND (base, 0);
7966 if (TREE_CODE (base) != COMPONENT_REF
7967 || (TREE_CODE (TREE_TYPE (base))
7968 != ARRAY_TYPE))
7969 break;
7971 else if (TREE_CODE (base) == INDIRECT_REF
7972 && (TREE_CODE (TREE_OPERAND (base, 0))
7973 == COMPONENT_REF)
7974 && (TREE_CODE (TREE_TYPE
7975 (TREE_OPERAND (base, 0)))
7976 == REFERENCE_TYPE))
7977 base = TREE_OPERAND (base, 0);
7978 base = get_inner_reference (base, &bitsize2,
7979 &bitpos2, &offset2,
7980 &mode, &unsignedp,
7981 &reversep, &volatilep);
7982 if ((TREE_CODE (base) == INDIRECT_REF
7983 || (TREE_CODE (base) == MEM_REF
7984 && integer_zerop (TREE_OPERAND (base,
7985 1))))
7986 && DECL_P (TREE_OPERAND (base, 0))
7987 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7988 0)))
7989 == REFERENCE_TYPE))
7990 base = TREE_OPERAND (base, 0);
7991 if (base != decl)
7992 break;
7993 if (scp)
7994 continue;
7995 gcc_assert (offset == NULL_TREE
7996 || TREE_CODE (offset) == INTEGER_CST);
7997 tree d1 = OMP_CLAUSE_DECL (*sc);
7998 tree d2 = OMP_CLAUSE_DECL (c);
7999 while (TREE_CODE (d1) == ARRAY_REF)
8000 d1 = TREE_OPERAND (d1, 0);
8001 while (TREE_CODE (d2) == ARRAY_REF)
8002 d2 = TREE_OPERAND (d2, 0);
8003 if (TREE_CODE (d1) == INDIRECT_REF)
8004 d1 = TREE_OPERAND (d1, 0);
8005 if (TREE_CODE (d2) == INDIRECT_REF)
8006 d2 = TREE_OPERAND (d2, 0);
8007 while (TREE_CODE (d1) == COMPONENT_REF)
8008 if (TREE_CODE (d2) == COMPONENT_REF
8009 && TREE_OPERAND (d1, 1)
8010 == TREE_OPERAND (d2, 1))
8012 d1 = TREE_OPERAND (d1, 0);
8013 d2 = TREE_OPERAND (d2, 0);
8015 else
8016 break;
8017 if (d1 == d2)
8019 error_at (OMP_CLAUSE_LOCATION (c),
8020 "%qE appears more than once in map "
8021 "clauses", OMP_CLAUSE_DECL (c));
8022 remove = true;
8023 break;
8025 if (offset2)
8026 o2 = wi::to_offset (offset2);
8027 else
8028 o2 = 0;
8029 if (bitpos2)
8030 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8031 if (wi::ltu_p (o1, o2)
8032 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8034 if (ptr)
8035 scp = sc;
8036 else
8037 break;
8040 if (remove)
8041 break;
8042 OMP_CLAUSE_SIZE (*osc)
8043 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8044 size_one_node);
8045 if (ptr)
8047 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8048 OMP_CLAUSE_MAP);
8049 tree cl = NULL_TREE;
8050 enum gomp_map_kind mkind
8051 = code == OMP_TARGET_EXIT_DATA
8052 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8053 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8054 OMP_CLAUSE_DECL (c2)
8055 = unshare_expr (OMP_CLAUSE_DECL (c));
8056 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8057 OMP_CLAUSE_SIZE (c2)
8058 = TYPE_SIZE_UNIT (ptr_type_node);
8059 cl = scp ? *prev_list_p : c2;
8060 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8062 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8063 tree c3
8064 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8065 OMP_CLAUSE_MAP);
8066 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8067 OMP_CLAUSE_DECL (c3)
8068 = unshare_expr (OMP_CLAUSE_DECL (c4));
8069 OMP_CLAUSE_SIZE (c3)
8070 = TYPE_SIZE_UNIT (ptr_type_node);
8071 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8072 if (!scp)
8073 OMP_CLAUSE_CHAIN (c2) = c3;
8074 else
8075 cl = c3;
8077 if (scp)
8078 *scp = c2;
8079 if (sc == prev_list_p)
8081 *sc = cl;
8082 prev_list_p = NULL;
8084 else
8086 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8087 list_p = prev_list_p;
8088 prev_list_p = NULL;
8089 OMP_CLAUSE_CHAIN (c) = *sc;
8090 *sc = cl;
8091 continue;
8094 else if (*sc != c)
8096 *list_p = OMP_CLAUSE_CHAIN (c);
8097 OMP_CLAUSE_CHAIN (c) = *sc;
8098 *sc = c;
8099 continue;
8103 if (!remove
8104 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8105 && OMP_CLAUSE_CHAIN (c)
8106 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8107 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8108 == GOMP_MAP_ALWAYS_POINTER))
8109 prev_list_p = list_p;
8110 break;
8112 flags = GOVD_MAP | GOVD_EXPLICIT;
8113 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8114 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8115 flags |= GOVD_MAP_ALWAYS_TO;
8116 goto do_add;
8118 case OMP_CLAUSE_DEPEND:
8119 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8121 tree deps = OMP_CLAUSE_DECL (c);
8122 while (deps && TREE_CODE (deps) == TREE_LIST)
8124 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8125 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8126 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8127 pre_p, NULL, is_gimple_val, fb_rvalue);
8128 deps = TREE_CHAIN (deps);
8130 break;
8132 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8133 break;
8134 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8136 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8137 NULL, is_gimple_val, fb_rvalue);
8138 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8140 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8142 remove = true;
8143 break;
8145 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8146 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8147 is_gimple_val, fb_rvalue) == GS_ERROR)
8149 remove = true;
8150 break;
8152 break;
8154 case OMP_CLAUSE_TO:
8155 case OMP_CLAUSE_FROM:
8156 case OMP_CLAUSE__CACHE_:
8157 decl = OMP_CLAUSE_DECL (c);
8158 if (error_operand_p (decl))
8160 remove = true;
8161 break;
8163 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8164 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8165 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8166 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8167 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8169 remove = true;
8170 break;
8172 if (!DECL_P (decl))
8174 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8175 NULL, is_gimple_lvalue, fb_lvalue)
8176 == GS_ERROR)
8178 remove = true;
8179 break;
8181 break;
8183 goto do_notice;
8185 case OMP_CLAUSE_USE_DEVICE_PTR:
8186 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8187 goto do_add;
8188 case OMP_CLAUSE_IS_DEVICE_PTR:
8189 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8190 goto do_add;
8192 do_add:
8193 decl = OMP_CLAUSE_DECL (c);
8194 do_add_decl:
8195 if (error_operand_p (decl))
8197 remove = true;
8198 break;
8200 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8202 tree t = omp_member_access_dummy_var (decl);
8203 if (t)
8205 tree v = DECL_VALUE_EXPR (decl);
8206 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8207 if (outer_ctx)
8208 omp_notice_variable (outer_ctx, t, true);
8211 if (code == OACC_DATA
8212 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8213 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8214 flags |= GOVD_MAP_0LEN_ARRAY;
8215 omp_add_variable (ctx, decl, flags);
8216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8217 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8219 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8220 GOVD_LOCAL | GOVD_SEEN);
8221 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8222 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8223 find_decl_expr,
8224 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8225 NULL) == NULL_TREE)
8226 omp_add_variable (ctx,
8227 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8228 GOVD_LOCAL | GOVD_SEEN);
8229 gimplify_omp_ctxp = ctx;
8230 push_gimplify_context ();
8232 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8233 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8235 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8236 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8237 pop_gimplify_context
8238 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8239 push_gimplify_context ();
8240 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8241 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8242 pop_gimplify_context
8243 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8244 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8245 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8247 gimplify_omp_ctxp = outer_ctx;
8249 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8250 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8252 gimplify_omp_ctxp = ctx;
8253 push_gimplify_context ();
8254 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8256 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8257 NULL, NULL);
8258 TREE_SIDE_EFFECTS (bind) = 1;
8259 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8260 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8262 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8263 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8264 pop_gimplify_context
8265 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8266 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8268 gimplify_omp_ctxp = outer_ctx;
8270 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8271 && OMP_CLAUSE_LINEAR_STMT (c))
8273 gimplify_omp_ctxp = ctx;
8274 push_gimplify_context ();
8275 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8277 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8278 NULL, NULL);
8279 TREE_SIDE_EFFECTS (bind) = 1;
8280 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8281 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8283 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8284 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8285 pop_gimplify_context
8286 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8287 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8289 gimplify_omp_ctxp = outer_ctx;
8291 if (notice_outer)
8292 goto do_notice;
8293 break;
8295 case OMP_CLAUSE_COPYIN:
8296 case OMP_CLAUSE_COPYPRIVATE:
8297 decl = OMP_CLAUSE_DECL (c);
8298 if (error_operand_p (decl))
8300 remove = true;
8301 break;
8303 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8304 && !remove
8305 && !omp_check_private (ctx, decl, true))
8307 remove = true;
8308 if (is_global_var (decl))
8310 if (DECL_THREAD_LOCAL_P (decl))
8311 remove = false;
8312 else if (DECL_HAS_VALUE_EXPR_P (decl))
8314 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8316 if (value
8317 && DECL_P (value)
8318 && DECL_THREAD_LOCAL_P (value))
8319 remove = false;
8322 if (remove)
8323 error_at (OMP_CLAUSE_LOCATION (c),
8324 "copyprivate variable %qE is not threadprivate"
8325 " or private in outer context", DECL_NAME (decl));
8327 do_notice:
8328 if (outer_ctx)
8329 omp_notice_variable (outer_ctx, decl, true);
8330 if (check_non_private
8331 && region_type == ORT_WORKSHARE
8332 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8333 || decl == OMP_CLAUSE_DECL (c)
8334 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8335 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8336 == ADDR_EXPR
8337 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8338 == POINTER_PLUS_EXPR
8339 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8340 (OMP_CLAUSE_DECL (c), 0), 0))
8341 == ADDR_EXPR)))))
8342 && omp_check_private (ctx, decl, false))
8344 error ("%s variable %qE is private in outer context",
8345 check_non_private, DECL_NAME (decl));
8346 remove = true;
8348 break;
8350 case OMP_CLAUSE_IF:
8351 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8352 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8354 const char *p[2];
8355 for (int i = 0; i < 2; i++)
8356 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8358 case OMP_PARALLEL: p[i] = "parallel"; break;
8359 case OMP_TASK: p[i] = "task"; break;
8360 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8361 case OMP_TARGET_DATA: p[i] = "target data"; break;
8362 case OMP_TARGET: p[i] = "target"; break;
8363 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8364 case OMP_TARGET_ENTER_DATA:
8365 p[i] = "target enter data"; break;
8366 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8367 default: gcc_unreachable ();
8369 error_at (OMP_CLAUSE_LOCATION (c),
8370 "expected %qs %<if%> clause modifier rather than %qs",
8371 p[0], p[1]);
8372 remove = true;
8374 /* Fall through. */
8376 case OMP_CLAUSE_FINAL:
8377 OMP_CLAUSE_OPERAND (c, 0)
8378 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8379 /* Fall through. */
8381 case OMP_CLAUSE_SCHEDULE:
8382 case OMP_CLAUSE_NUM_THREADS:
8383 case OMP_CLAUSE_NUM_TEAMS:
8384 case OMP_CLAUSE_THREAD_LIMIT:
8385 case OMP_CLAUSE_DIST_SCHEDULE:
8386 case OMP_CLAUSE_DEVICE:
8387 case OMP_CLAUSE_PRIORITY:
8388 case OMP_CLAUSE_GRAINSIZE:
8389 case OMP_CLAUSE_NUM_TASKS:
8390 case OMP_CLAUSE_HINT:
8391 case OMP_CLAUSE__CILK_FOR_COUNT_:
8392 case OMP_CLAUSE_ASYNC:
8393 case OMP_CLAUSE_WAIT:
8394 case OMP_CLAUSE_NUM_GANGS:
8395 case OMP_CLAUSE_NUM_WORKERS:
8396 case OMP_CLAUSE_VECTOR_LENGTH:
8397 case OMP_CLAUSE_WORKER:
8398 case OMP_CLAUSE_VECTOR:
8399 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8400 is_gimple_val, fb_rvalue) == GS_ERROR)
8401 remove = true;
8402 break;
8404 case OMP_CLAUSE_GANG:
8405 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8406 is_gimple_val, fb_rvalue) == GS_ERROR)
8407 remove = true;
8408 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8409 is_gimple_val, fb_rvalue) == GS_ERROR)
8410 remove = true;
8411 break;
8413 case OMP_CLAUSE_NOWAIT:
8414 case OMP_CLAUSE_ORDERED:
8415 case OMP_CLAUSE_UNTIED:
8416 case OMP_CLAUSE_COLLAPSE:
8417 case OMP_CLAUSE_TILE:
8418 case OMP_CLAUSE_AUTO:
8419 case OMP_CLAUSE_SEQ:
8420 case OMP_CLAUSE_INDEPENDENT:
8421 case OMP_CLAUSE_MERGEABLE:
8422 case OMP_CLAUSE_PROC_BIND:
8423 case OMP_CLAUSE_SAFELEN:
8424 case OMP_CLAUSE_SIMDLEN:
8425 case OMP_CLAUSE_NOGROUP:
8426 case OMP_CLAUSE_THREADS:
8427 case OMP_CLAUSE_SIMD:
8428 break;
8430 case OMP_CLAUSE_DEFAULTMAP:
8431 ctx->target_map_scalars_firstprivate = false;
8432 break;
8434 case OMP_CLAUSE_ALIGNED:
8435 decl = OMP_CLAUSE_DECL (c);
8436 if (error_operand_p (decl))
8438 remove = true;
8439 break;
8441 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8442 is_gimple_val, fb_rvalue) == GS_ERROR)
8444 remove = true;
8445 break;
8447 if (!is_global_var (decl)
8448 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8449 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8450 break;
8452 case OMP_CLAUSE_DEFAULT:
8453 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8454 break;
8456 default:
8457 gcc_unreachable ();
8460 if (code == OACC_DATA
8461 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8462 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8463 remove = true;
8464 if (remove)
8465 *list_p = OMP_CLAUSE_CHAIN (c);
8466 else
8467 list_p = &OMP_CLAUSE_CHAIN (c);
8470 gimplify_omp_ctxp = ctx;
8471 if (struct_map_to_clause)
8472 delete struct_map_to_clause;
8475 /* Return true if DECL is a candidate for shared to firstprivate
8476 optimization. We only consider non-addressable scalars, not
8477 too big, and not references. */
8479 static bool
8480 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8482 if (TREE_ADDRESSABLE (decl))
8483 return false;
8484 tree type = TREE_TYPE (decl);
8485 if (!is_gimple_reg_type (type)
8486 || TREE_CODE (type) == REFERENCE_TYPE
8487 || TREE_ADDRESSABLE (type))
8488 return false;
8489 /* Don't optimize too large decls, as each thread/task will have
8490 its own. */
8491 HOST_WIDE_INT len = int_size_in_bytes (type);
8492 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8493 return false;
8494 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8495 return false;
8496 return true;
8499 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8500 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8501 GOVD_WRITTEN in outer contexts. */
8503 static void
8504 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8506 for (; ctx; ctx = ctx->outer_context)
8508 splay_tree_node n = splay_tree_lookup (ctx->variables,
8509 (splay_tree_key) decl);
8510 if (n == NULL)
8511 continue;
8512 else if (n->value & GOVD_SHARED)
8514 n->value |= GOVD_WRITTEN;
8515 return;
8517 else if (n->value & GOVD_DATA_SHARE_CLASS)
8518 return;
8522 /* Helper callback for walk_gimple_seq to discover possible stores
8523 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8524 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8525 for those. */
8527 static tree
8528 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8530 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8532 *walk_subtrees = 0;
8533 if (!wi->is_lhs)
8534 return NULL_TREE;
8536 tree op = *tp;
8539 if (handled_component_p (op))
8540 op = TREE_OPERAND (op, 0);
8541 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8542 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8543 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8544 else
8545 break;
8547 while (1);
8548 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8549 return NULL_TREE;
8551 omp_mark_stores (gimplify_omp_ctxp, op);
8552 return NULL_TREE;
8555 /* Helper callback for walk_gimple_seq to discover possible stores
8556 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8557 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8558 for those. */
8560 static tree
8561 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8562 bool *handled_ops_p,
8563 struct walk_stmt_info *wi)
8565 gimple *stmt = gsi_stmt (*gsi_p);
8566 switch (gimple_code (stmt))
8568 /* Don't recurse on OpenMP constructs for which
8569 gimplify_adjust_omp_clauses already handled the bodies,
8570 except handle gimple_omp_for_pre_body. */
8571 case GIMPLE_OMP_FOR:
8572 *handled_ops_p = true;
8573 if (gimple_omp_for_pre_body (stmt))
8574 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8575 omp_find_stores_stmt, omp_find_stores_op, wi);
8576 break;
8577 case GIMPLE_OMP_PARALLEL:
8578 case GIMPLE_OMP_TASK:
8579 case GIMPLE_OMP_SECTIONS:
8580 case GIMPLE_OMP_SINGLE:
8581 case GIMPLE_OMP_TARGET:
8582 case GIMPLE_OMP_TEAMS:
8583 case GIMPLE_OMP_CRITICAL:
8584 *handled_ops_p = true;
8585 break;
8586 default:
8587 break;
8589 return NULL_TREE;
8592 struct gimplify_adjust_omp_clauses_data
8594 tree *list_p;
8595 gimple_seq *pre_p;
8598 /* For all variables that were not actually used within the context,
8599 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8601 static int
8602 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8604 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8605 gimple_seq *pre_p
8606 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8607 tree decl = (tree) n->key;
8608 unsigned flags = n->value;
8609 enum omp_clause_code code;
8610 tree clause;
8611 bool private_debug;
8613 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8614 return 0;
8615 if ((flags & GOVD_SEEN) == 0)
8616 return 0;
8617 if (flags & GOVD_DEBUG_PRIVATE)
8619 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8620 private_debug = true;
8622 else if (flags & GOVD_MAP)
8623 private_debug = false;
8624 else
8625 private_debug
8626 = lang_hooks.decls.omp_private_debug_clause (decl,
8627 !!(flags & GOVD_SHARED));
8628 if (private_debug)
8629 code = OMP_CLAUSE_PRIVATE;
8630 else if (flags & GOVD_MAP)
8632 code = OMP_CLAUSE_MAP;
8633 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8634 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8636 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8637 return 0;
8640 else if (flags & GOVD_SHARED)
8642 if (is_global_var (decl))
8644 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8645 while (ctx != NULL)
8647 splay_tree_node on
8648 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8649 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8650 | GOVD_PRIVATE | GOVD_REDUCTION
8651 | GOVD_LINEAR | GOVD_MAP)) != 0)
8652 break;
8653 ctx = ctx->outer_context;
8655 if (ctx == NULL)
8656 return 0;
8658 code = OMP_CLAUSE_SHARED;
8660 else if (flags & GOVD_PRIVATE)
8661 code = OMP_CLAUSE_PRIVATE;
8662 else if (flags & GOVD_FIRSTPRIVATE)
8664 code = OMP_CLAUSE_FIRSTPRIVATE;
8665 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8666 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8667 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8669 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8670 "%<target%> construct", decl);
8671 return 0;
8674 else if (flags & GOVD_LASTPRIVATE)
8675 code = OMP_CLAUSE_LASTPRIVATE;
8676 else if (flags & GOVD_ALIGNED)
8677 return 0;
8678 else
8679 gcc_unreachable ();
8681 if (((flags & GOVD_LASTPRIVATE)
8682 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8683 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8684 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8686 tree chain = *list_p;
8687 clause = build_omp_clause (input_location, code);
8688 OMP_CLAUSE_DECL (clause) = decl;
8689 OMP_CLAUSE_CHAIN (clause) = chain;
8690 if (private_debug)
8691 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8692 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8693 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8694 else if (code == OMP_CLAUSE_SHARED
8695 && (flags & GOVD_WRITTEN) == 0
8696 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8697 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8698 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8699 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8700 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8702 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8703 OMP_CLAUSE_DECL (nc) = decl;
8704 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8705 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8706 OMP_CLAUSE_DECL (clause)
8707 = build_simple_mem_ref_loc (input_location, decl);
8708 OMP_CLAUSE_DECL (clause)
8709 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8710 build_int_cst (build_pointer_type (char_type_node), 0));
8711 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8712 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8713 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8714 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8715 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8716 OMP_CLAUSE_CHAIN (nc) = chain;
8717 OMP_CLAUSE_CHAIN (clause) = nc;
8718 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8719 gimplify_omp_ctxp = ctx->outer_context;
8720 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8721 pre_p, NULL, is_gimple_val, fb_rvalue);
8722 gimplify_omp_ctxp = ctx;
8724 else if (code == OMP_CLAUSE_MAP)
8726 int kind;
8727 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8728 switch (flags & (GOVD_MAP_TO_ONLY
8729 | GOVD_MAP_FORCE
8730 | GOVD_MAP_FORCE_PRESENT))
8732 case 0:
8733 kind = GOMP_MAP_TOFROM;
8734 break;
8735 case GOVD_MAP_FORCE:
8736 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8737 break;
8738 case GOVD_MAP_TO_ONLY:
8739 kind = GOMP_MAP_TO;
8740 break;
8741 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8742 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8743 break;
8744 case GOVD_MAP_FORCE_PRESENT:
8745 kind = GOMP_MAP_FORCE_PRESENT;
8746 break;
8747 default:
8748 gcc_unreachable ();
8750 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8751 if (DECL_SIZE (decl)
8752 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8754 tree decl2 = DECL_VALUE_EXPR (decl);
8755 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8756 decl2 = TREE_OPERAND (decl2, 0);
8757 gcc_assert (DECL_P (decl2));
8758 tree mem = build_simple_mem_ref (decl2);
8759 OMP_CLAUSE_DECL (clause) = mem;
8760 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8761 if (gimplify_omp_ctxp->outer_context)
8763 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8764 omp_notice_variable (ctx, decl2, true);
8765 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8767 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8768 OMP_CLAUSE_MAP);
8769 OMP_CLAUSE_DECL (nc) = decl;
8770 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8771 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8772 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8773 else
8774 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8775 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8776 OMP_CLAUSE_CHAIN (clause) = nc;
8778 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8779 && lang_hooks.decls.omp_privatize_by_reference (decl))
8781 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8782 OMP_CLAUSE_SIZE (clause)
8783 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8784 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8785 gimplify_omp_ctxp = ctx->outer_context;
8786 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8787 pre_p, NULL, is_gimple_val, fb_rvalue);
8788 gimplify_omp_ctxp = ctx;
8789 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8790 OMP_CLAUSE_MAP);
8791 OMP_CLAUSE_DECL (nc) = decl;
8792 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8793 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8794 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8795 OMP_CLAUSE_CHAIN (clause) = nc;
8797 else
8798 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8800 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8802 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8803 OMP_CLAUSE_DECL (nc) = decl;
8804 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8805 OMP_CLAUSE_CHAIN (nc) = chain;
8806 OMP_CLAUSE_CHAIN (clause) = nc;
8807 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8808 gimplify_omp_ctxp = ctx->outer_context;
8809 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8810 gimplify_omp_ctxp = ctx;
8812 *list_p = clause;
8813 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8814 gimplify_omp_ctxp = ctx->outer_context;
8815 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8816 if (gimplify_omp_ctxp)
8817 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8818 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8819 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8820 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8821 true);
8822 gimplify_omp_ctxp = ctx;
8823 return 0;
8826 static void
8827 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8828 enum tree_code code)
8830 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8831 tree c, decl;
8833 if (body)
8835 struct gimplify_omp_ctx *octx;
8836 for (octx = ctx; octx; octx = octx->outer_context)
8837 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8838 break;
8839 if (octx)
8841 struct walk_stmt_info wi;
8842 memset (&wi, 0, sizeof (wi));
8843 walk_gimple_seq (body, omp_find_stores_stmt,
8844 omp_find_stores_op, &wi);
8847 while ((c = *list_p) != NULL)
8849 splay_tree_node n;
8850 bool remove = false;
8852 switch (OMP_CLAUSE_CODE (c))
8854 case OMP_CLAUSE_FIRSTPRIVATE:
8855 if ((ctx->region_type & ORT_TARGET)
8856 && (ctx->region_type & ORT_ACC) == 0
8857 && TYPE_ATOMIC (strip_array_types
8858 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8860 error_at (OMP_CLAUSE_LOCATION (c),
8861 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8862 "%<target%> construct", OMP_CLAUSE_DECL (c));
8863 remove = true;
8864 break;
8866 /* FALLTHRU */
8867 case OMP_CLAUSE_PRIVATE:
8868 case OMP_CLAUSE_SHARED:
8869 case OMP_CLAUSE_LINEAR:
8870 decl = OMP_CLAUSE_DECL (c);
8871 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8872 remove = !(n->value & GOVD_SEEN);
8873 if (! remove)
8875 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8876 if ((n->value & GOVD_DEBUG_PRIVATE)
8877 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8879 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8880 || ((n->value & GOVD_DATA_SHARE_CLASS)
8881 == GOVD_SHARED));
8882 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8883 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8885 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8886 && (n->value & GOVD_WRITTEN) == 0
8887 && DECL_P (decl)
8888 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8889 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8890 else if (DECL_P (decl)
8891 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8892 && (n->value & GOVD_WRITTEN) != 1)
8893 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8894 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8895 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8896 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8898 break;
8900 case OMP_CLAUSE_LASTPRIVATE:
8901 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8902 accurately reflect the presence of a FIRSTPRIVATE clause. */
8903 decl = OMP_CLAUSE_DECL (c);
8904 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8905 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8906 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8907 if (code == OMP_DISTRIBUTE
8908 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8910 remove = true;
8911 error_at (OMP_CLAUSE_LOCATION (c),
8912 "same variable used in %<firstprivate%> and "
8913 "%<lastprivate%> clauses on %<distribute%> "
8914 "construct");
8916 if (!remove
8917 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8918 && DECL_P (decl)
8919 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8920 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8921 break;
8923 case OMP_CLAUSE_ALIGNED:
8924 decl = OMP_CLAUSE_DECL (c);
8925 if (!is_global_var (decl))
8927 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8928 remove = n == NULL || !(n->value & GOVD_SEEN);
8929 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8931 struct gimplify_omp_ctx *octx;
8932 if (n != NULL
8933 && (n->value & (GOVD_DATA_SHARE_CLASS
8934 & ~GOVD_FIRSTPRIVATE)))
8935 remove = true;
8936 else
8937 for (octx = ctx->outer_context; octx;
8938 octx = octx->outer_context)
8940 n = splay_tree_lookup (octx->variables,
8941 (splay_tree_key) decl);
8942 if (n == NULL)
8943 continue;
8944 if (n->value & GOVD_LOCAL)
8945 break;
8946 /* We have to avoid assigning a shared variable
8947 to itself when trying to add
8948 __builtin_assume_aligned. */
8949 if (n->value & GOVD_SHARED)
8951 remove = true;
8952 break;
8957 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8959 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8960 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8961 remove = true;
8963 break;
8965 case OMP_CLAUSE_MAP:
8966 if (code == OMP_TARGET_EXIT_DATA
8967 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8969 remove = true;
8970 break;
8972 decl = OMP_CLAUSE_DECL (c);
8973 /* Data clauses associated with acc parallel reductions must be
8974 compatible with present_or_copy. Warn and adjust the clause
8975 if that is not the case. */
8976 if (ctx->region_type == ORT_ACC_PARALLEL)
8978 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8979 n = NULL;
8981 if (DECL_P (t))
8982 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8984 if (n && (n->value & GOVD_REDUCTION))
8986 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8988 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8989 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8990 && kind != GOMP_MAP_FORCE_PRESENT
8991 && kind != GOMP_MAP_POINTER)
8993 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8994 "incompatible data clause with reduction "
8995 "on %qE; promoting to present_or_copy",
8996 DECL_NAME (t));
8997 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9001 if (!DECL_P (decl))
9003 if ((ctx->region_type & ORT_TARGET) != 0
9004 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9006 if (TREE_CODE (decl) == INDIRECT_REF
9007 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9008 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9009 == REFERENCE_TYPE))
9010 decl = TREE_OPERAND (decl, 0);
9011 if (TREE_CODE (decl) == COMPONENT_REF)
9013 while (TREE_CODE (decl) == COMPONENT_REF)
9014 decl = TREE_OPERAND (decl, 0);
9015 if (DECL_P (decl))
9017 n = splay_tree_lookup (ctx->variables,
9018 (splay_tree_key) decl);
9019 if (!(n->value & GOVD_SEEN))
9020 remove = true;
9024 break;
9026 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9027 if ((ctx->region_type & ORT_TARGET) != 0
9028 && !(n->value & GOVD_SEEN)
9029 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9030 && (!is_global_var (decl)
9031 || !lookup_attribute ("omp declare target link",
9032 DECL_ATTRIBUTES (decl))))
9034 remove = true;
9035 /* For struct element mapping, if struct is never referenced
9036 in target block and none of the mapping has always modifier,
9037 remove all the struct element mappings, which immediately
9038 follow the GOMP_MAP_STRUCT map clause. */
9039 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9041 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9042 while (cnt--)
9043 OMP_CLAUSE_CHAIN (c)
9044 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9047 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9048 && code == OMP_TARGET_EXIT_DATA)
9049 remove = true;
9050 else if (DECL_SIZE (decl)
9051 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9052 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9053 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9054 && (OMP_CLAUSE_MAP_KIND (c)
9055 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9057 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9058 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9059 INTEGER_CST. */
9060 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9062 tree decl2 = DECL_VALUE_EXPR (decl);
9063 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9064 decl2 = TREE_OPERAND (decl2, 0);
9065 gcc_assert (DECL_P (decl2));
9066 tree mem = build_simple_mem_ref (decl2);
9067 OMP_CLAUSE_DECL (c) = mem;
9068 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9069 if (ctx->outer_context)
9071 omp_notice_variable (ctx->outer_context, decl2, true);
9072 omp_notice_variable (ctx->outer_context,
9073 OMP_CLAUSE_SIZE (c), true);
9075 if (((ctx->region_type & ORT_TARGET) != 0
9076 || !ctx->target_firstprivatize_array_bases)
9077 && ((n->value & GOVD_SEEN) == 0
9078 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9080 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9081 OMP_CLAUSE_MAP);
9082 OMP_CLAUSE_DECL (nc) = decl;
9083 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9084 if (ctx->target_firstprivatize_array_bases)
9085 OMP_CLAUSE_SET_MAP_KIND (nc,
9086 GOMP_MAP_FIRSTPRIVATE_POINTER);
9087 else
9088 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9089 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9090 OMP_CLAUSE_CHAIN (c) = nc;
9091 c = nc;
9094 else
9096 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9097 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9098 gcc_assert ((n->value & GOVD_SEEN) == 0
9099 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9100 == 0));
9102 break;
9104 case OMP_CLAUSE_TO:
9105 case OMP_CLAUSE_FROM:
9106 case OMP_CLAUSE__CACHE_:
9107 decl = OMP_CLAUSE_DECL (c);
9108 if (!DECL_P (decl))
9109 break;
9110 if (DECL_SIZE (decl)
9111 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9113 tree decl2 = DECL_VALUE_EXPR (decl);
9114 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9115 decl2 = TREE_OPERAND (decl2, 0);
9116 gcc_assert (DECL_P (decl2));
9117 tree mem = build_simple_mem_ref (decl2);
9118 OMP_CLAUSE_DECL (c) = mem;
9119 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9120 if (ctx->outer_context)
9122 omp_notice_variable (ctx->outer_context, decl2, true);
9123 omp_notice_variable (ctx->outer_context,
9124 OMP_CLAUSE_SIZE (c), true);
9127 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9128 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9129 break;
9131 case OMP_CLAUSE_REDUCTION:
9132 decl = OMP_CLAUSE_DECL (c);
9133 /* OpenACC reductions need a present_or_copy data clause.
9134 Add one if necessary. Error is the reduction is private. */
9135 if (ctx->region_type == ORT_ACC_PARALLEL)
9137 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9138 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9139 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9140 "reduction on %qE", DECL_NAME (decl));
9141 else if ((n->value & GOVD_MAP) == 0)
9143 tree next = OMP_CLAUSE_CHAIN (c);
9144 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9145 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9146 OMP_CLAUSE_DECL (nc) = decl;
9147 OMP_CLAUSE_CHAIN (c) = nc;
9148 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9149 while (1)
9151 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9152 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9153 break;
9154 nc = OMP_CLAUSE_CHAIN (nc);
9156 OMP_CLAUSE_CHAIN (nc) = next;
9157 n->value |= GOVD_MAP;
9160 if (DECL_P (decl)
9161 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9162 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9163 break;
9164 case OMP_CLAUSE_COPYIN:
9165 case OMP_CLAUSE_COPYPRIVATE:
9166 case OMP_CLAUSE_IF:
9167 case OMP_CLAUSE_NUM_THREADS:
9168 case OMP_CLAUSE_NUM_TEAMS:
9169 case OMP_CLAUSE_THREAD_LIMIT:
9170 case OMP_CLAUSE_DIST_SCHEDULE:
9171 case OMP_CLAUSE_DEVICE:
9172 case OMP_CLAUSE_SCHEDULE:
9173 case OMP_CLAUSE_NOWAIT:
9174 case OMP_CLAUSE_ORDERED:
9175 case OMP_CLAUSE_DEFAULT:
9176 case OMP_CLAUSE_UNTIED:
9177 case OMP_CLAUSE_COLLAPSE:
9178 case OMP_CLAUSE_FINAL:
9179 case OMP_CLAUSE_MERGEABLE:
9180 case OMP_CLAUSE_PROC_BIND:
9181 case OMP_CLAUSE_SAFELEN:
9182 case OMP_CLAUSE_SIMDLEN:
9183 case OMP_CLAUSE_DEPEND:
9184 case OMP_CLAUSE_PRIORITY:
9185 case OMP_CLAUSE_GRAINSIZE:
9186 case OMP_CLAUSE_NUM_TASKS:
9187 case OMP_CLAUSE_NOGROUP:
9188 case OMP_CLAUSE_THREADS:
9189 case OMP_CLAUSE_SIMD:
9190 case OMP_CLAUSE_HINT:
9191 case OMP_CLAUSE_DEFAULTMAP:
9192 case OMP_CLAUSE_USE_DEVICE_PTR:
9193 case OMP_CLAUSE_IS_DEVICE_PTR:
9194 case OMP_CLAUSE__CILK_FOR_COUNT_:
9195 case OMP_CLAUSE_ASYNC:
9196 case OMP_CLAUSE_WAIT:
9197 case OMP_CLAUSE_INDEPENDENT:
9198 case OMP_CLAUSE_NUM_GANGS:
9199 case OMP_CLAUSE_NUM_WORKERS:
9200 case OMP_CLAUSE_VECTOR_LENGTH:
9201 case OMP_CLAUSE_GANG:
9202 case OMP_CLAUSE_WORKER:
9203 case OMP_CLAUSE_VECTOR:
9204 case OMP_CLAUSE_AUTO:
9205 case OMP_CLAUSE_SEQ:
9206 case OMP_CLAUSE_TILE:
9207 break;
9209 default:
9210 gcc_unreachable ();
9213 if (remove)
9214 *list_p = OMP_CLAUSE_CHAIN (c);
9215 else
9216 list_p = &OMP_CLAUSE_CHAIN (c);
9219 /* Add in any implicit data sharing. */
9220 struct gimplify_adjust_omp_clauses_data data;
9221 data.list_p = list_p;
9222 data.pre_p = pre_p;
9223 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9225 gimplify_omp_ctxp = ctx->outer_context;
9226 delete_omp_context (ctx);
9229 /* Gimplify OACC_CACHE. */
9231 static void
9232 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9234 tree expr = *expr_p;
9236 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9237 OACC_CACHE);
9238 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9239 OACC_CACHE);
9241 /* TODO: Do something sensible with this information. */
9243 *expr_p = NULL_TREE;
9246 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9247 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9248 kind. The entry kind will replace the one in CLAUSE, while the exit
9249 kind will be used in a new omp_clause and returned to the caller. */
9251 static tree
9252 gimplify_oacc_declare_1 (tree clause)
9254 HOST_WIDE_INT kind, new_op;
9255 bool ret = false;
9256 tree c = NULL;
9258 kind = OMP_CLAUSE_MAP_KIND (clause);
9260 switch (kind)
9262 case GOMP_MAP_ALLOC:
9263 case GOMP_MAP_FORCE_ALLOC:
9264 case GOMP_MAP_FORCE_TO:
9265 new_op = GOMP_MAP_DELETE;
9266 ret = true;
9267 break;
9269 case GOMP_MAP_FORCE_FROM:
9270 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9271 new_op = GOMP_MAP_FORCE_FROM;
9272 ret = true;
9273 break;
9275 case GOMP_MAP_FORCE_TOFROM:
9276 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9277 new_op = GOMP_MAP_FORCE_FROM;
9278 ret = true;
9279 break;
9281 case GOMP_MAP_FROM:
9282 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9283 new_op = GOMP_MAP_FROM;
9284 ret = true;
9285 break;
9287 case GOMP_MAP_TOFROM:
9288 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9289 new_op = GOMP_MAP_FROM;
9290 ret = true;
9291 break;
9293 case GOMP_MAP_DEVICE_RESIDENT:
9294 case GOMP_MAP_FORCE_DEVICEPTR:
9295 case GOMP_MAP_FORCE_PRESENT:
9296 case GOMP_MAP_LINK:
9297 case GOMP_MAP_POINTER:
9298 case GOMP_MAP_TO:
9299 break;
9301 default:
9302 gcc_unreachable ();
9303 break;
9306 if (ret)
9308 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9309 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9310 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9313 return c;
9316 /* Gimplify OACC_DECLARE. */
9318 static void
9319 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9321 tree expr = *expr_p;
9322 gomp_target *stmt;
9323 tree clauses, t, decl;
9325 clauses = OACC_DECLARE_CLAUSES (expr);
9327 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9328 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9330 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9332 decl = OMP_CLAUSE_DECL (t);
9334 if (TREE_CODE (decl) == MEM_REF)
9335 decl = TREE_OPERAND (decl, 0);
9337 if (VAR_P (decl) && !is_oacc_declared (decl))
9339 tree attr = get_identifier ("oacc declare target");
9340 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9341 DECL_ATTRIBUTES (decl));
9344 if (VAR_P (decl)
9345 && !is_global_var (decl)
9346 && DECL_CONTEXT (decl) == current_function_decl)
9348 tree c = gimplify_oacc_declare_1 (t);
9349 if (c)
9351 if (oacc_declare_returns == NULL)
9352 oacc_declare_returns = new hash_map<tree, tree>;
9354 oacc_declare_returns->put (decl, c);
9358 if (gimplify_omp_ctxp)
9359 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9362 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9363 clauses);
9365 gimplify_seq_add_stmt (pre_p, stmt);
9367 *expr_p = NULL_TREE;
9370 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9371 gimplification of the body, as well as scanning the body for used
9372 variables. We need to do this scan now, because variable-sized
9373 decls will be decomposed during gimplification. */
9375 static void
9376 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9378 tree expr = *expr_p;
9379 gimple *g;
9380 gimple_seq body = NULL;
9382 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9383 OMP_PARALLEL_COMBINED (expr)
9384 ? ORT_COMBINED_PARALLEL
9385 : ORT_PARALLEL, OMP_PARALLEL);
9387 push_gimplify_context ();
9389 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9390 if (gimple_code (g) == GIMPLE_BIND)
9391 pop_gimplify_context (g);
9392 else
9393 pop_gimplify_context (NULL);
9395 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9396 OMP_PARALLEL);
9398 g = gimple_build_omp_parallel (body,
9399 OMP_PARALLEL_CLAUSES (expr),
9400 NULL_TREE, NULL_TREE);
9401 if (OMP_PARALLEL_COMBINED (expr))
9402 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9403 gimplify_seq_add_stmt (pre_p, g);
9404 *expr_p = NULL_TREE;
9407 /* Gimplify the contents of an OMP_TASK statement. This involves
9408 gimplification of the body, as well as scanning the body for used
9409 variables. We need to do this scan now, because variable-sized
9410 decls will be decomposed during gimplification. */
9412 static void
9413 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9415 tree expr = *expr_p;
9416 gimple *g;
9417 gimple_seq body = NULL;
9419 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9420 omp_find_clause (OMP_TASK_CLAUSES (expr),
9421 OMP_CLAUSE_UNTIED)
9422 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9424 push_gimplify_context ();
9426 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9427 if (gimple_code (g) == GIMPLE_BIND)
9428 pop_gimplify_context (g);
9429 else
9430 pop_gimplify_context (NULL);
9432 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9433 OMP_TASK);
9435 g = gimple_build_omp_task (body,
9436 OMP_TASK_CLAUSES (expr),
9437 NULL_TREE, NULL_TREE,
9438 NULL_TREE, NULL_TREE, NULL_TREE);
9439 gimplify_seq_add_stmt (pre_p, g);
9440 *expr_p = NULL_TREE;
9443 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9444 with non-NULL OMP_FOR_INIT. */
9446 static tree
9447 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9449 *walk_subtrees = 0;
9450 switch (TREE_CODE (*tp))
9452 case OMP_FOR:
9453 *walk_subtrees = 1;
9454 /* FALLTHRU */
9455 case OMP_SIMD:
9456 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9457 return *tp;
9458 break;
9459 case BIND_EXPR:
9460 case STATEMENT_LIST:
9461 case OMP_PARALLEL:
9462 *walk_subtrees = 1;
9463 break;
9464 default:
9465 break;
9467 return NULL_TREE;
9470 /* Gimplify the gross structure of an OMP_FOR statement. */
9472 static enum gimplify_status
9473 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9475 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9476 enum gimplify_status ret = GS_ALL_DONE;
9477 enum gimplify_status tret;
9478 gomp_for *gfor;
9479 gimple_seq for_body, for_pre_body;
9480 int i;
9481 bitmap has_decl_expr = NULL;
9482 enum omp_region_type ort = ORT_WORKSHARE;
9484 orig_for_stmt = for_stmt = *expr_p;
9486 switch (TREE_CODE (for_stmt))
9488 case OMP_FOR:
9489 case CILK_FOR:
9490 case OMP_DISTRIBUTE:
9491 break;
9492 case OACC_LOOP:
9493 ort = ORT_ACC;
9494 break;
9495 case OMP_TASKLOOP:
9496 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9497 ort = ORT_UNTIED_TASK;
9498 else
9499 ort = ORT_TASK;
9500 break;
9501 case OMP_SIMD:
9502 case CILK_SIMD:
9503 ort = ORT_SIMD;
9504 break;
9505 default:
9506 gcc_unreachable ();
9509 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9510 clause for the IV. */
9511 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9513 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9514 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9515 decl = TREE_OPERAND (t, 0);
9516 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9517 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9518 && OMP_CLAUSE_DECL (c) == decl)
9520 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9521 break;
9525 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9527 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9528 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9529 find_combined_omp_for, NULL, NULL);
9530 if (inner_for_stmt == NULL_TREE)
9532 gcc_assert (seen_error ());
9533 *expr_p = NULL_TREE;
9534 return GS_ERROR;
9538 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9539 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9540 TREE_CODE (for_stmt));
9542 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9543 gimplify_omp_ctxp->distribute = true;
9545 /* Handle OMP_FOR_INIT. */
9546 for_pre_body = NULL;
9547 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9549 has_decl_expr = BITMAP_ALLOC (NULL);
9550 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9551 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9552 == VAR_DECL)
9554 t = OMP_FOR_PRE_BODY (for_stmt);
9555 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9557 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9559 tree_stmt_iterator si;
9560 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9561 tsi_next (&si))
9563 t = tsi_stmt (si);
9564 if (TREE_CODE (t) == DECL_EXPR
9565 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9566 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9570 if (OMP_FOR_PRE_BODY (for_stmt))
9572 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9573 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9574 else
9576 struct gimplify_omp_ctx ctx;
9577 memset (&ctx, 0, sizeof (ctx));
9578 ctx.region_type = ORT_NONE;
9579 gimplify_omp_ctxp = &ctx;
9580 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9581 gimplify_omp_ctxp = NULL;
9584 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9586 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9587 for_stmt = inner_for_stmt;
9589 /* For taskloop, need to gimplify the start, end and step before the
9590 taskloop, outside of the taskloop omp context. */
9591 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9593 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9595 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9596 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9598 TREE_OPERAND (t, 1)
9599 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9600 pre_p, NULL, false);
9601 tree c = build_omp_clause (input_location,
9602 OMP_CLAUSE_FIRSTPRIVATE);
9603 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9604 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9605 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9608 /* Handle OMP_FOR_COND. */
9609 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9610 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9612 TREE_OPERAND (t, 1)
9613 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9614 gimple_seq_empty_p (for_pre_body)
9615 ? pre_p : &for_pre_body, NULL,
9616 false);
9617 tree c = build_omp_clause (input_location,
9618 OMP_CLAUSE_FIRSTPRIVATE);
9619 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9620 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9621 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9624 /* Handle OMP_FOR_INCR. */
9625 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9626 if (TREE_CODE (t) == MODIFY_EXPR)
9628 decl = TREE_OPERAND (t, 0);
9629 t = TREE_OPERAND (t, 1);
9630 tree *tp = &TREE_OPERAND (t, 1);
9631 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9632 tp = &TREE_OPERAND (t, 0);
9634 if (!is_gimple_constant (*tp))
9636 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9637 ? pre_p : &for_pre_body;
9638 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9639 tree c = build_omp_clause (input_location,
9640 OMP_CLAUSE_FIRSTPRIVATE);
9641 OMP_CLAUSE_DECL (c) = *tp;
9642 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9643 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9648 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9649 OMP_TASKLOOP);
9652 if (orig_for_stmt != for_stmt)
9653 gimplify_omp_ctxp->combined_loop = true;
9655 for_body = NULL;
9656 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9657 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9658 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9659 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9661 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9662 bool is_doacross = false;
9663 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9665 is_doacross = true;
9666 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9667 (OMP_FOR_INIT (for_stmt))
9668 * 2);
9670 int collapse = 1, tile = 0;
9671 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9672 if (c)
9673 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9674 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9675 if (c)
9676 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9677 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9679 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9680 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9681 decl = TREE_OPERAND (t, 0);
9682 gcc_assert (DECL_P (decl));
9683 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9684 || POINTER_TYPE_P (TREE_TYPE (decl)));
9685 if (is_doacross)
9687 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9688 gimplify_omp_ctxp->loop_iter_var.quick_push
9689 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9690 else
9691 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9692 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9695 /* Make sure the iteration variable is private. */
9696 tree c = NULL_TREE;
9697 tree c2 = NULL_TREE;
9698 if (orig_for_stmt != for_stmt)
9699 /* Do this only on innermost construct for combined ones. */;
9700 else if (ort == ORT_SIMD)
9702 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9703 (splay_tree_key) decl);
9704 omp_is_private (gimplify_omp_ctxp, decl,
9705 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9706 != 1));
9707 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9708 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9709 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9711 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9712 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9713 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9714 if (has_decl_expr
9715 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9717 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9718 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9720 struct gimplify_omp_ctx *outer
9721 = gimplify_omp_ctxp->outer_context;
9722 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9724 if (outer->region_type == ORT_WORKSHARE
9725 && outer->combined_loop)
9727 n = splay_tree_lookup (outer->variables,
9728 (splay_tree_key)decl);
9729 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9731 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9732 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9734 else
9736 struct gimplify_omp_ctx *octx = outer->outer_context;
9737 if (octx
9738 && octx->region_type == ORT_COMBINED_PARALLEL
9739 && octx->outer_context
9740 && (octx->outer_context->region_type
9741 == ORT_WORKSHARE)
9742 && octx->outer_context->combined_loop)
9744 octx = octx->outer_context;
9745 n = splay_tree_lookup (octx->variables,
9746 (splay_tree_key)decl);
9747 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9749 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9750 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9757 OMP_CLAUSE_DECL (c) = decl;
9758 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9759 OMP_FOR_CLAUSES (for_stmt) = c;
9760 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9761 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9763 if (outer->region_type == ORT_WORKSHARE
9764 && outer->combined_loop)
9766 if (outer->outer_context
9767 && (outer->outer_context->region_type
9768 == ORT_COMBINED_PARALLEL))
9769 outer = outer->outer_context;
9770 else if (omp_check_private (outer, decl, false))
9771 outer = NULL;
9773 else if (((outer->region_type & ORT_TASK) != 0)
9774 && outer->combined_loop
9775 && !omp_check_private (gimplify_omp_ctxp,
9776 decl, false))
9778 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9780 omp_notice_variable (outer, decl, true);
9781 outer = NULL;
9783 if (outer)
9785 n = splay_tree_lookup (outer->variables,
9786 (splay_tree_key)decl);
9787 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9789 omp_add_variable (outer, decl,
9790 GOVD_LASTPRIVATE | GOVD_SEEN);
9791 if (outer->region_type == ORT_COMBINED_PARALLEL
9792 && outer->outer_context
9793 && (outer->outer_context->region_type
9794 == ORT_WORKSHARE)
9795 && outer->outer_context->combined_loop)
9797 outer = outer->outer_context;
9798 n = splay_tree_lookup (outer->variables,
9799 (splay_tree_key)decl);
9800 if (omp_check_private (outer, decl, false))
9801 outer = NULL;
9802 else if (n == NULL
9803 || ((n->value & GOVD_DATA_SHARE_CLASS)
9804 == 0))
9805 omp_add_variable (outer, decl,
9806 GOVD_LASTPRIVATE
9807 | GOVD_SEEN);
9808 else
9809 outer = NULL;
9811 if (outer && outer->outer_context
9812 && (outer->outer_context->region_type
9813 == ORT_COMBINED_TEAMS))
9815 outer = outer->outer_context;
9816 n = splay_tree_lookup (outer->variables,
9817 (splay_tree_key)decl);
9818 if (n == NULL
9819 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9820 omp_add_variable (outer, decl,
9821 GOVD_SHARED | GOVD_SEEN);
9822 else
9823 outer = NULL;
9825 if (outer && outer->outer_context)
9826 omp_notice_variable (outer->outer_context, decl,
9827 true);
9832 else
9834 bool lastprivate
9835 = (!has_decl_expr
9836 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9837 struct gimplify_omp_ctx *outer
9838 = gimplify_omp_ctxp->outer_context;
9839 if (outer && lastprivate)
9841 if (outer->region_type == ORT_WORKSHARE
9842 && outer->combined_loop)
9844 n = splay_tree_lookup (outer->variables,
9845 (splay_tree_key)decl);
9846 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9848 lastprivate = false;
9849 outer = NULL;
9851 else if (outer->outer_context
9852 && (outer->outer_context->region_type
9853 == ORT_COMBINED_PARALLEL))
9854 outer = outer->outer_context;
9855 else if (omp_check_private (outer, decl, false))
9856 outer = NULL;
9858 else if (((outer->region_type & ORT_TASK) != 0)
9859 && outer->combined_loop
9860 && !omp_check_private (gimplify_omp_ctxp,
9861 decl, false))
9863 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9865 omp_notice_variable (outer, decl, true);
9866 outer = NULL;
9868 if (outer)
9870 n = splay_tree_lookup (outer->variables,
9871 (splay_tree_key)decl);
9872 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9874 omp_add_variable (outer, decl,
9875 GOVD_LASTPRIVATE | GOVD_SEEN);
9876 if (outer->region_type == ORT_COMBINED_PARALLEL
9877 && outer->outer_context
9878 && (outer->outer_context->region_type
9879 == ORT_WORKSHARE)
9880 && outer->outer_context->combined_loop)
9882 outer = outer->outer_context;
9883 n = splay_tree_lookup (outer->variables,
9884 (splay_tree_key)decl);
9885 if (omp_check_private (outer, decl, false))
9886 outer = NULL;
9887 else if (n == NULL
9888 || ((n->value & GOVD_DATA_SHARE_CLASS)
9889 == 0))
9890 omp_add_variable (outer, decl,
9891 GOVD_LASTPRIVATE
9892 | GOVD_SEEN);
9893 else
9894 outer = NULL;
9896 if (outer && outer->outer_context
9897 && (outer->outer_context->region_type
9898 == ORT_COMBINED_TEAMS))
9900 outer = outer->outer_context;
9901 n = splay_tree_lookup (outer->variables,
9902 (splay_tree_key)decl);
9903 if (n == NULL
9904 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9905 omp_add_variable (outer, decl,
9906 GOVD_SHARED | GOVD_SEEN);
9907 else
9908 outer = NULL;
9910 if (outer && outer->outer_context)
9911 omp_notice_variable (outer->outer_context, decl,
9912 true);
9917 c = build_omp_clause (input_location,
9918 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9919 : OMP_CLAUSE_PRIVATE);
9920 OMP_CLAUSE_DECL (c) = decl;
9921 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9922 OMP_FOR_CLAUSES (for_stmt) = c;
9923 omp_add_variable (gimplify_omp_ctxp, decl,
9924 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9925 | GOVD_EXPLICIT | GOVD_SEEN);
9926 c = NULL_TREE;
9929 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9930 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9931 else
9932 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9934 /* If DECL is not a gimple register, create a temporary variable to act
9935 as an iteration counter. This is valid, since DECL cannot be
9936 modified in the body of the loop. Similarly for any iteration vars
9937 in simd with collapse > 1 where the iterator vars must be
9938 lastprivate. */
9939 if (orig_for_stmt != for_stmt)
9940 var = decl;
9941 else if (!is_gimple_reg (decl)
9942 || (ort == ORT_SIMD
9943 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9945 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9946 /* Make sure omp_add_variable is not called on it prematurely.
9947 We call it ourselves a few lines later. */
9948 gimplify_omp_ctxp = NULL;
9949 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9950 gimplify_omp_ctxp = ctx;
9951 TREE_OPERAND (t, 0) = var;
9953 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9955 if (ort == ORT_SIMD
9956 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9958 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9959 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9960 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9961 OMP_CLAUSE_DECL (c2) = var;
9962 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9963 OMP_FOR_CLAUSES (for_stmt) = c2;
9964 omp_add_variable (gimplify_omp_ctxp, var,
9965 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9966 if (c == NULL_TREE)
9968 c = c2;
9969 c2 = NULL_TREE;
9972 else
9973 omp_add_variable (gimplify_omp_ctxp, var,
9974 GOVD_PRIVATE | GOVD_SEEN);
9976 else
9977 var = decl;
9979 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9980 is_gimple_val, fb_rvalue, false);
9981 ret = MIN (ret, tret);
9982 if (ret == GS_ERROR)
9983 return ret;
9985 /* Handle OMP_FOR_COND. */
9986 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9987 gcc_assert (COMPARISON_CLASS_P (t));
9988 gcc_assert (TREE_OPERAND (t, 0) == decl);
9990 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9991 is_gimple_val, fb_rvalue, false);
9992 ret = MIN (ret, tret);
9994 /* Handle OMP_FOR_INCR. */
9995 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9996 switch (TREE_CODE (t))
9998 case PREINCREMENT_EXPR:
9999 case POSTINCREMENT_EXPR:
10001 tree decl = TREE_OPERAND (t, 0);
10002 /* c_omp_for_incr_canonicalize_ptr() should have been
10003 called to massage things appropriately. */
10004 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10006 if (orig_for_stmt != for_stmt)
10007 break;
10008 t = build_int_cst (TREE_TYPE (decl), 1);
10009 if (c)
10010 OMP_CLAUSE_LINEAR_STEP (c) = t;
10011 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10012 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10013 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10014 break;
10017 case PREDECREMENT_EXPR:
10018 case POSTDECREMENT_EXPR:
10019 /* c_omp_for_incr_canonicalize_ptr() should have been
10020 called to massage things appropriately. */
10021 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10022 if (orig_for_stmt != for_stmt)
10023 break;
10024 t = build_int_cst (TREE_TYPE (decl), -1);
10025 if (c)
10026 OMP_CLAUSE_LINEAR_STEP (c) = t;
10027 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10028 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10029 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10030 break;
10032 case MODIFY_EXPR:
10033 gcc_assert (TREE_OPERAND (t, 0) == decl);
10034 TREE_OPERAND (t, 0) = var;
10036 t = TREE_OPERAND (t, 1);
10037 switch (TREE_CODE (t))
10039 case PLUS_EXPR:
10040 if (TREE_OPERAND (t, 1) == decl)
10042 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10043 TREE_OPERAND (t, 0) = var;
10044 break;
10047 /* Fallthru. */
10048 case MINUS_EXPR:
10049 case POINTER_PLUS_EXPR:
10050 gcc_assert (TREE_OPERAND (t, 0) == decl);
10051 TREE_OPERAND (t, 0) = var;
10052 break;
10053 default:
10054 gcc_unreachable ();
10057 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10058 is_gimple_val, fb_rvalue, false);
10059 ret = MIN (ret, tret);
10060 if (c)
10062 tree step = TREE_OPERAND (t, 1);
10063 tree stept = TREE_TYPE (decl);
10064 if (POINTER_TYPE_P (stept))
10065 stept = sizetype;
10066 step = fold_convert (stept, step);
10067 if (TREE_CODE (t) == MINUS_EXPR)
10068 step = fold_build1 (NEGATE_EXPR, stept, step);
10069 OMP_CLAUSE_LINEAR_STEP (c) = step;
10070 if (step != TREE_OPERAND (t, 1))
10072 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10073 &for_pre_body, NULL,
10074 is_gimple_val, fb_rvalue, false);
10075 ret = MIN (ret, tret);
10078 break;
10080 default:
10081 gcc_unreachable ();
10084 if (c2)
10086 gcc_assert (c);
10087 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10090 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10092 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10093 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10094 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10095 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10096 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10097 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10098 && OMP_CLAUSE_DECL (c) == decl)
10100 if (is_doacross && (collapse == 1 || i >= collapse))
10101 t = var;
10102 else
10104 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10105 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10106 gcc_assert (TREE_OPERAND (t, 0) == var);
10107 t = TREE_OPERAND (t, 1);
10108 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10109 || TREE_CODE (t) == MINUS_EXPR
10110 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10111 gcc_assert (TREE_OPERAND (t, 0) == var);
10112 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10113 is_doacross ? var : decl,
10114 TREE_OPERAND (t, 1));
10116 gimple_seq *seq;
10117 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10118 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10119 else
10120 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10121 gimplify_assign (decl, t, seq);
10126 BITMAP_FREE (has_decl_expr);
10128 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10130 push_gimplify_context ();
10131 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10133 OMP_FOR_BODY (orig_for_stmt)
10134 = build3 (BIND_EXPR, void_type_node, NULL,
10135 OMP_FOR_BODY (orig_for_stmt), NULL);
10136 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10140 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10141 &for_body);
10143 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10145 if (gimple_code (g) == GIMPLE_BIND)
10146 pop_gimplify_context (g);
10147 else
10148 pop_gimplify_context (NULL);
10151 if (orig_for_stmt != for_stmt)
10152 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10154 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10155 decl = TREE_OPERAND (t, 0);
10156 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10157 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10158 gimplify_omp_ctxp = ctx->outer_context;
10159 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10160 gimplify_omp_ctxp = ctx;
10161 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10162 TREE_OPERAND (t, 0) = var;
10163 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10164 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10165 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10168 gimplify_adjust_omp_clauses (pre_p, for_body,
10169 &OMP_FOR_CLAUSES (orig_for_stmt),
10170 TREE_CODE (orig_for_stmt));
10172 int kind;
10173 switch (TREE_CODE (orig_for_stmt))
10175 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10176 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10177 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10178 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10179 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10180 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10181 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10182 default:
10183 gcc_unreachable ();
10185 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10186 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10187 for_pre_body);
10188 if (orig_for_stmt != for_stmt)
10189 gimple_omp_for_set_combined_p (gfor, true);
10190 if (gimplify_omp_ctxp
10191 && (gimplify_omp_ctxp->combined_loop
10192 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10193 && gimplify_omp_ctxp->outer_context
10194 && gimplify_omp_ctxp->outer_context->combined_loop)))
10196 gimple_omp_for_set_combined_into_p (gfor, true);
10197 if (gimplify_omp_ctxp->combined_loop)
10198 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10199 else
10200 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10203 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10205 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10206 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10207 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10208 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10209 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10210 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10211 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10212 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10215 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10216 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10217 The outer taskloop stands for computing the number of iterations,
10218 counts for collapsed loops and holding taskloop specific clauses.
10219 The task construct stands for the effect of data sharing on the
10220 explicit task it creates and the inner taskloop stands for expansion
10221 of the static loop inside of the explicit task construct. */
10222 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10224 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10225 tree task_clauses = NULL_TREE;
10226 tree c = *gfor_clauses_ptr;
10227 tree *gtask_clauses_ptr = &task_clauses;
10228 tree outer_for_clauses = NULL_TREE;
10229 tree *gforo_clauses_ptr = &outer_for_clauses;
10230 for (; c; c = OMP_CLAUSE_CHAIN (c))
10231 switch (OMP_CLAUSE_CODE (c))
10233 /* These clauses are allowed on task, move them there. */
10234 case OMP_CLAUSE_SHARED:
10235 case OMP_CLAUSE_FIRSTPRIVATE:
10236 case OMP_CLAUSE_DEFAULT:
10237 case OMP_CLAUSE_IF:
10238 case OMP_CLAUSE_UNTIED:
10239 case OMP_CLAUSE_FINAL:
10240 case OMP_CLAUSE_MERGEABLE:
10241 case OMP_CLAUSE_PRIORITY:
10242 *gtask_clauses_ptr = c;
10243 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10244 break;
10245 case OMP_CLAUSE_PRIVATE:
10246 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10248 /* We want private on outer for and firstprivate
10249 on task. */
10250 *gtask_clauses_ptr
10251 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10252 OMP_CLAUSE_FIRSTPRIVATE);
10253 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10254 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10255 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10256 *gforo_clauses_ptr = c;
10257 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10259 else
10261 *gtask_clauses_ptr = c;
10262 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10264 break;
10265 /* These clauses go into outer taskloop clauses. */
10266 case OMP_CLAUSE_GRAINSIZE:
10267 case OMP_CLAUSE_NUM_TASKS:
10268 case OMP_CLAUSE_NOGROUP:
10269 *gforo_clauses_ptr = c;
10270 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10271 break;
10272 /* Taskloop clause we duplicate on both taskloops. */
10273 case OMP_CLAUSE_COLLAPSE:
10274 *gfor_clauses_ptr = c;
10275 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10276 *gforo_clauses_ptr = copy_node (c);
10277 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10278 break;
10279 /* For lastprivate, keep the clause on inner taskloop, and add
10280 a shared clause on task. If the same decl is also firstprivate,
10281 add also firstprivate clause on the inner taskloop. */
10282 case OMP_CLAUSE_LASTPRIVATE:
10283 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10285 /* For taskloop C++ lastprivate IVs, we want:
10286 1) private on outer taskloop
10287 2) firstprivate and shared on task
10288 3) lastprivate on inner taskloop */
10289 *gtask_clauses_ptr
10290 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10291 OMP_CLAUSE_FIRSTPRIVATE);
10292 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10293 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10294 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10295 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10296 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10297 OMP_CLAUSE_PRIVATE);
10298 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10299 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10300 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10301 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10303 *gfor_clauses_ptr = c;
10304 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10305 *gtask_clauses_ptr
10306 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10307 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10308 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10309 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10310 gtask_clauses_ptr
10311 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10312 break;
10313 default:
10314 gcc_unreachable ();
10316 *gfor_clauses_ptr = NULL_TREE;
10317 *gtask_clauses_ptr = NULL_TREE;
10318 *gforo_clauses_ptr = NULL_TREE;
10319 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10320 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10321 NULL_TREE, NULL_TREE, NULL_TREE);
10322 gimple_omp_task_set_taskloop_p (g, true);
10323 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10324 gomp_for *gforo
10325 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10326 gimple_omp_for_collapse (gfor),
10327 gimple_omp_for_pre_body (gfor));
10328 gimple_omp_for_set_pre_body (gfor, NULL);
10329 gimple_omp_for_set_combined_p (gforo, true);
10330 gimple_omp_for_set_combined_into_p (gfor, true);
10331 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10333 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10334 tree v = create_tmp_var (type);
10335 gimple_omp_for_set_index (gforo, i, v);
10336 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10337 gimple_omp_for_set_initial (gforo, i, t);
10338 gimple_omp_for_set_cond (gforo, i,
10339 gimple_omp_for_cond (gfor, i));
10340 t = unshare_expr (gimple_omp_for_final (gfor, i));
10341 gimple_omp_for_set_final (gforo, i, t);
10342 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10343 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10344 TREE_OPERAND (t, 0) = v;
10345 gimple_omp_for_set_incr (gforo, i, t);
10346 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10347 OMP_CLAUSE_DECL (t) = v;
10348 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10349 gimple_omp_for_set_clauses (gforo, t);
10351 gimplify_seq_add_stmt (pre_p, gforo);
10353 else
10354 gimplify_seq_add_stmt (pre_p, gfor);
10355 if (ret != GS_ALL_DONE)
10356 return GS_ERROR;
10357 *expr_p = NULL_TREE;
10358 return GS_ALL_DONE;
10361 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10362 of OMP_TARGET's body. */
10364 static tree
10365 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10367 *walk_subtrees = 0;
10368 switch (TREE_CODE (*tp))
10370 case OMP_TEAMS:
10371 return *tp;
10372 case BIND_EXPR:
10373 case STATEMENT_LIST:
10374 *walk_subtrees = 1;
10375 break;
10376 default:
10377 break;
10379 return NULL_TREE;
10382 /* Helper function of optimize_target_teams, determine if the expression
10383 can be computed safely before the target construct on the host. */
10385 static tree
10386 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10388 splay_tree_node n;
10390 if (TYPE_P (*tp))
10392 *walk_subtrees = 0;
10393 return NULL_TREE;
10395 switch (TREE_CODE (*tp))
10397 case VAR_DECL:
10398 case PARM_DECL:
10399 case RESULT_DECL:
10400 *walk_subtrees = 0;
10401 if (error_operand_p (*tp)
10402 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10403 || DECL_HAS_VALUE_EXPR_P (*tp)
10404 || DECL_THREAD_LOCAL_P (*tp)
10405 || TREE_SIDE_EFFECTS (*tp)
10406 || TREE_THIS_VOLATILE (*tp))
10407 return *tp;
10408 if (is_global_var (*tp)
10409 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10410 || lookup_attribute ("omp declare target link",
10411 DECL_ATTRIBUTES (*tp))))
10412 return *tp;
10413 if (VAR_P (*tp)
10414 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10415 && !is_global_var (*tp)
10416 && decl_function_context (*tp) == current_function_decl)
10417 return *tp;
10418 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10419 (splay_tree_key) *tp);
10420 if (n == NULL)
10422 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10423 return NULL_TREE;
10424 return *tp;
10426 else if (n->value & GOVD_LOCAL)
10427 return *tp;
10428 else if (n->value & GOVD_FIRSTPRIVATE)
10429 return NULL_TREE;
10430 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10431 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10432 return NULL_TREE;
10433 return *tp;
10434 case INTEGER_CST:
10435 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10436 return *tp;
10437 return NULL_TREE;
10438 case TARGET_EXPR:
10439 if (TARGET_EXPR_INITIAL (*tp)
10440 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10441 return *tp;
10442 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10443 walk_subtrees, NULL);
10444 /* Allow some reasonable subset of integral arithmetics. */
10445 case PLUS_EXPR:
10446 case MINUS_EXPR:
10447 case MULT_EXPR:
10448 case TRUNC_DIV_EXPR:
10449 case CEIL_DIV_EXPR:
10450 case FLOOR_DIV_EXPR:
10451 case ROUND_DIV_EXPR:
10452 case TRUNC_MOD_EXPR:
10453 case CEIL_MOD_EXPR:
10454 case FLOOR_MOD_EXPR:
10455 case ROUND_MOD_EXPR:
10456 case RDIV_EXPR:
10457 case EXACT_DIV_EXPR:
10458 case MIN_EXPR:
10459 case MAX_EXPR:
10460 case LSHIFT_EXPR:
10461 case RSHIFT_EXPR:
10462 case BIT_IOR_EXPR:
10463 case BIT_XOR_EXPR:
10464 case BIT_AND_EXPR:
10465 case NEGATE_EXPR:
10466 case ABS_EXPR:
10467 case BIT_NOT_EXPR:
10468 case NON_LVALUE_EXPR:
10469 CASE_CONVERT:
10470 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10471 return *tp;
10472 return NULL_TREE;
10473 /* And disallow anything else, except for comparisons. */
10474 default:
10475 if (COMPARISON_CLASS_P (*tp))
10476 return NULL_TREE;
10477 return *tp;
10481 /* Try to determine if the num_teams and/or thread_limit expressions
10482 can have their values determined already before entering the
10483 target construct.
10484 INTEGER_CSTs trivially are,
10485 integral decls that are firstprivate (explicitly or implicitly)
10486 or explicitly map(always, to:) or map(always, tofrom:) on the target
10487 region too, and expressions involving simple arithmetics on those
10488 too, function calls are not ok, dereferencing something neither etc.
10489 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10490 EXPR based on what we find:
10491 0 stands for clause not specified at all, use implementation default
10492 -1 stands for value that can't be determined easily before entering
10493 the target construct.
10494 If teams construct is not present at all, use 1 for num_teams
10495 and 0 for thread_limit (only one team is involved, and the thread
10496 limit is implementation defined. */
10498 static void
10499 optimize_target_teams (tree target, gimple_seq *pre_p)
10501 tree body = OMP_BODY (target);
10502 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10503 tree num_teams = integer_zero_node;
10504 tree thread_limit = integer_zero_node;
10505 location_t num_teams_loc = EXPR_LOCATION (target);
10506 location_t thread_limit_loc = EXPR_LOCATION (target);
10507 tree c, *p, expr;
10508 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10510 if (teams == NULL_TREE)
10511 num_teams = integer_one_node;
10512 else
10513 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10515 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10517 p = &num_teams;
10518 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10520 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10522 p = &thread_limit;
10523 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10525 else
10526 continue;
10527 expr = OMP_CLAUSE_OPERAND (c, 0);
10528 if (TREE_CODE (expr) == INTEGER_CST)
10530 *p = expr;
10531 continue;
10533 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10535 *p = integer_minus_one_node;
10536 continue;
10538 *p = expr;
10539 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10540 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10541 == GS_ERROR)
10543 gimplify_omp_ctxp = target_ctx;
10544 *p = integer_minus_one_node;
10545 continue;
10547 gimplify_omp_ctxp = target_ctx;
10548 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10549 OMP_CLAUSE_OPERAND (c, 0) = *p;
10551 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10552 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10553 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10554 OMP_TARGET_CLAUSES (target) = c;
10555 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10556 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10557 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10558 OMP_TARGET_CLAUSES (target) = c;
10561 /* Gimplify the gross structure of several OMP constructs. */
10563 static void
10564 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10566 tree expr = *expr_p;
10567 gimple *stmt;
10568 gimple_seq body = NULL;
10569 enum omp_region_type ort;
10571 switch (TREE_CODE (expr))
10573 case OMP_SECTIONS:
10574 case OMP_SINGLE:
10575 ort = ORT_WORKSHARE;
10576 break;
10577 case OMP_TARGET:
10578 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10579 break;
10580 case OACC_KERNELS:
10581 ort = ORT_ACC_KERNELS;
10582 break;
10583 case OACC_PARALLEL:
10584 ort = ORT_ACC_PARALLEL;
10585 break;
10586 case OACC_DATA:
10587 ort = ORT_ACC_DATA;
10588 break;
10589 case OMP_TARGET_DATA:
10590 ort = ORT_TARGET_DATA;
10591 break;
10592 case OMP_TEAMS:
10593 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10594 break;
10595 case OACC_HOST_DATA:
10596 ort = ORT_ACC_HOST_DATA;
10597 break;
10598 default:
10599 gcc_unreachable ();
10601 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10602 TREE_CODE (expr));
10603 if (TREE_CODE (expr) == OMP_TARGET)
10604 optimize_target_teams (expr, pre_p);
10605 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10607 push_gimplify_context ();
10608 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10609 if (gimple_code (g) == GIMPLE_BIND)
10610 pop_gimplify_context (g);
10611 else
10612 pop_gimplify_context (NULL);
10613 if ((ort & ORT_TARGET_DATA) != 0)
10615 enum built_in_function end_ix;
10616 switch (TREE_CODE (expr))
10618 case OACC_DATA:
10619 case OACC_HOST_DATA:
10620 end_ix = BUILT_IN_GOACC_DATA_END;
10621 break;
10622 case OMP_TARGET_DATA:
10623 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10624 break;
10625 default:
10626 gcc_unreachable ();
10628 tree fn = builtin_decl_explicit (end_ix);
10629 g = gimple_build_call (fn, 0);
10630 gimple_seq cleanup = NULL;
10631 gimple_seq_add_stmt (&cleanup, g);
10632 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10633 body = NULL;
10634 gimple_seq_add_stmt (&body, g);
10637 else
10638 gimplify_and_add (OMP_BODY (expr), &body);
10639 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10640 TREE_CODE (expr));
10642 switch (TREE_CODE (expr))
10644 case OACC_DATA:
10645 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10646 OMP_CLAUSES (expr));
10647 break;
10648 case OACC_KERNELS:
10649 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10650 OMP_CLAUSES (expr));
10651 break;
10652 case OACC_HOST_DATA:
10653 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10654 OMP_CLAUSES (expr));
10655 break;
10656 case OACC_PARALLEL:
10657 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10658 OMP_CLAUSES (expr));
10659 break;
10660 case OMP_SECTIONS:
10661 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10662 break;
10663 case OMP_SINGLE:
10664 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10665 break;
10666 case OMP_TARGET:
10667 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10668 OMP_CLAUSES (expr));
10669 break;
10670 case OMP_TARGET_DATA:
10671 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10672 OMP_CLAUSES (expr));
10673 break;
10674 case OMP_TEAMS:
10675 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10676 break;
10677 default:
10678 gcc_unreachable ();
10681 gimplify_seq_add_stmt (pre_p, stmt);
10682 *expr_p = NULL_TREE;
10685 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10686 target update constructs. */
10688 static void
10689 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10691 tree expr = *expr_p;
10692 int kind;
10693 gomp_target *stmt;
10694 enum omp_region_type ort = ORT_WORKSHARE;
10696 switch (TREE_CODE (expr))
10698 case OACC_ENTER_DATA:
10699 case OACC_EXIT_DATA:
10700 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10701 ort = ORT_ACC;
10702 break;
10703 case OACC_UPDATE:
10704 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10705 ort = ORT_ACC;
10706 break;
10707 case OMP_TARGET_UPDATE:
10708 kind = GF_OMP_TARGET_KIND_UPDATE;
10709 break;
10710 case OMP_TARGET_ENTER_DATA:
10711 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10712 break;
10713 case OMP_TARGET_EXIT_DATA:
10714 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10715 break;
10716 default:
10717 gcc_unreachable ();
10719 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10720 ort, TREE_CODE (expr));
10721 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10722 TREE_CODE (expr));
10723 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10725 gimplify_seq_add_stmt (pre_p, stmt);
10726 *expr_p = NULL_TREE;
10729 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10730 stabilized the lhs of the atomic operation as *ADDR. Return true if
10731 EXPR is this stabilized form. */
10733 static bool
10734 goa_lhs_expr_p (tree expr, tree addr)
10736 /* Also include casts to other type variants. The C front end is fond
10737 of adding these for e.g. volatile variables. This is like
10738 STRIP_TYPE_NOPS but includes the main variant lookup. */
10739 STRIP_USELESS_TYPE_CONVERSION (expr);
10741 if (TREE_CODE (expr) == INDIRECT_REF)
10743 expr = TREE_OPERAND (expr, 0);
10744 while (expr != addr
10745 && (CONVERT_EXPR_P (expr)
10746 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10747 && TREE_CODE (expr) == TREE_CODE (addr)
10748 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10750 expr = TREE_OPERAND (expr, 0);
10751 addr = TREE_OPERAND (addr, 0);
10753 if (expr == addr)
10754 return true;
10755 return (TREE_CODE (addr) == ADDR_EXPR
10756 && TREE_CODE (expr) == ADDR_EXPR
10757 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10759 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10760 return true;
10761 return false;
10764 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10765 expression does not involve the lhs, evaluate it into a temporary.
10766 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10767 or -1 if an error was encountered. */
10769 static int
10770 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10771 tree lhs_var)
10773 tree expr = *expr_p;
10774 int saw_lhs;
10776 if (goa_lhs_expr_p (expr, lhs_addr))
10778 *expr_p = lhs_var;
10779 return 1;
10781 if (is_gimple_val (expr))
10782 return 0;
10784 saw_lhs = 0;
10785 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10787 case tcc_binary:
10788 case tcc_comparison:
10789 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10790 lhs_var);
10791 /* FALLTHRU */
10792 case tcc_unary:
10793 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10794 lhs_var);
10795 break;
10796 case tcc_expression:
10797 switch (TREE_CODE (expr))
10799 case TRUTH_ANDIF_EXPR:
10800 case TRUTH_ORIF_EXPR:
10801 case TRUTH_AND_EXPR:
10802 case TRUTH_OR_EXPR:
10803 case TRUTH_XOR_EXPR:
10804 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10805 lhs_addr, lhs_var);
10806 /* FALLTHRU */
10807 case TRUTH_NOT_EXPR:
10808 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10809 lhs_addr, lhs_var);
10810 break;
10811 case COMPOUND_EXPR:
10812 /* Break out any preevaluations from cp_build_modify_expr. */
10813 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10814 expr = TREE_OPERAND (expr, 1))
10815 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10816 *expr_p = expr;
10817 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10818 default:
10819 break;
10821 break;
10822 default:
10823 break;
10826 if (saw_lhs == 0)
10828 enum gimplify_status gs;
10829 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10830 if (gs != GS_ALL_DONE)
10831 saw_lhs = -1;
10834 return saw_lhs;
10837 /* Gimplify an OMP_ATOMIC statement. */
10839 static enum gimplify_status
10840 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10842 tree addr = TREE_OPERAND (*expr_p, 0);
10843 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10844 ? NULL : TREE_OPERAND (*expr_p, 1);
10845 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10846 tree tmp_load;
10847 gomp_atomic_load *loadstmt;
10848 gomp_atomic_store *storestmt;
10850 tmp_load = create_tmp_reg (type);
10851 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10852 return GS_ERROR;
10854 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10855 != GS_ALL_DONE)
10856 return GS_ERROR;
10858 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10859 gimplify_seq_add_stmt (pre_p, loadstmt);
10860 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10861 != GS_ALL_DONE)
10862 return GS_ERROR;
10864 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10865 rhs = tmp_load;
10866 storestmt = gimple_build_omp_atomic_store (rhs);
10867 gimplify_seq_add_stmt (pre_p, storestmt);
10868 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10870 gimple_omp_atomic_set_seq_cst (loadstmt);
10871 gimple_omp_atomic_set_seq_cst (storestmt);
10873 switch (TREE_CODE (*expr_p))
10875 case OMP_ATOMIC_READ:
10876 case OMP_ATOMIC_CAPTURE_OLD:
10877 *expr_p = tmp_load;
10878 gimple_omp_atomic_set_need_value (loadstmt);
10879 break;
10880 case OMP_ATOMIC_CAPTURE_NEW:
10881 *expr_p = rhs;
10882 gimple_omp_atomic_set_need_value (storestmt);
10883 break;
10884 default:
10885 *expr_p = NULL;
10886 break;
10889 return GS_ALL_DONE;
10892 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10893 body, and adding some EH bits. */
10895 static enum gimplify_status
10896 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10898 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10899 gimple *body_stmt;
10900 gtransaction *trans_stmt;
10901 gimple_seq body = NULL;
10902 int subcode = 0;
10904 /* Wrap the transaction body in a BIND_EXPR so we have a context
10905 where to put decls for OMP. */
10906 if (TREE_CODE (tbody) != BIND_EXPR)
10908 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10909 TREE_SIDE_EFFECTS (bind) = 1;
10910 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10911 TRANSACTION_EXPR_BODY (expr) = bind;
10914 push_gimplify_context ();
10915 temp = voidify_wrapper_expr (*expr_p, NULL);
10917 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10918 pop_gimplify_context (body_stmt);
10920 trans_stmt = gimple_build_transaction (body);
10921 if (TRANSACTION_EXPR_OUTER (expr))
10922 subcode = GTMA_IS_OUTER;
10923 else if (TRANSACTION_EXPR_RELAXED (expr))
10924 subcode = GTMA_IS_RELAXED;
10925 gimple_transaction_set_subcode (trans_stmt, subcode);
10927 gimplify_seq_add_stmt (pre_p, trans_stmt);
10929 if (temp)
10931 *expr_p = temp;
10932 return GS_OK;
10935 *expr_p = NULL_TREE;
10936 return GS_ALL_DONE;
10939 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10940 is the OMP_BODY of the original EXPR (which has already been
10941 gimplified so it's not present in the EXPR).
10943 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10945 static gimple *
10946 gimplify_omp_ordered (tree expr, gimple_seq body)
10948 tree c, decls;
10949 int failures = 0;
10950 unsigned int i;
10951 tree source_c = NULL_TREE;
10952 tree sink_c = NULL_TREE;
10954 if (gimplify_omp_ctxp)
10956 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10957 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10958 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10959 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10960 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10962 error_at (OMP_CLAUSE_LOCATION (c),
10963 "%<ordered%> construct with %<depend%> clause must be "
10964 "closely nested inside a loop with %<ordered%> clause "
10965 "with a parameter");
10966 failures++;
10968 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10969 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10971 bool fail = false;
10972 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10973 decls && TREE_CODE (decls) == TREE_LIST;
10974 decls = TREE_CHAIN (decls), ++i)
10975 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10976 continue;
10977 else if (TREE_VALUE (decls)
10978 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10980 error_at (OMP_CLAUSE_LOCATION (c),
10981 "variable %qE is not an iteration "
10982 "of outermost loop %d, expected %qE",
10983 TREE_VALUE (decls), i + 1,
10984 gimplify_omp_ctxp->loop_iter_var[2 * i]);
10985 fail = true;
10986 failures++;
10988 else
10989 TREE_VALUE (decls)
10990 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
10991 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10993 error_at (OMP_CLAUSE_LOCATION (c),
10994 "number of variables in %<depend(sink)%> "
10995 "clause does not match number of "
10996 "iteration variables");
10997 failures++;
10999 sink_c = c;
11001 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11002 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11004 if (source_c)
11006 error_at (OMP_CLAUSE_LOCATION (c),
11007 "more than one %<depend(source)%> clause on an "
11008 "%<ordered%> construct");
11009 failures++;
11011 else
11012 source_c = c;
11015 if (source_c && sink_c)
11017 error_at (OMP_CLAUSE_LOCATION (source_c),
11018 "%<depend(source)%> clause specified together with "
11019 "%<depend(sink:)%> clauses on the same construct");
11020 failures++;
11023 if (failures)
11024 return gimple_build_nop ();
11025 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11028 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11029 expression produces a value to be used as an operand inside a GIMPLE
11030 statement, the value will be stored back in *EXPR_P. This value will
11031 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11032 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11033 emitted in PRE_P and POST_P.
11035 Additionally, this process may overwrite parts of the input
11036 expression during gimplification. Ideally, it should be
11037 possible to do non-destructive gimplification.
11039 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11040 the expression needs to evaluate to a value to be used as
11041 an operand in a GIMPLE statement, this value will be stored in
11042 *EXPR_P on exit. This happens when the caller specifies one
11043 of fb_lvalue or fb_rvalue fallback flags.
11045 PRE_P will contain the sequence of GIMPLE statements corresponding
11046 to the evaluation of EXPR and all the side-effects that must
11047 be executed before the main expression. On exit, the last
11048 statement of PRE_P is the core statement being gimplified. For
11049 instance, when gimplifying 'if (++a)' the last statement in
11050 PRE_P will be 'if (t.1)' where t.1 is the result of
11051 pre-incrementing 'a'.
11053 POST_P will contain the sequence of GIMPLE statements corresponding
11054 to the evaluation of all the side-effects that must be executed
11055 after the main expression. If this is NULL, the post
11056 side-effects are stored at the end of PRE_P.
11058 The reason why the output is split in two is to handle post
11059 side-effects explicitly. In some cases, an expression may have
11060 inner and outer post side-effects which need to be emitted in
11061 an order different from the one given by the recursive
11062 traversal. For instance, for the expression (*p--)++ the post
11063 side-effects of '--' must actually occur *after* the post
11064 side-effects of '++'. However, gimplification will first visit
11065 the inner expression, so if a separate POST sequence was not
11066 used, the resulting sequence would be:
11068 1 t.1 = *p
11069 2 p = p - 1
11070 3 t.2 = t.1 + 1
11071 4 *p = t.2
11073 However, the post-decrement operation in line #2 must not be
11074 evaluated until after the store to *p at line #4, so the
11075 correct sequence should be:
11077 1 t.1 = *p
11078 2 t.2 = t.1 + 1
11079 3 *p = t.2
11080 4 p = p - 1
11082 So, by specifying a separate post queue, it is possible
11083 to emit the post side-effects in the correct order.
11084 If POST_P is NULL, an internal queue will be used. Before
11085 returning to the caller, the sequence POST_P is appended to
11086 the main output sequence PRE_P.
11088 GIMPLE_TEST_F points to a function that takes a tree T and
11089 returns nonzero if T is in the GIMPLE form requested by the
11090 caller. The GIMPLE predicates are in gimple.c.
11092 FALLBACK tells the function what sort of a temporary we want if
11093 gimplification cannot produce an expression that complies with
11094 GIMPLE_TEST_F.
11096 fb_none means that no temporary should be generated
11097 fb_rvalue means that an rvalue is OK to generate
11098 fb_lvalue means that an lvalue is OK to generate
11099 fb_either means that either is OK, but an lvalue is preferable.
11100 fb_mayfail means that gimplification may fail (in which case
11101 GS_ERROR will be returned)
11103 The return value is either GS_ERROR or GS_ALL_DONE, since this
11104 function iterates until EXPR is completely gimplified or an error
11105 occurs. */
11107 enum gimplify_status
11108 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11109 bool (*gimple_test_f) (tree), fallback_t fallback)
11111 tree tmp;
11112 gimple_seq internal_pre = NULL;
11113 gimple_seq internal_post = NULL;
11114 tree save_expr;
11115 bool is_statement;
11116 location_t saved_location;
11117 enum gimplify_status ret;
11118 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11119 tree label;
11121 save_expr = *expr_p;
11122 if (save_expr == NULL_TREE)
11123 return GS_ALL_DONE;
11125 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11126 is_statement = gimple_test_f == is_gimple_stmt;
11127 if (is_statement)
11128 gcc_assert (pre_p);
11130 /* Consistency checks. */
11131 if (gimple_test_f == is_gimple_reg)
11132 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11133 else if (gimple_test_f == is_gimple_val
11134 || gimple_test_f == is_gimple_call_addr
11135 || gimple_test_f == is_gimple_condexpr
11136 || gimple_test_f == is_gimple_mem_rhs
11137 || gimple_test_f == is_gimple_mem_rhs_or_call
11138 || gimple_test_f == is_gimple_reg_rhs
11139 || gimple_test_f == is_gimple_reg_rhs_or_call
11140 || gimple_test_f == is_gimple_asm_val
11141 || gimple_test_f == is_gimple_mem_ref_addr)
11142 gcc_assert (fallback & fb_rvalue);
11143 else if (gimple_test_f == is_gimple_min_lval
11144 || gimple_test_f == is_gimple_lvalue)
11145 gcc_assert (fallback & fb_lvalue);
11146 else if (gimple_test_f == is_gimple_addressable)
11147 gcc_assert (fallback & fb_either);
11148 else if (gimple_test_f == is_gimple_stmt)
11149 gcc_assert (fallback == fb_none);
11150 else
11152 /* We should have recognized the GIMPLE_TEST_F predicate to
11153 know what kind of fallback to use in case a temporary is
11154 needed to hold the value or address of *EXPR_P. */
11155 gcc_unreachable ();
11158 /* We used to check the predicate here and return immediately if it
11159 succeeds. This is wrong; the design is for gimplification to be
11160 idempotent, and for the predicates to only test for valid forms, not
11161 whether they are fully simplified. */
11162 if (pre_p == NULL)
11163 pre_p = &internal_pre;
11165 if (post_p == NULL)
11166 post_p = &internal_post;
11168 /* Remember the last statements added to PRE_P and POST_P. Every
11169 new statement added by the gimplification helpers needs to be
11170 annotated with location information. To centralize the
11171 responsibility, we remember the last statement that had been
11172 added to both queues before gimplifying *EXPR_P. If
11173 gimplification produces new statements in PRE_P and POST_P, those
11174 statements will be annotated with the same location information
11175 as *EXPR_P. */
11176 pre_last_gsi = gsi_last (*pre_p);
11177 post_last_gsi = gsi_last (*post_p);
11179 saved_location = input_location;
11180 if (save_expr != error_mark_node
11181 && EXPR_HAS_LOCATION (*expr_p))
11182 input_location = EXPR_LOCATION (*expr_p);
11184 /* Loop over the specific gimplifiers until the toplevel node
11185 remains the same. */
11188 /* Strip away as many useless type conversions as possible
11189 at the toplevel. */
11190 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11192 /* Remember the expr. */
11193 save_expr = *expr_p;
11195 /* Die, die, die, my darling. */
11196 if (save_expr == error_mark_node
11197 || (TREE_TYPE (save_expr)
11198 && TREE_TYPE (save_expr) == error_mark_node))
11200 ret = GS_ERROR;
11201 break;
11204 /* Do any language-specific gimplification. */
11205 ret = ((enum gimplify_status)
11206 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11207 if (ret == GS_OK)
11209 if (*expr_p == NULL_TREE)
11210 break;
11211 if (*expr_p != save_expr)
11212 continue;
11214 else if (ret != GS_UNHANDLED)
11215 break;
11217 /* Make sure that all the cases set 'ret' appropriately. */
11218 ret = GS_UNHANDLED;
11219 switch (TREE_CODE (*expr_p))
11221 /* First deal with the special cases. */
11223 case POSTINCREMENT_EXPR:
11224 case POSTDECREMENT_EXPR:
11225 case PREINCREMENT_EXPR:
11226 case PREDECREMENT_EXPR:
11227 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11228 fallback != fb_none,
11229 TREE_TYPE (*expr_p));
11230 break;
11232 case VIEW_CONVERT_EXPR:
11233 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11234 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11236 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11237 post_p, is_gimple_val, fb_rvalue);
11238 recalculate_side_effects (*expr_p);
11239 break;
11241 /* Fallthru. */
11243 case ARRAY_REF:
11244 case ARRAY_RANGE_REF:
11245 case REALPART_EXPR:
11246 case IMAGPART_EXPR:
11247 case COMPONENT_REF:
11248 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11249 fallback ? fallback : fb_rvalue);
11250 break;
11252 case COND_EXPR:
11253 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11255 /* C99 code may assign to an array in a structure value of a
11256 conditional expression, and this has undefined behavior
11257 only on execution, so create a temporary if an lvalue is
11258 required. */
11259 if (fallback == fb_lvalue)
11261 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11262 mark_addressable (*expr_p);
11263 ret = GS_OK;
11265 break;
11267 case CALL_EXPR:
11268 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11270 /* C99 code may assign to an array in a structure returned
11271 from a function, and this has undefined behavior only on
11272 execution, so create a temporary if an lvalue is
11273 required. */
11274 if (fallback == fb_lvalue)
11276 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11277 mark_addressable (*expr_p);
11278 ret = GS_OK;
11280 break;
11282 case TREE_LIST:
11283 gcc_unreachable ();
11285 case COMPOUND_EXPR:
11286 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11287 break;
11289 case COMPOUND_LITERAL_EXPR:
11290 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11291 gimple_test_f, fallback);
11292 break;
11294 case MODIFY_EXPR:
11295 case INIT_EXPR:
11296 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11297 fallback != fb_none);
11298 break;
11300 case TRUTH_ANDIF_EXPR:
11301 case TRUTH_ORIF_EXPR:
11303 /* Preserve the original type of the expression and the
11304 source location of the outer expression. */
11305 tree org_type = TREE_TYPE (*expr_p);
11306 *expr_p = gimple_boolify (*expr_p);
11307 *expr_p = build3_loc (input_location, COND_EXPR,
11308 org_type, *expr_p,
11309 fold_convert_loc
11310 (input_location,
11311 org_type, boolean_true_node),
11312 fold_convert_loc
11313 (input_location,
11314 org_type, boolean_false_node));
11315 ret = GS_OK;
11316 break;
11319 case TRUTH_NOT_EXPR:
11321 tree type = TREE_TYPE (*expr_p);
11322 /* The parsers are careful to generate TRUTH_NOT_EXPR
11323 only with operands that are always zero or one.
11324 We do not fold here but handle the only interesting case
11325 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11326 *expr_p = gimple_boolify (*expr_p);
11327 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11328 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11329 TREE_TYPE (*expr_p),
11330 TREE_OPERAND (*expr_p, 0));
11331 else
11332 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11333 TREE_TYPE (*expr_p),
11334 TREE_OPERAND (*expr_p, 0),
11335 build_int_cst (TREE_TYPE (*expr_p), 1));
11336 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11337 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11338 ret = GS_OK;
11339 break;
11342 case ADDR_EXPR:
11343 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11344 break;
11346 case ANNOTATE_EXPR:
11348 tree cond = TREE_OPERAND (*expr_p, 0);
11349 tree kind = TREE_OPERAND (*expr_p, 1);
11350 tree type = TREE_TYPE (cond);
11351 if (!INTEGRAL_TYPE_P (type))
11353 *expr_p = cond;
11354 ret = GS_OK;
11355 break;
11357 tree tmp = create_tmp_var (type);
11358 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11359 gcall *call
11360 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11361 gimple_call_set_lhs (call, tmp);
11362 gimplify_seq_add_stmt (pre_p, call);
11363 *expr_p = tmp;
11364 ret = GS_ALL_DONE;
11365 break;
11368 case VA_ARG_EXPR:
11369 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11370 break;
11372 CASE_CONVERT:
11373 if (IS_EMPTY_STMT (*expr_p))
11375 ret = GS_ALL_DONE;
11376 break;
11379 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11380 || fallback == fb_none)
11382 /* Just strip a conversion to void (or in void context) and
11383 try again. */
11384 *expr_p = TREE_OPERAND (*expr_p, 0);
11385 ret = GS_OK;
11386 break;
11389 ret = gimplify_conversion (expr_p);
11390 if (ret == GS_ERROR)
11391 break;
11392 if (*expr_p != save_expr)
11393 break;
11394 /* FALLTHRU */
11396 case FIX_TRUNC_EXPR:
11397 /* unary_expr: ... | '(' cast ')' val | ... */
11398 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11399 is_gimple_val, fb_rvalue);
11400 recalculate_side_effects (*expr_p);
11401 break;
11403 case INDIRECT_REF:
11405 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11406 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11407 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11409 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11410 if (*expr_p != save_expr)
11412 ret = GS_OK;
11413 break;
11416 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11417 is_gimple_reg, fb_rvalue);
11418 if (ret == GS_ERROR)
11419 break;
11421 recalculate_side_effects (*expr_p);
11422 *expr_p = fold_build2_loc (input_location, MEM_REF,
11423 TREE_TYPE (*expr_p),
11424 TREE_OPERAND (*expr_p, 0),
11425 build_int_cst (saved_ptr_type, 0));
11426 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11427 TREE_THIS_NOTRAP (*expr_p) = notrap;
11428 ret = GS_OK;
11429 break;
11432 /* We arrive here through the various re-gimplifcation paths. */
11433 case MEM_REF:
11434 /* First try re-folding the whole thing. */
11435 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11436 TREE_OPERAND (*expr_p, 0),
11437 TREE_OPERAND (*expr_p, 1));
11438 if (tmp)
11440 REF_REVERSE_STORAGE_ORDER (tmp)
11441 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11442 *expr_p = tmp;
11443 recalculate_side_effects (*expr_p);
11444 ret = GS_OK;
11445 break;
11447 /* Avoid re-gimplifying the address operand if it is already
11448 in suitable form. Re-gimplifying would mark the address
11449 operand addressable. Always gimplify when not in SSA form
11450 as we still may have to gimplify decls with value-exprs. */
11451 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11452 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11454 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11455 is_gimple_mem_ref_addr, fb_rvalue);
11456 if (ret == GS_ERROR)
11457 break;
11459 recalculate_side_effects (*expr_p);
11460 ret = GS_ALL_DONE;
11461 break;
11463 /* Constants need not be gimplified. */
11464 case INTEGER_CST:
11465 case REAL_CST:
11466 case FIXED_CST:
11467 case STRING_CST:
11468 case COMPLEX_CST:
11469 case VECTOR_CST:
11470 /* Drop the overflow flag on constants, we do not want
11471 that in the GIMPLE IL. */
11472 if (TREE_OVERFLOW_P (*expr_p))
11473 *expr_p = drop_tree_overflow (*expr_p);
11474 ret = GS_ALL_DONE;
11475 break;
11477 case CONST_DECL:
11478 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11479 CONST_DECL node. Otherwise the decl is replaceable by its
11480 value. */
11481 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11482 if (fallback & fb_lvalue)
11483 ret = GS_ALL_DONE;
11484 else
11486 *expr_p = DECL_INITIAL (*expr_p);
11487 ret = GS_OK;
11489 break;
11491 case DECL_EXPR:
11492 ret = gimplify_decl_expr (expr_p, pre_p);
11493 break;
11495 case BIND_EXPR:
11496 ret = gimplify_bind_expr (expr_p, pre_p);
11497 break;
11499 case LOOP_EXPR:
11500 ret = gimplify_loop_expr (expr_p, pre_p);
11501 break;
11503 case SWITCH_EXPR:
11504 ret = gimplify_switch_expr (expr_p, pre_p);
11505 break;
11507 case EXIT_EXPR:
11508 ret = gimplify_exit_expr (expr_p);
11509 break;
11511 case GOTO_EXPR:
11512 /* If the target is not LABEL, then it is a computed jump
11513 and the target needs to be gimplified. */
11514 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11516 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11517 NULL, is_gimple_val, fb_rvalue);
11518 if (ret == GS_ERROR)
11519 break;
11521 gimplify_seq_add_stmt (pre_p,
11522 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11523 ret = GS_ALL_DONE;
11524 break;
11526 case PREDICT_EXPR:
11527 gimplify_seq_add_stmt (pre_p,
11528 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11529 PREDICT_EXPR_OUTCOME (*expr_p)));
11530 ret = GS_ALL_DONE;
11531 break;
11533 case LABEL_EXPR:
11534 ret = gimplify_label_expr (expr_p, pre_p);
11535 label = LABEL_EXPR_LABEL (*expr_p);
11536 gcc_assert (decl_function_context (label) == current_function_decl);
11538 /* If the label is used in a goto statement, or address of the label
11539 is taken, we need to unpoison all variables that were seen so far.
11540 Doing so would prevent us from reporting a false positives. */
11541 if (asan_poisoned_variables
11542 && asan_used_labels != NULL
11543 && asan_used_labels->contains (label))
11544 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11545 break;
11547 case CASE_LABEL_EXPR:
11548 ret = gimplify_case_label_expr (expr_p, pre_p);
11550 if (gimplify_ctxp->live_switch_vars)
11551 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11552 pre_p);
11553 break;
11555 case RETURN_EXPR:
11556 ret = gimplify_return_expr (*expr_p, pre_p);
11557 break;
11559 case CONSTRUCTOR:
11560 /* Don't reduce this in place; let gimplify_init_constructor work its
11561 magic. Buf if we're just elaborating this for side effects, just
11562 gimplify any element that has side-effects. */
11563 if (fallback == fb_none)
11565 unsigned HOST_WIDE_INT ix;
11566 tree val;
11567 tree temp = NULL_TREE;
11568 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11569 if (TREE_SIDE_EFFECTS (val))
11570 append_to_statement_list (val, &temp);
11572 *expr_p = temp;
11573 ret = temp ? GS_OK : GS_ALL_DONE;
11575 /* C99 code may assign to an array in a constructed
11576 structure or union, and this has undefined behavior only
11577 on execution, so create a temporary if an lvalue is
11578 required. */
11579 else if (fallback == fb_lvalue)
11581 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11582 mark_addressable (*expr_p);
11583 ret = GS_OK;
11585 else
11586 ret = GS_ALL_DONE;
11587 break;
11589 /* The following are special cases that are not handled by the
11590 original GIMPLE grammar. */
11592 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11593 eliminated. */
11594 case SAVE_EXPR:
11595 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11596 break;
11598 case BIT_FIELD_REF:
11599 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11600 post_p, is_gimple_lvalue, fb_either);
11601 recalculate_side_effects (*expr_p);
11602 break;
11604 case TARGET_MEM_REF:
11606 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11608 if (TMR_BASE (*expr_p))
11609 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11610 post_p, is_gimple_mem_ref_addr, fb_either);
11611 if (TMR_INDEX (*expr_p))
11612 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11613 post_p, is_gimple_val, fb_rvalue);
11614 if (TMR_INDEX2 (*expr_p))
11615 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11616 post_p, is_gimple_val, fb_rvalue);
11617 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11618 ret = MIN (r0, r1);
11620 break;
11622 case NON_LVALUE_EXPR:
11623 /* This should have been stripped above. */
11624 gcc_unreachable ();
11626 case ASM_EXPR:
11627 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11628 break;
11630 case TRY_FINALLY_EXPR:
11631 case TRY_CATCH_EXPR:
11633 gimple_seq eval, cleanup;
11634 gtry *try_;
11636 /* Calls to destructors are generated automatically in FINALLY/CATCH
11637 block. They should have location as UNKNOWN_LOCATION. However,
11638 gimplify_call_expr will reset these call stmts to input_location
11639 if it finds stmt's location is unknown. To prevent resetting for
11640 destructors, we set the input_location to unknown.
11641 Note that this only affects the destructor calls in FINALLY/CATCH
11642 block, and will automatically reset to its original value by the
11643 end of gimplify_expr. */
11644 input_location = UNKNOWN_LOCATION;
11645 eval = cleanup = NULL;
11646 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11647 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11648 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11649 if (gimple_seq_empty_p (cleanup))
11651 gimple_seq_add_seq (pre_p, eval);
11652 ret = GS_ALL_DONE;
11653 break;
11655 try_ = gimple_build_try (eval, cleanup,
11656 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11657 ? GIMPLE_TRY_FINALLY
11658 : GIMPLE_TRY_CATCH);
11659 if (EXPR_HAS_LOCATION (save_expr))
11660 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11661 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11662 gimple_set_location (try_, saved_location);
11663 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11664 gimple_try_set_catch_is_cleanup (try_,
11665 TRY_CATCH_IS_CLEANUP (*expr_p));
11666 gimplify_seq_add_stmt (pre_p, try_);
11667 ret = GS_ALL_DONE;
11668 break;
11671 case CLEANUP_POINT_EXPR:
11672 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11673 break;
11675 case TARGET_EXPR:
11676 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11677 break;
11679 case CATCH_EXPR:
11681 gimple *c;
11682 gimple_seq handler = NULL;
11683 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11684 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11685 gimplify_seq_add_stmt (pre_p, c);
11686 ret = GS_ALL_DONE;
11687 break;
11690 case EH_FILTER_EXPR:
11692 gimple *ehf;
11693 gimple_seq failure = NULL;
11695 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11696 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11697 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11698 gimplify_seq_add_stmt (pre_p, ehf);
11699 ret = GS_ALL_DONE;
11700 break;
11703 case OBJ_TYPE_REF:
11705 enum gimplify_status r0, r1;
11706 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11707 post_p, is_gimple_val, fb_rvalue);
11708 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11709 post_p, is_gimple_val, fb_rvalue);
11710 TREE_SIDE_EFFECTS (*expr_p) = 0;
11711 ret = MIN (r0, r1);
11713 break;
11715 case LABEL_DECL:
11716 /* We get here when taking the address of a label. We mark
11717 the label as "forced"; meaning it can never be removed and
11718 it is a potential target for any computed goto. */
11719 FORCED_LABEL (*expr_p) = 1;
11720 ret = GS_ALL_DONE;
11721 break;
11723 case STATEMENT_LIST:
11724 ret = gimplify_statement_list (expr_p, pre_p);
11725 break;
11727 case WITH_SIZE_EXPR:
11729 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11730 post_p == &internal_post ? NULL : post_p,
11731 gimple_test_f, fallback);
11732 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11733 is_gimple_val, fb_rvalue);
11734 ret = GS_ALL_DONE;
11736 break;
11738 case VAR_DECL:
11739 case PARM_DECL:
11740 ret = gimplify_var_or_parm_decl (expr_p);
11741 break;
11743 case RESULT_DECL:
11744 /* When within an OMP context, notice uses of variables. */
11745 if (gimplify_omp_ctxp)
11746 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11747 ret = GS_ALL_DONE;
11748 break;
11750 case SSA_NAME:
11751 /* Allow callbacks into the gimplifier during optimization. */
11752 ret = GS_ALL_DONE;
11753 break;
11755 case OMP_PARALLEL:
11756 gimplify_omp_parallel (expr_p, pre_p);
11757 ret = GS_ALL_DONE;
11758 break;
11760 case OMP_TASK:
11761 gimplify_omp_task (expr_p, pre_p);
11762 ret = GS_ALL_DONE;
11763 break;
11765 case OMP_FOR:
11766 case OMP_SIMD:
11767 case CILK_SIMD:
11768 case CILK_FOR:
11769 case OMP_DISTRIBUTE:
11770 case OMP_TASKLOOP:
11771 case OACC_LOOP:
11772 ret = gimplify_omp_for (expr_p, pre_p);
11773 break;
11775 case OACC_CACHE:
11776 gimplify_oacc_cache (expr_p, pre_p);
11777 ret = GS_ALL_DONE;
11778 break;
11780 case OACC_DECLARE:
11781 gimplify_oacc_declare (expr_p, pre_p);
11782 ret = GS_ALL_DONE;
11783 break;
11785 case OACC_HOST_DATA:
11786 case OACC_DATA:
11787 case OACC_KERNELS:
11788 case OACC_PARALLEL:
11789 case OMP_SECTIONS:
11790 case OMP_SINGLE:
11791 case OMP_TARGET:
11792 case OMP_TARGET_DATA:
11793 case OMP_TEAMS:
11794 gimplify_omp_workshare (expr_p, pre_p);
11795 ret = GS_ALL_DONE;
11796 break;
11798 case OACC_ENTER_DATA:
11799 case OACC_EXIT_DATA:
11800 case OACC_UPDATE:
11801 case OMP_TARGET_UPDATE:
11802 case OMP_TARGET_ENTER_DATA:
11803 case OMP_TARGET_EXIT_DATA:
11804 gimplify_omp_target_update (expr_p, pre_p);
11805 ret = GS_ALL_DONE;
11806 break;
11808 case OMP_SECTION:
11809 case OMP_MASTER:
11810 case OMP_TASKGROUP:
11811 case OMP_ORDERED:
11812 case OMP_CRITICAL:
11814 gimple_seq body = NULL;
11815 gimple *g;
11817 gimplify_and_add (OMP_BODY (*expr_p), &body);
11818 switch (TREE_CODE (*expr_p))
11820 case OMP_SECTION:
11821 g = gimple_build_omp_section (body);
11822 break;
11823 case OMP_MASTER:
11824 g = gimple_build_omp_master (body);
11825 break;
11826 case OMP_TASKGROUP:
11828 gimple_seq cleanup = NULL;
11829 tree fn
11830 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11831 g = gimple_build_call (fn, 0);
11832 gimple_seq_add_stmt (&cleanup, g);
11833 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11834 body = NULL;
11835 gimple_seq_add_stmt (&body, g);
11836 g = gimple_build_omp_taskgroup (body);
11838 break;
11839 case OMP_ORDERED:
11840 g = gimplify_omp_ordered (*expr_p, body);
11841 break;
11842 case OMP_CRITICAL:
11843 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11844 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11845 gimplify_adjust_omp_clauses (pre_p, body,
11846 &OMP_CRITICAL_CLAUSES (*expr_p),
11847 OMP_CRITICAL);
11848 g = gimple_build_omp_critical (body,
11849 OMP_CRITICAL_NAME (*expr_p),
11850 OMP_CRITICAL_CLAUSES (*expr_p));
11851 break;
11852 default:
11853 gcc_unreachable ();
11855 gimplify_seq_add_stmt (pre_p, g);
11856 ret = GS_ALL_DONE;
11857 break;
11860 case OMP_ATOMIC:
11861 case OMP_ATOMIC_READ:
11862 case OMP_ATOMIC_CAPTURE_OLD:
11863 case OMP_ATOMIC_CAPTURE_NEW:
11864 ret = gimplify_omp_atomic (expr_p, pre_p);
11865 break;
11867 case TRANSACTION_EXPR:
11868 ret = gimplify_transaction (expr_p, pre_p);
11869 break;
11871 case TRUTH_AND_EXPR:
11872 case TRUTH_OR_EXPR:
11873 case TRUTH_XOR_EXPR:
11875 tree orig_type = TREE_TYPE (*expr_p);
11876 tree new_type, xop0, xop1;
11877 *expr_p = gimple_boolify (*expr_p);
11878 new_type = TREE_TYPE (*expr_p);
11879 if (!useless_type_conversion_p (orig_type, new_type))
11881 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11882 ret = GS_OK;
11883 break;
11886 /* Boolified binary truth expressions are semantically equivalent
11887 to bitwise binary expressions. Canonicalize them to the
11888 bitwise variant. */
11889 switch (TREE_CODE (*expr_p))
11891 case TRUTH_AND_EXPR:
11892 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11893 break;
11894 case TRUTH_OR_EXPR:
11895 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11896 break;
11897 case TRUTH_XOR_EXPR:
11898 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11899 break;
11900 default:
11901 break;
11903 /* Now make sure that operands have compatible type to
11904 expression's new_type. */
11905 xop0 = TREE_OPERAND (*expr_p, 0);
11906 xop1 = TREE_OPERAND (*expr_p, 1);
11907 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11908 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11909 new_type,
11910 xop0);
11911 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11912 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11913 new_type,
11914 xop1);
11915 /* Continue classified as tcc_binary. */
11916 goto expr_2;
11919 case VEC_COND_EXPR:
11921 enum gimplify_status r0, r1, r2;
11923 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11924 post_p, is_gimple_condexpr, fb_rvalue);
11925 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11926 post_p, is_gimple_val, fb_rvalue);
11927 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11928 post_p, is_gimple_val, fb_rvalue);
11930 ret = MIN (MIN (r0, r1), r2);
11931 recalculate_side_effects (*expr_p);
11933 break;
11935 case FMA_EXPR:
11936 case VEC_PERM_EXPR:
11937 /* Classified as tcc_expression. */
11938 goto expr_3;
11940 case BIT_INSERT_EXPR:
11941 /* Argument 3 is a constant. */
11942 goto expr_2;
11944 case POINTER_PLUS_EXPR:
11946 enum gimplify_status r0, r1;
11947 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11948 post_p, is_gimple_val, fb_rvalue);
11949 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11950 post_p, is_gimple_val, fb_rvalue);
11951 recalculate_side_effects (*expr_p);
11952 ret = MIN (r0, r1);
11953 break;
11956 case CILK_SYNC_STMT:
11958 if (!fn_contains_cilk_spawn_p (cfun))
11960 error_at (EXPR_LOCATION (*expr_p),
11961 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11962 ret = GS_ERROR;
11964 else
11966 gimplify_cilk_sync (expr_p, pre_p);
11967 ret = GS_ALL_DONE;
11969 break;
11972 default:
11973 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11975 case tcc_comparison:
11976 /* Handle comparison of objects of non scalar mode aggregates
11977 with a call to memcmp. It would be nice to only have to do
11978 this for variable-sized objects, but then we'd have to allow
11979 the same nest of reference nodes we allow for MODIFY_EXPR and
11980 that's too complex.
11982 Compare scalar mode aggregates as scalar mode values. Using
11983 memcmp for them would be very inefficient at best, and is
11984 plain wrong if bitfields are involved. */
11986 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11988 /* Vector comparisons need no boolification. */
11989 if (TREE_CODE (type) == VECTOR_TYPE)
11990 goto expr_2;
11991 else if (!AGGREGATE_TYPE_P (type))
11993 tree org_type = TREE_TYPE (*expr_p);
11994 *expr_p = gimple_boolify (*expr_p);
11995 if (!useless_type_conversion_p (org_type,
11996 TREE_TYPE (*expr_p)))
11998 *expr_p = fold_convert_loc (input_location,
11999 org_type, *expr_p);
12000 ret = GS_OK;
12002 else
12003 goto expr_2;
12005 else if (TYPE_MODE (type) != BLKmode)
12006 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12007 else
12008 ret = gimplify_variable_sized_compare (expr_p);
12010 break;
12013 /* If *EXPR_P does not need to be special-cased, handle it
12014 according to its class. */
12015 case tcc_unary:
12016 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12017 post_p, is_gimple_val, fb_rvalue);
12018 break;
12020 case tcc_binary:
12021 expr_2:
12023 enum gimplify_status r0, r1;
12025 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12026 post_p, is_gimple_val, fb_rvalue);
12027 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12028 post_p, is_gimple_val, fb_rvalue);
12030 ret = MIN (r0, r1);
12031 break;
12034 expr_3:
12036 enum gimplify_status r0, r1, r2;
12038 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12039 post_p, is_gimple_val, fb_rvalue);
12040 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12041 post_p, is_gimple_val, fb_rvalue);
12042 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12043 post_p, is_gimple_val, fb_rvalue);
12045 ret = MIN (MIN (r0, r1), r2);
12046 break;
12049 case tcc_declaration:
12050 case tcc_constant:
12051 ret = GS_ALL_DONE;
12052 goto dont_recalculate;
12054 default:
12055 gcc_unreachable ();
12058 recalculate_side_effects (*expr_p);
12060 dont_recalculate:
12061 break;
12064 gcc_assert (*expr_p || ret != GS_OK);
12066 while (ret == GS_OK);
12068 /* If we encountered an error_mark somewhere nested inside, either
12069 stub out the statement or propagate the error back out. */
12070 if (ret == GS_ERROR)
12072 if (is_statement)
12073 *expr_p = NULL;
12074 goto out;
12077 /* This was only valid as a return value from the langhook, which
12078 we handled. Make sure it doesn't escape from any other context. */
12079 gcc_assert (ret != GS_UNHANDLED);
12081 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12083 /* We aren't looking for a value, and we don't have a valid
12084 statement. If it doesn't have side-effects, throw it away.
12085 We can also get here with code such as "*&&L;", where L is
12086 a LABEL_DECL that is marked as FORCED_LABEL. */
12087 if (TREE_CODE (*expr_p) == LABEL_DECL
12088 || !TREE_SIDE_EFFECTS (*expr_p))
12089 *expr_p = NULL;
12090 else if (!TREE_THIS_VOLATILE (*expr_p))
12092 /* This is probably a _REF that contains something nested that
12093 has side effects. Recurse through the operands to find it. */
12094 enum tree_code code = TREE_CODE (*expr_p);
12096 switch (code)
12098 case COMPONENT_REF:
12099 case REALPART_EXPR:
12100 case IMAGPART_EXPR:
12101 case VIEW_CONVERT_EXPR:
12102 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12103 gimple_test_f, fallback);
12104 break;
12106 case ARRAY_REF:
12107 case ARRAY_RANGE_REF:
12108 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12109 gimple_test_f, fallback);
12110 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12111 gimple_test_f, fallback);
12112 break;
12114 default:
12115 /* Anything else with side-effects must be converted to
12116 a valid statement before we get here. */
12117 gcc_unreachable ();
12120 *expr_p = NULL;
12122 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12123 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12125 /* Historically, the compiler has treated a bare reference
12126 to a non-BLKmode volatile lvalue as forcing a load. */
12127 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12129 /* Normally, we do not want to create a temporary for a
12130 TREE_ADDRESSABLE type because such a type should not be
12131 copied by bitwise-assignment. However, we make an
12132 exception here, as all we are doing here is ensuring that
12133 we read the bytes that make up the type. We use
12134 create_tmp_var_raw because create_tmp_var will abort when
12135 given a TREE_ADDRESSABLE type. */
12136 tree tmp = create_tmp_var_raw (type, "vol");
12137 gimple_add_tmp_var (tmp);
12138 gimplify_assign (tmp, *expr_p, pre_p);
12139 *expr_p = NULL;
12141 else
12142 /* We can't do anything useful with a volatile reference to
12143 an incomplete type, so just throw it away. Likewise for
12144 a BLKmode type, since any implicit inner load should
12145 already have been turned into an explicit one by the
12146 gimplification process. */
12147 *expr_p = NULL;
12150 /* If we are gimplifying at the statement level, we're done. Tack
12151 everything together and return. */
12152 if (fallback == fb_none || is_statement)
12154 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12155 it out for GC to reclaim it. */
12156 *expr_p = NULL_TREE;
12158 if (!gimple_seq_empty_p (internal_pre)
12159 || !gimple_seq_empty_p (internal_post))
12161 gimplify_seq_add_seq (&internal_pre, internal_post);
12162 gimplify_seq_add_seq (pre_p, internal_pre);
12165 /* The result of gimplifying *EXPR_P is going to be the last few
12166 statements in *PRE_P and *POST_P. Add location information
12167 to all the statements that were added by the gimplification
12168 helpers. */
12169 if (!gimple_seq_empty_p (*pre_p))
12170 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12172 if (!gimple_seq_empty_p (*post_p))
12173 annotate_all_with_location_after (*post_p, post_last_gsi,
12174 input_location);
12176 goto out;
12179 #ifdef ENABLE_GIMPLE_CHECKING
12180 if (*expr_p)
12182 enum tree_code code = TREE_CODE (*expr_p);
12183 /* These expressions should already be in gimple IR form. */
12184 gcc_assert (code != MODIFY_EXPR
12185 && code != ASM_EXPR
12186 && code != BIND_EXPR
12187 && code != CATCH_EXPR
12188 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12189 && code != EH_FILTER_EXPR
12190 && code != GOTO_EXPR
12191 && code != LABEL_EXPR
12192 && code != LOOP_EXPR
12193 && code != SWITCH_EXPR
12194 && code != TRY_FINALLY_EXPR
12195 && code != OACC_PARALLEL
12196 && code != OACC_KERNELS
12197 && code != OACC_DATA
12198 && code != OACC_HOST_DATA
12199 && code != OACC_DECLARE
12200 && code != OACC_UPDATE
12201 && code != OACC_ENTER_DATA
12202 && code != OACC_EXIT_DATA
12203 && code != OACC_CACHE
12204 && code != OMP_CRITICAL
12205 && code != OMP_FOR
12206 && code != OACC_LOOP
12207 && code != OMP_MASTER
12208 && code != OMP_TASKGROUP
12209 && code != OMP_ORDERED
12210 && code != OMP_PARALLEL
12211 && code != OMP_SECTIONS
12212 && code != OMP_SECTION
12213 && code != OMP_SINGLE);
12215 #endif
12217 /* Otherwise we're gimplifying a subexpression, so the resulting
12218 value is interesting. If it's a valid operand that matches
12219 GIMPLE_TEST_F, we're done. Unless we are handling some
12220 post-effects internally; if that's the case, we need to copy into
12221 a temporary before adding the post-effects to POST_P. */
12222 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12223 goto out;
12225 /* Otherwise, we need to create a new temporary for the gimplified
12226 expression. */
12228 /* We can't return an lvalue if we have an internal postqueue. The
12229 object the lvalue refers to would (probably) be modified by the
12230 postqueue; we need to copy the value out first, which means an
12231 rvalue. */
12232 if ((fallback & fb_lvalue)
12233 && gimple_seq_empty_p (internal_post)
12234 && is_gimple_addressable (*expr_p))
12236 /* An lvalue will do. Take the address of the expression, store it
12237 in a temporary, and replace the expression with an INDIRECT_REF of
12238 that temporary. */
12239 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12240 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12241 *expr_p = build_simple_mem_ref (tmp);
12243 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12245 /* An rvalue will do. Assign the gimplified expression into a
12246 new temporary TMP and replace the original expression with
12247 TMP. First, make sure that the expression has a type so that
12248 it can be assigned into a temporary. */
12249 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12250 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12252 else
12254 #ifdef ENABLE_GIMPLE_CHECKING
12255 if (!(fallback & fb_mayfail))
12257 fprintf (stderr, "gimplification failed:\n");
12258 print_generic_expr (stderr, *expr_p);
12259 debug_tree (*expr_p);
12260 internal_error ("gimplification failed");
12262 #endif
12263 gcc_assert (fallback & fb_mayfail);
12265 /* If this is an asm statement, and the user asked for the
12266 impossible, don't die. Fail and let gimplify_asm_expr
12267 issue an error. */
12268 ret = GS_ERROR;
12269 goto out;
12272 /* Make sure the temporary matches our predicate. */
12273 gcc_assert ((*gimple_test_f) (*expr_p));
12275 if (!gimple_seq_empty_p (internal_post))
12277 annotate_all_with_location (internal_post, input_location);
12278 gimplify_seq_add_seq (pre_p, internal_post);
12281 out:
12282 input_location = saved_location;
12283 return ret;
12286 /* Like gimplify_expr but make sure the gimplified result is not itself
12287 a SSA name (but a decl if it were). Temporaries required by
12288 evaluating *EXPR_P may be still SSA names. */
12290 static enum gimplify_status
12291 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12292 bool (*gimple_test_f) (tree), fallback_t fallback,
12293 bool allow_ssa)
12295 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12296 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12297 gimple_test_f, fallback);
12298 if (! allow_ssa
12299 && TREE_CODE (*expr_p) == SSA_NAME)
12301 tree name = *expr_p;
12302 if (was_ssa_name_p)
12303 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12304 else
12306 /* Avoid the extra copy if possible. */
12307 *expr_p = create_tmp_reg (TREE_TYPE (name));
12308 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12309 release_ssa_name (name);
12312 return ret;
12315 /* Look through TYPE for variable-sized objects and gimplify each such
12316 size that we find. Add to LIST_P any statements generated. */
12318 void
12319 gimplify_type_sizes (tree type, gimple_seq *list_p)
12321 tree field, t;
12323 if (type == NULL || type == error_mark_node)
12324 return;
12326 /* We first do the main variant, then copy into any other variants. */
12327 type = TYPE_MAIN_VARIANT (type);
12329 /* Avoid infinite recursion. */
12330 if (TYPE_SIZES_GIMPLIFIED (type))
12331 return;
12333 TYPE_SIZES_GIMPLIFIED (type) = 1;
12335 switch (TREE_CODE (type))
12337 case INTEGER_TYPE:
12338 case ENUMERAL_TYPE:
12339 case BOOLEAN_TYPE:
12340 case REAL_TYPE:
12341 case FIXED_POINT_TYPE:
12342 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12343 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12345 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12347 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12348 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12350 break;
12352 case ARRAY_TYPE:
12353 /* These types may not have declarations, so handle them here. */
12354 gimplify_type_sizes (TREE_TYPE (type), list_p);
12355 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12356 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12357 with assigned stack slots, for -O1+ -g they should be tracked
12358 by VTA. */
12359 if (!(TYPE_NAME (type)
12360 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12361 && DECL_IGNORED_P (TYPE_NAME (type)))
12362 && TYPE_DOMAIN (type)
12363 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12365 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12366 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12367 DECL_IGNORED_P (t) = 0;
12368 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12369 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12370 DECL_IGNORED_P (t) = 0;
12372 break;
12374 case RECORD_TYPE:
12375 case UNION_TYPE:
12376 case QUAL_UNION_TYPE:
12377 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12378 if (TREE_CODE (field) == FIELD_DECL)
12380 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12381 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12382 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12383 gimplify_type_sizes (TREE_TYPE (field), list_p);
12385 break;
12387 case POINTER_TYPE:
12388 case REFERENCE_TYPE:
12389 /* We used to recurse on the pointed-to type here, which turned out to
12390 be incorrect because its definition might refer to variables not
12391 yet initialized at this point if a forward declaration is involved.
12393 It was actually useful for anonymous pointed-to types to ensure
12394 that the sizes evaluation dominates every possible later use of the
12395 values. Restricting to such types here would be safe since there
12396 is no possible forward declaration around, but would introduce an
12397 undesirable middle-end semantic to anonymity. We then defer to
12398 front-ends the responsibility of ensuring that the sizes are
12399 evaluated both early and late enough, e.g. by attaching artificial
12400 type declarations to the tree. */
12401 break;
12403 default:
12404 break;
12407 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12408 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12410 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12412 TYPE_SIZE (t) = TYPE_SIZE (type);
12413 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12414 TYPE_SIZES_GIMPLIFIED (t) = 1;
12418 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12419 a size or position, has had all of its SAVE_EXPRs evaluated.
12420 We add any required statements to *STMT_P. */
12422 void
12423 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12425 tree expr = *expr_p;
12427 /* We don't do anything if the value isn't there, is constant, or contains
12428 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12429 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12430 will want to replace it with a new variable, but that will cause problems
12431 if this type is from outside the function. It's OK to have that here. */
12432 if (is_gimple_sizepos (expr))
12433 return;
12435 *expr_p = unshare_expr (expr);
12437 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12438 if the def vanishes. */
12439 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12442 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12443 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12444 is true, also gimplify the parameters. */
12446 gbind *
12447 gimplify_body (tree fndecl, bool do_parms)
12449 location_t saved_location = input_location;
12450 gimple_seq parm_stmts, seq;
12451 gimple *outer_stmt;
12452 gbind *outer_bind;
12453 struct cgraph_node *cgn;
12455 timevar_push (TV_TREE_GIMPLIFY);
12457 init_tree_ssa (cfun);
12459 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12460 gimplification. */
12461 default_rtl_profile ();
12463 gcc_assert (gimplify_ctxp == NULL);
12464 push_gimplify_context (true);
12466 if (flag_openacc || flag_openmp)
12468 gcc_assert (gimplify_omp_ctxp == NULL);
12469 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12470 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12473 /* Unshare most shared trees in the body and in that of any nested functions.
12474 It would seem we don't have to do this for nested functions because
12475 they are supposed to be output and then the outer function gimplified
12476 first, but the g++ front end doesn't always do it that way. */
12477 unshare_body (fndecl);
12478 unvisit_body (fndecl);
12480 cgn = cgraph_node::get (fndecl);
12481 if (cgn && cgn->origin)
12482 nonlocal_vlas = new hash_set<tree>;
12484 /* Make sure input_location isn't set to something weird. */
12485 input_location = DECL_SOURCE_LOCATION (fndecl);
12487 /* Resolve callee-copies. This has to be done before processing
12488 the body so that DECL_VALUE_EXPR gets processed correctly. */
12489 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12491 /* Gimplify the function's body. */
12492 seq = NULL;
12493 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12494 outer_stmt = gimple_seq_first_stmt (seq);
12495 if (!outer_stmt)
12497 outer_stmt = gimple_build_nop ();
12498 gimplify_seq_add_stmt (&seq, outer_stmt);
12501 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12502 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12503 if (gimple_code (outer_stmt) == GIMPLE_BIND
12504 && gimple_seq_first (seq) == gimple_seq_last (seq))
12505 outer_bind = as_a <gbind *> (outer_stmt);
12506 else
12507 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12509 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12511 /* If we had callee-copies statements, insert them at the beginning
12512 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12513 if (!gimple_seq_empty_p (parm_stmts))
12515 tree parm;
12517 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12518 gimple_bind_set_body (outer_bind, parm_stmts);
12520 for (parm = DECL_ARGUMENTS (current_function_decl);
12521 parm; parm = DECL_CHAIN (parm))
12522 if (DECL_HAS_VALUE_EXPR_P (parm))
12524 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12525 DECL_IGNORED_P (parm) = 0;
12529 if (nonlocal_vlas)
12531 if (nonlocal_vla_vars)
12533 /* tree-nested.c may later on call declare_vars (..., true);
12534 which relies on BLOCK_VARS chain to be the tail of the
12535 gimple_bind_vars chain. Ensure we don't violate that
12536 assumption. */
12537 if (gimple_bind_block (outer_bind)
12538 == DECL_INITIAL (current_function_decl))
12539 declare_vars (nonlocal_vla_vars, outer_bind, true);
12540 else
12541 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12542 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12543 nonlocal_vla_vars);
12544 nonlocal_vla_vars = NULL_TREE;
12546 delete nonlocal_vlas;
12547 nonlocal_vlas = NULL;
12550 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12551 && gimplify_omp_ctxp)
12553 delete_omp_context (gimplify_omp_ctxp);
12554 gimplify_omp_ctxp = NULL;
12557 pop_gimplify_context (outer_bind);
12558 gcc_assert (gimplify_ctxp == NULL);
12560 if (flag_checking && !seen_error ())
12561 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12563 timevar_pop (TV_TREE_GIMPLIFY);
12564 input_location = saved_location;
12566 return outer_bind;
12569 typedef char *char_p; /* For DEF_VEC_P. */
12571 /* Return whether we should exclude FNDECL from instrumentation. */
12573 static bool
12574 flag_instrument_functions_exclude_p (tree fndecl)
12576 vec<char_p> *v;
12578 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12579 if (v && v->length () > 0)
12581 const char *name;
12582 int i;
12583 char *s;
12585 name = lang_hooks.decl_printable_name (fndecl, 0);
12586 FOR_EACH_VEC_ELT (*v, i, s)
12587 if (strstr (name, s) != NULL)
12588 return true;
12591 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12592 if (v && v->length () > 0)
12594 const char *name;
12595 int i;
12596 char *s;
12598 name = DECL_SOURCE_FILE (fndecl);
12599 FOR_EACH_VEC_ELT (*v, i, s)
12600 if (strstr (name, s) != NULL)
12601 return true;
12604 return false;
12607 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12608 node for the function we want to gimplify.
12610 Return the sequence of GIMPLE statements corresponding to the body
12611 of FNDECL. */
12613 void
12614 gimplify_function_tree (tree fndecl)
12616 tree parm, ret;
12617 gimple_seq seq;
12618 gbind *bind;
12620 gcc_assert (!gimple_body (fndecl));
12622 if (DECL_STRUCT_FUNCTION (fndecl))
12623 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12624 else
12625 push_struct_function (fndecl);
12627 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12628 if necessary. */
12629 cfun->curr_properties |= PROP_gimple_lva;
12631 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12633 /* Preliminarily mark non-addressed complex variables as eligible
12634 for promotion to gimple registers. We'll transform their uses
12635 as we find them. */
12636 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12637 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12638 && !TREE_THIS_VOLATILE (parm)
12639 && !needs_to_live_in_memory (parm))
12640 DECL_GIMPLE_REG_P (parm) = 1;
12643 ret = DECL_RESULT (fndecl);
12644 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12645 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12646 && !needs_to_live_in_memory (ret))
12647 DECL_GIMPLE_REG_P (ret) = 1;
12649 if (asan_sanitize_use_after_scope () && !asan_no_sanitize_address_p ())
12650 asan_poisoned_variables = new hash_set<tree> ();
12651 bind = gimplify_body (fndecl, true);
12652 if (asan_poisoned_variables)
12654 delete asan_poisoned_variables;
12655 asan_poisoned_variables = NULL;
12658 /* The tree body of the function is no longer needed, replace it
12659 with the new GIMPLE body. */
12660 seq = NULL;
12661 gimple_seq_add_stmt (&seq, bind);
12662 gimple_set_body (fndecl, seq);
12664 /* If we're instrumenting function entry/exit, then prepend the call to
12665 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12666 catch the exit hook. */
12667 /* ??? Add some way to ignore exceptions for this TFE. */
12668 if (flag_instrument_function_entry_exit
12669 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12670 /* Do not instrument extern inline functions. */
12671 && !(DECL_DECLARED_INLINE_P (fndecl)
12672 && DECL_EXTERNAL (fndecl)
12673 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12674 && !flag_instrument_functions_exclude_p (fndecl))
12676 tree x;
12677 gbind *new_bind;
12678 gimple *tf;
12679 gimple_seq cleanup = NULL, body = NULL;
12680 tree tmp_var;
12681 gcall *call;
12683 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12684 call = gimple_build_call (x, 1, integer_zero_node);
12685 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12686 gimple_call_set_lhs (call, tmp_var);
12687 gimplify_seq_add_stmt (&cleanup, call);
12688 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12689 call = gimple_build_call (x, 2,
12690 build_fold_addr_expr (current_function_decl),
12691 tmp_var);
12692 gimplify_seq_add_stmt (&cleanup, call);
12693 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12695 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12696 call = gimple_build_call (x, 1, integer_zero_node);
12697 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12698 gimple_call_set_lhs (call, tmp_var);
12699 gimplify_seq_add_stmt (&body, call);
12700 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12701 call = gimple_build_call (x, 2,
12702 build_fold_addr_expr (current_function_decl),
12703 tmp_var);
12704 gimplify_seq_add_stmt (&body, call);
12705 gimplify_seq_add_stmt (&body, tf);
12706 new_bind = gimple_build_bind (NULL, body, NULL);
12708 /* Replace the current function body with the body
12709 wrapped in the try/finally TF. */
12710 seq = NULL;
12711 gimple_seq_add_stmt (&seq, new_bind);
12712 gimple_set_body (fndecl, seq);
12713 bind = new_bind;
12716 if ((flag_sanitize & SANITIZE_THREAD) != 0
12717 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
12719 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12720 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12721 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12722 /* Replace the current function body with the body
12723 wrapped in the try/finally TF. */
12724 seq = NULL;
12725 gimple_seq_add_stmt (&seq, new_bind);
12726 gimple_set_body (fndecl, seq);
12729 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12730 cfun->curr_properties |= PROP_gimple_any;
12732 pop_cfun ();
12734 dump_function (TDI_generic, fndecl);
12737 /* Return a dummy expression of type TYPE in order to keep going after an
12738 error. */
12740 static tree
12741 dummy_object (tree type)
12743 tree t = build_int_cst (build_pointer_type (type), 0);
12744 return build2 (MEM_REF, type, t, t);
12747 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12748 builtin function, but a very special sort of operator. */
12750 enum gimplify_status
12751 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12752 gimple_seq *post_p ATTRIBUTE_UNUSED)
12754 tree promoted_type, have_va_type;
12755 tree valist = TREE_OPERAND (*expr_p, 0);
12756 tree type = TREE_TYPE (*expr_p);
12757 tree t, tag, aptag;
12758 location_t loc = EXPR_LOCATION (*expr_p);
12760 /* Verify that valist is of the proper type. */
12761 have_va_type = TREE_TYPE (valist);
12762 if (have_va_type == error_mark_node)
12763 return GS_ERROR;
12764 have_va_type = targetm.canonical_va_list_type (have_va_type);
12765 if (have_va_type == NULL_TREE
12766 && POINTER_TYPE_P (TREE_TYPE (valist)))
12767 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12768 have_va_type
12769 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12770 gcc_assert (have_va_type != NULL_TREE);
12772 /* Generate a diagnostic for requesting data of a type that cannot
12773 be passed through `...' due to type promotion at the call site. */
12774 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12775 != type)
12777 static bool gave_help;
12778 bool warned;
12779 /* Use the expansion point to handle cases such as passing bool (defined
12780 in a system header) through `...'. */
12781 source_location xloc
12782 = expansion_point_location_if_in_system_header (loc);
12784 /* Unfortunately, this is merely undefined, rather than a constraint
12785 violation, so we cannot make this an error. If this call is never
12786 executed, the program is still strictly conforming. */
12787 warned = warning_at (xloc, 0,
12788 "%qT is promoted to %qT when passed through %<...%>",
12789 type, promoted_type);
12790 if (!gave_help && warned)
12792 gave_help = true;
12793 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12794 promoted_type, type);
12797 /* We can, however, treat "undefined" any way we please.
12798 Call abort to encourage the user to fix the program. */
12799 if (warned)
12800 inform (xloc, "if this code is reached, the program will abort");
12801 /* Before the abort, allow the evaluation of the va_list
12802 expression to exit or longjmp. */
12803 gimplify_and_add (valist, pre_p);
12804 t = build_call_expr_loc (loc,
12805 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12806 gimplify_and_add (t, pre_p);
12808 /* This is dead code, but go ahead and finish so that the
12809 mode of the result comes out right. */
12810 *expr_p = dummy_object (type);
12811 return GS_ALL_DONE;
12814 tag = build_int_cst (build_pointer_type (type), 0);
12815 aptag = build_int_cst (TREE_TYPE (valist), 0);
12817 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12818 valist, tag, aptag);
12820 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12821 needs to be expanded. */
12822 cfun->curr_properties &= ~PROP_gimple_lva;
12824 return GS_OK;
12827 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12829 DST/SRC are the destination and source respectively. You can pass
12830 ungimplified trees in DST or SRC, in which case they will be
12831 converted to a gimple operand if necessary.
12833 This function returns the newly created GIMPLE_ASSIGN tuple. */
12835 gimple *
12836 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12838 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12839 gimplify_and_add (t, seq_p);
12840 ggc_free (t);
12841 return gimple_seq_last_stmt (*seq_p);
12844 inline hashval_t
12845 gimplify_hasher::hash (const elt_t *p)
12847 tree t = p->val;
12848 return iterative_hash_expr (t, 0);
12851 inline bool
12852 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12854 tree t1 = p1->val;
12855 tree t2 = p2->val;
12856 enum tree_code code = TREE_CODE (t1);
12858 if (TREE_CODE (t2) != code
12859 || TREE_TYPE (t1) != TREE_TYPE (t2))
12860 return false;
12862 if (!operand_equal_p (t1, t2, 0))
12863 return false;
12865 /* Only allow them to compare equal if they also hash equal; otherwise
12866 results are nondeterminate, and we fail bootstrap comparison. */
12867 gcc_checking_assert (hash (p1) == hash (p2));
12869 return true;