PR c/69389
[official-gcc.git] / gcc / gimplify.c
blobed2ec646da2ec451f5879d62c5b7fab01fa6cb44
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-general.h"
55 #include "omp-low.h"
56 #include "gimple-low.h"
57 #include "cilk.h"
58 #include "gomp-constants.h"
59 #include "splay-tree.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
62 #include "builtins.h"
63 #include "asan.h"
64 #include "dbgcnt.h"
66 /* Hash set of poisoned variables in a bind expr. */
67 static hash_set<tree> *asan_poisoned_variables = NULL;
69 enum gimplify_omp_var_data
71 GOVD_SEEN = 1,
72 GOVD_EXPLICIT = 2,
73 GOVD_SHARED = 4,
74 GOVD_PRIVATE = 8,
75 GOVD_FIRSTPRIVATE = 16,
76 GOVD_LASTPRIVATE = 32,
77 GOVD_REDUCTION = 64,
78 GOVD_LOCAL = 128,
79 GOVD_MAP = 256,
80 GOVD_DEBUG_PRIVATE = 512,
81 GOVD_PRIVATE_OUTER_REF = 1024,
82 GOVD_LINEAR = 2048,
83 GOVD_ALIGNED = 4096,
85 /* Flag for GOVD_MAP: don't copy back. */
86 GOVD_MAP_TO_ONLY = 8192,
88 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
89 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
91 GOVD_MAP_0LEN_ARRAY = 32768,
93 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
94 GOVD_MAP_ALWAYS_TO = 65536,
96 /* Flag for shared vars that are or might be stored to in the region. */
97 GOVD_WRITTEN = 131072,
99 /* Flag for GOVD_MAP, if it is a forced mapping. */
100 GOVD_MAP_FORCE = 262144,
102 /* Flag for GOVD_MAP: must be present already. */
103 GOVD_MAP_FORCE_PRESENT = 524288,
105 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
106 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
107 | GOVD_LOCAL)
111 enum omp_region_type
113 ORT_WORKSHARE = 0x00,
114 ORT_SIMD = 0x01,
116 ORT_PARALLEL = 0x02,
117 ORT_COMBINED_PARALLEL = 0x03,
119 ORT_TASK = 0x04,
120 ORT_UNTIED_TASK = 0x05,
122 ORT_TEAMS = 0x08,
123 ORT_COMBINED_TEAMS = 0x09,
125 /* Data region. */
126 ORT_TARGET_DATA = 0x10,
128 /* Data region with offloading. */
129 ORT_TARGET = 0x20,
130 ORT_COMBINED_TARGET = 0x21,
132 /* OpenACC variants. */
133 ORT_ACC = 0x40, /* A generic OpenACC region. */
134 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
135 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
136 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
137 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
139 /* Dummy OpenMP region, used to disable expansion of
140 DECL_VALUE_EXPRs in taskloop pre body. */
141 ORT_NONE = 0x100
144 /* Gimplify hashtable helper. */
146 struct gimplify_hasher : free_ptr_hash <elt_t>
148 static inline hashval_t hash (const elt_t *);
149 static inline bool equal (const elt_t *, const elt_t *);
152 struct gimplify_ctx
154 struct gimplify_ctx *prev_context;
156 vec<gbind *> bind_expr_stack;
157 tree temps;
158 gimple_seq conditional_cleanups;
159 tree exit_label;
160 tree return_temp;
162 vec<tree> case_labels;
163 hash_set<tree> *live_switch_vars;
164 /* The formal temporary table. Should this be persistent? */
165 hash_table<gimplify_hasher> *temp_htab;
167 int conditions;
168 unsigned into_ssa : 1;
169 unsigned allow_rhs_cond_expr : 1;
170 unsigned in_cleanup_point_expr : 1;
171 unsigned keep_stack : 1;
172 unsigned save_stack : 1;
173 unsigned in_switch_expr : 1;
176 struct gimplify_omp_ctx
178 struct gimplify_omp_ctx *outer_context;
179 splay_tree variables;
180 hash_set<tree> *privatized_types;
181 /* Iteration variables in an OMP_FOR. */
182 vec<tree> loop_iter_var;
183 location_t location;
184 enum omp_clause_default_kind default_kind;
185 enum omp_region_type region_type;
186 bool combined_loop;
187 bool distribute;
188 bool target_map_scalars_firstprivate;
189 bool target_map_pointers_as_0len_arrays;
190 bool target_firstprivatize_array_bases;
193 static struct gimplify_ctx *gimplify_ctxp;
194 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
196 /* Forward declaration. */
197 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
198 static hash_map<tree, tree> *oacc_declare_returns;
199 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
200 bool (*) (tree), fallback_t, bool);
202 /* Shorter alias name for the above function for use in gimplify.c
203 only. */
205 static inline void
206 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
208 gimple_seq_add_stmt_without_update (seq_p, gs);
211 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
212 NULL, a new sequence is allocated. This function is
213 similar to gimple_seq_add_seq, but does not scan the operands.
214 During gimplification, we need to manipulate statement sequences
215 before the def/use vectors have been constructed. */
217 static void
218 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
220 gimple_stmt_iterator si;
222 if (src == NULL)
223 return;
225 si = gsi_last (*dst_p);
226 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
230 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
231 and popping gimplify contexts. */
233 static struct gimplify_ctx *ctx_pool = NULL;
235 /* Return a gimplify context struct from the pool. */
237 static inline struct gimplify_ctx *
238 ctx_alloc (void)
240 struct gimplify_ctx * c = ctx_pool;
242 if (c)
243 ctx_pool = c->prev_context;
244 else
245 c = XNEW (struct gimplify_ctx);
247 memset (c, '\0', sizeof (*c));
248 return c;
251 /* Put gimplify context C back into the pool. */
253 static inline void
254 ctx_free (struct gimplify_ctx *c)
256 c->prev_context = ctx_pool;
257 ctx_pool = c;
260 /* Free allocated ctx stack memory. */
262 void
263 free_gimplify_stack (void)
265 struct gimplify_ctx *c;
267 while ((c = ctx_pool))
269 ctx_pool = c->prev_context;
270 free (c);
275 /* Set up a context for the gimplifier. */
277 void
278 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
280 struct gimplify_ctx *c = ctx_alloc ();
282 c->prev_context = gimplify_ctxp;
283 gimplify_ctxp = c;
284 gimplify_ctxp->into_ssa = in_ssa;
285 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
288 /* Tear down a context for the gimplifier. If BODY is non-null, then
289 put the temporaries into the outer BIND_EXPR. Otherwise, put them
290 in the local_decls.
292 BODY is not a sequence, but the first tuple in a sequence. */
294 void
295 pop_gimplify_context (gimple *body)
297 struct gimplify_ctx *c = gimplify_ctxp;
299 gcc_assert (c
300 && (!c->bind_expr_stack.exists ()
301 || c->bind_expr_stack.is_empty ()));
302 c->bind_expr_stack.release ();
303 gimplify_ctxp = c->prev_context;
305 if (body)
306 declare_vars (c->temps, body, false);
307 else
308 record_vars (c->temps);
310 delete c->temp_htab;
311 c->temp_htab = NULL;
312 ctx_free (c);
315 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
317 static void
318 gimple_push_bind_expr (gbind *bind_stmt)
320 gimplify_ctxp->bind_expr_stack.reserve (8);
321 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
324 /* Pop the first element off the stack of bindings. */
326 static void
327 gimple_pop_bind_expr (void)
329 gimplify_ctxp->bind_expr_stack.pop ();
332 /* Return the first element of the stack of bindings. */
334 gbind *
335 gimple_current_bind_expr (void)
337 return gimplify_ctxp->bind_expr_stack.last ();
340 /* Return the stack of bindings created during gimplification. */
342 vec<gbind *>
343 gimple_bind_expr_stack (void)
345 return gimplify_ctxp->bind_expr_stack;
348 /* Return true iff there is a COND_EXPR between us and the innermost
349 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
351 static bool
352 gimple_conditional_context (void)
354 return gimplify_ctxp->conditions > 0;
357 /* Note that we've entered a COND_EXPR. */
359 static void
360 gimple_push_condition (void)
362 #ifdef ENABLE_GIMPLE_CHECKING
363 if (gimplify_ctxp->conditions == 0)
364 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
365 #endif
366 ++(gimplify_ctxp->conditions);
369 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
370 now, add any conditional cleanups we've seen to the prequeue. */
372 static void
373 gimple_pop_condition (gimple_seq *pre_p)
375 int conds = --(gimplify_ctxp->conditions);
377 gcc_assert (conds >= 0);
378 if (conds == 0)
380 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
381 gimplify_ctxp->conditional_cleanups = NULL;
385 /* A stable comparison routine for use with splay trees and DECLs. */
387 static int
388 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
390 tree a = (tree) xa;
391 tree b = (tree) xb;
393 return DECL_UID (a) - DECL_UID (b);
396 /* Create a new omp construct that deals with variable remapping. */
398 static struct gimplify_omp_ctx *
399 new_omp_context (enum omp_region_type region_type)
401 struct gimplify_omp_ctx *c;
403 c = XCNEW (struct gimplify_omp_ctx);
404 c->outer_context = gimplify_omp_ctxp;
405 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
406 c->privatized_types = new hash_set<tree>;
407 c->location = input_location;
408 c->region_type = region_type;
409 if ((region_type & ORT_TASK) == 0)
410 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
411 else
412 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
414 return c;
417 /* Destroy an omp construct that deals with variable remapping. */
419 static void
420 delete_omp_context (struct gimplify_omp_ctx *c)
422 splay_tree_delete (c->variables);
423 delete c->privatized_types;
424 c->loop_iter_var.release ();
425 XDELETE (c);
428 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
429 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
431 /* Both gimplify the statement T and append it to *SEQ_P. This function
432 behaves exactly as gimplify_stmt, but you don't have to pass T as a
433 reference. */
435 void
436 gimplify_and_add (tree t, gimple_seq *seq_p)
438 gimplify_stmt (&t, seq_p);
441 /* Gimplify statement T into sequence *SEQ_P, and return the first
442 tuple in the sequence of generated tuples for this statement.
443 Return NULL if gimplifying T produced no tuples. */
445 static gimple *
446 gimplify_and_return_first (tree t, gimple_seq *seq_p)
448 gimple_stmt_iterator last = gsi_last (*seq_p);
450 gimplify_and_add (t, seq_p);
452 if (!gsi_end_p (last))
454 gsi_next (&last);
455 return gsi_stmt (last);
457 else
458 return gimple_seq_first_stmt (*seq_p);
461 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
462 LHS, or for a call argument. */
464 static bool
465 is_gimple_mem_rhs (tree t)
467 /* If we're dealing with a renamable type, either source or dest must be
468 a renamed variable. */
469 if (is_gimple_reg_type (TREE_TYPE (t)))
470 return is_gimple_val (t);
471 else
472 return is_gimple_val (t) || is_gimple_lvalue (t);
475 /* Return true if T is a CALL_EXPR or an expression that can be
476 assigned to a temporary. Note that this predicate should only be
477 used during gimplification. See the rationale for this in
478 gimplify_modify_expr. */
480 static bool
481 is_gimple_reg_rhs_or_call (tree t)
483 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
484 || TREE_CODE (t) == CALL_EXPR);
487 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
488 this predicate should only be used during gimplification. See the
489 rationale for this in gimplify_modify_expr. */
491 static bool
492 is_gimple_mem_rhs_or_call (tree t)
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return (is_gimple_val (t)
500 || is_gimple_lvalue (t)
501 || TREE_CLOBBER_P (t)
502 || TREE_CODE (t) == CALL_EXPR);
505 /* Create a temporary with a name derived from VAL. Subroutine of
506 lookup_tmp_var; nobody else should call this function. */
508 static inline tree
509 create_tmp_from_val (tree val)
511 /* Drop all qualifiers and address-space information from the value type. */
512 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
513 tree var = create_tmp_var (type, get_name (val));
514 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
515 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
516 DECL_GIMPLE_REG_P (var) = 1;
517 return var;
520 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
521 an existing expression temporary. */
523 static tree
524 lookup_tmp_var (tree val, bool is_formal)
526 tree ret;
528 /* If not optimizing, never really reuse a temporary. local-alloc
529 won't allocate any variable that is used in more than one basic
530 block, which means it will go into memory, causing much extra
531 work in reload and final and poorer code generation, outweighing
532 the extra memory allocation here. */
533 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
534 ret = create_tmp_from_val (val);
535 else
537 elt_t elt, *elt_p;
538 elt_t **slot;
540 elt.val = val;
541 if (!gimplify_ctxp->temp_htab)
542 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
543 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
544 if (*slot == NULL)
546 elt_p = XNEW (elt_t);
547 elt_p->val = val;
548 elt_p->temp = ret = create_tmp_from_val (val);
549 *slot = elt_p;
551 else
553 elt_p = *slot;
554 ret = elt_p->temp;
558 return ret;
561 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
563 static tree
564 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
565 bool is_formal, bool allow_ssa)
567 tree t, mod;
569 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
570 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
571 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
572 fb_rvalue);
574 if (allow_ssa
575 && gimplify_ctxp->into_ssa
576 && is_gimple_reg_type (TREE_TYPE (val)))
578 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
579 if (! gimple_in_ssa_p (cfun))
581 const char *name = get_name (val);
582 if (name)
583 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
586 else
587 t = lookup_tmp_var (val, is_formal);
589 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
591 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
593 /* gimplify_modify_expr might want to reduce this further. */
594 gimplify_and_add (mod, pre_p);
595 ggc_free (mod);
597 return t;
600 /* Return a formal temporary variable initialized with VAL. PRE_P is as
601 in gimplify_expr. Only use this function if:
603 1) The value of the unfactored expression represented by VAL will not
604 change between the initialization and use of the temporary, and
605 2) The temporary will not be otherwise modified.
607 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
608 and #2 means it is inappropriate for && temps.
610 For other cases, use get_initialized_tmp_var instead. */
612 tree
613 get_formal_tmp_var (tree val, gimple_seq *pre_p)
615 return internal_get_tmp_var (val, pre_p, NULL, true, true);
618 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
619 are as in gimplify_expr. */
621 tree
622 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
623 bool allow_ssa)
625 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
628 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
629 generate debug info for them; otherwise don't. */
631 void
632 declare_vars (tree vars, gimple *gs, bool debug_info)
634 tree last = vars;
635 if (last)
637 tree temps, block;
639 gbind *scope = as_a <gbind *> (gs);
641 temps = nreverse (last);
643 block = gimple_bind_block (scope);
644 gcc_assert (!block || TREE_CODE (block) == BLOCK);
645 if (!block || !debug_info)
647 DECL_CHAIN (last) = gimple_bind_vars (scope);
648 gimple_bind_set_vars (scope, temps);
650 else
652 /* We need to attach the nodes both to the BIND_EXPR and to its
653 associated BLOCK for debugging purposes. The key point here
654 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
655 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
656 if (BLOCK_VARS (block))
657 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
658 else
660 gimple_bind_set_vars (scope,
661 chainon (gimple_bind_vars (scope), temps));
662 BLOCK_VARS (block) = temps;
668 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
669 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
670 no such upper bound can be obtained. */
672 static void
673 force_constant_size (tree var)
675 /* The only attempt we make is by querying the maximum size of objects
676 of the variable's type. */
678 HOST_WIDE_INT max_size;
680 gcc_assert (VAR_P (var));
682 max_size = max_int_size_in_bytes (TREE_TYPE (var));
684 gcc_assert (max_size >= 0);
686 DECL_SIZE_UNIT (var)
687 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
688 DECL_SIZE (var)
689 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
692 /* Push the temporary variable TMP into the current binding. */
694 void
695 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
697 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
699 /* Later processing assumes that the object size is constant, which might
700 not be true at this point. Force the use of a constant upper bound in
701 this case. */
702 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
703 force_constant_size (tmp);
705 DECL_CONTEXT (tmp) = fn->decl;
706 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
708 record_vars_into (tmp, fn->decl);
711 /* Push the temporary variable TMP into the current binding. */
713 void
714 gimple_add_tmp_var (tree tmp)
716 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
718 /* Later processing assumes that the object size is constant, which might
719 not be true at this point. Force the use of a constant upper bound in
720 this case. */
721 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
722 force_constant_size (tmp);
724 DECL_CONTEXT (tmp) = current_function_decl;
725 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
727 if (gimplify_ctxp)
729 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
730 gimplify_ctxp->temps = tmp;
732 /* Mark temporaries local within the nearest enclosing parallel. */
733 if (gimplify_omp_ctxp)
735 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
736 while (ctx
737 && (ctx->region_type == ORT_WORKSHARE
738 || ctx->region_type == ORT_SIMD
739 || ctx->region_type == ORT_ACC))
740 ctx = ctx->outer_context;
741 if (ctx)
742 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
745 else if (cfun)
746 record_vars (tmp);
747 else
749 gimple_seq body_seq;
751 /* This case is for nested functions. We need to expose the locals
752 they create. */
753 body_seq = gimple_body (current_function_decl);
754 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
760 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
761 nodes that are referenced more than once in GENERIC functions. This is
762 necessary because gimplification (translation into GIMPLE) is performed
763 by modifying tree nodes in-place, so gimplication of a shared node in a
764 first context could generate an invalid GIMPLE form in a second context.
766 This is achieved with a simple mark/copy/unmark algorithm that walks the
767 GENERIC representation top-down, marks nodes with TREE_VISITED the first
768 time it encounters them, duplicates them if they already have TREE_VISITED
769 set, and finally removes the TREE_VISITED marks it has set.
771 The algorithm works only at the function level, i.e. it generates a GENERIC
772 representation of a function with no nodes shared within the function when
773 passed a GENERIC function (except for nodes that are allowed to be shared).
775 At the global level, it is also necessary to unshare tree nodes that are
776 referenced in more than one function, for the same aforementioned reason.
777 This requires some cooperation from the front-end. There are 2 strategies:
779 1. Manual unsharing. The front-end needs to call unshare_expr on every
780 expression that might end up being shared across functions.
782 2. Deep unsharing. This is an extension of regular unsharing. Instead
783 of calling unshare_expr on expressions that might be shared across
784 functions, the front-end pre-marks them with TREE_VISITED. This will
785 ensure that they are unshared on the first reference within functions
786 when the regular unsharing algorithm runs. The counterpart is that
787 this algorithm must look deeper than for manual unsharing, which is
788 specified by LANG_HOOKS_DEEP_UNSHARING.
790 If there are only few specific cases of node sharing across functions, it is
791 probably easier for a front-end to unshare the expressions manually. On the
792 contrary, if the expressions generated at the global level are as widespread
793 as expressions generated within functions, deep unsharing is very likely the
794 way to go. */
796 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
797 These nodes model computations that must be done once. If we were to
798 unshare something like SAVE_EXPR(i++), the gimplification process would
799 create wrong code. However, if DATA is non-null, it must hold a pointer
800 set that is used to unshare the subtrees of these nodes. */
802 static tree
803 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
805 tree t = *tp;
806 enum tree_code code = TREE_CODE (t);
808 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
809 copy their subtrees if we can make sure to do it only once. */
810 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
812 if (data && !((hash_set<tree> *)data)->add (t))
814 else
815 *walk_subtrees = 0;
818 /* Stop at types, decls, constants like copy_tree_r. */
819 else if (TREE_CODE_CLASS (code) == tcc_type
820 || TREE_CODE_CLASS (code) == tcc_declaration
821 || TREE_CODE_CLASS (code) == tcc_constant)
822 *walk_subtrees = 0;
824 /* Cope with the statement expression extension. */
825 else if (code == STATEMENT_LIST)
828 /* Leave the bulk of the work to copy_tree_r itself. */
829 else
830 copy_tree_r (tp, walk_subtrees, NULL);
832 return NULL_TREE;
835 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
836 If *TP has been visited already, then *TP is deeply copied by calling
837 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
839 static tree
840 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
842 tree t = *tp;
843 enum tree_code code = TREE_CODE (t);
845 /* Skip types, decls, and constants. But we do want to look at their
846 types and the bounds of types. Mark them as visited so we properly
847 unmark their subtrees on the unmark pass. If we've already seen them,
848 don't look down further. */
849 if (TREE_CODE_CLASS (code) == tcc_type
850 || TREE_CODE_CLASS (code) == tcc_declaration
851 || TREE_CODE_CLASS (code) == tcc_constant)
853 if (TREE_VISITED (t))
854 *walk_subtrees = 0;
855 else
856 TREE_VISITED (t) = 1;
859 /* If this node has been visited already, unshare it and don't look
860 any deeper. */
861 else if (TREE_VISITED (t))
863 walk_tree (tp, mostly_copy_tree_r, data, NULL);
864 *walk_subtrees = 0;
867 /* Otherwise, mark the node as visited and keep looking. */
868 else
869 TREE_VISITED (t) = 1;
871 return NULL_TREE;
874 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
875 copy_if_shared_r callback unmodified. */
877 static inline void
878 copy_if_shared (tree *tp, void *data)
880 walk_tree (tp, copy_if_shared_r, data, NULL);
883 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
884 any nested functions. */
886 static void
887 unshare_body (tree fndecl)
889 struct cgraph_node *cgn = cgraph_node::get (fndecl);
890 /* If the language requires deep unsharing, we need a pointer set to make
891 sure we don't repeatedly unshare subtrees of unshareable nodes. */
892 hash_set<tree> *visited
893 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
895 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
896 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
897 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
899 delete visited;
901 if (cgn)
902 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
903 unshare_body (cgn->decl);
906 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
907 Subtrees are walked until the first unvisited node is encountered. */
909 static tree
910 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
912 tree t = *tp;
914 /* If this node has been visited, unmark it and keep looking. */
915 if (TREE_VISITED (t))
916 TREE_VISITED (t) = 0;
918 /* Otherwise, don't look any deeper. */
919 else
920 *walk_subtrees = 0;
922 return NULL_TREE;
925 /* Unmark the visited trees rooted at *TP. */
927 static inline void
928 unmark_visited (tree *tp)
930 walk_tree (tp, unmark_visited_r, NULL, NULL);
933 /* Likewise, but mark all trees as not visited. */
935 static void
936 unvisit_body (tree fndecl)
938 struct cgraph_node *cgn = cgraph_node::get (fndecl);
940 unmark_visited (&DECL_SAVED_TREE (fndecl));
941 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
942 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
944 if (cgn)
945 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
946 unvisit_body (cgn->decl);
949 /* Unconditionally make an unshared copy of EXPR. This is used when using
950 stored expressions which span multiple functions, such as BINFO_VTABLE,
951 as the normal unsharing process can't tell that they're shared. */
953 tree
954 unshare_expr (tree expr)
956 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
957 return expr;
960 /* Worker for unshare_expr_without_location. */
962 static tree
963 prune_expr_location (tree *tp, int *walk_subtrees, void *)
965 if (EXPR_P (*tp))
966 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
967 else
968 *walk_subtrees = 0;
969 return NULL_TREE;
972 /* Similar to unshare_expr but also prune all expression locations
973 from EXPR. */
975 tree
976 unshare_expr_without_location (tree expr)
978 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
979 if (EXPR_P (expr))
980 walk_tree (&expr, prune_expr_location, NULL, NULL);
981 return expr;
984 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
985 contain statements and have a value. Assign its value to a temporary
986 and give it void_type_node. Return the temporary, or NULL_TREE if
987 WRAPPER was already void. */
989 tree
990 voidify_wrapper_expr (tree wrapper, tree temp)
992 tree type = TREE_TYPE (wrapper);
993 if (type && !VOID_TYPE_P (type))
995 tree *p;
997 /* Set p to point to the body of the wrapper. Loop until we find
998 something that isn't a wrapper. */
999 for (p = &wrapper; p && *p; )
1001 switch (TREE_CODE (*p))
1003 case BIND_EXPR:
1004 TREE_SIDE_EFFECTS (*p) = 1;
1005 TREE_TYPE (*p) = void_type_node;
1006 /* For a BIND_EXPR, the body is operand 1. */
1007 p = &BIND_EXPR_BODY (*p);
1008 break;
1010 case CLEANUP_POINT_EXPR:
1011 case TRY_FINALLY_EXPR:
1012 case TRY_CATCH_EXPR:
1013 TREE_SIDE_EFFECTS (*p) = 1;
1014 TREE_TYPE (*p) = void_type_node;
1015 p = &TREE_OPERAND (*p, 0);
1016 break;
1018 case STATEMENT_LIST:
1020 tree_stmt_iterator i = tsi_last (*p);
1021 TREE_SIDE_EFFECTS (*p) = 1;
1022 TREE_TYPE (*p) = void_type_node;
1023 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1025 break;
1027 case COMPOUND_EXPR:
1028 /* Advance to the last statement. Set all container types to
1029 void. */
1030 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1032 TREE_SIDE_EFFECTS (*p) = 1;
1033 TREE_TYPE (*p) = void_type_node;
1035 break;
1037 case TRANSACTION_EXPR:
1038 TREE_SIDE_EFFECTS (*p) = 1;
1039 TREE_TYPE (*p) = void_type_node;
1040 p = &TRANSACTION_EXPR_BODY (*p);
1041 break;
1043 default:
1044 /* Assume that any tree upon which voidify_wrapper_expr is
1045 directly called is a wrapper, and that its body is op0. */
1046 if (p == &wrapper)
1048 TREE_SIDE_EFFECTS (*p) = 1;
1049 TREE_TYPE (*p) = void_type_node;
1050 p = &TREE_OPERAND (*p, 0);
1051 break;
1053 goto out;
1057 out:
1058 if (p == NULL || IS_EMPTY_STMT (*p))
1059 temp = NULL_TREE;
1060 else if (temp)
1062 /* The wrapper is on the RHS of an assignment that we're pushing
1063 down. */
1064 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1065 || TREE_CODE (temp) == MODIFY_EXPR);
1066 TREE_OPERAND (temp, 1) = *p;
1067 *p = temp;
1069 else
1071 temp = create_tmp_var (type, "retval");
1072 *p = build2 (INIT_EXPR, type, temp, *p);
1075 return temp;
1078 return NULL_TREE;
1081 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1082 a temporary through which they communicate. */
1084 static void
1085 build_stack_save_restore (gcall **save, gcall **restore)
1087 tree tmp_var;
1089 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1090 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1091 gimple_call_set_lhs (*save, tmp_var);
1093 *restore
1094 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1095 1, tmp_var);
1098 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1100 static tree
1101 build_asan_poison_call_expr (tree decl)
1103 /* Do not poison variables that have size equal to zero. */
1104 tree unit_size = DECL_SIZE_UNIT (decl);
1105 if (zerop (unit_size))
1106 return NULL_TREE;
1108 tree base = build_fold_addr_expr (decl);
1110 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1111 void_type_node, 3,
1112 build_int_cst (integer_type_node,
1113 ASAN_MARK_POISON),
1114 base, unit_size);
1117 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1118 on POISON flag, shadow memory of a DECL variable. The call will be
1119 put on location identified by IT iterator, where BEFORE flag drives
1120 position where the stmt will be put. */
1122 static void
1123 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1124 bool before)
1126 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1127 if (gimplify_omp_ctxp)
1128 return;
1130 tree unit_size = DECL_SIZE_UNIT (decl);
1131 tree base = build_fold_addr_expr (decl);
1133 /* Do not poison variables that have size equal to zero. */
1134 if (zerop (unit_size))
1135 return;
1137 /* It's necessary to have all stack variables aligned to ASAN granularity
1138 bytes. */
1139 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1140 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1142 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1144 gimple *g
1145 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1146 build_int_cst (integer_type_node, flags),
1147 base, unit_size);
1149 if (before)
1150 gsi_insert_before (it, g, GSI_NEW_STMT);
1151 else
1152 gsi_insert_after (it, g, GSI_NEW_STMT);
1155 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1156 either poisons or unpoisons a DECL. Created statement is appended
1157 to SEQ_P gimple sequence. */
1159 static void
1160 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1162 gimple_stmt_iterator it = gsi_last (*seq_p);
1163 bool before = false;
1165 if (gsi_end_p (it))
1166 before = true;
1168 asan_poison_variable (decl, poison, &it, before);
1171 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1173 static int
1174 sort_by_decl_uid (const void *a, const void *b)
1176 const tree *t1 = (const tree *)a;
1177 const tree *t2 = (const tree *)b;
1179 int uid1 = DECL_UID (*t1);
1180 int uid2 = DECL_UID (*t2);
1182 if (uid1 < uid2)
1183 return -1;
1184 else if (uid1 > uid2)
1185 return 1;
1186 else
1187 return 0;
1190 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1191 depending on POISON flag. Created statement is appended
1192 to SEQ_P gimple sequence. */
1194 static void
1195 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1197 unsigned c = variables->elements ();
1198 if (c == 0)
1199 return;
1201 auto_vec<tree> sorted_variables (c);
1203 for (hash_set<tree>::iterator it = variables->begin ();
1204 it != variables->end (); ++it)
1205 sorted_variables.safe_push (*it);
1207 sorted_variables.qsort (sort_by_decl_uid);
1209 unsigned i;
1210 tree var;
1211 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1213 asan_poison_variable (var, poison, seq_p);
1215 /* Add use_after_scope_memory attribute for the variable in order
1216 to prevent re-written into SSA. */
1217 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1218 DECL_ATTRIBUTES (var)))
1219 DECL_ATTRIBUTES (var)
1220 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1221 integer_one_node,
1222 DECL_ATTRIBUTES (var));
1226 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1228 static enum gimplify_status
1229 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1231 tree bind_expr = *expr_p;
1232 bool old_keep_stack = gimplify_ctxp->keep_stack;
1233 bool old_save_stack = gimplify_ctxp->save_stack;
1234 tree t;
1235 gbind *bind_stmt;
1236 gimple_seq body, cleanup;
1237 gcall *stack_save;
1238 location_t start_locus = 0, end_locus = 0;
1239 tree ret_clauses = NULL;
1241 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1243 /* Mark variables seen in this bind expr. */
1244 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1246 if (VAR_P (t))
1248 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1250 /* Mark variable as local. */
1251 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1252 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1253 || splay_tree_lookup (ctx->variables,
1254 (splay_tree_key) t) == NULL))
1256 if (ctx->region_type == ORT_SIMD
1257 && TREE_ADDRESSABLE (t)
1258 && !TREE_STATIC (t))
1259 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1260 else
1261 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1264 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1266 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1267 cfun->has_local_explicit_reg_vars = true;
1270 /* Preliminarily mark non-addressed complex variables as eligible
1271 for promotion to gimple registers. We'll transform their uses
1272 as we find them. */
1273 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1274 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1275 && !TREE_THIS_VOLATILE (t)
1276 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1277 && !needs_to_live_in_memory (t))
1278 DECL_GIMPLE_REG_P (t) = 1;
1281 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1282 BIND_EXPR_BLOCK (bind_expr));
1283 gimple_push_bind_expr (bind_stmt);
1285 gimplify_ctxp->keep_stack = false;
1286 gimplify_ctxp->save_stack = false;
1288 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1289 body = NULL;
1290 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1291 gimple_bind_set_body (bind_stmt, body);
1293 /* Source location wise, the cleanup code (stack_restore and clobbers)
1294 belongs to the end of the block, so propagate what we have. The
1295 stack_save operation belongs to the beginning of block, which we can
1296 infer from the bind_expr directly if the block has no explicit
1297 assignment. */
1298 if (BIND_EXPR_BLOCK (bind_expr))
1300 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1301 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1303 if (start_locus == 0)
1304 start_locus = EXPR_LOCATION (bind_expr);
1306 cleanup = NULL;
1307 stack_save = NULL;
1309 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1310 the stack space allocated to the VLAs. */
1311 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1313 gcall *stack_restore;
1315 /* Save stack on entry and restore it on exit. Add a try_finally
1316 block to achieve this. */
1317 build_stack_save_restore (&stack_save, &stack_restore);
1319 gimple_set_location (stack_save, start_locus);
1320 gimple_set_location (stack_restore, end_locus);
1322 gimplify_seq_add_stmt (&cleanup, stack_restore);
1325 /* Add clobbers for all variables that go out of scope. */
1326 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1328 if (VAR_P (t)
1329 && !is_global_var (t)
1330 && DECL_CONTEXT (t) == current_function_decl)
1332 if (!DECL_HARD_REGISTER (t)
1333 && !TREE_THIS_VOLATILE (t)
1334 && !DECL_HAS_VALUE_EXPR_P (t)
1335 /* Only care for variables that have to be in memory. Others
1336 will be rewritten into SSA names, hence moved to the
1337 top-level. */
1338 && !is_gimple_reg (t)
1339 && flag_stack_reuse != SR_NONE)
1341 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1342 gimple *clobber_stmt;
1343 TREE_THIS_VOLATILE (clobber) = 1;
1344 clobber_stmt = gimple_build_assign (t, clobber);
1345 gimple_set_location (clobber_stmt, end_locus);
1346 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1349 if (flag_openacc && oacc_declare_returns != NULL)
1351 tree *c = oacc_declare_returns->get (t);
1352 if (c != NULL)
1354 if (ret_clauses)
1355 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1357 ret_clauses = *c;
1359 oacc_declare_returns->remove (t);
1361 if (oacc_declare_returns->elements () == 0)
1363 delete oacc_declare_returns;
1364 oacc_declare_returns = NULL;
1370 if (asan_poisoned_variables != NULL
1371 && asan_poisoned_variables->contains (t))
1373 asan_poisoned_variables->remove (t);
1374 asan_poison_variable (t, true, &cleanup);
1377 if (gimplify_ctxp->live_switch_vars != NULL
1378 && gimplify_ctxp->live_switch_vars->contains (t))
1379 gimplify_ctxp->live_switch_vars->remove (t);
1382 if (ret_clauses)
1384 gomp_target *stmt;
1385 gimple_stmt_iterator si = gsi_start (cleanup);
1387 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1388 ret_clauses);
1389 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1392 if (cleanup)
1394 gtry *gs;
1395 gimple_seq new_body;
1397 new_body = NULL;
1398 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1399 GIMPLE_TRY_FINALLY);
1401 if (stack_save)
1402 gimplify_seq_add_stmt (&new_body, stack_save);
1403 gimplify_seq_add_stmt (&new_body, gs);
1404 gimple_bind_set_body (bind_stmt, new_body);
1407 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1408 if (!gimplify_ctxp->keep_stack)
1409 gimplify_ctxp->keep_stack = old_keep_stack;
1410 gimplify_ctxp->save_stack = old_save_stack;
1412 gimple_pop_bind_expr ();
1414 gimplify_seq_add_stmt (pre_p, bind_stmt);
1416 if (temp)
1418 *expr_p = temp;
1419 return GS_OK;
1422 *expr_p = NULL_TREE;
1423 return GS_ALL_DONE;
1426 /* Maybe add early return predict statement to PRE_P sequence. */
1428 static void
1429 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1431 /* If we are not in a conditional context, add PREDICT statement. */
1432 if (gimple_conditional_context ())
1434 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1435 NOT_TAKEN);
1436 gimplify_seq_add_stmt (pre_p, predict);
1440 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1441 GIMPLE value, it is assigned to a new temporary and the statement is
1442 re-written to return the temporary.
1444 PRE_P points to the sequence where side effects that must happen before
1445 STMT should be stored. */
1447 static enum gimplify_status
1448 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1450 greturn *ret;
1451 tree ret_expr = TREE_OPERAND (stmt, 0);
1452 tree result_decl, result;
1454 if (ret_expr == error_mark_node)
1455 return GS_ERROR;
1457 /* Implicit _Cilk_sync must be inserted right before any return statement
1458 if there is a _Cilk_spawn in the function. If the user has provided a
1459 _Cilk_sync, the optimizer should remove this duplicate one. */
1460 if (fn_contains_cilk_spawn_p (cfun))
1462 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1463 gimplify_and_add (impl_sync, pre_p);
1466 if (!ret_expr
1467 || TREE_CODE (ret_expr) == RESULT_DECL
1468 || ret_expr == error_mark_node)
1470 maybe_add_early_return_predict_stmt (pre_p);
1471 greturn *ret = gimple_build_return (ret_expr);
1472 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1473 gimplify_seq_add_stmt (pre_p, ret);
1474 return GS_ALL_DONE;
1477 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1478 result_decl = NULL_TREE;
1479 else
1481 result_decl = TREE_OPERAND (ret_expr, 0);
1483 /* See through a return by reference. */
1484 if (TREE_CODE (result_decl) == INDIRECT_REF)
1485 result_decl = TREE_OPERAND (result_decl, 0);
1487 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1488 || TREE_CODE (ret_expr) == INIT_EXPR)
1489 && TREE_CODE (result_decl) == RESULT_DECL);
1492 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1493 Recall that aggregate_value_p is FALSE for any aggregate type that is
1494 returned in registers. If we're returning values in registers, then
1495 we don't want to extend the lifetime of the RESULT_DECL, particularly
1496 across another call. In addition, for those aggregates for which
1497 hard_function_value generates a PARALLEL, we'll die during normal
1498 expansion of structure assignments; there's special code in expand_return
1499 to handle this case that does not exist in expand_expr. */
1500 if (!result_decl)
1501 result = NULL_TREE;
1502 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1504 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1506 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1507 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1508 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1509 should be effectively allocated by the caller, i.e. all calls to
1510 this function must be subject to the Return Slot Optimization. */
1511 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1512 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1514 result = result_decl;
1516 else if (gimplify_ctxp->return_temp)
1517 result = gimplify_ctxp->return_temp;
1518 else
1520 result = create_tmp_reg (TREE_TYPE (result_decl));
1522 /* ??? With complex control flow (usually involving abnormal edges),
1523 we can wind up warning about an uninitialized value for this. Due
1524 to how this variable is constructed and initialized, this is never
1525 true. Give up and never warn. */
1526 TREE_NO_WARNING (result) = 1;
1528 gimplify_ctxp->return_temp = result;
1531 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1532 Then gimplify the whole thing. */
1533 if (result != result_decl)
1534 TREE_OPERAND (ret_expr, 0) = result;
1536 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1538 maybe_add_early_return_predict_stmt (pre_p);
1539 ret = gimple_build_return (result);
1540 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1541 gimplify_seq_add_stmt (pre_p, ret);
1543 return GS_ALL_DONE;
1546 /* Gimplify a variable-length array DECL. */
1548 static void
1549 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1551 /* This is a variable-sized decl. Simplify its size and mark it
1552 for deferred expansion. */
1553 tree t, addr, ptr_type;
1555 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1556 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1558 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1559 if (DECL_HAS_VALUE_EXPR_P (decl))
1560 return;
1562 /* All occurrences of this decl in final gimplified code will be
1563 replaced by indirection. Setting DECL_VALUE_EXPR does two
1564 things: First, it lets the rest of the gimplifier know what
1565 replacement to use. Second, it lets the debug info know
1566 where to find the value. */
1567 ptr_type = build_pointer_type (TREE_TYPE (decl));
1568 addr = create_tmp_var (ptr_type, get_name (decl));
1569 DECL_IGNORED_P (addr) = 0;
1570 t = build_fold_indirect_ref (addr);
1571 TREE_THIS_NOTRAP (t) = 1;
1572 SET_DECL_VALUE_EXPR (decl, t);
1573 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1575 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1576 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1577 size_int (DECL_ALIGN (decl)));
1578 /* The call has been built for a variable-sized object. */
1579 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1580 t = fold_convert (ptr_type, t);
1581 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1583 gimplify_and_add (t, seq_p);
1586 /* A helper function to be called via walk_tree. Mark all labels under *TP
1587 as being forced. To be called for DECL_INITIAL of static variables. */
1589 static tree
1590 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1592 if (TYPE_P (*tp))
1593 *walk_subtrees = 0;
1594 if (TREE_CODE (*tp) == LABEL_DECL)
1596 FORCED_LABEL (*tp) = 1;
1597 cfun->has_forced_label_in_static = 1;
1600 return NULL_TREE;
1603 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1604 and initialization explicit. */
1606 static enum gimplify_status
1607 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1609 tree stmt = *stmt_p;
1610 tree decl = DECL_EXPR_DECL (stmt);
1612 *stmt_p = NULL_TREE;
1614 if (TREE_TYPE (decl) == error_mark_node)
1615 return GS_ERROR;
1617 if ((TREE_CODE (decl) == TYPE_DECL
1618 || VAR_P (decl))
1619 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1621 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1622 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1623 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1626 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1627 in case its size expressions contain problematic nodes like CALL_EXPR. */
1628 if (TREE_CODE (decl) == TYPE_DECL
1629 && DECL_ORIGINAL_TYPE (decl)
1630 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1632 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1633 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1634 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1637 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1639 tree init = DECL_INITIAL (decl);
1640 bool is_vla = false;
1642 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1643 || (!TREE_STATIC (decl)
1644 && flag_stack_check == GENERIC_STACK_CHECK
1645 && compare_tree_int (DECL_SIZE_UNIT (decl),
1646 STACK_CHECK_MAX_VAR_SIZE) > 0))
1648 gimplify_vla_decl (decl, seq_p);
1649 is_vla = true;
1652 if (asan_poisoned_variables
1653 && !is_vla
1654 && TREE_ADDRESSABLE (decl)
1655 && !TREE_STATIC (decl)
1656 && !DECL_HAS_VALUE_EXPR_P (decl)
1657 && dbg_cnt (asan_use_after_scope))
1659 asan_poisoned_variables->add (decl);
1660 asan_poison_variable (decl, false, seq_p);
1661 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1662 gimplify_ctxp->live_switch_vars->add (decl);
1665 /* Some front ends do not explicitly declare all anonymous
1666 artificial variables. We compensate here by declaring the
1667 variables, though it would be better if the front ends would
1668 explicitly declare them. */
1669 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1670 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1671 gimple_add_tmp_var (decl);
1673 if (init && init != error_mark_node)
1675 if (!TREE_STATIC (decl))
1677 DECL_INITIAL (decl) = NULL_TREE;
1678 init = build2 (INIT_EXPR, void_type_node, decl, init);
1679 gimplify_and_add (init, seq_p);
1680 ggc_free (init);
1682 else
1683 /* We must still examine initializers for static variables
1684 as they may contain a label address. */
1685 walk_tree (&init, force_labels_r, NULL, NULL);
1689 return GS_ALL_DONE;
1692 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1693 and replacing the LOOP_EXPR with goto, but if the loop contains an
1694 EXIT_EXPR, we need to append a label for it to jump to. */
1696 static enum gimplify_status
1697 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1699 tree saved_label = gimplify_ctxp->exit_label;
1700 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1702 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1704 gimplify_ctxp->exit_label = NULL_TREE;
1706 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1708 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1710 if (gimplify_ctxp->exit_label)
1711 gimplify_seq_add_stmt (pre_p,
1712 gimple_build_label (gimplify_ctxp->exit_label));
1714 gimplify_ctxp->exit_label = saved_label;
1716 *expr_p = NULL;
1717 return GS_ALL_DONE;
1720 /* Gimplify a statement list onto a sequence. These may be created either
1721 by an enlightened front-end, or by shortcut_cond_expr. */
1723 static enum gimplify_status
1724 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1726 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1728 tree_stmt_iterator i = tsi_start (*expr_p);
1730 while (!tsi_end_p (i))
1732 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1733 tsi_delink (&i);
1736 if (temp)
1738 *expr_p = temp;
1739 return GS_OK;
1742 return GS_ALL_DONE;
1745 /* Callback for walk_gimple_seq. */
1747 static tree
1748 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1749 struct walk_stmt_info *wi)
1751 gimple *stmt = gsi_stmt (*gsi_p);
1753 *handled_ops_p = true;
1754 switch (gimple_code (stmt))
1756 case GIMPLE_TRY:
1757 /* A compiler-generated cleanup or a user-written try block.
1758 If it's empty, don't dive into it--that would result in
1759 worse location info. */
1760 if (gimple_try_eval (stmt) == NULL)
1762 wi->info = stmt;
1763 return integer_zero_node;
1765 /* Fall through. */
1766 case GIMPLE_BIND:
1767 case GIMPLE_CATCH:
1768 case GIMPLE_EH_FILTER:
1769 case GIMPLE_TRANSACTION:
1770 /* Walk the sub-statements. */
1771 *handled_ops_p = false;
1772 break;
1773 case GIMPLE_CALL:
1774 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1776 *handled_ops_p = false;
1777 break;
1779 /* Fall through. */
1780 default:
1781 /* Save the first "real" statement (not a decl/lexical scope/...). */
1782 wi->info = stmt;
1783 return integer_zero_node;
1785 return NULL_TREE;
1788 /* Possibly warn about unreachable statements between switch's controlling
1789 expression and the first case. SEQ is the body of a switch expression. */
1791 static void
1792 maybe_warn_switch_unreachable (gimple_seq seq)
1794 if (!warn_switch_unreachable
1795 /* This warning doesn't play well with Fortran when optimizations
1796 are on. */
1797 || lang_GNU_Fortran ()
1798 || seq == NULL)
1799 return;
1801 struct walk_stmt_info wi;
1802 memset (&wi, 0, sizeof (wi));
1803 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1804 gimple *stmt = (gimple *) wi.info;
1806 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1808 if (gimple_code (stmt) == GIMPLE_GOTO
1809 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1810 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1811 /* Don't warn for compiler-generated gotos. These occur
1812 in Duff's devices, for example. */;
1813 else
1814 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1815 "statement will never be executed");
1820 /* A label entry that pairs label and a location. */
1821 struct label_entry
1823 tree label;
1824 location_t loc;
1827 /* Find LABEL in vector of label entries VEC. */
1829 static struct label_entry *
1830 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1832 unsigned int i;
1833 struct label_entry *l;
1835 FOR_EACH_VEC_ELT (*vec, i, l)
1836 if (l->label == label)
1837 return l;
1838 return NULL;
1841 /* Return true if LABEL, a LABEL_DECL, represents a case label
1842 in a vector of labels CASES. */
1844 static bool
1845 case_label_p (const vec<tree> *cases, tree label)
1847 unsigned int i;
1848 tree l;
1850 FOR_EACH_VEC_ELT (*cases, i, l)
1851 if (CASE_LABEL (l) == label)
1852 return true;
1853 return false;
1856 /* Find the last statement in a scope STMT. */
1858 static gimple *
1859 last_stmt_in_scope (gimple *stmt)
1861 if (!stmt)
1862 return NULL;
1864 switch (gimple_code (stmt))
1866 case GIMPLE_BIND:
1868 gbind *bind = as_a <gbind *> (stmt);
1869 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1870 return last_stmt_in_scope (stmt);
1873 case GIMPLE_TRY:
1875 gtry *try_stmt = as_a <gtry *> (stmt);
1876 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1877 gimple *last_eval = last_stmt_in_scope (stmt);
1878 if (gimple_stmt_may_fallthru (last_eval)
1879 && (last_eval == NULL
1880 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1881 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1883 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1884 return last_stmt_in_scope (stmt);
1886 else
1887 return last_eval;
1890 default:
1891 return stmt;
1895 /* Collect interesting labels in LABELS and return the statement preceding
1896 another case label, or a user-defined label. */
1898 static gimple *
1899 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1900 auto_vec <struct label_entry> *labels)
1902 gimple *prev = NULL;
1906 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1907 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1909 /* Nested scope. Only look at the last statement of
1910 the innermost scope. */
1911 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1912 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1913 if (last)
1915 prev = last;
1916 /* It might be a label without a location. Use the
1917 location of the scope then. */
1918 if (!gimple_has_location (prev))
1919 gimple_set_location (prev, bind_loc);
1921 gsi_next (gsi_p);
1922 continue;
1925 /* Ifs are tricky. */
1926 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1928 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1929 tree false_lab = gimple_cond_false_label (cond_stmt);
1930 location_t if_loc = gimple_location (cond_stmt);
1932 /* If we have e.g.
1933 if (i > 1) goto <D.2259>; else goto D;
1934 we can't do much with the else-branch. */
1935 if (!DECL_ARTIFICIAL (false_lab))
1936 break;
1938 /* Go on until the false label, then one step back. */
1939 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1941 gimple *stmt = gsi_stmt (*gsi_p);
1942 if (gimple_code (stmt) == GIMPLE_LABEL
1943 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1944 break;
1947 /* Not found? Oops. */
1948 if (gsi_end_p (*gsi_p))
1949 break;
1951 struct label_entry l = { false_lab, if_loc };
1952 labels->safe_push (l);
1954 /* Go to the last statement of the then branch. */
1955 gsi_prev (gsi_p);
1957 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1958 <D.1759>:
1959 <stmt>;
1960 goto <D.1761>;
1961 <D.1760>:
1963 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1964 && !gimple_has_location (gsi_stmt (*gsi_p)))
1966 /* Look at the statement before, it might be
1967 attribute fallthrough, in which case don't warn. */
1968 gsi_prev (gsi_p);
1969 bool fallthru_before_dest
1970 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1971 gsi_next (gsi_p);
1972 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1973 if (!fallthru_before_dest)
1975 struct label_entry l = { goto_dest, if_loc };
1976 labels->safe_push (l);
1979 /* And move back. */
1980 gsi_next (gsi_p);
1983 /* Remember the last statement. Skip labels that are of no interest
1984 to us. */
1985 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1987 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1988 if (find_label_entry (labels, label))
1989 prev = gsi_stmt (*gsi_p);
1991 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1993 else
1994 prev = gsi_stmt (*gsi_p);
1995 gsi_next (gsi_p);
1997 while (!gsi_end_p (*gsi_p)
1998 /* Stop if we find a case or a user-defined label. */
1999 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2000 || !gimple_has_location (gsi_stmt (*gsi_p))));
2002 return prev;
2005 /* Return true if the switch fallthough warning should occur. LABEL is
2006 the label statement that we're falling through to. */
2008 static bool
2009 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2011 gimple_stmt_iterator gsi = *gsi_p;
2013 /* Don't warn if the label is marked with a "falls through" comment. */
2014 if (FALLTHROUGH_LABEL_P (label))
2015 return false;
2017 /* Don't warn for non-case labels followed by a statement:
2018 case 0:
2019 foo ();
2020 label:
2021 bar ();
2022 as these are likely intentional. */
2023 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2025 tree l;
2026 while (!gsi_end_p (gsi)
2027 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2028 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2029 && !case_label_p (&gimplify_ctxp->case_labels, l))
2030 gsi_next (&gsi);
2031 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2032 return false;
2035 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2036 immediately breaks. */
2037 gsi = *gsi_p;
2039 /* Skip all immediately following labels. */
2040 while (!gsi_end_p (gsi)
2041 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2042 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2043 gsi_next (&gsi);
2045 /* { ... something; default:; } */
2046 if (gsi_end_p (gsi)
2047 /* { ... something; default: break; } or
2048 { ... something; default: goto L; } */
2049 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2050 /* { ... something; default: return; } */
2051 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2052 return false;
2054 return true;
2057 /* Callback for walk_gimple_seq. */
2059 static tree
2060 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2061 struct walk_stmt_info *)
2063 gimple *stmt = gsi_stmt (*gsi_p);
2065 *handled_ops_p = true;
2066 switch (gimple_code (stmt))
2068 case GIMPLE_TRY:
2069 case GIMPLE_BIND:
2070 case GIMPLE_CATCH:
2071 case GIMPLE_EH_FILTER:
2072 case GIMPLE_TRANSACTION:
2073 /* Walk the sub-statements. */
2074 *handled_ops_p = false;
2075 break;
2077 /* Find a sequence of form:
2079 GIMPLE_LABEL
2080 [...]
2081 <may fallthru stmt>
2082 GIMPLE_LABEL
2084 and possibly warn. */
2085 case GIMPLE_LABEL:
2087 /* Found a label. Skip all immediately following labels. */
2088 while (!gsi_end_p (*gsi_p)
2089 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2090 gsi_next (gsi_p);
2092 /* There might be no more statements. */
2093 if (gsi_end_p (*gsi_p))
2094 return integer_zero_node;
2096 /* Vector of labels that fall through. */
2097 auto_vec <struct label_entry> labels;
2098 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2100 /* There might be no more statements. */
2101 if (gsi_end_p (*gsi_p))
2102 return integer_zero_node;
2104 gimple *next = gsi_stmt (*gsi_p);
2105 tree label;
2106 /* If what follows is a label, then we may have a fallthrough. */
2107 if (gimple_code (next) == GIMPLE_LABEL
2108 && gimple_has_location (next)
2109 && (label = gimple_label_label (as_a <glabel *> (next)))
2110 && prev != NULL)
2112 struct label_entry *l;
2113 bool warned_p = false;
2114 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2115 /* Quiet. */;
2116 else if (gimple_code (prev) == GIMPLE_LABEL
2117 && (label = gimple_label_label (as_a <glabel *> (prev)))
2118 && (l = find_label_entry (&labels, label)))
2119 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2120 "this statement may fall through");
2121 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2122 /* Try to be clever and don't warn when the statement
2123 can't actually fall through. */
2124 && gimple_stmt_may_fallthru (prev)
2125 && gimple_has_location (prev))
2126 warned_p = warning_at (gimple_location (prev),
2127 OPT_Wimplicit_fallthrough_,
2128 "this statement may fall through");
2129 if (warned_p)
2130 inform (gimple_location (next), "here");
2132 /* Mark this label as processed so as to prevent multiple
2133 warnings in nested switches. */
2134 FALLTHROUGH_LABEL_P (label) = true;
2136 /* So that next warn_implicit_fallthrough_r will start looking for
2137 a new sequence starting with this label. */
2138 gsi_prev (gsi_p);
2141 break;
2142 default:
2143 break;
2145 return NULL_TREE;
2148 /* Warn when a switch case falls through. */
2150 static void
2151 maybe_warn_implicit_fallthrough (gimple_seq seq)
2153 if (!warn_implicit_fallthrough)
2154 return;
2156 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2157 if (!(lang_GNU_C ()
2158 || lang_GNU_CXX ()
2159 || lang_GNU_OBJC ()))
2160 return;
2162 struct walk_stmt_info wi;
2163 memset (&wi, 0, sizeof (wi));
2164 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2167 /* Callback for walk_gimple_seq. */
2169 static tree
2170 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2171 struct walk_stmt_info *)
2173 gimple *stmt = gsi_stmt (*gsi_p);
2175 *handled_ops_p = true;
2176 switch (gimple_code (stmt))
2178 case GIMPLE_TRY:
2179 case GIMPLE_BIND:
2180 case GIMPLE_CATCH:
2181 case GIMPLE_EH_FILTER:
2182 case GIMPLE_TRANSACTION:
2183 /* Walk the sub-statements. */
2184 *handled_ops_p = false;
2185 break;
2186 case GIMPLE_CALL:
2187 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2189 gsi_remove (gsi_p, true);
2190 if (gsi_end_p (*gsi_p))
2191 return integer_zero_node;
2193 bool found = false;
2194 location_t loc = gimple_location (stmt);
2196 gimple_stmt_iterator gsi2 = *gsi_p;
2197 stmt = gsi_stmt (gsi2);
2198 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2200 /* Go on until the artificial label. */
2201 tree goto_dest = gimple_goto_dest (stmt);
2202 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2204 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2205 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2206 == goto_dest)
2207 break;
2210 /* Not found? Stop. */
2211 if (gsi_end_p (gsi2))
2212 break;
2214 /* Look one past it. */
2215 gsi_next (&gsi2);
2218 /* We're looking for a case label or default label here. */
2219 while (!gsi_end_p (gsi2))
2221 stmt = gsi_stmt (gsi2);
2222 if (gimple_code (stmt) == GIMPLE_LABEL)
2224 tree label = gimple_label_label (as_a <glabel *> (stmt));
2225 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2227 found = true;
2228 break;
2231 else
2232 /* Something other than a label. That's not expected. */
2233 break;
2234 gsi_next (&gsi2);
2236 if (!found)
2237 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2238 "a case label or default label");
2240 break;
2241 default:
2242 break;
2244 return NULL_TREE;
2247 /* Expand all FALLTHROUGH () calls in SEQ. */
2249 static void
2250 expand_FALLTHROUGH (gimple_seq *seq_p)
2252 struct walk_stmt_info wi;
2253 memset (&wi, 0, sizeof (wi));
2254 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2258 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2259 branch to. */
2261 static enum gimplify_status
2262 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2264 tree switch_expr = *expr_p;
2265 gimple_seq switch_body_seq = NULL;
2266 enum gimplify_status ret;
2267 tree index_type = TREE_TYPE (switch_expr);
2268 if (index_type == NULL_TREE)
2269 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2271 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2272 fb_rvalue);
2273 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2274 return ret;
2276 if (SWITCH_BODY (switch_expr))
2278 vec<tree> labels;
2279 vec<tree> saved_labels;
2280 hash_set<tree> *saved_live_switch_vars = NULL;
2281 tree default_case = NULL_TREE;
2282 gswitch *switch_stmt;
2284 /* If someone can be bothered to fill in the labels, they can
2285 be bothered to null out the body too. */
2286 gcc_assert (!SWITCH_LABELS (switch_expr));
2288 /* Save old labels, get new ones from body, then restore the old
2289 labels. Save all the things from the switch body to append after. */
2290 saved_labels = gimplify_ctxp->case_labels;
2291 gimplify_ctxp->case_labels.create (8);
2293 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2294 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2295 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2296 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2297 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2298 else
2299 gimplify_ctxp->live_switch_vars = NULL;
2301 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2302 gimplify_ctxp->in_switch_expr = true;
2304 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2306 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2307 maybe_warn_switch_unreachable (switch_body_seq);
2308 maybe_warn_implicit_fallthrough (switch_body_seq);
2309 /* Only do this for the outermost GIMPLE_SWITCH. */
2310 if (!gimplify_ctxp->in_switch_expr)
2311 expand_FALLTHROUGH (&switch_body_seq);
2313 labels = gimplify_ctxp->case_labels;
2314 gimplify_ctxp->case_labels = saved_labels;
2316 if (gimplify_ctxp->live_switch_vars)
2318 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2319 delete gimplify_ctxp->live_switch_vars;
2321 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2323 preprocess_case_label_vec_for_gimple (labels, index_type,
2324 &default_case);
2326 if (!default_case)
2328 glabel *new_default;
2330 default_case
2331 = build_case_label (NULL_TREE, NULL_TREE,
2332 create_artificial_label (UNKNOWN_LOCATION));
2333 new_default = gimple_build_label (CASE_LABEL (default_case));
2334 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2337 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2338 default_case, labels);
2339 gimplify_seq_add_stmt (pre_p, switch_stmt);
2340 gimplify_seq_add_seq (pre_p, switch_body_seq);
2341 labels.release ();
2343 else
2344 gcc_assert (SWITCH_LABELS (switch_expr));
2346 return GS_ALL_DONE;
2349 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2351 static enum gimplify_status
2352 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2354 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2355 == current_function_decl);
2357 tree label = LABEL_EXPR_LABEL (*expr_p);
2358 glabel *label_stmt = gimple_build_label (label);
2359 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2360 gimplify_seq_add_stmt (pre_p, label_stmt);
2362 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2363 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2364 NOT_TAKEN));
2365 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2366 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2367 TAKEN));
2369 return GS_ALL_DONE;
2372 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2374 static enum gimplify_status
2375 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2377 struct gimplify_ctx *ctxp;
2378 glabel *label_stmt;
2380 /* Invalid programs can play Duff's Device type games with, for example,
2381 #pragma omp parallel. At least in the C front end, we don't
2382 detect such invalid branches until after gimplification, in the
2383 diagnose_omp_blocks pass. */
2384 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2385 if (ctxp->case_labels.exists ())
2386 break;
2388 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2389 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2390 ctxp->case_labels.safe_push (*expr_p);
2391 gimplify_seq_add_stmt (pre_p, label_stmt);
2393 return GS_ALL_DONE;
2396 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2397 if necessary. */
2399 tree
2400 build_and_jump (tree *label_p)
2402 if (label_p == NULL)
2403 /* If there's nowhere to jump, just fall through. */
2404 return NULL_TREE;
2406 if (*label_p == NULL_TREE)
2408 tree label = create_artificial_label (UNKNOWN_LOCATION);
2409 *label_p = label;
2412 return build1 (GOTO_EXPR, void_type_node, *label_p);
2415 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2416 This also involves building a label to jump to and communicating it to
2417 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2419 static enum gimplify_status
2420 gimplify_exit_expr (tree *expr_p)
2422 tree cond = TREE_OPERAND (*expr_p, 0);
2423 tree expr;
2425 expr = build_and_jump (&gimplify_ctxp->exit_label);
2426 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2427 *expr_p = expr;
2429 return GS_OK;
2432 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2433 different from its canonical type, wrap the whole thing inside a
2434 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2435 type.
2437 The canonical type of a COMPONENT_REF is the type of the field being
2438 referenced--unless the field is a bit-field which can be read directly
2439 in a smaller mode, in which case the canonical type is the
2440 sign-appropriate type corresponding to that mode. */
2442 static void
2443 canonicalize_component_ref (tree *expr_p)
2445 tree expr = *expr_p;
2446 tree type;
2448 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2450 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2451 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2452 else
2453 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2455 /* One could argue that all the stuff below is not necessary for
2456 the non-bitfield case and declare it a FE error if type
2457 adjustment would be needed. */
2458 if (TREE_TYPE (expr) != type)
2460 #ifdef ENABLE_TYPES_CHECKING
2461 tree old_type = TREE_TYPE (expr);
2462 #endif
2463 int type_quals;
2465 /* We need to preserve qualifiers and propagate them from
2466 operand 0. */
2467 type_quals = TYPE_QUALS (type)
2468 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2469 if (TYPE_QUALS (type) != type_quals)
2470 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2472 /* Set the type of the COMPONENT_REF to the underlying type. */
2473 TREE_TYPE (expr) = type;
2475 #ifdef ENABLE_TYPES_CHECKING
2476 /* It is now a FE error, if the conversion from the canonical
2477 type to the original expression type is not useless. */
2478 gcc_assert (useless_type_conversion_p (old_type, type));
2479 #endif
2483 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2484 to foo, embed that change in the ADDR_EXPR by converting
2485 T array[U];
2486 (T *)&array
2488 &array[L]
2489 where L is the lower bound. For simplicity, only do this for constant
2490 lower bound.
2491 The constraint is that the type of &array[L] is trivially convertible
2492 to T *. */
2494 static void
2495 canonicalize_addr_expr (tree *expr_p)
2497 tree expr = *expr_p;
2498 tree addr_expr = TREE_OPERAND (expr, 0);
2499 tree datype, ddatype, pddatype;
2501 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2502 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2503 || TREE_CODE (addr_expr) != ADDR_EXPR)
2504 return;
2506 /* The addr_expr type should be a pointer to an array. */
2507 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2508 if (TREE_CODE (datype) != ARRAY_TYPE)
2509 return;
2511 /* The pointer to element type shall be trivially convertible to
2512 the expression pointer type. */
2513 ddatype = TREE_TYPE (datype);
2514 pddatype = build_pointer_type (ddatype);
2515 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2516 pddatype))
2517 return;
2519 /* The lower bound and element sizes must be constant. */
2520 if (!TYPE_SIZE_UNIT (ddatype)
2521 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2522 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2523 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2524 return;
2526 /* All checks succeeded. Build a new node to merge the cast. */
2527 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2528 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2529 NULL_TREE, NULL_TREE);
2530 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2532 /* We can have stripped a required restrict qualifier above. */
2533 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2534 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2537 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2538 underneath as appropriate. */
2540 static enum gimplify_status
2541 gimplify_conversion (tree *expr_p)
2543 location_t loc = EXPR_LOCATION (*expr_p);
2544 gcc_assert (CONVERT_EXPR_P (*expr_p));
2546 /* Then strip away all but the outermost conversion. */
2547 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2549 /* And remove the outermost conversion if it's useless. */
2550 if (tree_ssa_useless_type_conversion (*expr_p))
2551 *expr_p = TREE_OPERAND (*expr_p, 0);
2553 /* If we still have a conversion at the toplevel,
2554 then canonicalize some constructs. */
2555 if (CONVERT_EXPR_P (*expr_p))
2557 tree sub = TREE_OPERAND (*expr_p, 0);
2559 /* If a NOP conversion is changing the type of a COMPONENT_REF
2560 expression, then canonicalize its type now in order to expose more
2561 redundant conversions. */
2562 if (TREE_CODE (sub) == COMPONENT_REF)
2563 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2565 /* If a NOP conversion is changing a pointer to array of foo
2566 to a pointer to foo, embed that change in the ADDR_EXPR. */
2567 else if (TREE_CODE (sub) == ADDR_EXPR)
2568 canonicalize_addr_expr (expr_p);
2571 /* If we have a conversion to a non-register type force the
2572 use of a VIEW_CONVERT_EXPR instead. */
2573 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2574 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2575 TREE_OPERAND (*expr_p, 0));
2577 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2578 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2579 TREE_SET_CODE (*expr_p, NOP_EXPR);
2581 return GS_OK;
2584 /* Nonlocal VLAs seen in the current function. */
2585 static hash_set<tree> *nonlocal_vlas;
2587 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2588 static tree nonlocal_vla_vars;
2590 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2591 DECL_VALUE_EXPR, and it's worth re-examining things. */
2593 static enum gimplify_status
2594 gimplify_var_or_parm_decl (tree *expr_p)
2596 tree decl = *expr_p;
2598 /* ??? If this is a local variable, and it has not been seen in any
2599 outer BIND_EXPR, then it's probably the result of a duplicate
2600 declaration, for which we've already issued an error. It would
2601 be really nice if the front end wouldn't leak these at all.
2602 Currently the only known culprit is C++ destructors, as seen
2603 in g++.old-deja/g++.jason/binding.C. */
2604 if (VAR_P (decl)
2605 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2606 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2607 && decl_function_context (decl) == current_function_decl)
2609 gcc_assert (seen_error ());
2610 return GS_ERROR;
2613 /* When within an OMP context, notice uses of variables. */
2614 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2615 return GS_ALL_DONE;
2617 /* If the decl is an alias for another expression, substitute it now. */
2618 if (DECL_HAS_VALUE_EXPR_P (decl))
2620 tree value_expr = DECL_VALUE_EXPR (decl);
2622 /* For referenced nonlocal VLAs add a decl for debugging purposes
2623 to the current function. */
2624 if (VAR_P (decl)
2625 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2626 && nonlocal_vlas != NULL
2627 && TREE_CODE (value_expr) == INDIRECT_REF
2628 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2629 && decl_function_context (decl) != current_function_decl)
2631 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2632 while (ctx
2633 && (ctx->region_type == ORT_WORKSHARE
2634 || ctx->region_type == ORT_SIMD
2635 || ctx->region_type == ORT_ACC))
2636 ctx = ctx->outer_context;
2637 if (!ctx && !nonlocal_vlas->add (decl))
2639 tree copy = copy_node (decl);
2641 lang_hooks.dup_lang_specific_decl (copy);
2642 SET_DECL_RTL (copy, 0);
2643 TREE_USED (copy) = 1;
2644 DECL_CHAIN (copy) = nonlocal_vla_vars;
2645 nonlocal_vla_vars = copy;
2646 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2647 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2651 *expr_p = unshare_expr (value_expr);
2652 return GS_OK;
2655 return GS_ALL_DONE;
2658 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2660 static void
2661 recalculate_side_effects (tree t)
2663 enum tree_code code = TREE_CODE (t);
2664 int len = TREE_OPERAND_LENGTH (t);
2665 int i;
2667 switch (TREE_CODE_CLASS (code))
2669 case tcc_expression:
2670 switch (code)
2672 case INIT_EXPR:
2673 case MODIFY_EXPR:
2674 case VA_ARG_EXPR:
2675 case PREDECREMENT_EXPR:
2676 case PREINCREMENT_EXPR:
2677 case POSTDECREMENT_EXPR:
2678 case POSTINCREMENT_EXPR:
2679 /* All of these have side-effects, no matter what their
2680 operands are. */
2681 return;
2683 default:
2684 break;
2686 /* Fall through. */
2688 case tcc_comparison: /* a comparison expression */
2689 case tcc_unary: /* a unary arithmetic expression */
2690 case tcc_binary: /* a binary arithmetic expression */
2691 case tcc_reference: /* a reference */
2692 case tcc_vl_exp: /* a function call */
2693 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2694 for (i = 0; i < len; ++i)
2696 tree op = TREE_OPERAND (t, i);
2697 if (op && TREE_SIDE_EFFECTS (op))
2698 TREE_SIDE_EFFECTS (t) = 1;
2700 break;
2702 case tcc_constant:
2703 /* No side-effects. */
2704 return;
2706 default:
2707 gcc_unreachable ();
2711 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2712 node *EXPR_P.
2714 compound_lval
2715 : min_lval '[' val ']'
2716 | min_lval '.' ID
2717 | compound_lval '[' val ']'
2718 | compound_lval '.' ID
2720 This is not part of the original SIMPLE definition, which separates
2721 array and member references, but it seems reasonable to handle them
2722 together. Also, this way we don't run into problems with union
2723 aliasing; gcc requires that for accesses through a union to alias, the
2724 union reference must be explicit, which was not always the case when we
2725 were splitting up array and member refs.
2727 PRE_P points to the sequence where side effects that must happen before
2728 *EXPR_P should be stored.
2730 POST_P points to the sequence where side effects that must happen after
2731 *EXPR_P should be stored. */
2733 static enum gimplify_status
2734 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2735 fallback_t fallback)
2737 tree *p;
2738 enum gimplify_status ret = GS_ALL_DONE, tret;
2739 int i;
2740 location_t loc = EXPR_LOCATION (*expr_p);
2741 tree expr = *expr_p;
2743 /* Create a stack of the subexpressions so later we can walk them in
2744 order from inner to outer. */
2745 auto_vec<tree, 10> expr_stack;
2747 /* We can handle anything that get_inner_reference can deal with. */
2748 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2750 restart:
2751 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2752 if (TREE_CODE (*p) == INDIRECT_REF)
2753 *p = fold_indirect_ref_loc (loc, *p);
2755 if (handled_component_p (*p))
2757 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2758 additional COMPONENT_REFs. */
2759 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2760 && gimplify_var_or_parm_decl (p) == GS_OK)
2761 goto restart;
2762 else
2763 break;
2765 expr_stack.safe_push (*p);
2768 gcc_assert (expr_stack.length ());
2770 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2771 walked through and P points to the innermost expression.
2773 Java requires that we elaborated nodes in source order. That
2774 means we must gimplify the inner expression followed by each of
2775 the indices, in order. But we can't gimplify the inner
2776 expression until we deal with any variable bounds, sizes, or
2777 positions in order to deal with PLACEHOLDER_EXPRs.
2779 So we do this in three steps. First we deal with the annotations
2780 for any variables in the components, then we gimplify the base,
2781 then we gimplify any indices, from left to right. */
2782 for (i = expr_stack.length () - 1; i >= 0; i--)
2784 tree t = expr_stack[i];
2786 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2788 /* Gimplify the low bound and element type size and put them into
2789 the ARRAY_REF. If these values are set, they have already been
2790 gimplified. */
2791 if (TREE_OPERAND (t, 2) == NULL_TREE)
2793 tree low = unshare_expr (array_ref_low_bound (t));
2794 if (!is_gimple_min_invariant (low))
2796 TREE_OPERAND (t, 2) = low;
2797 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2798 post_p, is_gimple_reg,
2799 fb_rvalue);
2800 ret = MIN (ret, tret);
2803 else
2805 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2806 is_gimple_reg, fb_rvalue);
2807 ret = MIN (ret, tret);
2810 if (TREE_OPERAND (t, 3) == NULL_TREE)
2812 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2813 tree elmt_size = unshare_expr (array_ref_element_size (t));
2814 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2816 /* Divide the element size by the alignment of the element
2817 type (above). */
2818 elmt_size
2819 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2821 if (!is_gimple_min_invariant (elmt_size))
2823 TREE_OPERAND (t, 3) = elmt_size;
2824 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2825 post_p, is_gimple_reg,
2826 fb_rvalue);
2827 ret = MIN (ret, tret);
2830 else
2832 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2833 is_gimple_reg, fb_rvalue);
2834 ret = MIN (ret, tret);
2837 else if (TREE_CODE (t) == COMPONENT_REF)
2839 /* Set the field offset into T and gimplify it. */
2840 if (TREE_OPERAND (t, 2) == NULL_TREE)
2842 tree offset = unshare_expr (component_ref_field_offset (t));
2843 tree field = TREE_OPERAND (t, 1);
2844 tree factor
2845 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2847 /* Divide the offset by its alignment. */
2848 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2850 if (!is_gimple_min_invariant (offset))
2852 TREE_OPERAND (t, 2) = offset;
2853 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2854 post_p, is_gimple_reg,
2855 fb_rvalue);
2856 ret = MIN (ret, tret);
2859 else
2861 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2862 is_gimple_reg, fb_rvalue);
2863 ret = MIN (ret, tret);
2868 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2869 so as to match the min_lval predicate. Failure to do so may result
2870 in the creation of large aggregate temporaries. */
2871 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2872 fallback | fb_lvalue);
2873 ret = MIN (ret, tret);
2875 /* And finally, the indices and operands of ARRAY_REF. During this
2876 loop we also remove any useless conversions. */
2877 for (; expr_stack.length () > 0; )
2879 tree t = expr_stack.pop ();
2881 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2883 /* Gimplify the dimension. */
2884 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2886 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2887 is_gimple_val, fb_rvalue);
2888 ret = MIN (ret, tret);
2892 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2894 /* The innermost expression P may have originally had
2895 TREE_SIDE_EFFECTS set which would have caused all the outer
2896 expressions in *EXPR_P leading to P to also have had
2897 TREE_SIDE_EFFECTS set. */
2898 recalculate_side_effects (t);
2901 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2902 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2904 canonicalize_component_ref (expr_p);
2907 expr_stack.release ();
2909 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2911 return ret;
2914 /* Gimplify the self modifying expression pointed to by EXPR_P
2915 (++, --, +=, -=).
2917 PRE_P points to the list where side effects that must happen before
2918 *EXPR_P should be stored.
2920 POST_P points to the list where side effects that must happen after
2921 *EXPR_P should be stored.
2923 WANT_VALUE is nonzero iff we want to use the value of this expression
2924 in another expression.
2926 ARITH_TYPE is the type the computation should be performed in. */
2928 enum gimplify_status
2929 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2930 bool want_value, tree arith_type)
2932 enum tree_code code;
2933 tree lhs, lvalue, rhs, t1;
2934 gimple_seq post = NULL, *orig_post_p = post_p;
2935 bool postfix;
2936 enum tree_code arith_code;
2937 enum gimplify_status ret;
2938 location_t loc = EXPR_LOCATION (*expr_p);
2940 code = TREE_CODE (*expr_p);
2942 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2943 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2945 /* Prefix or postfix? */
2946 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2947 /* Faster to treat as prefix if result is not used. */
2948 postfix = want_value;
2949 else
2950 postfix = false;
2952 /* For postfix, make sure the inner expression's post side effects
2953 are executed after side effects from this expression. */
2954 if (postfix)
2955 post_p = &post;
2957 /* Add or subtract? */
2958 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2959 arith_code = PLUS_EXPR;
2960 else
2961 arith_code = MINUS_EXPR;
2963 /* Gimplify the LHS into a GIMPLE lvalue. */
2964 lvalue = TREE_OPERAND (*expr_p, 0);
2965 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2966 if (ret == GS_ERROR)
2967 return ret;
2969 /* Extract the operands to the arithmetic operation. */
2970 lhs = lvalue;
2971 rhs = TREE_OPERAND (*expr_p, 1);
2973 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2974 that as the result value and in the postqueue operation. */
2975 if (postfix)
2977 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2978 if (ret == GS_ERROR)
2979 return ret;
2981 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2984 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2985 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2987 rhs = convert_to_ptrofftype_loc (loc, rhs);
2988 if (arith_code == MINUS_EXPR)
2989 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2990 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2992 else
2993 t1 = fold_convert (TREE_TYPE (*expr_p),
2994 fold_build2 (arith_code, arith_type,
2995 fold_convert (arith_type, lhs),
2996 fold_convert (arith_type, rhs)));
2998 if (postfix)
3000 gimplify_assign (lvalue, t1, pre_p);
3001 gimplify_seq_add_seq (orig_post_p, post);
3002 *expr_p = lhs;
3003 return GS_ALL_DONE;
3005 else
3007 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3008 return GS_OK;
3012 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3014 static void
3015 maybe_with_size_expr (tree *expr_p)
3017 tree expr = *expr_p;
3018 tree type = TREE_TYPE (expr);
3019 tree size;
3021 /* If we've already wrapped this or the type is error_mark_node, we can't do
3022 anything. */
3023 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3024 || type == error_mark_node)
3025 return;
3027 /* If the size isn't known or is a constant, we have nothing to do. */
3028 size = TYPE_SIZE_UNIT (type);
3029 if (!size || TREE_CODE (size) == INTEGER_CST)
3030 return;
3032 /* Otherwise, make a WITH_SIZE_EXPR. */
3033 size = unshare_expr (size);
3034 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3035 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3038 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3039 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3040 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3041 gimplified to an SSA name. */
3043 enum gimplify_status
3044 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3045 bool allow_ssa)
3047 bool (*test) (tree);
3048 fallback_t fb;
3050 /* In general, we allow lvalues for function arguments to avoid
3051 extra overhead of copying large aggregates out of even larger
3052 aggregates into temporaries only to copy the temporaries to
3053 the argument list. Make optimizers happy by pulling out to
3054 temporaries those types that fit in registers. */
3055 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3056 test = is_gimple_val, fb = fb_rvalue;
3057 else
3059 test = is_gimple_lvalue, fb = fb_either;
3060 /* Also strip a TARGET_EXPR that would force an extra copy. */
3061 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3063 tree init = TARGET_EXPR_INITIAL (*arg_p);
3064 if (init
3065 && !VOID_TYPE_P (TREE_TYPE (init)))
3066 *arg_p = init;
3070 /* If this is a variable sized type, we must remember the size. */
3071 maybe_with_size_expr (arg_p);
3073 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3074 /* Make sure arguments have the same location as the function call
3075 itself. */
3076 protected_set_expr_location (*arg_p, call_location);
3078 /* There is a sequence point before a function call. Side effects in
3079 the argument list must occur before the actual call. So, when
3080 gimplifying arguments, force gimplify_expr to use an internal
3081 post queue which is then appended to the end of PRE_P. */
3082 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3085 /* Don't fold inside offloading or taskreg regions: it can break code by
3086 adding decl references that weren't in the source. We'll do it during
3087 omplower pass instead. */
3089 static bool
3090 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3092 struct gimplify_omp_ctx *ctx;
3093 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3094 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3095 return false;
3096 return fold_stmt (gsi);
3099 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3100 with the pointer to the proper cilk frame. */
3101 static void
3102 gimplify_cilk_detach (gimple_seq *pre_p)
3104 tree frame = cfun->cilk_frame_decl;
3105 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3106 frame);
3107 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3108 ptrf);
3109 gimplify_seq_add_stmt(pre_p, detach);
3112 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3113 WANT_VALUE is true if the result of the call is desired. */
3115 static enum gimplify_status
3116 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3118 tree fndecl, parms, p, fnptrtype;
3119 enum gimplify_status ret;
3120 int i, nargs;
3121 gcall *call;
3122 bool builtin_va_start_p = false;
3123 location_t loc = EXPR_LOCATION (*expr_p);
3125 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3127 /* For reliable diagnostics during inlining, it is necessary that
3128 every call_expr be annotated with file and line. */
3129 if (! EXPR_HAS_LOCATION (*expr_p))
3130 SET_EXPR_LOCATION (*expr_p, input_location);
3132 /* Gimplify internal functions created in the FEs. */
3133 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3135 if (want_value)
3136 return GS_ALL_DONE;
3138 nargs = call_expr_nargs (*expr_p);
3139 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3140 auto_vec<tree> vargs (nargs);
3142 for (i = 0; i < nargs; i++)
3144 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3145 EXPR_LOCATION (*expr_p));
3146 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3149 if (EXPR_CILK_SPAWN (*expr_p))
3150 gimplify_cilk_detach (pre_p);
3151 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
3152 gimplify_seq_add_stmt (pre_p, call);
3153 return GS_ALL_DONE;
3156 /* This may be a call to a builtin function.
3158 Builtin function calls may be transformed into different
3159 (and more efficient) builtin function calls under certain
3160 circumstances. Unfortunately, gimplification can muck things
3161 up enough that the builtin expanders are not aware that certain
3162 transformations are still valid.
3164 So we attempt transformation/gimplification of the call before
3165 we gimplify the CALL_EXPR. At this time we do not manage to
3166 transform all calls in the same manner as the expanders do, but
3167 we do transform most of them. */
3168 fndecl = get_callee_fndecl (*expr_p);
3169 if (fndecl
3170 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3171 switch (DECL_FUNCTION_CODE (fndecl))
3173 case BUILT_IN_ALLOCA:
3174 case BUILT_IN_ALLOCA_WITH_ALIGN:
3175 /* If the call has been built for a variable-sized object, then we
3176 want to restore the stack level when the enclosing BIND_EXPR is
3177 exited to reclaim the allocated space; otherwise, we precisely
3178 need to do the opposite and preserve the latest stack level. */
3179 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3180 gimplify_ctxp->save_stack = true;
3181 else
3182 gimplify_ctxp->keep_stack = true;
3183 break;
3185 case BUILT_IN_VA_START:
3187 builtin_va_start_p = TRUE;
3188 if (call_expr_nargs (*expr_p) < 2)
3190 error ("too few arguments to function %<va_start%>");
3191 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3192 return GS_OK;
3195 if (fold_builtin_next_arg (*expr_p, true))
3197 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3198 return GS_OK;
3200 break;
3203 default:
3206 if (fndecl && DECL_BUILT_IN (fndecl))
3208 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3209 if (new_tree && new_tree != *expr_p)
3211 /* There was a transformation of this call which computes the
3212 same value, but in a more efficient way. Return and try
3213 again. */
3214 *expr_p = new_tree;
3215 return GS_OK;
3219 /* Remember the original function pointer type. */
3220 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3222 /* There is a sequence point before the call, so any side effects in
3223 the calling expression must occur before the actual call. Force
3224 gimplify_expr to use an internal post queue. */
3225 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3226 is_gimple_call_addr, fb_rvalue);
3228 nargs = call_expr_nargs (*expr_p);
3230 /* Get argument types for verification. */
3231 fndecl = get_callee_fndecl (*expr_p);
3232 parms = NULL_TREE;
3233 if (fndecl)
3234 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3235 else
3236 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3238 if (fndecl && DECL_ARGUMENTS (fndecl))
3239 p = DECL_ARGUMENTS (fndecl);
3240 else if (parms)
3241 p = parms;
3242 else
3243 p = NULL_TREE;
3244 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3247 /* If the last argument is __builtin_va_arg_pack () and it is not
3248 passed as a named argument, decrease the number of CALL_EXPR
3249 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3250 if (!p
3251 && i < nargs
3252 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3254 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3255 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3257 if (last_arg_fndecl
3258 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3259 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3260 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3262 tree call = *expr_p;
3264 --nargs;
3265 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3266 CALL_EXPR_FN (call),
3267 nargs, CALL_EXPR_ARGP (call));
3269 /* Copy all CALL_EXPR flags, location and block, except
3270 CALL_EXPR_VA_ARG_PACK flag. */
3271 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3272 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3273 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3274 = CALL_EXPR_RETURN_SLOT_OPT (call);
3275 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3276 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3278 /* Set CALL_EXPR_VA_ARG_PACK. */
3279 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3283 /* If the call returns twice then after building the CFG the call
3284 argument computations will no longer dominate the call because
3285 we add an abnormal incoming edge to the call. So do not use SSA
3286 vars there. */
3287 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3289 /* Gimplify the function arguments. */
3290 if (nargs > 0)
3292 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3293 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3294 PUSH_ARGS_REVERSED ? i-- : i++)
3296 enum gimplify_status t;
3298 /* Avoid gimplifying the second argument to va_start, which needs to
3299 be the plain PARM_DECL. */
3300 if ((i != 1) || !builtin_va_start_p)
3302 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3303 EXPR_LOCATION (*expr_p), ! returns_twice);
3305 if (t == GS_ERROR)
3306 ret = GS_ERROR;
3311 /* Gimplify the static chain. */
3312 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3314 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3315 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3316 else
3318 enum gimplify_status t;
3319 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3320 EXPR_LOCATION (*expr_p), ! returns_twice);
3321 if (t == GS_ERROR)
3322 ret = GS_ERROR;
3326 /* Verify the function result. */
3327 if (want_value && fndecl
3328 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3330 error_at (loc, "using result of function returning %<void%>");
3331 ret = GS_ERROR;
3334 /* Try this again in case gimplification exposed something. */
3335 if (ret != GS_ERROR)
3337 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3339 if (new_tree && new_tree != *expr_p)
3341 /* There was a transformation of this call which computes the
3342 same value, but in a more efficient way. Return and try
3343 again. */
3344 *expr_p = new_tree;
3345 return GS_OK;
3348 else
3350 *expr_p = error_mark_node;
3351 return GS_ERROR;
3354 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3355 decl. This allows us to eliminate redundant or useless
3356 calls to "const" functions. */
3357 if (TREE_CODE (*expr_p) == CALL_EXPR)
3359 int flags = call_expr_flags (*expr_p);
3360 if (flags & (ECF_CONST | ECF_PURE)
3361 /* An infinite loop is considered a side effect. */
3362 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3363 TREE_SIDE_EFFECTS (*expr_p) = 0;
3366 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3367 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3368 form and delegate the creation of a GIMPLE_CALL to
3369 gimplify_modify_expr. This is always possible because when
3370 WANT_VALUE is true, the caller wants the result of this call into
3371 a temporary, which means that we will emit an INIT_EXPR in
3372 internal_get_tmp_var which will then be handled by
3373 gimplify_modify_expr. */
3374 if (!want_value)
3376 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3377 have to do is replicate it as a GIMPLE_CALL tuple. */
3378 gimple_stmt_iterator gsi;
3379 call = gimple_build_call_from_tree (*expr_p);
3380 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
3381 notice_special_calls (call);
3382 if (EXPR_CILK_SPAWN (*expr_p))
3383 gimplify_cilk_detach (pre_p);
3384 gimplify_seq_add_stmt (pre_p, call);
3385 gsi = gsi_last (*pre_p);
3386 maybe_fold_stmt (&gsi);
3387 *expr_p = NULL_TREE;
3389 else
3390 /* Remember the original function type. */
3391 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3392 CALL_EXPR_FN (*expr_p));
3394 return ret;
3397 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3398 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3400 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3401 condition is true or false, respectively. If null, we should generate
3402 our own to skip over the evaluation of this specific expression.
3404 LOCUS is the source location of the COND_EXPR.
3406 This function is the tree equivalent of do_jump.
3408 shortcut_cond_r should only be called by shortcut_cond_expr. */
3410 static tree
3411 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3412 location_t locus)
3414 tree local_label = NULL_TREE;
3415 tree t, expr = NULL;
3417 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3418 retain the shortcut semantics. Just insert the gotos here;
3419 shortcut_cond_expr will append the real blocks later. */
3420 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3422 location_t new_locus;
3424 /* Turn if (a && b) into
3426 if (a); else goto no;
3427 if (b) goto yes; else goto no;
3428 (no:) */
3430 if (false_label_p == NULL)
3431 false_label_p = &local_label;
3433 /* Keep the original source location on the first 'if'. */
3434 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3435 append_to_statement_list (t, &expr);
3437 /* Set the source location of the && on the second 'if'. */
3438 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3439 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3440 new_locus);
3441 append_to_statement_list (t, &expr);
3443 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3445 location_t new_locus;
3447 /* Turn if (a || b) into
3449 if (a) goto yes;
3450 if (b) goto yes; else goto no;
3451 (yes:) */
3453 if (true_label_p == NULL)
3454 true_label_p = &local_label;
3456 /* Keep the original source location on the first 'if'. */
3457 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3458 append_to_statement_list (t, &expr);
3460 /* Set the source location of the || on the second 'if'. */
3461 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3462 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3463 new_locus);
3464 append_to_statement_list (t, &expr);
3466 else if (TREE_CODE (pred) == COND_EXPR
3467 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3468 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3470 location_t new_locus;
3472 /* As long as we're messing with gotos, turn if (a ? b : c) into
3473 if (a)
3474 if (b) goto yes; else goto no;
3475 else
3476 if (c) goto yes; else goto no;
3478 Don't do this if one of the arms has void type, which can happen
3479 in C++ when the arm is throw. */
3481 /* Keep the original source location on the first 'if'. Set the source
3482 location of the ? on the second 'if'. */
3483 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3484 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3485 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3486 false_label_p, locus),
3487 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3488 false_label_p, new_locus));
3490 else
3492 expr = build3 (COND_EXPR, void_type_node, pred,
3493 build_and_jump (true_label_p),
3494 build_and_jump (false_label_p));
3495 SET_EXPR_LOCATION (expr, locus);
3498 if (local_label)
3500 t = build1 (LABEL_EXPR, void_type_node, local_label);
3501 append_to_statement_list (t, &expr);
3504 return expr;
3507 /* Given a conditional expression EXPR with short-circuit boolean
3508 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3509 predicate apart into the equivalent sequence of conditionals. */
3511 static tree
3512 shortcut_cond_expr (tree expr)
3514 tree pred = TREE_OPERAND (expr, 0);
3515 tree then_ = TREE_OPERAND (expr, 1);
3516 tree else_ = TREE_OPERAND (expr, 2);
3517 tree true_label, false_label, end_label, t;
3518 tree *true_label_p;
3519 tree *false_label_p;
3520 bool emit_end, emit_false, jump_over_else;
3521 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3522 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3524 /* First do simple transformations. */
3525 if (!else_se)
3527 /* If there is no 'else', turn
3528 if (a && b) then c
3529 into
3530 if (a) if (b) then c. */
3531 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3533 /* Keep the original source location on the first 'if'. */
3534 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3535 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3536 /* Set the source location of the && on the second 'if'. */
3537 if (EXPR_HAS_LOCATION (pred))
3538 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3539 then_ = shortcut_cond_expr (expr);
3540 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3541 pred = TREE_OPERAND (pred, 0);
3542 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3543 SET_EXPR_LOCATION (expr, locus);
3547 if (!then_se)
3549 /* If there is no 'then', turn
3550 if (a || b); else d
3551 into
3552 if (a); else if (b); else d. */
3553 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3555 /* Keep the original source location on the first 'if'. */
3556 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3557 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3558 /* Set the source location of the || on the second 'if'. */
3559 if (EXPR_HAS_LOCATION (pred))
3560 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3561 else_ = shortcut_cond_expr (expr);
3562 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3563 pred = TREE_OPERAND (pred, 0);
3564 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3565 SET_EXPR_LOCATION (expr, locus);
3569 /* If we're done, great. */
3570 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3571 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3572 return expr;
3574 /* Otherwise we need to mess with gotos. Change
3575 if (a) c; else d;
3577 if (a); else goto no;
3578 c; goto end;
3579 no: d; end:
3580 and recursively gimplify the condition. */
3582 true_label = false_label = end_label = NULL_TREE;
3584 /* If our arms just jump somewhere, hijack those labels so we don't
3585 generate jumps to jumps. */
3587 if (then_
3588 && TREE_CODE (then_) == GOTO_EXPR
3589 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3591 true_label = GOTO_DESTINATION (then_);
3592 then_ = NULL;
3593 then_se = false;
3596 if (else_
3597 && TREE_CODE (else_) == GOTO_EXPR
3598 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3600 false_label = GOTO_DESTINATION (else_);
3601 else_ = NULL;
3602 else_se = false;
3605 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3606 if (true_label)
3607 true_label_p = &true_label;
3608 else
3609 true_label_p = NULL;
3611 /* The 'else' branch also needs a label if it contains interesting code. */
3612 if (false_label || else_se)
3613 false_label_p = &false_label;
3614 else
3615 false_label_p = NULL;
3617 /* If there was nothing else in our arms, just forward the label(s). */
3618 if (!then_se && !else_se)
3619 return shortcut_cond_r (pred, true_label_p, false_label_p,
3620 EXPR_LOC_OR_LOC (expr, input_location));
3622 /* If our last subexpression already has a terminal label, reuse it. */
3623 if (else_se)
3624 t = expr_last (else_);
3625 else if (then_se)
3626 t = expr_last (then_);
3627 else
3628 t = NULL;
3629 if (t && TREE_CODE (t) == LABEL_EXPR)
3630 end_label = LABEL_EXPR_LABEL (t);
3632 /* If we don't care about jumping to the 'else' branch, jump to the end
3633 if the condition is false. */
3634 if (!false_label_p)
3635 false_label_p = &end_label;
3637 /* We only want to emit these labels if we aren't hijacking them. */
3638 emit_end = (end_label == NULL_TREE);
3639 emit_false = (false_label == NULL_TREE);
3641 /* We only emit the jump over the else clause if we have to--if the
3642 then clause may fall through. Otherwise we can wind up with a
3643 useless jump and a useless label at the end of gimplified code,
3644 which will cause us to think that this conditional as a whole
3645 falls through even if it doesn't. If we then inline a function
3646 which ends with such a condition, that can cause us to issue an
3647 inappropriate warning about control reaching the end of a
3648 non-void function. */
3649 jump_over_else = block_may_fallthru (then_);
3651 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3652 EXPR_LOC_OR_LOC (expr, input_location));
3654 expr = NULL;
3655 append_to_statement_list (pred, &expr);
3657 append_to_statement_list (then_, &expr);
3658 if (else_se)
3660 if (jump_over_else)
3662 tree last = expr_last (expr);
3663 t = build_and_jump (&end_label);
3664 if (EXPR_HAS_LOCATION (last))
3665 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3666 append_to_statement_list (t, &expr);
3668 if (emit_false)
3670 t = build1 (LABEL_EXPR, void_type_node, false_label);
3671 append_to_statement_list (t, &expr);
3673 append_to_statement_list (else_, &expr);
3675 if (emit_end && end_label)
3677 t = build1 (LABEL_EXPR, void_type_node, end_label);
3678 append_to_statement_list (t, &expr);
3681 return expr;
3684 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3686 tree
3687 gimple_boolify (tree expr)
3689 tree type = TREE_TYPE (expr);
3690 location_t loc = EXPR_LOCATION (expr);
3692 if (TREE_CODE (expr) == NE_EXPR
3693 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3694 && integer_zerop (TREE_OPERAND (expr, 1)))
3696 tree call = TREE_OPERAND (expr, 0);
3697 tree fn = get_callee_fndecl (call);
3699 /* For __builtin_expect ((long) (x), y) recurse into x as well
3700 if x is truth_value_p. */
3701 if (fn
3702 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3703 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3704 && call_expr_nargs (call) == 2)
3706 tree arg = CALL_EXPR_ARG (call, 0);
3707 if (arg)
3709 if (TREE_CODE (arg) == NOP_EXPR
3710 && TREE_TYPE (arg) == TREE_TYPE (call))
3711 arg = TREE_OPERAND (arg, 0);
3712 if (truth_value_p (TREE_CODE (arg)))
3714 arg = gimple_boolify (arg);
3715 CALL_EXPR_ARG (call, 0)
3716 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3722 switch (TREE_CODE (expr))
3724 case TRUTH_AND_EXPR:
3725 case TRUTH_OR_EXPR:
3726 case TRUTH_XOR_EXPR:
3727 case TRUTH_ANDIF_EXPR:
3728 case TRUTH_ORIF_EXPR:
3729 /* Also boolify the arguments of truth exprs. */
3730 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3731 /* FALLTHRU */
3733 case TRUTH_NOT_EXPR:
3734 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3736 /* These expressions always produce boolean results. */
3737 if (TREE_CODE (type) != BOOLEAN_TYPE)
3738 TREE_TYPE (expr) = boolean_type_node;
3739 return expr;
3741 case ANNOTATE_EXPR:
3742 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3744 case annot_expr_ivdep_kind:
3745 case annot_expr_no_vector_kind:
3746 case annot_expr_vector_kind:
3747 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3748 if (TREE_CODE (type) != BOOLEAN_TYPE)
3749 TREE_TYPE (expr) = boolean_type_node;
3750 return expr;
3751 default:
3752 gcc_unreachable ();
3755 default:
3756 if (COMPARISON_CLASS_P (expr))
3758 /* There expressions always prduce boolean results. */
3759 if (TREE_CODE (type) != BOOLEAN_TYPE)
3760 TREE_TYPE (expr) = boolean_type_node;
3761 return expr;
3763 /* Other expressions that get here must have boolean values, but
3764 might need to be converted to the appropriate mode. */
3765 if (TREE_CODE (type) == BOOLEAN_TYPE)
3766 return expr;
3767 return fold_convert_loc (loc, boolean_type_node, expr);
3771 /* Given a conditional expression *EXPR_P without side effects, gimplify
3772 its operands. New statements are inserted to PRE_P. */
3774 static enum gimplify_status
3775 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3777 tree expr = *expr_p, cond;
3778 enum gimplify_status ret, tret;
3779 enum tree_code code;
3781 cond = gimple_boolify (COND_EXPR_COND (expr));
3783 /* We need to handle && and || specially, as their gimplification
3784 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3785 code = TREE_CODE (cond);
3786 if (code == TRUTH_ANDIF_EXPR)
3787 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3788 else if (code == TRUTH_ORIF_EXPR)
3789 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3790 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3791 COND_EXPR_COND (*expr_p) = cond;
3793 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3794 is_gimple_val, fb_rvalue);
3795 ret = MIN (ret, tret);
3796 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3797 is_gimple_val, fb_rvalue);
3799 return MIN (ret, tret);
3802 /* Return true if evaluating EXPR could trap.
3803 EXPR is GENERIC, while tree_could_trap_p can be called
3804 only on GIMPLE. */
3806 static bool
3807 generic_expr_could_trap_p (tree expr)
3809 unsigned i, n;
3811 if (!expr || is_gimple_val (expr))
3812 return false;
3814 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3815 return true;
3817 n = TREE_OPERAND_LENGTH (expr);
3818 for (i = 0; i < n; i++)
3819 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3820 return true;
3822 return false;
3825 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3826 into
3828 if (p) if (p)
3829 t1 = a; a;
3830 else or else
3831 t1 = b; b;
3834 The second form is used when *EXPR_P is of type void.
3836 PRE_P points to the list where side effects that must happen before
3837 *EXPR_P should be stored. */
3839 static enum gimplify_status
3840 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3842 tree expr = *expr_p;
3843 tree type = TREE_TYPE (expr);
3844 location_t loc = EXPR_LOCATION (expr);
3845 tree tmp, arm1, arm2;
3846 enum gimplify_status ret;
3847 tree label_true, label_false, label_cont;
3848 bool have_then_clause_p, have_else_clause_p;
3849 gcond *cond_stmt;
3850 enum tree_code pred_code;
3851 gimple_seq seq = NULL;
3853 /* If this COND_EXPR has a value, copy the values into a temporary within
3854 the arms. */
3855 if (!VOID_TYPE_P (type))
3857 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3858 tree result;
3860 /* If either an rvalue is ok or we do not require an lvalue, create the
3861 temporary. But we cannot do that if the type is addressable. */
3862 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3863 && !TREE_ADDRESSABLE (type))
3865 if (gimplify_ctxp->allow_rhs_cond_expr
3866 /* If either branch has side effects or could trap, it can't be
3867 evaluated unconditionally. */
3868 && !TREE_SIDE_EFFECTS (then_)
3869 && !generic_expr_could_trap_p (then_)
3870 && !TREE_SIDE_EFFECTS (else_)
3871 && !generic_expr_could_trap_p (else_))
3872 return gimplify_pure_cond_expr (expr_p, pre_p);
3874 tmp = create_tmp_var (type, "iftmp");
3875 result = tmp;
3878 /* Otherwise, only create and copy references to the values. */
3879 else
3881 type = build_pointer_type (type);
3883 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3884 then_ = build_fold_addr_expr_loc (loc, then_);
3886 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3887 else_ = build_fold_addr_expr_loc (loc, else_);
3889 expr
3890 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3892 tmp = create_tmp_var (type, "iftmp");
3893 result = build_simple_mem_ref_loc (loc, tmp);
3896 /* Build the new then clause, `tmp = then_;'. But don't build the
3897 assignment if the value is void; in C++ it can be if it's a throw. */
3898 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3899 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3901 /* Similarly, build the new else clause, `tmp = else_;'. */
3902 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3903 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3905 TREE_TYPE (expr) = void_type_node;
3906 recalculate_side_effects (expr);
3908 /* Move the COND_EXPR to the prequeue. */
3909 gimplify_stmt (&expr, pre_p);
3911 *expr_p = result;
3912 return GS_ALL_DONE;
3915 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3916 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3917 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3918 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3920 /* Make sure the condition has BOOLEAN_TYPE. */
3921 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3923 /* Break apart && and || conditions. */
3924 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3925 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3927 expr = shortcut_cond_expr (expr);
3929 if (expr != *expr_p)
3931 *expr_p = expr;
3933 /* We can't rely on gimplify_expr to re-gimplify the expanded
3934 form properly, as cleanups might cause the target labels to be
3935 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3936 set up a conditional context. */
3937 gimple_push_condition ();
3938 gimplify_stmt (expr_p, &seq);
3939 gimple_pop_condition (pre_p);
3940 gimple_seq_add_seq (pre_p, seq);
3942 return GS_ALL_DONE;
3946 /* Now do the normal gimplification. */
3948 /* Gimplify condition. */
3949 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3950 fb_rvalue);
3951 if (ret == GS_ERROR)
3952 return GS_ERROR;
3953 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3955 gimple_push_condition ();
3957 have_then_clause_p = have_else_clause_p = false;
3958 if (TREE_OPERAND (expr, 1) != NULL
3959 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3960 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3961 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3962 == current_function_decl)
3963 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3964 have different locations, otherwise we end up with incorrect
3965 location information on the branches. */
3966 && (optimize
3967 || !EXPR_HAS_LOCATION (expr)
3968 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3969 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3971 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3972 have_then_clause_p = true;
3974 else
3975 label_true = create_artificial_label (UNKNOWN_LOCATION);
3976 if (TREE_OPERAND (expr, 2) != NULL
3977 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3978 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3979 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3980 == current_function_decl)
3981 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3982 have different locations, otherwise we end up with incorrect
3983 location information on the branches. */
3984 && (optimize
3985 || !EXPR_HAS_LOCATION (expr)
3986 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3987 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3989 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3990 have_else_clause_p = true;
3992 else
3993 label_false = create_artificial_label (UNKNOWN_LOCATION);
3995 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3996 &arm2);
3997 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3998 label_false);
3999 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4000 gimplify_seq_add_stmt (&seq, cond_stmt);
4001 gimple_stmt_iterator gsi = gsi_last (seq);
4002 maybe_fold_stmt (&gsi);
4004 label_cont = NULL_TREE;
4005 if (!have_then_clause_p)
4007 /* For if (...) {} else { code; } put label_true after
4008 the else block. */
4009 if (TREE_OPERAND (expr, 1) == NULL_TREE
4010 && !have_else_clause_p
4011 && TREE_OPERAND (expr, 2) != NULL_TREE)
4012 label_cont = label_true;
4013 else
4015 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4016 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4017 /* For if (...) { code; } else {} or
4018 if (...) { code; } else goto label; or
4019 if (...) { code; return; } else { ... }
4020 label_cont isn't needed. */
4021 if (!have_else_clause_p
4022 && TREE_OPERAND (expr, 2) != NULL_TREE
4023 && gimple_seq_may_fallthru (seq))
4025 gimple *g;
4026 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4028 g = gimple_build_goto (label_cont);
4030 /* GIMPLE_COND's are very low level; they have embedded
4031 gotos. This particular embedded goto should not be marked
4032 with the location of the original COND_EXPR, as it would
4033 correspond to the COND_EXPR's condition, not the ELSE or the
4034 THEN arms. To avoid marking it with the wrong location, flag
4035 it as "no location". */
4036 gimple_set_do_not_emit_location (g);
4038 gimplify_seq_add_stmt (&seq, g);
4042 if (!have_else_clause_p)
4044 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4045 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4047 if (label_cont)
4048 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4050 gimple_pop_condition (pre_p);
4051 gimple_seq_add_seq (pre_p, seq);
4053 if (ret == GS_ERROR)
4054 ; /* Do nothing. */
4055 else if (have_then_clause_p || have_else_clause_p)
4056 ret = GS_ALL_DONE;
4057 else
4059 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4060 expr = TREE_OPERAND (expr, 0);
4061 gimplify_stmt (&expr, pre_p);
4064 *expr_p = NULL;
4065 return ret;
4068 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4069 to be marked addressable.
4071 We cannot rely on such an expression being directly markable if a temporary
4072 has been created by the gimplification. In this case, we create another
4073 temporary and initialize it with a copy, which will become a store after we
4074 mark it addressable. This can happen if the front-end passed us something
4075 that it could not mark addressable yet, like a Fortran pass-by-reference
4076 parameter (int) floatvar. */
4078 static void
4079 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4081 while (handled_component_p (*expr_p))
4082 expr_p = &TREE_OPERAND (*expr_p, 0);
4083 if (is_gimple_reg (*expr_p))
4085 /* Do not allow an SSA name as the temporary. */
4086 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4087 DECL_GIMPLE_REG_P (var) = 0;
4088 *expr_p = var;
4092 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4093 a call to __builtin_memcpy. */
4095 static enum gimplify_status
4096 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4097 gimple_seq *seq_p)
4099 tree t, to, to_ptr, from, from_ptr;
4100 gcall *gs;
4101 location_t loc = EXPR_LOCATION (*expr_p);
4103 to = TREE_OPERAND (*expr_p, 0);
4104 from = TREE_OPERAND (*expr_p, 1);
4106 /* Mark the RHS addressable. Beware that it may not be possible to do so
4107 directly if a temporary has been created by the gimplification. */
4108 prepare_gimple_addressable (&from, seq_p);
4110 mark_addressable (from);
4111 from_ptr = build_fold_addr_expr_loc (loc, from);
4112 gimplify_arg (&from_ptr, seq_p, loc);
4114 mark_addressable (to);
4115 to_ptr = build_fold_addr_expr_loc (loc, to);
4116 gimplify_arg (&to_ptr, seq_p, loc);
4118 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4120 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4122 if (want_value)
4124 /* tmp = memcpy() */
4125 t = create_tmp_var (TREE_TYPE (to_ptr));
4126 gimple_call_set_lhs (gs, t);
4127 gimplify_seq_add_stmt (seq_p, gs);
4129 *expr_p = build_simple_mem_ref (t);
4130 return GS_ALL_DONE;
4133 gimplify_seq_add_stmt (seq_p, gs);
4134 *expr_p = NULL;
4135 return GS_ALL_DONE;
4138 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4139 a call to __builtin_memset. In this case we know that the RHS is
4140 a CONSTRUCTOR with an empty element list. */
4142 static enum gimplify_status
4143 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4144 gimple_seq *seq_p)
4146 tree t, from, to, to_ptr;
4147 gcall *gs;
4148 location_t loc = EXPR_LOCATION (*expr_p);
4150 /* Assert our assumptions, to abort instead of producing wrong code
4151 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4152 not be immediately exposed. */
4153 from = TREE_OPERAND (*expr_p, 1);
4154 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4155 from = TREE_OPERAND (from, 0);
4157 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4158 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4160 /* Now proceed. */
4161 to = TREE_OPERAND (*expr_p, 0);
4163 to_ptr = build_fold_addr_expr_loc (loc, to);
4164 gimplify_arg (&to_ptr, seq_p, loc);
4165 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4167 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4169 if (want_value)
4171 /* tmp = memset() */
4172 t = create_tmp_var (TREE_TYPE (to_ptr));
4173 gimple_call_set_lhs (gs, t);
4174 gimplify_seq_add_stmt (seq_p, gs);
4176 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4177 return GS_ALL_DONE;
4180 gimplify_seq_add_stmt (seq_p, gs);
4181 *expr_p = NULL;
4182 return GS_ALL_DONE;
4185 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4186 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4187 assignment. Return non-null if we detect a potential overlap. */
4189 struct gimplify_init_ctor_preeval_data
4191 /* The base decl of the lhs object. May be NULL, in which case we
4192 have to assume the lhs is indirect. */
4193 tree lhs_base_decl;
4195 /* The alias set of the lhs object. */
4196 alias_set_type lhs_alias_set;
4199 static tree
4200 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4202 struct gimplify_init_ctor_preeval_data *data
4203 = (struct gimplify_init_ctor_preeval_data *) xdata;
4204 tree t = *tp;
4206 /* If we find the base object, obviously we have overlap. */
4207 if (data->lhs_base_decl == t)
4208 return t;
4210 /* If the constructor component is indirect, determine if we have a
4211 potential overlap with the lhs. The only bits of information we
4212 have to go on at this point are addressability and alias sets. */
4213 if ((INDIRECT_REF_P (t)
4214 || TREE_CODE (t) == MEM_REF)
4215 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4216 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4217 return t;
4219 /* If the constructor component is a call, determine if it can hide a
4220 potential overlap with the lhs through an INDIRECT_REF like above.
4221 ??? Ugh - this is completely broken. In fact this whole analysis
4222 doesn't look conservative. */
4223 if (TREE_CODE (t) == CALL_EXPR)
4225 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4227 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4228 if (POINTER_TYPE_P (TREE_VALUE (type))
4229 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4230 && alias_sets_conflict_p (data->lhs_alias_set,
4231 get_alias_set
4232 (TREE_TYPE (TREE_VALUE (type)))))
4233 return t;
4236 if (IS_TYPE_OR_DECL_P (t))
4237 *walk_subtrees = 0;
4238 return NULL;
4241 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4242 force values that overlap with the lhs (as described by *DATA)
4243 into temporaries. */
4245 static void
4246 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4247 struct gimplify_init_ctor_preeval_data *data)
4249 enum gimplify_status one;
4251 /* If the value is constant, then there's nothing to pre-evaluate. */
4252 if (TREE_CONSTANT (*expr_p))
4254 /* Ensure it does not have side effects, it might contain a reference to
4255 the object we're initializing. */
4256 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4257 return;
4260 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4261 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4262 return;
4264 /* Recurse for nested constructors. */
4265 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4267 unsigned HOST_WIDE_INT ix;
4268 constructor_elt *ce;
4269 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4271 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4272 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4274 return;
4277 /* If this is a variable sized type, we must remember the size. */
4278 maybe_with_size_expr (expr_p);
4280 /* Gimplify the constructor element to something appropriate for the rhs
4281 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4282 the gimplifier will consider this a store to memory. Doing this
4283 gimplification now means that we won't have to deal with complicated
4284 language-specific trees, nor trees like SAVE_EXPR that can induce
4285 exponential search behavior. */
4286 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4287 if (one == GS_ERROR)
4289 *expr_p = NULL;
4290 return;
4293 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4294 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4295 always be true for all scalars, since is_gimple_mem_rhs insists on a
4296 temporary variable for them. */
4297 if (DECL_P (*expr_p))
4298 return;
4300 /* If this is of variable size, we have no choice but to assume it doesn't
4301 overlap since we can't make a temporary for it. */
4302 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4303 return;
4305 /* Otherwise, we must search for overlap ... */
4306 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4307 return;
4309 /* ... and if found, force the value into a temporary. */
4310 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4313 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4314 a RANGE_EXPR in a CONSTRUCTOR for an array.
4316 var = lower;
4317 loop_entry:
4318 object[var] = value;
4319 if (var == upper)
4320 goto loop_exit;
4321 var = var + 1;
4322 goto loop_entry;
4323 loop_exit:
4325 We increment var _after_ the loop exit check because we might otherwise
4326 fail if upper == TYPE_MAX_VALUE (type for upper).
4328 Note that we never have to deal with SAVE_EXPRs here, because this has
4329 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4331 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4332 gimple_seq *, bool);
4334 static void
4335 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4336 tree value, tree array_elt_type,
4337 gimple_seq *pre_p, bool cleared)
4339 tree loop_entry_label, loop_exit_label, fall_thru_label;
4340 tree var, var_type, cref, tmp;
4342 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4343 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4344 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4346 /* Create and initialize the index variable. */
4347 var_type = TREE_TYPE (upper);
4348 var = create_tmp_var (var_type);
4349 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4351 /* Add the loop entry label. */
4352 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4354 /* Build the reference. */
4355 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4356 var, NULL_TREE, NULL_TREE);
4358 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4359 the store. Otherwise just assign value to the reference. */
4361 if (TREE_CODE (value) == CONSTRUCTOR)
4362 /* NB we might have to call ourself recursively through
4363 gimplify_init_ctor_eval if the value is a constructor. */
4364 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4365 pre_p, cleared);
4366 else
4367 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4369 /* We exit the loop when the index var is equal to the upper bound. */
4370 gimplify_seq_add_stmt (pre_p,
4371 gimple_build_cond (EQ_EXPR, var, upper,
4372 loop_exit_label, fall_thru_label));
4374 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4376 /* Otherwise, increment the index var... */
4377 tmp = build2 (PLUS_EXPR, var_type, var,
4378 fold_convert (var_type, integer_one_node));
4379 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4381 /* ...and jump back to the loop entry. */
4382 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4384 /* Add the loop exit label. */
4385 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4388 /* Return true if FDECL is accessing a field that is zero sized. */
4390 static bool
4391 zero_sized_field_decl (const_tree fdecl)
4393 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4394 && integer_zerop (DECL_SIZE (fdecl)))
4395 return true;
4396 return false;
4399 /* Return true if TYPE is zero sized. */
4401 static bool
4402 zero_sized_type (const_tree type)
4404 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4405 && integer_zerop (TYPE_SIZE (type)))
4406 return true;
4407 return false;
4410 /* A subroutine of gimplify_init_constructor. Generate individual
4411 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4412 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4413 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4414 zeroed first. */
4416 static void
4417 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4418 gimple_seq *pre_p, bool cleared)
4420 tree array_elt_type = NULL;
4421 unsigned HOST_WIDE_INT ix;
4422 tree purpose, value;
4424 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4425 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4427 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4429 tree cref;
4431 /* NULL values are created above for gimplification errors. */
4432 if (value == NULL)
4433 continue;
4435 if (cleared && initializer_zerop (value))
4436 continue;
4438 /* ??? Here's to hoping the front end fills in all of the indices,
4439 so we don't have to figure out what's missing ourselves. */
4440 gcc_assert (purpose);
4442 /* Skip zero-sized fields, unless value has side-effects. This can
4443 happen with calls to functions returning a zero-sized type, which
4444 we shouldn't discard. As a number of downstream passes don't
4445 expect sets of zero-sized fields, we rely on the gimplification of
4446 the MODIFY_EXPR we make below to drop the assignment statement. */
4447 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4448 continue;
4450 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4451 whole range. */
4452 if (TREE_CODE (purpose) == RANGE_EXPR)
4454 tree lower = TREE_OPERAND (purpose, 0);
4455 tree upper = TREE_OPERAND (purpose, 1);
4457 /* If the lower bound is equal to upper, just treat it as if
4458 upper was the index. */
4459 if (simple_cst_equal (lower, upper))
4460 purpose = upper;
4461 else
4463 gimplify_init_ctor_eval_range (object, lower, upper, value,
4464 array_elt_type, pre_p, cleared);
4465 continue;
4469 if (array_elt_type)
4471 /* Do not use bitsizetype for ARRAY_REF indices. */
4472 if (TYPE_DOMAIN (TREE_TYPE (object)))
4473 purpose
4474 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4475 purpose);
4476 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4477 purpose, NULL_TREE, NULL_TREE);
4479 else
4481 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4482 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4483 unshare_expr (object), purpose, NULL_TREE);
4486 if (TREE_CODE (value) == CONSTRUCTOR
4487 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4488 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4489 pre_p, cleared);
4490 else
4492 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4493 gimplify_and_add (init, pre_p);
4494 ggc_free (init);
4499 /* Return the appropriate RHS predicate for this LHS. */
4501 gimple_predicate
4502 rhs_predicate_for (tree lhs)
4504 if (is_gimple_reg (lhs))
4505 return is_gimple_reg_rhs_or_call;
4506 else
4507 return is_gimple_mem_rhs_or_call;
4510 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4511 before the LHS has been gimplified. */
4513 static gimple_predicate
4514 initial_rhs_predicate_for (tree lhs)
4516 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4517 return is_gimple_reg_rhs_or_call;
4518 else
4519 return is_gimple_mem_rhs_or_call;
4522 /* Gimplify a C99 compound literal expression. This just means adding
4523 the DECL_EXPR before the current statement and using its anonymous
4524 decl instead. */
4526 static enum gimplify_status
4527 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4528 bool (*gimple_test_f) (tree),
4529 fallback_t fallback)
4531 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4532 tree decl = DECL_EXPR_DECL (decl_s);
4533 tree init = DECL_INITIAL (decl);
4534 /* Mark the decl as addressable if the compound literal
4535 expression is addressable now, otherwise it is marked too late
4536 after we gimplify the initialization expression. */
4537 if (TREE_ADDRESSABLE (*expr_p))
4538 TREE_ADDRESSABLE (decl) = 1;
4539 /* Otherwise, if we don't need an lvalue and have a literal directly
4540 substitute it. Check if it matches the gimple predicate, as
4541 otherwise we'd generate a new temporary, and we can as well just
4542 use the decl we already have. */
4543 else if (!TREE_ADDRESSABLE (decl)
4544 && init
4545 && (fallback & fb_lvalue) == 0
4546 && gimple_test_f (init))
4548 *expr_p = init;
4549 return GS_OK;
4552 /* Preliminarily mark non-addressed complex variables as eligible
4553 for promotion to gimple registers. We'll transform their uses
4554 as we find them. */
4555 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4556 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4557 && !TREE_THIS_VOLATILE (decl)
4558 && !needs_to_live_in_memory (decl))
4559 DECL_GIMPLE_REG_P (decl) = 1;
4561 /* If the decl is not addressable, then it is being used in some
4562 expression or on the right hand side of a statement, and it can
4563 be put into a readonly data section. */
4564 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4565 TREE_READONLY (decl) = 1;
4567 /* This decl isn't mentioned in the enclosing block, so add it to the
4568 list of temps. FIXME it seems a bit of a kludge to say that
4569 anonymous artificial vars aren't pushed, but everything else is. */
4570 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4571 gimple_add_tmp_var (decl);
4573 gimplify_and_add (decl_s, pre_p);
4574 *expr_p = decl;
4575 return GS_OK;
4578 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4579 return a new CONSTRUCTOR if something changed. */
4581 static tree
4582 optimize_compound_literals_in_ctor (tree orig_ctor)
4584 tree ctor = orig_ctor;
4585 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4586 unsigned int idx, num = vec_safe_length (elts);
4588 for (idx = 0; idx < num; idx++)
4590 tree value = (*elts)[idx].value;
4591 tree newval = value;
4592 if (TREE_CODE (value) == CONSTRUCTOR)
4593 newval = optimize_compound_literals_in_ctor (value);
4594 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4596 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4597 tree decl = DECL_EXPR_DECL (decl_s);
4598 tree init = DECL_INITIAL (decl);
4600 if (!TREE_ADDRESSABLE (value)
4601 && !TREE_ADDRESSABLE (decl)
4602 && init
4603 && TREE_CODE (init) == CONSTRUCTOR)
4604 newval = optimize_compound_literals_in_ctor (init);
4606 if (newval == value)
4607 continue;
4609 if (ctor == orig_ctor)
4611 ctor = copy_node (orig_ctor);
4612 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4613 elts = CONSTRUCTOR_ELTS (ctor);
4615 (*elts)[idx].value = newval;
4617 return ctor;
4620 /* A subroutine of gimplify_modify_expr. Break out elements of a
4621 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4623 Note that we still need to clear any elements that don't have explicit
4624 initializers, so if not all elements are initialized we keep the
4625 original MODIFY_EXPR, we just remove all of the constructor elements.
4627 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4628 GS_ERROR if we would have to create a temporary when gimplifying
4629 this constructor. Otherwise, return GS_OK.
4631 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4633 static enum gimplify_status
4634 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4635 bool want_value, bool notify_temp_creation)
4637 tree object, ctor, type;
4638 enum gimplify_status ret;
4639 vec<constructor_elt, va_gc> *elts;
4641 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4643 if (!notify_temp_creation)
4645 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4646 is_gimple_lvalue, fb_lvalue);
4647 if (ret == GS_ERROR)
4648 return ret;
4651 object = TREE_OPERAND (*expr_p, 0);
4652 ctor = TREE_OPERAND (*expr_p, 1)
4653 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4654 type = TREE_TYPE (ctor);
4655 elts = CONSTRUCTOR_ELTS (ctor);
4656 ret = GS_ALL_DONE;
4658 switch (TREE_CODE (type))
4660 case RECORD_TYPE:
4661 case UNION_TYPE:
4662 case QUAL_UNION_TYPE:
4663 case ARRAY_TYPE:
4665 struct gimplify_init_ctor_preeval_data preeval_data;
4666 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4667 bool cleared, complete_p, valid_const_initializer;
4669 /* Aggregate types must lower constructors to initialization of
4670 individual elements. The exception is that a CONSTRUCTOR node
4671 with no elements indicates zero-initialization of the whole. */
4672 if (vec_safe_is_empty (elts))
4674 if (notify_temp_creation)
4675 return GS_OK;
4676 break;
4679 /* Fetch information about the constructor to direct later processing.
4680 We might want to make static versions of it in various cases, and
4681 can only do so if it known to be a valid constant initializer. */
4682 valid_const_initializer
4683 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4684 &num_ctor_elements, &complete_p);
4686 /* If a const aggregate variable is being initialized, then it
4687 should never be a lose to promote the variable to be static. */
4688 if (valid_const_initializer
4689 && num_nonzero_elements > 1
4690 && TREE_READONLY (object)
4691 && VAR_P (object)
4692 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4694 if (notify_temp_creation)
4695 return GS_ERROR;
4696 DECL_INITIAL (object) = ctor;
4697 TREE_STATIC (object) = 1;
4698 if (!DECL_NAME (object))
4699 DECL_NAME (object) = create_tmp_var_name ("C");
4700 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4702 /* ??? C++ doesn't automatically append a .<number> to the
4703 assembler name, and even when it does, it looks at FE private
4704 data structures to figure out what that number should be,
4705 which are not set for this variable. I suppose this is
4706 important for local statics for inline functions, which aren't
4707 "local" in the object file sense. So in order to get a unique
4708 TU-local symbol, we must invoke the lhd version now. */
4709 lhd_set_decl_assembler_name (object);
4711 *expr_p = NULL_TREE;
4712 break;
4715 /* If there are "lots" of initialized elements, even discounting
4716 those that are not address constants (and thus *must* be
4717 computed at runtime), then partition the constructor into
4718 constant and non-constant parts. Block copy the constant
4719 parts in, then generate code for the non-constant parts. */
4720 /* TODO. There's code in cp/typeck.c to do this. */
4722 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4723 /* store_constructor will ignore the clearing of variable-sized
4724 objects. Initializers for such objects must explicitly set
4725 every field that needs to be set. */
4726 cleared = false;
4727 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4728 /* If the constructor isn't complete, clear the whole object
4729 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4731 ??? This ought not to be needed. For any element not present
4732 in the initializer, we should simply set them to zero. Except
4733 we'd need to *find* the elements that are not present, and that
4734 requires trickery to avoid quadratic compile-time behavior in
4735 large cases or excessive memory use in small cases. */
4736 cleared = true;
4737 else if (num_ctor_elements - num_nonzero_elements
4738 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4739 && num_nonzero_elements < num_ctor_elements / 4)
4740 /* If there are "lots" of zeros, it's more efficient to clear
4741 the memory and then set the nonzero elements. */
4742 cleared = true;
4743 else
4744 cleared = false;
4746 /* If there are "lots" of initialized elements, and all of them
4747 are valid address constants, then the entire initializer can
4748 be dropped to memory, and then memcpy'd out. Don't do this
4749 for sparse arrays, though, as it's more efficient to follow
4750 the standard CONSTRUCTOR behavior of memset followed by
4751 individual element initialization. Also don't do this for small
4752 all-zero initializers (which aren't big enough to merit
4753 clearing), and don't try to make bitwise copies of
4754 TREE_ADDRESSABLE types.
4756 We cannot apply such transformation when compiling chkp static
4757 initializer because creation of initializer image in the memory
4758 will require static initialization of bounds for it. It should
4759 result in another gimplification of similar initializer and we
4760 may fall into infinite loop. */
4761 if (valid_const_initializer
4762 && !(cleared || num_nonzero_elements == 0)
4763 && !TREE_ADDRESSABLE (type)
4764 && (!current_function_decl
4765 || !lookup_attribute ("chkp ctor",
4766 DECL_ATTRIBUTES (current_function_decl))))
4768 HOST_WIDE_INT size = int_size_in_bytes (type);
4769 unsigned int align;
4771 /* ??? We can still get unbounded array types, at least
4772 from the C++ front end. This seems wrong, but attempt
4773 to work around it for now. */
4774 if (size < 0)
4776 size = int_size_in_bytes (TREE_TYPE (object));
4777 if (size >= 0)
4778 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4781 /* Find the maximum alignment we can assume for the object. */
4782 /* ??? Make use of DECL_OFFSET_ALIGN. */
4783 if (DECL_P (object))
4784 align = DECL_ALIGN (object);
4785 else
4786 align = TYPE_ALIGN (type);
4788 /* Do a block move either if the size is so small as to make
4789 each individual move a sub-unit move on average, or if it
4790 is so large as to make individual moves inefficient. */
4791 if (size > 0
4792 && num_nonzero_elements > 1
4793 && (size < num_nonzero_elements
4794 || !can_move_by_pieces (size, align)))
4796 if (notify_temp_creation)
4797 return GS_ERROR;
4799 walk_tree (&ctor, force_labels_r, NULL, NULL);
4800 ctor = tree_output_constant_def (ctor);
4801 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4802 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4803 TREE_OPERAND (*expr_p, 1) = ctor;
4805 /* This is no longer an assignment of a CONSTRUCTOR, but
4806 we still may have processing to do on the LHS. So
4807 pretend we didn't do anything here to let that happen. */
4808 return GS_UNHANDLED;
4812 /* If the target is volatile, we have non-zero elements and more than
4813 one field to assign, initialize the target from a temporary. */
4814 if (TREE_THIS_VOLATILE (object)
4815 && !TREE_ADDRESSABLE (type)
4816 && num_nonzero_elements > 0
4817 && vec_safe_length (elts) > 1)
4819 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4820 TREE_OPERAND (*expr_p, 0) = temp;
4821 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4822 *expr_p,
4823 build2 (MODIFY_EXPR, void_type_node,
4824 object, temp));
4825 return GS_OK;
4828 if (notify_temp_creation)
4829 return GS_OK;
4831 /* If there are nonzero elements and if needed, pre-evaluate to capture
4832 elements overlapping with the lhs into temporaries. We must do this
4833 before clearing to fetch the values before they are zeroed-out. */
4834 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4836 preeval_data.lhs_base_decl = get_base_address (object);
4837 if (!DECL_P (preeval_data.lhs_base_decl))
4838 preeval_data.lhs_base_decl = NULL;
4839 preeval_data.lhs_alias_set = get_alias_set (object);
4841 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4842 pre_p, post_p, &preeval_data);
4845 bool ctor_has_side_effects_p
4846 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4848 if (cleared)
4850 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4851 Note that we still have to gimplify, in order to handle the
4852 case of variable sized types. Avoid shared tree structures. */
4853 CONSTRUCTOR_ELTS (ctor) = NULL;
4854 TREE_SIDE_EFFECTS (ctor) = 0;
4855 object = unshare_expr (object);
4856 gimplify_stmt (expr_p, pre_p);
4859 /* If we have not block cleared the object, or if there are nonzero
4860 elements in the constructor, or if the constructor has side effects,
4861 add assignments to the individual scalar fields of the object. */
4862 if (!cleared
4863 || num_nonzero_elements > 0
4864 || ctor_has_side_effects_p)
4865 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4867 *expr_p = NULL_TREE;
4869 break;
4871 case COMPLEX_TYPE:
4873 tree r, i;
4875 if (notify_temp_creation)
4876 return GS_OK;
4878 /* Extract the real and imaginary parts out of the ctor. */
4879 gcc_assert (elts->length () == 2);
4880 r = (*elts)[0].value;
4881 i = (*elts)[1].value;
4882 if (r == NULL || i == NULL)
4884 tree zero = build_zero_cst (TREE_TYPE (type));
4885 if (r == NULL)
4886 r = zero;
4887 if (i == NULL)
4888 i = zero;
4891 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4892 represent creation of a complex value. */
4893 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4895 ctor = build_complex (type, r, i);
4896 TREE_OPERAND (*expr_p, 1) = ctor;
4898 else
4900 ctor = build2 (COMPLEX_EXPR, type, r, i);
4901 TREE_OPERAND (*expr_p, 1) = ctor;
4902 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4903 pre_p,
4904 post_p,
4905 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4906 fb_rvalue);
4909 break;
4911 case VECTOR_TYPE:
4913 unsigned HOST_WIDE_INT ix;
4914 constructor_elt *ce;
4916 if (notify_temp_creation)
4917 return GS_OK;
4919 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4920 if (TREE_CONSTANT (ctor))
4922 bool constant_p = true;
4923 tree value;
4925 /* Even when ctor is constant, it might contain non-*_CST
4926 elements, such as addresses or trapping values like
4927 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4928 in VECTOR_CST nodes. */
4929 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4930 if (!CONSTANT_CLASS_P (value))
4932 constant_p = false;
4933 break;
4936 if (constant_p)
4938 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4939 break;
4942 TREE_CONSTANT (ctor) = 0;
4945 /* Vector types use CONSTRUCTOR all the way through gimple
4946 compilation as a general initializer. */
4947 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4949 enum gimplify_status tret;
4950 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4951 fb_rvalue);
4952 if (tret == GS_ERROR)
4953 ret = GS_ERROR;
4954 else if (TREE_STATIC (ctor)
4955 && !initializer_constant_valid_p (ce->value,
4956 TREE_TYPE (ce->value)))
4957 TREE_STATIC (ctor) = 0;
4959 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4960 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4962 break;
4964 default:
4965 /* So how did we get a CONSTRUCTOR for a scalar type? */
4966 gcc_unreachable ();
4969 if (ret == GS_ERROR)
4970 return GS_ERROR;
4971 /* If we have gimplified both sides of the initializer but have
4972 not emitted an assignment, do so now. */
4973 if (*expr_p)
4975 tree lhs = TREE_OPERAND (*expr_p, 0);
4976 tree rhs = TREE_OPERAND (*expr_p, 1);
4977 if (want_value && object == lhs)
4978 lhs = unshare_expr (lhs);
4979 gassign *init = gimple_build_assign (lhs, rhs);
4980 gimplify_seq_add_stmt (pre_p, init);
4982 if (want_value)
4984 *expr_p = object;
4985 return GS_OK;
4987 else
4989 *expr_p = NULL;
4990 return GS_ALL_DONE;
4994 /* Given a pointer value OP0, return a simplified version of an
4995 indirection through OP0, or NULL_TREE if no simplification is
4996 possible. This may only be applied to a rhs of an expression.
4997 Note that the resulting type may be different from the type pointed
4998 to in the sense that it is still compatible from the langhooks
4999 point of view. */
5001 static tree
5002 gimple_fold_indirect_ref_rhs (tree t)
5004 return gimple_fold_indirect_ref (t);
5007 /* Subroutine of gimplify_modify_expr to do simplifications of
5008 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5009 something changes. */
5011 static enum gimplify_status
5012 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5013 gimple_seq *pre_p, gimple_seq *post_p,
5014 bool want_value)
5016 enum gimplify_status ret = GS_UNHANDLED;
5017 bool changed;
5021 changed = false;
5022 switch (TREE_CODE (*from_p))
5024 case VAR_DECL:
5025 /* If we're assigning from a read-only variable initialized with
5026 a constructor, do the direct assignment from the constructor,
5027 but only if neither source nor target are volatile since this
5028 latter assignment might end up being done on a per-field basis. */
5029 if (DECL_INITIAL (*from_p)
5030 && TREE_READONLY (*from_p)
5031 && !TREE_THIS_VOLATILE (*from_p)
5032 && !TREE_THIS_VOLATILE (*to_p)
5033 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5035 tree old_from = *from_p;
5036 enum gimplify_status subret;
5038 /* Move the constructor into the RHS. */
5039 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5041 /* Let's see if gimplify_init_constructor will need to put
5042 it in memory. */
5043 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5044 false, true);
5045 if (subret == GS_ERROR)
5047 /* If so, revert the change. */
5048 *from_p = old_from;
5050 else
5052 ret = GS_OK;
5053 changed = true;
5056 break;
5057 case INDIRECT_REF:
5059 /* If we have code like
5061 *(const A*)(A*)&x
5063 where the type of "x" is a (possibly cv-qualified variant
5064 of "A"), treat the entire expression as identical to "x".
5065 This kind of code arises in C++ when an object is bound
5066 to a const reference, and if "x" is a TARGET_EXPR we want
5067 to take advantage of the optimization below. */
5068 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5069 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5070 if (t)
5072 if (TREE_THIS_VOLATILE (t) != volatile_p)
5074 if (DECL_P (t))
5075 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5076 build_fold_addr_expr (t));
5077 if (REFERENCE_CLASS_P (t))
5078 TREE_THIS_VOLATILE (t) = volatile_p;
5080 *from_p = t;
5081 ret = GS_OK;
5082 changed = true;
5084 break;
5087 case TARGET_EXPR:
5089 /* If we are initializing something from a TARGET_EXPR, strip the
5090 TARGET_EXPR and initialize it directly, if possible. This can't
5091 be done if the initializer is void, since that implies that the
5092 temporary is set in some non-trivial way.
5094 ??? What about code that pulls out the temp and uses it
5095 elsewhere? I think that such code never uses the TARGET_EXPR as
5096 an initializer. If I'm wrong, we'll die because the temp won't
5097 have any RTL. In that case, I guess we'll need to replace
5098 references somehow. */
5099 tree init = TARGET_EXPR_INITIAL (*from_p);
5101 if (init
5102 && !VOID_TYPE_P (TREE_TYPE (init)))
5104 *from_p = init;
5105 ret = GS_OK;
5106 changed = true;
5109 break;
5111 case COMPOUND_EXPR:
5112 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5113 caught. */
5114 gimplify_compound_expr (from_p, pre_p, true);
5115 ret = GS_OK;
5116 changed = true;
5117 break;
5119 case CONSTRUCTOR:
5120 /* If we already made some changes, let the front end have a
5121 crack at this before we break it down. */
5122 if (ret != GS_UNHANDLED)
5123 break;
5124 /* If we're initializing from a CONSTRUCTOR, break this into
5125 individual MODIFY_EXPRs. */
5126 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5127 false);
5129 case COND_EXPR:
5130 /* If we're assigning to a non-register type, push the assignment
5131 down into the branches. This is mandatory for ADDRESSABLE types,
5132 since we cannot generate temporaries for such, but it saves a
5133 copy in other cases as well. */
5134 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5136 /* This code should mirror the code in gimplify_cond_expr. */
5137 enum tree_code code = TREE_CODE (*expr_p);
5138 tree cond = *from_p;
5139 tree result = *to_p;
5141 ret = gimplify_expr (&result, pre_p, post_p,
5142 is_gimple_lvalue, fb_lvalue);
5143 if (ret != GS_ERROR)
5144 ret = GS_OK;
5146 /* If we are going to write RESULT more than once, clear
5147 TREE_READONLY flag, otherwise we might incorrectly promote
5148 the variable to static const and initialize it at compile
5149 time in one of the branches. */
5150 if (VAR_P (result)
5151 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5152 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5153 TREE_READONLY (result) = 0;
5154 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5155 TREE_OPERAND (cond, 1)
5156 = build2 (code, void_type_node, result,
5157 TREE_OPERAND (cond, 1));
5158 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5159 TREE_OPERAND (cond, 2)
5160 = build2 (code, void_type_node, unshare_expr (result),
5161 TREE_OPERAND (cond, 2));
5163 TREE_TYPE (cond) = void_type_node;
5164 recalculate_side_effects (cond);
5166 if (want_value)
5168 gimplify_and_add (cond, pre_p);
5169 *expr_p = unshare_expr (result);
5171 else
5172 *expr_p = cond;
5173 return ret;
5175 break;
5177 case CALL_EXPR:
5178 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5179 return slot so that we don't generate a temporary. */
5180 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5181 && aggregate_value_p (*from_p, *from_p))
5183 bool use_target;
5185 if (!(rhs_predicate_for (*to_p))(*from_p))
5186 /* If we need a temporary, *to_p isn't accurate. */
5187 use_target = false;
5188 /* It's OK to use the return slot directly unless it's an NRV. */
5189 else if (TREE_CODE (*to_p) == RESULT_DECL
5190 && DECL_NAME (*to_p) == NULL_TREE
5191 && needs_to_live_in_memory (*to_p))
5192 use_target = true;
5193 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5194 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5195 /* Don't force regs into memory. */
5196 use_target = false;
5197 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5198 /* It's OK to use the target directly if it's being
5199 initialized. */
5200 use_target = true;
5201 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5202 != INTEGER_CST)
5203 /* Always use the target and thus RSO for variable-sized types.
5204 GIMPLE cannot deal with a variable-sized assignment
5205 embedded in a call statement. */
5206 use_target = true;
5207 else if (TREE_CODE (*to_p) != SSA_NAME
5208 && (!is_gimple_variable (*to_p)
5209 || needs_to_live_in_memory (*to_p)))
5210 /* Don't use the original target if it's already addressable;
5211 if its address escapes, and the called function uses the
5212 NRV optimization, a conforming program could see *to_p
5213 change before the called function returns; see c++/19317.
5214 When optimizing, the return_slot pass marks more functions
5215 as safe after we have escape info. */
5216 use_target = false;
5217 else
5218 use_target = true;
5220 if (use_target)
5222 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5223 mark_addressable (*to_p);
5226 break;
5228 case WITH_SIZE_EXPR:
5229 /* Likewise for calls that return an aggregate of non-constant size,
5230 since we would not be able to generate a temporary at all. */
5231 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5233 *from_p = TREE_OPERAND (*from_p, 0);
5234 /* We don't change ret in this case because the
5235 WITH_SIZE_EXPR might have been added in
5236 gimplify_modify_expr, so returning GS_OK would lead to an
5237 infinite loop. */
5238 changed = true;
5240 break;
5242 /* If we're initializing from a container, push the initialization
5243 inside it. */
5244 case CLEANUP_POINT_EXPR:
5245 case BIND_EXPR:
5246 case STATEMENT_LIST:
5248 tree wrap = *from_p;
5249 tree t;
5251 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5252 fb_lvalue);
5253 if (ret != GS_ERROR)
5254 ret = GS_OK;
5256 t = voidify_wrapper_expr (wrap, *expr_p);
5257 gcc_assert (t == *expr_p);
5259 if (want_value)
5261 gimplify_and_add (wrap, pre_p);
5262 *expr_p = unshare_expr (*to_p);
5264 else
5265 *expr_p = wrap;
5266 return GS_OK;
5269 case COMPOUND_LITERAL_EXPR:
5271 tree complit = TREE_OPERAND (*expr_p, 1);
5272 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5273 tree decl = DECL_EXPR_DECL (decl_s);
5274 tree init = DECL_INITIAL (decl);
5276 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5277 into struct T x = { 0, 1, 2 } if the address of the
5278 compound literal has never been taken. */
5279 if (!TREE_ADDRESSABLE (complit)
5280 && !TREE_ADDRESSABLE (decl)
5281 && init)
5283 *expr_p = copy_node (*expr_p);
5284 TREE_OPERAND (*expr_p, 1) = init;
5285 return GS_OK;
5289 default:
5290 break;
5293 while (changed);
5295 return ret;
5299 /* Return true if T looks like a valid GIMPLE statement. */
5301 static bool
5302 is_gimple_stmt (tree t)
5304 const enum tree_code code = TREE_CODE (t);
5306 switch (code)
5308 case NOP_EXPR:
5309 /* The only valid NOP_EXPR is the empty statement. */
5310 return IS_EMPTY_STMT (t);
5312 case BIND_EXPR:
5313 case COND_EXPR:
5314 /* These are only valid if they're void. */
5315 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5317 case SWITCH_EXPR:
5318 case GOTO_EXPR:
5319 case RETURN_EXPR:
5320 case LABEL_EXPR:
5321 case CASE_LABEL_EXPR:
5322 case TRY_CATCH_EXPR:
5323 case TRY_FINALLY_EXPR:
5324 case EH_FILTER_EXPR:
5325 case CATCH_EXPR:
5326 case ASM_EXPR:
5327 case STATEMENT_LIST:
5328 case OACC_PARALLEL:
5329 case OACC_KERNELS:
5330 case OACC_DATA:
5331 case OACC_HOST_DATA:
5332 case OACC_DECLARE:
5333 case OACC_UPDATE:
5334 case OACC_ENTER_DATA:
5335 case OACC_EXIT_DATA:
5336 case OACC_CACHE:
5337 case OMP_PARALLEL:
5338 case OMP_FOR:
5339 case OMP_SIMD:
5340 case CILK_SIMD:
5341 case OMP_DISTRIBUTE:
5342 case OACC_LOOP:
5343 case OMP_SECTIONS:
5344 case OMP_SECTION:
5345 case OMP_SINGLE:
5346 case OMP_MASTER:
5347 case OMP_TASKGROUP:
5348 case OMP_ORDERED:
5349 case OMP_CRITICAL:
5350 case OMP_TASK:
5351 case OMP_TARGET:
5352 case OMP_TARGET_DATA:
5353 case OMP_TARGET_UPDATE:
5354 case OMP_TARGET_ENTER_DATA:
5355 case OMP_TARGET_EXIT_DATA:
5356 case OMP_TASKLOOP:
5357 case OMP_TEAMS:
5358 /* These are always void. */
5359 return true;
5361 case CALL_EXPR:
5362 case MODIFY_EXPR:
5363 case PREDICT_EXPR:
5364 /* These are valid regardless of their type. */
5365 return true;
5367 default:
5368 return false;
5373 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5374 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5375 DECL_GIMPLE_REG_P set.
5377 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5378 other, unmodified part of the complex object just before the total store.
5379 As a consequence, if the object is still uninitialized, an undefined value
5380 will be loaded into a register, which may result in a spurious exception
5381 if the register is floating-point and the value happens to be a signaling
5382 NaN for example. Then the fully-fledged complex operations lowering pass
5383 followed by a DCE pass are necessary in order to fix things up. */
5385 static enum gimplify_status
5386 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5387 bool want_value)
5389 enum tree_code code, ocode;
5390 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5392 lhs = TREE_OPERAND (*expr_p, 0);
5393 rhs = TREE_OPERAND (*expr_p, 1);
5394 code = TREE_CODE (lhs);
5395 lhs = TREE_OPERAND (lhs, 0);
5397 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5398 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5399 TREE_NO_WARNING (other) = 1;
5400 other = get_formal_tmp_var (other, pre_p);
5402 realpart = code == REALPART_EXPR ? rhs : other;
5403 imagpart = code == REALPART_EXPR ? other : rhs;
5405 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5406 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5407 else
5408 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5410 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5411 *expr_p = (want_value) ? rhs : NULL_TREE;
5413 return GS_ALL_DONE;
5416 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5418 modify_expr
5419 : varname '=' rhs
5420 | '*' ID '=' rhs
5422 PRE_P points to the list where side effects that must happen before
5423 *EXPR_P should be stored.
5425 POST_P points to the list where side effects that must happen after
5426 *EXPR_P should be stored.
5428 WANT_VALUE is nonzero iff we want to use the value of this expression
5429 in another expression. */
5431 static enum gimplify_status
5432 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5433 bool want_value)
5435 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5436 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5437 enum gimplify_status ret = GS_UNHANDLED;
5438 gimple *assign;
5439 location_t loc = EXPR_LOCATION (*expr_p);
5440 gimple_stmt_iterator gsi;
5442 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5443 || TREE_CODE (*expr_p) == INIT_EXPR);
5445 /* Trying to simplify a clobber using normal logic doesn't work,
5446 so handle it here. */
5447 if (TREE_CLOBBER_P (*from_p))
5449 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5450 if (ret == GS_ERROR)
5451 return ret;
5452 gcc_assert (!want_value
5453 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5454 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5455 *expr_p = NULL;
5456 return GS_ALL_DONE;
5459 /* Insert pointer conversions required by the middle-end that are not
5460 required by the frontend. This fixes middle-end type checking for
5461 for example gcc.dg/redecl-6.c. */
5462 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5464 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5465 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5466 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5469 /* See if any simplifications can be done based on what the RHS is. */
5470 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5471 want_value);
5472 if (ret != GS_UNHANDLED)
5473 return ret;
5475 /* For zero sized types only gimplify the left hand side and right hand
5476 side as statements and throw away the assignment. Do this after
5477 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5478 types properly. */
5479 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
5481 gimplify_stmt (from_p, pre_p);
5482 gimplify_stmt (to_p, pre_p);
5483 *expr_p = NULL_TREE;
5484 return GS_ALL_DONE;
5487 /* If the value being copied is of variable width, compute the length
5488 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5489 before gimplifying any of the operands so that we can resolve any
5490 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5491 the size of the expression to be copied, not of the destination, so
5492 that is what we must do here. */
5493 maybe_with_size_expr (from_p);
5495 /* As a special case, we have to temporarily allow for assignments
5496 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5497 a toplevel statement, when gimplifying the GENERIC expression
5498 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5499 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5501 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5502 prevent gimplify_expr from trying to create a new temporary for
5503 foo's LHS, we tell it that it should only gimplify until it
5504 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5505 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5506 and all we need to do here is set 'a' to be its LHS. */
5508 /* Gimplify the RHS first for C++17 and bug 71104. */
5509 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5510 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5511 if (ret == GS_ERROR)
5512 return ret;
5514 /* Then gimplify the LHS. */
5515 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5516 twice we have to make sure to gimplify into non-SSA as otherwise
5517 the abnormal edge added later will make those defs not dominate
5518 their uses.
5519 ??? Technically this applies only to the registers used in the
5520 resulting non-register *TO_P. */
5521 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5522 if (saved_into_ssa
5523 && TREE_CODE (*from_p) == CALL_EXPR
5524 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5525 gimplify_ctxp->into_ssa = false;
5526 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5527 gimplify_ctxp->into_ssa = saved_into_ssa;
5528 if (ret == GS_ERROR)
5529 return ret;
5531 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5532 guess for the predicate was wrong. */
5533 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5534 if (final_pred != initial_pred)
5536 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5537 if (ret == GS_ERROR)
5538 return ret;
5541 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5542 size as argument to the call. */
5543 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5545 tree call = TREE_OPERAND (*from_p, 0);
5546 tree vlasize = TREE_OPERAND (*from_p, 1);
5548 if (TREE_CODE (call) == CALL_EXPR
5549 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5551 int nargs = call_expr_nargs (call);
5552 tree type = TREE_TYPE (call);
5553 tree ap = CALL_EXPR_ARG (call, 0);
5554 tree tag = CALL_EXPR_ARG (call, 1);
5555 tree aptag = CALL_EXPR_ARG (call, 2);
5556 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5557 IFN_VA_ARG, type,
5558 nargs + 1, ap, tag,
5559 aptag, vlasize);
5560 TREE_OPERAND (*from_p, 0) = newcall;
5564 /* Now see if the above changed *from_p to something we handle specially. */
5565 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5566 want_value);
5567 if (ret != GS_UNHANDLED)
5568 return ret;
5570 /* If we've got a variable sized assignment between two lvalues (i.e. does
5571 not involve a call), then we can make things a bit more straightforward
5572 by converting the assignment to memcpy or memset. */
5573 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5575 tree from = TREE_OPERAND (*from_p, 0);
5576 tree size = TREE_OPERAND (*from_p, 1);
5578 if (TREE_CODE (from) == CONSTRUCTOR)
5579 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5581 if (is_gimple_addressable (from))
5583 *from_p = from;
5584 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5585 pre_p);
5589 /* Transform partial stores to non-addressable complex variables into
5590 total stores. This allows us to use real instead of virtual operands
5591 for these variables, which improves optimization. */
5592 if ((TREE_CODE (*to_p) == REALPART_EXPR
5593 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5594 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5595 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5597 /* Try to alleviate the effects of the gimplification creating artificial
5598 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5599 make sure not to create DECL_DEBUG_EXPR links across functions. */
5600 if (!gimplify_ctxp->into_ssa
5601 && VAR_P (*from_p)
5602 && DECL_IGNORED_P (*from_p)
5603 && DECL_P (*to_p)
5604 && !DECL_IGNORED_P (*to_p)
5605 && decl_function_context (*to_p) == current_function_decl
5606 && decl_function_context (*from_p) == current_function_decl)
5608 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5609 DECL_NAME (*from_p)
5610 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5611 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5612 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5615 if (want_value && TREE_THIS_VOLATILE (*to_p))
5616 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5618 if (TREE_CODE (*from_p) == CALL_EXPR)
5620 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5621 instead of a GIMPLE_ASSIGN. */
5622 gcall *call_stmt;
5623 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5625 /* Gimplify internal functions created in the FEs. */
5626 int nargs = call_expr_nargs (*from_p), i;
5627 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5628 auto_vec<tree> vargs (nargs);
5630 for (i = 0; i < nargs; i++)
5632 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5633 EXPR_LOCATION (*from_p));
5634 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5636 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5637 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5639 else
5641 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5642 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5643 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5644 tree fndecl = get_callee_fndecl (*from_p);
5645 if (fndecl
5646 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5647 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5648 && call_expr_nargs (*from_p) == 3)
5649 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5650 CALL_EXPR_ARG (*from_p, 0),
5651 CALL_EXPR_ARG (*from_p, 1),
5652 CALL_EXPR_ARG (*from_p, 2));
5653 else
5655 call_stmt = gimple_build_call_from_tree (*from_p);
5656 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
5659 notice_special_calls (call_stmt);
5660 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5661 gimple_call_set_lhs (call_stmt, *to_p);
5662 else if (TREE_CODE (*to_p) == SSA_NAME)
5663 /* The above is somewhat premature, avoid ICEing later for a
5664 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5665 ??? This doesn't make it a default-def. */
5666 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5668 if (EXPR_CILK_SPAWN (*from_p))
5669 gimplify_cilk_detach (pre_p);
5670 assign = call_stmt;
5672 else
5674 assign = gimple_build_assign (*to_p, *from_p);
5675 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5676 if (COMPARISON_CLASS_P (*from_p))
5677 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5680 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5682 /* We should have got an SSA name from the start. */
5683 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5684 || ! gimple_in_ssa_p (cfun));
5687 gimplify_seq_add_stmt (pre_p, assign);
5688 gsi = gsi_last (*pre_p);
5689 maybe_fold_stmt (&gsi);
5691 if (want_value)
5693 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5694 return GS_OK;
5696 else
5697 *expr_p = NULL;
5699 return GS_ALL_DONE;
5702 /* Gimplify a comparison between two variable-sized objects. Do this
5703 with a call to BUILT_IN_MEMCMP. */
5705 static enum gimplify_status
5706 gimplify_variable_sized_compare (tree *expr_p)
5708 location_t loc = EXPR_LOCATION (*expr_p);
5709 tree op0 = TREE_OPERAND (*expr_p, 0);
5710 tree op1 = TREE_OPERAND (*expr_p, 1);
5711 tree t, arg, dest, src, expr;
5713 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5714 arg = unshare_expr (arg);
5715 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5716 src = build_fold_addr_expr_loc (loc, op1);
5717 dest = build_fold_addr_expr_loc (loc, op0);
5718 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5719 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5721 expr
5722 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5723 SET_EXPR_LOCATION (expr, loc);
5724 *expr_p = expr;
5726 return GS_OK;
5729 /* Gimplify a comparison between two aggregate objects of integral scalar
5730 mode as a comparison between the bitwise equivalent scalar values. */
5732 static enum gimplify_status
5733 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5735 location_t loc = EXPR_LOCATION (*expr_p);
5736 tree op0 = TREE_OPERAND (*expr_p, 0);
5737 tree op1 = TREE_OPERAND (*expr_p, 1);
5739 tree type = TREE_TYPE (op0);
5740 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5742 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5743 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5745 *expr_p
5746 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5748 return GS_OK;
5751 /* Gimplify an expression sequence. This function gimplifies each
5752 expression and rewrites the original expression with the last
5753 expression of the sequence in GIMPLE form.
5755 PRE_P points to the list where the side effects for all the
5756 expressions in the sequence will be emitted.
5758 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5760 static enum gimplify_status
5761 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5763 tree t = *expr_p;
5767 tree *sub_p = &TREE_OPERAND (t, 0);
5769 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5770 gimplify_compound_expr (sub_p, pre_p, false);
5771 else
5772 gimplify_stmt (sub_p, pre_p);
5774 t = TREE_OPERAND (t, 1);
5776 while (TREE_CODE (t) == COMPOUND_EXPR);
5778 *expr_p = t;
5779 if (want_value)
5780 return GS_OK;
5781 else
5783 gimplify_stmt (expr_p, pre_p);
5784 return GS_ALL_DONE;
5788 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5789 gimplify. After gimplification, EXPR_P will point to a new temporary
5790 that holds the original value of the SAVE_EXPR node.
5792 PRE_P points to the list where side effects that must happen before
5793 *EXPR_P should be stored. */
5795 static enum gimplify_status
5796 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5798 enum gimplify_status ret = GS_ALL_DONE;
5799 tree val;
5801 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5802 val = TREE_OPERAND (*expr_p, 0);
5804 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5805 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5807 /* The operand may be a void-valued expression. It is
5808 being executed only for its side-effects. */
5809 if (TREE_TYPE (val) == void_type_node)
5811 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5812 is_gimple_stmt, fb_none);
5813 val = NULL;
5815 else
5816 /* The temporary may not be an SSA name as later abnormal and EH
5817 control flow may invalidate use/def domination. */
5818 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5820 TREE_OPERAND (*expr_p, 0) = val;
5821 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5824 *expr_p = val;
5826 return ret;
5829 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5831 unary_expr
5832 : ...
5833 | '&' varname
5836 PRE_P points to the list where side effects that must happen before
5837 *EXPR_P should be stored.
5839 POST_P points to the list where side effects that must happen after
5840 *EXPR_P should be stored. */
5842 static enum gimplify_status
5843 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5845 tree expr = *expr_p;
5846 tree op0 = TREE_OPERAND (expr, 0);
5847 enum gimplify_status ret;
5848 location_t loc = EXPR_LOCATION (*expr_p);
5850 switch (TREE_CODE (op0))
5852 case INDIRECT_REF:
5853 do_indirect_ref:
5854 /* Check if we are dealing with an expression of the form '&*ptr'.
5855 While the front end folds away '&*ptr' into 'ptr', these
5856 expressions may be generated internally by the compiler (e.g.,
5857 builtins like __builtin_va_end). */
5858 /* Caution: the silent array decomposition semantics we allow for
5859 ADDR_EXPR means we can't always discard the pair. */
5860 /* Gimplification of the ADDR_EXPR operand may drop
5861 cv-qualification conversions, so make sure we add them if
5862 needed. */
5864 tree op00 = TREE_OPERAND (op0, 0);
5865 tree t_expr = TREE_TYPE (expr);
5866 tree t_op00 = TREE_TYPE (op00);
5868 if (!useless_type_conversion_p (t_expr, t_op00))
5869 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5870 *expr_p = op00;
5871 ret = GS_OK;
5873 break;
5875 case VIEW_CONVERT_EXPR:
5876 /* Take the address of our operand and then convert it to the type of
5877 this ADDR_EXPR.
5879 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5880 all clear. The impact of this transformation is even less clear. */
5882 /* If the operand is a useless conversion, look through it. Doing so
5883 guarantees that the ADDR_EXPR and its operand will remain of the
5884 same type. */
5885 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5886 op0 = TREE_OPERAND (op0, 0);
5888 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5889 build_fold_addr_expr_loc (loc,
5890 TREE_OPERAND (op0, 0)));
5891 ret = GS_OK;
5892 break;
5894 case MEM_REF:
5895 if (integer_zerop (TREE_OPERAND (op0, 1)))
5896 goto do_indirect_ref;
5898 /* fall through */
5900 default:
5901 /* If we see a call to a declared builtin or see its address
5902 being taken (we can unify those cases here) then we can mark
5903 the builtin for implicit generation by GCC. */
5904 if (TREE_CODE (op0) == FUNCTION_DECL
5905 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5906 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5907 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5909 /* We use fb_either here because the C frontend sometimes takes
5910 the address of a call that returns a struct; see
5911 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5912 the implied temporary explicit. */
5914 /* Make the operand addressable. */
5915 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5916 is_gimple_addressable, fb_either);
5917 if (ret == GS_ERROR)
5918 break;
5920 /* Then mark it. Beware that it may not be possible to do so directly
5921 if a temporary has been created by the gimplification. */
5922 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5924 op0 = TREE_OPERAND (expr, 0);
5926 /* For various reasons, the gimplification of the expression
5927 may have made a new INDIRECT_REF. */
5928 if (TREE_CODE (op0) == INDIRECT_REF)
5929 goto do_indirect_ref;
5931 mark_addressable (TREE_OPERAND (expr, 0));
5933 /* The FEs may end up building ADDR_EXPRs early on a decl with
5934 an incomplete type. Re-build ADDR_EXPRs in canonical form
5935 here. */
5936 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5937 *expr_p = build_fold_addr_expr (op0);
5939 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5940 recompute_tree_invariant_for_addr_expr (*expr_p);
5942 /* If we re-built the ADDR_EXPR add a conversion to the original type
5943 if required. */
5944 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5945 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5947 break;
5950 return ret;
5953 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5954 value; output operands should be a gimple lvalue. */
5956 static enum gimplify_status
5957 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5959 tree expr;
5960 int noutputs;
5961 const char **oconstraints;
5962 int i;
5963 tree link;
5964 const char *constraint;
5965 bool allows_mem, allows_reg, is_inout;
5966 enum gimplify_status ret, tret;
5967 gasm *stmt;
5968 vec<tree, va_gc> *inputs;
5969 vec<tree, va_gc> *outputs;
5970 vec<tree, va_gc> *clobbers;
5971 vec<tree, va_gc> *labels;
5972 tree link_next;
5974 expr = *expr_p;
5975 noutputs = list_length (ASM_OUTPUTS (expr));
5976 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5978 inputs = NULL;
5979 outputs = NULL;
5980 clobbers = NULL;
5981 labels = NULL;
5983 ret = GS_ALL_DONE;
5984 link_next = NULL_TREE;
5985 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5987 bool ok;
5988 size_t constraint_len;
5990 link_next = TREE_CHAIN (link);
5992 oconstraints[i]
5993 = constraint
5994 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5995 constraint_len = strlen (constraint);
5996 if (constraint_len == 0)
5997 continue;
5999 ok = parse_output_constraint (&constraint, i, 0, 0,
6000 &allows_mem, &allows_reg, &is_inout);
6001 if (!ok)
6003 ret = GS_ERROR;
6004 is_inout = false;
6007 if (!allows_reg && allows_mem)
6008 mark_addressable (TREE_VALUE (link));
6010 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6011 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6012 fb_lvalue | fb_mayfail);
6013 if (tret == GS_ERROR)
6015 error ("invalid lvalue in asm output %d", i);
6016 ret = tret;
6019 /* If the constraint does not allow memory make sure we gimplify
6020 it to a register if it is not already but its base is. This
6021 happens for complex and vector components. */
6022 if (!allows_mem)
6024 tree op = TREE_VALUE (link);
6025 if (! is_gimple_val (op)
6026 && is_gimple_reg_type (TREE_TYPE (op))
6027 && is_gimple_reg (get_base_address (op)))
6029 tree tem = create_tmp_reg (TREE_TYPE (op));
6030 tree ass;
6031 if (is_inout)
6033 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6034 tem, unshare_expr (op));
6035 gimplify_and_add (ass, pre_p);
6037 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6038 gimplify_and_add (ass, post_p);
6040 TREE_VALUE (link) = tem;
6041 tret = GS_OK;
6045 vec_safe_push (outputs, link);
6046 TREE_CHAIN (link) = NULL_TREE;
6048 if (is_inout)
6050 /* An input/output operand. To give the optimizers more
6051 flexibility, split it into separate input and output
6052 operands. */
6053 tree input;
6054 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6055 char buf[11];
6057 /* Turn the in/out constraint into an output constraint. */
6058 char *p = xstrdup (constraint);
6059 p[0] = '=';
6060 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6062 /* And add a matching input constraint. */
6063 if (allows_reg)
6065 sprintf (buf, "%u", i);
6067 /* If there are multiple alternatives in the constraint,
6068 handle each of them individually. Those that allow register
6069 will be replaced with operand number, the others will stay
6070 unchanged. */
6071 if (strchr (p, ',') != NULL)
6073 size_t len = 0, buflen = strlen (buf);
6074 char *beg, *end, *str, *dst;
6076 for (beg = p + 1;;)
6078 end = strchr (beg, ',');
6079 if (end == NULL)
6080 end = strchr (beg, '\0');
6081 if ((size_t) (end - beg) < buflen)
6082 len += buflen + 1;
6083 else
6084 len += end - beg + 1;
6085 if (*end)
6086 beg = end + 1;
6087 else
6088 break;
6091 str = (char *) alloca (len);
6092 for (beg = p + 1, dst = str;;)
6094 const char *tem;
6095 bool mem_p, reg_p, inout_p;
6097 end = strchr (beg, ',');
6098 if (end)
6099 *end = '\0';
6100 beg[-1] = '=';
6101 tem = beg - 1;
6102 parse_output_constraint (&tem, i, 0, 0,
6103 &mem_p, &reg_p, &inout_p);
6104 if (dst != str)
6105 *dst++ = ',';
6106 if (reg_p)
6108 memcpy (dst, buf, buflen);
6109 dst += buflen;
6111 else
6113 if (end)
6114 len = end - beg;
6115 else
6116 len = strlen (beg);
6117 memcpy (dst, beg, len);
6118 dst += len;
6120 if (end)
6121 beg = end + 1;
6122 else
6123 break;
6125 *dst = '\0';
6126 input = build_string (dst - str, str);
6128 else
6129 input = build_string (strlen (buf), buf);
6131 else
6132 input = build_string (constraint_len - 1, constraint + 1);
6134 free (p);
6136 input = build_tree_list (build_tree_list (NULL_TREE, input),
6137 unshare_expr (TREE_VALUE (link)));
6138 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6142 link_next = NULL_TREE;
6143 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6145 link_next = TREE_CHAIN (link);
6146 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6147 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6148 oconstraints, &allows_mem, &allows_reg);
6150 /* If we can't make copies, we can only accept memory. */
6151 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6153 if (allows_mem)
6154 allows_reg = 0;
6155 else
6157 error ("impossible constraint in %<asm%>");
6158 error ("non-memory input %d must stay in memory", i);
6159 return GS_ERROR;
6163 /* If the operand is a memory input, it should be an lvalue. */
6164 if (!allows_reg && allows_mem)
6166 tree inputv = TREE_VALUE (link);
6167 STRIP_NOPS (inputv);
6168 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6169 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6170 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6171 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6172 || TREE_CODE (inputv) == MODIFY_EXPR)
6173 TREE_VALUE (link) = error_mark_node;
6174 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6175 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6176 if (tret != GS_ERROR)
6178 /* Unlike output operands, memory inputs are not guaranteed
6179 to be lvalues by the FE, and while the expressions are
6180 marked addressable there, if it is e.g. a statement
6181 expression, temporaries in it might not end up being
6182 addressable. They might be already used in the IL and thus
6183 it is too late to make them addressable now though. */
6184 tree x = TREE_VALUE (link);
6185 while (handled_component_p (x))
6186 x = TREE_OPERAND (x, 0);
6187 if (TREE_CODE (x) == MEM_REF
6188 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6189 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6190 if ((VAR_P (x)
6191 || TREE_CODE (x) == PARM_DECL
6192 || TREE_CODE (x) == RESULT_DECL)
6193 && !TREE_ADDRESSABLE (x)
6194 && is_gimple_reg (x))
6196 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6197 input_location), 0,
6198 "memory input %d is not directly addressable",
6200 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6203 mark_addressable (TREE_VALUE (link));
6204 if (tret == GS_ERROR)
6206 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6207 "memory input %d is not directly addressable", i);
6208 ret = tret;
6211 else
6213 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6214 is_gimple_asm_val, fb_rvalue);
6215 if (tret == GS_ERROR)
6216 ret = tret;
6219 TREE_CHAIN (link) = NULL_TREE;
6220 vec_safe_push (inputs, link);
6223 link_next = NULL_TREE;
6224 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6226 link_next = TREE_CHAIN (link);
6227 TREE_CHAIN (link) = NULL_TREE;
6228 vec_safe_push (clobbers, link);
6231 link_next = NULL_TREE;
6232 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6234 link_next = TREE_CHAIN (link);
6235 TREE_CHAIN (link) = NULL_TREE;
6236 vec_safe_push (labels, link);
6239 /* Do not add ASMs with errors to the gimple IL stream. */
6240 if (ret != GS_ERROR)
6242 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6243 inputs, outputs, clobbers, labels);
6245 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6246 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6248 gimplify_seq_add_stmt (pre_p, stmt);
6251 return ret;
6254 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6255 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6256 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6257 return to this function.
6259 FIXME should we complexify the prequeue handling instead? Or use flags
6260 for all the cleanups and let the optimizer tighten them up? The current
6261 code seems pretty fragile; it will break on a cleanup within any
6262 non-conditional nesting. But any such nesting would be broken, anyway;
6263 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6264 and continues out of it. We can do that at the RTL level, though, so
6265 having an optimizer to tighten up try/finally regions would be a Good
6266 Thing. */
6268 static enum gimplify_status
6269 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6271 gimple_stmt_iterator iter;
6272 gimple_seq body_sequence = NULL;
6274 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6276 /* We only care about the number of conditions between the innermost
6277 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6278 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6279 int old_conds = gimplify_ctxp->conditions;
6280 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6281 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6282 gimplify_ctxp->conditions = 0;
6283 gimplify_ctxp->conditional_cleanups = NULL;
6284 gimplify_ctxp->in_cleanup_point_expr = true;
6286 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6288 gimplify_ctxp->conditions = old_conds;
6289 gimplify_ctxp->conditional_cleanups = old_cleanups;
6290 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6292 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6294 gimple *wce = gsi_stmt (iter);
6296 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6298 if (gsi_one_before_end_p (iter))
6300 /* Note that gsi_insert_seq_before and gsi_remove do not
6301 scan operands, unlike some other sequence mutators. */
6302 if (!gimple_wce_cleanup_eh_only (wce))
6303 gsi_insert_seq_before_without_update (&iter,
6304 gimple_wce_cleanup (wce),
6305 GSI_SAME_STMT);
6306 gsi_remove (&iter, true);
6307 break;
6309 else
6311 gtry *gtry;
6312 gimple_seq seq;
6313 enum gimple_try_flags kind;
6315 if (gimple_wce_cleanup_eh_only (wce))
6316 kind = GIMPLE_TRY_CATCH;
6317 else
6318 kind = GIMPLE_TRY_FINALLY;
6319 seq = gsi_split_seq_after (iter);
6321 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6322 /* Do not use gsi_replace here, as it may scan operands.
6323 We want to do a simple structural modification only. */
6324 gsi_set_stmt (&iter, gtry);
6325 iter = gsi_start (gtry->eval);
6328 else
6329 gsi_next (&iter);
6332 gimplify_seq_add_seq (pre_p, body_sequence);
6333 if (temp)
6335 *expr_p = temp;
6336 return GS_OK;
6338 else
6340 *expr_p = NULL;
6341 return GS_ALL_DONE;
6345 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6346 is the cleanup action required. EH_ONLY is true if the cleanup should
6347 only be executed if an exception is thrown, not on normal exit.
6348 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6349 only valid for clobbers. */
6351 static void
6352 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6353 bool force_uncond = false)
6355 gimple *wce;
6356 gimple_seq cleanup_stmts = NULL;
6358 /* Errors can result in improperly nested cleanups. Which results in
6359 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6360 if (seen_error ())
6361 return;
6363 if (gimple_conditional_context ())
6365 /* If we're in a conditional context, this is more complex. We only
6366 want to run the cleanup if we actually ran the initialization that
6367 necessitates it, but we want to run it after the end of the
6368 conditional context. So we wrap the try/finally around the
6369 condition and use a flag to determine whether or not to actually
6370 run the destructor. Thus
6372 test ? f(A()) : 0
6374 becomes (approximately)
6376 flag = 0;
6377 try {
6378 if (test) { A::A(temp); flag = 1; val = f(temp); }
6379 else { val = 0; }
6380 } finally {
6381 if (flag) A::~A(temp);
6385 if (force_uncond)
6387 gimplify_stmt (&cleanup, &cleanup_stmts);
6388 wce = gimple_build_wce (cleanup_stmts);
6389 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6391 else
6393 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6394 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6395 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6397 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6398 gimplify_stmt (&cleanup, &cleanup_stmts);
6399 wce = gimple_build_wce (cleanup_stmts);
6401 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6402 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6403 gimplify_seq_add_stmt (pre_p, ftrue);
6405 /* Because of this manipulation, and the EH edges that jump
6406 threading cannot redirect, the temporary (VAR) will appear
6407 to be used uninitialized. Don't warn. */
6408 TREE_NO_WARNING (var) = 1;
6411 else
6413 gimplify_stmt (&cleanup, &cleanup_stmts);
6414 wce = gimple_build_wce (cleanup_stmts);
6415 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6416 gimplify_seq_add_stmt (pre_p, wce);
6420 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6422 static enum gimplify_status
6423 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6425 tree targ = *expr_p;
6426 tree temp = TARGET_EXPR_SLOT (targ);
6427 tree init = TARGET_EXPR_INITIAL (targ);
6428 enum gimplify_status ret;
6430 bool unpoison_empty_seq = false;
6431 gimple_stmt_iterator unpoison_it;
6433 if (init)
6435 tree cleanup = NULL_TREE;
6437 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6438 to the temps list. Handle also variable length TARGET_EXPRs. */
6439 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6441 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6442 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6443 gimplify_vla_decl (temp, pre_p);
6445 else
6447 /* Save location where we need to place unpoisoning. It's possible
6448 that a variable will be converted to needs_to_live_in_memory. */
6449 unpoison_it = gsi_last (*pre_p);
6450 unpoison_empty_seq = gsi_end_p (unpoison_it);
6452 gimple_add_tmp_var (temp);
6455 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6456 expression is supposed to initialize the slot. */
6457 if (VOID_TYPE_P (TREE_TYPE (init)))
6458 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6459 else
6461 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6462 init = init_expr;
6463 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6464 init = NULL;
6465 ggc_free (init_expr);
6467 if (ret == GS_ERROR)
6469 /* PR c++/28266 Make sure this is expanded only once. */
6470 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6471 return GS_ERROR;
6473 if (init)
6474 gimplify_and_add (init, pre_p);
6476 /* If needed, push the cleanup for the temp. */
6477 if (TARGET_EXPR_CLEANUP (targ))
6479 if (CLEANUP_EH_ONLY (targ))
6480 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6481 CLEANUP_EH_ONLY (targ), pre_p);
6482 else
6483 cleanup = TARGET_EXPR_CLEANUP (targ);
6486 /* Add a clobber for the temporary going out of scope, like
6487 gimplify_bind_expr. */
6488 if (gimplify_ctxp->in_cleanup_point_expr
6489 && needs_to_live_in_memory (temp))
6491 if (flag_stack_reuse == SR_ALL)
6493 tree clobber = build_constructor (TREE_TYPE (temp),
6494 NULL);
6495 TREE_THIS_VOLATILE (clobber) = true;
6496 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6497 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6499 if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope))
6501 tree asan_cleanup = build_asan_poison_call_expr (temp);
6502 if (asan_cleanup)
6504 if (unpoison_empty_seq)
6505 unpoison_it = gsi_start (*pre_p);
6507 asan_poison_variable (temp, false, &unpoison_it,
6508 unpoison_empty_seq);
6509 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6513 if (cleanup)
6514 gimple_push_cleanup (temp, cleanup, false, pre_p);
6516 /* Only expand this once. */
6517 TREE_OPERAND (targ, 3) = init;
6518 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6520 else
6521 /* We should have expanded this before. */
6522 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6524 *expr_p = temp;
6525 return GS_OK;
6528 /* Gimplification of expression trees. */
6530 /* Gimplify an expression which appears at statement context. The
6531 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6532 NULL, a new sequence is allocated.
6534 Return true if we actually added a statement to the queue. */
6536 bool
6537 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6539 gimple_seq_node last;
6541 last = gimple_seq_last (*seq_p);
6542 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6543 return last != gimple_seq_last (*seq_p);
6546 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6547 to CTX. If entries already exist, force them to be some flavor of private.
6548 If there is no enclosing parallel, do nothing. */
6550 void
6551 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6553 splay_tree_node n;
6555 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6556 return;
6560 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6561 if (n != NULL)
6563 if (n->value & GOVD_SHARED)
6564 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6565 else if (n->value & GOVD_MAP)
6566 n->value |= GOVD_MAP_TO_ONLY;
6567 else
6568 return;
6570 else if ((ctx->region_type & ORT_TARGET) != 0)
6572 if (ctx->target_map_scalars_firstprivate)
6573 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6574 else
6575 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6577 else if (ctx->region_type != ORT_WORKSHARE
6578 && ctx->region_type != ORT_SIMD
6579 && ctx->region_type != ORT_ACC
6580 && !(ctx->region_type & ORT_TARGET_DATA))
6581 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6583 ctx = ctx->outer_context;
6585 while (ctx);
6588 /* Similarly for each of the type sizes of TYPE. */
6590 static void
6591 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6593 if (type == NULL || type == error_mark_node)
6594 return;
6595 type = TYPE_MAIN_VARIANT (type);
6597 if (ctx->privatized_types->add (type))
6598 return;
6600 switch (TREE_CODE (type))
6602 case INTEGER_TYPE:
6603 case ENUMERAL_TYPE:
6604 case BOOLEAN_TYPE:
6605 case REAL_TYPE:
6606 case FIXED_POINT_TYPE:
6607 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6608 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6609 break;
6611 case ARRAY_TYPE:
6612 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6613 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6614 break;
6616 case RECORD_TYPE:
6617 case UNION_TYPE:
6618 case QUAL_UNION_TYPE:
6620 tree field;
6621 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6622 if (TREE_CODE (field) == FIELD_DECL)
6624 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6625 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6628 break;
6630 case POINTER_TYPE:
6631 case REFERENCE_TYPE:
6632 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6633 break;
6635 default:
6636 break;
6639 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6640 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6641 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6644 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6646 static void
6647 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6649 splay_tree_node n;
6650 unsigned int nflags;
6651 tree t;
6653 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6654 return;
6656 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6657 there are constructors involved somewhere. Exception is a shared clause,
6658 there is nothing privatized in that case. */
6659 if ((flags & GOVD_SHARED) == 0
6660 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6661 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6662 flags |= GOVD_SEEN;
6664 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6665 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6667 /* We shouldn't be re-adding the decl with the same data
6668 sharing class. */
6669 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6670 nflags = n->value | flags;
6671 /* The only combination of data sharing classes we should see is
6672 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6673 reduction variables to be used in data sharing clauses. */
6674 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6675 || ((nflags & GOVD_DATA_SHARE_CLASS)
6676 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6677 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6678 n->value = nflags;
6679 return;
6682 /* When adding a variable-sized variable, we have to handle all sorts
6683 of additional bits of data: the pointer replacement variable, and
6684 the parameters of the type. */
6685 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6687 /* Add the pointer replacement variable as PRIVATE if the variable
6688 replacement is private, else FIRSTPRIVATE since we'll need the
6689 address of the original variable either for SHARED, or for the
6690 copy into or out of the context. */
6691 if (!(flags & GOVD_LOCAL))
6693 if (flags & GOVD_MAP)
6694 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6695 else if (flags & GOVD_PRIVATE)
6696 nflags = GOVD_PRIVATE;
6697 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6698 && (flags & GOVD_FIRSTPRIVATE))
6699 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6700 else
6701 nflags = GOVD_FIRSTPRIVATE;
6702 nflags |= flags & GOVD_SEEN;
6703 t = DECL_VALUE_EXPR (decl);
6704 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6705 t = TREE_OPERAND (t, 0);
6706 gcc_assert (DECL_P (t));
6707 omp_add_variable (ctx, t, nflags);
6710 /* Add all of the variable and type parameters (which should have
6711 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6712 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6713 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6714 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6716 /* The variable-sized variable itself is never SHARED, only some form
6717 of PRIVATE. The sharing would take place via the pointer variable
6718 which we remapped above. */
6719 if (flags & GOVD_SHARED)
6720 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6721 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6723 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6724 alloca statement we generate for the variable, so make sure it
6725 is available. This isn't automatically needed for the SHARED
6726 case, since we won't be allocating local storage then.
6727 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6728 in this case omp_notice_variable will be called later
6729 on when it is gimplified. */
6730 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6731 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6732 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6734 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6735 && lang_hooks.decls.omp_privatize_by_reference (decl))
6737 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6739 /* Similar to the direct variable sized case above, we'll need the
6740 size of references being privatized. */
6741 if ((flags & GOVD_SHARED) == 0)
6743 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6744 if (DECL_P (t))
6745 omp_notice_variable (ctx, t, true);
6749 if (n != NULL)
6750 n->value |= flags;
6751 else
6752 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6754 /* For reductions clauses in OpenACC loop directives, by default create a
6755 copy clause on the enclosing parallel construct for carrying back the
6756 results. */
6757 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6759 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6760 while (outer_ctx)
6762 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6763 if (n != NULL)
6765 /* Ignore local variables and explicitly declared clauses. */
6766 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6767 break;
6768 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6770 /* According to the OpenACC spec, such a reduction variable
6771 should already have a copy map on a kernels construct,
6772 verify that here. */
6773 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6774 && (n->value & GOVD_MAP));
6776 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6778 /* Remove firstprivate and make it a copy map. */
6779 n->value &= ~GOVD_FIRSTPRIVATE;
6780 n->value |= GOVD_MAP;
6783 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6785 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6786 GOVD_MAP | GOVD_SEEN);
6787 break;
6789 outer_ctx = outer_ctx->outer_context;
6794 /* Notice a threadprivate variable DECL used in OMP context CTX.
6795 This just prints out diagnostics about threadprivate variable uses
6796 in untied tasks. If DECL2 is non-NULL, prevent this warning
6797 on that variable. */
6799 static bool
6800 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6801 tree decl2)
6803 splay_tree_node n;
6804 struct gimplify_omp_ctx *octx;
6806 for (octx = ctx; octx; octx = octx->outer_context)
6807 if ((octx->region_type & ORT_TARGET) != 0)
6809 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6810 if (n == NULL)
6812 error ("threadprivate variable %qE used in target region",
6813 DECL_NAME (decl));
6814 error_at (octx->location, "enclosing target region");
6815 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6817 if (decl2)
6818 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6821 if (ctx->region_type != ORT_UNTIED_TASK)
6822 return false;
6823 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6824 if (n == NULL)
6826 error ("threadprivate variable %qE used in untied task",
6827 DECL_NAME (decl));
6828 error_at (ctx->location, "enclosing task");
6829 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6831 if (decl2)
6832 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6833 return false;
6836 /* Return true if global var DECL is device resident. */
6838 static bool
6839 device_resident_p (tree decl)
6841 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6843 if (!attr)
6844 return false;
6846 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6848 tree c = TREE_VALUE (t);
6849 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6850 return true;
6853 return false;
6856 /* Return true if DECL has an ACC DECLARE attribute. */
6858 static bool
6859 is_oacc_declared (tree decl)
6861 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6862 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6863 return declared != NULL_TREE;
6866 /* Determine outer default flags for DECL mentioned in an OMP region
6867 but not declared in an enclosing clause.
6869 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6870 remapped firstprivate instead of shared. To some extent this is
6871 addressed in omp_firstprivatize_type_sizes, but not
6872 effectively. */
6874 static unsigned
6875 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6876 bool in_code, unsigned flags)
6878 enum omp_clause_default_kind default_kind = ctx->default_kind;
6879 enum omp_clause_default_kind kind;
6881 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6882 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6883 default_kind = kind;
6885 switch (default_kind)
6887 case OMP_CLAUSE_DEFAULT_NONE:
6889 const char *rtype;
6891 if (ctx->region_type & ORT_PARALLEL)
6892 rtype = "parallel";
6893 else if (ctx->region_type & ORT_TASK)
6894 rtype = "task";
6895 else if (ctx->region_type & ORT_TEAMS)
6896 rtype = "teams";
6897 else
6898 gcc_unreachable ();
6900 error ("%qE not specified in enclosing %qs",
6901 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6902 error_at (ctx->location, "enclosing %qs", rtype);
6904 /* FALLTHRU */
6905 case OMP_CLAUSE_DEFAULT_SHARED:
6906 flags |= GOVD_SHARED;
6907 break;
6908 case OMP_CLAUSE_DEFAULT_PRIVATE:
6909 flags |= GOVD_PRIVATE;
6910 break;
6911 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6912 flags |= GOVD_FIRSTPRIVATE;
6913 break;
6914 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6915 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6916 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6917 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6919 omp_notice_variable (octx, decl, in_code);
6920 for (; octx; octx = octx->outer_context)
6922 splay_tree_node n2;
6924 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6925 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6926 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6927 continue;
6928 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6930 flags |= GOVD_FIRSTPRIVATE;
6931 goto found_outer;
6933 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6935 flags |= GOVD_SHARED;
6936 goto found_outer;
6941 if (TREE_CODE (decl) == PARM_DECL
6942 || (!is_global_var (decl)
6943 && DECL_CONTEXT (decl) == current_function_decl))
6944 flags |= GOVD_FIRSTPRIVATE;
6945 else
6946 flags |= GOVD_SHARED;
6947 found_outer:
6948 break;
6950 default:
6951 gcc_unreachable ();
6954 return flags;
6958 /* Determine outer default flags for DECL mentioned in an OACC region
6959 but not declared in an enclosing clause. */
6961 static unsigned
6962 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6964 const char *rkind;
6965 bool on_device = false;
6966 bool declared = is_oacc_declared (decl);
6967 tree type = TREE_TYPE (decl);
6969 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6970 type = TREE_TYPE (type);
6972 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6973 && is_global_var (decl)
6974 && device_resident_p (decl))
6976 on_device = true;
6977 flags |= GOVD_MAP_TO_ONLY;
6980 switch (ctx->region_type)
6982 case ORT_ACC_KERNELS:
6983 rkind = "kernels";
6985 if (AGGREGATE_TYPE_P (type))
6987 /* Aggregates default to 'present_or_copy', or 'present'. */
6988 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6989 flags |= GOVD_MAP;
6990 else
6991 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6993 else
6994 /* Scalars default to 'copy'. */
6995 flags |= GOVD_MAP | GOVD_MAP_FORCE;
6997 break;
6999 case ORT_ACC_PARALLEL:
7000 rkind = "parallel";
7002 if (on_device || declared)
7003 flags |= GOVD_MAP;
7004 else if (AGGREGATE_TYPE_P (type))
7006 /* Aggregates default to 'present_or_copy', or 'present'. */
7007 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7008 flags |= GOVD_MAP;
7009 else
7010 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7012 else
7013 /* Scalars default to 'firstprivate'. */
7014 flags |= GOVD_FIRSTPRIVATE;
7016 break;
7018 default:
7019 gcc_unreachable ();
7022 if (DECL_ARTIFICIAL (decl))
7023 ; /* We can get compiler-generated decls, and should not complain
7024 about them. */
7025 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7027 error ("%qE not specified in enclosing OpenACC %qs construct",
7028 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7029 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7031 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7032 ; /* Handled above. */
7033 else
7034 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7036 return flags;
7039 /* Record the fact that DECL was used within the OMP context CTX.
7040 IN_CODE is true when real code uses DECL, and false when we should
7041 merely emit default(none) errors. Return true if DECL is going to
7042 be remapped and thus DECL shouldn't be gimplified into its
7043 DECL_VALUE_EXPR (if any). */
7045 static bool
7046 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7048 splay_tree_node n;
7049 unsigned flags = in_code ? GOVD_SEEN : 0;
7050 bool ret = false, shared;
7052 if (error_operand_p (decl))
7053 return false;
7055 if (ctx->region_type == ORT_NONE)
7056 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7058 if (is_global_var (decl))
7060 /* Threadprivate variables are predetermined. */
7061 if (DECL_THREAD_LOCAL_P (decl))
7062 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7064 if (DECL_HAS_VALUE_EXPR_P (decl))
7066 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7068 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7069 return omp_notice_threadprivate_variable (ctx, decl, value);
7072 if (gimplify_omp_ctxp->outer_context == NULL
7073 && VAR_P (decl)
7074 && oacc_get_fn_attrib (current_function_decl))
7076 location_t loc = DECL_SOURCE_LOCATION (decl);
7078 if (lookup_attribute ("omp declare target link",
7079 DECL_ATTRIBUTES (decl)))
7081 error_at (loc,
7082 "%qE with %<link%> clause used in %<routine%> function",
7083 DECL_NAME (decl));
7084 return false;
7086 else if (!lookup_attribute ("omp declare target",
7087 DECL_ATTRIBUTES (decl)))
7089 error_at (loc,
7090 "%qE requires a %<declare%> directive for use "
7091 "in a %<routine%> function", DECL_NAME (decl));
7092 return false;
7097 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7098 if ((ctx->region_type & ORT_TARGET) != 0)
7100 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7101 if (n == NULL)
7103 unsigned nflags = flags;
7104 if (ctx->target_map_pointers_as_0len_arrays
7105 || ctx->target_map_scalars_firstprivate)
7107 bool is_declare_target = false;
7108 bool is_scalar = false;
7109 if (is_global_var (decl)
7110 && varpool_node::get_create (decl)->offloadable)
7112 struct gimplify_omp_ctx *octx;
7113 for (octx = ctx->outer_context;
7114 octx; octx = octx->outer_context)
7116 n = splay_tree_lookup (octx->variables,
7117 (splay_tree_key)decl);
7118 if (n
7119 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7120 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7121 break;
7123 is_declare_target = octx == NULL;
7125 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7126 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7127 if (is_declare_target)
7129 else if (ctx->target_map_pointers_as_0len_arrays
7130 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7131 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7132 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7133 == POINTER_TYPE)))
7134 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7135 else if (is_scalar)
7136 nflags |= GOVD_FIRSTPRIVATE;
7139 struct gimplify_omp_ctx *octx = ctx->outer_context;
7140 if ((ctx->region_type & ORT_ACC) && octx)
7142 /* Look in outer OpenACC contexts, to see if there's a
7143 data attribute for this variable. */
7144 omp_notice_variable (octx, decl, in_code);
7146 for (; octx; octx = octx->outer_context)
7148 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7149 break;
7150 splay_tree_node n2
7151 = splay_tree_lookup (octx->variables,
7152 (splay_tree_key) decl);
7153 if (n2)
7155 if (octx->region_type == ORT_ACC_HOST_DATA)
7156 error ("variable %qE declared in enclosing "
7157 "%<host_data%> region", DECL_NAME (decl));
7158 nflags |= GOVD_MAP;
7159 if (octx->region_type == ORT_ACC_DATA
7160 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7161 nflags |= GOVD_MAP_0LEN_ARRAY;
7162 goto found_outer;
7168 tree type = TREE_TYPE (decl);
7170 if (nflags == flags
7171 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7172 && lang_hooks.decls.omp_privatize_by_reference (decl))
7173 type = TREE_TYPE (type);
7174 if (nflags == flags
7175 && !lang_hooks.types.omp_mappable_type (type))
7177 error ("%qD referenced in target region does not have "
7178 "a mappable type", decl);
7179 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7181 else if (nflags == flags)
7183 if ((ctx->region_type & ORT_ACC) != 0)
7184 nflags = oacc_default_clause (ctx, decl, flags);
7185 else
7186 nflags |= GOVD_MAP;
7189 found_outer:
7190 omp_add_variable (ctx, decl, nflags);
7192 else
7194 /* If nothing changed, there's nothing left to do. */
7195 if ((n->value & flags) == flags)
7196 return ret;
7197 flags |= n->value;
7198 n->value = flags;
7200 goto do_outer;
7203 if (n == NULL)
7205 if (ctx->region_type == ORT_WORKSHARE
7206 || ctx->region_type == ORT_SIMD
7207 || ctx->region_type == ORT_ACC
7208 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7209 goto do_outer;
7211 flags = omp_default_clause (ctx, decl, in_code, flags);
7213 if ((flags & GOVD_PRIVATE)
7214 && lang_hooks.decls.omp_private_outer_ref (decl))
7215 flags |= GOVD_PRIVATE_OUTER_REF;
7217 omp_add_variable (ctx, decl, flags);
7219 shared = (flags & GOVD_SHARED) != 0;
7220 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7221 goto do_outer;
7224 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7225 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7226 && DECL_SIZE (decl))
7228 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7230 splay_tree_node n2;
7231 tree t = DECL_VALUE_EXPR (decl);
7232 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7233 t = TREE_OPERAND (t, 0);
7234 gcc_assert (DECL_P (t));
7235 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7236 n2->value |= GOVD_SEEN;
7238 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7239 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7240 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7241 != INTEGER_CST))
7243 splay_tree_node n2;
7244 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7245 gcc_assert (DECL_P (t));
7246 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7247 if (n2)
7248 omp_notice_variable (ctx, t, true);
7252 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7253 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7255 /* If nothing changed, there's nothing left to do. */
7256 if ((n->value & flags) == flags)
7257 return ret;
7258 flags |= n->value;
7259 n->value = flags;
7261 do_outer:
7262 /* If the variable is private in the current context, then we don't
7263 need to propagate anything to an outer context. */
7264 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7265 return ret;
7266 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7267 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7268 return ret;
7269 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7270 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7271 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7272 return ret;
7273 if (ctx->outer_context
7274 && omp_notice_variable (ctx->outer_context, decl, in_code))
7275 return true;
7276 return ret;
7279 /* Verify that DECL is private within CTX. If there's specific information
7280 to the contrary in the innermost scope, generate an error. */
7282 static bool
7283 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7285 splay_tree_node n;
7287 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7288 if (n != NULL)
7290 if (n->value & GOVD_SHARED)
7292 if (ctx == gimplify_omp_ctxp)
7294 if (simd)
7295 error ("iteration variable %qE is predetermined linear",
7296 DECL_NAME (decl));
7297 else
7298 error ("iteration variable %qE should be private",
7299 DECL_NAME (decl));
7300 n->value = GOVD_PRIVATE;
7301 return true;
7303 else
7304 return false;
7306 else if ((n->value & GOVD_EXPLICIT) != 0
7307 && (ctx == gimplify_omp_ctxp
7308 || (ctx->region_type == ORT_COMBINED_PARALLEL
7309 && gimplify_omp_ctxp->outer_context == ctx)))
7311 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7312 error ("iteration variable %qE should not be firstprivate",
7313 DECL_NAME (decl));
7314 else if ((n->value & GOVD_REDUCTION) != 0)
7315 error ("iteration variable %qE should not be reduction",
7316 DECL_NAME (decl));
7317 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7318 error ("iteration variable %qE should not be linear",
7319 DECL_NAME (decl));
7320 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7321 error ("iteration variable %qE should not be lastprivate",
7322 DECL_NAME (decl));
7323 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7324 error ("iteration variable %qE should not be private",
7325 DECL_NAME (decl));
7326 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7327 error ("iteration variable %qE is predetermined linear",
7328 DECL_NAME (decl));
7330 return (ctx == gimplify_omp_ctxp
7331 || (ctx->region_type == ORT_COMBINED_PARALLEL
7332 && gimplify_omp_ctxp->outer_context == ctx));
7335 if (ctx->region_type != ORT_WORKSHARE
7336 && ctx->region_type != ORT_SIMD
7337 && ctx->region_type != ORT_ACC)
7338 return false;
7339 else if (ctx->outer_context)
7340 return omp_is_private (ctx->outer_context, decl, simd);
7341 return false;
7344 /* Return true if DECL is private within a parallel region
7345 that binds to the current construct's context or in parallel
7346 region's REDUCTION clause. */
7348 static bool
7349 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7351 splay_tree_node n;
7355 ctx = ctx->outer_context;
7356 if (ctx == NULL)
7358 if (is_global_var (decl))
7359 return false;
7361 /* References might be private, but might be shared too,
7362 when checking for copyprivate, assume they might be
7363 private, otherwise assume they might be shared. */
7364 if (copyprivate)
7365 return true;
7367 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7368 return false;
7370 /* Treat C++ privatized non-static data members outside
7371 of the privatization the same. */
7372 if (omp_member_access_dummy_var (decl))
7373 return false;
7375 return true;
7378 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7380 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7381 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7382 continue;
7384 if (n != NULL)
7386 if ((n->value & GOVD_LOCAL) != 0
7387 && omp_member_access_dummy_var (decl))
7388 return false;
7389 return (n->value & GOVD_SHARED) == 0;
7392 while (ctx->region_type == ORT_WORKSHARE
7393 || ctx->region_type == ORT_SIMD
7394 || ctx->region_type == ORT_ACC);
7395 return false;
7398 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7400 static tree
7401 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7403 tree t = *tp;
7405 /* If this node has been visited, unmark it and keep looking. */
7406 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7407 return t;
7409 if (IS_TYPE_OR_DECL_P (t))
7410 *walk_subtrees = 0;
7411 return NULL_TREE;
7414 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7415 and previous omp contexts. */
7417 static void
7418 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7419 enum omp_region_type region_type,
7420 enum tree_code code)
7422 struct gimplify_omp_ctx *ctx, *outer_ctx;
7423 tree c;
7424 hash_map<tree, tree> *struct_map_to_clause = NULL;
7425 tree *prev_list_p = NULL;
7427 ctx = new_omp_context (region_type);
7428 outer_ctx = ctx->outer_context;
7429 if (code == OMP_TARGET)
7431 if (!lang_GNU_Fortran ())
7432 ctx->target_map_pointers_as_0len_arrays = true;
7433 ctx->target_map_scalars_firstprivate = true;
7435 if (!lang_GNU_Fortran ())
7436 switch (code)
7438 case OMP_TARGET:
7439 case OMP_TARGET_DATA:
7440 case OMP_TARGET_ENTER_DATA:
7441 case OMP_TARGET_EXIT_DATA:
7442 case OACC_DECLARE:
7443 case OACC_HOST_DATA:
7444 ctx->target_firstprivatize_array_bases = true;
7445 default:
7446 break;
7449 while ((c = *list_p) != NULL)
7451 bool remove = false;
7452 bool notice_outer = true;
7453 const char *check_non_private = NULL;
7454 unsigned int flags;
7455 tree decl;
7457 switch (OMP_CLAUSE_CODE (c))
7459 case OMP_CLAUSE_PRIVATE:
7460 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7461 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7463 flags |= GOVD_PRIVATE_OUTER_REF;
7464 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7466 else
7467 notice_outer = false;
7468 goto do_add;
7469 case OMP_CLAUSE_SHARED:
7470 flags = GOVD_SHARED | GOVD_EXPLICIT;
7471 goto do_add;
7472 case OMP_CLAUSE_FIRSTPRIVATE:
7473 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7474 check_non_private = "firstprivate";
7475 goto do_add;
7476 case OMP_CLAUSE_LASTPRIVATE:
7477 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7478 check_non_private = "lastprivate";
7479 decl = OMP_CLAUSE_DECL (c);
7480 if (error_operand_p (decl))
7481 goto do_add;
7482 else if (outer_ctx
7483 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7484 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7485 && splay_tree_lookup (outer_ctx->variables,
7486 (splay_tree_key) decl) == NULL)
7488 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7489 if (outer_ctx->outer_context)
7490 omp_notice_variable (outer_ctx->outer_context, decl, true);
7492 else if (outer_ctx
7493 && (outer_ctx->region_type & ORT_TASK) != 0
7494 && outer_ctx->combined_loop
7495 && splay_tree_lookup (outer_ctx->variables,
7496 (splay_tree_key) decl) == NULL)
7498 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7499 if (outer_ctx->outer_context)
7500 omp_notice_variable (outer_ctx->outer_context, decl, true);
7502 else if (outer_ctx
7503 && (outer_ctx->region_type == ORT_WORKSHARE
7504 || outer_ctx->region_type == ORT_ACC)
7505 && outer_ctx->combined_loop
7506 && splay_tree_lookup (outer_ctx->variables,
7507 (splay_tree_key) decl) == NULL
7508 && !omp_check_private (outer_ctx, decl, false))
7510 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7511 if (outer_ctx->outer_context
7512 && (outer_ctx->outer_context->region_type
7513 == ORT_COMBINED_PARALLEL)
7514 && splay_tree_lookup (outer_ctx->outer_context->variables,
7515 (splay_tree_key) decl) == NULL)
7517 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7518 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7519 if (octx->outer_context)
7521 octx = octx->outer_context;
7522 if (octx->region_type == ORT_WORKSHARE
7523 && octx->combined_loop
7524 && splay_tree_lookup (octx->variables,
7525 (splay_tree_key) decl) == NULL
7526 && !omp_check_private (octx, decl, false))
7528 omp_add_variable (octx, decl,
7529 GOVD_LASTPRIVATE | GOVD_SEEN);
7530 octx = octx->outer_context;
7531 if (octx
7532 && octx->region_type == ORT_COMBINED_TEAMS
7533 && (splay_tree_lookup (octx->variables,
7534 (splay_tree_key) decl)
7535 == NULL))
7537 omp_add_variable (octx, decl,
7538 GOVD_SHARED | GOVD_SEEN);
7539 octx = octx->outer_context;
7542 if (octx)
7543 omp_notice_variable (octx, decl, true);
7546 else if (outer_ctx->outer_context)
7547 omp_notice_variable (outer_ctx->outer_context, decl, true);
7549 goto do_add;
7550 case OMP_CLAUSE_REDUCTION:
7551 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7552 /* OpenACC permits reductions on private variables. */
7553 if (!(region_type & ORT_ACC))
7554 check_non_private = "reduction";
7555 decl = OMP_CLAUSE_DECL (c);
7556 if (TREE_CODE (decl) == MEM_REF)
7558 tree type = TREE_TYPE (decl);
7559 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7560 NULL, is_gimple_val, fb_rvalue, false)
7561 == GS_ERROR)
7563 remove = true;
7564 break;
7566 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7567 if (DECL_P (v))
7569 omp_firstprivatize_variable (ctx, v);
7570 omp_notice_variable (ctx, v, true);
7572 decl = TREE_OPERAND (decl, 0);
7573 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7575 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7576 NULL, is_gimple_val, fb_rvalue, false)
7577 == GS_ERROR)
7579 remove = true;
7580 break;
7582 v = TREE_OPERAND (decl, 1);
7583 if (DECL_P (v))
7585 omp_firstprivatize_variable (ctx, v);
7586 omp_notice_variable (ctx, v, true);
7588 decl = TREE_OPERAND (decl, 0);
7590 if (TREE_CODE (decl) == ADDR_EXPR
7591 || TREE_CODE (decl) == INDIRECT_REF)
7592 decl = TREE_OPERAND (decl, 0);
7594 goto do_add_decl;
7595 case OMP_CLAUSE_LINEAR:
7596 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7597 is_gimple_val, fb_rvalue) == GS_ERROR)
7599 remove = true;
7600 break;
7602 else
7604 if (code == OMP_SIMD
7605 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7607 struct gimplify_omp_ctx *octx = outer_ctx;
7608 if (octx
7609 && octx->region_type == ORT_WORKSHARE
7610 && octx->combined_loop
7611 && !octx->distribute)
7613 if (octx->outer_context
7614 && (octx->outer_context->region_type
7615 == ORT_COMBINED_PARALLEL))
7616 octx = octx->outer_context->outer_context;
7617 else
7618 octx = octx->outer_context;
7620 if (octx
7621 && octx->region_type == ORT_WORKSHARE
7622 && octx->combined_loop
7623 && octx->distribute)
7625 error_at (OMP_CLAUSE_LOCATION (c),
7626 "%<linear%> clause for variable other than "
7627 "loop iterator specified on construct "
7628 "combined with %<distribute%>");
7629 remove = true;
7630 break;
7633 /* For combined #pragma omp parallel for simd, need to put
7634 lastprivate and perhaps firstprivate too on the
7635 parallel. Similarly for #pragma omp for simd. */
7636 struct gimplify_omp_ctx *octx = outer_ctx;
7637 decl = NULL_TREE;
7640 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7641 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7642 break;
7643 decl = OMP_CLAUSE_DECL (c);
7644 if (error_operand_p (decl))
7646 decl = NULL_TREE;
7647 break;
7649 flags = GOVD_SEEN;
7650 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7651 flags |= GOVD_FIRSTPRIVATE;
7652 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7653 flags |= GOVD_LASTPRIVATE;
7654 if (octx
7655 && octx->region_type == ORT_WORKSHARE
7656 && octx->combined_loop)
7658 if (octx->outer_context
7659 && (octx->outer_context->region_type
7660 == ORT_COMBINED_PARALLEL))
7661 octx = octx->outer_context;
7662 else if (omp_check_private (octx, decl, false))
7663 break;
7665 else if (octx
7666 && (octx->region_type & ORT_TASK) != 0
7667 && octx->combined_loop)
7669 else if (octx
7670 && octx->region_type == ORT_COMBINED_PARALLEL
7671 && ctx->region_type == ORT_WORKSHARE
7672 && octx == outer_ctx)
7673 flags = GOVD_SEEN | GOVD_SHARED;
7674 else if (octx
7675 && octx->region_type == ORT_COMBINED_TEAMS)
7676 flags = GOVD_SEEN | GOVD_SHARED;
7677 else if (octx
7678 && octx->region_type == ORT_COMBINED_TARGET)
7680 flags &= ~GOVD_LASTPRIVATE;
7681 if (flags == GOVD_SEEN)
7682 break;
7684 else
7685 break;
7686 splay_tree_node on
7687 = splay_tree_lookup (octx->variables,
7688 (splay_tree_key) decl);
7689 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7691 octx = NULL;
7692 break;
7694 omp_add_variable (octx, decl, flags);
7695 if (octx->outer_context == NULL)
7696 break;
7697 octx = octx->outer_context;
7699 while (1);
7700 if (octx
7701 && decl
7702 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7703 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7704 omp_notice_variable (octx, decl, true);
7706 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7707 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7708 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7710 notice_outer = false;
7711 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7713 goto do_add;
7715 case OMP_CLAUSE_MAP:
7716 decl = OMP_CLAUSE_DECL (c);
7717 if (error_operand_p (decl))
7718 remove = true;
7719 switch (code)
7721 case OMP_TARGET:
7722 break;
7723 case OACC_DATA:
7724 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7725 break;
7726 /* FALLTHRU */
7727 case OMP_TARGET_DATA:
7728 case OMP_TARGET_ENTER_DATA:
7729 case OMP_TARGET_EXIT_DATA:
7730 case OACC_ENTER_DATA:
7731 case OACC_EXIT_DATA:
7732 case OACC_HOST_DATA:
7733 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7734 || (OMP_CLAUSE_MAP_KIND (c)
7735 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7736 /* For target {,enter ,exit }data only the array slice is
7737 mapped, but not the pointer to it. */
7738 remove = true;
7739 break;
7740 default:
7741 break;
7743 if (remove)
7744 break;
7745 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7747 struct gimplify_omp_ctx *octx;
7748 for (octx = outer_ctx; octx; octx = octx->outer_context)
7750 if (octx->region_type != ORT_ACC_HOST_DATA)
7751 break;
7752 splay_tree_node n2
7753 = splay_tree_lookup (octx->variables,
7754 (splay_tree_key) decl);
7755 if (n2)
7756 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7757 "declared in enclosing %<host_data%> region",
7758 DECL_NAME (decl));
7761 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7762 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7763 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7764 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7765 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7767 remove = true;
7768 break;
7770 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7771 || (OMP_CLAUSE_MAP_KIND (c)
7772 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7773 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7775 OMP_CLAUSE_SIZE (c)
7776 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7777 false);
7778 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7779 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7781 if (!DECL_P (decl))
7783 tree d = decl, *pd;
7784 if (TREE_CODE (d) == ARRAY_REF)
7786 while (TREE_CODE (d) == ARRAY_REF)
7787 d = TREE_OPERAND (d, 0);
7788 if (TREE_CODE (d) == COMPONENT_REF
7789 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7790 decl = d;
7792 pd = &OMP_CLAUSE_DECL (c);
7793 if (d == decl
7794 && TREE_CODE (decl) == INDIRECT_REF
7795 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7796 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7797 == REFERENCE_TYPE))
7799 pd = &TREE_OPERAND (decl, 0);
7800 decl = TREE_OPERAND (decl, 0);
7802 if (TREE_CODE (decl) == COMPONENT_REF)
7804 while (TREE_CODE (decl) == COMPONENT_REF)
7805 decl = TREE_OPERAND (decl, 0);
7806 if (TREE_CODE (decl) == INDIRECT_REF
7807 && DECL_P (TREE_OPERAND (decl, 0))
7808 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7809 == REFERENCE_TYPE))
7810 decl = TREE_OPERAND (decl, 0);
7812 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7813 == GS_ERROR)
7815 remove = true;
7816 break;
7818 if (DECL_P (decl))
7820 if (error_operand_p (decl))
7822 remove = true;
7823 break;
7826 tree stype = TREE_TYPE (decl);
7827 if (TREE_CODE (stype) == REFERENCE_TYPE)
7828 stype = TREE_TYPE (stype);
7829 if (TYPE_SIZE_UNIT (stype) == NULL
7830 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7832 error_at (OMP_CLAUSE_LOCATION (c),
7833 "mapping field %qE of variable length "
7834 "structure", OMP_CLAUSE_DECL (c));
7835 remove = true;
7836 break;
7839 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7841 /* Error recovery. */
7842 if (prev_list_p == NULL)
7844 remove = true;
7845 break;
7847 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7849 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7850 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7852 remove = true;
7853 break;
7858 tree offset;
7859 HOST_WIDE_INT bitsize, bitpos;
7860 machine_mode mode;
7861 int unsignedp, reversep, volatilep = 0;
7862 tree base = OMP_CLAUSE_DECL (c);
7863 while (TREE_CODE (base) == ARRAY_REF)
7864 base = TREE_OPERAND (base, 0);
7865 if (TREE_CODE (base) == INDIRECT_REF)
7866 base = TREE_OPERAND (base, 0);
7867 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7868 &mode, &unsignedp, &reversep,
7869 &volatilep);
7870 tree orig_base = base;
7871 if ((TREE_CODE (base) == INDIRECT_REF
7872 || (TREE_CODE (base) == MEM_REF
7873 && integer_zerop (TREE_OPERAND (base, 1))))
7874 && DECL_P (TREE_OPERAND (base, 0))
7875 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7876 == REFERENCE_TYPE))
7877 base = TREE_OPERAND (base, 0);
7878 gcc_assert (base == decl
7879 && (offset == NULL_TREE
7880 || TREE_CODE (offset) == INTEGER_CST));
7882 splay_tree_node n
7883 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7884 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7885 == GOMP_MAP_ALWAYS_POINTER);
7886 if (n == NULL || (n->value & GOVD_MAP) == 0)
7888 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7889 OMP_CLAUSE_MAP);
7890 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7891 if (orig_base != base)
7892 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7893 else
7894 OMP_CLAUSE_DECL (l) = decl;
7895 OMP_CLAUSE_SIZE (l) = size_int (1);
7896 if (struct_map_to_clause == NULL)
7897 struct_map_to_clause = new hash_map<tree, tree>;
7898 struct_map_to_clause->put (decl, l);
7899 if (ptr)
7901 enum gomp_map_kind mkind
7902 = code == OMP_TARGET_EXIT_DATA
7903 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7904 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7905 OMP_CLAUSE_MAP);
7906 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7907 OMP_CLAUSE_DECL (c2)
7908 = unshare_expr (OMP_CLAUSE_DECL (c));
7909 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7910 OMP_CLAUSE_SIZE (c2)
7911 = TYPE_SIZE_UNIT (ptr_type_node);
7912 OMP_CLAUSE_CHAIN (l) = c2;
7913 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7915 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7916 tree c3
7917 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7918 OMP_CLAUSE_MAP);
7919 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7920 OMP_CLAUSE_DECL (c3)
7921 = unshare_expr (OMP_CLAUSE_DECL (c4));
7922 OMP_CLAUSE_SIZE (c3)
7923 = TYPE_SIZE_UNIT (ptr_type_node);
7924 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7925 OMP_CLAUSE_CHAIN (c2) = c3;
7927 *prev_list_p = l;
7928 prev_list_p = NULL;
7930 else
7932 OMP_CLAUSE_CHAIN (l) = c;
7933 *list_p = l;
7934 list_p = &OMP_CLAUSE_CHAIN (l);
7936 if (orig_base != base && code == OMP_TARGET)
7938 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7939 OMP_CLAUSE_MAP);
7940 enum gomp_map_kind mkind
7941 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7942 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7943 OMP_CLAUSE_DECL (c2) = decl;
7944 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7945 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7946 OMP_CLAUSE_CHAIN (l) = c2;
7948 flags = GOVD_MAP | GOVD_EXPLICIT;
7949 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7950 flags |= GOVD_SEEN;
7951 goto do_add_decl;
7953 else
7955 tree *osc = struct_map_to_clause->get (decl);
7956 tree *sc = NULL, *scp = NULL;
7957 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7958 n->value |= GOVD_SEEN;
7959 offset_int o1, o2;
7960 if (offset)
7961 o1 = wi::to_offset (offset);
7962 else
7963 o1 = 0;
7964 if (bitpos)
7965 o1 = o1 + bitpos / BITS_PER_UNIT;
7966 sc = &OMP_CLAUSE_CHAIN (*osc);
7967 if (*sc != c
7968 && (OMP_CLAUSE_MAP_KIND (*sc)
7969 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7970 sc = &OMP_CLAUSE_CHAIN (*sc);
7971 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7972 if (ptr && sc == prev_list_p)
7973 break;
7974 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7975 != COMPONENT_REF
7976 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7977 != INDIRECT_REF)
7978 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7979 != ARRAY_REF))
7980 break;
7981 else
7983 tree offset2;
7984 HOST_WIDE_INT bitsize2, bitpos2;
7985 base = OMP_CLAUSE_DECL (*sc);
7986 if (TREE_CODE (base) == ARRAY_REF)
7988 while (TREE_CODE (base) == ARRAY_REF)
7989 base = TREE_OPERAND (base, 0);
7990 if (TREE_CODE (base) != COMPONENT_REF
7991 || (TREE_CODE (TREE_TYPE (base))
7992 != ARRAY_TYPE))
7993 break;
7995 else if (TREE_CODE (base) == INDIRECT_REF
7996 && (TREE_CODE (TREE_OPERAND (base, 0))
7997 == COMPONENT_REF)
7998 && (TREE_CODE (TREE_TYPE
7999 (TREE_OPERAND (base, 0)))
8000 == REFERENCE_TYPE))
8001 base = TREE_OPERAND (base, 0);
8002 base = get_inner_reference (base, &bitsize2,
8003 &bitpos2, &offset2,
8004 &mode, &unsignedp,
8005 &reversep, &volatilep);
8006 if ((TREE_CODE (base) == INDIRECT_REF
8007 || (TREE_CODE (base) == MEM_REF
8008 && integer_zerop (TREE_OPERAND (base,
8009 1))))
8010 && DECL_P (TREE_OPERAND (base, 0))
8011 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8012 0)))
8013 == REFERENCE_TYPE))
8014 base = TREE_OPERAND (base, 0);
8015 if (base != decl)
8016 break;
8017 if (scp)
8018 continue;
8019 gcc_assert (offset == NULL_TREE
8020 || TREE_CODE (offset) == INTEGER_CST);
8021 tree d1 = OMP_CLAUSE_DECL (*sc);
8022 tree d2 = OMP_CLAUSE_DECL (c);
8023 while (TREE_CODE (d1) == ARRAY_REF)
8024 d1 = TREE_OPERAND (d1, 0);
8025 while (TREE_CODE (d2) == ARRAY_REF)
8026 d2 = TREE_OPERAND (d2, 0);
8027 if (TREE_CODE (d1) == INDIRECT_REF)
8028 d1 = TREE_OPERAND (d1, 0);
8029 if (TREE_CODE (d2) == INDIRECT_REF)
8030 d2 = TREE_OPERAND (d2, 0);
8031 while (TREE_CODE (d1) == COMPONENT_REF)
8032 if (TREE_CODE (d2) == COMPONENT_REF
8033 && TREE_OPERAND (d1, 1)
8034 == TREE_OPERAND (d2, 1))
8036 d1 = TREE_OPERAND (d1, 0);
8037 d2 = TREE_OPERAND (d2, 0);
8039 else
8040 break;
8041 if (d1 == d2)
8043 error_at (OMP_CLAUSE_LOCATION (c),
8044 "%qE appears more than once in map "
8045 "clauses", OMP_CLAUSE_DECL (c));
8046 remove = true;
8047 break;
8049 if (offset2)
8050 o2 = wi::to_offset (offset2);
8051 else
8052 o2 = 0;
8053 if (bitpos2)
8054 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8055 if (wi::ltu_p (o1, o2)
8056 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8058 if (ptr)
8059 scp = sc;
8060 else
8061 break;
8064 if (remove)
8065 break;
8066 OMP_CLAUSE_SIZE (*osc)
8067 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8068 size_one_node);
8069 if (ptr)
8071 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8072 OMP_CLAUSE_MAP);
8073 tree cl = NULL_TREE;
8074 enum gomp_map_kind mkind
8075 = code == OMP_TARGET_EXIT_DATA
8076 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8077 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8078 OMP_CLAUSE_DECL (c2)
8079 = unshare_expr (OMP_CLAUSE_DECL (c));
8080 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8081 OMP_CLAUSE_SIZE (c2)
8082 = TYPE_SIZE_UNIT (ptr_type_node);
8083 cl = scp ? *prev_list_p : c2;
8084 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8086 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8087 tree c3
8088 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8089 OMP_CLAUSE_MAP);
8090 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8091 OMP_CLAUSE_DECL (c3)
8092 = unshare_expr (OMP_CLAUSE_DECL (c4));
8093 OMP_CLAUSE_SIZE (c3)
8094 = TYPE_SIZE_UNIT (ptr_type_node);
8095 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8096 if (!scp)
8097 OMP_CLAUSE_CHAIN (c2) = c3;
8098 else
8099 cl = c3;
8101 if (scp)
8102 *scp = c2;
8103 if (sc == prev_list_p)
8105 *sc = cl;
8106 prev_list_p = NULL;
8108 else
8110 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8111 list_p = prev_list_p;
8112 prev_list_p = NULL;
8113 OMP_CLAUSE_CHAIN (c) = *sc;
8114 *sc = cl;
8115 continue;
8118 else if (*sc != c)
8120 *list_p = OMP_CLAUSE_CHAIN (c);
8121 OMP_CLAUSE_CHAIN (c) = *sc;
8122 *sc = c;
8123 continue;
8127 if (!remove
8128 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8129 && OMP_CLAUSE_CHAIN (c)
8130 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8131 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8132 == GOMP_MAP_ALWAYS_POINTER))
8133 prev_list_p = list_p;
8134 break;
8136 flags = GOVD_MAP | GOVD_EXPLICIT;
8137 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8138 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8139 flags |= GOVD_MAP_ALWAYS_TO;
8140 goto do_add;
8142 case OMP_CLAUSE_DEPEND:
8143 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8145 tree deps = OMP_CLAUSE_DECL (c);
8146 while (deps && TREE_CODE (deps) == TREE_LIST)
8148 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8149 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8150 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8151 pre_p, NULL, is_gimple_val, fb_rvalue);
8152 deps = TREE_CHAIN (deps);
8154 break;
8156 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8157 break;
8158 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8160 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8161 NULL, is_gimple_val, fb_rvalue);
8162 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8164 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8166 remove = true;
8167 break;
8169 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8170 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8171 is_gimple_val, fb_rvalue) == GS_ERROR)
8173 remove = true;
8174 break;
8176 break;
8178 case OMP_CLAUSE_TO:
8179 case OMP_CLAUSE_FROM:
8180 case OMP_CLAUSE__CACHE_:
8181 decl = OMP_CLAUSE_DECL (c);
8182 if (error_operand_p (decl))
8184 remove = true;
8185 break;
8187 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8188 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8189 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8190 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8191 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8193 remove = true;
8194 break;
8196 if (!DECL_P (decl))
8198 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8199 NULL, is_gimple_lvalue, fb_lvalue)
8200 == GS_ERROR)
8202 remove = true;
8203 break;
8205 break;
8207 goto do_notice;
8209 case OMP_CLAUSE_USE_DEVICE_PTR:
8210 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8211 goto do_add;
8212 case OMP_CLAUSE_IS_DEVICE_PTR:
8213 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8214 goto do_add;
8216 do_add:
8217 decl = OMP_CLAUSE_DECL (c);
8218 do_add_decl:
8219 if (error_operand_p (decl))
8221 remove = true;
8222 break;
8224 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8226 tree t = omp_member_access_dummy_var (decl);
8227 if (t)
8229 tree v = DECL_VALUE_EXPR (decl);
8230 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8231 if (outer_ctx)
8232 omp_notice_variable (outer_ctx, t, true);
8235 if (code == OACC_DATA
8236 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8237 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8238 flags |= GOVD_MAP_0LEN_ARRAY;
8239 omp_add_variable (ctx, decl, flags);
8240 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8241 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8243 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8244 GOVD_LOCAL | GOVD_SEEN);
8245 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8246 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8247 find_decl_expr,
8248 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8249 NULL) == NULL_TREE)
8250 omp_add_variable (ctx,
8251 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8252 GOVD_LOCAL | GOVD_SEEN);
8253 gimplify_omp_ctxp = ctx;
8254 push_gimplify_context ();
8256 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8257 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8259 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8260 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8261 pop_gimplify_context
8262 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8263 push_gimplify_context ();
8264 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8265 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8266 pop_gimplify_context
8267 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8268 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8269 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8271 gimplify_omp_ctxp = outer_ctx;
8273 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8274 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8276 gimplify_omp_ctxp = ctx;
8277 push_gimplify_context ();
8278 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8280 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8281 NULL, NULL);
8282 TREE_SIDE_EFFECTS (bind) = 1;
8283 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8284 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8286 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8287 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8288 pop_gimplify_context
8289 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8290 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8292 gimplify_omp_ctxp = outer_ctx;
8294 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8295 && OMP_CLAUSE_LINEAR_STMT (c))
8297 gimplify_omp_ctxp = ctx;
8298 push_gimplify_context ();
8299 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8301 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8302 NULL, NULL);
8303 TREE_SIDE_EFFECTS (bind) = 1;
8304 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8305 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8307 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8308 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8309 pop_gimplify_context
8310 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8311 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8313 gimplify_omp_ctxp = outer_ctx;
8315 if (notice_outer)
8316 goto do_notice;
8317 break;
8319 case OMP_CLAUSE_COPYIN:
8320 case OMP_CLAUSE_COPYPRIVATE:
8321 decl = OMP_CLAUSE_DECL (c);
8322 if (error_operand_p (decl))
8324 remove = true;
8325 break;
8327 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8328 && !remove
8329 && !omp_check_private (ctx, decl, true))
8331 remove = true;
8332 if (is_global_var (decl))
8334 if (DECL_THREAD_LOCAL_P (decl))
8335 remove = false;
8336 else if (DECL_HAS_VALUE_EXPR_P (decl))
8338 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8340 if (value
8341 && DECL_P (value)
8342 && DECL_THREAD_LOCAL_P (value))
8343 remove = false;
8346 if (remove)
8347 error_at (OMP_CLAUSE_LOCATION (c),
8348 "copyprivate variable %qE is not threadprivate"
8349 " or private in outer context", DECL_NAME (decl));
8351 do_notice:
8352 if (outer_ctx)
8353 omp_notice_variable (outer_ctx, decl, true);
8354 if (check_non_private
8355 && region_type == ORT_WORKSHARE
8356 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8357 || decl == OMP_CLAUSE_DECL (c)
8358 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8359 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8360 == ADDR_EXPR
8361 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8362 == POINTER_PLUS_EXPR
8363 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8364 (OMP_CLAUSE_DECL (c), 0), 0))
8365 == ADDR_EXPR)))))
8366 && omp_check_private (ctx, decl, false))
8368 error ("%s variable %qE is private in outer context",
8369 check_non_private, DECL_NAME (decl));
8370 remove = true;
8372 break;
8374 case OMP_CLAUSE_IF:
8375 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8376 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8378 const char *p[2];
8379 for (int i = 0; i < 2; i++)
8380 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8382 case OMP_PARALLEL: p[i] = "parallel"; break;
8383 case OMP_TASK: p[i] = "task"; break;
8384 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8385 case OMP_TARGET_DATA: p[i] = "target data"; break;
8386 case OMP_TARGET: p[i] = "target"; break;
8387 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8388 case OMP_TARGET_ENTER_DATA:
8389 p[i] = "target enter data"; break;
8390 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8391 default: gcc_unreachable ();
8393 error_at (OMP_CLAUSE_LOCATION (c),
8394 "expected %qs %<if%> clause modifier rather than %qs",
8395 p[0], p[1]);
8396 remove = true;
8398 /* Fall through. */
8400 case OMP_CLAUSE_FINAL:
8401 OMP_CLAUSE_OPERAND (c, 0)
8402 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8403 /* Fall through. */
8405 case OMP_CLAUSE_SCHEDULE:
8406 case OMP_CLAUSE_NUM_THREADS:
8407 case OMP_CLAUSE_NUM_TEAMS:
8408 case OMP_CLAUSE_THREAD_LIMIT:
8409 case OMP_CLAUSE_DIST_SCHEDULE:
8410 case OMP_CLAUSE_DEVICE:
8411 case OMP_CLAUSE_PRIORITY:
8412 case OMP_CLAUSE_GRAINSIZE:
8413 case OMP_CLAUSE_NUM_TASKS:
8414 case OMP_CLAUSE_HINT:
8415 case OMP_CLAUSE__CILK_FOR_COUNT_:
8416 case OMP_CLAUSE_ASYNC:
8417 case OMP_CLAUSE_WAIT:
8418 case OMP_CLAUSE_NUM_GANGS:
8419 case OMP_CLAUSE_NUM_WORKERS:
8420 case OMP_CLAUSE_VECTOR_LENGTH:
8421 case OMP_CLAUSE_WORKER:
8422 case OMP_CLAUSE_VECTOR:
8423 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8424 is_gimple_val, fb_rvalue) == GS_ERROR)
8425 remove = true;
8426 break;
8428 case OMP_CLAUSE_GANG:
8429 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8430 is_gimple_val, fb_rvalue) == GS_ERROR)
8431 remove = true;
8432 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8433 is_gimple_val, fb_rvalue) == GS_ERROR)
8434 remove = true;
8435 break;
8437 case OMP_CLAUSE_NOWAIT:
8438 case OMP_CLAUSE_ORDERED:
8439 case OMP_CLAUSE_UNTIED:
8440 case OMP_CLAUSE_COLLAPSE:
8441 case OMP_CLAUSE_TILE:
8442 case OMP_CLAUSE_AUTO:
8443 case OMP_CLAUSE_SEQ:
8444 case OMP_CLAUSE_INDEPENDENT:
8445 case OMP_CLAUSE_MERGEABLE:
8446 case OMP_CLAUSE_PROC_BIND:
8447 case OMP_CLAUSE_SAFELEN:
8448 case OMP_CLAUSE_SIMDLEN:
8449 case OMP_CLAUSE_NOGROUP:
8450 case OMP_CLAUSE_THREADS:
8451 case OMP_CLAUSE_SIMD:
8452 break;
8454 case OMP_CLAUSE_DEFAULTMAP:
8455 ctx->target_map_scalars_firstprivate = false;
8456 break;
8458 case OMP_CLAUSE_ALIGNED:
8459 decl = OMP_CLAUSE_DECL (c);
8460 if (error_operand_p (decl))
8462 remove = true;
8463 break;
8465 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8466 is_gimple_val, fb_rvalue) == GS_ERROR)
8468 remove = true;
8469 break;
8471 if (!is_global_var (decl)
8472 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8473 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8474 break;
8476 case OMP_CLAUSE_DEFAULT:
8477 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8478 break;
8480 default:
8481 gcc_unreachable ();
8484 if (code == OACC_DATA
8485 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8486 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8487 remove = true;
8488 if (remove)
8489 *list_p = OMP_CLAUSE_CHAIN (c);
8490 else
8491 list_p = &OMP_CLAUSE_CHAIN (c);
8494 gimplify_omp_ctxp = ctx;
8495 if (struct_map_to_clause)
8496 delete struct_map_to_clause;
8499 /* Return true if DECL is a candidate for shared to firstprivate
8500 optimization. We only consider non-addressable scalars, not
8501 too big, and not references. */
8503 static bool
8504 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8506 if (TREE_ADDRESSABLE (decl))
8507 return false;
8508 tree type = TREE_TYPE (decl);
8509 if (!is_gimple_reg_type (type)
8510 || TREE_CODE (type) == REFERENCE_TYPE
8511 || TREE_ADDRESSABLE (type))
8512 return false;
8513 /* Don't optimize too large decls, as each thread/task will have
8514 its own. */
8515 HOST_WIDE_INT len = int_size_in_bytes (type);
8516 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8517 return false;
8518 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8519 return false;
8520 return true;
8523 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8524 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8525 GOVD_WRITTEN in outer contexts. */
8527 static void
8528 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8530 for (; ctx; ctx = ctx->outer_context)
8532 splay_tree_node n = splay_tree_lookup (ctx->variables,
8533 (splay_tree_key) decl);
8534 if (n == NULL)
8535 continue;
8536 else if (n->value & GOVD_SHARED)
8538 n->value |= GOVD_WRITTEN;
8539 return;
8541 else if (n->value & GOVD_DATA_SHARE_CLASS)
8542 return;
8546 /* Helper callback for walk_gimple_seq to discover possible stores
8547 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8548 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8549 for those. */
8551 static tree
8552 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8554 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8556 *walk_subtrees = 0;
8557 if (!wi->is_lhs)
8558 return NULL_TREE;
8560 tree op = *tp;
8563 if (handled_component_p (op))
8564 op = TREE_OPERAND (op, 0);
8565 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8566 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8567 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8568 else
8569 break;
8571 while (1);
8572 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8573 return NULL_TREE;
8575 omp_mark_stores (gimplify_omp_ctxp, op);
8576 return NULL_TREE;
8579 /* Helper callback for walk_gimple_seq to discover possible stores
8580 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8581 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8582 for those. */
8584 static tree
8585 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8586 bool *handled_ops_p,
8587 struct walk_stmt_info *wi)
8589 gimple *stmt = gsi_stmt (*gsi_p);
8590 switch (gimple_code (stmt))
8592 /* Don't recurse on OpenMP constructs for which
8593 gimplify_adjust_omp_clauses already handled the bodies,
8594 except handle gimple_omp_for_pre_body. */
8595 case GIMPLE_OMP_FOR:
8596 *handled_ops_p = true;
8597 if (gimple_omp_for_pre_body (stmt))
8598 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8599 omp_find_stores_stmt, omp_find_stores_op, wi);
8600 break;
8601 case GIMPLE_OMP_PARALLEL:
8602 case GIMPLE_OMP_TASK:
8603 case GIMPLE_OMP_SECTIONS:
8604 case GIMPLE_OMP_SINGLE:
8605 case GIMPLE_OMP_TARGET:
8606 case GIMPLE_OMP_TEAMS:
8607 case GIMPLE_OMP_CRITICAL:
8608 *handled_ops_p = true;
8609 break;
8610 default:
8611 break;
8613 return NULL_TREE;
8616 struct gimplify_adjust_omp_clauses_data
8618 tree *list_p;
8619 gimple_seq *pre_p;
8622 /* For all variables that were not actually used within the context,
8623 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8625 static int
8626 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8628 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8629 gimple_seq *pre_p
8630 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8631 tree decl = (tree) n->key;
8632 unsigned flags = n->value;
8633 enum omp_clause_code code;
8634 tree clause;
8635 bool private_debug;
8637 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8638 return 0;
8639 if ((flags & GOVD_SEEN) == 0)
8640 return 0;
8641 if (flags & GOVD_DEBUG_PRIVATE)
8643 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8644 private_debug = true;
8646 else if (flags & GOVD_MAP)
8647 private_debug = false;
8648 else
8649 private_debug
8650 = lang_hooks.decls.omp_private_debug_clause (decl,
8651 !!(flags & GOVD_SHARED));
8652 if (private_debug)
8653 code = OMP_CLAUSE_PRIVATE;
8654 else if (flags & GOVD_MAP)
8656 code = OMP_CLAUSE_MAP;
8657 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8658 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8660 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8661 return 0;
8664 else if (flags & GOVD_SHARED)
8666 if (is_global_var (decl))
8668 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8669 while (ctx != NULL)
8671 splay_tree_node on
8672 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8673 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8674 | GOVD_PRIVATE | GOVD_REDUCTION
8675 | GOVD_LINEAR | GOVD_MAP)) != 0)
8676 break;
8677 ctx = ctx->outer_context;
8679 if (ctx == NULL)
8680 return 0;
8682 code = OMP_CLAUSE_SHARED;
8684 else if (flags & GOVD_PRIVATE)
8685 code = OMP_CLAUSE_PRIVATE;
8686 else if (flags & GOVD_FIRSTPRIVATE)
8688 code = OMP_CLAUSE_FIRSTPRIVATE;
8689 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8690 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8691 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8693 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8694 "%<target%> construct", decl);
8695 return 0;
8698 else if (flags & GOVD_LASTPRIVATE)
8699 code = OMP_CLAUSE_LASTPRIVATE;
8700 else if (flags & GOVD_ALIGNED)
8701 return 0;
8702 else
8703 gcc_unreachable ();
8705 if (((flags & GOVD_LASTPRIVATE)
8706 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8707 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8708 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8710 tree chain = *list_p;
8711 clause = build_omp_clause (input_location, code);
8712 OMP_CLAUSE_DECL (clause) = decl;
8713 OMP_CLAUSE_CHAIN (clause) = chain;
8714 if (private_debug)
8715 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8716 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8717 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8718 else if (code == OMP_CLAUSE_SHARED
8719 && (flags & GOVD_WRITTEN) == 0
8720 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8721 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8722 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8723 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8724 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8726 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8727 OMP_CLAUSE_DECL (nc) = decl;
8728 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8729 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8730 OMP_CLAUSE_DECL (clause)
8731 = build_simple_mem_ref_loc (input_location, decl);
8732 OMP_CLAUSE_DECL (clause)
8733 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8734 build_int_cst (build_pointer_type (char_type_node), 0));
8735 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8736 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8737 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8738 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8739 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8740 OMP_CLAUSE_CHAIN (nc) = chain;
8741 OMP_CLAUSE_CHAIN (clause) = nc;
8742 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8743 gimplify_omp_ctxp = ctx->outer_context;
8744 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8745 pre_p, NULL, is_gimple_val, fb_rvalue);
8746 gimplify_omp_ctxp = ctx;
8748 else if (code == OMP_CLAUSE_MAP)
8750 int kind;
8751 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8752 switch (flags & (GOVD_MAP_TO_ONLY
8753 | GOVD_MAP_FORCE
8754 | GOVD_MAP_FORCE_PRESENT))
8756 case 0:
8757 kind = GOMP_MAP_TOFROM;
8758 break;
8759 case GOVD_MAP_FORCE:
8760 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8761 break;
8762 case GOVD_MAP_TO_ONLY:
8763 kind = GOMP_MAP_TO;
8764 break;
8765 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8766 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8767 break;
8768 case GOVD_MAP_FORCE_PRESENT:
8769 kind = GOMP_MAP_FORCE_PRESENT;
8770 break;
8771 default:
8772 gcc_unreachable ();
8774 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8775 if (DECL_SIZE (decl)
8776 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8778 tree decl2 = DECL_VALUE_EXPR (decl);
8779 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8780 decl2 = TREE_OPERAND (decl2, 0);
8781 gcc_assert (DECL_P (decl2));
8782 tree mem = build_simple_mem_ref (decl2);
8783 OMP_CLAUSE_DECL (clause) = mem;
8784 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8785 if (gimplify_omp_ctxp->outer_context)
8787 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8788 omp_notice_variable (ctx, decl2, true);
8789 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8791 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8792 OMP_CLAUSE_MAP);
8793 OMP_CLAUSE_DECL (nc) = decl;
8794 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8795 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8796 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8797 else
8798 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8799 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8800 OMP_CLAUSE_CHAIN (clause) = nc;
8802 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8803 && lang_hooks.decls.omp_privatize_by_reference (decl))
8805 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8806 OMP_CLAUSE_SIZE (clause)
8807 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8808 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8809 gimplify_omp_ctxp = ctx->outer_context;
8810 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8811 pre_p, NULL, is_gimple_val, fb_rvalue);
8812 gimplify_omp_ctxp = ctx;
8813 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8814 OMP_CLAUSE_MAP);
8815 OMP_CLAUSE_DECL (nc) = decl;
8816 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8817 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8818 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8819 OMP_CLAUSE_CHAIN (clause) = nc;
8821 else
8822 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8824 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8826 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8827 OMP_CLAUSE_DECL (nc) = decl;
8828 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8829 OMP_CLAUSE_CHAIN (nc) = chain;
8830 OMP_CLAUSE_CHAIN (clause) = nc;
8831 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8832 gimplify_omp_ctxp = ctx->outer_context;
8833 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8834 gimplify_omp_ctxp = ctx;
8836 *list_p = clause;
8837 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8838 gimplify_omp_ctxp = ctx->outer_context;
8839 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8840 if (gimplify_omp_ctxp)
8841 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8842 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8843 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8844 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8845 true);
8846 gimplify_omp_ctxp = ctx;
8847 return 0;
8850 static void
8851 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8852 enum tree_code code)
8854 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8855 tree c, decl;
8857 if (body)
8859 struct gimplify_omp_ctx *octx;
8860 for (octx = ctx; octx; octx = octx->outer_context)
8861 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8862 break;
8863 if (octx)
8865 struct walk_stmt_info wi;
8866 memset (&wi, 0, sizeof (wi));
8867 walk_gimple_seq (body, omp_find_stores_stmt,
8868 omp_find_stores_op, &wi);
8871 while ((c = *list_p) != NULL)
8873 splay_tree_node n;
8874 bool remove = false;
8876 switch (OMP_CLAUSE_CODE (c))
8878 case OMP_CLAUSE_FIRSTPRIVATE:
8879 if ((ctx->region_type & ORT_TARGET)
8880 && (ctx->region_type & ORT_ACC) == 0
8881 && TYPE_ATOMIC (strip_array_types
8882 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8884 error_at (OMP_CLAUSE_LOCATION (c),
8885 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8886 "%<target%> construct", OMP_CLAUSE_DECL (c));
8887 remove = true;
8888 break;
8890 /* FALLTHRU */
8891 case OMP_CLAUSE_PRIVATE:
8892 case OMP_CLAUSE_SHARED:
8893 case OMP_CLAUSE_LINEAR:
8894 decl = OMP_CLAUSE_DECL (c);
8895 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8896 remove = !(n->value & GOVD_SEEN);
8897 if (! remove)
8899 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8900 if ((n->value & GOVD_DEBUG_PRIVATE)
8901 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8903 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8904 || ((n->value & GOVD_DATA_SHARE_CLASS)
8905 == GOVD_SHARED));
8906 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8907 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8910 && (n->value & GOVD_WRITTEN) == 0
8911 && DECL_P (decl)
8912 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8913 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8914 else if (DECL_P (decl)
8915 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8916 && (n->value & GOVD_WRITTEN) != 1)
8917 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8918 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8919 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8920 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8922 break;
8924 case OMP_CLAUSE_LASTPRIVATE:
8925 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8926 accurately reflect the presence of a FIRSTPRIVATE clause. */
8927 decl = OMP_CLAUSE_DECL (c);
8928 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8929 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8930 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8931 if (code == OMP_DISTRIBUTE
8932 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8934 remove = true;
8935 error_at (OMP_CLAUSE_LOCATION (c),
8936 "same variable used in %<firstprivate%> and "
8937 "%<lastprivate%> clauses on %<distribute%> "
8938 "construct");
8940 if (!remove
8941 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8942 && DECL_P (decl)
8943 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8944 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8945 break;
8947 case OMP_CLAUSE_ALIGNED:
8948 decl = OMP_CLAUSE_DECL (c);
8949 if (!is_global_var (decl))
8951 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8952 remove = n == NULL || !(n->value & GOVD_SEEN);
8953 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8955 struct gimplify_omp_ctx *octx;
8956 if (n != NULL
8957 && (n->value & (GOVD_DATA_SHARE_CLASS
8958 & ~GOVD_FIRSTPRIVATE)))
8959 remove = true;
8960 else
8961 for (octx = ctx->outer_context; octx;
8962 octx = octx->outer_context)
8964 n = splay_tree_lookup (octx->variables,
8965 (splay_tree_key) decl);
8966 if (n == NULL)
8967 continue;
8968 if (n->value & GOVD_LOCAL)
8969 break;
8970 /* We have to avoid assigning a shared variable
8971 to itself when trying to add
8972 __builtin_assume_aligned. */
8973 if (n->value & GOVD_SHARED)
8975 remove = true;
8976 break;
8981 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8983 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8984 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8985 remove = true;
8987 break;
8989 case OMP_CLAUSE_MAP:
8990 if (code == OMP_TARGET_EXIT_DATA
8991 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8993 remove = true;
8994 break;
8996 decl = OMP_CLAUSE_DECL (c);
8997 /* Data clauses associated with acc parallel reductions must be
8998 compatible with present_or_copy. Warn and adjust the clause
8999 if that is not the case. */
9000 if (ctx->region_type == ORT_ACC_PARALLEL)
9002 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9003 n = NULL;
9005 if (DECL_P (t))
9006 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9008 if (n && (n->value & GOVD_REDUCTION))
9010 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9012 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9013 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9014 && kind != GOMP_MAP_FORCE_PRESENT
9015 && kind != GOMP_MAP_POINTER)
9017 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9018 "incompatible data clause with reduction "
9019 "on %qE; promoting to present_or_copy",
9020 DECL_NAME (t));
9021 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9025 if (!DECL_P (decl))
9027 if ((ctx->region_type & ORT_TARGET) != 0
9028 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9030 if (TREE_CODE (decl) == INDIRECT_REF
9031 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9032 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9033 == REFERENCE_TYPE))
9034 decl = TREE_OPERAND (decl, 0);
9035 if (TREE_CODE (decl) == COMPONENT_REF)
9037 while (TREE_CODE (decl) == COMPONENT_REF)
9038 decl = TREE_OPERAND (decl, 0);
9039 if (DECL_P (decl))
9041 n = splay_tree_lookup (ctx->variables,
9042 (splay_tree_key) decl);
9043 if (!(n->value & GOVD_SEEN))
9044 remove = true;
9048 break;
9050 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9051 if ((ctx->region_type & ORT_TARGET) != 0
9052 && !(n->value & GOVD_SEEN)
9053 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9054 && (!is_global_var (decl)
9055 || !lookup_attribute ("omp declare target link",
9056 DECL_ATTRIBUTES (decl))))
9058 remove = true;
9059 /* For struct element mapping, if struct is never referenced
9060 in target block and none of the mapping has always modifier,
9061 remove all the struct element mappings, which immediately
9062 follow the GOMP_MAP_STRUCT map clause. */
9063 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9065 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9066 while (cnt--)
9067 OMP_CLAUSE_CHAIN (c)
9068 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9071 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9072 && code == OMP_TARGET_EXIT_DATA)
9073 remove = true;
9074 else if (DECL_SIZE (decl)
9075 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9076 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9077 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9078 && (OMP_CLAUSE_MAP_KIND (c)
9079 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9081 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9082 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9083 INTEGER_CST. */
9084 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9086 tree decl2 = DECL_VALUE_EXPR (decl);
9087 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9088 decl2 = TREE_OPERAND (decl2, 0);
9089 gcc_assert (DECL_P (decl2));
9090 tree mem = build_simple_mem_ref (decl2);
9091 OMP_CLAUSE_DECL (c) = mem;
9092 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9093 if (ctx->outer_context)
9095 omp_notice_variable (ctx->outer_context, decl2, true);
9096 omp_notice_variable (ctx->outer_context,
9097 OMP_CLAUSE_SIZE (c), true);
9099 if (((ctx->region_type & ORT_TARGET) != 0
9100 || !ctx->target_firstprivatize_array_bases)
9101 && ((n->value & GOVD_SEEN) == 0
9102 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9104 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9105 OMP_CLAUSE_MAP);
9106 OMP_CLAUSE_DECL (nc) = decl;
9107 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9108 if (ctx->target_firstprivatize_array_bases)
9109 OMP_CLAUSE_SET_MAP_KIND (nc,
9110 GOMP_MAP_FIRSTPRIVATE_POINTER);
9111 else
9112 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9113 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9114 OMP_CLAUSE_CHAIN (c) = nc;
9115 c = nc;
9118 else
9120 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9121 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9122 gcc_assert ((n->value & GOVD_SEEN) == 0
9123 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9124 == 0));
9126 break;
9128 case OMP_CLAUSE_TO:
9129 case OMP_CLAUSE_FROM:
9130 case OMP_CLAUSE__CACHE_:
9131 decl = OMP_CLAUSE_DECL (c);
9132 if (!DECL_P (decl))
9133 break;
9134 if (DECL_SIZE (decl)
9135 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9137 tree decl2 = DECL_VALUE_EXPR (decl);
9138 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9139 decl2 = TREE_OPERAND (decl2, 0);
9140 gcc_assert (DECL_P (decl2));
9141 tree mem = build_simple_mem_ref (decl2);
9142 OMP_CLAUSE_DECL (c) = mem;
9143 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9144 if (ctx->outer_context)
9146 omp_notice_variable (ctx->outer_context, decl2, true);
9147 omp_notice_variable (ctx->outer_context,
9148 OMP_CLAUSE_SIZE (c), true);
9151 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9152 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9153 break;
9155 case OMP_CLAUSE_REDUCTION:
9156 decl = OMP_CLAUSE_DECL (c);
9157 /* OpenACC reductions need a present_or_copy data clause.
9158 Add one if necessary. Error is the reduction is private. */
9159 if (ctx->region_type == ORT_ACC_PARALLEL)
9161 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9162 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9163 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9164 "reduction on %qE", DECL_NAME (decl));
9165 else if ((n->value & GOVD_MAP) == 0)
9167 tree next = OMP_CLAUSE_CHAIN (c);
9168 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9169 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9170 OMP_CLAUSE_DECL (nc) = decl;
9171 OMP_CLAUSE_CHAIN (c) = nc;
9172 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9173 while (1)
9175 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9176 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9177 break;
9178 nc = OMP_CLAUSE_CHAIN (nc);
9180 OMP_CLAUSE_CHAIN (nc) = next;
9181 n->value |= GOVD_MAP;
9184 if (DECL_P (decl)
9185 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9186 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9187 break;
9188 case OMP_CLAUSE_COPYIN:
9189 case OMP_CLAUSE_COPYPRIVATE:
9190 case OMP_CLAUSE_IF:
9191 case OMP_CLAUSE_NUM_THREADS:
9192 case OMP_CLAUSE_NUM_TEAMS:
9193 case OMP_CLAUSE_THREAD_LIMIT:
9194 case OMP_CLAUSE_DIST_SCHEDULE:
9195 case OMP_CLAUSE_DEVICE:
9196 case OMP_CLAUSE_SCHEDULE:
9197 case OMP_CLAUSE_NOWAIT:
9198 case OMP_CLAUSE_ORDERED:
9199 case OMP_CLAUSE_DEFAULT:
9200 case OMP_CLAUSE_UNTIED:
9201 case OMP_CLAUSE_COLLAPSE:
9202 case OMP_CLAUSE_FINAL:
9203 case OMP_CLAUSE_MERGEABLE:
9204 case OMP_CLAUSE_PROC_BIND:
9205 case OMP_CLAUSE_SAFELEN:
9206 case OMP_CLAUSE_SIMDLEN:
9207 case OMP_CLAUSE_DEPEND:
9208 case OMP_CLAUSE_PRIORITY:
9209 case OMP_CLAUSE_GRAINSIZE:
9210 case OMP_CLAUSE_NUM_TASKS:
9211 case OMP_CLAUSE_NOGROUP:
9212 case OMP_CLAUSE_THREADS:
9213 case OMP_CLAUSE_SIMD:
9214 case OMP_CLAUSE_HINT:
9215 case OMP_CLAUSE_DEFAULTMAP:
9216 case OMP_CLAUSE_USE_DEVICE_PTR:
9217 case OMP_CLAUSE_IS_DEVICE_PTR:
9218 case OMP_CLAUSE__CILK_FOR_COUNT_:
9219 case OMP_CLAUSE_ASYNC:
9220 case OMP_CLAUSE_WAIT:
9221 case OMP_CLAUSE_INDEPENDENT:
9222 case OMP_CLAUSE_NUM_GANGS:
9223 case OMP_CLAUSE_NUM_WORKERS:
9224 case OMP_CLAUSE_VECTOR_LENGTH:
9225 case OMP_CLAUSE_GANG:
9226 case OMP_CLAUSE_WORKER:
9227 case OMP_CLAUSE_VECTOR:
9228 case OMP_CLAUSE_AUTO:
9229 case OMP_CLAUSE_SEQ:
9230 case OMP_CLAUSE_TILE:
9231 break;
9233 default:
9234 gcc_unreachable ();
9237 if (remove)
9238 *list_p = OMP_CLAUSE_CHAIN (c);
9239 else
9240 list_p = &OMP_CLAUSE_CHAIN (c);
9243 /* Add in any implicit data sharing. */
9244 struct gimplify_adjust_omp_clauses_data data;
9245 data.list_p = list_p;
9246 data.pre_p = pre_p;
9247 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9249 gimplify_omp_ctxp = ctx->outer_context;
9250 delete_omp_context (ctx);
9253 /* Gimplify OACC_CACHE. */
9255 static void
9256 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9258 tree expr = *expr_p;
9260 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9261 OACC_CACHE);
9262 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9263 OACC_CACHE);
9265 /* TODO: Do something sensible with this information. */
9267 *expr_p = NULL_TREE;
9270 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9271 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9272 kind. The entry kind will replace the one in CLAUSE, while the exit
9273 kind will be used in a new omp_clause and returned to the caller. */
9275 static tree
9276 gimplify_oacc_declare_1 (tree clause)
9278 HOST_WIDE_INT kind, new_op;
9279 bool ret = false;
9280 tree c = NULL;
9282 kind = OMP_CLAUSE_MAP_KIND (clause);
9284 switch (kind)
9286 case GOMP_MAP_ALLOC:
9287 case GOMP_MAP_FORCE_ALLOC:
9288 case GOMP_MAP_FORCE_TO:
9289 new_op = GOMP_MAP_DELETE;
9290 ret = true;
9291 break;
9293 case GOMP_MAP_FORCE_FROM:
9294 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9295 new_op = GOMP_MAP_FORCE_FROM;
9296 ret = true;
9297 break;
9299 case GOMP_MAP_FORCE_TOFROM:
9300 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9301 new_op = GOMP_MAP_FORCE_FROM;
9302 ret = true;
9303 break;
9305 case GOMP_MAP_FROM:
9306 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9307 new_op = GOMP_MAP_FROM;
9308 ret = true;
9309 break;
9311 case GOMP_MAP_TOFROM:
9312 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9313 new_op = GOMP_MAP_FROM;
9314 ret = true;
9315 break;
9317 case GOMP_MAP_DEVICE_RESIDENT:
9318 case GOMP_MAP_FORCE_DEVICEPTR:
9319 case GOMP_MAP_FORCE_PRESENT:
9320 case GOMP_MAP_LINK:
9321 case GOMP_MAP_POINTER:
9322 case GOMP_MAP_TO:
9323 break;
9325 default:
9326 gcc_unreachable ();
9327 break;
9330 if (ret)
9332 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9333 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9334 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9337 return c;
9340 /* Gimplify OACC_DECLARE. */
9342 static void
9343 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9345 tree expr = *expr_p;
9346 gomp_target *stmt;
9347 tree clauses, t, decl;
9349 clauses = OACC_DECLARE_CLAUSES (expr);
9351 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9352 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9354 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9356 decl = OMP_CLAUSE_DECL (t);
9358 if (TREE_CODE (decl) == MEM_REF)
9359 decl = TREE_OPERAND (decl, 0);
9361 if (VAR_P (decl) && !is_oacc_declared (decl))
9363 tree attr = get_identifier ("oacc declare target");
9364 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9365 DECL_ATTRIBUTES (decl));
9368 if (VAR_P (decl)
9369 && !is_global_var (decl)
9370 && DECL_CONTEXT (decl) == current_function_decl)
9372 tree c = gimplify_oacc_declare_1 (t);
9373 if (c)
9375 if (oacc_declare_returns == NULL)
9376 oacc_declare_returns = new hash_map<tree, tree>;
9378 oacc_declare_returns->put (decl, c);
9382 if (gimplify_omp_ctxp)
9383 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9386 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9387 clauses);
9389 gimplify_seq_add_stmt (pre_p, stmt);
9391 *expr_p = NULL_TREE;
9394 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9395 gimplification of the body, as well as scanning the body for used
9396 variables. We need to do this scan now, because variable-sized
9397 decls will be decomposed during gimplification. */
9399 static void
9400 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9402 tree expr = *expr_p;
9403 gimple *g;
9404 gimple_seq body = NULL;
9406 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9407 OMP_PARALLEL_COMBINED (expr)
9408 ? ORT_COMBINED_PARALLEL
9409 : ORT_PARALLEL, OMP_PARALLEL);
9411 push_gimplify_context ();
9413 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9414 if (gimple_code (g) == GIMPLE_BIND)
9415 pop_gimplify_context (g);
9416 else
9417 pop_gimplify_context (NULL);
9419 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9420 OMP_PARALLEL);
9422 g = gimple_build_omp_parallel (body,
9423 OMP_PARALLEL_CLAUSES (expr),
9424 NULL_TREE, NULL_TREE);
9425 if (OMP_PARALLEL_COMBINED (expr))
9426 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9427 gimplify_seq_add_stmt (pre_p, g);
9428 *expr_p = NULL_TREE;
9431 /* Gimplify the contents of an OMP_TASK statement. This involves
9432 gimplification of the body, as well as scanning the body for used
9433 variables. We need to do this scan now, because variable-sized
9434 decls will be decomposed during gimplification. */
9436 static void
9437 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9439 tree expr = *expr_p;
9440 gimple *g;
9441 gimple_seq body = NULL;
9443 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9444 omp_find_clause (OMP_TASK_CLAUSES (expr),
9445 OMP_CLAUSE_UNTIED)
9446 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9448 push_gimplify_context ();
9450 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9451 if (gimple_code (g) == GIMPLE_BIND)
9452 pop_gimplify_context (g);
9453 else
9454 pop_gimplify_context (NULL);
9456 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9457 OMP_TASK);
9459 g = gimple_build_omp_task (body,
9460 OMP_TASK_CLAUSES (expr),
9461 NULL_TREE, NULL_TREE,
9462 NULL_TREE, NULL_TREE, NULL_TREE);
9463 gimplify_seq_add_stmt (pre_p, g);
9464 *expr_p = NULL_TREE;
9467 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9468 with non-NULL OMP_FOR_INIT. */
9470 static tree
9471 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9473 *walk_subtrees = 0;
9474 switch (TREE_CODE (*tp))
9476 case OMP_FOR:
9477 *walk_subtrees = 1;
9478 /* FALLTHRU */
9479 case OMP_SIMD:
9480 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9481 return *tp;
9482 break;
9483 case BIND_EXPR:
9484 case STATEMENT_LIST:
9485 case OMP_PARALLEL:
9486 *walk_subtrees = 1;
9487 break;
9488 default:
9489 break;
9491 return NULL_TREE;
9494 /* Gimplify the gross structure of an OMP_FOR statement. */
9496 static enum gimplify_status
9497 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9499 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9500 enum gimplify_status ret = GS_ALL_DONE;
9501 enum gimplify_status tret;
9502 gomp_for *gfor;
9503 gimple_seq for_body, for_pre_body;
9504 int i;
9505 bitmap has_decl_expr = NULL;
9506 enum omp_region_type ort = ORT_WORKSHARE;
9508 orig_for_stmt = for_stmt = *expr_p;
9510 switch (TREE_CODE (for_stmt))
9512 case OMP_FOR:
9513 case CILK_FOR:
9514 case OMP_DISTRIBUTE:
9515 break;
9516 case OACC_LOOP:
9517 ort = ORT_ACC;
9518 break;
9519 case OMP_TASKLOOP:
9520 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9521 ort = ORT_UNTIED_TASK;
9522 else
9523 ort = ORT_TASK;
9524 break;
9525 case OMP_SIMD:
9526 case CILK_SIMD:
9527 ort = ORT_SIMD;
9528 break;
9529 default:
9530 gcc_unreachable ();
9533 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9534 clause for the IV. */
9535 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9537 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9538 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9539 decl = TREE_OPERAND (t, 0);
9540 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9541 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9542 && OMP_CLAUSE_DECL (c) == decl)
9544 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9545 break;
9549 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9551 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9552 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9553 find_combined_omp_for, NULL, NULL);
9554 if (inner_for_stmt == NULL_TREE)
9556 gcc_assert (seen_error ());
9557 *expr_p = NULL_TREE;
9558 return GS_ERROR;
9562 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9563 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9564 TREE_CODE (for_stmt));
9566 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9567 gimplify_omp_ctxp->distribute = true;
9569 /* Handle OMP_FOR_INIT. */
9570 for_pre_body = NULL;
9571 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9573 has_decl_expr = BITMAP_ALLOC (NULL);
9574 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9575 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9576 == VAR_DECL)
9578 t = OMP_FOR_PRE_BODY (for_stmt);
9579 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9581 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9583 tree_stmt_iterator si;
9584 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9585 tsi_next (&si))
9587 t = tsi_stmt (si);
9588 if (TREE_CODE (t) == DECL_EXPR
9589 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9590 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9594 if (OMP_FOR_PRE_BODY (for_stmt))
9596 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9597 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9598 else
9600 struct gimplify_omp_ctx ctx;
9601 memset (&ctx, 0, sizeof (ctx));
9602 ctx.region_type = ORT_NONE;
9603 gimplify_omp_ctxp = &ctx;
9604 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9605 gimplify_omp_ctxp = NULL;
9608 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9610 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9611 for_stmt = inner_for_stmt;
9613 /* For taskloop, need to gimplify the start, end and step before the
9614 taskloop, outside of the taskloop omp context. */
9615 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9617 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9619 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9620 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9622 TREE_OPERAND (t, 1)
9623 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9624 pre_p, NULL, false);
9625 tree c = build_omp_clause (input_location,
9626 OMP_CLAUSE_FIRSTPRIVATE);
9627 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9628 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9629 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9632 /* Handle OMP_FOR_COND. */
9633 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9634 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9636 TREE_OPERAND (t, 1)
9637 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9638 gimple_seq_empty_p (for_pre_body)
9639 ? pre_p : &for_pre_body, NULL,
9640 false);
9641 tree c = build_omp_clause (input_location,
9642 OMP_CLAUSE_FIRSTPRIVATE);
9643 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9644 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9645 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9648 /* Handle OMP_FOR_INCR. */
9649 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9650 if (TREE_CODE (t) == MODIFY_EXPR)
9652 decl = TREE_OPERAND (t, 0);
9653 t = TREE_OPERAND (t, 1);
9654 tree *tp = &TREE_OPERAND (t, 1);
9655 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9656 tp = &TREE_OPERAND (t, 0);
9658 if (!is_gimple_constant (*tp))
9660 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9661 ? pre_p : &for_pre_body;
9662 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9663 tree c = build_omp_clause (input_location,
9664 OMP_CLAUSE_FIRSTPRIVATE);
9665 OMP_CLAUSE_DECL (c) = *tp;
9666 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9667 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9672 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9673 OMP_TASKLOOP);
9676 if (orig_for_stmt != for_stmt)
9677 gimplify_omp_ctxp->combined_loop = true;
9679 for_body = NULL;
9680 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9681 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9682 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9683 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9685 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9686 bool is_doacross = false;
9687 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9689 is_doacross = true;
9690 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9691 (OMP_FOR_INIT (for_stmt))
9692 * 2);
9694 int collapse = 1, tile = 0;
9695 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9696 if (c)
9697 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9698 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9699 if (c)
9700 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9701 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9703 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9704 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9705 decl = TREE_OPERAND (t, 0);
9706 gcc_assert (DECL_P (decl));
9707 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9708 || POINTER_TYPE_P (TREE_TYPE (decl)));
9709 if (is_doacross)
9711 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9712 gimplify_omp_ctxp->loop_iter_var.quick_push
9713 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9714 else
9715 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9716 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9719 /* Make sure the iteration variable is private. */
9720 tree c = NULL_TREE;
9721 tree c2 = NULL_TREE;
9722 if (orig_for_stmt != for_stmt)
9723 /* Do this only on innermost construct for combined ones. */;
9724 else if (ort == ORT_SIMD)
9726 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9727 (splay_tree_key) decl);
9728 omp_is_private (gimplify_omp_ctxp, decl,
9729 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9730 != 1));
9731 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9732 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9733 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9735 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9736 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9737 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9738 if (has_decl_expr
9739 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9741 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9742 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9744 struct gimplify_omp_ctx *outer
9745 = gimplify_omp_ctxp->outer_context;
9746 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9748 if (outer->region_type == ORT_WORKSHARE
9749 && outer->combined_loop)
9751 n = splay_tree_lookup (outer->variables,
9752 (splay_tree_key)decl);
9753 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9755 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9756 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9758 else
9760 struct gimplify_omp_ctx *octx = outer->outer_context;
9761 if (octx
9762 && octx->region_type == ORT_COMBINED_PARALLEL
9763 && octx->outer_context
9764 && (octx->outer_context->region_type
9765 == ORT_WORKSHARE)
9766 && octx->outer_context->combined_loop)
9768 octx = octx->outer_context;
9769 n = splay_tree_lookup (octx->variables,
9770 (splay_tree_key)decl);
9771 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9773 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9774 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9781 OMP_CLAUSE_DECL (c) = decl;
9782 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9783 OMP_FOR_CLAUSES (for_stmt) = c;
9784 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9785 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9787 if (outer->region_type == ORT_WORKSHARE
9788 && outer->combined_loop)
9790 if (outer->outer_context
9791 && (outer->outer_context->region_type
9792 == ORT_COMBINED_PARALLEL))
9793 outer = outer->outer_context;
9794 else if (omp_check_private (outer, decl, false))
9795 outer = NULL;
9797 else if (((outer->region_type & ORT_TASK) != 0)
9798 && outer->combined_loop
9799 && !omp_check_private (gimplify_omp_ctxp,
9800 decl, false))
9802 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9804 omp_notice_variable (outer, decl, true);
9805 outer = NULL;
9807 if (outer)
9809 n = splay_tree_lookup (outer->variables,
9810 (splay_tree_key)decl);
9811 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9813 omp_add_variable (outer, decl,
9814 GOVD_LASTPRIVATE | GOVD_SEEN);
9815 if (outer->region_type == ORT_COMBINED_PARALLEL
9816 && outer->outer_context
9817 && (outer->outer_context->region_type
9818 == ORT_WORKSHARE)
9819 && outer->outer_context->combined_loop)
9821 outer = outer->outer_context;
9822 n = splay_tree_lookup (outer->variables,
9823 (splay_tree_key)decl);
9824 if (omp_check_private (outer, decl, false))
9825 outer = NULL;
9826 else if (n == NULL
9827 || ((n->value & GOVD_DATA_SHARE_CLASS)
9828 == 0))
9829 omp_add_variable (outer, decl,
9830 GOVD_LASTPRIVATE
9831 | GOVD_SEEN);
9832 else
9833 outer = NULL;
9835 if (outer && outer->outer_context
9836 && (outer->outer_context->region_type
9837 == ORT_COMBINED_TEAMS))
9839 outer = outer->outer_context;
9840 n = splay_tree_lookup (outer->variables,
9841 (splay_tree_key)decl);
9842 if (n == NULL
9843 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9844 omp_add_variable (outer, decl,
9845 GOVD_SHARED | GOVD_SEEN);
9846 else
9847 outer = NULL;
9849 if (outer && outer->outer_context)
9850 omp_notice_variable (outer->outer_context, decl,
9851 true);
9856 else
9858 bool lastprivate
9859 = (!has_decl_expr
9860 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9861 struct gimplify_omp_ctx *outer
9862 = gimplify_omp_ctxp->outer_context;
9863 if (outer && lastprivate)
9865 if (outer->region_type == ORT_WORKSHARE
9866 && outer->combined_loop)
9868 n = splay_tree_lookup (outer->variables,
9869 (splay_tree_key)decl);
9870 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9872 lastprivate = false;
9873 outer = NULL;
9875 else if (outer->outer_context
9876 && (outer->outer_context->region_type
9877 == ORT_COMBINED_PARALLEL))
9878 outer = outer->outer_context;
9879 else if (omp_check_private (outer, decl, false))
9880 outer = NULL;
9882 else if (((outer->region_type & ORT_TASK) != 0)
9883 && outer->combined_loop
9884 && !omp_check_private (gimplify_omp_ctxp,
9885 decl, false))
9887 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9889 omp_notice_variable (outer, decl, true);
9890 outer = NULL;
9892 if (outer)
9894 n = splay_tree_lookup (outer->variables,
9895 (splay_tree_key)decl);
9896 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9898 omp_add_variable (outer, decl,
9899 GOVD_LASTPRIVATE | GOVD_SEEN);
9900 if (outer->region_type == ORT_COMBINED_PARALLEL
9901 && outer->outer_context
9902 && (outer->outer_context->region_type
9903 == ORT_WORKSHARE)
9904 && outer->outer_context->combined_loop)
9906 outer = outer->outer_context;
9907 n = splay_tree_lookup (outer->variables,
9908 (splay_tree_key)decl);
9909 if (omp_check_private (outer, decl, false))
9910 outer = NULL;
9911 else if (n == NULL
9912 || ((n->value & GOVD_DATA_SHARE_CLASS)
9913 == 0))
9914 omp_add_variable (outer, decl,
9915 GOVD_LASTPRIVATE
9916 | GOVD_SEEN);
9917 else
9918 outer = NULL;
9920 if (outer && outer->outer_context
9921 && (outer->outer_context->region_type
9922 == ORT_COMBINED_TEAMS))
9924 outer = outer->outer_context;
9925 n = splay_tree_lookup (outer->variables,
9926 (splay_tree_key)decl);
9927 if (n == NULL
9928 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9929 omp_add_variable (outer, decl,
9930 GOVD_SHARED | GOVD_SEEN);
9931 else
9932 outer = NULL;
9934 if (outer && outer->outer_context)
9935 omp_notice_variable (outer->outer_context, decl,
9936 true);
9941 c = build_omp_clause (input_location,
9942 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9943 : OMP_CLAUSE_PRIVATE);
9944 OMP_CLAUSE_DECL (c) = decl;
9945 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9946 OMP_FOR_CLAUSES (for_stmt) = c;
9947 omp_add_variable (gimplify_omp_ctxp, decl,
9948 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9949 | GOVD_EXPLICIT | GOVD_SEEN);
9950 c = NULL_TREE;
9953 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9954 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9955 else
9956 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9958 /* If DECL is not a gimple register, create a temporary variable to act
9959 as an iteration counter. This is valid, since DECL cannot be
9960 modified in the body of the loop. Similarly for any iteration vars
9961 in simd with collapse > 1 where the iterator vars must be
9962 lastprivate. */
9963 if (orig_for_stmt != for_stmt)
9964 var = decl;
9965 else if (!is_gimple_reg (decl)
9966 || (ort == ORT_SIMD
9967 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9969 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9970 /* Make sure omp_add_variable is not called on it prematurely.
9971 We call it ourselves a few lines later. */
9972 gimplify_omp_ctxp = NULL;
9973 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9974 gimplify_omp_ctxp = ctx;
9975 TREE_OPERAND (t, 0) = var;
9977 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9979 if (ort == ORT_SIMD
9980 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9982 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9983 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9984 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9985 OMP_CLAUSE_DECL (c2) = var;
9986 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9987 OMP_FOR_CLAUSES (for_stmt) = c2;
9988 omp_add_variable (gimplify_omp_ctxp, var,
9989 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9990 if (c == NULL_TREE)
9992 c = c2;
9993 c2 = NULL_TREE;
9996 else
9997 omp_add_variable (gimplify_omp_ctxp, var,
9998 GOVD_PRIVATE | GOVD_SEEN);
10000 else
10001 var = decl;
10003 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10004 is_gimple_val, fb_rvalue, false);
10005 ret = MIN (ret, tret);
10006 if (ret == GS_ERROR)
10007 return ret;
10009 /* Handle OMP_FOR_COND. */
10010 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10011 gcc_assert (COMPARISON_CLASS_P (t));
10012 gcc_assert (TREE_OPERAND (t, 0) == decl);
10014 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10015 is_gimple_val, fb_rvalue, false);
10016 ret = MIN (ret, tret);
10018 /* Handle OMP_FOR_INCR. */
10019 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10020 switch (TREE_CODE (t))
10022 case PREINCREMENT_EXPR:
10023 case POSTINCREMENT_EXPR:
10025 tree decl = TREE_OPERAND (t, 0);
10026 /* c_omp_for_incr_canonicalize_ptr() should have been
10027 called to massage things appropriately. */
10028 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10030 if (orig_for_stmt != for_stmt)
10031 break;
10032 t = build_int_cst (TREE_TYPE (decl), 1);
10033 if (c)
10034 OMP_CLAUSE_LINEAR_STEP (c) = t;
10035 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10036 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10037 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10038 break;
10041 case PREDECREMENT_EXPR:
10042 case POSTDECREMENT_EXPR:
10043 /* c_omp_for_incr_canonicalize_ptr() should have been
10044 called to massage things appropriately. */
10045 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10046 if (orig_for_stmt != for_stmt)
10047 break;
10048 t = build_int_cst (TREE_TYPE (decl), -1);
10049 if (c)
10050 OMP_CLAUSE_LINEAR_STEP (c) = t;
10051 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10052 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10053 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10054 break;
10056 case MODIFY_EXPR:
10057 gcc_assert (TREE_OPERAND (t, 0) == decl);
10058 TREE_OPERAND (t, 0) = var;
10060 t = TREE_OPERAND (t, 1);
10061 switch (TREE_CODE (t))
10063 case PLUS_EXPR:
10064 if (TREE_OPERAND (t, 1) == decl)
10066 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10067 TREE_OPERAND (t, 0) = var;
10068 break;
10071 /* Fallthru. */
10072 case MINUS_EXPR:
10073 case POINTER_PLUS_EXPR:
10074 gcc_assert (TREE_OPERAND (t, 0) == decl);
10075 TREE_OPERAND (t, 0) = var;
10076 break;
10077 default:
10078 gcc_unreachable ();
10081 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10082 is_gimple_val, fb_rvalue, false);
10083 ret = MIN (ret, tret);
10084 if (c)
10086 tree step = TREE_OPERAND (t, 1);
10087 tree stept = TREE_TYPE (decl);
10088 if (POINTER_TYPE_P (stept))
10089 stept = sizetype;
10090 step = fold_convert (stept, step);
10091 if (TREE_CODE (t) == MINUS_EXPR)
10092 step = fold_build1 (NEGATE_EXPR, stept, step);
10093 OMP_CLAUSE_LINEAR_STEP (c) = step;
10094 if (step != TREE_OPERAND (t, 1))
10096 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10097 &for_pre_body, NULL,
10098 is_gimple_val, fb_rvalue, false);
10099 ret = MIN (ret, tret);
10102 break;
10104 default:
10105 gcc_unreachable ();
10108 if (c2)
10110 gcc_assert (c);
10111 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10114 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10116 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10117 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10118 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10119 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10120 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10121 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10122 && OMP_CLAUSE_DECL (c) == decl)
10124 if (is_doacross && (collapse == 1 || i >= collapse))
10125 t = var;
10126 else
10128 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10129 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10130 gcc_assert (TREE_OPERAND (t, 0) == var);
10131 t = TREE_OPERAND (t, 1);
10132 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10133 || TREE_CODE (t) == MINUS_EXPR
10134 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10135 gcc_assert (TREE_OPERAND (t, 0) == var);
10136 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10137 is_doacross ? var : decl,
10138 TREE_OPERAND (t, 1));
10140 gimple_seq *seq;
10141 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10142 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10143 else
10144 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10145 gimplify_assign (decl, t, seq);
10150 BITMAP_FREE (has_decl_expr);
10152 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10154 push_gimplify_context ();
10155 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10157 OMP_FOR_BODY (orig_for_stmt)
10158 = build3 (BIND_EXPR, void_type_node, NULL,
10159 OMP_FOR_BODY (orig_for_stmt), NULL);
10160 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10164 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10165 &for_body);
10167 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10169 if (gimple_code (g) == GIMPLE_BIND)
10170 pop_gimplify_context (g);
10171 else
10172 pop_gimplify_context (NULL);
10175 if (orig_for_stmt != for_stmt)
10176 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10178 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10179 decl = TREE_OPERAND (t, 0);
10180 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10181 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10182 gimplify_omp_ctxp = ctx->outer_context;
10183 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10184 gimplify_omp_ctxp = ctx;
10185 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10186 TREE_OPERAND (t, 0) = var;
10187 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10188 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10189 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10192 gimplify_adjust_omp_clauses (pre_p, for_body,
10193 &OMP_FOR_CLAUSES (orig_for_stmt),
10194 TREE_CODE (orig_for_stmt));
10196 int kind;
10197 switch (TREE_CODE (orig_for_stmt))
10199 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10200 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10201 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10202 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10203 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10204 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10205 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10206 default:
10207 gcc_unreachable ();
10209 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10210 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10211 for_pre_body);
10212 if (orig_for_stmt != for_stmt)
10213 gimple_omp_for_set_combined_p (gfor, true);
10214 if (gimplify_omp_ctxp
10215 && (gimplify_omp_ctxp->combined_loop
10216 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10217 && gimplify_omp_ctxp->outer_context
10218 && gimplify_omp_ctxp->outer_context->combined_loop)))
10220 gimple_omp_for_set_combined_into_p (gfor, true);
10221 if (gimplify_omp_ctxp->combined_loop)
10222 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10223 else
10224 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10227 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10229 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10230 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10231 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10232 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10233 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10234 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10235 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10236 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10239 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10240 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10241 The outer taskloop stands for computing the number of iterations,
10242 counts for collapsed loops and holding taskloop specific clauses.
10243 The task construct stands for the effect of data sharing on the
10244 explicit task it creates and the inner taskloop stands for expansion
10245 of the static loop inside of the explicit task construct. */
10246 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10248 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10249 tree task_clauses = NULL_TREE;
10250 tree c = *gfor_clauses_ptr;
10251 tree *gtask_clauses_ptr = &task_clauses;
10252 tree outer_for_clauses = NULL_TREE;
10253 tree *gforo_clauses_ptr = &outer_for_clauses;
10254 for (; c; c = OMP_CLAUSE_CHAIN (c))
10255 switch (OMP_CLAUSE_CODE (c))
10257 /* These clauses are allowed on task, move them there. */
10258 case OMP_CLAUSE_SHARED:
10259 case OMP_CLAUSE_FIRSTPRIVATE:
10260 case OMP_CLAUSE_DEFAULT:
10261 case OMP_CLAUSE_IF:
10262 case OMP_CLAUSE_UNTIED:
10263 case OMP_CLAUSE_FINAL:
10264 case OMP_CLAUSE_MERGEABLE:
10265 case OMP_CLAUSE_PRIORITY:
10266 *gtask_clauses_ptr = c;
10267 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10268 break;
10269 case OMP_CLAUSE_PRIVATE:
10270 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10272 /* We want private on outer for and firstprivate
10273 on task. */
10274 *gtask_clauses_ptr
10275 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10276 OMP_CLAUSE_FIRSTPRIVATE);
10277 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10278 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10279 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10280 *gforo_clauses_ptr = c;
10281 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10283 else
10285 *gtask_clauses_ptr = c;
10286 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10288 break;
10289 /* These clauses go into outer taskloop clauses. */
10290 case OMP_CLAUSE_GRAINSIZE:
10291 case OMP_CLAUSE_NUM_TASKS:
10292 case OMP_CLAUSE_NOGROUP:
10293 *gforo_clauses_ptr = c;
10294 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10295 break;
10296 /* Taskloop clause we duplicate on both taskloops. */
10297 case OMP_CLAUSE_COLLAPSE:
10298 *gfor_clauses_ptr = c;
10299 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10300 *gforo_clauses_ptr = copy_node (c);
10301 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10302 break;
10303 /* For lastprivate, keep the clause on inner taskloop, and add
10304 a shared clause on task. If the same decl is also firstprivate,
10305 add also firstprivate clause on the inner taskloop. */
10306 case OMP_CLAUSE_LASTPRIVATE:
10307 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10309 /* For taskloop C++ lastprivate IVs, we want:
10310 1) private on outer taskloop
10311 2) firstprivate and shared on task
10312 3) lastprivate on inner taskloop */
10313 *gtask_clauses_ptr
10314 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10315 OMP_CLAUSE_FIRSTPRIVATE);
10316 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10317 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10318 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10319 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10320 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10321 OMP_CLAUSE_PRIVATE);
10322 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10323 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10324 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10325 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10327 *gfor_clauses_ptr = c;
10328 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10329 *gtask_clauses_ptr
10330 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10331 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10332 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10333 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10334 gtask_clauses_ptr
10335 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10336 break;
10337 default:
10338 gcc_unreachable ();
10340 *gfor_clauses_ptr = NULL_TREE;
10341 *gtask_clauses_ptr = NULL_TREE;
10342 *gforo_clauses_ptr = NULL_TREE;
10343 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10344 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10345 NULL_TREE, NULL_TREE, NULL_TREE);
10346 gimple_omp_task_set_taskloop_p (g, true);
10347 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10348 gomp_for *gforo
10349 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10350 gimple_omp_for_collapse (gfor),
10351 gimple_omp_for_pre_body (gfor));
10352 gimple_omp_for_set_pre_body (gfor, NULL);
10353 gimple_omp_for_set_combined_p (gforo, true);
10354 gimple_omp_for_set_combined_into_p (gfor, true);
10355 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10357 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10358 tree v = create_tmp_var (type);
10359 gimple_omp_for_set_index (gforo, i, v);
10360 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10361 gimple_omp_for_set_initial (gforo, i, t);
10362 gimple_omp_for_set_cond (gforo, i,
10363 gimple_omp_for_cond (gfor, i));
10364 t = unshare_expr (gimple_omp_for_final (gfor, i));
10365 gimple_omp_for_set_final (gforo, i, t);
10366 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10367 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10368 TREE_OPERAND (t, 0) = v;
10369 gimple_omp_for_set_incr (gforo, i, t);
10370 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10371 OMP_CLAUSE_DECL (t) = v;
10372 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10373 gimple_omp_for_set_clauses (gforo, t);
10375 gimplify_seq_add_stmt (pre_p, gforo);
10377 else
10378 gimplify_seq_add_stmt (pre_p, gfor);
10379 if (ret != GS_ALL_DONE)
10380 return GS_ERROR;
10381 *expr_p = NULL_TREE;
10382 return GS_ALL_DONE;
10385 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10386 of OMP_TARGET's body. */
10388 static tree
10389 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10391 *walk_subtrees = 0;
10392 switch (TREE_CODE (*tp))
10394 case OMP_TEAMS:
10395 return *tp;
10396 case BIND_EXPR:
10397 case STATEMENT_LIST:
10398 *walk_subtrees = 1;
10399 break;
10400 default:
10401 break;
10403 return NULL_TREE;
10406 /* Helper function of optimize_target_teams, determine if the expression
10407 can be computed safely before the target construct on the host. */
10409 static tree
10410 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10412 splay_tree_node n;
10414 if (TYPE_P (*tp))
10416 *walk_subtrees = 0;
10417 return NULL_TREE;
10419 switch (TREE_CODE (*tp))
10421 case VAR_DECL:
10422 case PARM_DECL:
10423 case RESULT_DECL:
10424 *walk_subtrees = 0;
10425 if (error_operand_p (*tp)
10426 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10427 || DECL_HAS_VALUE_EXPR_P (*tp)
10428 || DECL_THREAD_LOCAL_P (*tp)
10429 || TREE_SIDE_EFFECTS (*tp)
10430 || TREE_THIS_VOLATILE (*tp))
10431 return *tp;
10432 if (is_global_var (*tp)
10433 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10434 || lookup_attribute ("omp declare target link",
10435 DECL_ATTRIBUTES (*tp))))
10436 return *tp;
10437 if (VAR_P (*tp)
10438 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10439 && !is_global_var (*tp)
10440 && decl_function_context (*tp) == current_function_decl)
10441 return *tp;
10442 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10443 (splay_tree_key) *tp);
10444 if (n == NULL)
10446 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10447 return NULL_TREE;
10448 return *tp;
10450 else if (n->value & GOVD_LOCAL)
10451 return *tp;
10452 else if (n->value & GOVD_FIRSTPRIVATE)
10453 return NULL_TREE;
10454 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10455 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10456 return NULL_TREE;
10457 return *tp;
10458 case INTEGER_CST:
10459 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10460 return *tp;
10461 return NULL_TREE;
10462 case TARGET_EXPR:
10463 if (TARGET_EXPR_INITIAL (*tp)
10464 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10465 return *tp;
10466 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10467 walk_subtrees, NULL);
10468 /* Allow some reasonable subset of integral arithmetics. */
10469 case PLUS_EXPR:
10470 case MINUS_EXPR:
10471 case MULT_EXPR:
10472 case TRUNC_DIV_EXPR:
10473 case CEIL_DIV_EXPR:
10474 case FLOOR_DIV_EXPR:
10475 case ROUND_DIV_EXPR:
10476 case TRUNC_MOD_EXPR:
10477 case CEIL_MOD_EXPR:
10478 case FLOOR_MOD_EXPR:
10479 case ROUND_MOD_EXPR:
10480 case RDIV_EXPR:
10481 case EXACT_DIV_EXPR:
10482 case MIN_EXPR:
10483 case MAX_EXPR:
10484 case LSHIFT_EXPR:
10485 case RSHIFT_EXPR:
10486 case BIT_IOR_EXPR:
10487 case BIT_XOR_EXPR:
10488 case BIT_AND_EXPR:
10489 case NEGATE_EXPR:
10490 case ABS_EXPR:
10491 case BIT_NOT_EXPR:
10492 case NON_LVALUE_EXPR:
10493 CASE_CONVERT:
10494 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10495 return *tp;
10496 return NULL_TREE;
10497 /* And disallow anything else, except for comparisons. */
10498 default:
10499 if (COMPARISON_CLASS_P (*tp))
10500 return NULL_TREE;
10501 return *tp;
10505 /* Try to determine if the num_teams and/or thread_limit expressions
10506 can have their values determined already before entering the
10507 target construct.
10508 INTEGER_CSTs trivially are,
10509 integral decls that are firstprivate (explicitly or implicitly)
10510 or explicitly map(always, to:) or map(always, tofrom:) on the target
10511 region too, and expressions involving simple arithmetics on those
10512 too, function calls are not ok, dereferencing something neither etc.
10513 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10514 EXPR based on what we find:
10515 0 stands for clause not specified at all, use implementation default
10516 -1 stands for value that can't be determined easily before entering
10517 the target construct.
10518 If teams construct is not present at all, use 1 for num_teams
10519 and 0 for thread_limit (only one team is involved, and the thread
10520 limit is implementation defined. */
10522 static void
10523 optimize_target_teams (tree target, gimple_seq *pre_p)
10525 tree body = OMP_BODY (target);
10526 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10527 tree num_teams = integer_zero_node;
10528 tree thread_limit = integer_zero_node;
10529 location_t num_teams_loc = EXPR_LOCATION (target);
10530 location_t thread_limit_loc = EXPR_LOCATION (target);
10531 tree c, *p, expr;
10532 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10534 if (teams == NULL_TREE)
10535 num_teams = integer_one_node;
10536 else
10537 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10539 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10541 p = &num_teams;
10542 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10544 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10546 p = &thread_limit;
10547 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10549 else
10550 continue;
10551 expr = OMP_CLAUSE_OPERAND (c, 0);
10552 if (TREE_CODE (expr) == INTEGER_CST)
10554 *p = expr;
10555 continue;
10557 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10559 *p = integer_minus_one_node;
10560 continue;
10562 *p = expr;
10563 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10564 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10565 == GS_ERROR)
10567 gimplify_omp_ctxp = target_ctx;
10568 *p = integer_minus_one_node;
10569 continue;
10571 gimplify_omp_ctxp = target_ctx;
10572 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10573 OMP_CLAUSE_OPERAND (c, 0) = *p;
10575 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10576 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10577 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10578 OMP_TARGET_CLAUSES (target) = c;
10579 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10580 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10581 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10582 OMP_TARGET_CLAUSES (target) = c;
10585 /* Gimplify the gross structure of several OMP constructs. */
10587 static void
10588 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10590 tree expr = *expr_p;
10591 gimple *stmt;
10592 gimple_seq body = NULL;
10593 enum omp_region_type ort;
10595 switch (TREE_CODE (expr))
10597 case OMP_SECTIONS:
10598 case OMP_SINGLE:
10599 ort = ORT_WORKSHARE;
10600 break;
10601 case OMP_TARGET:
10602 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10603 break;
10604 case OACC_KERNELS:
10605 ort = ORT_ACC_KERNELS;
10606 break;
10607 case OACC_PARALLEL:
10608 ort = ORT_ACC_PARALLEL;
10609 break;
10610 case OACC_DATA:
10611 ort = ORT_ACC_DATA;
10612 break;
10613 case OMP_TARGET_DATA:
10614 ort = ORT_TARGET_DATA;
10615 break;
10616 case OMP_TEAMS:
10617 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10618 break;
10619 case OACC_HOST_DATA:
10620 ort = ORT_ACC_HOST_DATA;
10621 break;
10622 default:
10623 gcc_unreachable ();
10625 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10626 TREE_CODE (expr));
10627 if (TREE_CODE (expr) == OMP_TARGET)
10628 optimize_target_teams (expr, pre_p);
10629 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10631 push_gimplify_context ();
10632 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10633 if (gimple_code (g) == GIMPLE_BIND)
10634 pop_gimplify_context (g);
10635 else
10636 pop_gimplify_context (NULL);
10637 if ((ort & ORT_TARGET_DATA) != 0)
10639 enum built_in_function end_ix;
10640 switch (TREE_CODE (expr))
10642 case OACC_DATA:
10643 case OACC_HOST_DATA:
10644 end_ix = BUILT_IN_GOACC_DATA_END;
10645 break;
10646 case OMP_TARGET_DATA:
10647 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10648 break;
10649 default:
10650 gcc_unreachable ();
10652 tree fn = builtin_decl_explicit (end_ix);
10653 g = gimple_build_call (fn, 0);
10654 gimple_seq cleanup = NULL;
10655 gimple_seq_add_stmt (&cleanup, g);
10656 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10657 body = NULL;
10658 gimple_seq_add_stmt (&body, g);
10661 else
10662 gimplify_and_add (OMP_BODY (expr), &body);
10663 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10664 TREE_CODE (expr));
10666 switch (TREE_CODE (expr))
10668 case OACC_DATA:
10669 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10670 OMP_CLAUSES (expr));
10671 break;
10672 case OACC_KERNELS:
10673 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10674 OMP_CLAUSES (expr));
10675 break;
10676 case OACC_HOST_DATA:
10677 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10678 OMP_CLAUSES (expr));
10679 break;
10680 case OACC_PARALLEL:
10681 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10682 OMP_CLAUSES (expr));
10683 break;
10684 case OMP_SECTIONS:
10685 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10686 break;
10687 case OMP_SINGLE:
10688 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10689 break;
10690 case OMP_TARGET:
10691 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10692 OMP_CLAUSES (expr));
10693 break;
10694 case OMP_TARGET_DATA:
10695 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10696 OMP_CLAUSES (expr));
10697 break;
10698 case OMP_TEAMS:
10699 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10700 break;
10701 default:
10702 gcc_unreachable ();
10705 gimplify_seq_add_stmt (pre_p, stmt);
10706 *expr_p = NULL_TREE;
10709 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10710 target update constructs. */
10712 static void
10713 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10715 tree expr = *expr_p;
10716 int kind;
10717 gomp_target *stmt;
10718 enum omp_region_type ort = ORT_WORKSHARE;
10720 switch (TREE_CODE (expr))
10722 case OACC_ENTER_DATA:
10723 case OACC_EXIT_DATA:
10724 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10725 ort = ORT_ACC;
10726 break;
10727 case OACC_UPDATE:
10728 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10729 ort = ORT_ACC;
10730 break;
10731 case OMP_TARGET_UPDATE:
10732 kind = GF_OMP_TARGET_KIND_UPDATE;
10733 break;
10734 case OMP_TARGET_ENTER_DATA:
10735 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10736 break;
10737 case OMP_TARGET_EXIT_DATA:
10738 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10739 break;
10740 default:
10741 gcc_unreachable ();
10743 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10744 ort, TREE_CODE (expr));
10745 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10746 TREE_CODE (expr));
10747 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10749 gimplify_seq_add_stmt (pre_p, stmt);
10750 *expr_p = NULL_TREE;
10753 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10754 stabilized the lhs of the atomic operation as *ADDR. Return true if
10755 EXPR is this stabilized form. */
10757 static bool
10758 goa_lhs_expr_p (tree expr, tree addr)
10760 /* Also include casts to other type variants. The C front end is fond
10761 of adding these for e.g. volatile variables. This is like
10762 STRIP_TYPE_NOPS but includes the main variant lookup. */
10763 STRIP_USELESS_TYPE_CONVERSION (expr);
10765 if (TREE_CODE (expr) == INDIRECT_REF)
10767 expr = TREE_OPERAND (expr, 0);
10768 while (expr != addr
10769 && (CONVERT_EXPR_P (expr)
10770 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10771 && TREE_CODE (expr) == TREE_CODE (addr)
10772 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10774 expr = TREE_OPERAND (expr, 0);
10775 addr = TREE_OPERAND (addr, 0);
10777 if (expr == addr)
10778 return true;
10779 return (TREE_CODE (addr) == ADDR_EXPR
10780 && TREE_CODE (expr) == ADDR_EXPR
10781 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10783 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10784 return true;
10785 return false;
10788 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10789 expression does not involve the lhs, evaluate it into a temporary.
10790 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10791 or -1 if an error was encountered. */
10793 static int
10794 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10795 tree lhs_var)
10797 tree expr = *expr_p;
10798 int saw_lhs;
10800 if (goa_lhs_expr_p (expr, lhs_addr))
10802 *expr_p = lhs_var;
10803 return 1;
10805 if (is_gimple_val (expr))
10806 return 0;
10808 saw_lhs = 0;
10809 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10811 case tcc_binary:
10812 case tcc_comparison:
10813 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10814 lhs_var);
10815 /* FALLTHRU */
10816 case tcc_unary:
10817 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10818 lhs_var);
10819 break;
10820 case tcc_expression:
10821 switch (TREE_CODE (expr))
10823 case TRUTH_ANDIF_EXPR:
10824 case TRUTH_ORIF_EXPR:
10825 case TRUTH_AND_EXPR:
10826 case TRUTH_OR_EXPR:
10827 case TRUTH_XOR_EXPR:
10828 case BIT_INSERT_EXPR:
10829 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10830 lhs_addr, lhs_var);
10831 /* FALLTHRU */
10832 case TRUTH_NOT_EXPR:
10833 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10834 lhs_addr, lhs_var);
10835 break;
10836 case COMPOUND_EXPR:
10837 /* Break out any preevaluations from cp_build_modify_expr. */
10838 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10839 expr = TREE_OPERAND (expr, 1))
10840 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10841 *expr_p = expr;
10842 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10843 default:
10844 break;
10846 break;
10847 case tcc_reference:
10848 if (TREE_CODE (expr) == BIT_FIELD_REF)
10849 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10850 lhs_addr, lhs_var);
10851 break;
10852 default:
10853 break;
10856 if (saw_lhs == 0)
10858 enum gimplify_status gs;
10859 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10860 if (gs != GS_ALL_DONE)
10861 saw_lhs = -1;
10864 return saw_lhs;
10867 /* Gimplify an OMP_ATOMIC statement. */
10869 static enum gimplify_status
10870 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10872 tree addr = TREE_OPERAND (*expr_p, 0);
10873 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10874 ? NULL : TREE_OPERAND (*expr_p, 1);
10875 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10876 tree tmp_load;
10877 gomp_atomic_load *loadstmt;
10878 gomp_atomic_store *storestmt;
10880 tmp_load = create_tmp_reg (type);
10881 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10882 return GS_ERROR;
10884 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10885 != GS_ALL_DONE)
10886 return GS_ERROR;
10888 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10889 gimplify_seq_add_stmt (pre_p, loadstmt);
10890 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10891 != GS_ALL_DONE)
10892 return GS_ERROR;
10894 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10895 rhs = tmp_load;
10896 storestmt = gimple_build_omp_atomic_store (rhs);
10897 gimplify_seq_add_stmt (pre_p, storestmt);
10898 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10900 gimple_omp_atomic_set_seq_cst (loadstmt);
10901 gimple_omp_atomic_set_seq_cst (storestmt);
10903 switch (TREE_CODE (*expr_p))
10905 case OMP_ATOMIC_READ:
10906 case OMP_ATOMIC_CAPTURE_OLD:
10907 *expr_p = tmp_load;
10908 gimple_omp_atomic_set_need_value (loadstmt);
10909 break;
10910 case OMP_ATOMIC_CAPTURE_NEW:
10911 *expr_p = rhs;
10912 gimple_omp_atomic_set_need_value (storestmt);
10913 break;
10914 default:
10915 *expr_p = NULL;
10916 break;
10919 return GS_ALL_DONE;
10922 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10923 body, and adding some EH bits. */
10925 static enum gimplify_status
10926 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10928 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10929 gimple *body_stmt;
10930 gtransaction *trans_stmt;
10931 gimple_seq body = NULL;
10932 int subcode = 0;
10934 /* Wrap the transaction body in a BIND_EXPR so we have a context
10935 where to put decls for OMP. */
10936 if (TREE_CODE (tbody) != BIND_EXPR)
10938 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10939 TREE_SIDE_EFFECTS (bind) = 1;
10940 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10941 TRANSACTION_EXPR_BODY (expr) = bind;
10944 push_gimplify_context ();
10945 temp = voidify_wrapper_expr (*expr_p, NULL);
10947 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10948 pop_gimplify_context (body_stmt);
10950 trans_stmt = gimple_build_transaction (body);
10951 if (TRANSACTION_EXPR_OUTER (expr))
10952 subcode = GTMA_IS_OUTER;
10953 else if (TRANSACTION_EXPR_RELAXED (expr))
10954 subcode = GTMA_IS_RELAXED;
10955 gimple_transaction_set_subcode (trans_stmt, subcode);
10957 gimplify_seq_add_stmt (pre_p, trans_stmt);
10959 if (temp)
10961 *expr_p = temp;
10962 return GS_OK;
10965 *expr_p = NULL_TREE;
10966 return GS_ALL_DONE;
10969 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10970 is the OMP_BODY of the original EXPR (which has already been
10971 gimplified so it's not present in the EXPR).
10973 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10975 static gimple *
10976 gimplify_omp_ordered (tree expr, gimple_seq body)
10978 tree c, decls;
10979 int failures = 0;
10980 unsigned int i;
10981 tree source_c = NULL_TREE;
10982 tree sink_c = NULL_TREE;
10984 if (gimplify_omp_ctxp)
10986 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10987 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10988 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10989 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10990 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10992 error_at (OMP_CLAUSE_LOCATION (c),
10993 "%<ordered%> construct with %<depend%> clause must be "
10994 "closely nested inside a loop with %<ordered%> clause "
10995 "with a parameter");
10996 failures++;
10998 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10999 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
11001 bool fail = false;
11002 for (decls = OMP_CLAUSE_DECL (c), i = 0;
11003 decls && TREE_CODE (decls) == TREE_LIST;
11004 decls = TREE_CHAIN (decls), ++i)
11005 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
11006 continue;
11007 else if (TREE_VALUE (decls)
11008 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11010 error_at (OMP_CLAUSE_LOCATION (c),
11011 "variable %qE is not an iteration "
11012 "of outermost loop %d, expected %qE",
11013 TREE_VALUE (decls), i + 1,
11014 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11015 fail = true;
11016 failures++;
11018 else
11019 TREE_VALUE (decls)
11020 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
11021 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11023 error_at (OMP_CLAUSE_LOCATION (c),
11024 "number of variables in %<depend(sink)%> "
11025 "clause does not match number of "
11026 "iteration variables");
11027 failures++;
11029 sink_c = c;
11031 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11032 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11034 if (source_c)
11036 error_at (OMP_CLAUSE_LOCATION (c),
11037 "more than one %<depend(source)%> clause on an "
11038 "%<ordered%> construct");
11039 failures++;
11041 else
11042 source_c = c;
11045 if (source_c && sink_c)
11047 error_at (OMP_CLAUSE_LOCATION (source_c),
11048 "%<depend(source)%> clause specified together with "
11049 "%<depend(sink:)%> clauses on the same construct");
11050 failures++;
11053 if (failures)
11054 return gimple_build_nop ();
11055 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11058 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11059 expression produces a value to be used as an operand inside a GIMPLE
11060 statement, the value will be stored back in *EXPR_P. This value will
11061 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11062 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11063 emitted in PRE_P and POST_P.
11065 Additionally, this process may overwrite parts of the input
11066 expression during gimplification. Ideally, it should be
11067 possible to do non-destructive gimplification.
11069 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11070 the expression needs to evaluate to a value to be used as
11071 an operand in a GIMPLE statement, this value will be stored in
11072 *EXPR_P on exit. This happens when the caller specifies one
11073 of fb_lvalue or fb_rvalue fallback flags.
11075 PRE_P will contain the sequence of GIMPLE statements corresponding
11076 to the evaluation of EXPR and all the side-effects that must
11077 be executed before the main expression. On exit, the last
11078 statement of PRE_P is the core statement being gimplified. For
11079 instance, when gimplifying 'if (++a)' the last statement in
11080 PRE_P will be 'if (t.1)' where t.1 is the result of
11081 pre-incrementing 'a'.
11083 POST_P will contain the sequence of GIMPLE statements corresponding
11084 to the evaluation of all the side-effects that must be executed
11085 after the main expression. If this is NULL, the post
11086 side-effects are stored at the end of PRE_P.
11088 The reason why the output is split in two is to handle post
11089 side-effects explicitly. In some cases, an expression may have
11090 inner and outer post side-effects which need to be emitted in
11091 an order different from the one given by the recursive
11092 traversal. For instance, for the expression (*p--)++ the post
11093 side-effects of '--' must actually occur *after* the post
11094 side-effects of '++'. However, gimplification will first visit
11095 the inner expression, so if a separate POST sequence was not
11096 used, the resulting sequence would be:
11098 1 t.1 = *p
11099 2 p = p - 1
11100 3 t.2 = t.1 + 1
11101 4 *p = t.2
11103 However, the post-decrement operation in line #2 must not be
11104 evaluated until after the store to *p at line #4, so the
11105 correct sequence should be:
11107 1 t.1 = *p
11108 2 t.2 = t.1 + 1
11109 3 *p = t.2
11110 4 p = p - 1
11112 So, by specifying a separate post queue, it is possible
11113 to emit the post side-effects in the correct order.
11114 If POST_P is NULL, an internal queue will be used. Before
11115 returning to the caller, the sequence POST_P is appended to
11116 the main output sequence PRE_P.
11118 GIMPLE_TEST_F points to a function that takes a tree T and
11119 returns nonzero if T is in the GIMPLE form requested by the
11120 caller. The GIMPLE predicates are in gimple.c.
11122 FALLBACK tells the function what sort of a temporary we want if
11123 gimplification cannot produce an expression that complies with
11124 GIMPLE_TEST_F.
11126 fb_none means that no temporary should be generated
11127 fb_rvalue means that an rvalue is OK to generate
11128 fb_lvalue means that an lvalue is OK to generate
11129 fb_either means that either is OK, but an lvalue is preferable.
11130 fb_mayfail means that gimplification may fail (in which case
11131 GS_ERROR will be returned)
11133 The return value is either GS_ERROR or GS_ALL_DONE, since this
11134 function iterates until EXPR is completely gimplified or an error
11135 occurs. */
11137 enum gimplify_status
11138 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11139 bool (*gimple_test_f) (tree), fallback_t fallback)
11141 tree tmp;
11142 gimple_seq internal_pre = NULL;
11143 gimple_seq internal_post = NULL;
11144 tree save_expr;
11145 bool is_statement;
11146 location_t saved_location;
11147 enum gimplify_status ret;
11148 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11149 tree label;
11151 save_expr = *expr_p;
11152 if (save_expr == NULL_TREE)
11153 return GS_ALL_DONE;
11155 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11156 is_statement = gimple_test_f == is_gimple_stmt;
11157 if (is_statement)
11158 gcc_assert (pre_p);
11160 /* Consistency checks. */
11161 if (gimple_test_f == is_gimple_reg)
11162 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11163 else if (gimple_test_f == is_gimple_val
11164 || gimple_test_f == is_gimple_call_addr
11165 || gimple_test_f == is_gimple_condexpr
11166 || gimple_test_f == is_gimple_mem_rhs
11167 || gimple_test_f == is_gimple_mem_rhs_or_call
11168 || gimple_test_f == is_gimple_reg_rhs
11169 || gimple_test_f == is_gimple_reg_rhs_or_call
11170 || gimple_test_f == is_gimple_asm_val
11171 || gimple_test_f == is_gimple_mem_ref_addr)
11172 gcc_assert (fallback & fb_rvalue);
11173 else if (gimple_test_f == is_gimple_min_lval
11174 || gimple_test_f == is_gimple_lvalue)
11175 gcc_assert (fallback & fb_lvalue);
11176 else if (gimple_test_f == is_gimple_addressable)
11177 gcc_assert (fallback & fb_either);
11178 else if (gimple_test_f == is_gimple_stmt)
11179 gcc_assert (fallback == fb_none);
11180 else
11182 /* We should have recognized the GIMPLE_TEST_F predicate to
11183 know what kind of fallback to use in case a temporary is
11184 needed to hold the value or address of *EXPR_P. */
11185 gcc_unreachable ();
11188 /* We used to check the predicate here and return immediately if it
11189 succeeds. This is wrong; the design is for gimplification to be
11190 idempotent, and for the predicates to only test for valid forms, not
11191 whether they are fully simplified. */
11192 if (pre_p == NULL)
11193 pre_p = &internal_pre;
11195 if (post_p == NULL)
11196 post_p = &internal_post;
11198 /* Remember the last statements added to PRE_P and POST_P. Every
11199 new statement added by the gimplification helpers needs to be
11200 annotated with location information. To centralize the
11201 responsibility, we remember the last statement that had been
11202 added to both queues before gimplifying *EXPR_P. If
11203 gimplification produces new statements in PRE_P and POST_P, those
11204 statements will be annotated with the same location information
11205 as *EXPR_P. */
11206 pre_last_gsi = gsi_last (*pre_p);
11207 post_last_gsi = gsi_last (*post_p);
11209 saved_location = input_location;
11210 if (save_expr != error_mark_node
11211 && EXPR_HAS_LOCATION (*expr_p))
11212 input_location = EXPR_LOCATION (*expr_p);
11214 /* Loop over the specific gimplifiers until the toplevel node
11215 remains the same. */
11218 /* Strip away as many useless type conversions as possible
11219 at the toplevel. */
11220 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11222 /* Remember the expr. */
11223 save_expr = *expr_p;
11225 /* Die, die, die, my darling. */
11226 if (save_expr == error_mark_node
11227 || (TREE_TYPE (save_expr)
11228 && TREE_TYPE (save_expr) == error_mark_node))
11230 ret = GS_ERROR;
11231 break;
11234 /* Do any language-specific gimplification. */
11235 ret = ((enum gimplify_status)
11236 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11237 if (ret == GS_OK)
11239 if (*expr_p == NULL_TREE)
11240 break;
11241 if (*expr_p != save_expr)
11242 continue;
11244 else if (ret != GS_UNHANDLED)
11245 break;
11247 /* Make sure that all the cases set 'ret' appropriately. */
11248 ret = GS_UNHANDLED;
11249 switch (TREE_CODE (*expr_p))
11251 /* First deal with the special cases. */
11253 case POSTINCREMENT_EXPR:
11254 case POSTDECREMENT_EXPR:
11255 case PREINCREMENT_EXPR:
11256 case PREDECREMENT_EXPR:
11257 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11258 fallback != fb_none,
11259 TREE_TYPE (*expr_p));
11260 break;
11262 case VIEW_CONVERT_EXPR:
11263 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11264 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11266 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11267 post_p, is_gimple_val, fb_rvalue);
11268 recalculate_side_effects (*expr_p);
11269 break;
11271 /* Fallthru. */
11273 case ARRAY_REF:
11274 case ARRAY_RANGE_REF:
11275 case REALPART_EXPR:
11276 case IMAGPART_EXPR:
11277 case COMPONENT_REF:
11278 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11279 fallback ? fallback : fb_rvalue);
11280 break;
11282 case COND_EXPR:
11283 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11285 /* C99 code may assign to an array in a structure value of a
11286 conditional expression, and this has undefined behavior
11287 only on execution, so create a temporary if an lvalue is
11288 required. */
11289 if (fallback == fb_lvalue)
11291 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11292 mark_addressable (*expr_p);
11293 ret = GS_OK;
11295 break;
11297 case CALL_EXPR:
11298 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11300 /* C99 code may assign to an array in a structure returned
11301 from a function, and this has undefined behavior only on
11302 execution, so create a temporary if an lvalue is
11303 required. */
11304 if (fallback == fb_lvalue)
11306 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11307 mark_addressable (*expr_p);
11308 ret = GS_OK;
11310 break;
11312 case TREE_LIST:
11313 gcc_unreachable ();
11315 case COMPOUND_EXPR:
11316 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11317 break;
11319 case COMPOUND_LITERAL_EXPR:
11320 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11321 gimple_test_f, fallback);
11322 break;
11324 case MODIFY_EXPR:
11325 case INIT_EXPR:
11326 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11327 fallback != fb_none);
11328 break;
11330 case TRUTH_ANDIF_EXPR:
11331 case TRUTH_ORIF_EXPR:
11333 /* Preserve the original type of the expression and the
11334 source location of the outer expression. */
11335 tree org_type = TREE_TYPE (*expr_p);
11336 *expr_p = gimple_boolify (*expr_p);
11337 *expr_p = build3_loc (input_location, COND_EXPR,
11338 org_type, *expr_p,
11339 fold_convert_loc
11340 (input_location,
11341 org_type, boolean_true_node),
11342 fold_convert_loc
11343 (input_location,
11344 org_type, boolean_false_node));
11345 ret = GS_OK;
11346 break;
11349 case TRUTH_NOT_EXPR:
11351 tree type = TREE_TYPE (*expr_p);
11352 /* The parsers are careful to generate TRUTH_NOT_EXPR
11353 only with operands that are always zero or one.
11354 We do not fold here but handle the only interesting case
11355 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11356 *expr_p = gimple_boolify (*expr_p);
11357 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11358 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11359 TREE_TYPE (*expr_p),
11360 TREE_OPERAND (*expr_p, 0));
11361 else
11362 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11363 TREE_TYPE (*expr_p),
11364 TREE_OPERAND (*expr_p, 0),
11365 build_int_cst (TREE_TYPE (*expr_p), 1));
11366 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11367 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11368 ret = GS_OK;
11369 break;
11372 case ADDR_EXPR:
11373 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11374 break;
11376 case ANNOTATE_EXPR:
11378 tree cond = TREE_OPERAND (*expr_p, 0);
11379 tree kind = TREE_OPERAND (*expr_p, 1);
11380 tree type = TREE_TYPE (cond);
11381 if (!INTEGRAL_TYPE_P (type))
11383 *expr_p = cond;
11384 ret = GS_OK;
11385 break;
11387 tree tmp = create_tmp_var (type);
11388 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11389 gcall *call
11390 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11391 gimple_call_set_lhs (call, tmp);
11392 gimplify_seq_add_stmt (pre_p, call);
11393 *expr_p = tmp;
11394 ret = GS_ALL_DONE;
11395 break;
11398 case VA_ARG_EXPR:
11399 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11400 break;
11402 CASE_CONVERT:
11403 if (IS_EMPTY_STMT (*expr_p))
11405 ret = GS_ALL_DONE;
11406 break;
11409 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11410 || fallback == fb_none)
11412 /* Just strip a conversion to void (or in void context) and
11413 try again. */
11414 *expr_p = TREE_OPERAND (*expr_p, 0);
11415 ret = GS_OK;
11416 break;
11419 ret = gimplify_conversion (expr_p);
11420 if (ret == GS_ERROR)
11421 break;
11422 if (*expr_p != save_expr)
11423 break;
11424 /* FALLTHRU */
11426 case FIX_TRUNC_EXPR:
11427 /* unary_expr: ... | '(' cast ')' val | ... */
11428 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11429 is_gimple_val, fb_rvalue);
11430 recalculate_side_effects (*expr_p);
11431 break;
11433 case INDIRECT_REF:
11435 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11436 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11437 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11439 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11440 if (*expr_p != save_expr)
11442 ret = GS_OK;
11443 break;
11446 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11447 is_gimple_reg, fb_rvalue);
11448 if (ret == GS_ERROR)
11449 break;
11451 recalculate_side_effects (*expr_p);
11452 *expr_p = fold_build2_loc (input_location, MEM_REF,
11453 TREE_TYPE (*expr_p),
11454 TREE_OPERAND (*expr_p, 0),
11455 build_int_cst (saved_ptr_type, 0));
11456 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11457 TREE_THIS_NOTRAP (*expr_p) = notrap;
11458 ret = GS_OK;
11459 break;
11462 /* We arrive here through the various re-gimplifcation paths. */
11463 case MEM_REF:
11464 /* First try re-folding the whole thing. */
11465 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11466 TREE_OPERAND (*expr_p, 0),
11467 TREE_OPERAND (*expr_p, 1));
11468 if (tmp)
11470 REF_REVERSE_STORAGE_ORDER (tmp)
11471 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11472 *expr_p = tmp;
11473 recalculate_side_effects (*expr_p);
11474 ret = GS_OK;
11475 break;
11477 /* Avoid re-gimplifying the address operand if it is already
11478 in suitable form. Re-gimplifying would mark the address
11479 operand addressable. Always gimplify when not in SSA form
11480 as we still may have to gimplify decls with value-exprs. */
11481 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11482 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11484 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11485 is_gimple_mem_ref_addr, fb_rvalue);
11486 if (ret == GS_ERROR)
11487 break;
11489 recalculate_side_effects (*expr_p);
11490 ret = GS_ALL_DONE;
11491 break;
11493 /* Constants need not be gimplified. */
11494 case INTEGER_CST:
11495 case REAL_CST:
11496 case FIXED_CST:
11497 case STRING_CST:
11498 case COMPLEX_CST:
11499 case VECTOR_CST:
11500 /* Drop the overflow flag on constants, we do not want
11501 that in the GIMPLE IL. */
11502 if (TREE_OVERFLOW_P (*expr_p))
11503 *expr_p = drop_tree_overflow (*expr_p);
11504 ret = GS_ALL_DONE;
11505 break;
11507 case CONST_DECL:
11508 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11509 CONST_DECL node. Otherwise the decl is replaceable by its
11510 value. */
11511 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11512 if (fallback & fb_lvalue)
11513 ret = GS_ALL_DONE;
11514 else
11516 *expr_p = DECL_INITIAL (*expr_p);
11517 ret = GS_OK;
11519 break;
11521 case DECL_EXPR:
11522 ret = gimplify_decl_expr (expr_p, pre_p);
11523 break;
11525 case BIND_EXPR:
11526 ret = gimplify_bind_expr (expr_p, pre_p);
11527 break;
11529 case LOOP_EXPR:
11530 ret = gimplify_loop_expr (expr_p, pre_p);
11531 break;
11533 case SWITCH_EXPR:
11534 ret = gimplify_switch_expr (expr_p, pre_p);
11535 break;
11537 case EXIT_EXPR:
11538 ret = gimplify_exit_expr (expr_p);
11539 break;
11541 case GOTO_EXPR:
11542 /* If the target is not LABEL, then it is a computed jump
11543 and the target needs to be gimplified. */
11544 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11546 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11547 NULL, is_gimple_val, fb_rvalue);
11548 if (ret == GS_ERROR)
11549 break;
11551 gimplify_seq_add_stmt (pre_p,
11552 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11553 ret = GS_ALL_DONE;
11554 break;
11556 case PREDICT_EXPR:
11557 gimplify_seq_add_stmt (pre_p,
11558 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11559 PREDICT_EXPR_OUTCOME (*expr_p)));
11560 ret = GS_ALL_DONE;
11561 break;
11563 case LABEL_EXPR:
11564 ret = gimplify_label_expr (expr_p, pre_p);
11565 label = LABEL_EXPR_LABEL (*expr_p);
11566 gcc_assert (decl_function_context (label) == current_function_decl);
11568 /* If the label is used in a goto statement, or address of the label
11569 is taken, we need to unpoison all variables that were seen so far.
11570 Doing so would prevent us from reporting a false positives. */
11571 if (asan_poisoned_variables
11572 && asan_used_labels != NULL
11573 && asan_used_labels->contains (label))
11574 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11575 break;
11577 case CASE_LABEL_EXPR:
11578 ret = gimplify_case_label_expr (expr_p, pre_p);
11580 if (gimplify_ctxp->live_switch_vars)
11581 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11582 pre_p);
11583 break;
11585 case RETURN_EXPR:
11586 ret = gimplify_return_expr (*expr_p, pre_p);
11587 break;
11589 case CONSTRUCTOR:
11590 /* Don't reduce this in place; let gimplify_init_constructor work its
11591 magic. Buf if we're just elaborating this for side effects, just
11592 gimplify any element that has side-effects. */
11593 if (fallback == fb_none)
11595 unsigned HOST_WIDE_INT ix;
11596 tree val;
11597 tree temp = NULL_TREE;
11598 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11599 if (TREE_SIDE_EFFECTS (val))
11600 append_to_statement_list (val, &temp);
11602 *expr_p = temp;
11603 ret = temp ? GS_OK : GS_ALL_DONE;
11605 /* C99 code may assign to an array in a constructed
11606 structure or union, and this has undefined behavior only
11607 on execution, so create a temporary if an lvalue is
11608 required. */
11609 else if (fallback == fb_lvalue)
11611 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11612 mark_addressable (*expr_p);
11613 ret = GS_OK;
11615 else
11616 ret = GS_ALL_DONE;
11617 break;
11619 /* The following are special cases that are not handled by the
11620 original GIMPLE grammar. */
11622 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11623 eliminated. */
11624 case SAVE_EXPR:
11625 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11626 break;
11628 case BIT_FIELD_REF:
11629 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11630 post_p, is_gimple_lvalue, fb_either);
11631 recalculate_side_effects (*expr_p);
11632 break;
11634 case TARGET_MEM_REF:
11636 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11638 if (TMR_BASE (*expr_p))
11639 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11640 post_p, is_gimple_mem_ref_addr, fb_either);
11641 if (TMR_INDEX (*expr_p))
11642 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11643 post_p, is_gimple_val, fb_rvalue);
11644 if (TMR_INDEX2 (*expr_p))
11645 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11646 post_p, is_gimple_val, fb_rvalue);
11647 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11648 ret = MIN (r0, r1);
11650 break;
11652 case NON_LVALUE_EXPR:
11653 /* This should have been stripped above. */
11654 gcc_unreachable ();
11656 case ASM_EXPR:
11657 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11658 break;
11660 case TRY_FINALLY_EXPR:
11661 case TRY_CATCH_EXPR:
11663 gimple_seq eval, cleanup;
11664 gtry *try_;
11666 /* Calls to destructors are generated automatically in FINALLY/CATCH
11667 block. They should have location as UNKNOWN_LOCATION. However,
11668 gimplify_call_expr will reset these call stmts to input_location
11669 if it finds stmt's location is unknown. To prevent resetting for
11670 destructors, we set the input_location to unknown.
11671 Note that this only affects the destructor calls in FINALLY/CATCH
11672 block, and will automatically reset to its original value by the
11673 end of gimplify_expr. */
11674 input_location = UNKNOWN_LOCATION;
11675 eval = cleanup = NULL;
11676 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11677 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11678 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11679 if (gimple_seq_empty_p (cleanup))
11681 gimple_seq_add_seq (pre_p, eval);
11682 ret = GS_ALL_DONE;
11683 break;
11685 try_ = gimple_build_try (eval, cleanup,
11686 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11687 ? GIMPLE_TRY_FINALLY
11688 : GIMPLE_TRY_CATCH);
11689 if (EXPR_HAS_LOCATION (save_expr))
11690 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11691 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11692 gimple_set_location (try_, saved_location);
11693 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11694 gimple_try_set_catch_is_cleanup (try_,
11695 TRY_CATCH_IS_CLEANUP (*expr_p));
11696 gimplify_seq_add_stmt (pre_p, try_);
11697 ret = GS_ALL_DONE;
11698 break;
11701 case CLEANUP_POINT_EXPR:
11702 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11703 break;
11705 case TARGET_EXPR:
11706 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11707 break;
11709 case CATCH_EXPR:
11711 gimple *c;
11712 gimple_seq handler = NULL;
11713 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11714 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11715 gimplify_seq_add_stmt (pre_p, c);
11716 ret = GS_ALL_DONE;
11717 break;
11720 case EH_FILTER_EXPR:
11722 gimple *ehf;
11723 gimple_seq failure = NULL;
11725 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11726 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11727 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11728 gimplify_seq_add_stmt (pre_p, ehf);
11729 ret = GS_ALL_DONE;
11730 break;
11733 case OBJ_TYPE_REF:
11735 enum gimplify_status r0, r1;
11736 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11737 post_p, is_gimple_val, fb_rvalue);
11738 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11739 post_p, is_gimple_val, fb_rvalue);
11740 TREE_SIDE_EFFECTS (*expr_p) = 0;
11741 ret = MIN (r0, r1);
11743 break;
11745 case LABEL_DECL:
11746 /* We get here when taking the address of a label. We mark
11747 the label as "forced"; meaning it can never be removed and
11748 it is a potential target for any computed goto. */
11749 FORCED_LABEL (*expr_p) = 1;
11750 ret = GS_ALL_DONE;
11751 break;
11753 case STATEMENT_LIST:
11754 ret = gimplify_statement_list (expr_p, pre_p);
11755 break;
11757 case WITH_SIZE_EXPR:
11759 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11760 post_p == &internal_post ? NULL : post_p,
11761 gimple_test_f, fallback);
11762 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11763 is_gimple_val, fb_rvalue);
11764 ret = GS_ALL_DONE;
11766 break;
11768 case VAR_DECL:
11769 case PARM_DECL:
11770 ret = gimplify_var_or_parm_decl (expr_p);
11771 break;
11773 case RESULT_DECL:
11774 /* When within an OMP context, notice uses of variables. */
11775 if (gimplify_omp_ctxp)
11776 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11777 ret = GS_ALL_DONE;
11778 break;
11780 case SSA_NAME:
11781 /* Allow callbacks into the gimplifier during optimization. */
11782 ret = GS_ALL_DONE;
11783 break;
11785 case OMP_PARALLEL:
11786 gimplify_omp_parallel (expr_p, pre_p);
11787 ret = GS_ALL_DONE;
11788 break;
11790 case OMP_TASK:
11791 gimplify_omp_task (expr_p, pre_p);
11792 ret = GS_ALL_DONE;
11793 break;
11795 case OMP_FOR:
11796 case OMP_SIMD:
11797 case CILK_SIMD:
11798 case CILK_FOR:
11799 case OMP_DISTRIBUTE:
11800 case OMP_TASKLOOP:
11801 case OACC_LOOP:
11802 ret = gimplify_omp_for (expr_p, pre_p);
11803 break;
11805 case OACC_CACHE:
11806 gimplify_oacc_cache (expr_p, pre_p);
11807 ret = GS_ALL_DONE;
11808 break;
11810 case OACC_DECLARE:
11811 gimplify_oacc_declare (expr_p, pre_p);
11812 ret = GS_ALL_DONE;
11813 break;
11815 case OACC_HOST_DATA:
11816 case OACC_DATA:
11817 case OACC_KERNELS:
11818 case OACC_PARALLEL:
11819 case OMP_SECTIONS:
11820 case OMP_SINGLE:
11821 case OMP_TARGET:
11822 case OMP_TARGET_DATA:
11823 case OMP_TEAMS:
11824 gimplify_omp_workshare (expr_p, pre_p);
11825 ret = GS_ALL_DONE;
11826 break;
11828 case OACC_ENTER_DATA:
11829 case OACC_EXIT_DATA:
11830 case OACC_UPDATE:
11831 case OMP_TARGET_UPDATE:
11832 case OMP_TARGET_ENTER_DATA:
11833 case OMP_TARGET_EXIT_DATA:
11834 gimplify_omp_target_update (expr_p, pre_p);
11835 ret = GS_ALL_DONE;
11836 break;
11838 case OMP_SECTION:
11839 case OMP_MASTER:
11840 case OMP_TASKGROUP:
11841 case OMP_ORDERED:
11842 case OMP_CRITICAL:
11844 gimple_seq body = NULL;
11845 gimple *g;
11847 gimplify_and_add (OMP_BODY (*expr_p), &body);
11848 switch (TREE_CODE (*expr_p))
11850 case OMP_SECTION:
11851 g = gimple_build_omp_section (body);
11852 break;
11853 case OMP_MASTER:
11854 g = gimple_build_omp_master (body);
11855 break;
11856 case OMP_TASKGROUP:
11858 gimple_seq cleanup = NULL;
11859 tree fn
11860 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11861 g = gimple_build_call (fn, 0);
11862 gimple_seq_add_stmt (&cleanup, g);
11863 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11864 body = NULL;
11865 gimple_seq_add_stmt (&body, g);
11866 g = gimple_build_omp_taskgroup (body);
11868 break;
11869 case OMP_ORDERED:
11870 g = gimplify_omp_ordered (*expr_p, body);
11871 break;
11872 case OMP_CRITICAL:
11873 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11874 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11875 gimplify_adjust_omp_clauses (pre_p, body,
11876 &OMP_CRITICAL_CLAUSES (*expr_p),
11877 OMP_CRITICAL);
11878 g = gimple_build_omp_critical (body,
11879 OMP_CRITICAL_NAME (*expr_p),
11880 OMP_CRITICAL_CLAUSES (*expr_p));
11881 break;
11882 default:
11883 gcc_unreachable ();
11885 gimplify_seq_add_stmt (pre_p, g);
11886 ret = GS_ALL_DONE;
11887 break;
11890 case OMP_ATOMIC:
11891 case OMP_ATOMIC_READ:
11892 case OMP_ATOMIC_CAPTURE_OLD:
11893 case OMP_ATOMIC_CAPTURE_NEW:
11894 ret = gimplify_omp_atomic (expr_p, pre_p);
11895 break;
11897 case TRANSACTION_EXPR:
11898 ret = gimplify_transaction (expr_p, pre_p);
11899 break;
11901 case TRUTH_AND_EXPR:
11902 case TRUTH_OR_EXPR:
11903 case TRUTH_XOR_EXPR:
11905 tree orig_type = TREE_TYPE (*expr_p);
11906 tree new_type, xop0, xop1;
11907 *expr_p = gimple_boolify (*expr_p);
11908 new_type = TREE_TYPE (*expr_p);
11909 if (!useless_type_conversion_p (orig_type, new_type))
11911 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11912 ret = GS_OK;
11913 break;
11916 /* Boolified binary truth expressions are semantically equivalent
11917 to bitwise binary expressions. Canonicalize them to the
11918 bitwise variant. */
11919 switch (TREE_CODE (*expr_p))
11921 case TRUTH_AND_EXPR:
11922 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11923 break;
11924 case TRUTH_OR_EXPR:
11925 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11926 break;
11927 case TRUTH_XOR_EXPR:
11928 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11929 break;
11930 default:
11931 break;
11933 /* Now make sure that operands have compatible type to
11934 expression's new_type. */
11935 xop0 = TREE_OPERAND (*expr_p, 0);
11936 xop1 = TREE_OPERAND (*expr_p, 1);
11937 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11938 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11939 new_type,
11940 xop0);
11941 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11942 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11943 new_type,
11944 xop1);
11945 /* Continue classified as tcc_binary. */
11946 goto expr_2;
11949 case VEC_COND_EXPR:
11951 enum gimplify_status r0, r1, r2;
11953 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11954 post_p, is_gimple_condexpr, fb_rvalue);
11955 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11956 post_p, is_gimple_val, fb_rvalue);
11957 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11958 post_p, is_gimple_val, fb_rvalue);
11960 ret = MIN (MIN (r0, r1), r2);
11961 recalculate_side_effects (*expr_p);
11963 break;
11965 case FMA_EXPR:
11966 case VEC_PERM_EXPR:
11967 /* Classified as tcc_expression. */
11968 goto expr_3;
11970 case BIT_INSERT_EXPR:
11971 /* Argument 3 is a constant. */
11972 goto expr_2;
11974 case POINTER_PLUS_EXPR:
11976 enum gimplify_status r0, r1;
11977 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11978 post_p, is_gimple_val, fb_rvalue);
11979 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11980 post_p, is_gimple_val, fb_rvalue);
11981 recalculate_side_effects (*expr_p);
11982 ret = MIN (r0, r1);
11983 break;
11986 case CILK_SYNC_STMT:
11988 if (!fn_contains_cilk_spawn_p (cfun))
11990 error_at (EXPR_LOCATION (*expr_p),
11991 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11992 ret = GS_ERROR;
11994 else
11996 gimplify_cilk_sync (expr_p, pre_p);
11997 ret = GS_ALL_DONE;
11999 break;
12002 default:
12003 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12005 case tcc_comparison:
12006 /* Handle comparison of objects of non scalar mode aggregates
12007 with a call to memcmp. It would be nice to only have to do
12008 this for variable-sized objects, but then we'd have to allow
12009 the same nest of reference nodes we allow for MODIFY_EXPR and
12010 that's too complex.
12012 Compare scalar mode aggregates as scalar mode values. Using
12013 memcmp for them would be very inefficient at best, and is
12014 plain wrong if bitfields are involved. */
12016 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12018 /* Vector comparisons need no boolification. */
12019 if (TREE_CODE (type) == VECTOR_TYPE)
12020 goto expr_2;
12021 else if (!AGGREGATE_TYPE_P (type))
12023 tree org_type = TREE_TYPE (*expr_p);
12024 *expr_p = gimple_boolify (*expr_p);
12025 if (!useless_type_conversion_p (org_type,
12026 TREE_TYPE (*expr_p)))
12028 *expr_p = fold_convert_loc (input_location,
12029 org_type, *expr_p);
12030 ret = GS_OK;
12032 else
12033 goto expr_2;
12035 else if (TYPE_MODE (type) != BLKmode)
12036 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12037 else
12038 ret = gimplify_variable_sized_compare (expr_p);
12040 break;
12043 /* If *EXPR_P does not need to be special-cased, handle it
12044 according to its class. */
12045 case tcc_unary:
12046 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12047 post_p, is_gimple_val, fb_rvalue);
12048 break;
12050 case tcc_binary:
12051 expr_2:
12053 enum gimplify_status r0, r1;
12055 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12056 post_p, is_gimple_val, fb_rvalue);
12057 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12058 post_p, is_gimple_val, fb_rvalue);
12060 ret = MIN (r0, r1);
12061 break;
12064 expr_3:
12066 enum gimplify_status r0, r1, r2;
12068 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12069 post_p, is_gimple_val, fb_rvalue);
12070 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12071 post_p, is_gimple_val, fb_rvalue);
12072 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12073 post_p, is_gimple_val, fb_rvalue);
12075 ret = MIN (MIN (r0, r1), r2);
12076 break;
12079 case tcc_declaration:
12080 case tcc_constant:
12081 ret = GS_ALL_DONE;
12082 goto dont_recalculate;
12084 default:
12085 gcc_unreachable ();
12088 recalculate_side_effects (*expr_p);
12090 dont_recalculate:
12091 break;
12094 gcc_assert (*expr_p || ret != GS_OK);
12096 while (ret == GS_OK);
12098 /* If we encountered an error_mark somewhere nested inside, either
12099 stub out the statement or propagate the error back out. */
12100 if (ret == GS_ERROR)
12102 if (is_statement)
12103 *expr_p = NULL;
12104 goto out;
12107 /* This was only valid as a return value from the langhook, which
12108 we handled. Make sure it doesn't escape from any other context. */
12109 gcc_assert (ret != GS_UNHANDLED);
12111 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12113 /* We aren't looking for a value, and we don't have a valid
12114 statement. If it doesn't have side-effects, throw it away.
12115 We can also get here with code such as "*&&L;", where L is
12116 a LABEL_DECL that is marked as FORCED_LABEL. */
12117 if (TREE_CODE (*expr_p) == LABEL_DECL
12118 || !TREE_SIDE_EFFECTS (*expr_p))
12119 *expr_p = NULL;
12120 else if (!TREE_THIS_VOLATILE (*expr_p))
12122 /* This is probably a _REF that contains something nested that
12123 has side effects. Recurse through the operands to find it. */
12124 enum tree_code code = TREE_CODE (*expr_p);
12126 switch (code)
12128 case COMPONENT_REF:
12129 case REALPART_EXPR:
12130 case IMAGPART_EXPR:
12131 case VIEW_CONVERT_EXPR:
12132 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12133 gimple_test_f, fallback);
12134 break;
12136 case ARRAY_REF:
12137 case ARRAY_RANGE_REF:
12138 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12139 gimple_test_f, fallback);
12140 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12141 gimple_test_f, fallback);
12142 break;
12144 default:
12145 /* Anything else with side-effects must be converted to
12146 a valid statement before we get here. */
12147 gcc_unreachable ();
12150 *expr_p = NULL;
12152 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12153 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12155 /* Historically, the compiler has treated a bare reference
12156 to a non-BLKmode volatile lvalue as forcing a load. */
12157 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12159 /* Normally, we do not want to create a temporary for a
12160 TREE_ADDRESSABLE type because such a type should not be
12161 copied by bitwise-assignment. However, we make an
12162 exception here, as all we are doing here is ensuring that
12163 we read the bytes that make up the type. We use
12164 create_tmp_var_raw because create_tmp_var will abort when
12165 given a TREE_ADDRESSABLE type. */
12166 tree tmp = create_tmp_var_raw (type, "vol");
12167 gimple_add_tmp_var (tmp);
12168 gimplify_assign (tmp, *expr_p, pre_p);
12169 *expr_p = NULL;
12171 else
12172 /* We can't do anything useful with a volatile reference to
12173 an incomplete type, so just throw it away. Likewise for
12174 a BLKmode type, since any implicit inner load should
12175 already have been turned into an explicit one by the
12176 gimplification process. */
12177 *expr_p = NULL;
12180 /* If we are gimplifying at the statement level, we're done. Tack
12181 everything together and return. */
12182 if (fallback == fb_none || is_statement)
12184 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12185 it out for GC to reclaim it. */
12186 *expr_p = NULL_TREE;
12188 if (!gimple_seq_empty_p (internal_pre)
12189 || !gimple_seq_empty_p (internal_post))
12191 gimplify_seq_add_seq (&internal_pre, internal_post);
12192 gimplify_seq_add_seq (pre_p, internal_pre);
12195 /* The result of gimplifying *EXPR_P is going to be the last few
12196 statements in *PRE_P and *POST_P. Add location information
12197 to all the statements that were added by the gimplification
12198 helpers. */
12199 if (!gimple_seq_empty_p (*pre_p))
12200 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12202 if (!gimple_seq_empty_p (*post_p))
12203 annotate_all_with_location_after (*post_p, post_last_gsi,
12204 input_location);
12206 goto out;
12209 #ifdef ENABLE_GIMPLE_CHECKING
12210 if (*expr_p)
12212 enum tree_code code = TREE_CODE (*expr_p);
12213 /* These expressions should already be in gimple IR form. */
12214 gcc_assert (code != MODIFY_EXPR
12215 && code != ASM_EXPR
12216 && code != BIND_EXPR
12217 && code != CATCH_EXPR
12218 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12219 && code != EH_FILTER_EXPR
12220 && code != GOTO_EXPR
12221 && code != LABEL_EXPR
12222 && code != LOOP_EXPR
12223 && code != SWITCH_EXPR
12224 && code != TRY_FINALLY_EXPR
12225 && code != OACC_PARALLEL
12226 && code != OACC_KERNELS
12227 && code != OACC_DATA
12228 && code != OACC_HOST_DATA
12229 && code != OACC_DECLARE
12230 && code != OACC_UPDATE
12231 && code != OACC_ENTER_DATA
12232 && code != OACC_EXIT_DATA
12233 && code != OACC_CACHE
12234 && code != OMP_CRITICAL
12235 && code != OMP_FOR
12236 && code != OACC_LOOP
12237 && code != OMP_MASTER
12238 && code != OMP_TASKGROUP
12239 && code != OMP_ORDERED
12240 && code != OMP_PARALLEL
12241 && code != OMP_SECTIONS
12242 && code != OMP_SECTION
12243 && code != OMP_SINGLE);
12245 #endif
12247 /* Otherwise we're gimplifying a subexpression, so the resulting
12248 value is interesting. If it's a valid operand that matches
12249 GIMPLE_TEST_F, we're done. Unless we are handling some
12250 post-effects internally; if that's the case, we need to copy into
12251 a temporary before adding the post-effects to POST_P. */
12252 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12253 goto out;
12255 /* Otherwise, we need to create a new temporary for the gimplified
12256 expression. */
12258 /* We can't return an lvalue if we have an internal postqueue. The
12259 object the lvalue refers to would (probably) be modified by the
12260 postqueue; we need to copy the value out first, which means an
12261 rvalue. */
12262 if ((fallback & fb_lvalue)
12263 && gimple_seq_empty_p (internal_post)
12264 && is_gimple_addressable (*expr_p))
12266 /* An lvalue will do. Take the address of the expression, store it
12267 in a temporary, and replace the expression with an INDIRECT_REF of
12268 that temporary. */
12269 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12270 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12271 *expr_p = build_simple_mem_ref (tmp);
12273 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12275 /* An rvalue will do. Assign the gimplified expression into a
12276 new temporary TMP and replace the original expression with
12277 TMP. First, make sure that the expression has a type so that
12278 it can be assigned into a temporary. */
12279 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12280 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12282 else
12284 #ifdef ENABLE_GIMPLE_CHECKING
12285 if (!(fallback & fb_mayfail))
12287 fprintf (stderr, "gimplification failed:\n");
12288 print_generic_expr (stderr, *expr_p);
12289 debug_tree (*expr_p);
12290 internal_error ("gimplification failed");
12292 #endif
12293 gcc_assert (fallback & fb_mayfail);
12295 /* If this is an asm statement, and the user asked for the
12296 impossible, don't die. Fail and let gimplify_asm_expr
12297 issue an error. */
12298 ret = GS_ERROR;
12299 goto out;
12302 /* Make sure the temporary matches our predicate. */
12303 gcc_assert ((*gimple_test_f) (*expr_p));
12305 if (!gimple_seq_empty_p (internal_post))
12307 annotate_all_with_location (internal_post, input_location);
12308 gimplify_seq_add_seq (pre_p, internal_post);
12311 out:
12312 input_location = saved_location;
12313 return ret;
12316 /* Like gimplify_expr but make sure the gimplified result is not itself
12317 a SSA name (but a decl if it were). Temporaries required by
12318 evaluating *EXPR_P may be still SSA names. */
12320 static enum gimplify_status
12321 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12322 bool (*gimple_test_f) (tree), fallback_t fallback,
12323 bool allow_ssa)
12325 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12326 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12327 gimple_test_f, fallback);
12328 if (! allow_ssa
12329 && TREE_CODE (*expr_p) == SSA_NAME)
12331 tree name = *expr_p;
12332 if (was_ssa_name_p)
12333 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12334 else
12336 /* Avoid the extra copy if possible. */
12337 *expr_p = create_tmp_reg (TREE_TYPE (name));
12338 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12339 release_ssa_name (name);
12342 return ret;
12345 /* Look through TYPE for variable-sized objects and gimplify each such
12346 size that we find. Add to LIST_P any statements generated. */
12348 void
12349 gimplify_type_sizes (tree type, gimple_seq *list_p)
12351 tree field, t;
12353 if (type == NULL || type == error_mark_node)
12354 return;
12356 /* We first do the main variant, then copy into any other variants. */
12357 type = TYPE_MAIN_VARIANT (type);
12359 /* Avoid infinite recursion. */
12360 if (TYPE_SIZES_GIMPLIFIED (type))
12361 return;
12363 TYPE_SIZES_GIMPLIFIED (type) = 1;
12365 switch (TREE_CODE (type))
12367 case INTEGER_TYPE:
12368 case ENUMERAL_TYPE:
12369 case BOOLEAN_TYPE:
12370 case REAL_TYPE:
12371 case FIXED_POINT_TYPE:
12372 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12373 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12375 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12377 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12378 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12380 break;
12382 case ARRAY_TYPE:
12383 /* These types may not have declarations, so handle them here. */
12384 gimplify_type_sizes (TREE_TYPE (type), list_p);
12385 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12386 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12387 with assigned stack slots, for -O1+ -g they should be tracked
12388 by VTA. */
12389 if (!(TYPE_NAME (type)
12390 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12391 && DECL_IGNORED_P (TYPE_NAME (type)))
12392 && TYPE_DOMAIN (type)
12393 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12395 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12396 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12397 DECL_IGNORED_P (t) = 0;
12398 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12399 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12400 DECL_IGNORED_P (t) = 0;
12402 break;
12404 case RECORD_TYPE:
12405 case UNION_TYPE:
12406 case QUAL_UNION_TYPE:
12407 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12408 if (TREE_CODE (field) == FIELD_DECL)
12410 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12411 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12412 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12413 gimplify_type_sizes (TREE_TYPE (field), list_p);
12415 break;
12417 case POINTER_TYPE:
12418 case REFERENCE_TYPE:
12419 /* We used to recurse on the pointed-to type here, which turned out to
12420 be incorrect because its definition might refer to variables not
12421 yet initialized at this point if a forward declaration is involved.
12423 It was actually useful for anonymous pointed-to types to ensure
12424 that the sizes evaluation dominates every possible later use of the
12425 values. Restricting to such types here would be safe since there
12426 is no possible forward declaration around, but would introduce an
12427 undesirable middle-end semantic to anonymity. We then defer to
12428 front-ends the responsibility of ensuring that the sizes are
12429 evaluated both early and late enough, e.g. by attaching artificial
12430 type declarations to the tree. */
12431 break;
12433 default:
12434 break;
12437 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12438 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12440 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12442 TYPE_SIZE (t) = TYPE_SIZE (type);
12443 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12444 TYPE_SIZES_GIMPLIFIED (t) = 1;
12448 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12449 a size or position, has had all of its SAVE_EXPRs evaluated.
12450 We add any required statements to *STMT_P. */
12452 void
12453 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12455 tree expr = *expr_p;
12457 /* We don't do anything if the value isn't there, is constant, or contains
12458 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12459 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12460 will want to replace it with a new variable, but that will cause problems
12461 if this type is from outside the function. It's OK to have that here. */
12462 if (is_gimple_sizepos (expr))
12463 return;
12465 *expr_p = unshare_expr (expr);
12467 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12468 if the def vanishes. */
12469 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12472 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12473 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12474 is true, also gimplify the parameters. */
12476 gbind *
12477 gimplify_body (tree fndecl, bool do_parms)
12479 location_t saved_location = input_location;
12480 gimple_seq parm_stmts, seq;
12481 gimple *outer_stmt;
12482 gbind *outer_bind;
12483 struct cgraph_node *cgn;
12485 timevar_push (TV_TREE_GIMPLIFY);
12487 init_tree_ssa (cfun);
12489 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12490 gimplification. */
12491 default_rtl_profile ();
12493 gcc_assert (gimplify_ctxp == NULL);
12494 push_gimplify_context (true);
12496 if (flag_openacc || flag_openmp)
12498 gcc_assert (gimplify_omp_ctxp == NULL);
12499 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12500 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12503 /* Unshare most shared trees in the body and in that of any nested functions.
12504 It would seem we don't have to do this for nested functions because
12505 they are supposed to be output and then the outer function gimplified
12506 first, but the g++ front end doesn't always do it that way. */
12507 unshare_body (fndecl);
12508 unvisit_body (fndecl);
12510 cgn = cgraph_node::get (fndecl);
12511 if (cgn && cgn->origin)
12512 nonlocal_vlas = new hash_set<tree>;
12514 /* Make sure input_location isn't set to something weird. */
12515 input_location = DECL_SOURCE_LOCATION (fndecl);
12517 /* Resolve callee-copies. This has to be done before processing
12518 the body so that DECL_VALUE_EXPR gets processed correctly. */
12519 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12521 /* Gimplify the function's body. */
12522 seq = NULL;
12523 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12524 outer_stmt = gimple_seq_first_stmt (seq);
12525 if (!outer_stmt)
12527 outer_stmt = gimple_build_nop ();
12528 gimplify_seq_add_stmt (&seq, outer_stmt);
12531 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12532 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12533 if (gimple_code (outer_stmt) == GIMPLE_BIND
12534 && gimple_seq_first (seq) == gimple_seq_last (seq))
12535 outer_bind = as_a <gbind *> (outer_stmt);
12536 else
12537 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12539 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12541 /* If we had callee-copies statements, insert them at the beginning
12542 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12543 if (!gimple_seq_empty_p (parm_stmts))
12545 tree parm;
12547 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12548 gimple_bind_set_body (outer_bind, parm_stmts);
12550 for (parm = DECL_ARGUMENTS (current_function_decl);
12551 parm; parm = DECL_CHAIN (parm))
12552 if (DECL_HAS_VALUE_EXPR_P (parm))
12554 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12555 DECL_IGNORED_P (parm) = 0;
12559 if (nonlocal_vlas)
12561 if (nonlocal_vla_vars)
12563 /* tree-nested.c may later on call declare_vars (..., true);
12564 which relies on BLOCK_VARS chain to be the tail of the
12565 gimple_bind_vars chain. Ensure we don't violate that
12566 assumption. */
12567 if (gimple_bind_block (outer_bind)
12568 == DECL_INITIAL (current_function_decl))
12569 declare_vars (nonlocal_vla_vars, outer_bind, true);
12570 else
12571 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12572 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12573 nonlocal_vla_vars);
12574 nonlocal_vla_vars = NULL_TREE;
12576 delete nonlocal_vlas;
12577 nonlocal_vlas = NULL;
12580 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12581 && gimplify_omp_ctxp)
12583 delete_omp_context (gimplify_omp_ctxp);
12584 gimplify_omp_ctxp = NULL;
12587 pop_gimplify_context (outer_bind);
12588 gcc_assert (gimplify_ctxp == NULL);
12590 if (flag_checking && !seen_error ())
12591 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12593 timevar_pop (TV_TREE_GIMPLIFY);
12594 input_location = saved_location;
12596 return outer_bind;
12599 typedef char *char_p; /* For DEF_VEC_P. */
12601 /* Return whether we should exclude FNDECL from instrumentation. */
12603 static bool
12604 flag_instrument_functions_exclude_p (tree fndecl)
12606 vec<char_p> *v;
12608 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12609 if (v && v->length () > 0)
12611 const char *name;
12612 int i;
12613 char *s;
12615 name = lang_hooks.decl_printable_name (fndecl, 0);
12616 FOR_EACH_VEC_ELT (*v, i, s)
12617 if (strstr (name, s) != NULL)
12618 return true;
12621 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12622 if (v && v->length () > 0)
12624 const char *name;
12625 int i;
12626 char *s;
12628 name = DECL_SOURCE_FILE (fndecl);
12629 FOR_EACH_VEC_ELT (*v, i, s)
12630 if (strstr (name, s) != NULL)
12631 return true;
12634 return false;
12637 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12638 node for the function we want to gimplify.
12640 Return the sequence of GIMPLE statements corresponding to the body
12641 of FNDECL. */
12643 void
12644 gimplify_function_tree (tree fndecl)
12646 tree parm, ret;
12647 gimple_seq seq;
12648 gbind *bind;
12650 gcc_assert (!gimple_body (fndecl));
12652 if (DECL_STRUCT_FUNCTION (fndecl))
12653 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12654 else
12655 push_struct_function (fndecl);
12657 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12658 if necessary. */
12659 cfun->curr_properties |= PROP_gimple_lva;
12661 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12663 /* Preliminarily mark non-addressed complex variables as eligible
12664 for promotion to gimple registers. We'll transform their uses
12665 as we find them. */
12666 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12667 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12668 && !TREE_THIS_VOLATILE (parm)
12669 && !needs_to_live_in_memory (parm))
12670 DECL_GIMPLE_REG_P (parm) = 1;
12673 ret = DECL_RESULT (fndecl);
12674 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12675 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12676 && !needs_to_live_in_memory (ret))
12677 DECL_GIMPLE_REG_P (ret) = 1;
12679 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
12680 asan_poisoned_variables = new hash_set<tree> ();
12681 bind = gimplify_body (fndecl, true);
12682 if (asan_poisoned_variables)
12684 delete asan_poisoned_variables;
12685 asan_poisoned_variables = NULL;
12688 /* The tree body of the function is no longer needed, replace it
12689 with the new GIMPLE body. */
12690 seq = NULL;
12691 gimple_seq_add_stmt (&seq, bind);
12692 gimple_set_body (fndecl, seq);
12694 /* If we're instrumenting function entry/exit, then prepend the call to
12695 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12696 catch the exit hook. */
12697 /* ??? Add some way to ignore exceptions for this TFE. */
12698 if (flag_instrument_function_entry_exit
12699 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12700 /* Do not instrument extern inline functions. */
12701 && !(DECL_DECLARED_INLINE_P (fndecl)
12702 && DECL_EXTERNAL (fndecl)
12703 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12704 && !flag_instrument_functions_exclude_p (fndecl))
12706 tree x;
12707 gbind *new_bind;
12708 gimple *tf;
12709 gimple_seq cleanup = NULL, body = NULL;
12710 tree tmp_var;
12711 gcall *call;
12713 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12714 call = gimple_build_call (x, 1, integer_zero_node);
12715 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12716 gimple_call_set_lhs (call, tmp_var);
12717 gimplify_seq_add_stmt (&cleanup, call);
12718 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12719 call = gimple_build_call (x, 2,
12720 build_fold_addr_expr (current_function_decl),
12721 tmp_var);
12722 gimplify_seq_add_stmt (&cleanup, call);
12723 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12725 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12726 call = gimple_build_call (x, 1, integer_zero_node);
12727 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12728 gimple_call_set_lhs (call, tmp_var);
12729 gimplify_seq_add_stmt (&body, call);
12730 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12731 call = gimple_build_call (x, 2,
12732 build_fold_addr_expr (current_function_decl),
12733 tmp_var);
12734 gimplify_seq_add_stmt (&body, call);
12735 gimplify_seq_add_stmt (&body, tf);
12736 new_bind = gimple_build_bind (NULL, body, NULL);
12738 /* Replace the current function body with the body
12739 wrapped in the try/finally TF. */
12740 seq = NULL;
12741 gimple_seq_add_stmt (&seq, new_bind);
12742 gimple_set_body (fndecl, seq);
12743 bind = new_bind;
12746 if (sanitize_flags_p (SANITIZE_THREAD))
12748 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12749 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12750 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12751 /* Replace the current function body with the body
12752 wrapped in the try/finally TF. */
12753 seq = NULL;
12754 gimple_seq_add_stmt (&seq, new_bind);
12755 gimple_set_body (fndecl, seq);
12758 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12759 cfun->curr_properties |= PROP_gimple_any;
12761 pop_cfun ();
12763 dump_function (TDI_gimple, fndecl);
12766 /* Return a dummy expression of type TYPE in order to keep going after an
12767 error. */
12769 static tree
12770 dummy_object (tree type)
12772 tree t = build_int_cst (build_pointer_type (type), 0);
12773 return build2 (MEM_REF, type, t, t);
12776 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12777 builtin function, but a very special sort of operator. */
12779 enum gimplify_status
12780 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12781 gimple_seq *post_p ATTRIBUTE_UNUSED)
12783 tree promoted_type, have_va_type;
12784 tree valist = TREE_OPERAND (*expr_p, 0);
12785 tree type = TREE_TYPE (*expr_p);
12786 tree t, tag, aptag;
12787 location_t loc = EXPR_LOCATION (*expr_p);
12789 /* Verify that valist is of the proper type. */
12790 have_va_type = TREE_TYPE (valist);
12791 if (have_va_type == error_mark_node)
12792 return GS_ERROR;
12793 have_va_type = targetm.canonical_va_list_type (have_va_type);
12794 if (have_va_type == NULL_TREE
12795 && POINTER_TYPE_P (TREE_TYPE (valist)))
12796 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12797 have_va_type
12798 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12799 gcc_assert (have_va_type != NULL_TREE);
12801 /* Generate a diagnostic for requesting data of a type that cannot
12802 be passed through `...' due to type promotion at the call site. */
12803 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12804 != type)
12806 static bool gave_help;
12807 bool warned;
12808 /* Use the expansion point to handle cases such as passing bool (defined
12809 in a system header) through `...'. */
12810 source_location xloc
12811 = expansion_point_location_if_in_system_header (loc);
12813 /* Unfortunately, this is merely undefined, rather than a constraint
12814 violation, so we cannot make this an error. If this call is never
12815 executed, the program is still strictly conforming. */
12816 warned = warning_at (xloc, 0,
12817 "%qT is promoted to %qT when passed through %<...%>",
12818 type, promoted_type);
12819 if (!gave_help && warned)
12821 gave_help = true;
12822 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12823 promoted_type, type);
12826 /* We can, however, treat "undefined" any way we please.
12827 Call abort to encourage the user to fix the program. */
12828 if (warned)
12829 inform (xloc, "if this code is reached, the program will abort");
12830 /* Before the abort, allow the evaluation of the va_list
12831 expression to exit or longjmp. */
12832 gimplify_and_add (valist, pre_p);
12833 t = build_call_expr_loc (loc,
12834 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12835 gimplify_and_add (t, pre_p);
12837 /* This is dead code, but go ahead and finish so that the
12838 mode of the result comes out right. */
12839 *expr_p = dummy_object (type);
12840 return GS_ALL_DONE;
12843 tag = build_int_cst (build_pointer_type (type), 0);
12844 aptag = build_int_cst (TREE_TYPE (valist), 0);
12846 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12847 valist, tag, aptag);
12849 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12850 needs to be expanded. */
12851 cfun->curr_properties &= ~PROP_gimple_lva;
12853 return GS_OK;
12856 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12858 DST/SRC are the destination and source respectively. You can pass
12859 ungimplified trees in DST or SRC, in which case they will be
12860 converted to a gimple operand if necessary.
12862 This function returns the newly created GIMPLE_ASSIGN tuple. */
12864 gimple *
12865 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12867 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12868 gimplify_and_add (t, seq_p);
12869 ggc_free (t);
12870 return gimple_seq_last_stmt (*seq_p);
12873 inline hashval_t
12874 gimplify_hasher::hash (const elt_t *p)
12876 tree t = p->val;
12877 return iterative_hash_expr (t, 0);
12880 inline bool
12881 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12883 tree t1 = p1->val;
12884 tree t2 = p2->val;
12885 enum tree_code code = TREE_CODE (t1);
12887 if (TREE_CODE (t2) != code
12888 || TREE_TYPE (t1) != TREE_TYPE (t2))
12889 return false;
12891 if (!operand_equal_p (t1, t2, 0))
12892 return false;
12894 /* Only allow them to compare equal if they also hash equal; otherwise
12895 results are nondeterminate, and we fail bootstrap comparison. */
12896 gcc_checking_assert (hash (p1) == hash (p2));
12898 return true;