* asan.c (handle_builtin_alloca): Deal with all alloca variants.
[official-gcc.git] / gcc / gimplify.c
blob2c1ec852210a413f4565951bb398604c7ffb864b
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-general.h"
55 #include "omp-low.h"
56 #include "gimple-low.h"
57 #include "cilk.h"
58 #include "gomp-constants.h"
59 #include "splay-tree.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
62 #include "builtins.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "asan.h"
66 #include "dbgcnt.h"
68 /* Hash set of poisoned variables in a bind expr. */
69 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Data-sharing attributes recorded for each variable seen in an OpenMP
   region; a variable's entry in the region's splay tree is a bitwise OR
   of these flags.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Mask of the flags that classify how a variable is shared.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of the OMP/OpenACC region currently being gimplified; some values
   are combinable bit flags (e.g. ORT_COMBINED_PARALLEL = ORT_PARALLEL|1).  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
146 /* Gimplify hashtable helper. */
148 struct gimplify_hasher : free_ptr_hash <elt_t>
150 static inline hashval_t hash (const elt_t *);
151 static inline bool equal (const elt_t *, const elt_t *);
154 struct gimplify_ctx
156 struct gimplify_ctx *prev_context;
158 vec<gbind *> bind_expr_stack;
159 tree temps;
160 gimple_seq conditional_cleanups;
161 tree exit_label;
162 tree return_temp;
164 vec<tree> case_labels;
165 hash_set<tree> *live_switch_vars;
166 /* The formal temporary table. Should this be persistent? */
167 hash_table<gimplify_hasher> *temp_htab;
169 int conditions;
170 unsigned into_ssa : 1;
171 unsigned allow_rhs_cond_expr : 1;
172 unsigned in_cleanup_point_expr : 1;
173 unsigned keep_stack : 1;
174 unsigned save_stack : 1;
175 unsigned in_switch_expr : 1;
178 struct gimplify_omp_ctx
180 struct gimplify_omp_ctx *outer_context;
181 splay_tree variables;
182 hash_set<tree> *privatized_types;
183 /* Iteration variables in an OMP_FOR. */
184 vec<tree> loop_iter_var;
185 location_t location;
186 enum omp_clause_default_kind default_kind;
187 enum omp_region_type region_type;
188 bool combined_loop;
189 bool distribute;
190 bool target_map_scalars_firstprivate;
191 bool target_map_pointers_as_0len_arrays;
192 bool target_firstprivatize_array_bases;
195 static struct gimplify_ctx *gimplify_ctxp;
196 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
198 /* Forward declaration. */
199 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
200 static hash_map<tree, tree> *oacc_declare_returns;
201 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
202 bool (*) (tree), fallback_t, bool);
204 /* Shorter alias name for the above function for use in gimplify.c
205 only. */
207 static inline void
208 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
210 gimple_seq_add_stmt_without_update (seq_p, gs);
213 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
214 NULL, a new sequence is allocated. This function is
215 similar to gimple_seq_add_seq, but does not scan the operands.
216 During gimplification, we need to manipulate statement sequences
217 before the def/use vectors have been constructed. */
219 static void
220 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
222 gimple_stmt_iterator si;
224 if (src == NULL)
225 return;
227 si = gsi_last (*dst_p);
228 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
232 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
233 and popping gimplify contexts. */
235 static struct gimplify_ctx *ctx_pool = NULL;
237 /* Return a gimplify context struct from the pool. */
239 static inline struct gimplify_ctx *
240 ctx_alloc (void)
242 struct gimplify_ctx * c = ctx_pool;
244 if (c)
245 ctx_pool = c->prev_context;
246 else
247 c = XNEW (struct gimplify_ctx);
249 memset (c, '\0', sizeof (*c));
250 return c;
253 /* Put gimplify context C back into the pool. */
255 static inline void
256 ctx_free (struct gimplify_ctx *c)
258 c->prev_context = ctx_pool;
259 ctx_pool = c;
262 /* Free allocated ctx stack memory. */
264 void
265 free_gimplify_stack (void)
267 struct gimplify_ctx *c;
269 while ((c = ctx_pool))
271 ctx_pool = c->prev_context;
272 free (c);
277 /* Set up a context for the gimplifier. */
279 void
280 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
282 struct gimplify_ctx *c = ctx_alloc ();
284 c->prev_context = gimplify_ctxp;
285 gimplify_ctxp = c;
286 gimplify_ctxp->into_ssa = in_ssa;
287 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
290 /* Tear down a context for the gimplifier. If BODY is non-null, then
291 put the temporaries into the outer BIND_EXPR. Otherwise, put them
292 in the local_decls.
294 BODY is not a sequence, but the first tuple in a sequence. */
296 void
297 pop_gimplify_context (gimple *body)
299 struct gimplify_ctx *c = gimplify_ctxp;
301 gcc_assert (c
302 && (!c->bind_expr_stack.exists ()
303 || c->bind_expr_stack.is_empty ()));
304 c->bind_expr_stack.release ();
305 gimplify_ctxp = c->prev_context;
307 if (body)
308 declare_vars (c->temps, body, false);
309 else
310 record_vars (c->temps);
312 delete c->temp_htab;
313 c->temp_htab = NULL;
314 ctx_free (c);
317 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
319 static void
320 gimple_push_bind_expr (gbind *bind_stmt)
322 gimplify_ctxp->bind_expr_stack.reserve (8);
323 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
326 /* Pop the first element off the stack of bindings. */
328 static void
329 gimple_pop_bind_expr (void)
331 gimplify_ctxp->bind_expr_stack.pop ();
334 /* Return the first element of the stack of bindings. */
336 gbind *
337 gimple_current_bind_expr (void)
339 return gimplify_ctxp->bind_expr_stack.last ();
342 /* Return the stack of bindings created during gimplification. */
344 vec<gbind *>
345 gimple_bind_expr_stack (void)
347 return gimplify_ctxp->bind_expr_stack;
350 /* Return true iff there is a COND_EXPR between us and the innermost
351 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
353 static bool
354 gimple_conditional_context (void)
356 return gimplify_ctxp->conditions > 0;
359 /* Note that we've entered a COND_EXPR. */
361 static void
362 gimple_push_condition (void)
364 #ifdef ENABLE_GIMPLE_CHECKING
365 if (gimplify_ctxp->conditions == 0)
366 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
367 #endif
368 ++(gimplify_ctxp->conditions);
371 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
372 now, add any conditional cleanups we've seen to the prequeue. */
374 static void
375 gimple_pop_condition (gimple_seq *pre_p)
377 int conds = --(gimplify_ctxp->conditions);
379 gcc_assert (conds >= 0);
380 if (conds == 0)
382 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
383 gimplify_ctxp->conditional_cleanups = NULL;
387 /* A stable comparison routine for use with splay trees and DECLs. */
389 static int
390 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
392 tree a = (tree) xa;
393 tree b = (tree) xb;
395 return DECL_UID (a) - DECL_UID (b);
398 /* Create a new omp construct that deals with variable remapping. */
400 static struct gimplify_omp_ctx *
401 new_omp_context (enum omp_region_type region_type)
403 struct gimplify_omp_ctx *c;
405 c = XCNEW (struct gimplify_omp_ctx);
406 c->outer_context = gimplify_omp_ctxp;
407 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
408 c->privatized_types = new hash_set<tree>;
409 c->location = input_location;
410 c->region_type = region_type;
411 if ((region_type & ORT_TASK) == 0)
412 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
413 else
414 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
416 return c;
419 /* Destroy an omp construct that deals with variable remapping. */
421 static void
422 delete_omp_context (struct gimplify_omp_ctx *c)
424 splay_tree_delete (c->variables);
425 delete c->privatized_types;
426 c->loop_iter_var.release ();
427 XDELETE (c);
430 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
431 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
433 /* Both gimplify the statement T and append it to *SEQ_P. This function
434 behaves exactly as gimplify_stmt, but you don't have to pass T as a
435 reference. */
437 void
438 gimplify_and_add (tree t, gimple_seq *seq_p)
440 gimplify_stmt (&t, seq_p);
443 /* Gimplify statement T into sequence *SEQ_P, and return the first
444 tuple in the sequence of generated tuples for this statement.
445 Return NULL if gimplifying T produced no tuples. */
447 static gimple *
448 gimplify_and_return_first (tree t, gimple_seq *seq_p)
450 gimple_stmt_iterator last = gsi_last (*seq_p);
452 gimplify_and_add (t, seq_p);
454 if (!gsi_end_p (last))
456 gsi_next (&last);
457 return gsi_stmt (last);
459 else
460 return gimple_seq_first_stmt (*seq_p);
463 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
464 LHS, or for a call argument. */
466 static bool
467 is_gimple_mem_rhs (tree t)
469 /* If we're dealing with a renamable type, either source or dest must be
470 a renamed variable. */
471 if (is_gimple_reg_type (TREE_TYPE (t)))
472 return is_gimple_val (t);
473 else
474 return is_gimple_val (t) || is_gimple_lvalue (t);
477 /* Return true if T is a CALL_EXPR or an expression that can be
478 assigned to a temporary. Note that this predicate should only be
479 used during gimplification. See the rationale for this in
480 gimplify_modify_expr. */
482 static bool
483 is_gimple_reg_rhs_or_call (tree t)
485 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
486 || TREE_CODE (t) == CALL_EXPR);
489 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
490 this predicate should only be used during gimplification. See the
491 rationale for this in gimplify_modify_expr. */
493 static bool
494 is_gimple_mem_rhs_or_call (tree t)
496 /* If we're dealing with a renamable type, either source or dest must be
497 a renamed variable. */
498 if (is_gimple_reg_type (TREE_TYPE (t)))
499 return is_gimple_val (t);
500 else
501 return (is_gimple_val (t)
502 || is_gimple_lvalue (t)
503 || TREE_CLOBBER_P (t)
504 || TREE_CODE (t) == CALL_EXPR);
507 /* Create a temporary with a name derived from VAL. Subroutine of
508 lookup_tmp_var; nobody else should call this function. */
510 static inline tree
511 create_tmp_from_val (tree val)
513 /* Drop all qualifiers and address-space information from the value type. */
514 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
515 tree var = create_tmp_var (type, get_name (val));
516 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
517 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
518 DECL_GIMPLE_REG_P (var) = 1;
519 return var;
522 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
523 an existing expression temporary. */
525 static tree
526 lookup_tmp_var (tree val, bool is_formal)
528 tree ret;
530 /* If not optimizing, never really reuse a temporary. local-alloc
531 won't allocate any variable that is used in more than one basic
532 block, which means it will go into memory, causing much extra
533 work in reload and final and poorer code generation, outweighing
534 the extra memory allocation here. */
535 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
536 ret = create_tmp_from_val (val);
537 else
539 elt_t elt, *elt_p;
540 elt_t **slot;
542 elt.val = val;
543 if (!gimplify_ctxp->temp_htab)
544 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
545 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
546 if (*slot == NULL)
548 elt_p = XNEW (elt_t);
549 elt_p->val = val;
550 elt_p->temp = ret = create_tmp_from_val (val);
551 *slot = elt_p;
553 else
555 elt_p = *slot;
556 ret = elt_p->temp;
560 return ret;
563 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
565 static tree
566 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
567 bool is_formal, bool allow_ssa)
569 tree t, mod;
571 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
572 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
573 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
574 fb_rvalue);
576 if (allow_ssa
577 && gimplify_ctxp->into_ssa
578 && is_gimple_reg_type (TREE_TYPE (val)))
580 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
581 if (! gimple_in_ssa_p (cfun))
583 const char *name = get_name (val);
584 if (name)
585 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
588 else
589 t = lookup_tmp_var (val, is_formal);
591 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
593 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
595 /* gimplify_modify_expr might want to reduce this further. */
596 gimplify_and_add (mod, pre_p);
597 ggc_free (mod);
599 return t;
602 /* Return a formal temporary variable initialized with VAL. PRE_P is as
603 in gimplify_expr. Only use this function if:
605 1) The value of the unfactored expression represented by VAL will not
606 change between the initialization and use of the temporary, and
607 2) The temporary will not be otherwise modified.
609 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
610 and #2 means it is inappropriate for && temps.
612 For other cases, use get_initialized_tmp_var instead. */
614 tree
615 get_formal_tmp_var (tree val, gimple_seq *pre_p)
617 return internal_get_tmp_var (val, pre_p, NULL, true, true);
620 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
621 are as in gimplify_expr. */
623 tree
624 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
625 bool allow_ssa)
627 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
630 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
631 generate debug info for them; otherwise don't. */
633 void
634 declare_vars (tree vars, gimple *gs, bool debug_info)
636 tree last = vars;
637 if (last)
639 tree temps, block;
641 gbind *scope = as_a <gbind *> (gs);
643 temps = nreverse (last);
645 block = gimple_bind_block (scope);
646 gcc_assert (!block || TREE_CODE (block) == BLOCK);
647 if (!block || !debug_info)
649 DECL_CHAIN (last) = gimple_bind_vars (scope);
650 gimple_bind_set_vars (scope, temps);
652 else
654 /* We need to attach the nodes both to the BIND_EXPR and to its
655 associated BLOCK for debugging purposes. The key point here
656 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
657 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
658 if (BLOCK_VARS (block))
659 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
660 else
662 gimple_bind_set_vars (scope,
663 chainon (gimple_bind_vars (scope), temps));
664 BLOCK_VARS (block) = temps;
670 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
671 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
672 no such upper bound can be obtained. */
674 static void
675 force_constant_size (tree var)
677 /* The only attempt we make is by querying the maximum size of objects
678 of the variable's type. */
680 HOST_WIDE_INT max_size;
682 gcc_assert (VAR_P (var));
684 max_size = max_int_size_in_bytes (TREE_TYPE (var));
686 gcc_assert (max_size >= 0);
688 DECL_SIZE_UNIT (var)
689 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
690 DECL_SIZE (var)
691 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
694 /* Push the temporary variable TMP into the current binding. */
696 void
697 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
699 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
701 /* Later processing assumes that the object size is constant, which might
702 not be true at this point. Force the use of a constant upper bound in
703 this case. */
704 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
705 force_constant_size (tmp);
707 DECL_CONTEXT (tmp) = fn->decl;
708 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
710 record_vars_into (tmp, fn->decl);
713 /* Push the temporary variable TMP into the current binding. */
715 void
716 gimple_add_tmp_var (tree tmp)
718 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
720 /* Later processing assumes that the object size is constant, which might
721 not be true at this point. Force the use of a constant upper bound in
722 this case. */
723 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
724 force_constant_size (tmp);
726 DECL_CONTEXT (tmp) = current_function_decl;
727 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
729 if (gimplify_ctxp)
731 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
732 gimplify_ctxp->temps = tmp;
734 /* Mark temporaries local within the nearest enclosing parallel. */
735 if (gimplify_omp_ctxp)
737 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
738 while (ctx
739 && (ctx->region_type == ORT_WORKSHARE
740 || ctx->region_type == ORT_SIMD
741 || ctx->region_type == ORT_ACC))
742 ctx = ctx->outer_context;
743 if (ctx)
744 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
747 else if (cfun)
748 record_vars (tmp);
749 else
751 gimple_seq body_seq;
753 /* This case is for nested functions. We need to expose the locals
754 they create. */
755 body_seq = gimple_body (current_function_decl);
756 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
762 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
763 nodes that are referenced more than once in GENERIC functions. This is
764 necessary because gimplification (translation into GIMPLE) is performed
765 by modifying tree nodes in-place, so gimplication of a shared node in a
766 first context could generate an invalid GIMPLE form in a second context.
768 This is achieved with a simple mark/copy/unmark algorithm that walks the
769 GENERIC representation top-down, marks nodes with TREE_VISITED the first
770 time it encounters them, duplicates them if they already have TREE_VISITED
771 set, and finally removes the TREE_VISITED marks it has set.
773 The algorithm works only at the function level, i.e. it generates a GENERIC
774 representation of a function with no nodes shared within the function when
775 passed a GENERIC function (except for nodes that are allowed to be shared).
777 At the global level, it is also necessary to unshare tree nodes that are
778 referenced in more than one function, for the same aforementioned reason.
779 This requires some cooperation from the front-end. There are 2 strategies:
781 1. Manual unsharing. The front-end needs to call unshare_expr on every
782 expression that might end up being shared across functions.
784 2. Deep unsharing. This is an extension of regular unsharing. Instead
785 of calling unshare_expr on expressions that might be shared across
786 functions, the front-end pre-marks them with TREE_VISITED. This will
787 ensure that they are unshared on the first reference within functions
788 when the regular unsharing algorithm runs. The counterpart is that
789 this algorithm must look deeper than for manual unsharing, which is
790 specified by LANG_HOOKS_DEEP_UNSHARING.
792 If there are only few specific cases of node sharing across functions, it is
793 probably easier for a front-end to unshare the expressions manually. On the
794 contrary, if the expressions generated at the global level are as widespread
795 as expressions generated within functions, deep unsharing is very likely the
796 way to go. */
798 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
799 These nodes model computations that must be done once. If we were to
800 unshare something like SAVE_EXPR(i++), the gimplification process would
801 create wrong code. However, if DATA is non-null, it must hold a pointer
802 set that is used to unshare the subtrees of these nodes. */
804 static tree
805 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
807 tree t = *tp;
808 enum tree_code code = TREE_CODE (t);
810 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
811 copy their subtrees if we can make sure to do it only once. */
812 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
814 if (data && !((hash_set<tree> *)data)->add (t))
816 else
817 *walk_subtrees = 0;
820 /* Stop at types, decls, constants like copy_tree_r. */
821 else if (TREE_CODE_CLASS (code) == tcc_type
822 || TREE_CODE_CLASS (code) == tcc_declaration
823 || TREE_CODE_CLASS (code) == tcc_constant)
824 *walk_subtrees = 0;
826 /* Cope with the statement expression extension. */
827 else if (code == STATEMENT_LIST)
830 /* Leave the bulk of the work to copy_tree_r itself. */
831 else
832 copy_tree_r (tp, walk_subtrees, NULL);
834 return NULL_TREE;
837 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
838 If *TP has been visited already, then *TP is deeply copied by calling
839 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
841 static tree
842 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
844 tree t = *tp;
845 enum tree_code code = TREE_CODE (t);
847 /* Skip types, decls, and constants. But we do want to look at their
848 types and the bounds of types. Mark them as visited so we properly
849 unmark their subtrees on the unmark pass. If we've already seen them,
850 don't look down further. */
851 if (TREE_CODE_CLASS (code) == tcc_type
852 || TREE_CODE_CLASS (code) == tcc_declaration
853 || TREE_CODE_CLASS (code) == tcc_constant)
855 if (TREE_VISITED (t))
856 *walk_subtrees = 0;
857 else
858 TREE_VISITED (t) = 1;
861 /* If this node has been visited already, unshare it and don't look
862 any deeper. */
863 else if (TREE_VISITED (t))
865 walk_tree (tp, mostly_copy_tree_r, data, NULL);
866 *walk_subtrees = 0;
869 /* Otherwise, mark the node as visited and keep looking. */
870 else
871 TREE_VISITED (t) = 1;
873 return NULL_TREE;
876 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
877 copy_if_shared_r callback unmodified. */
879 static inline void
880 copy_if_shared (tree *tp, void *data)
882 walk_tree (tp, copy_if_shared_r, data, NULL);
885 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
886 any nested functions. */
888 static void
889 unshare_body (tree fndecl)
891 struct cgraph_node *cgn = cgraph_node::get (fndecl);
892 /* If the language requires deep unsharing, we need a pointer set to make
893 sure we don't repeatedly unshare subtrees of unshareable nodes. */
894 hash_set<tree> *visited
895 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
897 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
898 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
899 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
901 delete visited;
903 if (cgn)
904 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
905 unshare_body (cgn->decl);
908 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
909 Subtrees are walked until the first unvisited node is encountered. */
911 static tree
912 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
914 tree t = *tp;
916 /* If this node has been visited, unmark it and keep looking. */
917 if (TREE_VISITED (t))
918 TREE_VISITED (t) = 0;
920 /* Otherwise, don't look any deeper. */
921 else
922 *walk_subtrees = 0;
924 return NULL_TREE;
927 /* Unmark the visited trees rooted at *TP. */
929 static inline void
930 unmark_visited (tree *tp)
932 walk_tree (tp, unmark_visited_r, NULL, NULL);
935 /* Likewise, but mark all trees as not visited. */
937 static void
938 unvisit_body (tree fndecl)
940 struct cgraph_node *cgn = cgraph_node::get (fndecl);
942 unmark_visited (&DECL_SAVED_TREE (fndecl));
943 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
944 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
946 if (cgn)
947 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
948 unvisit_body (cgn->decl);
951 /* Unconditionally make an unshared copy of EXPR. This is used when using
952 stored expressions which span multiple functions, such as BINFO_VTABLE,
953 as the normal unsharing process can't tell that they're shared. */
955 tree
956 unshare_expr (tree expr)
958 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
959 return expr;
962 /* Worker for unshare_expr_without_location. */
964 static tree
965 prune_expr_location (tree *tp, int *walk_subtrees, void *)
967 if (EXPR_P (*tp))
968 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
969 else
970 *walk_subtrees = 0;
971 return NULL_TREE;
974 /* Similar to unshare_expr but also prune all expression locations
975 from EXPR. */
977 tree
978 unshare_expr_without_location (tree expr)
980 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
981 if (EXPR_P (expr))
982 walk_tree (&expr, prune_expr_location, NULL, NULL);
983 return expr;
986 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
987 contain statements and have a value. Assign its value to a temporary
988 and give it void_type_node. Return the temporary, or NULL_TREE if
989 WRAPPER was already void. */
991 tree
992 voidify_wrapper_expr (tree wrapper, tree temp)
994 tree type = TREE_TYPE (wrapper);
995 if (type && !VOID_TYPE_P (type))
997 tree *p;
999 /* Set p to point to the body of the wrapper. Loop until we find
1000 something that isn't a wrapper. */
1001 for (p = &wrapper; p && *p; )
1003 switch (TREE_CODE (*p))
1005 case BIND_EXPR:
1006 TREE_SIDE_EFFECTS (*p) = 1;
1007 TREE_TYPE (*p) = void_type_node;
1008 /* For a BIND_EXPR, the body is operand 1. */
1009 p = &BIND_EXPR_BODY (*p);
1010 break;
1012 case CLEANUP_POINT_EXPR:
1013 case TRY_FINALLY_EXPR:
1014 case TRY_CATCH_EXPR:
1015 TREE_SIDE_EFFECTS (*p) = 1;
1016 TREE_TYPE (*p) = void_type_node;
1017 p = &TREE_OPERAND (*p, 0);
1018 break;
1020 case STATEMENT_LIST:
1022 tree_stmt_iterator i = tsi_last (*p);
1023 TREE_SIDE_EFFECTS (*p) = 1;
1024 TREE_TYPE (*p) = void_type_node;
1025 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1027 break;
1029 case COMPOUND_EXPR:
1030 /* Advance to the last statement. Set all container types to
1031 void. */
1032 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1034 TREE_SIDE_EFFECTS (*p) = 1;
1035 TREE_TYPE (*p) = void_type_node;
1037 break;
1039 case TRANSACTION_EXPR:
1040 TREE_SIDE_EFFECTS (*p) = 1;
1041 TREE_TYPE (*p) = void_type_node;
1042 p = &TRANSACTION_EXPR_BODY (*p);
1043 break;
1045 default:
1046 /* Assume that any tree upon which voidify_wrapper_expr is
1047 directly called is a wrapper, and that its body is op0. */
1048 if (p == &wrapper)
1050 TREE_SIDE_EFFECTS (*p) = 1;
1051 TREE_TYPE (*p) = void_type_node;
1052 p = &TREE_OPERAND (*p, 0);
1053 break;
1055 goto out;
1059 out:
1060 if (p == NULL || IS_EMPTY_STMT (*p))
1061 temp = NULL_TREE;
1062 else if (temp)
1064 /* The wrapper is on the RHS of an assignment that we're pushing
1065 down. */
1066 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1067 || TREE_CODE (temp) == MODIFY_EXPR);
1068 TREE_OPERAND (temp, 1) = *p;
1069 *p = temp;
1071 else
1073 temp = create_tmp_var (type, "retval");
1074 *p = build2 (INIT_EXPR, type, temp, *p);
1077 return temp;
1080 return NULL_TREE;
1083 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1084 a temporary through which they communicate. */
1086 static void
1087 build_stack_save_restore (gcall **save, gcall **restore)
1089 tree tmp_var;
1091 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1092 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1093 gimple_call_set_lhs (*save, tmp_var);
1095 *restore
1096 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1097 1, tmp_var);
1100 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1102 static tree
1103 build_asan_poison_call_expr (tree decl)
1105 /* Do not poison variables that have size equal to zero. */
1106 tree unit_size = DECL_SIZE_UNIT (decl);
1107 if (zerop (unit_size))
1108 return NULL_TREE;
1110 tree base = build_fold_addr_expr (decl);
1112 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1113 void_type_node, 3,
1114 build_int_cst (integer_type_node,
1115 ASAN_MARK_POISON),
1116 base, unit_size);
1119 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1120 on POISON flag, shadow memory of a DECL variable. The call will be
1121 put on location identified by IT iterator, where BEFORE flag drives
1122 position where the stmt will be put. */
1124 static void
1125 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1126 bool before)
1128 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1129 if (gimplify_omp_ctxp)
1130 return;
1132 tree unit_size = DECL_SIZE_UNIT (decl);
1133 tree base = build_fold_addr_expr (decl);
1135 /* Do not poison variables that have size equal to zero. */
1136 if (zerop (unit_size))
1137 return;
1139 /* It's necessary to have all stack variables aligned to ASAN granularity
1140 bytes. */
1141 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1142 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1144 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1146 gimple *g
1147 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1148 build_int_cst (integer_type_node, flags),
1149 base, unit_size);
1151 if (before)
1152 gsi_insert_before (it, g, GSI_NEW_STMT);
1153 else
1154 gsi_insert_after (it, g, GSI_NEW_STMT);
1157 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1158 either poisons or unpoisons a DECL. Created statement is appended
1159 to SEQ_P gimple sequence. */
1161 static void
1162 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1164 gimple_stmt_iterator it = gsi_last (*seq_p);
1165 bool before = false;
1167 if (gsi_end_p (it))
1168 before = true;
1170 asan_poison_variable (decl, poison, &it, before);
1173 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1175 static int
1176 sort_by_decl_uid (const void *a, const void *b)
1178 const tree *t1 = (const tree *)a;
1179 const tree *t2 = (const tree *)b;
1181 int uid1 = DECL_UID (*t1);
1182 int uid2 = DECL_UID (*t2);
1184 if (uid1 < uid2)
1185 return -1;
1186 else if (uid1 > uid2)
1187 return 1;
1188 else
1189 return 0;
1192 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1193 depending on POISON flag. Created statement is appended
1194 to SEQ_P gimple sequence. */
1196 static void
1197 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1199 unsigned c = variables->elements ();
1200 if (c == 0)
1201 return;
1203 auto_vec<tree> sorted_variables (c);
1205 for (hash_set<tree>::iterator it = variables->begin ();
1206 it != variables->end (); ++it)
1207 sorted_variables.safe_push (*it);
1209 sorted_variables.qsort (sort_by_decl_uid);
1211 unsigned i;
1212 tree var;
1213 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1215 asan_poison_variable (var, poison, seq_p);
1217 /* Add use_after_scope_memory attribute for the variable in order
1218 to prevent re-written into SSA. */
1219 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1220 DECL_ATTRIBUTES (var)))
1221 DECL_ATTRIBUTES (var)
1222 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1223 integer_one_node,
1224 DECL_ATTRIBUTES (var));
1228 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
/* EXPR_P points at the BIND_EXPR; PRE_P is the sequence the resulting
   GIMPLE_BIND is appended to.  Returns GS_OK when the BIND_EXPR was a
   value-producing wrapper (voidified to a temporary), GS_ALL_DONE
   otherwise.  */
1230 static enum gimplify_status
1231 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1233 tree bind_expr = *expr_p;
1234 bool old_keep_stack = gimplify_ctxp->keep_stack;
1235 bool old_save_stack = gimplify_ctxp->save_stack;
1236 tree t;
1237 gbind *bind_stmt;
1238 gimple_seq body, cleanup;
1239 gcall *stack_save;
1240 location_t start_locus = 0, end_locus = 0;
1241 tree ret_clauses = NULL;
1243 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1245 /* Mark variables seen in this bind expr. */
1246 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1248 if (VAR_P (t))
1250 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1252 /* Mark variable as local. */
1253 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1254 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1255 || splay_tree_lookup (ctx->variables,
1256 (splay_tree_key) t) == NULL))
1258 if (ctx->region_type == ORT_SIMD
1259 && TREE_ADDRESSABLE (t)
1260 && !TREE_STATIC (t))
1261 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1262 else
1263 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1266 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1268 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1269 cfun->has_local_explicit_reg_vars = true;
1272 /* Preliminarily mark non-addressed complex variables as eligible
1273 for promotion to gimple registers. We'll transform their uses
1274 as we find them. */
1275 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1276 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1277 && !TREE_THIS_VOLATILE (t)
1278 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1279 && !needs_to_live_in_memory (t))
1280 DECL_GIMPLE_REG_P (t) = 1;
1283 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1284 BIND_EXPR_BLOCK (bind_expr))
1285 gimple_push_bind_expr (bind_stmt);
/* Reset the flags for this bind only; gimplifying the body below sets
   them when it meets VLAs (save_stack) or alloca (keep_stack).  The
   outer values saved above are restored near the end.  */
1287 gimplify_ctxp->keep_stack = false;
1288 gimplify_ctxp->save_stack = false;
1290 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1291 body = NULL;
1292 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1293 gimple_bind_set_body (bind_stmt, body);
1295 /* Source location wise, the cleanup code (stack_restore and clobbers)
1296 belongs to the end of the block, so propagate what we have. The
1297 stack_save operation belongs to the beginning of block, which we can
1298 infer from the bind_expr directly if the block has no explicit
1299 assignment. */
1300 if (BIND_EXPR_BLOCK (bind_expr))
1302 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1303 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1305 if (start_locus == 0)
1306 start_locus = EXPR_LOCATION (bind_expr);
1308 cleanup = NULL;
1309 stack_save = NULL;
1311 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1312 the stack space allocated to the VLAs. */
1313 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1315 gcall *stack_restore;
1317 /* Save stack on entry and restore it on exit. Add a try_finally
1318 block to achieve this. */
1319 build_stack_save_restore (&stack_save, &stack_restore);
1321 gimple_set_location (stack_save, start_locus);
1322 gimple_set_location (stack_restore, end_locus);
1324 gimplify_seq_add_stmt (&cleanup, stack_restore);
1327 /* Add clobbers for all variables that go out of scope. */
1328 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1330 if (VAR_P (t)
1331 && !is_global_var (t)
1332 && DECL_CONTEXT (t) == current_function_decl)
1334 if (!DECL_HARD_REGISTER (t)
1335 && !TREE_THIS_VOLATILE (t)
1336 && !DECL_HAS_VALUE_EXPR_P (t)
1337 /* Only care for variables that have to be in memory. Others
1338 will be rewritten into SSA names, hence moved to the
1339 top-level. */
1340 && !is_gimple_reg (t)
1341 && flag_stack_reuse != SR_NONE)
/* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is GIMPLE's
   representation of a clobber (end-of-life marker) for T.  */
1343 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1344 gimple *clobber_stmt;
1345 TREE_THIS_VOLATILE (clobber) = 1;
1346 clobber_stmt = gimple_build_assign (t, clobber);
1347 gimple_set_location (clobber_stmt, end_locus);
1348 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1351 if (flag_openacc && oacc_declare_returns != NULL)
1353 tree *c = oacc_declare_returns->get (t);
1354 if (c != NULL)
1356 if (ret_clauses)
1357 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1359 ret_clauses = *c;
1361 oacc_declare_returns->remove (t);
1363 if (oacc_declare_returns->elements () == 0)
1365 delete oacc_declare_returns;
1366 oacc_declare_returns = NULL;
/* Variables tracked for use-after-scope sanitization get poisoned
   in the cleanup sequence when their scope ends here.  */
1372 if (asan_poisoned_variables != NULL
1373 && asan_poisoned_variables->contains (t))
1375 asan_poisoned_variables->remove (t);
1376 asan_poison_variable (t, true, &cleanup);
1379 if (gimplify_ctxp->live_switch_vars != NULL
1380 && gimplify_ctxp->live_switch_vars->contains (t))
1381 gimplify_ctxp->live_switch_vars->remove (t);
1384 if (ret_clauses)
1386 gomp_target *stmt;
1387 gimple_stmt_iterator si = gsi_start (cleanup);
1389 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1390 ret_clauses);
1391 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
/* Wrap body and cleanup in a GIMPLE_TRY_FINALLY so the cleanup runs on
   every exit path, with the stack_save (if any) hoisted in front.  */
1394 if (cleanup)
1396 gtry *gs;
1397 gimple_seq new_body;
1399 new_body = NULL;
1400 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1401 GIMPLE_TRY_FINALLY);
1403 if (stack_save)
1404 gimplify_seq_add_stmt (&new_body, stack_save);
1405 gimplify_seq_add_stmt (&new_body, gs);
1406 gimple_bind_set_body (bind_stmt, new_body);
1409 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1410 if (!gimplify_ctxp->keep_stack)
1411 gimplify_ctxp->keep_stack = old_keep_stack;
1412 gimplify_ctxp->save_stack = old_save_stack;
1414 gimple_pop_bind_expr ();
1416 gimplify_seq_add_stmt (pre_p, bind_stmt);
1418 if (temp)
1420 *expr_p = temp;
1421 return GS_OK;
1424 *expr_p = NULL_TREE;
1425 return GS_ALL_DONE;
1428 /* Maybe add early return predict statement to PRE_P sequence. */
1430 static void
1431 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1433 /* If we are not in a conditional context, add PREDICT statement. */
1434 if (gimple_conditional_context ())
1436 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1437 NOT_TAKEN);
1438 gimplify_seq_add_stmt (pre_p, predict);
1442 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1443 GIMPLE value, it is assigned to a new temporary and the statement is
1444 re-written to return the temporary.
1446 PRE_P points to the sequence where side effects that must happen before
1447 STMT should be stored. */
1449 static enum gimplify_status
1450 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1452 greturn *ret;
1453 tree ret_expr = TREE_OPERAND (stmt, 0);
1454 tree result_decl, result;
1456 if (ret_expr == error_mark_node)
1457 return GS_ERROR;
1459 /* Implicit _Cilk_sync must be inserted right before any return statement
1460 if there is a _Cilk_spawn in the function. If the user has provided a
1461 _Cilk_sync, the optimizer should remove this duplicate one. */
1462 if (fn_contains_cilk_spawn_p (cfun))
1464 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1465 gimplify_and_add (impl_sync, pre_p);
/* Trivial returns: no value, or the value is already the RESULT_DECL.
   NOTE(review): the error_mark_node test below is redundant — that case
   already returned GS_ERROR above.  Also the inner `ret' shadows the
   outer declaration, which is then only used on the fall-through path.  */
1468 if (!ret_expr
1469 || TREE_CODE (ret_expr) == RESULT_DECL
1470 || ret_expr == error_mark_node)
1472 maybe_add_early_return_predict_stmt (pre_p);
1473 greturn *ret = gimple_build_return (ret_expr);
1474 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1475 gimplify_seq_add_stmt (pre_p, ret);
1476 return GS_ALL_DONE;
1479 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1480 result_decl = NULL_TREE;
1481 else
/* Non-void function: RET_EXPR is a MODIFY/INIT_EXPR storing into the
   RESULT_DECL (possibly through an INDIRECT_REF for return-by-ref).  */
1483 result_decl = TREE_OPERAND (ret_expr, 0);
1485 /* See through a return by reference. */
1486 if (TREE_CODE (result_decl) == INDIRECT_REF)
1487 result_decl = TREE_OPERAND (result_decl, 0);
1489 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1490 || TREE_CODE (ret_expr) == INIT_EXPR)
1491 && TREE_CODE (result_decl) == RESULT_DECL);
1494 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1495 Recall that aggregate_value_p is FALSE for any aggregate type that is
1496 returned in registers. If we're returning values in registers, then
1497 we don't want to extend the lifetime of the RESULT_DECL, particularly
1498 across another call. In addition, for those aggregates for which
1499 hard_function_value generates a PARALLEL, we'll die during normal
1500 expansion of structure assignments; there's special code in expand_return
1501 to handle this case that does not exist in expand_expr. */
1502 if (!result_decl)
1503 result = NULL_TREE;
1504 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1506 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1508 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1509 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1510 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1511 should be effectively allocated by the caller, i.e. all calls to
1512 this function must be subject to the Return Slot Optimization. */
1513 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1514 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1516 result = result_decl;
1518 else if (gimplify_ctxp->return_temp)
1519 result = gimplify_ctxp->return_temp;
1520 else
/* First value-return in this function: create the shared return
   temporary and cache it in the gimplify context for later returns.  */
1522 result = create_tmp_reg (TREE_TYPE (result_decl));
1524 /* ??? With complex control flow (usually involving abnormal edges),
1525 we can wind up warning about an uninitialized value for this. Due
1526 to how this variable is constructed and initialized, this is never
1527 true. Give up and never warn. */
1528 TREE_NO_WARNING (result) = 1;
1530 gimplify_ctxp->return_temp = result;
1533 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1534 Then gimplify the whole thing. */
1535 if (result != result_decl)
1536 TREE_OPERAND (ret_expr, 0) = result;
1538 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1540 maybe_add_early_return_predict_stmt (pre_p);
1541 ret = gimple_build_return (result);
1542 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1543 gimplify_seq_add_stmt (pre_p, ret);
1545 return GS_ALL_DONE;
1548 /* Gimplify a variable-length array DECL. */
1550 static void
1551 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1553 /* This is a variable-sized decl. Simplify its size and mark it
1554 for deferred expansion. */
1555 tree t, addr, ptr_type;
1557 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1558 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1560 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1561 if (DECL_HAS_VALUE_EXPR_P (decl))
1562 return;
1564 /* All occurrences of this decl in final gimplified code will be
1565 replaced by indirection. Setting DECL_VALUE_EXPR does two
1566 things: First, it lets the rest of the gimplifier know what
1567 replacement to use. Second, it lets the debug info know
1568 where to find the value. */
1569 ptr_type = build_pointer_type (TREE_TYPE (decl));
1570 addr = create_tmp_var (ptr_type, get_name (decl));
1571 DECL_IGNORED_P (addr) = 0;
1572 t = build_fold_indirect_ref (addr);
1573 TREE_THIS_NOTRAP (t) = 1;
1574 SET_DECL_VALUE_EXPR (decl, t);
1575 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1577 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1578 max_int_size_in_bytes (TREE_TYPE (decl)));
1579 /* The call has been built for a variable-sized object. */
1580 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1581 t = fold_convert (ptr_type, t);
1582 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1584 gimplify_and_add (t, seq_p);
1587 /* A helper function to be called via walk_tree. Mark all labels under *TP
1588 as being forced. To be called for DECL_INITIAL of static variables. */
1590 static tree
1591 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1593 if (TYPE_P (*tp))
1594 *walk_subtrees = 0;
1595 if (TREE_CODE (*tp) == LABEL_DECL)
1597 FORCED_LABEL (*tp) = 1;
1598 cfun->has_forced_label_in_static = 1;
1601 return NULL_TREE;
1604 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1605 and initialization explicit. */
1607 static enum gimplify_status
1608 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1610 tree stmt = *stmt_p;
1611 tree decl = DECL_EXPR_DECL (stmt);
/* The DECL_EXPR itself disappears; everything it implies is emitted
   into SEQ_P below.  */
1613 *stmt_p = NULL_TREE;
1615 if (TREE_TYPE (decl) == error_mark_node)
1616 return GS_ERROR;
1618 if ((TREE_CODE (decl) == TYPE_DECL
1619 || VAR_P (decl))
1620 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1622 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1623 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1624 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1627 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1628 in case its size expressions contain problematic nodes like CALL_EXPR. */
1629 if (TREE_CODE (decl) == TYPE_DECL
1630 && DECL_ORIGINAL_TYPE (decl)
1631 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1633 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1634 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1635 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1638 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1640 tree init = DECL_INITIAL (decl);
1641 bool is_vla = false;
/* A non-constant size means a VLA; with generic stack checking even a
   large fixed-size local is treated like one.  */
1643 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1644 || (!TREE_STATIC (decl)
1645 && flag_stack_check == GENERIC_STACK_CHECK
1646 && compare_tree_int (DECL_SIZE_UNIT (decl),
1647 STACK_CHECK_MAX_VAR_SIZE) > 0))
1649 gimplify_vla_decl (decl, seq_p)
1650 is_vla = true;
/* Register the variable for use-after-scope sanitization and emit the
   initial unpoison at the point of declaration.  */
1653 if (asan_poisoned_variables
1654 && !is_vla
1655 && TREE_ADDRESSABLE (decl)
1656 && !TREE_STATIC (decl)
1657 && !DECL_HAS_VALUE_EXPR_P (decl)
1658 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1659 && dbg_cnt (asan_use_after_scope))
1661 asan_poisoned_variables->add (decl);
1662 asan_poison_variable (decl, false, seq_p);
1663 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1664 gimplify_ctxp->live_switch_vars->add (decl);
1667 /* Some front ends do not explicitly declare all anonymous
1668 artificial variables. We compensate here by declaring the
1669 variables, though it would be better if the front ends would
1670 explicitly declare them. */
1671 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1672 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1673 gimple_add_tmp_var (decl);
1675 if (init && init != error_mark_node)
1677 if (!TREE_STATIC (decl))
/* Turn the initializer into an explicit INIT_EXPR statement; the
   original DECL_INITIAL is no longer needed and can be freed.  */
1679 DECL_INITIAL (decl) = NULL_TREE;
1680 init = build2 (INIT_EXPR, void_type_node, decl, init);
1681 gimplify_and_add (init, seq_p);
1682 ggc_free (init);
1684 else
1685 /* We must still examine initializers for static variables
1686 as they may contain a label address. */
1687 walk_tree (&init, force_labels_r, NULL, NULL);
1691 return GS_ALL_DONE;
1694 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1695 and replacing the LOOP_EXPR with goto, but if the loop contains an
1696 EXIT_EXPR, we need to append a label for it to jump to. */
1698 static enum gimplify_status
1699 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1701 tree saved_label = gimplify_ctxp->exit_label;
1702 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1704 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1706 gimplify_ctxp->exit_label = NULL_TREE;
1708 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1710 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1712 if (gimplify_ctxp->exit_label)
1713 gimplify_seq_add_stmt (pre_p,
1714 gimple_build_label (gimplify_ctxp->exit_label));
1716 gimplify_ctxp->exit_label = saved_label;
1718 *expr_p = NULL;
1719 return GS_ALL_DONE;
1722 /* Gimplify a statement list onto a sequence. These may be created either
1723 by an enlightened front-end, or by shortcut_cond_expr. */
1725 static enum gimplify_status
1726 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1728 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1730 tree_stmt_iterator i = tsi_start (*expr_p);
1732 while (!tsi_end_p (i))
1734 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1735 tsi_delink (&i);
1738 if (temp)
1740 *expr_p = temp;
1741 return GS_OK;
1744 return GS_ALL_DONE;
1747 /* Callback for walk_gimple_seq. */
/* Finds the first "real" statement of the walked sequence and stashes it
   in WI->info, terminating the walk by returning integer_zero_node.
   Scopes, empty cleanups and ASAN_MARK instrumentation are skipped.  */
1749 static tree
1750 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1751 struct walk_stmt_info *wi)
1753 gimple *stmt = gsi_stmt (*gsi_p);
1755 *handled_ops_p = true;
1756 switch (gimple_code (stmt))
1758 case GIMPLE_TRY:
1759 /* A compiler-generated cleanup or a user-written try block.
1760 If it's empty, don't dive into it--that would result in
1761 worse location info. */
1762 if (gimple_try_eval (stmt) == NULL)
1764 wi->info = stmt;
1765 return integer_zero_node;
1767 /* Fall through. */
1768 case GIMPLE_BIND:
1769 case GIMPLE_CATCH:
1770 case GIMPLE_EH_FILTER:
1771 case GIMPLE_TRANSACTION:
1772 /* Walk the sub-statements. */
1773 *handled_ops_p = false;
1774 break;
1775 case GIMPLE_CALL:
/* ASAN_MARK calls are sanitizer instrumentation, not user code;
   walk past them rather than reporting them as the first stmt.  */
1776 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1778 *handled_ops_p = false;
1779 break;
1781 /* Fall through. */
1782 default:
1783 /* Save the first "real" statement (not a decl/lexical scope/...). */
1784 wi->info = stmt;
1785 return integer_zero_node;
1787 return NULL_TREE;
1790 /* Possibly warn about unreachable statements between switch's controlling
1791 expression and the first case. SEQ is the body of a switch expression. */
1793 static void
1794 maybe_warn_switch_unreachable (gimple_seq seq)
1796 if (!warn_switch_unreachable
1797 /* This warning doesn't play well with Fortran when optimizations
1798 are on. */
1799 || lang_GNU_Fortran ()
1800 || seq == NULL)
1801 return;
1803 struct walk_stmt_info wi;
1804 memset (&wi, 0, sizeof (wi));
1805 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1806 gimple *stmt = (gimple *) wi.info;
1808 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1810 if (gimple_code (stmt) == GIMPLE_GOTO
1811 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1812 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1813 /* Don't warn for compiler-generated gotos. These occur
1814 in Duff's devices, for example. */;
1815 else
1816 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1817 "statement will never be executed");
1822 /* A label entry that pairs label and a location. */
1823 struct label_entry
/* The LABEL_DECL this entry records.  */
1825 tree label;
/* Source location associated with falling through to LABEL.  */
1826 location_t loc;
1829 /* Find LABEL in vector of label entries VEC. */
1831 static struct label_entry *
1832 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1834 unsigned int i;
1835 struct label_entry *l;
1837 FOR_EACH_VEC_ELT (*vec, i, l)
1838 if (l->label == label)
1839 return l;
1840 return NULL;
1843 /* Return true if LABEL, a LABEL_DECL, represents a case label
1844 in a vector of labels CASES. */
1846 static bool
1847 case_label_p (const vec<tree> *cases, tree label)
1849 unsigned int i;
1850 tree l;
1852 FOR_EACH_VEC_ELT (*cases, i, l)
1853 if (CASE_LABEL (l) == label)
1854 return true;
1855 return false;
1858 /* Find the last statement in a scope STMT. */
/* Recurses into GIMPLE_BIND bodies and GIMPLE_TRY eval/cleanup sequences
   to find the statement that executes last; any other statement is its
   own answer.  May return NULL for an empty scope.  */
1860 static gimple *
1861 last_stmt_in_scope (gimple *stmt)
1863 if (!stmt)
1864 return NULL;
1866 switch (gimple_code (stmt))
1868 case GIMPLE_BIND:
1870 gbind *bind = as_a <gbind *> (stmt);
1871 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1872 return last_stmt_in_scope (stmt);
1875 case GIMPLE_TRY:
1877 gtry *try_stmt = as_a <gtry *> (stmt);
1878 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1879 gimple *last_eval = last_stmt_in_scope (stmt);
/* NOTE(review): gimple_stmt_may_fallthru is evaluated before the
   last_eval == NULL test — confirm it tolerates a NULL stmt, otherwise
   the NULL check is dead and this can dereference NULL.  */
1880 if (gimple_stmt_may_fallthru (last_eval)
1881 && (last_eval == NULL
1882 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1883 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
/* The eval part can fall through into a TRY_FINALLY cleanup, so the
   cleanup's last statement is what actually runs last.  */
1885 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1886 return last_stmt_in_scope (stmt);
1888 else
1889 return last_eval;
1892 default:
1893 return stmt;
1897 /* Collect interesting labels in LABELS and return the statement preceding
1898 another case label, or a user-defined label. */
/* Advances *GSI_P through the sequence; on return it points at the
   stopping label (or the end), and the returned statement is the last
   "real" statement seen before it (NULL if none).  */
1900 static gimple *
1901 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1902 auto_vec <struct label_entry> *labels)
1904 gimple *prev = NULL;
1908 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1909 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1911 /* Nested scope. Only look at the last statement of
1912 the innermost scope. */
1913 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1914 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1915 if (last)
1917 prev = last;
1918 /* It might be a label without a location. Use the
1919 location of the scope then. */
1920 if (!gimple_has_location (prev))
1921 gimple_set_location (prev, bind_loc);
1923 gsi_next (gsi_p);
1924 continue;
1927 /* Ifs are tricky. */
1928 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1930 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1931 tree false_lab = gimple_cond_false_label (cond_stmt);
1932 location_t if_loc = gimple_location (cond_stmt);
1934 /* If we have e.g.
1935 if (i > 1) goto <D.2259>; else goto D;
1936 we can't do much with the else-branch. */
1937 if (!DECL_ARTIFICIAL (false_lab))
1938 break;
1940 /* Go on until the false label, then one step back. */
1941 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1943 gimple *stmt = gsi_stmt (*gsi_p);
1944 if (gimple_code (stmt) == GIMPLE_LABEL
1945 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1946 break;
1949 /* Not found? Oops. */
1950 if (gsi_end_p (*gsi_p))
1951 break;
/* The false label of the if is a potential fall-through target;
   record it together with the location of the if itself.  */
1953 struct label_entry l = { false_lab, if_loc };
1954 labels->safe_push (l);
1956 /* Go to the last statement of the then branch. */
1957 gsi_prev (gsi_p);
1959 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1960 <D.1759>:
1961 <stmt>;
1962 goto <D.1761>;
1963 <D.1760>:
1965 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1966 && !gimple_has_location (gsi_stmt (*gsi_p)))
1968 /* Look at the statement before, it might be
1969 attribute fallthrough, in which case don't warn. */
1970 gsi_prev (gsi_p);
1971 bool fallthru_before_dest
1972 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1973 gsi_next (gsi_p);
1974 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1975 if (!fallthru_before_dest)
1977 struct label_entry l = { goto_dest, if_loc };
1978 labels->safe_push (l);
1981 /* And move back. */
1982 gsi_next (gsi_p);
1985 /* Remember the last statement. Skip labels that are of no interest
1986 to us. */
1987 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1989 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1990 if (find_label_entry (labels, label))
1991 prev = gsi_stmt (*gsi_p);
/* ASAN_MARK instrumentation calls are ignored: they are not "real"
   statements for fallthrough purposes.  */
1993 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1995 else
1996 prev = gsi_stmt (*gsi_p);
1997 gsi_next (gsi_p);
1999 while (!gsi_end_p (*gsi_p)
2000 /* Stop if we find a case or a user-defined label. */
2001 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2002 || !gimple_has_location (gsi_stmt (*gsi_p))));
2004 return prev;
2007 /* Return true if the switch fallthough warning should occur. LABEL is
2008 the label statement that we're falling through to. */
2010 static bool
2011 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2013 gimple_stmt_iterator gsi = *gsi_p;
2015 /* Don't warn if the label is marked with a "falls through" comment. */
2016 if (FALLTHROUGH_LABEL_P (label))
2017 return false;
2019 /* Don't warn for non-case labels followed by a statement:
2020 case 0:
2021 foo ();
2022 label:
2023 bar ();
2024 as these are likely intentional. */
2025 if (!case_label_p (&gimplify_ctxp->case_labels, label))
/* Skip the run of consecutive labels; warn only if it ends in a case
   label (then the fallthrough really reaches a new case).  */
2027 tree l;
2028 while (!gsi_end_p (gsi)
2029 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2030 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2031 && !case_label_p (&gimplify_ctxp->case_labels, l))
2032 gsi_next (&gsi);
2033 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2034 return false;
2037 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2038 immediately breaks. */
2039 gsi = *gsi_p;
2041 /* Skip all immediately following labels. */
2042 while (!gsi_end_p (gsi)
2043 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2044 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2045 gsi_next (&gsi);
2047 /* { ... something; default:; } */
2048 if (gsi_end_p (gsi)
2049 /* { ... something; default: break; } or
2050 { ... something; default: goto L; } */
2051 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2052 /* { ... something; default: return; } */
2053 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2054 return false;
2056 return true;
2059 /* Callback for walk_gimple_seq. */
/* Looks for a label followed (after some fall-through-capable code) by
   another located label, and emits -Wimplicit-fallthrough where
   appropriate.  */
2061 static tree
2062 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2063 struct walk_stmt_info *)
2065 gimple *stmt = gsi_stmt (*gsi_p);
2067 *handled_ops_p = true;
2068 switch (gimple_code (stmt))
2070 case GIMPLE_TRY:
2071 case GIMPLE_BIND:
2072 case GIMPLE_CATCH:
2073 case GIMPLE_EH_FILTER:
2074 case GIMPLE_TRANSACTION:
2075 /* Walk the sub-statements. */
2076 *handled_ops_p = false;
2077 break;
2079 /* Find a sequence of form:
2081 GIMPLE_LABEL
2082 [...]
2083 <may fallthru stmt>
2084 GIMPLE_LABEL
2086 and possibly warn. */
2087 case GIMPLE_LABEL:
2089 /* Found a label. Skip all immediately following labels. */
2090 while (!gsi_end_p (*gsi_p)
2091 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2092 gsi_next (gsi_p);
2094 /* There might be no more statements. */
2095 if (gsi_end_p (*gsi_p))
2096 return integer_zero_node;
2098 /* Vector of labels that fall through. */
2099 auto_vec <struct label_entry> labels;
2100 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2102 /* There might be no more statements. */
2103 if (gsi_end_p (*gsi_p))
2104 return integer_zero_node;
2106 gimple *next = gsi_stmt (*gsi_p);
2107 tree label;
2108 /* If what follows is a label, then we may have a fallthrough. */
2109 if (gimple_code (next) == GIMPLE_LABEL
2110 && gimple_has_location (next)
2111 && (label = gimple_label_label (as_a <glabel *> (next)))
2112 && prev != NULL)
2114 struct label_entry *l;
2115 bool warned_p = false;
2116 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2117 /* Quiet. */;
/* PREV ends in a recorded fall-through label: point the warning at
   the location stored with that label (the originating if).  */
2118 else if (gimple_code (prev) == GIMPLE_LABEL
2119 && (label = gimple_label_label (as_a <glabel *> (prev)))
2120 && (l = find_label_entry (&labels, label)))
2121 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2122 "this statement may fall through");
2123 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2124 /* Try to be clever and don't warn when the statement
2125 can't actually fall through. */
2126 && gimple_stmt_may_fallthru (prev)
2127 && gimple_has_location (prev))
2128 warned_p = warning_at (gimple_location (prev),
2129 OPT_Wimplicit_fallthrough_,
2130 "this statement may fall through");
2131 if (warned_p)
2132 inform (gimple_location (next), "here");
2134 /* Mark this label as processed so as to prevent multiple
2135 warnings in nested switches. */
2136 FALLTHROUGH_LABEL_P (label) = true;
2138 /* So that next warn_implicit_fallthrough_r will start looking for
2139 a new sequence starting with this label. */
2140 gsi_prev (gsi_p);
2143 break;
2144 default:
2145 break;
2147 return NULL_TREE;
2150 /* Warn when a switch case falls through. */
2152 static void
2153 maybe_warn_implicit_fallthrough (gimple_seq seq)
2155 if (!warn_implicit_fallthrough)
2156 return;
2158 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2159 if (!(lang_GNU_C ()
2160 || lang_GNU_CXX ()
2161 || lang_GNU_OBJC ()))
2162 return;
2164 struct walk_stmt_info wi;
2165 memset (&wi, 0, sizeof (wi));
2166 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2169 /* Callback for walk_gimple_seq. */
/* Removes IFN_FALLTHROUGH marker calls and diagnoses markers that are
   not immediately followed by a case or default label.  */
2171 static tree
2172 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2173 struct walk_stmt_info *)
2175 gimple *stmt = gsi_stmt (*gsi_p);
2177 *handled_ops_p = true;
2178 switch (gimple_code (stmt))
2180 case GIMPLE_TRY:
2181 case GIMPLE_BIND:
2182 case GIMPLE_CATCH:
2183 case GIMPLE_EH_FILTER:
2184 case GIMPLE_TRANSACTION:
2185 /* Walk the sub-statements. */
2186 *handled_ops_p = false;
2187 break;
2188 case GIMPLE_CALL:
2189 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
/* Drop the marker; *GSI_P now points at the following statement.  */
2191 gsi_remove (gsi_p, true);
2192 if (gsi_end_p (*gsi_p))
2193 return integer_zero_node;
2195 bool found = false;
2196 location_t loc = gimple_location (stmt);
2198 gimple_stmt_iterator gsi2 = *gsi_p;
2199 stmt = gsi_stmt (gsi2);
2200 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2202 /* Go on until the artificial label. */
2203 tree goto_dest = gimple_goto_dest (stmt);
2204 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2206 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2207 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2208 == goto_dest)
2209 break;
2212 /* Not found? Stop. */
2213 if (gsi_end_p (gsi2))
2214 break;
2216 /* Look one past it. */
2217 gsi_next (&gsi2);
2220 /* We're looking for a case label or default label here. */
2221 while (!gsi_end_p (gsi2))
2223 stmt = gsi_stmt (gsi2);
2224 if (gimple_code (stmt) == GIMPLE_LABEL)
2226 tree label = gimple_label_label (as_a <glabel *> (stmt));
/* A located artificial label is how case/default labels appear
   at this point in gimplification.  */
2227 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2229 found = true;
2230 break;
2233 else
2234 /* Something other than a label. That's not expected. */
2235 break;
2236 gsi_next (&gsi2);
2238 if (!found)
2239 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2240 "a case label or default label");
2242 break;
2243 default:
2244 break;
2246 return NULL_TREE;
2249 /* Expand all FALLTHROUGH () calls in SEQ. */
2251 static void
2252 expand_FALLTHROUGH (gimple_seq *seq_p)
2254 struct walk_stmt_info wi;
2255 memset (&wi, 0, sizeof (wi));
2256 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2260 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2261 branch to. */
2263 static enum gimplify_status
2264 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2266 tree switch_expr = *expr_p;
2267 gimple_seq switch_body_seq = NULL;
2268 enum gimplify_status ret;
2269 tree index_type = TREE_TYPE (switch_expr);
2270 if (index_type == NULL_TREE)
2271 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2273 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2274 fb_rvalue);
2275 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2276 return ret;
2278 if (SWITCH_BODY (switch_expr))
2280 vec<tree> labels;
2281 vec<tree> saved_labels;
2282 hash_set<tree> *saved_live_switch_vars = NULL;
2283 tree default_case = NULL_TREE;
2284 gswitch *switch_stmt;
2286 /* If someone can be bothered to fill in the labels, they can
2287 be bothered to null out the body too. */
2288 gcc_assert (!SWITCH_LABELS (switch_expr));
2290 /* Save old labels, get new ones from body, then restore the old
2291 labels. Save all the things from the switch body to append after. */
2292 saved_labels = gimplify_ctxp->case_labels;
2293 gimplify_ctxp->case_labels.create (8);
2295 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2296 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2297 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2298 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2299 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2300 else
2301 gimplify_ctxp->live_switch_vars = NULL;
2303 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2304 gimplify_ctxp->in_switch_expr = true;
2306 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2308 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2309 maybe_warn_switch_unreachable (switch_body_seq);
2310 maybe_warn_implicit_fallthrough (switch_body_seq);
2311 /* Only do this for the outermost GIMPLE_SWITCH. */
2312 if (!gimplify_ctxp->in_switch_expr)
2313 expand_FALLTHROUGH (&switch_body_seq);
2315 labels = gimplify_ctxp->case_labels;
2316 gimplify_ctxp->case_labels = saved_labels;
2318 if (gimplify_ctxp->live_switch_vars)
2320 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2321 delete gimplify_ctxp->live_switch_vars;
2323 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2325 preprocess_case_label_vec_for_gimple (labels, index_type,
2326 &default_case);
2328 if (!default_case)
2330 glabel *new_default;
2332 default_case
2333 = build_case_label (NULL_TREE, NULL_TREE,
2334 create_artificial_label (UNKNOWN_LOCATION));
2335 new_default = gimple_build_label (CASE_LABEL (default_case));
2336 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2339 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2340 default_case, labels);
2341 gimplify_seq_add_stmt (pre_p, switch_stmt);
2342 gimplify_seq_add_seq (pre_p, switch_body_seq);
2343 labels.release ();
2345 else
2346 gcc_assert (SWITCH_LABELS (switch_expr));
2348 return GS_ALL_DONE;
2351 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2353 static enum gimplify_status
2354 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2356 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2357 == current_function_decl);
2359 tree label = LABEL_EXPR_LABEL (*expr_p);
2360 glabel *label_stmt = gimple_build_label (label);
2361 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2362 gimplify_seq_add_stmt (pre_p, label_stmt);
2364 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2365 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2366 NOT_TAKEN));
2367 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2368 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2369 TAKEN));
2371 return GS_ALL_DONE;
2374 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2376 static enum gimplify_status
2377 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2379 struct gimplify_ctx *ctxp;
2380 glabel *label_stmt;
2382 /* Invalid programs can play Duff's Device type games with, for example,
2383 #pragma omp parallel. At least in the C front end, we don't
2384 detect such invalid branches until after gimplification, in the
2385 diagnose_omp_blocks pass. */
2386 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2387 if (ctxp->case_labels.exists ())
2388 break;
2390 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2391 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2392 ctxp->case_labels.safe_push (*expr_p);
2393 gimplify_seq_add_stmt (pre_p, label_stmt);
2395 return GS_ALL_DONE;
2398 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2399 if necessary. */
2401 tree
2402 build_and_jump (tree *label_p)
2404 if (label_p == NULL)
2405 /* If there's nowhere to jump, just fall through. */
2406 return NULL_TREE;
2408 if (*label_p == NULL_TREE)
2410 tree label = create_artificial_label (UNKNOWN_LOCATION);
2411 *label_p = label;
2414 return build1 (GOTO_EXPR, void_type_node, *label_p);
2417 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2418 This also involves building a label to jump to and communicating it to
2419 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2421 static enum gimplify_status
2422 gimplify_exit_expr (tree *expr_p)
2424 tree cond = TREE_OPERAND (*expr_p, 0);
2425 tree expr;
2427 expr = build_and_jump (&gimplify_ctxp->exit_label);
2428 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2429 *expr_p = expr;
2431 return GS_OK;
2434 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2435 different from its canonical type, wrap the whole thing inside a
2436 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2437 type.
2439 The canonical type of a COMPONENT_REF is the type of the field being
2440 referenced--unless the field is a bit-field which can be read directly
2441 in a smaller mode, in which case the canonical type is the
2442 sign-appropriate type corresponding to that mode. */
2444 static void
2445 canonicalize_component_ref (tree *expr_p)
2447 tree expr = *expr_p;
2448 tree type;
2450 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2452 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2453 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2454 else
2455 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2457 /* One could argue that all the stuff below is not necessary for
2458 the non-bitfield case and declare it a FE error if type
2459 adjustment would be needed. */
2460 if (TREE_TYPE (expr) != type)
2462 #ifdef ENABLE_TYPES_CHECKING
2463 tree old_type = TREE_TYPE (expr);
2464 #endif
2465 int type_quals;
2467 /* We need to preserve qualifiers and propagate them from
2468 operand 0. */
2469 type_quals = TYPE_QUALS (type)
2470 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2471 if (TYPE_QUALS (type) != type_quals)
2472 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2474 /* Set the type of the COMPONENT_REF to the underlying type. */
2475 TREE_TYPE (expr) = type;
2477 #ifdef ENABLE_TYPES_CHECKING
2478 /* It is now a FE error, if the conversion from the canonical
2479 type to the original expression type is not useless. */
2480 gcc_assert (useless_type_conversion_p (old_type, type));
2481 #endif
2485 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2486 to foo, embed that change in the ADDR_EXPR by converting
2487 T array[U];
2488 (T *)&array
2490 &array[L]
2491 where L is the lower bound. For simplicity, only do this for constant
2492 lower bound.
2493 The constraint is that the type of &array[L] is trivially convertible
2494 to T *. */
2496 static void
2497 canonicalize_addr_expr (tree *expr_p)
2499 tree expr = *expr_p;
2500 tree addr_expr = TREE_OPERAND (expr, 0);
2501 tree datype, ddatype, pddatype;
2503 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2504 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2505 || TREE_CODE (addr_expr) != ADDR_EXPR)
2506 return;
2508 /* The addr_expr type should be a pointer to an array. */
2509 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2510 if (TREE_CODE (datype) != ARRAY_TYPE)
2511 return;
2513 /* The pointer to element type shall be trivially convertible to
2514 the expression pointer type. */
2515 ddatype = TREE_TYPE (datype);
2516 pddatype = build_pointer_type (ddatype);
2517 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2518 pddatype))
2519 return;
2521 /* The lower bound and element sizes must be constant. */
2522 if (!TYPE_SIZE_UNIT (ddatype)
2523 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2524 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2525 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2526 return;
2528 /* All checks succeeded. Build a new node to merge the cast. */
2529 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2530 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2531 NULL_TREE, NULL_TREE);
2532 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2534 /* We can have stripped a required restrict qualifier above. */
2535 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2536 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2539 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2540 underneath as appropriate. */
2542 static enum gimplify_status
2543 gimplify_conversion (tree *expr_p)
2545 location_t loc = EXPR_LOCATION (*expr_p);
2546 gcc_assert (CONVERT_EXPR_P (*expr_p));
2548 /* Then strip away all but the outermost conversion. */
2549 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2551 /* And remove the outermost conversion if it's useless. */
2552 if (tree_ssa_useless_type_conversion (*expr_p))
2553 *expr_p = TREE_OPERAND (*expr_p, 0);
2555 /* If we still have a conversion at the toplevel,
2556 then canonicalize some constructs. */
2557 if (CONVERT_EXPR_P (*expr_p))
2559 tree sub = TREE_OPERAND (*expr_p, 0);
2561 /* If a NOP conversion is changing the type of a COMPONENT_REF
2562 expression, then canonicalize its type now in order to expose more
2563 redundant conversions. */
2564 if (TREE_CODE (sub) == COMPONENT_REF)
2565 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2567 /* If a NOP conversion is changing a pointer to array of foo
2568 to a pointer to foo, embed that change in the ADDR_EXPR. */
2569 else if (TREE_CODE (sub) == ADDR_EXPR)
2570 canonicalize_addr_expr (expr_p);
2573 /* If we have a conversion to a non-register type force the
2574 use of a VIEW_CONVERT_EXPR instead. */
2575 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2576 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2577 TREE_OPERAND (*expr_p, 0));
2579 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2580 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2581 TREE_SET_CODE (*expr_p, NOP_EXPR);
2583 return GS_OK;
/* Nonlocal VLAs seen in the current function.  Used to avoid creating
   more than one debug-info copy per referenced nonlocal VLA.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes;
   chained through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  Returns
   GS_ERROR for a leaked local decl (see below), GS_ALL_DONE
   otherwise.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  A nonlocal VLA is recognized by a
	 non-constant size and a value expr of the form *tmp_var coming
	 from a function other than the one being gimplified.  */
      if (VAR_P (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip contexts that merely subdivide the function; only a
	     NULL outermost context means we are in the function
	     proper, where the debug copy belongs.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns false on first insertion, so each
	     VLA gets at most one debug copy.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the list consumed later when the
		 function's debug decls are emitted.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2660 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2662 static void
2663 recalculate_side_effects (tree t)
2665 enum tree_code code = TREE_CODE (t);
2666 int len = TREE_OPERAND_LENGTH (t);
2667 int i;
2669 switch (TREE_CODE_CLASS (code))
2671 case tcc_expression:
2672 switch (code)
2674 case INIT_EXPR:
2675 case MODIFY_EXPR:
2676 case VA_ARG_EXPR:
2677 case PREDECREMENT_EXPR:
2678 case PREINCREMENT_EXPR:
2679 case POSTDECREMENT_EXPR:
2680 case POSTINCREMENT_EXPR:
2681 /* All of these have side-effects, no matter what their
2682 operands are. */
2683 return;
2685 default:
2686 break;
2688 /* Fall through. */
2690 case tcc_comparison: /* a comparison expression */
2691 case tcc_unary: /* a unary arithmetic expression */
2692 case tcc_binary: /* a binary arithmetic expression */
2693 case tcc_reference: /* a reference */
2694 case tcc_vl_exp: /* a function call */
2695 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2696 for (i = 0; i < len; ++i)
2698 tree op = TREE_OPERAND (t, i);
2699 if (op && TREE_SIDE_EFFECTS (op))
2700 TREE_SIDE_EFFECTS (t) = 1;
2702 break;
2704 case tcc_constant:
2705 /* No side-effects. */
2706 return;
2708 default:
2709 gcc_unreachable ();
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.

   FALLBACK selects the kinds of temporaries (rvalue/lvalue) that are
   acceptable when a subexpression must be replaced.  Returns the
   minimum (worst) gimplify_status over all subexpression work.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  Redirect
     them into a local queue that is appended to ORIG_POST_P last.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-modification value in a temporary; this is
	 the expression's result.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart; negate the offset
	 instead.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store now; the saved temporary LHS is the value.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3014 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3016 static void
3017 maybe_with_size_expr (tree *expr_p)
3019 tree expr = *expr_p;
3020 tree type = TREE_TYPE (expr);
3021 tree size;
3023 /* If we've already wrapped this or the type is error_mark_node, we can't do
3024 anything. */
3025 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3026 || type == error_mark_node)
3027 return;
3029 /* If the size isn't known or is a constant, we have nothing to do. */
3030 size = TYPE_SIZE_UNIT (type);
3031 if (!size || TREE_CODE (size) == INTEGER_CST)
3032 return;
3034 /* Otherwise, make a WITH_SIZE_EXPR. */
3035 size = unshare_expr (size);
3036 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3037 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3040 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3041 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3042 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3043 gimplified to an SSA name. */
3045 enum gimplify_status
3046 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3047 bool allow_ssa)
3049 bool (*test) (tree);
3050 fallback_t fb;
3052 /* In general, we allow lvalues for function arguments to avoid
3053 extra overhead of copying large aggregates out of even larger
3054 aggregates into temporaries only to copy the temporaries to
3055 the argument list. Make optimizers happy by pulling out to
3056 temporaries those types that fit in registers. */
3057 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3058 test = is_gimple_val, fb = fb_rvalue;
3059 else
3061 test = is_gimple_lvalue, fb = fb_either;
3062 /* Also strip a TARGET_EXPR that would force an extra copy. */
3063 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3065 tree init = TARGET_EXPR_INITIAL (*arg_p);
3066 if (init
3067 && !VOID_TYPE_P (TREE_TYPE (init)))
3068 *arg_p = init;
3072 /* If this is a variable sized type, we must remember the size. */
3073 maybe_with_size_expr (arg_p);
3075 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3076 /* Make sure arguments have the same location as the function call
3077 itself. */
3078 protected_set_expr_location (*arg_p, call_location);
3080 /* There is a sequence point before a function call. Side effects in
3081 the argument list must occur before the actual call. So, when
3082 gimplifying arguments, force gimplify_expr to use an internal
3083 post queue which is then appended to the end of PRE_P. */
3084 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3087 /* Don't fold inside offloading or taskreg regions: it can break code by
3088 adding decl references that weren't in the source. We'll do it during
3089 omplower pass instead. */
3091 static bool
3092 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3094 struct gimplify_omp_ctx *ctx;
3095 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3096 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3097 return false;
3098 return fold_stmt (gsi);
3101 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3102 with the pointer to the proper cilk frame. */
3103 static void
3104 gimplify_cilk_detach (gimple_seq *pre_p)
3106 tree frame = cfun->cilk_frame_decl;
3107 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3108 frame);
3109 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3110 ptrf);
3111 gimplify_seq_add_stmt(pre_p, detach);
3114 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3115 WANT_VALUE is true if the result of the call is desired. */
3117 static enum gimplify_status
3118 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3120 tree fndecl, parms, p, fnptrtype;
3121 enum gimplify_status ret;
3122 int i, nargs;
3123 gcall *call;
3124 bool builtin_va_start_p = false;
3125 location_t loc = EXPR_LOCATION (*expr_p);
3127 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3129 /* For reliable diagnostics during inlining, it is necessary that
3130 every call_expr be annotated with file and line. */
3131 if (! EXPR_HAS_LOCATION (*expr_p))
3132 SET_EXPR_LOCATION (*expr_p, input_location);
3134 /* Gimplify internal functions created in the FEs. */
3135 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3137 if (want_value)
3138 return GS_ALL_DONE;
3140 nargs = call_expr_nargs (*expr_p);
3141 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3142 auto_vec<tree> vargs (nargs);
3144 for (i = 0; i < nargs; i++)
3146 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3147 EXPR_LOCATION (*expr_p));
3148 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3151 if (EXPR_CILK_SPAWN (*expr_p))
3152 gimplify_cilk_detach (pre_p);
3153 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3154 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3155 gimplify_seq_add_stmt (pre_p, call);
3156 return GS_ALL_DONE;
3159 /* This may be a call to a builtin function.
3161 Builtin function calls may be transformed into different
3162 (and more efficient) builtin function calls under certain
3163 circumstances. Unfortunately, gimplification can muck things
3164 up enough that the builtin expanders are not aware that certain
3165 transformations are still valid.
3167 So we attempt transformation/gimplification of the call before
3168 we gimplify the CALL_EXPR. At this time we do not manage to
3169 transform all calls in the same manner as the expanders do, but
3170 we do transform most of them. */
3171 fndecl = get_callee_fndecl (*expr_p);
3172 if (fndecl
3173 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3174 switch (DECL_FUNCTION_CODE (fndecl))
3176 CASE_BUILT_IN_ALLOCA:
3177 /* If the call has been built for a variable-sized object, then we
3178 want to restore the stack level when the enclosing BIND_EXPR is
3179 exited to reclaim the allocated space; otherwise, we precisely
3180 need to do the opposite and preserve the latest stack level. */
3181 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3182 gimplify_ctxp->save_stack = true;
3183 else
3184 gimplify_ctxp->keep_stack = true;
3185 break;
3187 case BUILT_IN_VA_START:
3189 builtin_va_start_p = TRUE;
3190 if (call_expr_nargs (*expr_p) < 2)
3192 error ("too few arguments to function %<va_start%>");
3193 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3194 return GS_OK;
3197 if (fold_builtin_next_arg (*expr_p, true))
3199 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3200 return GS_OK;
3202 break;
3205 default:
3208 if (fndecl && DECL_BUILT_IN (fndecl))
3210 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3211 if (new_tree && new_tree != *expr_p)
3213 /* There was a transformation of this call which computes the
3214 same value, but in a more efficient way. Return and try
3215 again. */
3216 *expr_p = new_tree;
3217 return GS_OK;
3221 /* Remember the original function pointer type. */
3222 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3224 /* There is a sequence point before the call, so any side effects in
3225 the calling expression must occur before the actual call. Force
3226 gimplify_expr to use an internal post queue. */
3227 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3228 is_gimple_call_addr, fb_rvalue);
3230 nargs = call_expr_nargs (*expr_p);
3232 /* Get argument types for verification. */
3233 fndecl = get_callee_fndecl (*expr_p);
3234 parms = NULL_TREE;
3235 if (fndecl)
3236 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3237 else
3238 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3240 if (fndecl && DECL_ARGUMENTS (fndecl))
3241 p = DECL_ARGUMENTS (fndecl);
3242 else if (parms)
3243 p = parms;
3244 else
3245 p = NULL_TREE;
3246 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3249 /* If the last argument is __builtin_va_arg_pack () and it is not
3250 passed as a named argument, decrease the number of CALL_EXPR
3251 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3252 if (!p
3253 && i < nargs
3254 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3256 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3257 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3259 if (last_arg_fndecl
3260 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3261 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3262 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3264 tree call = *expr_p;
3266 --nargs;
3267 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3268 CALL_EXPR_FN (call),
3269 nargs, CALL_EXPR_ARGP (call));
3271 /* Copy all CALL_EXPR flags, location and block, except
3272 CALL_EXPR_VA_ARG_PACK flag. */
3273 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3274 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3275 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3276 = CALL_EXPR_RETURN_SLOT_OPT (call);
3277 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3278 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3280 /* Set CALL_EXPR_VA_ARG_PACK. */
3281 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3285 /* If the call returns twice then after building the CFG the call
3286 argument computations will no longer dominate the call because
3287 we add an abnormal incoming edge to the call. So do not use SSA
3288 vars there. */
3289 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3291 /* Gimplify the function arguments. */
3292 if (nargs > 0)
3294 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3295 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3296 PUSH_ARGS_REVERSED ? i-- : i++)
3298 enum gimplify_status t;
3300 /* Avoid gimplifying the second argument to va_start, which needs to
3301 be the plain PARM_DECL. */
3302 if ((i != 1) || !builtin_va_start_p)
3304 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3305 EXPR_LOCATION (*expr_p), ! returns_twice);
3307 if (t == GS_ERROR)
3308 ret = GS_ERROR;
3313 /* Gimplify the static chain. */
3314 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3316 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3317 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3318 else
3320 enum gimplify_status t;
3321 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3322 EXPR_LOCATION (*expr_p), ! returns_twice);
3323 if (t == GS_ERROR)
3324 ret = GS_ERROR;
3328 /* Verify the function result. */
3329 if (want_value && fndecl
3330 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3332 error_at (loc, "using result of function returning %<void%>");
3333 ret = GS_ERROR;
3336 /* Try this again in case gimplification exposed something. */
3337 if (ret != GS_ERROR)
3339 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3341 if (new_tree && new_tree != *expr_p)
3343 /* There was a transformation of this call which computes the
3344 same value, but in a more efficient way. Return and try
3345 again. */
3346 *expr_p = new_tree;
3347 return GS_OK;
3350 else
3352 *expr_p = error_mark_node;
3353 return GS_ERROR;
3356 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3357 decl. This allows us to eliminate redundant or useless
3358 calls to "const" functions. */
3359 if (TREE_CODE (*expr_p) == CALL_EXPR)
3361 int flags = call_expr_flags (*expr_p);
3362 if (flags & (ECF_CONST | ECF_PURE)
3363 /* An infinite loop is considered a side effect. */
3364 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3365 TREE_SIDE_EFFECTS (*expr_p) = 0;
3368 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3369 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3370 form and delegate the creation of a GIMPLE_CALL to
3371 gimplify_modify_expr. This is always possible because when
3372 WANT_VALUE is true, the caller wants the result of this call into
3373 a temporary, which means that we will emit an INIT_EXPR in
3374 internal_get_tmp_var which will then be handled by
3375 gimplify_modify_expr. */
3376 if (!want_value)
3378 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3379 have to do is replicate it as a GIMPLE_CALL tuple. */
3380 gimple_stmt_iterator gsi;
3381 call = gimple_build_call_from_tree (*expr_p);
3382 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
3383 notice_special_calls (call);
3384 if (EXPR_CILK_SPAWN (*expr_p))
3385 gimplify_cilk_detach (pre_p);
3386 gimplify_seq_add_stmt (pre_p, call);
3387 gsi = gsi_last (*pre_p);
3388 maybe_fold_stmt (&gsi);
3389 *expr_p = NULL_TREE;
3391 else
3392 /* Remember the original function type. */
3393 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3394 CALL_EXPR_FN (*expr_p));
3396 return ret;
3399 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3400 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3402 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3403 condition is true or false, respectively. If null, we should generate
3404 our own to skip over the evaluation of this specific expression.
3406 LOCUS is the source location of the COND_EXPR.
3408 This function is the tree equivalent of do_jump.
3410 shortcut_cond_r should only be called by shortcut_cond_expr. */
3412 static tree
3413 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3414 location_t locus)
3416 tree local_label = NULL_TREE;
3417 tree t, expr = NULL;
3419 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3420 retain the shortcut semantics. Just insert the gotos here;
3421 shortcut_cond_expr will append the real blocks later. */
3422 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3424 location_t new_locus;
3426 /* Turn if (a && b) into
3428 if (a); else goto no;
3429 if (b) goto yes; else goto no;
3430 (no:) */
3432 if (false_label_p == NULL)
3433 false_label_p = &local_label;
3435 /* Keep the original source location on the first 'if'. */
3436 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3437 append_to_statement_list (t, &expr);
3439 /* Set the source location of the && on the second 'if'. */
3440 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3441 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3442 new_locus);
3443 append_to_statement_list (t, &expr);
3445 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3447 location_t new_locus;
3449 /* Turn if (a || b) into
3451 if (a) goto yes;
3452 if (b) goto yes; else goto no;
3453 (yes:) */
3455 if (true_label_p == NULL)
3456 true_label_p = &local_label;
3458 /* Keep the original source location on the first 'if'. */
3459 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3460 append_to_statement_list (t, &expr);
3462 /* Set the source location of the || on the second 'if'. */
3463 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3464 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3465 new_locus);
3466 append_to_statement_list (t, &expr);
3468 else if (TREE_CODE (pred) == COND_EXPR
3469 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3470 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3472 location_t new_locus;
3474 /* As long as we're messing with gotos, turn if (a ? b : c) into
3475 if (a)
3476 if (b) goto yes; else goto no;
3477 else
3478 if (c) goto yes; else goto no;
3480 Don't do this if one of the arms has void type, which can happen
3481 in C++ when the arm is throw. */
3483 /* Keep the original source location on the first 'if'. Set the source
3484 location of the ? on the second 'if'. */
3485 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3486 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3487 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3488 false_label_p, locus),
3489 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3490 false_label_p, new_locus));
 /* Base case: PRED has no shortcut structure left, so emit a plain
 "if (pred) goto yes; else goto no;" using the labels we were given
 (build_and_jump creates a label on demand when the pointee is null). */
3492 else
3494 expr = build3 (COND_EXPR, void_type_node, pred,
3495 build_and_jump (true_label_p),
3496 build_and_jump (false_label_p));
3497 SET_EXPR_LOCATION (expr, locus);
 /* If we had to invent a label above (the caller passed a null label
 pointer), it marks the point just past this subexpression: emit it
 here so the skipped branch has somewhere to land. */
3500 if (local_label)
3502 t = build1 (LABEL_EXPR, void_type_node, local_label);
3503 append_to_statement_list (t, &expr);
3506 return expr;
3509 /* Given a conditional expression EXPR with short-circuit boolean
3510 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3511 predicate apart into the equivalent sequence of conditionals.
 Returns the rewritten statement tree (possibly EXPR itself when no
 rewriting was needed). */
3513 static tree
3514 shortcut_cond_expr (tree expr)
3516 tree pred = TREE_OPERAND (expr, 0);
3517 tree then_ = TREE_OPERAND (expr, 1);
3518 tree else_ = TREE_OPERAND (expr, 2);
3519 tree true_label, false_label, end_label, t;
3520 tree *true_label_p;
3521 tree *false_label_p;
3522 bool emit_end, emit_false, jump_over_else;
3523 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3524 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3526 /* First do simple transformations. */
3527 if (!else_se)
3529 /* If there is no 'else', turn
3530 if (a && b) then c
3531 into
3532 if (a) if (b) then c. */
3533 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3535 /* Keep the original source location on the first 'if'. */
3536 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3537 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3538 /* Set the source location of the && on the second 'if'. */
3539 if (EXPR_HAS_LOCATION (pred))
3540 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3541 then_ = shortcut_cond_expr (expr);
3542 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3543 pred = TREE_OPERAND (pred, 0);
3544 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3545 SET_EXPR_LOCATION (expr, locus);
3549 if (!then_se)
3551 /* If there is no 'then', turn
3552 if (a || b); else d
3553 into
3554 if (a); else if (b); else d. */
3555 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3557 /* Keep the original source location on the first 'if'. */
3558 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3559 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3560 /* Set the source location of the || on the second 'if'. */
3561 if (EXPR_HAS_LOCATION (pred))
3562 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3563 else_ = shortcut_cond_expr (expr);
3564 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3565 pred = TREE_OPERAND (pred, 0);
3566 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3567 SET_EXPR_LOCATION (expr, locus);
3571 /* If we're done, great. */
3572 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3573 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3574 return expr;
3576 /* Otherwise we need to mess with gotos. Change
3577 if (a) c; else d;
3579 if (a); else goto no;
3580 c; goto end;
3581 no: d; end:
3582 and recursively gimplify the condition. */
3584 true_label = false_label = end_label = NULL_TREE;
3586 /* If our arms just jump somewhere, hijack those labels so we don't
3587 generate jumps to jumps. */
3589 if (then_
3590 && TREE_CODE (then_) == GOTO_EXPR
3591 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3593 true_label = GOTO_DESTINATION (then_);
3594 then_ = NULL;
3595 then_se = false;
3598 if (else_
3599 && TREE_CODE (else_) == GOTO_EXPR
3600 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3602 false_label = GOTO_DESTINATION (else_);
3603 else_ = NULL;
3604 else_se = false;
3607 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3608 if (true_label)
3609 true_label_p = &true_label;
3610 else
3611 true_label_p = NULL;
3613 /* The 'else' branch also needs a label if it contains interesting code. */
3614 if (false_label || else_se)
3615 false_label_p = &false_label;
3616 else
3617 false_label_p = NULL;
3619 /* If there was nothing else in our arms, just forward the label(s). */
3620 if (!then_se && !else_se)
3621 return shortcut_cond_r (pred, true_label_p, false_label_p,
3622 EXPR_LOC_OR_LOC (expr, input_location));
3624 /* If our last subexpression already has a terminal label, reuse it. */
3625 if (else_se)
3626 t = expr_last (else_);
3627 else if (then_se)
3628 t = expr_last (then_);
3629 else
3630 t = NULL;
3631 if (t && TREE_CODE (t) == LABEL_EXPR)
3632 end_label = LABEL_EXPR_LABEL (t);
3634 /* If we don't care about jumping to the 'else' branch, jump to the end
3635 if the condition is false. */
3636 if (!false_label_p)
3637 false_label_p = &end_label;
3639 /* We only want to emit these labels if we aren't hijacking them. */
3640 emit_end = (end_label == NULL_TREE);
3641 emit_false = (false_label == NULL_TREE);
3643 /* We only emit the jump over the else clause if we have to--if the
3644 then clause may fall through. Otherwise we can wind up with a
3645 useless jump and a useless label at the end of gimplified code,
3646 which will cause us to think that this conditional as a whole
3647 falls through even if it doesn't. If we then inline a function
3648 which ends with such a condition, that can cause us to issue an
3649 inappropriate warning about control reaching the end of a
3650 non-void function. */
3651 jump_over_else = block_may_fallthru (then_);
3653 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3654 EXPR_LOC_OR_LOC (expr, input_location));
 /* Assemble the rewritten statement list: condition first, then the
 'then' arm, then (optionally) the jump over the 'else' arm, the
 false label, the 'else' arm, and finally the end label. */
3656 expr = NULL;
3657 append_to_statement_list (pred, &expr);
3659 append_to_statement_list (then_, &expr);
3660 if (else_se)
3662 if (jump_over_else)
3664 tree last = expr_last (expr);
3665 t = build_and_jump (&end_label);
 /* Give the skip-jump the location of the last 'then' statement,
 so diagnostics don't point at the condition. */
3666 if (EXPR_HAS_LOCATION (last))
3667 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3668 append_to_statement_list (t, &expr);
3670 if (emit_false)
3672 t = build1 (LABEL_EXPR, void_type_node, false_label);
3673 append_to_statement_list (t, &expr);
3675 append_to_statement_list (else_, &expr);
3677 if (emit_end && end_label)
3679 t = build1 (LABEL_EXPR, void_type_node, end_label);
3680 append_to_statement_list (t, &expr);
3683 return expr;
3686 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
 Truth expressions and comparisons are retyped in place; anything else
 already boolean is returned unchanged, otherwise a conversion to
 boolean_type_node is folded around EXPR. */
3688 tree
3689 gimple_boolify (tree expr)
3691 tree type = TREE_TYPE (expr);
3692 location_t loc = EXPR_LOCATION (expr);
3694 if (TREE_CODE (expr) == NE_EXPR
3695 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3696 && integer_zerop (TREE_OPERAND (expr, 1)))
3698 tree call = TREE_OPERAND (expr, 0);
3699 tree fn = get_callee_fndecl (call);
3701 /* For __builtin_expect ((long) (x), y) recurse into x as well
3702 if x is truth_value_p. */
3703 if (fn
3704 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3705 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3706 && call_expr_nargs (call) == 2)
3708 tree arg = CALL_EXPR_ARG (call, 0);
3709 if (arg)
 /* Strip a conversion back to the call's type before testing. */
3711 if (TREE_CODE (arg) == NOP_EXPR
3712 && TREE_TYPE (arg) == TREE_TYPE (call))
3713 arg = TREE_OPERAND (arg, 0);
3714 if (truth_value_p (TREE_CODE (arg)))
3716 arg = gimple_boolify (arg);
3717 CALL_EXPR_ARG (call, 0)
3718 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3724 switch (TREE_CODE (expr))
3726 case TRUTH_AND_EXPR:
3727 case TRUTH_OR_EXPR:
3728 case TRUTH_XOR_EXPR:
3729 case TRUTH_ANDIF_EXPR:
3730 case TRUTH_ORIF_EXPR:
3731 /* Also boolify the arguments of truth exprs. */
3732 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3733 /* FALLTHRU */
3735 case TRUTH_NOT_EXPR:
3736 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3738 /* These expressions always produce boolean results. */
3739 if (TREE_CODE (type) != BOOLEAN_TYPE)
3740 TREE_TYPE (expr) = boolean_type_node;
3741 return expr;
3743 case ANNOTATE_EXPR:
3744 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3746 case annot_expr_ivdep_kind:
3747 case annot_expr_no_vector_kind:
3748 case annot_expr_vector_kind:
 /* Loop annotations wrap a condition; boolify the wrapped expression
 and retype the annotation itself. */
3749 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3750 if (TREE_CODE (type) != BOOLEAN_TYPE)
3751 TREE_TYPE (expr) = boolean_type_node;
3752 return expr;
3753 default:
3754 gcc_unreachable ();
3757 default:
3758 if (COMPARISON_CLASS_P (expr))
3760 /* These expressions always produce boolean results. */
3761 if (TREE_CODE (type) != BOOLEAN_TYPE)
3762 TREE_TYPE (expr) = boolean_type_node;
3763 return expr;
3765 /* Other expressions that get here must have boolean values, but
3766 might need to be converted to the appropriate mode. */
3767 if (TREE_CODE (type) == BOOLEAN_TYPE)
3768 return expr;
3769 return fold_convert_loc (loc, boolean_type_node, expr);
3773 /* Given a conditional expression *EXPR_P without side effects, gimplify
3774 its operands. New statements are inserted to PRE_P. */
3776 static enum gimplify_status
3777 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3779 tree expr = *expr_p, cond;
3780 enum gimplify_status ret, tret;
3781 enum tree_code code;
3783 cond = gimple_boolify (COND_EXPR_COND (expr));
3785 /* We need to handle && and || specially, as their gimplification
3786 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3787 code = TREE_CODE (cond);
3788 if (code == TRUTH_ANDIF_EXPR)
3789 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3790 else if (code == TRUTH_ORIF_EXPR)
3791 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3792 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3793 COND_EXPR_COND (*expr_p) = cond;
3795 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3796 is_gimple_val, fb_rvalue);
3797 ret = MIN (ret, tret);
3798 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3799 is_gimple_val, fb_rvalue);
3801 return MIN (ret, tret);
3804 /* Return true if evaluating EXPR could trap.
3805 EXPR is GENERIC, while tree_could_trap_p can be called
3806 only on GIMPLE. */
3808 static bool
3809 generic_expr_could_trap_p (tree expr)
3811 unsigned i, n;
3813 if (!expr || is_gimple_val (expr))
3814 return false;
3816 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3817 return true;
3819 n = TREE_OPERAND_LENGTH (expr);
3820 for (i = 0; i < n; i++)
3821 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3822 return true;
3824 return false;
3827 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3828 into
3830 if (p) if (p)
3831 t1 = a; a;
3832 else or else
3833 t1 = b; b;
3836 The second form is used when *EXPR_P is of type void.
3838 PRE_P points to the list where side effects that must happen before
3839 *EXPR_P should be stored.
 FALLBACK tells what kind of result (rvalue/lvalue) the caller accepts.
 Returns GS_ALL_DONE on success, GS_ERROR on failure. */
3841 static enum gimplify_status
3842 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3844 tree expr = *expr_p;
3845 tree type = TREE_TYPE (expr);
3846 location_t loc = EXPR_LOCATION (expr);
3847 tree tmp, arm1, arm2;
3848 enum gimplify_status ret;
3849 tree label_true, label_false, label_cont;
3850 bool have_then_clause_p, have_else_clause_p;
3851 gcond *cond_stmt;
3852 enum tree_code pred_code;
3853 gimple_seq seq = NULL;
3855 /* If this COND_EXPR has a value, copy the values into a temporary within
3856 the arms. */
3857 if (!VOID_TYPE_P (type))
3859 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3860 tree result;
3862 /* If either an rvalue is ok or we do not require an lvalue, create the
3863 temporary. But we cannot do that if the type is addressable. */
3864 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3865 && !TREE_ADDRESSABLE (type))
3867 if (gimplify_ctxp->allow_rhs_cond_expr
3868 /* If either branch has side effects or could trap, it can't be
3869 evaluated unconditionally. */
3870 && !TREE_SIDE_EFFECTS (then_)
3871 && !generic_expr_could_trap_p (then_)
3872 && !TREE_SIDE_EFFECTS (else_)
3873 && !generic_expr_could_trap_p (else_))
3874 return gimplify_pure_cond_expr (expr_p, pre_p);
3876 tmp = create_tmp_var (type, "iftmp");
3877 result = tmp;
3880 /* Otherwise, only create and copy references to the values. */
3881 else
3883 type = build_pointer_type (type);
3885 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3886 then_ = build_fold_addr_expr_loc (loc, then_);
3888 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3889 else_ = build_fold_addr_expr_loc (loc, else_);
3891 expr
3892 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3894 tmp = create_tmp_var (type, "iftmp");
3895 result = build_simple_mem_ref_loc (loc, tmp);
3898 /* Build the new then clause, `tmp = then_;'. But don't build the
3899 assignment if the value is void; in C++ it can be if it's a throw. */
3900 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3901 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3903 /* Similarly, build the new else clause, `tmp = else_;'. */
3904 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3905 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3907 TREE_TYPE (expr) = void_type_node;
3908 recalculate_side_effects (expr);
3910 /* Move the COND_EXPR to the prequeue. */
3911 gimplify_stmt (&expr, pre_p);
3913 *expr_p = result;
3914 return GS_ALL_DONE;
3917 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3918 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3919 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3920 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3922 /* Make sure the condition has BOOLEAN_TYPE. */
3923 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3925 /* Break apart && and || conditions. */
3926 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3927 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3929 expr = shortcut_cond_expr (expr);
3931 if (expr != *expr_p)
3933 *expr_p = expr;
3935 /* We can't rely on gimplify_expr to re-gimplify the expanded
3936 form properly, as cleanups might cause the target labels to be
3937 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3938 set up a conditional context. */
3939 gimple_push_condition ();
3940 gimplify_stmt (expr_p, &seq);
3941 gimple_pop_condition (pre_p);
3942 gimple_seq_add_seq (pre_p, seq);
3944 return GS_ALL_DONE;
3948 /* Now do the normal gimplification. */
3950 /* Gimplify condition. */
3951 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3952 fb_rvalue);
3953 if (ret == GS_ERROR)
3954 return GS_ERROR;
3955 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3957 gimple_push_condition ();
 /* If an arm is a direct goto to a local label, reuse that label as the
 branch target instead of creating an artificial one, so we don't emit
 jumps to jumps. At -O0, only do this when locations match, to keep
 debug line info accurate. */
3959 have_then_clause_p = have_else_clause_p = false;
3960 if (TREE_OPERAND (expr, 1) != NULL
3961 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3962 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3963 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3964 == current_function_decl)
3965 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3966 have different locations, otherwise we end up with incorrect
3967 location information on the branches. */
3968 && (optimize
3969 || !EXPR_HAS_LOCATION (expr)
3970 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3971 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3973 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3974 have_then_clause_p = true;
3976 else
3977 label_true = create_artificial_label (UNKNOWN_LOCATION);
3978 if (TREE_OPERAND (expr, 2) != NULL
3979 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3980 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3981 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3982 == current_function_decl)
3983 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3984 have different locations, otherwise we end up with incorrect
3985 location information on the branches. */
3986 && (optimize
3987 || !EXPR_HAS_LOCATION (expr)
3988 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3989 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3991 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3992 have_else_clause_p = true;
3994 else
3995 label_false = create_artificial_label (UNKNOWN_LOCATION);
 /* Emit the GIMPLE_COND and try to fold it immediately. */
3997 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3998 &arm2);
3999 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4000 label_false);
4001 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4002 gimplify_seq_add_stmt (&seq, cond_stmt);
4003 gimple_stmt_iterator gsi = gsi_last (seq);
4004 maybe_fold_stmt (&gsi);
4006 label_cont = NULL_TREE;
4007 if (!have_then_clause_p)
4009 /* For if (...) {} else { code; } put label_true after
4010 the else block. */
4011 if (TREE_OPERAND (expr, 1) == NULL_TREE
4012 && !have_else_clause_p
4013 && TREE_OPERAND (expr, 2) != NULL_TREE)
4014 label_cont = label_true;
4015 else
4017 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4018 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4019 /* For if (...) { code; } else {} or
4020 if (...) { code; } else goto label; or
4021 if (...) { code; return; } else { ... }
4022 label_cont isn't needed. */
4023 if (!have_else_clause_p
4024 && TREE_OPERAND (expr, 2) != NULL_TREE
4025 && gimple_seq_may_fallthru (seq))
4027 gimple *g;
4028 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4030 g = gimple_build_goto (label_cont);
4032 /* GIMPLE_COND's are very low level; they have embedded
4033 gotos. This particular embedded goto should not be marked
4034 with the location of the original COND_EXPR, as it would
4035 correspond to the COND_EXPR's condition, not the ELSE or the
4036 THEN arms. To avoid marking it with the wrong location, flag
4037 it as "no location". */
4038 gimple_set_do_not_emit_location (g);
4040 gimplify_seq_add_stmt (&seq, g);
4044 if (!have_else_clause_p)
4046 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4047 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4049 if (label_cont)
4050 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4052 gimple_pop_condition (pre_p);
4053 gimple_seq_add_seq (pre_p, seq);
4055 if (ret == GS_ERROR)
4056 ; /* Do nothing. */
4057 else if (have_then_clause_p || have_else_clause_p)
4058 ret = GS_ALL_DONE;
4059 else
4061 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4062 expr = TREE_OPERAND (expr, 0);
4063 gimplify_stmt (&expr, pre_p);
4066 *expr_p = NULL;
4067 return ret;
4070 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4071 to be marked addressable.
4073 We cannot rely on such an expression being directly markable if a temporary
4074 has been created by the gimplification. In this case, we create another
4075 temporary and initialize it with a copy, which will become a store after we
4076 mark it addressable. This can happen if the front-end passed us something
4077 that it could not mark addressable yet, like a Fortran pass-by-reference
4078 parameter (int) floatvar. */
4080 static void
4081 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4083 while (handled_component_p (*expr_p))
4084 expr_p = &TREE_OPERAND (*expr_p, 0);
4085 if (is_gimple_reg (*expr_p))
4087 /* Do not allow an SSA name as the temporary. */
4088 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4089 DECL_GIMPLE_REG_P (var) = 0;
4090 *expr_p = var;
4094 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4095 a call to __builtin_memcpy. */
4097 static enum gimplify_status
4098 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4099 gimple_seq *seq_p)
4101 tree t, to, to_ptr, from, from_ptr;
4102 gcall *gs;
4103 location_t loc = EXPR_LOCATION (*expr_p);
4105 to = TREE_OPERAND (*expr_p, 0);
4106 from = TREE_OPERAND (*expr_p, 1);
4108 /* Mark the RHS addressable. Beware that it may not be possible to do so
4109 directly if a temporary has been created by the gimplification. */
4110 prepare_gimple_addressable (&from, seq_p);
4112 mark_addressable (from);
4113 from_ptr = build_fold_addr_expr_loc (loc, from);
4114 gimplify_arg (&from_ptr, seq_p, loc);
4116 mark_addressable (to);
4117 to_ptr = build_fold_addr_expr_loc (loc, to);
4118 gimplify_arg (&to_ptr, seq_p, loc);
4120 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4122 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4124 if (want_value)
4126 /* tmp = memcpy() */
4127 t = create_tmp_var (TREE_TYPE (to_ptr));
4128 gimple_call_set_lhs (gs, t);
4129 gimplify_seq_add_stmt (seq_p, gs);
4131 *expr_p = build_simple_mem_ref (t);
4132 return GS_ALL_DONE;
4135 gimplify_seq_add_stmt (seq_p, gs);
4136 *expr_p = NULL;
4137 return GS_ALL_DONE;
4140 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4141 a call to __builtin_memset. In this case we know that the RHS is
4142 a CONSTRUCTOR with an empty element list. */
4144 static enum gimplify_status
4145 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4146 gimple_seq *seq_p)
4148 tree t, from, to, to_ptr;
4149 gcall *gs;
4150 location_t loc = EXPR_LOCATION (*expr_p);
4152 /* Assert our assumptions, to abort instead of producing wrong code
4153 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4154 not be immediately exposed. */
4155 from = TREE_OPERAND (*expr_p, 1);
4156 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4157 from = TREE_OPERAND (from, 0);
4159 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4160 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4162 /* Now proceed. */
4163 to = TREE_OPERAND (*expr_p, 0);
4165 to_ptr = build_fold_addr_expr_loc (loc, to);
4166 gimplify_arg (&to_ptr, seq_p, loc);
4167 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4169 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4171 if (want_value)
4173 /* tmp = memset() */
4174 t = create_tmp_var (TREE_TYPE (to_ptr));
4175 gimple_call_set_lhs (gs, t);
4176 gimplify_seq_add_stmt (seq_p, gs);
4178 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4179 return GS_ALL_DONE;
4182 gimplify_seq_add_stmt (seq_p, gs);
4183 *expr_p = NULL;
4184 return GS_ALL_DONE;
4187 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4188 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4189 assignment. Return non-null if we detect a potential overlap.
 This struct carries the description of the lhs that the walk
 callback (gimplify_init_ctor_preeval_1) checks against. */
4191 struct gimplify_init_ctor_preeval_data
4193 /* The base decl of the lhs object. May be NULL, in which case we
4194 have to assume the lhs is indirect. */
4195 tree lhs_base_decl;
4197 /* The alias set of the lhs object. */
4198 alias_set_type lhs_alias_set;
 /* walk_tree callback: return the offending tree (non-null) as soon as a
 potential overlap between the walked CONSTRUCTOR element and the lhs
 described by XDATA (a gimplify_init_ctor_preeval_data) is detected;
 return NULL to keep walking. */
4201 static tree
4202 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4204 struct gimplify_init_ctor_preeval_data *data
4205 = (struct gimplify_init_ctor_preeval_data *) xdata;
4206 tree t = *tp;
4208 /* If we find the base object, obviously we have overlap. */
4209 if (data->lhs_base_decl == t)
4210 return t;
4212 /* If the constructor component is indirect, determine if we have a
4213 potential overlap with the lhs. The only bits of information we
4214 have to go on at this point are addressability and alias sets. */
4215 if ((INDIRECT_REF_P (t)
4216 || TREE_CODE (t) == MEM_REF)
4217 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4218 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4219 return t;
4221 /* If the constructor component is a call, determine if it can hide a
4222 potential overlap with the lhs through an INDIRECT_REF like above.
4223 ??? Ugh - this is completely broken. In fact this whole analysis
4224 doesn't look conservative. */
4225 if (TREE_CODE (t) == CALL_EXPR)
4227 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
 /* Check each pointer parameter type of the callee for a possible
 aliasing conflict with the lhs. */
4229 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4230 if (POINTER_TYPE_P (TREE_VALUE (type))
4231 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4232 && alias_sets_conflict_p (data->lhs_alias_set,
4233 get_alias_set
4234 (TREE_TYPE (TREE_VALUE (type)))))
4235 return t;
 /* Types and declarations contain nothing further worth scanning. */
4238 if (IS_TYPE_OR_DECL_P (t))
4239 *walk_subtrees = 0;
4240 return NULL;
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4390 /* Return true if FDECL is accessing a field that is zero sized. */
4392 static bool
4393 zero_sized_field_decl (const_tree fdecl)
4395 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4396 && integer_zerop (DECL_SIZE (fdecl)))
4397 return true;
4398 return false;
4401 /* Return true if TYPE is zero sized. */
4403 static bool
4404 zero_sized_type (const_tree type)
4406 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4407 && integer_zerop (TYPE_SIZE (type)))
4408 return true;
4409 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Elements already zero need no store when the whole object was
	 block-cleared first.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested aggregate constructors; vectors keep their
	 CONSTRUCTOR form through gimple, so are assigned whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4501 /* Return the appropriate RHS predicate for this LHS. */
4503 gimple_predicate
4504 rhs_predicate_for (tree lhs)
4506 if (is_gimple_reg (lhs))
4507 return is_gimple_reg_rhs_or_call;
4508 else
4509 return is_gimple_mem_rhs_or_call;
4512 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4513 before the LHS has been gimplified. */
4515 static gimple_predicate
4516 initial_rhs_predicate_for (tree lhs)
4518 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4519 return is_gimple_reg_rhs_or_call;
4520 else
4521 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  GIMPLE_TEST_F and FALLBACK describe what the caller
   will accept, as for gimplify_expr.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  The original
   constructor is left untouched (copy-on-write).  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only substitute the literal's initializer when neither the
	     literal nor its decl has had its address taken.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: lazily copy the node and its element vector so
	 ORIG_CTOR stays unmodified.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: delegates directly to the shared folder.  */
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

		*(const A*)(A*)&x

	       where the type of "x" is a (possibly cv-qualified variant
	       of "A"), treat the entire expression as identical to "x".
	       This kind of code arises in C++ when an object is bound
	       to a const reference, and if "x" is a TARGET_EXPR we want
	       to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p)) (*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV.  */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
5301 /* Return true if T looks like a valid GIMPLE statement. */
5303 static bool
5304 is_gimple_stmt (tree t)
5306 const enum tree_code code = TREE_CODE (t);
5308 switch (code)
5310 case NOP_EXPR:
5311 /* The only valid NOP_EXPR is the empty statement. */
5312 return IS_EMPTY_STMT (t);
5314 case BIND_EXPR:
5315 case COND_EXPR:
5316 /* These are only valid if they're void. */
5317 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5319 case SWITCH_EXPR:
5320 case GOTO_EXPR:
5321 case RETURN_EXPR:
5322 case LABEL_EXPR:
5323 case CASE_LABEL_EXPR:
5324 case TRY_CATCH_EXPR:
5325 case TRY_FINALLY_EXPR:
5326 case EH_FILTER_EXPR:
5327 case CATCH_EXPR:
5328 case ASM_EXPR:
5329 case STATEMENT_LIST:
5330 case OACC_PARALLEL:
5331 case OACC_KERNELS:
5332 case OACC_DATA:
5333 case OACC_HOST_DATA:
5334 case OACC_DECLARE:
5335 case OACC_UPDATE:
5336 case OACC_ENTER_DATA:
5337 case OACC_EXIT_DATA:
5338 case OACC_CACHE:
5339 case OMP_PARALLEL:
5340 case OMP_FOR:
5341 case OMP_SIMD:
5342 case CILK_SIMD:
5343 case OMP_DISTRIBUTE:
5344 case OACC_LOOP:
5345 case OMP_SECTIONS:
5346 case OMP_SECTION:
5347 case OMP_SINGLE:
5348 case OMP_MASTER:
5349 case OMP_TASKGROUP:
5350 case OMP_ORDERED:
5351 case OMP_CRITICAL:
5352 case OMP_TASK:
5353 case OMP_TARGET:
5354 case OMP_TARGET_DATA:
5355 case OMP_TARGET_UPDATE:
5356 case OMP_TARGET_ENTER_DATA:
5357 case OMP_TARGET_EXIT_DATA:
5358 case OMP_TASKLOOP:
5359 case OMP_TEAMS:
5360 /* These are always void. */
5361 return true;
5363 case CALL_EXPR:
5364 case MODIFY_EXPR:
5365 case PREDICT_EXPR:
5366 /* These are valid regardless of their type. */
5367 return true;
5369 default:
5370 return false;
5375 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5376 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5377 DECL_GIMPLE_REG_P set.
5379 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5380 other, unmodified part of the complex object just before the total store.
5381 As a consequence, if the object is still uninitialized, an undefined value
5382 will be loaded into a register, which may result in a spurious exception
5383 if the register is floating-point and the value happens to be a signaling
5384 NaN for example. Then the fully-fledged complex operations lowering pass
5385 followed by a DCE pass are necessary in order to fix things up. */
5387 static enum gimplify_status
5388 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5389 bool want_value)
5391 enum tree_code code, ocode;
5392 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5394 lhs = TREE_OPERAND (*expr_p, 0);
5395 rhs = TREE_OPERAND (*expr_p, 1);
5396 code = TREE_CODE (lhs);
5397 lhs = TREE_OPERAND (lhs, 0);
5399 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5400 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5401 TREE_NO_WARNING (other) = 1;
5402 other = get_formal_tmp_var (other, pre_p);
5404 realpart = code == REALPART_EXPR ? rhs : other;
5405 imagpart = code == REALPART_EXPR ? other : rhs;
5407 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5408 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5409 else
5410 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5412 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5413 *expr_p = (want_value) ? rhs : NULL_TREE;
5415 return GS_ALL_DONE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
      *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
      in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      /* A clobber has no value, so WANT_VALUE must be false here.  */
      gcc_assert (!want_value
		  && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended as an
	     extra trailing argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      /* Give the artificial temporary a name derived from the user
	 variable so debug output is meaningful.  */
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* For a volatile LHS we must not read the result back from *TO_P, so
     capture the RHS value in a temporary first.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with three arguments is lowered to the
	     internal function form here.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      if (EXPR_CILK_SPAWN (*from_p))
	gimplify_cilk_detach (pre_p);
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* See the volatile handling above: re-reading a volatile LHS would
	 be an extra access, so hand back the (temporary) RHS instead.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5710 /* Gimplify a comparison between two variable-sized objects. Do this
5711 with a call to BUILT_IN_MEMCMP. */
5713 static enum gimplify_status
5714 gimplify_variable_sized_compare (tree *expr_p)
5716 location_t loc = EXPR_LOCATION (*expr_p);
5717 tree op0 = TREE_OPERAND (*expr_p, 0);
5718 tree op1 = TREE_OPERAND (*expr_p, 1);
5719 tree t, arg, dest, src, expr;
5721 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5722 arg = unshare_expr (arg);
5723 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5724 src = build_fold_addr_expr_loc (loc, op1);
5725 dest = build_fold_addr_expr_loc (loc, op0);
5726 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5727 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5729 expr
5730 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5731 SET_EXPR_LOCATION (expr, loc);
5732 *expr_p = expr;
5734 return GS_OK;
5737 /* Gimplify a comparison between two aggregate objects of integral scalar
5738 mode as a comparison between the bitwise equivalent scalar values. */
5740 static enum gimplify_status
5741 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5743 location_t loc = EXPR_LOCATION (*expr_p);
5744 tree op0 = TREE_OPERAND (*expr_p, 0);
5745 tree op1 = TREE_OPERAND (*expr_p, 1);
5747 tree type = TREE_TYPE (op0);
5748 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5750 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5751 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5753 *expr_p
5754 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5756 return GS_OK;
5759 /* Gimplify an expression sequence. This function gimplifies each
5760 expression and rewrites the original expression with the last
5761 expression of the sequence in GIMPLE form.
5763 PRE_P points to the list where the side effects for all the
5764 expressions in the sequence will be emitted.
5766 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5768 static enum gimplify_status
5769 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5771 tree t = *expr_p;
5775 tree *sub_p = &TREE_OPERAND (t, 0);
5777 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5778 gimplify_compound_expr (sub_p, pre_p, false);
5779 else
5780 gimplify_stmt (sub_p, pre_p);
5782 t = TREE_OPERAND (t, 1);
5784 while (TREE_CODE (t) == COMPOUND_EXPR);
5786 *expr_p = t;
5787 if (want_value)
5788 return GS_OK;
5789 else
5791 gimplify_stmt (expr_p, pre_p);
5792 return GS_ALL_DONE;
5796 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5797 gimplify. After gimplification, EXPR_P will point to a new temporary
5798 that holds the original value of the SAVE_EXPR node.
5800 PRE_P points to the list where side effects that must happen before
5801 *EXPR_P should be stored. */
5803 static enum gimplify_status
5804 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5806 enum gimplify_status ret = GS_ALL_DONE;
5807 tree val;
5809 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5810 val = TREE_OPERAND (*expr_p, 0);
5812 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5813 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5815 /* The operand may be a void-valued expression. It is
5816 being executed only for its side-effects. */
5817 if (TREE_TYPE (val) == void_type_node)
5819 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5820 is_gimple_stmt, fb_none);
5821 val = NULL;
5823 else
5824 /* The temporary may not be an SSA name as later abnormal and EH
5825 control flow may invalidate use/def domination. */
5826 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5828 TREE_OPERAND (*expr_p, 0) = val;
5829 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5832 *expr_p = val;
5834 return ret;
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
      *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* &MEM[p, 0] is equivalent to &*p, so share the INDIRECT_REF
	 handling; a nonzero offset cannot be elided this way.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify the output operands and remember their
     constraints, which are needed to parse matching input constraints.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
         it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  /* For an in/out operand, preload the temporary with the
		     current value before the asm runs.  */
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      /* Copy the asm result out of the temporary afterwards.  */
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  /* First compute the length of the resulting constraint
		     string, then build it alternative by alternative.  */
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Re-parse this alternative as an output constraint
			 (temporarily prefixing it with '=') to learn
			 whether it allows a register.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Second pass: gimplify the input operands.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification, just collecting.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm without outputs is treated as volatile since its only
	 purpose can be its side effects.  */
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* TEMP is non-NULL when the wrapped expression yields a value; in that
     case we return the value through *EXPR_P at the end.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body; any cleanups it contains appear as
     GIMPLE_WITH_CLEANUP_EXPR markers in BODY_SEQUENCE.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each WCE marker into a GIMPLE_TRY protecting everything that
     follows it in the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
        {
          if (gsi_one_before_end_p (iter))
            {
              /* The marker is last: nothing follows that needs protecting,
                 so just emit the cleanup inline (unless it is EH-only, in
                 which case it can never run) and drop the marker.

                 Note that gsi_insert_seq_before and gsi_remove do not
                 scan operands, unlike some other sequence mutators.  */
              if (!gimple_wce_cleanup_eh_only (wce))
                gsi_insert_seq_before_without_update (&iter,
                                                      gimple_wce_cleanup (wce),
                                                      GSI_SAME_STMT);
              gsi_remove (&iter, true);
              break;
            }
          else
            {
              gtry *gtry;
              gimple_seq seq;
              enum gimple_try_flags kind;

              if (gimple_wce_cleanup_eh_only (wce))
                kind = GIMPLE_TRY_CATCH;
              else
                kind = GIMPLE_TRY_FINALLY;
              /* Everything after the marker becomes the protected body.  */
              seq = gsi_split_seq_after (iter);

              gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
              /* Do not use gsi_replace here, as it may scan operands.
                 We want to do a simple structural modification only.  */
              gsi_set_stmt (&iter, gtry);
              /* Continue scanning inside the new try body, so nested WCE
                 markers are also converted.  */
              iter = gsi_start (gtry->eval);
            }
        }
      else
        gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally; this is
   only valid for clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
                     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
         want to run the cleanup if we actually ran the initialization that
         necessitates it, but we want to run it after the end of the
         conditional context.  So we wrap the try/finally around the
         condition and use a flag to determine whether or not to actually
         run the destructor.  Thus

           test ? f(A()) : 0

         becomes (approximately)

           flag = 0;
           try {
             if (test) { A::A(temp); flag = 1; val = f(temp); }
             else { val = 0; }
           } finally {
             if (flag) A::~A(temp);
           }
         */
      if (force_uncond)
        {
          /* Caller vouches the cleanup (a clobber) is safe to run even if
             the guarded initialization did not happen, so no flag var.  */
          gimplify_stmt (&cleanup, &cleanup_stmts);
          wce = gimple_build_wce (cleanup_stmts);
          gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
        }
      else
        {
          tree flag = create_tmp_var (boolean_type_node, "cleanup");
          gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
          gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

          /* Guard the cleanup with the flag; the flag is set to true
             (FTRUE below) only on the path that ran the initialization.  */
          cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
          gimplify_stmt (&cleanup, &cleanup_stmts);
          wce = gimple_build_wce (cleanup_stmts);

          gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
          gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
          gimplify_seq_add_stmt (pre_p, ftrue);

          /* Because of this manipulation, and the EH edges that jump
             threading cannot redirect, the temporary (VAR) will appear
             to be used uninitialized.  Don't warn.  */
          TREE_NO_WARNING (var) = 1;
        }
    }
  else
    {
      /* Unconditional context: a plain WCE marker suffices;
         gimplify_cleanup_point_expr will turn it into a GIMPLE_TRY.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  /* Where in *PRE_P an ASAN unpoison call would need to be inserted
     (before any statements emitted below), and whether *PRE_P was empty
     at that point.  */
  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
         to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
        {
          if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
            gimplify_type_sizes (TREE_TYPE (temp), pre_p);
          gimplify_vla_decl (temp, pre_p);
        }
      else
        {
          /* Save location where we need to place unpoisoning.  It's possible
             that a variable will be converted to needs_to_live_in_memory.  */
          unpoison_it = gsi_last (*pre_p);
          unpoison_empty_seq = gsi_end_p (unpoison_it);

          gimple_add_tmp_var (temp);
        }

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
         expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
        ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
        {
          /* Otherwise build TEMP = INIT and gimplify that instead.  */
          tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
          init = init_expr;
          ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
          init = NULL;
          ggc_free (init_expr);
        }
      if (ret == GS_ERROR)
        {
          /* PR c++/28266 Make sure this is expanded only once. */
          TARGET_EXPR_INITIAL (targ) = NULL_TREE;
          return GS_ERROR;
        }
      if (init)
        gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
        {
          if (CLEANUP_EH_ONLY (targ))
            gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
                                 CLEANUP_EH_ONLY (targ), pre_p);
          else
            /* Delay pushing a normal cleanup until after the clobber and
               ASAN cleanups below, so it runs first (cleanups are LIFO).  */
            cleanup = TARGET_EXPR_CLEANUP (targ);
        }

      /* Add a clobber for the temporary going out of scope, like
         gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
          && needs_to_live_in_memory (temp))
        {
          if (flag_stack_reuse == SR_ALL)
            {
              tree clobber = build_constructor (TREE_TYPE (temp),
                                                NULL);
              TREE_THIS_VOLATILE (clobber) = true;
              clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
              /* Clobbers may be emitted unconditionally.  */
              gimple_push_cleanup (temp, clobber, false, pre_p, true);
            }
          if (asan_poisoned_variables
              && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
              && dbg_cnt (asan_use_after_scope))
            {
              tree asan_cleanup = build_asan_poison_call_expr (temp);
              if (asan_cleanup)
                {
                  if (unpoison_empty_seq)
                    unpoison_it = gsi_start (*pre_p);

                  /* Unpoison before first use, poison on scope exit.  */
                  asan_poison_variable (temp, false, &unpoison_it,
                                        unpoison_empty_seq);
                  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
                }
            }
        }
      if (cleanup)
        gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
6538 /* Gimplification of expression trees. */
6540 /* Gimplify an expression which appears at statement context. The
6541 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6542 NULL, a new sequence is allocated.
6544 Return true if we actually added a statement to the queue. */
6546 bool
6547 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6549 gimple_seq_node last;
6551 last = gimple_seq_last (*seq_p);
6552 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6553 return last != gimple_seq_last (*seq_p);
/* Add FIRSTPRIVATE entries for DECL in the OpenMP contexts surrounding CTX.
   If entries already exist, force them to be some flavor of private.
   If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Walk outwards through the enclosing contexts.  */
  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
        {
          /* Demote an existing SHARED entry to FIRSTPRIVATE (keeping only
             the SEEN bit); a MAP entry becomes map-to-only.  Any other
             existing flavor already implies privacy, so stop.  */
          if (n->value & GOVD_SHARED)
            n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
          else if (n->value & GOVD_MAP)
            n->value |= GOVD_MAP_TO_ONLY;
          else
            return;
        }
      else if ((ctx->region_type & ORT_TARGET) != 0)
        {
          if (ctx->target_map_scalars_firstprivate)
            omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
          else
            omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
        }
      else if (ctx->region_type != ORT_WORKSHARE
               && ctx->region_type != ORT_SIMD
               && ctx->region_type != ORT_ACC
               && !(ctx->region_type & ORT_TARGET_DATA))
        omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
6598 /* Similarly for each of the type sizes of TYPE. */
6600 static void
6601 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6603 if (type == NULL || type == error_mark_node)
6604 return;
6605 type = TYPE_MAIN_VARIANT (type);
6607 if (ctx->privatized_types->add (type))
6608 return;
6610 switch (TREE_CODE (type))
6612 case INTEGER_TYPE:
6613 case ENUMERAL_TYPE:
6614 case BOOLEAN_TYPE:
6615 case REAL_TYPE:
6616 case FIXED_POINT_TYPE:
6617 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6618 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6619 break;
6621 case ARRAY_TYPE:
6622 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6623 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6624 break;
6626 case RECORD_TYPE:
6627 case UNION_TYPE:
6628 case QUAL_UNION_TYPE:
6630 tree field;
6631 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6632 if (TREE_CODE (field) == FIELD_DECL)
6634 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6635 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6638 break;
6640 case POINTER_TYPE:
6641 case REFERENCE_TYPE:
6642 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6643 break;
6645 default:
6646 break;
6649 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6650 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6651 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
          || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
         sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
         FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
         reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
                  || ((nflags & GOVD_DATA_SHARE_CLASS)
                      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
                  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
         replacement is private, else FIRSTPRIVATE since we'll need the
         address of the original variable either for SHARED, or for the
         copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
        {
          if (flags & GOVD_MAP)
            nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
          else if (flags & GOVD_PRIVATE)
            nflags = GOVD_PRIVATE;
          else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
                   && (flags & GOVD_FIRSTPRIVATE))
            nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
          else
            nflags = GOVD_FIRSTPRIVATE;
          nflags |= flags & GOVD_SEEN;
          /* The DECL_VALUE_EXPR of a VLA decl is *PTR; recurse on PTR.  */
          t = DECL_VALUE_EXPR (decl);
          gcc_assert (TREE_CODE (t) == INDIRECT_REF);
          t = TREE_OPERAND (t, 0);
          gcc_assert (DECL_P (t));
          omp_add_variable (ctx, t, nflags);
        }

      /* Add all of the variable and type parameters (which should have
         been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
         of PRIVATE.  The sharing would take place via the pointer variable
         which we remapped above.  */
      if (flags & GOVD_SHARED)
        flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
                | (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
         alloca statement we generate for the variable, so make sure it
         is available.  This isn't automatically needed for the SHARED
         case, since we won't be allocating local storage then.
         For local variables TYPE_SIZE_UNIT might not be gimplified yet,
         in this case omp_notice_variable will be called later
         on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
               && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
        omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
           && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
         size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
        {
          t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
          if (DECL_P (t))
            omp_notice_variable (ctx, t, true);
        }
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
        {
          n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
          if (n != NULL)
            {
              /* Ignore local variables and explicitly declared clauses.  */
              if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
                break;
              else if (outer_ctx->region_type == ORT_ACC_KERNELS)
                {
                  /* According to the OpenACC spec, such a reduction variable
                     should already have a copy map on a kernels construct,
                     verify that here.  */
                  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
                              && (n->value & GOVD_MAP));
                }
              else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
                {
                  /* Remove firstprivate and make it a copy map.  */
                  n->value &= ~GOVD_FIRSTPRIVATE;
                  n->value |= GOVD_MAP;
                }
            }
          else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
            {
              splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
                                 GOVD_MAP | GOVD_SEEN);
              break;
            }
          outer_ctx = outer_ctx->outer_context;
        }
    }
}
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  Always returns false (the variable is never
   remapped).  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
                                   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Diagnose use of a threadprivate variable inside any enclosing target
     region; insert a dummy entry so we only report it once per region.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0)
      {
        n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
        if (n == NULL)
          {
            error ("threadprivate variable %qE used in target region",
                   DECL_NAME (decl));
            error_at (octx->location, "enclosing target region");
            splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
          }
        if (decl2)
          splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  /* The untied-task diagnostic applies only to the innermost context.  */
  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
             DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
6846 /* Return true if global var DECL is device resident. */
6848 static bool
6849 device_resident_p (tree decl)
6851 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6853 if (!attr)
6854 return false;
6856 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6858 tree c = TREE_VALUE (t);
6859 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6860 return true;
6863 return false;
6866 /* Return true if DECL has an ACC DECLARE attribute. */
6868 static bool
6869 is_oacc_declared (tree decl)
6871 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6872 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6873 return declared != NULL_TREE;
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
                    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A language-predetermined sharing overrides the region's default.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
        const char *rtype;

        if (ctx->region_type & ORT_PARALLEL)
          rtype = "parallel";
        else if (ctx->region_type & ORT_TASK)
          rtype = "task";
        else if (ctx->region_type & ORT_TEAMS)
          rtype = "teams";
        else
          gcc_unreachable ();

        error ("%qE not specified in enclosing %qs",
               DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
        error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* After diagnosing, recover as if default(shared).  */
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
        {
          /* Consult the enclosing contexts: a variable already shared
             there stays shared here; anything else is firstprivate.  */
          omp_notice_variable (octx, decl, in_code);
          for (; octx; octx = octx->outer_context)
            {
              splay_tree_node n2;

              n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
                  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
                continue;
              if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
                {
                  flags |= GOVD_FIRSTPRIVATE;
                  goto found_outer;
                }
              if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
                {
                  flags |= GOVD_SHARED;
                  goto found_outer;
                }
            }
        }

      /* No enclosing context decided: locals and parms of the current
         function default to firstprivate, globals to shared.  */
      if (TREE_CODE (decl) == PARM_DECL
          || (!is_global_var (decl)
              && DECL_CONTEXT (decl) == current_function_decl))
        flags |= GOVD_FIRSTPRIVATE;
      else
        flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* For by-reference privatization, classify based on the referenced
     type, not the reference itself.  */
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (AGGREGATE_TYPE_P (type))
        {
          /* Aggregates default to 'present_or_copy', or 'present'.  */
          if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
            flags |= GOVD_MAP;
          else
            flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
        }
      else
        /* Scalars default to 'copy'.  */
        flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
      rkind = "parallel";

      if (on_device || declared)
        flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
        {
          /* Aggregates default to 'present_or_copy', or 'present'.  */
          if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
            flags |= GOVD_MAP;
          else
            flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
        }
      else
        /* Scalars default to 'firstprivate'.  */
        flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
         about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
             DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
        return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree value = get_base_address (DECL_VALUE_EXPR (decl));

          if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
            return omp_notice_threadprivate_variable (ctx, decl, value);
        }

      /* In an OpenACC 'routine' function, global variables need an
         explicit 'declare' directive; diagnose uses lacking one.  */
      if (gimplify_omp_ctxp->outer_context == NULL
          && VAR_P (decl)
          && oacc_get_fn_attrib (current_function_decl))
        {
          location_t loc = DECL_SOURCE_LOCATION (decl);

          if (lookup_attribute ("omp declare target link",
                                DECL_ATTRIBUTES (decl)))
            {
              error_at (loc,
                        "%qE with %<link%> clause used in %<routine%> function",
                        DECL_NAME (decl));
              return false;
            }
          else if (!lookup_attribute ("omp declare target",
                                      DECL_ATTRIBUTES (decl)))
            {
              error_at (loc,
                        "%qE requires a %<declare%> directive for use "
                        "in a %<routine%> function", DECL_NAME (decl));
              return false;
            }
        }
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
        {
          /* First sight of DECL in this target region: compute its
             implicit data-mapping flags in NFLAGS.  */
          unsigned nflags = flags;
          if (ctx->target_map_pointers_as_0len_arrays
              || ctx->target_map_scalars_firstprivate)
            {
              bool is_declare_target = false;
              bool is_scalar = false;
              if (is_global_var (decl)
                  && varpool_node::get_create (decl)->offloadable)
                {
                  /* An offloadable global not given a data-sharing class
                     in any outer context is "declare target".  */
                  struct gimplify_omp_ctx *octx;
                  for (octx = ctx->outer_context;
                       octx; octx = octx->outer_context)
                    {
                      n = splay_tree_lookup (octx->variables,
                                             (splay_tree_key)decl);
                      if (n
                          && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
                          && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
                        break;
                    }
                  is_declare_target = octx == NULL;
                }
              if (!is_declare_target && ctx->target_map_scalars_firstprivate)
                is_scalar = lang_hooks.decls.omp_scalar_p (decl);
              if (is_declare_target)
                ;
              else if (ctx->target_map_pointers_as_0len_arrays
                       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
                           || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
                               && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
                                  == POINTER_TYPE)))
                nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
              else if (is_scalar)
                nflags |= GOVD_FIRSTPRIVATE;
            }

          struct gimplify_omp_ctx *octx = ctx->outer_context;
          if ((ctx->region_type & ORT_ACC) && octx)
            {
              /* Look in outer OpenACC contexts, to see if there's a
                 data attribute for this variable.  */
              omp_notice_variable (octx, decl, in_code);

              for (; octx; octx = octx->outer_context)
                {
                  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
                    break;
                  splay_tree_node n2
                    = splay_tree_lookup (octx->variables,
                                         (splay_tree_key) decl);
                  if (n2)
                    {
                      if (octx->region_type == ORT_ACC_HOST_DATA)
                        error ("variable %qE declared in enclosing "
                               "%<host_data%> region", DECL_NAME (decl));
                      nflags |= GOVD_MAP;
                      if (octx->region_type == ORT_ACC_DATA
                          && (n2->value & GOVD_MAP_0LEN_ARRAY))
                        nflags |= GOVD_MAP_0LEN_ARRAY;
                      goto found_outer;
                    }
                }
            }

          {
            tree type = TREE_TYPE (decl);

            if (nflags == flags
                && gimplify_omp_ctxp->target_firstprivatize_array_bases
                && lang_hooks.decls.omp_privatize_by_reference (decl))
              type = TREE_TYPE (type);
            if (nflags == flags
                && !lang_hooks.types.omp_mappable_type (type))
              {
                error ("%qD referenced in target region does not have "
                       "a mappable type", decl);
                nflags |= GOVD_MAP | GOVD_EXPLICIT;
              }
            else if (nflags == flags)
              {
                if ((ctx->region_type & ORT_ACC) != 0)
                  nflags = oacc_default_clause (ctx, decl, flags);
                else
                  nflags |= GOVD_MAP;
              }
          }
        found_outer:
          omp_add_variable (ctx, decl, nflags);
        }
      else
        {
          /* If nothing changed, there's nothing left to do.  */
          if ((n->value & flags) == flags)
            return ret;
          flags |= n->value;
          n->value = flags;
        }
      goto do_outer;
    }

  if (n == NULL)
    {
      /* Regions without their own data-sharing defaults just defer
         to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
          || ctx->region_type == ORT_SIMD
          || ctx->region_type == ORT_ACC
          || (ctx->region_type & ORT_TARGET_DATA) != 0)
        goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
          && lang_hooks.decls.omp_private_outer_ref (decl))
        flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real (SEEN) use of a variable already recorded as non-local:
     also mark the auxiliary size decls as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
        {
          /* Variable-sized decl: mark the pointer replacement variable.  */
          splay_tree_node n2;
          tree t = DECL_VALUE_EXPR (decl);
          gcc_assert (TREE_CODE (t) == INDIRECT_REF);
          t = TREE_OPERAND (t, 0);
          gcc_assert (DECL_P (t));
          n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
          n2->value |= GOVD_SEEN;
        }
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
               && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
               && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
                   != INTEGER_CST))
        {
          /* By-reference privatization with a variable-sized referenced
             type: notice the size decl too.  */
          splay_tree_node n2;
          tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
          gcc_assert (DECL_P (t));
          n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
          if (n2)
            omp_notice_variable (ctx, t, true);
        }
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
                | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.
   SIMD is 0 for non-simd loops, non-zero for simd loops (the value 1 vs. 2
   selects which clause combinations are diagnosed).  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
        {
          if (ctx == gimplify_omp_ctxp)
            {
              if (simd)
                error ("iteration variable %qE is predetermined linear",
                       DECL_NAME (decl));
              else
                error ("iteration variable %qE should be private",
                       DECL_NAME (decl));
              /* Recover by forcing the variable private.  */
              n->value = GOVD_PRIVATE;
              return true;
            }
          else
            return false;
        }
      else if ((n->value & GOVD_EXPLICIT) != 0
               && (ctx == gimplify_omp_ctxp
                   || (ctx->region_type == ORT_COMBINED_PARALLEL
                       && gimplify_omp_ctxp->outer_context == ctx)))
        {
          /* Diagnose explicit clauses that conflict with the
             predetermined sharing of a loop iteration variable.  */
          if ((n->value & GOVD_FIRSTPRIVATE) != 0)
            error ("iteration variable %qE should not be firstprivate",
                   DECL_NAME (decl));
          else if ((n->value & GOVD_REDUCTION) != 0)
            error ("iteration variable %qE should not be reduction",
                   DECL_NAME (decl));
          else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
            error ("iteration variable %qE should not be linear",
                   DECL_NAME (decl));
          else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
            error ("iteration variable %qE should not be lastprivate",
                   DECL_NAME (decl));
          else if (simd && (n->value & GOVD_PRIVATE) != 0)
            error ("iteration variable %qE should not be private",
                   DECL_NAME (decl));
          else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
            error ("iteration variable %qE is predetermined linear",
                   DECL_NAME (decl));
        }
      return (ctx == gimplify_omp_ctxp
              || (ctx->region_type == ORT_COMBINED_PARALLEL
                  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* No entry here: look in the enclosing context for region types that
     don't themselves privatize.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.

   Walks outward from CTX through enclosing contexts.  COPYPRIVATE
   selects the conservative answer for by-reference variables when the
   walk falls off the outermost context (see comment below).  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  /* Ran out of contexts without finding DECL recorded anywhere.  */
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Target regions without an explicit data-sharing clause for DECL
	 are transparent for this check; keep walking outward.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  /* Recorded: private iff not marked shared here.  */
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  /* Only worksharing/simd/ACC contexts are transparent; stop once we
     reach anything else (e.g. a parallel) without a recording.  */
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);

  return false;
}
/* Callback for walk_tree to find a DECL_EXPR for the given DECL.
   DATA is the decl being searched for; returning non-NULL stops the walk.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* Found a DECL_EXPR declaring exactly the decl we are looking for:
     return it to terminate the walk.  (The old comment here about
     "unmarking visited nodes" was a stale copy-paste and did not
     describe this callback.)  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  /* A DECL_EXPR cannot occur inside a type or another decl; prune.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.

   LIST_P points to the head of the clause chain (modified in place:
   clauses may be removed, reordered, or new map clauses inserted).
   PRE_P receives statements produced by gimplifying clause operands.
   REGION_TYPE is the kind of OMP/OpenACC region being created; CODE is
   the tree code of the construct (OMP_TARGET, OACC_DATA, ...).

   Side effects: creates a new gimplify_omp_ctx and leaves it installed
   in the global gimplify_omp_ctxp on return.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type,
			   enum tree_code code)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;
  /* Maps a struct decl to its GOMP_MAP_STRUCT grouping clause, so that
     several component maps of the same struct share one group.  */
  hash_map<tree, tree> *struct_map_to_clause = NULL;
  /* Remembers the clause preceding a GOMP_MAP_ALWAYS_POINTER pair so the
     MAP case below can splice relative to it.  */
  tree *prev_list_p = NULL;

  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;
  if (code == OMP_TARGET)
    {
      if (!lang_GNU_Fortran ())
	ctx->target_map_pointers_as_0len_arrays = true;
      ctx->target_map_scalars_firstprivate = true;
    }
  if (!lang_GNU_Fortran ())
    switch (code)
      {
      case OMP_TARGET:
      case OMP_TARGET_DATA:
      case OMP_TARGET_ENTER_DATA:
      case OMP_TARGET_EXIT_DATA:
      case OACC_DECLARE:
      case OACC_HOST_DATA:
	ctx->target_firstprivatize_array_bases = true;
      default:
	break;
      }

  /* Walk the clause chain; LIST_P always points at the link to the
     current clause C so removal/insertion can be done in place.  */
  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    goto do_add;
	  /* For combined constructs, a lastprivate on the inner loop may
	     need a matching shared/lastprivate recorded on the enclosing
	     parallel/teams/task contexts; the chain of else-ifs below
	     handles each combination.  */
	  else if (outer_ctx
		   && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
		       || outer_ctx->region_type == ORT_COMBINED_TEAMS)
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL)
	    {
	      omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
	      if (outer_ctx->outer_context)
		omp_notice_variable (outer_ctx->outer_context, decl, true);
	    }
	  else if (outer_ctx
		   && (outer_ctx->region_type & ORT_TASK) != 0
		   && outer_ctx->combined_loop
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL)
	    {
	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	      if (outer_ctx->outer_context)
		omp_notice_variable (outer_ctx->outer_context, decl, true);
	    }
	  else if (outer_ctx
		   && (outer_ctx->region_type == ORT_WORKSHARE
		       || outer_ctx->region_type == ORT_ACC)
		   && outer_ctx->combined_loop
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL
		   && !omp_check_private (outer_ctx, decl, false))
	    {
	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	      if (outer_ctx->outer_context
		  && (outer_ctx->outer_context->region_type
		      == ORT_COMBINED_PARALLEL)
		  && splay_tree_lookup (outer_ctx->outer_context->variables,
					(splay_tree_key) decl) == NULL)
		{
		  struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
		  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
		  if (octx->outer_context)
		    {
		      octx = octx->outer_context;
		      if (octx->region_type == ORT_WORKSHARE
			  && octx->combined_loop
			  && splay_tree_lookup (octx->variables,
						(splay_tree_key) decl) == NULL
			  && !omp_check_private (octx, decl, false))
			{
			  omp_add_variable (octx, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  octx = octx->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_TEAMS
			      && (splay_tree_lookup (octx->variables,
						     (splay_tree_key) decl)
				  == NULL))
			    {
			      omp_add_variable (octx, decl,
						GOVD_SHARED | GOVD_SEEN);
			      octx = octx->outer_context;
			    }
			}
		      if (octx)
			omp_notice_variable (octx, decl, true);
		    }
		}
	      else if (outer_ctx->outer_context)
		omp_notice_variable (outer_ctx->outer_context, decl, true);
	    }
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  /* OpenACC permits reductions on private variables.  */
	  if (!(region_type & ORT_ACC))
	    check_non_private = "reduction";
	  decl = OMP_CLAUSE_DECL (c);
	  /* An array-section reduction is represented as a MEM_REF;
	     gimplify the section bounds and peel down to the base decl.  */
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (decl);
	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
				 NULL, is_gimple_val, fb_rvalue, false)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      if (DECL_P (v))
		{
		  omp_firstprivatize_variable (ctx, v);
		  omp_notice_variable (ctx, v, true);
		}
	      decl = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
		{
		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
				     NULL, is_gimple_val, fb_rvalue, false)
		      == GS_ERROR)
		    {
		      remove = true;
		      break;
		    }
		  v = TREE_OPERAND (decl, 1);
		  if (DECL_P (v))
		    {
		      omp_firstprivatize_variable (ctx, v);
		      omp_notice_variable (ctx, v, true);
		    }
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == ADDR_EXPR
		  || TREE_CODE (decl) == INDIRECT_REF)
		decl = TREE_OPERAND (decl, 0);
	    }
	  goto do_add_decl;
	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else
	    {
	      if (code == OMP_SIMD
		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		{
		  struct gimplify_omp_ctx *octx = outer_ctx;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && !octx->distribute)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context->outer_context;
		      else
			octx = octx->outer_context;
		    }
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && octx->distribute)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<linear%> clause for variable other than "
				"loop iterator specified on construct "
				"combined with %<distribute%>");
		      remove = true;
		      break;
		    }
		}
	      /* For combined #pragma omp parallel for simd, need to put
		 lastprivate and perhaps firstprivate too on the
		 parallel.  Similarly for #pragma omp for simd.  */
	      struct gimplify_omp_ctx *octx = outer_ctx;
	      decl = NULL_TREE;
	      do
		{
		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    break;
		  decl = OMP_CLAUSE_DECL (c);
		  if (error_operand_p (decl))
		    {
		      decl = NULL_TREE;
		      break;
		    }
		  flags = GOVD_SEEN;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    flags |= GOVD_FIRSTPRIVATE;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    flags |= GOVD_LASTPRIVATE;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context;
		      else if (omp_check_private (octx, decl, false))
			break;
		    }
		  else if (octx
			   && (octx->region_type & ORT_TASK) != 0
			   && octx->combined_loop)
		    ;	/* Taskloop: keep FLAGS as computed above.  */
		  else if (octx
			   && octx->region_type == ORT_COMBINED_PARALLEL
			   && ctx->region_type == ORT_WORKSHARE
			   && octx == outer_ctx)
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TEAMS)
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TARGET)
		    {
		      flags &= ~GOVD_LASTPRIVATE;
		      if (flags == GOVD_SEEN)
			break;
		    }
		  else
		    break;
		  splay_tree_node on
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
		    {
		      octx = NULL;
		      break;
		    }
		  omp_add_variable (octx, decl, flags);
		  if (octx->outer_context == NULL)
		    break;
		  octx = octx->outer_context;
		}
	      while (1);
	      if (octx
		  && decl
		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		omp_notice_variable (octx, decl, true);
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  goto do_add;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    remove = true;
	  switch (code)
	    {
	    case OMP_TARGET:
	      break;
	    case OACC_DATA:
	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
		break;
	      /* FALLTHRU */
	    case OMP_TARGET_DATA:
	    case OMP_TARGET_ENTER_DATA:
	    case OMP_TARGET_EXIT_DATA:
	    case OACC_ENTER_DATA:
	    case OACC_EXIT_DATA:
	    case OACC_HOST_DATA:
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		/* For target {,enter ,exit }data only the array slice is
		   mapped, but not the pointer to it.  */
		remove = true;
	      break;
	    default:
	      break;
	    }
	  if (remove)
	    break;
	  /* On OpenACC, diagnose mapping a variable that an enclosing
	     host_data region already exposed.  */
	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
	    {
	      struct gimplify_omp_ctx *octx;
	      for (octx = outer_ctx; octx; octx = octx->outer_context)
		{
		  if (octx->region_type != ORT_ACC_HOST_DATA)
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
			      "declared in enclosing %<host_data%> region",
			      DECL_NAME (decl));
		}
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
	    {
	      /* Non-constant bias: evaluate it once into a temporary and
		 firstprivatize that temporary.  */
	      OMP_CLAUSE_SIZE (c)
		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
					   false);
	      omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
				GOVD_FIRSTPRIVATE | GOVD_SEEN);
	    }
	  if (!DECL_P (decl))
	    {
	      /* The mapped operand is a reference expression (array
		 section, component access, ...): peel it down to the
		 underlying decl and, for struct components, group sibling
		 component maps under one GOMP_MAP_STRUCT clause.  */
	      tree d = decl, *pd;
	      if (TREE_CODE (d) == ARRAY_REF)
		{
		  while (TREE_CODE (d) == ARRAY_REF)
		    d = TREE_OPERAND (d, 0);
		  if (TREE_CODE (d) == COMPONENT_REF
		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
		    decl = d;
		}
	      pd = &OMP_CLAUSE_DECL (c);
	      if (d == decl
		  && TREE_CODE (decl) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
		      == REFERENCE_TYPE))
		{
		  pd = &TREE_OPERAND (decl, 0);
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == COMPONENT_REF)
		{
		  while (TREE_CODE (decl) == COMPONENT_REF)
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && DECL_P (TREE_OPERAND (decl, 0))
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		}
	      if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      if (DECL_P (decl))
		{
		  if (error_operand_p (decl))
		    {
		      remove = true;
		      break;
		    }

		  tree stype = TREE_TYPE (decl);
		  if (TREE_CODE (stype) == REFERENCE_TYPE)
		    stype = TREE_TYPE (stype);
		  if (TYPE_SIZE_UNIT (stype) == NULL
		      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"mapping field %qE of variable length "
				"structure", OMP_CLAUSE_DECL (c));
		      remove = true;
		      break;
		    }

		  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
		    {
		      /* Error recovery.  */
		      if (prev_list_p == NULL)
			{
			  remove = true;
			  break;
			}
		      if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
			{
			  tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
			  if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
			    {
			      remove = true;
			      break;
			    }
			}
		    }

		  /* Compute the byte offset of this component within the
		     struct so the group's components can be kept sorted.  */
		  tree offset;
		  HOST_WIDE_INT bitsize, bitpos;
		  machine_mode mode;
		  int unsignedp, reversep, volatilep = 0;
		  tree base = OMP_CLAUSE_DECL (c);
		  while (TREE_CODE (base) == ARRAY_REF)
		    base = TREE_OPERAND (base, 0);
		  if (TREE_CODE (base) == INDIRECT_REF)
		    base = TREE_OPERAND (base, 0);
		  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
					      &mode, &unsignedp, &reversep,
					      &volatilep);
		  tree orig_base = base;
		  if ((TREE_CODE (base) == INDIRECT_REF
		       || (TREE_CODE (base) == MEM_REF
			   && integer_zerop (TREE_OPERAND (base, 1))))
		      && DECL_P (TREE_OPERAND (base, 0))
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
			  == REFERENCE_TYPE))
		    base = TREE_OPERAND (base, 0);
		  gcc_assert (base == decl
			      && (offset == NULL_TREE
				  || TREE_CODE (offset) == INTEGER_CST));

		  splay_tree_node n
		    = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
		  bool ptr = (OMP_CLAUSE_MAP_KIND (c)
			      == GOMP_MAP_ALWAYS_POINTER);
		  if (n == NULL || (n->value & GOVD_MAP) == 0)
		    {
		      /* First component of this struct seen: create the
			 GOMP_MAP_STRUCT grouping clause L.  */
		      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						 OMP_CLAUSE_MAP);
		      OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
		      if (orig_base != base)
			OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
		      else
			OMP_CLAUSE_DECL (l) = decl;
		      OMP_CLAUSE_SIZE (l) = size_int (1);
		      if (struct_map_to_clause == NULL)
			struct_map_to_clause = new hash_map<tree, tree>;
		      struct_map_to_clause->put (decl, l);
		      if (ptr)
			{
			  enum gomp_map_kind mkind
			    = code == OMP_TARGET_EXIT_DATA
			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						      OMP_CLAUSE_MAP);
			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
			  OMP_CLAUSE_DECL (c2)
			    = unshare_expr (OMP_CLAUSE_DECL (c));
			  OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
			  OMP_CLAUSE_SIZE (c2)
			    = TYPE_SIZE_UNIT (ptr_type_node);
			  OMP_CLAUSE_CHAIN (l) = c2;
			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
			    {
			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
			      tree c3
				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
						    OMP_CLAUSE_MAP);
			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
			      OMP_CLAUSE_DECL (c3)
				= unshare_expr (OMP_CLAUSE_DECL (c4));
			      OMP_CLAUSE_SIZE (c3)
				= TYPE_SIZE_UNIT (ptr_type_node);
			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
			      OMP_CLAUSE_CHAIN (c2) = c3;
			    }
			  *prev_list_p = l;
			  prev_list_p = NULL;
			}
		      else
			{
			  OMP_CLAUSE_CHAIN (l) = c;
			  *list_p = l;
			  list_p = &OMP_CLAUSE_CHAIN (l);
			}
		      if (orig_base != base && code == OMP_TARGET)
			{
			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						      OMP_CLAUSE_MAP);
			  enum gomp_map_kind mkind
			    = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
			  OMP_CLAUSE_DECL (c2) = decl;
			  OMP_CLAUSE_SIZE (c2) = size_zero_node;
			  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
			  OMP_CLAUSE_CHAIN (l) = c2;
			}
		      flags = GOVD_MAP | GOVD_EXPLICIT;
		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
			flags |= GOVD_SEEN;
		      goto do_add_decl;
		    }
		  else
		    {
		      /* Struct already has a GOMP_MAP_STRUCT group: insert
			 this component into the group at the position given
			 by its byte offset, diagnosing duplicates.  */
		      tree *osc = struct_map_to_clause->get (decl);
		      tree *sc = NULL, *scp = NULL;
		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
			n->value |= GOVD_SEEN;
		      offset_int o1, o2;
		      if (offset)
			o1 = wi::to_offset (offset);
		      else
			o1 = 0;
		      if (bitpos)
			o1 = o1 + bitpos / BITS_PER_UNIT;
		      sc = &OMP_CLAUSE_CHAIN (*osc);
		      if (*sc != c
			  && (OMP_CLAUSE_MAP_KIND (*sc)
			      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
			sc = &OMP_CLAUSE_CHAIN (*sc);
		      for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
			if (ptr && sc == prev_list_p)
			  break;
			else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				 != COMPONENT_REF
				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				     != INDIRECT_REF)
				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				     != ARRAY_REF))
			  break;
			else
			  {
			    tree offset2;
			    HOST_WIDE_INT bitsize2, bitpos2;
			    base = OMP_CLAUSE_DECL (*sc);
			    if (TREE_CODE (base) == ARRAY_REF)
			      {
				while (TREE_CODE (base) == ARRAY_REF)
				  base = TREE_OPERAND (base, 0);
				if (TREE_CODE (base) != COMPONENT_REF
				    || (TREE_CODE (TREE_TYPE (base))
					!= ARRAY_TYPE))
				  break;
			      }
			    else if (TREE_CODE (base) == INDIRECT_REF
				     && (TREE_CODE (TREE_OPERAND (base, 0))
					 == COMPONENT_REF)
				     && (TREE_CODE (TREE_TYPE
						      (TREE_OPERAND (base, 0)))
					 == REFERENCE_TYPE))
			      base = TREE_OPERAND (base, 0);
			    base = get_inner_reference (base, &bitsize2,
							&bitpos2, &offset2,
							&mode, &unsignedp,
							&reversep, &volatilep);
			    if ((TREE_CODE (base) == INDIRECT_REF
				 || (TREE_CODE (base) == MEM_REF
				     && integer_zerop (TREE_OPERAND (base,
								     1))))
				&& DECL_P (TREE_OPERAND (base, 0))
				&& (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
									0)))
				    == REFERENCE_TYPE))
			      base = TREE_OPERAND (base, 0);
			    if (base != decl)
			      break;
			    if (scp)
			      continue;
			    gcc_assert (offset == NULL_TREE
					|| TREE_CODE (offset) == INTEGER_CST);
			    tree d1 = OMP_CLAUSE_DECL (*sc);
			    tree d2 = OMP_CLAUSE_DECL (c);
			    while (TREE_CODE (d1) == ARRAY_REF)
			      d1 = TREE_OPERAND (d1, 0);
			    while (TREE_CODE (d2) == ARRAY_REF)
			      d2 = TREE_OPERAND (d2, 0);
			    if (TREE_CODE (d1) == INDIRECT_REF)
			      d1 = TREE_OPERAND (d1, 0);
			    if (TREE_CODE (d2) == INDIRECT_REF)
			      d2 = TREE_OPERAND (d2, 0);
			    while (TREE_CODE (d1) == COMPONENT_REF)
			      if (TREE_CODE (d2) == COMPONENT_REF
				  && TREE_OPERAND (d1, 1)
				     == TREE_OPERAND (d2, 1))
				{
				  d1 = TREE_OPERAND (d1, 0);
				  d2 = TREE_OPERAND (d2, 0);
				}
			      else
				break;
			    if (d1 == d2)
			      {
				error_at (OMP_CLAUSE_LOCATION (c),
					  "%qE appears more than once in map "
					  "clauses", OMP_CLAUSE_DECL (c));
				remove = true;
				break;
			      }
			    if (offset2)
			      o2 = wi::to_offset (offset2);
			    else
			      o2 = 0;
			    if (bitpos2)
			      o2 = o2 + bitpos2 / BITS_PER_UNIT;
			    if (wi::ltu_p (o1, o2)
				|| (wi::eq_p (o1, o2) && bitpos < bitpos2))
			      {
				if (ptr)
				  scp = sc;
				else
				  break;
			      }
			  }
		      if (remove)
			break;
		      /* Account this component in the group's size.  */
		      OMP_CLAUSE_SIZE (*osc)
			= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
				      size_one_node);
		      if (ptr)
			{
			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						      OMP_CLAUSE_MAP);
			  tree cl = NULL_TREE;
			  enum gomp_map_kind mkind
			    = code == OMP_TARGET_EXIT_DATA
			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
			  OMP_CLAUSE_DECL (c2)
			    = unshare_expr (OMP_CLAUSE_DECL (c));
			  OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
			  OMP_CLAUSE_SIZE (c2)
			    = TYPE_SIZE_UNIT (ptr_type_node);
			  cl = scp ? *prev_list_p : c2;
			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
			    {
			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
			      tree c3
				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
						    OMP_CLAUSE_MAP);
			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
			      OMP_CLAUSE_DECL (c3)
				= unshare_expr (OMP_CLAUSE_DECL (c4));
			      OMP_CLAUSE_SIZE (c3)
				= TYPE_SIZE_UNIT (ptr_type_node);
			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
			      if (!scp)
				OMP_CLAUSE_CHAIN (c2) = c3;
			      else
				cl = c3;
			    }
			  if (scp)
			    *scp = c2;
			  if (sc == prev_list_p)
			    {
			      *sc = cl;
			      prev_list_p = NULL;
			    }
			  else
			    {
			      *prev_list_p = OMP_CLAUSE_CHAIN (c);
			      list_p = prev_list_p;
			      prev_list_p = NULL;
			      OMP_CLAUSE_CHAIN (c) = *sc;
			      *sc = cl;
			      continue;
			    }
			}
		      else if (*sc != c)
			{
			  /* Move C to its sorted position inside the
			     group.  */
			  *list_p = OMP_CLAUSE_CHAIN (c);
			  OMP_CLAUSE_CHAIN (c) = *sc;
			  *sc = c;
			  continue;
			}
		    }
		}
	      if (!remove
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
		  && OMP_CLAUSE_CHAIN (c)
		  && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
		  && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
		      == GOMP_MAP_ALWAYS_POINTER))
		prev_list_p = list_p;
	      break;
	    }
	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
	    flags |= GOVD_MAP_ALWAYS_TO;
	  goto do_add;

	case OMP_CLAUSE_DEPEND:
	  if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree deps = OMP_CLAUSE_DECL (c);
	      while (deps && TREE_CODE (deps) == TREE_LIST)
		{
		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
				   pre_p, NULL, is_gimple_val, fb_rvalue);
		  deps = TREE_CHAIN (deps);
		}
	      break;
	    }
	  else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	    break;
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  /* The runtime wants the address of the dependence object.  */
	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  goto do_notice;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;

	/* Shared tail: record DECL with FLAGS in the new context and
	   gimplify any clause sub-statements (reduction init/merge,
	   lastprivate/linear statements).  */
	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	do_add_decl:
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
	    {
	      tree t = omp_member_access_dummy_var (decl);
	      if (t)
		{
		  tree v = DECL_VALUE_EXPR (decl);
		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
		  if (outer_ctx)
		    omp_notice_variable (outer_ctx, t, true);
		}
	    }
	  if (code == OACC_DATA
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	    flags |= GOVD_MAP_0LEN_ARRAY;
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_decl_expr,
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				NULL) == NULL_TREE)
		omp_add_variable (ctx,
				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
	      && !remove
	      && !omp_check_private (ctx, decl, true))
	    {
	      remove = true;
	      /* Threadprivate globals (directly or via a value-expr whose
		 base is thread-local) are still valid for copyprivate.  */
	      if (is_global_var (decl))
		{
		  if (DECL_THREAD_LOCAL_P (decl))
		    remove = false;
		  else if (DECL_HAS_VALUE_EXPR_P (decl))
		    {
		      tree value = get_base_address (DECL_VALUE_EXPR (decl));

		      if (value
			  && DECL_P (value)
			  && DECL_THREAD_LOCAL_P (value))
			remove = false;
		    }
		}
	      if (remove)
		error_at (OMP_CLAUSE_LOCATION (c),
			  "copyprivate variable %qE is not threadprivate"
			  " or private in outer context", DECL_NAME (decl));
	    }
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  || decl == OMP_CLAUSE_DECL (c)
		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			  == ADDR_EXPR
			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			      == POINTER_PLUS_EXPR
			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
						(OMP_CLAUSE_DECL (c), 0), 0))
				  == ADDR_EXPR)))))
	      && omp_check_private (ctx, decl, false))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_IF:
	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
	    {
	      const char *p[2];
	      for (int i = 0; i < 2; i++)
		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
		  {
		  case OMP_PARALLEL: p[i] = "parallel"; break;
		  case OMP_TASK: p[i] = "task"; break;
		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
		  case OMP_TARGET_DATA: p[i] = "target data"; break;
		  case OMP_TARGET: p[i] = "target"; break;
		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
		  case OMP_TARGET_ENTER_DATA:
		    p[i] = "target enter data"; break;
		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
		  default: gcc_unreachable ();
		  }
	      error_at (OMP_CLAUSE_LOCATION (c),
			"expected %qs %<if%> clause modifier rather than %qs",
			p[0], p[1]);
	      remove = true;
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_FINAL:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	/* Clauses whose single operand is just gimplified to a value.  */
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_GANG:
	  /* GANG has two operands (num and static); gimplify both.  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	/* Clauses that need no processing here.  */
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	  break;

	case OMP_CLAUSE_DEFAULTMAP:
	  ctx->target_map_scalars_firstprivate = false;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (code == OACC_DATA
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	remove = true;
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Leave the new context installed for the construct body's
     gimplification; the caller is responsible for popping it.  */
  gimplify_omp_ctxp = ctx;
  if (struct_map_to_clause)
    delete struct_map_to_clause;
}
8509 /* Return true if DECL is a candidate for shared to firstprivate
8510 optimization. We only consider non-addressable scalars, not
8511 too big, and not references. */
8513 static bool
8514 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8516 if (TREE_ADDRESSABLE (decl))
8517 return false;
8518 tree type = TREE_TYPE (decl);
8519 if (!is_gimple_reg_type (type)
8520 || TREE_CODE (type) == REFERENCE_TYPE
8521 || TREE_ADDRESSABLE (type))
8522 return false;
8523 /* Don't optimize too large decls, as each thread/task will have
8524 its own. */
8525 HOST_WIDE_INT len = int_size_in_bytes (type);
8526 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8527 return false;
8528 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8529 return false;
8530 return true;
8533 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8534 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8535 GOVD_WRITTEN in outer contexts. */
8537 static void
8538 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8540 for (; ctx; ctx = ctx->outer_context)
8542 splay_tree_node n = splay_tree_lookup (ctx->variables,
8543 (splay_tree_key) decl);
8544 if (n == NULL)
8545 continue;
8546 else if (n->value & GOVD_SHARED)
8548 n->value |= GOVD_WRITTEN;
8549 return;
8551 else if (n->value & GOVD_DATA_SHARE_CLASS)
8552 return;
8556 /* Helper callback for walk_gimple_seq to discover possible stores
8557 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8558 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8559 for those. */
8561 static tree
8562 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8564 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8566 *walk_subtrees = 0;
8567 if (!wi->is_lhs)
8568 return NULL_TREE;
8570 tree op = *tp;
8573 if (handled_component_p (op))
8574 op = TREE_OPERAND (op, 0);
8575 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8576 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8577 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8578 else
8579 break;
8581 while (1);
8582 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8583 return NULL_TREE;
8585 omp_mark_stores (gimplify_omp_ctxp, op);
8586 return NULL_TREE;
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context
   for those.  */

static tree
omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
		      bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  switch (gimple_code (stmt))
    {
    /* Don't recurse on OpenMP constructs for which
       gimplify_adjust_omp_clauses already handled the bodies,
       except handle gimple_omp_for_pre_body.  */
    case GIMPLE_OMP_FOR:
      *handled_ops_p = true;
      /* The pre-body executes in the enclosing context, so stores in it
	 still matter for the outer region being analyzed.  */
      if (gimple_omp_for_pre_body (stmt))
	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
			 omp_find_stores_stmt, omp_find_stores_op, wi);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_CRITICAL:
      /* Bodies of these regions were (or will be) processed when their
	 own clauses are adjusted; skip them here.  */
      *handled_ops_p = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Argument bundle passed through splay_tree_foreach to
   gimplify_adjust_omp_clauses_1.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause list where implicit clauses are prepended.  */
  tree *list_p;
  /* Sequence receiving statements emitted while gimplifying
     clause operands.  */
  gimple_seq *pre_p;
};
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  Splay-tree
   callback: N is the (decl, GOVD_* flags) entry, DATA is a
   gimplify_adjust_omp_clauses_data.  Synthesizes an implicit clause
   for DECL and prepends it to *list_p.  Always returns 0 so the
   walk continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses and context-local decls need no implicit clause;
     likewise decls never actually referenced in the region.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Pick the clause code matching the recorded data-sharing flags.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      /* _Atomic objects cannot be implicitly mapped on OpenMP targets.  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global only needs an explicit SHARED clause when some outer
	     context privatizes/maps it; otherwise it is shared anyway.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      /* _Atomic objects cannot be implicitly firstprivatized on target.  */
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Possibly zero-length array section: emit an alloc map of the
	 dereferenced pointer plus a firstprivate-pointer clause.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the enclosing context, not this one.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the underlying storage through its
	     DECL_VALUE_EXPR and add a pointer (or firstprivate-pointer)
	     clause for the decl itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map the referenced object and add a
	     firstprivate-reference clause for the reference itself.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  /* Gimplify the size in the enclosing context.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Seen both firstprivate and lastprivate: add the companion
	 lastprivate clause marked as firstprivate too.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the language finalize the clause in the enclosing context and
     notice any size decls it introduced on map clauses.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
/* Post-process the clause list *LIST_P of the construct CODE after its
   BODY has been gimplified: prune clauses for unused variables, adjust
   map/size representations, then append implicit data-sharing clauses
   computed in the current gimplify context.  Pops and deletes that
   context on exit.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  if (body)
    {
      /* If inside a parallel/task/teams region, scan the body for stores
	 to shared-to-firstprivate candidates (sets GOVD_WRITTEN).  */
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
					(TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop clauses for variables never referenced in the body.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to present_or_copy",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      /* Non-decl map operand (array section / member access).  */
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: map the storage via DECL_VALUE_EXPR.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: refer to the storage through its
		 DECL_VALUE_EXPR, as for map clauses above.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Error if the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			  "reduction on %qE", DECL_NAME (decl));
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
		  /* omp_finish_clause may have appended further map
		     clauses; mark them all as in-reduction.  */
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  /* These clauses need no adjustment here.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
9263 /* Gimplify OACC_CACHE. */
9265 static void
9266 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9268 tree expr = *expr_p;
9270 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9271 OACC_CACHE);
9272 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9273 OACC_CACHE);
9275 /* TODO: Do something sensible with this information. */
9277 *expr_p = NULL_TREE;
/* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
   if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
   kind.  The entry kind will replace the one in CLAUSE, while the exit
   kind will be used in a new omp_clause and returned to the caller.
   Returns NULL_TREE when no exit-time action is required.  */

static tree
gimplify_oacc_declare_1 (tree clause)
{
  HOST_WIDE_INT kind, new_op;
  bool ret = false;
  tree c = NULL;

  kind = OMP_CLAUSE_MAP_KIND (clause);

  switch (kind)
    {
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_FORCE_ALLOC:
    case GOMP_MAP_FORCE_TO:
      /* Data only allocated/copied in: delete it on exit.  */
      new_op = GOMP_MAP_DELETE;
      ret = true;
      break;

    case GOMP_MAP_FORCE_FROM:
      /* Entry allocates; exit performs the forced copy-out.  */
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
      new_op = GOMP_MAP_FORCE_FROM;
      ret = true;
      break;

    case GOMP_MAP_FORCE_TOFROM:
      /* Entry copies in (forced); exit copies out (forced).  */
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
      new_op = GOMP_MAP_FORCE_FROM;
      ret = true;
      break;

    case GOMP_MAP_FROM:
      /* Entry allocates (forced); exit copies out.  */
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_TOFROM:
      /* Entry copies in; exit copies out.  */
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_TO:
      /* No exit-time counterpart needed.  */
      break;

    default:
      gcc_unreachable ();
      break;
    }

  if (ret)
    {
      /* Build the exit clause mirroring CLAUSE's decl.  */
      c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, new_op);
      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
    }

  return c;
}
/* Gimplify OACC_DECLARE.  Emits a GIMPLE_OMP_TARGET of kind
   OACC_DECLARE carrying the adjusted clauses, and records exit-time
   clauses for function-local variables in oacc_declare_returns.  */

static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      /* Look through MEM_REFs to the underlying decl.  */
      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      /* Mark the variable as an OpenACC declare target (once).  */
      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      /* For function-local variables, compute the matching exit-time
	 clause and remember it for emission at function return.  */
      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
9404 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9405 gimplification of the body, as well as scanning the body for used
9406 variables. We need to do this scan now, because variable-sized
9407 decls will be decomposed during gimplification. */
9409 static void
9410 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9412 tree expr = *expr_p;
9413 gimple *g;
9414 gimple_seq body = NULL;
9416 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9417 OMP_PARALLEL_COMBINED (expr)
9418 ? ORT_COMBINED_PARALLEL
9419 : ORT_PARALLEL, OMP_PARALLEL);
9421 push_gimplify_context ();
9423 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9424 if (gimple_code (g) == GIMPLE_BIND)
9425 pop_gimplify_context (g);
9426 else
9427 pop_gimplify_context (NULL);
9429 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9430 OMP_PARALLEL);
9432 g = gimple_build_omp_parallel (body,
9433 OMP_PARALLEL_CLAUSES (expr),
9434 NULL_TREE, NULL_TREE);
9435 if (OMP_PARALLEL_COMBINED (expr))
9436 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9437 gimplify_seq_add_stmt (pre_p, g);
9438 *expr_p = NULL_TREE;
9441 /* Gimplify the contents of an OMP_TASK statement. This involves
9442 gimplification of the body, as well as scanning the body for used
9443 variables. We need to do this scan now, because variable-sized
9444 decls will be decomposed during gimplification. */
9446 static void
9447 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9449 tree expr = *expr_p;
9450 gimple *g;
9451 gimple_seq body = NULL;
9453 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9454 omp_find_clause (OMP_TASK_CLAUSES (expr),
9455 OMP_CLAUSE_UNTIED)
9456 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9458 push_gimplify_context ();
9460 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9461 if (gimple_code (g) == GIMPLE_BIND)
9462 pop_gimplify_context (g);
9463 else
9464 pop_gimplify_context (NULL);
9466 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9467 OMP_TASK);
9469 g = gimple_build_omp_task (body,
9470 OMP_TASK_CLAUSES (expr),
9471 NULL_TREE, NULL_TREE,
9472 NULL_TREE, NULL_TREE, NULL_TREE);
9473 gimplify_seq_add_stmt (pre_p, g);
9474 *expr_p = NULL_TREE;
9477 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9478 with non-NULL OMP_FOR_INIT. */
9480 static tree
9481 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9483 *walk_subtrees = 0;
9484 switch (TREE_CODE (*tp))
9486 case OMP_FOR:
9487 *walk_subtrees = 1;
9488 /* FALLTHRU */
9489 case OMP_SIMD:
9490 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9491 return *tp;
9492 break;
9493 case BIND_EXPR:
9494 case STATEMENT_LIST:
9495 case OMP_PARALLEL:
9496 *walk_subtrees = 1;
9497 break;
9498 default:
9499 break;
9501 return NULL_TREE;
9504 /* Gimplify the gross structure of an OMP_FOR statement. */
/* Handles every construct that shares the OMP_FOR tree layout:
   OMP_FOR, OMP_SIMD, CILK_FOR, CILK_SIMD, OMP_DISTRIBUTE, OMP_TASKLOOP
   and OACC_LOOP (see the TREE_CODE switches below).  Appends the
   gimplified statements to *PRE_P, clears *EXPR_P, and returns
   GS_ALL_DONE on success or GS_ERROR if any sub-gimplification
   failed.  */
9506 static enum gimplify_status
9507 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9509 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9510 enum gimplify_status ret = GS_ALL_DONE;
9511 enum gimplify_status tret;
9512 gomp_for *gfor;
9513 gimple_seq for_body, for_pre_body;
9514 int i;
9515 bitmap has_decl_expr = NULL;
9516 enum omp_region_type ort = ORT_WORKSHARE;
9518 orig_for_stmt = for_stmt = *expr_p;
/* Choose the omp region type from the construct kind.  */
9520 switch (TREE_CODE (for_stmt))
9522 case OMP_FOR:
9523 case CILK_FOR:
9524 case OMP_DISTRIBUTE:
9525 break;
9526 case OACC_LOOP:
9527 ort = ORT_ACC;
9528 break;
9529 case OMP_TASKLOOP:
9530 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9531 ort = ORT_UNTIED_TASK;
9532 else
9533 ort = ORT_TASK;
9534 break;
9535 case OMP_SIMD:
9536 case CILK_SIMD:
9537 ort = ORT_SIMD;
9538 break;
9539 default:
9540 gcc_unreachable ();
9543 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9544 clause for the IV. */
9545 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9547 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9548 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9549 decl = TREE_OPERAND (t, 0);
9550 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9551 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9552 && OMP_CLAUSE_DECL (c) == decl)
9554 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9555 break;
/* A NULL OMP_FOR_INIT means this node is only the outer half of a
   combined construct; dig out the inner OMP_FOR/OMP_SIMD that carries
   the real init/cond/incr vectors.  */
9559 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9561 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9562 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9563 find_combined_omp_for, NULL, NULL);
9564 if (inner_for_stmt == NULL_TREE)
9566 gcc_assert (seen_error ());
9567 *expr_p = NULL_TREE;
9568 return GS_ERROR;
9572 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9573 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9574 TREE_CODE (for_stmt));
9576 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9577 gimplify_omp_ctxp->distribute = true;
9579 /* Handle OMP_FOR_INIT. */
9580 for_pre_body = NULL;
9581 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9583 has_decl_expr = BITMAP_ALLOC (NULL);
9584 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9585 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9586 == VAR_DECL)
9588 t = OMP_FOR_PRE_BODY (for_stmt);
9589 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9591 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9593 tree_stmt_iterator si;
9594 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9595 tsi_next (&si))
9597 t = tsi_stmt (si);
9598 if (TREE_CODE (t) == DECL_EXPR
9599 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9600 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9604 if (OMP_FOR_PRE_BODY (for_stmt))
9606 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9607 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9608 else
9610 struct gimplify_omp_ctx ctx;
9611 memset (&ctx, 0, sizeof (ctx));
9612 ctx.region_type = ORT_NONE;
9613 gimplify_omp_ctxp = &ctx;
9614 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9615 gimplify_omp_ctxp = NULL;
9618 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9620 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9621 for_stmt = inner_for_stmt;
9623 /* For taskloop, need to gimplify the start, end and step before the
9624 taskloop, outside of the taskloop omp context. */
9625 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9627 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9629 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9630 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9632 TREE_OPERAND (t, 1)
9633 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9634 pre_p, NULL, false);
9635 tree c = build_omp_clause (input_location,
9636 OMP_CLAUSE_FIRSTPRIVATE);
9637 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9638 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9639 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9642 /* Handle OMP_FOR_COND. */
9643 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9644 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9646 TREE_OPERAND (t, 1)
9647 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9648 gimple_seq_empty_p (for_pre_body)
9649 ? pre_p : &for_pre_body, NULL,
9650 false);
9651 tree c = build_omp_clause (input_location,
9652 OMP_CLAUSE_FIRSTPRIVATE);
9653 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9654 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9655 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9658 /* Handle OMP_FOR_INCR. */
9659 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9660 if (TREE_CODE (t) == MODIFY_EXPR)
9662 decl = TREE_OPERAND (t, 0);
9663 t = TREE_OPERAND (t, 1);
9664 tree *tp = &TREE_OPERAND (t, 1);
9665 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9666 tp = &TREE_OPERAND (t, 0);
9668 if (!is_gimple_constant (*tp))
9670 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9671 ? pre_p : &for_pre_body;
9672 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9673 tree c = build_omp_clause (input_location,
9674 OMP_CLAUSE_FIRSTPRIVATE);
9675 OMP_CLAUSE_DECL (c) = *tp;
9676 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9677 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9682 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9683 OMP_TASKLOOP);
9686 if (orig_for_stmt != for_stmt)
9687 gimplify_omp_ctxp->combined_loop = true;
9689 for_body = NULL;
9690 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9691 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9692 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9693 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
/* An ordered clause with an expression marks a doacross loop; record
   each iteration variable (original decl plus decl) for later use.  */
9695 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9696 bool is_doacross = false;
9697 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9699 is_doacross = true;
9700 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9701 (OMP_FOR_INIT (for_stmt))
9702 * 2);
9704 int collapse = 1, tile = 0;
9705 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9706 if (c)
9707 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9708 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9709 if (c)
9710 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
/* Main per-dimension loop: privatize each iteration variable and
   gimplify its init, cond and incr expressions.  */
9711 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9713 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9714 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9715 decl = TREE_OPERAND (t, 0);
9716 gcc_assert (DECL_P (decl));
9717 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9718 || POINTER_TYPE_P (TREE_TYPE (decl)));
9719 if (is_doacross)
9721 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9722 gimplify_omp_ctxp->loop_iter_var.quick_push
9723 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9724 else
9725 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9726 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9729 /* Make sure the iteration variable is private. */
9730 tree c = NULL_TREE;
9731 tree c2 = NULL_TREE;
9732 if (orig_for_stmt != for_stmt)
9733 /* Do this only on innermost construct for combined ones. */;
9734 else if (ort == ORT_SIMD)
9736 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9737 (splay_tree_key) decl);
9738 omp_is_private (gimplify_omp_ctxp, decl,
9739 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9740 != 1));
9741 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9742 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9743 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9745 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9746 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9747 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9748 if (has_decl_expr
9749 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9751 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9752 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9754 struct gimplify_omp_ctx *outer
9755 = gimplify_omp_ctxp->outer_context;
9756 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9758 if (outer->region_type == ORT_WORKSHARE
9759 && outer->combined_loop)
9761 n = splay_tree_lookup (outer->variables,
9762 (splay_tree_key)decl);
9763 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9765 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9766 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9768 else
9770 struct gimplify_omp_ctx *octx = outer->outer_context;
9771 if (octx
9772 && octx->region_type == ORT_COMBINED_PARALLEL
9773 && octx->outer_context
9774 && (octx->outer_context->region_type
9775 == ORT_WORKSHARE)
9776 && octx->outer_context->combined_loop)
9778 octx = octx->outer_context;
9779 n = splay_tree_lookup (octx->variables,
9780 (splay_tree_key)decl);
9781 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9783 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9784 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9791 OMP_CLAUSE_DECL (c) = decl;
9792 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9793 OMP_FOR_CLAUSES (for_stmt) = c;
9794 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9795 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9797 if (outer->region_type == ORT_WORKSHARE
9798 && outer->combined_loop)
9800 if (outer->outer_context
9801 && (outer->outer_context->region_type
9802 == ORT_COMBINED_PARALLEL))
9803 outer = outer->outer_context;
9804 else if (omp_check_private (outer, decl, false))
9805 outer = NULL;
9807 else if (((outer->region_type & ORT_TASK) != 0)
9808 && outer->combined_loop
9809 && !omp_check_private (gimplify_omp_ctxp,
9810 decl, false))
9812 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9814 omp_notice_variable (outer, decl, true);
9815 outer = NULL;
9817 if (outer)
9819 n = splay_tree_lookup (outer->variables,
9820 (splay_tree_key)decl);
9821 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9823 omp_add_variable (outer, decl,
9824 GOVD_LASTPRIVATE | GOVD_SEEN);
9825 if (outer->region_type == ORT_COMBINED_PARALLEL
9826 && outer->outer_context
9827 && (outer->outer_context->region_type
9828 == ORT_WORKSHARE)
9829 && outer->outer_context->combined_loop)
9831 outer = outer->outer_context;
9832 n = splay_tree_lookup (outer->variables,
9833 (splay_tree_key)decl);
9834 if (omp_check_private (outer, decl, false))
9835 outer = NULL;
9836 else if (n == NULL
9837 || ((n->value & GOVD_DATA_SHARE_CLASS)
9838 == 0))
9839 omp_add_variable (outer, decl,
9840 GOVD_LASTPRIVATE
9841 | GOVD_SEEN);
9842 else
9843 outer = NULL;
9845 if (outer && outer->outer_context
9846 && (outer->outer_context->region_type
9847 == ORT_COMBINED_TEAMS))
9849 outer = outer->outer_context;
9850 n = splay_tree_lookup (outer->variables,
9851 (splay_tree_key)decl);
9852 if (n == NULL
9853 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9854 omp_add_variable (outer, decl,
9855 GOVD_SHARED | GOVD_SEEN);
9856 else
9857 outer = NULL;
9859 if (outer && outer->outer_context)
9860 omp_notice_variable (outer->outer_context, decl,
9861 true);
9866 else
/* Non-simd case: the IV becomes lastprivate unless the pre-body
   declared it (then plain private suffices).  */
9868 bool lastprivate
9869 = (!has_decl_expr
9870 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9871 struct gimplify_omp_ctx *outer
9872 = gimplify_omp_ctxp->outer_context;
9873 if (outer && lastprivate)
9875 if (outer->region_type == ORT_WORKSHARE
9876 && outer->combined_loop)
9878 n = splay_tree_lookup (outer->variables,
9879 (splay_tree_key)decl);
9880 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9882 lastprivate = false;
9883 outer = NULL;
9885 else if (outer->outer_context
9886 && (outer->outer_context->region_type
9887 == ORT_COMBINED_PARALLEL))
9888 outer = outer->outer_context;
9889 else if (omp_check_private (outer, decl, false))
9890 outer = NULL;
9892 else if (((outer->region_type & ORT_TASK) != 0)
9893 && outer->combined_loop
9894 && !omp_check_private (gimplify_omp_ctxp,
9895 decl, false))
9897 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9899 omp_notice_variable (outer, decl, true);
9900 outer = NULL;
9902 if (outer)
9904 n = splay_tree_lookup (outer->variables,
9905 (splay_tree_key)decl);
9906 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9908 omp_add_variable (outer, decl,
9909 GOVD_LASTPRIVATE | GOVD_SEEN);
9910 if (outer->region_type == ORT_COMBINED_PARALLEL
9911 && outer->outer_context
9912 && (outer->outer_context->region_type
9913 == ORT_WORKSHARE)
9914 && outer->outer_context->combined_loop)
9916 outer = outer->outer_context;
9917 n = splay_tree_lookup (outer->variables,
9918 (splay_tree_key)decl);
9919 if (omp_check_private (outer, decl, false))
9920 outer = NULL;
9921 else if (n == NULL
9922 || ((n->value & GOVD_DATA_SHARE_CLASS)
9923 == 0))
9924 omp_add_variable (outer, decl,
9925 GOVD_LASTPRIVATE
9926 | GOVD_SEEN);
9927 else
9928 outer = NULL;
9930 if (outer && outer->outer_context
9931 && (outer->outer_context->region_type
9932 == ORT_COMBINED_TEAMS))
9934 outer = outer->outer_context;
9935 n = splay_tree_lookup (outer->variables,
9936 (splay_tree_key)decl);
9937 if (n == NULL
9938 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9939 omp_add_variable (outer, decl,
9940 GOVD_SHARED | GOVD_SEEN);
9941 else
9942 outer = NULL;
9944 if (outer && outer->outer_context)
9945 omp_notice_variable (outer->outer_context, decl,
9946 true);
9951 c = build_omp_clause (input_location,
9952 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9953 : OMP_CLAUSE_PRIVATE);
9954 OMP_CLAUSE_DECL (c) = decl;
9955 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9956 OMP_FOR_CLAUSES (for_stmt) = c;
9957 omp_add_variable (gimplify_omp_ctxp, decl,
9958 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9959 | GOVD_EXPLICIT | GOVD_SEEN);
9960 c = NULL_TREE;
9963 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9964 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9965 else
9966 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9968 /* If DECL is not a gimple register, create a temporary variable to act
9969 as an iteration counter. This is valid, since DECL cannot be
9970 modified in the body of the loop. Similarly for any iteration vars
9971 in simd with collapse > 1 where the iterator vars must be
9972 lastprivate. */
9973 if (orig_for_stmt != for_stmt)
9974 var = decl;
9975 else if (!is_gimple_reg (decl)
9976 || (ort == ORT_SIMD
9977 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9979 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9980 /* Make sure omp_add_variable is not called on it prematurely.
9981 We call it ourselves a few lines later. */
9982 gimplify_omp_ctxp = NULL;
9983 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9984 gimplify_omp_ctxp = ctx;
9985 TREE_OPERAND (t, 0) = var;
9987 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9989 if (ort == ORT_SIMD
9990 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9992 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9993 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9994 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9995 OMP_CLAUSE_DECL (c2) = var;
9996 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9997 OMP_FOR_CLAUSES (for_stmt) = c2;
9998 omp_add_variable (gimplify_omp_ctxp, var,
9999 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10000 if (c == NULL_TREE)
10002 c = c2;
10003 c2 = NULL_TREE;
10006 else
10007 omp_add_variable (gimplify_omp_ctxp, var,
10008 GOVD_PRIVATE | GOVD_SEEN);
10010 else
10011 var = decl;
10013 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10014 is_gimple_val, fb_rvalue, false);
10015 ret = MIN (ret, tret);
10016 if (ret == GS_ERROR)
10017 return ret;
10019 /* Handle OMP_FOR_COND. */
10020 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10021 gcc_assert (COMPARISON_CLASS_P (t));
10022 gcc_assert (TREE_OPERAND (t, 0) == decl);
10024 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10025 is_gimple_val, fb_rvalue, false);
10026 ret = MIN (ret, tret);
10028 /* Handle OMP_FOR_INCR. */
10029 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10030 switch (TREE_CODE (t))
10032 case PREINCREMENT_EXPR:
10033 case POSTINCREMENT_EXPR:
10035 tree decl = TREE_OPERAND (t, 0);
10036 /* c_omp_for_incr_canonicalize_ptr() should have been
10037 called to massage things appropriately. */
10038 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10040 if (orig_for_stmt != for_stmt)
10041 break;
10042 t = build_int_cst (TREE_TYPE (decl), 1);
10043 if (c)
10044 OMP_CLAUSE_LINEAR_STEP (c) = t;
10045 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10046 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10047 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10048 break;
10051 case PREDECREMENT_EXPR:
10052 case POSTDECREMENT_EXPR:
10053 /* c_omp_for_incr_canonicalize_ptr() should have been
10054 called to massage things appropriately. */
10055 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10056 if (orig_for_stmt != for_stmt)
10057 break;
10058 t = build_int_cst (TREE_TYPE (decl), -1);
10059 if (c)
10060 OMP_CLAUSE_LINEAR_STEP (c) = t;
10061 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10062 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10063 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10064 break;
10066 case MODIFY_EXPR:
10067 gcc_assert (TREE_OPERAND (t, 0) == decl);
10068 TREE_OPERAND (t, 0) = var;
10070 t = TREE_OPERAND (t, 1);
10071 switch (TREE_CODE (t))
10073 case PLUS_EXPR:
10074 if (TREE_OPERAND (t, 1) == decl)
10076 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10077 TREE_OPERAND (t, 0) = var;
10078 break;
10081 /* Fallthru. */
10082 case MINUS_EXPR:
10083 case POINTER_PLUS_EXPR:
10084 gcc_assert (TREE_OPERAND (t, 0) == decl);
10085 TREE_OPERAND (t, 0) = var;
10086 break;
10087 default:
10088 gcc_unreachable ();
10091 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10092 is_gimple_val, fb_rvalue, false);
10093 ret = MIN (ret, tret);
10094 if (c)
10096 tree step = TREE_OPERAND (t, 1);
10097 tree stept = TREE_TYPE (decl);
10098 if (POINTER_TYPE_P (stept))
10099 stept = sizetype;
10100 step = fold_convert (stept, step);
10101 if (TREE_CODE (t) == MINUS_EXPR)
10102 step = fold_build1 (NEGATE_EXPR, stept, step);
10103 OMP_CLAUSE_LINEAR_STEP (c) = step;
10104 if (step != TREE_OPERAND (t, 1))
10106 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10107 &for_pre_body, NULL,
10108 is_gimple_val, fb_rvalue, false);
10109 ret = MIN (ret, tret);
10112 break;
10114 default:
10115 gcc_unreachable ();
10118 if (c2)
10120 gcc_assert (c);
10121 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10124 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10126 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10127 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10128 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10129 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10130 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10131 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10132 && OMP_CLAUSE_DECL (c) == decl)
10134 if (is_doacross && (collapse == 1 || i >= collapse))
10135 t = var;
10136 else
10138 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10139 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10140 gcc_assert (TREE_OPERAND (t, 0) == var);
10141 t = TREE_OPERAND (t, 1);
10142 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10143 || TREE_CODE (t) == MINUS_EXPR
10144 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10145 gcc_assert (TREE_OPERAND (t, 0) == var);
10146 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10147 is_doacross ? var : decl,
10148 TREE_OPERAND (t, 1));
10150 gimple_seq *seq;
10151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10152 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10153 else
10154 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10155 gimplify_assign (decl, t, seq);
10160 BITMAP_FREE (has_decl_expr);
/* For taskloop the body must sit in its own gimplify context; wrap it
   in a BIND_EXPR so its temporaries get a scope.  */
10162 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10164 push_gimplify_context ();
10165 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10167 OMP_FOR_BODY (orig_for_stmt)
10168 = build3 (BIND_EXPR, void_type_node, NULL,
10169 OMP_FOR_BODY (orig_for_stmt), NULL);
10170 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10174 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10175 &for_body);
10177 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10179 if (gimple_code (g) == GIMPLE_BIND)
10180 pop_gimplify_context (g);
10181 else
10182 pop_gimplify_context (NULL);
10185 if (orig_for_stmt != for_stmt)
10186 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10188 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10189 decl = TREE_OPERAND (t, 0);
10190 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10191 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10192 gimplify_omp_ctxp = ctx->outer_context;
10193 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10194 gimplify_omp_ctxp = ctx;
10195 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10196 TREE_OPERAND (t, 0) = var;
10197 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10198 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10199 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10202 gimplify_adjust_omp_clauses (pre_p, for_body,
10203 &OMP_FOR_CLAUSES (orig_for_stmt),
10204 TREE_CODE (orig_for_stmt));
/* Translate the tree-level construct code into the GIMPLE_OMP_FOR
   kind and build the statement itself.  */
10206 int kind;
10207 switch (TREE_CODE (orig_for_stmt))
10209 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10210 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10211 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10212 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10213 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10214 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10215 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10216 default:
10217 gcc_unreachable ();
10219 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10220 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10221 for_pre_body);
10222 if (orig_for_stmt != for_stmt)
10223 gimple_omp_for_set_combined_p (gfor, true);
10224 if (gimplify_omp_ctxp
10225 && (gimplify_omp_ctxp->combined_loop
10226 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10227 && gimplify_omp_ctxp->outer_context
10228 && gimplify_omp_ctxp->outer_context->combined_loop)))
10230 gimple_omp_for_set_combined_into_p (gfor, true);
10231 if (gimplify_omp_ctxp->combined_loop)
10232 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10233 else
10234 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10237 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10239 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10240 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10241 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10242 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10243 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10244 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10245 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10246 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10249 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10250 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10251 The outer taskloop stands for computing the number of iterations,
10252 counts for collapsed loops and holding taskloop specific clauses.
10253 The task construct stands for the effect of data sharing on the
10254 explicit task it creates and the inner taskloop stands for expansion
10255 of the static loop inside of the explicit task construct. */
10256 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10258 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10259 tree task_clauses = NULL_TREE;
10260 tree c = *gfor_clauses_ptr;
10261 tree *gtask_clauses_ptr = &task_clauses;
10262 tree outer_for_clauses = NULL_TREE;
10263 tree *gforo_clauses_ptr = &outer_for_clauses;
/* Distribute each clause to the inner taskloop, the task, the outer
   taskloop, or some combination thereof.  */
10264 for (; c; c = OMP_CLAUSE_CHAIN (c))
10265 switch (OMP_CLAUSE_CODE (c))
10267 /* These clauses are allowed on task, move them there. */
10268 case OMP_CLAUSE_SHARED:
10269 case OMP_CLAUSE_FIRSTPRIVATE:
10270 case OMP_CLAUSE_DEFAULT:
10271 case OMP_CLAUSE_IF:
10272 case OMP_CLAUSE_UNTIED:
10273 case OMP_CLAUSE_FINAL:
10274 case OMP_CLAUSE_MERGEABLE:
10275 case OMP_CLAUSE_PRIORITY:
10276 *gtask_clauses_ptr = c;
10277 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10278 break;
10279 case OMP_CLAUSE_PRIVATE:
10280 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10282 /* We want private on outer for and firstprivate
10283 on task. */
10284 *gtask_clauses_ptr
10285 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10286 OMP_CLAUSE_FIRSTPRIVATE);
10287 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10288 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10289 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10290 *gforo_clauses_ptr = c;
10291 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10293 else
10295 *gtask_clauses_ptr = c;
10296 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10298 break;
10299 /* These clauses go into outer taskloop clauses. */
10300 case OMP_CLAUSE_GRAINSIZE:
10301 case OMP_CLAUSE_NUM_TASKS:
10302 case OMP_CLAUSE_NOGROUP:
10303 *gforo_clauses_ptr = c;
10304 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10305 break;
10306 /* Taskloop clause we duplicate on both taskloops. */
10307 case OMP_CLAUSE_COLLAPSE:
10308 *gfor_clauses_ptr = c;
10309 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10310 *gforo_clauses_ptr = copy_node (c);
10311 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10312 break;
10313 /* For lastprivate, keep the clause on inner taskloop, and add
10314 a shared clause on task. If the same decl is also firstprivate,
10315 add also firstprivate clause on the inner taskloop. */
10316 case OMP_CLAUSE_LASTPRIVATE:
10317 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10319 /* For taskloop C++ lastprivate IVs, we want:
10320 1) private on outer taskloop
10321 2) firstprivate and shared on task
10322 3) lastprivate on inner taskloop */
10323 *gtask_clauses_ptr
10324 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10325 OMP_CLAUSE_FIRSTPRIVATE);
10326 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10327 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10328 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10329 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10330 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10331 OMP_CLAUSE_PRIVATE);
10332 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10333 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10334 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10335 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10337 *gfor_clauses_ptr = c;
10338 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10339 *gtask_clauses_ptr
10340 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10341 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10342 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10343 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10344 gtask_clauses_ptr
10345 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10346 break;
10347 default:
10348 gcc_unreachable ();
10350 *gfor_clauses_ptr = NULL_TREE;
10351 *gtask_clauses_ptr = NULL_TREE;
10352 *gforo_clauses_ptr = NULL_TREE;
10353 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10354 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10355 NULL_TREE, NULL_TREE, NULL_TREE);
10356 gimple_omp_task_set_taskloop_p (g, true);
10357 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10358 gomp_for *gforo
10359 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10360 gimple_omp_for_collapse (gfor),
10361 gimple_omp_for_pre_body (gfor));
10362 gimple_omp_for_set_pre_body (gfor, NULL);
10363 gimple_omp_for_set_combined_p (gforo, true);
10364 gimple_omp_for_set_combined_into_p (gfor, true);
/* Give the outer taskloop fresh private iteration variables that
   mirror the inner loop's index/init/cond/incr per dimension.  */
10365 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10367 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10368 tree v = create_tmp_var (type);
10369 gimple_omp_for_set_index (gforo, i, v);
10370 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10371 gimple_omp_for_set_initial (gforo, i, t);
10372 gimple_omp_for_set_cond (gforo, i,
10373 gimple_omp_for_cond (gfor, i));
10374 t = unshare_expr (gimple_omp_for_final (gfor, i));
10375 gimple_omp_for_set_final (gforo, i, t);
10376 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10377 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10378 TREE_OPERAND (t, 0) = v;
10379 gimple_omp_for_set_incr (gforo, i, t);
10380 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10381 OMP_CLAUSE_DECL (t) = v;
10382 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10383 gimple_omp_for_set_clauses (gforo, t);
10385 gimplify_seq_add_stmt (pre_p, gforo);
10387 else
10388 gimplify_seq_add_stmt (pre_p, gfor);
/* Any sub-gimplification failure degrades the whole construct.  */
10389 if (ret != GS_ALL_DONE)
10390 return GS_ERROR;
10391 *expr_p = NULL_TREE;
10392 return GS_ALL_DONE;
10395 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10396 of OMP_TARGET's body. */
10398 static tree
10399 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10401 *walk_subtrees = 0;
10402 switch (TREE_CODE (*tp))
10404 case OMP_TEAMS:
10405 return *tp;
10406 case BIND_EXPR:
10407 case STATEMENT_LIST:
10408 *walk_subtrees = 1;
10409 break;
10410 default:
10411 break;
10413 return NULL_TREE;
10416 /* Helper function of optimize_target_teams, determine if the expression
10417 can be computed safely before the target construct on the host. */
/* walk_tree callback: returns the offending subtree (a nonzero return
   stops the walk) when *TP cannot be evaluated on the host before
   entering the target region, or NULL_TREE when this node is
   acceptable.  */
10419 static tree
10420 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10422 splay_tree_node n;
10424 if (TYPE_P (*tp))
10426 *walk_subtrees = 0;
10427 return NULL_TREE;
10429 switch (TREE_CODE (*tp))
10431 case VAR_DECL:
10432 case PARM_DECL:
10433 case RESULT_DECL:
10434 *walk_subtrees = 0;
/* Only simple integral decls qualify; reject anything erroneous,
   with a value-expr, thread-local, volatile or side-effecting.  */
10435 if (error_operand_p (*tp)
10436 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10437 || DECL_HAS_VALUE_EXPR_P (*tp)
10438 || DECL_THREAD_LOCAL_P (*tp)
10439 || TREE_SIDE_EFFECTS (*tp)
10440 || TREE_THIS_VOLATILE (*tp))
10441 return *tp;
/* "omp declare target" globals live on the device side.  */
10442 if (is_global_var (*tp)
10443 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10444 || lookup_attribute ("omp declare target link",
10445 DECL_ATTRIBUTES (*tp))))
10446 return *tp;
10447 if (VAR_P (*tp)
10448 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10449 && !is_global_var (*tp)
10450 && decl_function_context (*tp) == current_function_decl)
10451 return *tp;
10452 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10453 (splay_tree_key) *tp);
10454 if (n == NULL)
/* Decl not recorded in the target context: OK only when scalars
   are implicitly firstprivatized on the target.  */
10456 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10457 return NULL_TREE;
10458 return *tp;
10460 else if (n->value & GOVD_LOCAL)
10461 return *tp;
10462 else if (n->value & GOVD_FIRSTPRIVATE)
10463 return NULL_TREE;
10464 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10465 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10466 return NULL_TREE;
10467 return *tp;
10468 case INTEGER_CST:
10469 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10470 return *tp;
10471 return NULL_TREE;
10472 case TARGET_EXPR:
/* Only a bare TARGET_EXPR slot (no initializer) is acceptable;
   check the slot variable itself recursively.  */
10473 if (TARGET_EXPR_INITIAL (*tp)
10474 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10475 return *tp;
10476 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10477 walk_subtrees, NULL);
10478 /* Allow some reasonable subset of integral arithmetics. */
10479 case PLUS_EXPR:
10480 case MINUS_EXPR:
10481 case MULT_EXPR:
10482 case TRUNC_DIV_EXPR:
10483 case CEIL_DIV_EXPR:
10484 case FLOOR_DIV_EXPR:
10485 case ROUND_DIV_EXPR:
10486 case TRUNC_MOD_EXPR:
10487 case CEIL_MOD_EXPR:
10488 case FLOOR_MOD_EXPR:
10489 case ROUND_MOD_EXPR:
10490 case RDIV_EXPR:
10491 case EXACT_DIV_EXPR:
10492 case MIN_EXPR:
10493 case MAX_EXPR:
10494 case LSHIFT_EXPR:
10495 case RSHIFT_EXPR:
10496 case BIT_IOR_EXPR:
10497 case BIT_XOR_EXPR:
10498 case BIT_AND_EXPR:
10499 case NEGATE_EXPR:
10500 case ABS_EXPR:
10501 case BIT_NOT_EXPR:
10502 case NON_LVALUE_EXPR:
10503 CASE_CONVERT:
10504 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10505 return *tp;
10506 return NULL_TREE;
10507 /* And disallow anything else, except for comparisons. */
10508 default:
10509 if (COMPARISON_CLASS_P (*tp))
10510 return NULL_TREE;
10511 return *tp;
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
   the target construct.
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined.  */

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  /* Look for a teams construct nested inside the target body.  */
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 == clause not specified at all (implementation default).  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct at all: exactly one team.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of num_teams/thread_limit this clause
	   provides; skip unrelated clauses.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	/* Constants can always be evaluated before entering the region.  */
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	/* computable_teams_clause returns non-NULL on the first subtree
	   that prevents pre-evaluation; in that case record -1
	   (value can't be determined before entering the construct).  */
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context *outside* the target
	   region, so the evaluation happens before entering it; restore
	   the target context afterwards on every path.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* Reuse the gimplified value on the teams clause itself, except
	   for DECLs/TARGET_EXPRs.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Attach the computed values as clauses on the target construct.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
/* Gimplify the gross structure of several OMP constructs: sections,
   single, target, target data, teams, and the OpenACC kernels,
   parallel, data and host_data constructs.  *EXPR_P is the construct;
   the resulting GIMPLE statement is appended to PRE_P and *EXPR_P is
   cleared.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code to the OMP region type used for clause
     scanning.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  /* For target regions, try to pre-compute num_teams/thread_limit.  */
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
    {
      /* Target and target-data bodies get their own gimplification
	 context.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Data regions need an end-of-region runtime call; wrap the
	     body in a try/finally so it runs on all exit paths.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));

  /* Build the GIMPLE statement for the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
10719 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10720 target update constructs. */
10722 static void
10723 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10725 tree expr = *expr_p;
10726 int kind;
10727 gomp_target *stmt;
10728 enum omp_region_type ort = ORT_WORKSHARE;
10730 switch (TREE_CODE (expr))
10732 case OACC_ENTER_DATA:
10733 case OACC_EXIT_DATA:
10734 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10735 ort = ORT_ACC;
10736 break;
10737 case OACC_UPDATE:
10738 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10739 ort = ORT_ACC;
10740 break;
10741 case OMP_TARGET_UPDATE:
10742 kind = GF_OMP_TARGET_KIND_UPDATE;
10743 break;
10744 case OMP_TARGET_ENTER_DATA:
10745 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10746 break;
10747 case OMP_TARGET_EXIT_DATA:
10748 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10749 break;
10750 default:
10751 gcc_unreachable ();
10753 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10754 ort, TREE_CODE (expr));
10755 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10756 TREE_CODE (expr));
10757 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10759 gimplify_seq_add_stmt (pre_p, stmt);
10760 *expr_p = NULL_TREE;
10763 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10764 stabilized the lhs of the atomic operation as *ADDR. Return true if
10765 EXPR is this stabilized form. */
10767 static bool
10768 goa_lhs_expr_p (tree expr, tree addr)
10770 /* Also include casts to other type variants. The C front end is fond
10771 of adding these for e.g. volatile variables. This is like
10772 STRIP_TYPE_NOPS but includes the main variant lookup. */
10773 STRIP_USELESS_TYPE_CONVERSION (expr);
10775 if (TREE_CODE (expr) == INDIRECT_REF)
10777 expr = TREE_OPERAND (expr, 0);
10778 while (expr != addr
10779 && (CONVERT_EXPR_P (expr)
10780 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10781 && TREE_CODE (expr) == TREE_CODE (addr)
10782 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10784 expr = TREE_OPERAND (expr, 0);
10785 addr = TREE_OPERAND (addr, 0);
10787 if (expr == addr)
10788 return true;
10789 return (TREE_CODE (addr) == ADDR_EXPR
10790 && TREE_CODE (expr) == ADDR_EXPR
10791 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10793 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10794 return true;
10795 return false;
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the stabilized lhs: replace it.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Already a gimple value: nothing to do.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into the operand positions where the lhs could appear;
     anything else below falls through to be evaluated into a
     temporary.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var);
      break;
    default:
      break;
    }

  /* The lhs is not involved: evaluate the expression into a temporary
     so it is computed outside the atomic operation.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
/* Gimplify an OMP_ATOMIC statement.  Builds a GIMPLE_OMP_ATOMIC_LOAD /
   GIMPLE_OMP_ATOMIC_STORE pair in PRE_P; for the capture forms, *EXPR_P
   is replaced by the captured value, otherwise it is cleared.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* An atomic read has no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD receives the loaded value; replace lhs appearances in the
     rhs with it and pre-evaluate everything else.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* Emit the load first; the rhs must be gimplified after it so uses of
     TMP_LOAD are dominated by the load.  */
  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* For a plain read, the "stored" value is just the loaded one.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      /* Propagate seq_cst memory ordering to both halves.  */
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* Capture forms yield a value: the old value comes from the load,
     the new value from the store.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
10932 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10933 body, and adding some EH bits. */
10935 static enum gimplify_status
10936 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10938 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10939 gimple *body_stmt;
10940 gtransaction *trans_stmt;
10941 gimple_seq body = NULL;
10942 int subcode = 0;
10944 /* Wrap the transaction body in a BIND_EXPR so we have a context
10945 where to put decls for OMP. */
10946 if (TREE_CODE (tbody) != BIND_EXPR)
10948 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10949 TREE_SIDE_EFFECTS (bind) = 1;
10950 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10951 TRANSACTION_EXPR_BODY (expr) = bind;
10954 push_gimplify_context ();
10955 temp = voidify_wrapper_expr (*expr_p, NULL);
10957 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10958 pop_gimplify_context (body_stmt);
10960 trans_stmt = gimple_build_transaction (body);
10961 if (TRANSACTION_EXPR_OUTER (expr))
10962 subcode = GTMA_IS_OUTER;
10963 else if (TRANSACTION_EXPR_RELAXED (expr))
10964 subcode = GTMA_IS_RELAXED;
10965 gimple_transaction_set_subcode (trans_stmt, subcode);
10967 gimplify_seq_add_stmt (pre_p, trans_stmt);
10969 if (temp)
10971 *expr_p = temp;
10972 return GS_OK;
10975 *expr_p = NULL_TREE;
10976 return GS_ALL_DONE;
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      /* Validate any depend(sink:)/depend(source) clauses against the
	 iteration variables recorded on the enclosing ordered loop.  */
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    /* depend clauses require an enclosing ordered(n) loop, whose
	       iteration variables would have been recorded in
	       loop_iter_var.  */
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* loop_iter_var holds pairs: [2*i] is the source-level
	       iteration variable, [2*i + 1] its replacement.  Check each
	       sink variable against position I and substitute the
	       replacement on success.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    /* I now counts the sink variables; it must match the number
	       of recorded iteration variables exactly.  */
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend(sink)%> "
			  "clause does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) clause is allowed.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend(source)%> clause on an "
			  "%<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* source and sink clauses are mutually exclusive on one construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }
  /* On any diagnosed failure, emit a no-op instead of the construct.  */
  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
11068 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11069 expression produces a value to be used as an operand inside a GIMPLE
11070 statement, the value will be stored back in *EXPR_P. This value will
11071 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11072 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11073 emitted in PRE_P and POST_P.
11075 Additionally, this process may overwrite parts of the input
11076 expression during gimplification. Ideally, it should be
11077 possible to do non-destructive gimplification.
11079 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11080 the expression needs to evaluate to a value to be used as
11081 an operand in a GIMPLE statement, this value will be stored in
11082 *EXPR_P on exit. This happens when the caller specifies one
11083 of fb_lvalue or fb_rvalue fallback flags.
11085 PRE_P will contain the sequence of GIMPLE statements corresponding
11086 to the evaluation of EXPR and all the side-effects that must
11087 be executed before the main expression. On exit, the last
11088 statement of PRE_P is the core statement being gimplified. For
11089 instance, when gimplifying 'if (++a)' the last statement in
11090 PRE_P will be 'if (t.1)' where t.1 is the result of
11091 pre-incrementing 'a'.
11093 POST_P will contain the sequence of GIMPLE statements corresponding
11094 to the evaluation of all the side-effects that must be executed
11095 after the main expression. If this is NULL, the post
11096 side-effects are stored at the end of PRE_P.
11098 The reason why the output is split in two is to handle post
11099 side-effects explicitly. In some cases, an expression may have
11100 inner and outer post side-effects which need to be emitted in
11101 an order different from the one given by the recursive
11102 traversal. For instance, for the expression (*p--)++ the post
11103 side-effects of '--' must actually occur *after* the post
11104 side-effects of '++'. However, gimplification will first visit
11105 the inner expression, so if a separate POST sequence was not
11106 used, the resulting sequence would be:
11108 1 t.1 = *p
11109 2 p = p - 1
11110 3 t.2 = t.1 + 1
11111 4 *p = t.2
11113 However, the post-decrement operation in line #2 must not be
11114 evaluated until after the store to *p at line #4, so the
11115 correct sequence should be:
11117 1 t.1 = *p
11118 2 t.2 = t.1 + 1
11119 3 *p = t.2
11120 4 p = p - 1
11122 So, by specifying a separate post queue, it is possible
11123 to emit the post side-effects in the correct order.
11124 If POST_P is NULL, an internal queue will be used. Before
11125 returning to the caller, the sequence POST_P is appended to
11126 the main output sequence PRE_P.
11128 GIMPLE_TEST_F points to a function that takes a tree T and
11129 returns nonzero if T is in the GIMPLE form requested by the
11130 caller. The GIMPLE predicates are in gimple.c.
11132 FALLBACK tells the function what sort of a temporary we want if
11133 gimplification cannot produce an expression that complies with
11134 GIMPLE_TEST_F.
11136 fb_none means that no temporary should be generated
11137 fb_rvalue means that an rvalue is OK to generate
11138 fb_lvalue means that an lvalue is OK to generate
11139 fb_either means that either is OK, but an lvalue is preferable.
11140 fb_mayfail means that gimplification may fail (in which case
11141 GS_ERROR will be returned)
11143 The return value is either GS_ERROR or GS_ALL_DONE, since this
11144 function iterates until EXPR is completely gimplified or an error
11145 occurs. */
11147 enum gimplify_status
11148 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11149 bool (*gimple_test_f) (tree), fallback_t fallback)
11151 tree tmp;
11152 gimple_seq internal_pre = NULL;
11153 gimple_seq internal_post = NULL;
11154 tree save_expr;
11155 bool is_statement;
11156 location_t saved_location;
11157 enum gimplify_status ret;
11158 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11159 tree label;
11161 save_expr = *expr_p;
11162 if (save_expr == NULL_TREE)
11163 return GS_ALL_DONE;
11165 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11166 is_statement = gimple_test_f == is_gimple_stmt;
11167 if (is_statement)
11168 gcc_assert (pre_p);
11170 /* Consistency checks. */
11171 if (gimple_test_f == is_gimple_reg)
11172 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11173 else if (gimple_test_f == is_gimple_val
11174 || gimple_test_f == is_gimple_call_addr
11175 || gimple_test_f == is_gimple_condexpr
11176 || gimple_test_f == is_gimple_mem_rhs
11177 || gimple_test_f == is_gimple_mem_rhs_or_call
11178 || gimple_test_f == is_gimple_reg_rhs
11179 || gimple_test_f == is_gimple_reg_rhs_or_call
11180 || gimple_test_f == is_gimple_asm_val
11181 || gimple_test_f == is_gimple_mem_ref_addr)
11182 gcc_assert (fallback & fb_rvalue);
11183 else if (gimple_test_f == is_gimple_min_lval
11184 || gimple_test_f == is_gimple_lvalue)
11185 gcc_assert (fallback & fb_lvalue);
11186 else if (gimple_test_f == is_gimple_addressable)
11187 gcc_assert (fallback & fb_either);
11188 else if (gimple_test_f == is_gimple_stmt)
11189 gcc_assert (fallback == fb_none);
11190 else
11192 /* We should have recognized the GIMPLE_TEST_F predicate to
11193 know what kind of fallback to use in case a temporary is
11194 needed to hold the value or address of *EXPR_P. */
11195 gcc_unreachable ();
11198 /* We used to check the predicate here and return immediately if it
11199 succeeds. This is wrong; the design is for gimplification to be
11200 idempotent, and for the predicates to only test for valid forms, not
11201 whether they are fully simplified. */
11202 if (pre_p == NULL)
11203 pre_p = &internal_pre;
11205 if (post_p == NULL)
11206 post_p = &internal_post;
11208 /* Remember the last statements added to PRE_P and POST_P. Every
11209 new statement added by the gimplification helpers needs to be
11210 annotated with location information. To centralize the
11211 responsibility, we remember the last statement that had been
11212 added to both queues before gimplifying *EXPR_P. If
11213 gimplification produces new statements in PRE_P and POST_P, those
11214 statements will be annotated with the same location information
11215 as *EXPR_P. */
11216 pre_last_gsi = gsi_last (*pre_p);
11217 post_last_gsi = gsi_last (*post_p);
11219 saved_location = input_location;
11220 if (save_expr != error_mark_node
11221 && EXPR_HAS_LOCATION (*expr_p))
11222 input_location = EXPR_LOCATION (*expr_p);
11224 /* Loop over the specific gimplifiers until the toplevel node
11225 remains the same. */
11228 /* Strip away as many useless type conversions as possible
11229 at the toplevel. */
11230 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11232 /* Remember the expr. */
11233 save_expr = *expr_p;
11235 /* Die, die, die, my darling. */
11236 if (save_expr == error_mark_node
11237 || (TREE_TYPE (save_expr)
11238 && TREE_TYPE (save_expr) == error_mark_node))
11240 ret = GS_ERROR;
11241 break;
11244 /* Do any language-specific gimplification. */
11245 ret = ((enum gimplify_status)
11246 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11247 if (ret == GS_OK)
11249 if (*expr_p == NULL_TREE)
11250 break;
11251 if (*expr_p != save_expr)
11252 continue;
11254 else if (ret != GS_UNHANDLED)
11255 break;
11257 /* Make sure that all the cases set 'ret' appropriately. */
11258 ret = GS_UNHANDLED;
11259 switch (TREE_CODE (*expr_p))
11261 /* First deal with the special cases. */
11263 case POSTINCREMENT_EXPR:
11264 case POSTDECREMENT_EXPR:
11265 case PREINCREMENT_EXPR:
11266 case PREDECREMENT_EXPR:
11267 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11268 fallback != fb_none,
11269 TREE_TYPE (*expr_p));
11270 break;
11272 case VIEW_CONVERT_EXPR:
11273 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11274 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11276 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11277 post_p, is_gimple_val, fb_rvalue);
11278 recalculate_side_effects (*expr_p);
11279 break;
11281 /* Fallthru. */
11283 case ARRAY_REF:
11284 case ARRAY_RANGE_REF:
11285 case REALPART_EXPR:
11286 case IMAGPART_EXPR:
11287 case COMPONENT_REF:
11288 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11289 fallback ? fallback : fb_rvalue);
11290 break;
11292 case COND_EXPR:
11293 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11295 /* C99 code may assign to an array in a structure value of a
11296 conditional expression, and this has undefined behavior
11297 only on execution, so create a temporary if an lvalue is
11298 required. */
11299 if (fallback == fb_lvalue)
11301 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11302 mark_addressable (*expr_p);
11303 ret = GS_OK;
11305 break;
11307 case CALL_EXPR:
11308 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11310 /* C99 code may assign to an array in a structure returned
11311 from a function, and this has undefined behavior only on
11312 execution, so create a temporary if an lvalue is
11313 required. */
11314 if (fallback == fb_lvalue)
11316 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11317 mark_addressable (*expr_p);
11318 ret = GS_OK;
11320 break;
11322 case TREE_LIST:
11323 gcc_unreachable ();
11325 case COMPOUND_EXPR:
11326 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11327 break;
11329 case COMPOUND_LITERAL_EXPR:
11330 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11331 gimple_test_f, fallback);
11332 break;
11334 case MODIFY_EXPR:
11335 case INIT_EXPR:
11336 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11337 fallback != fb_none);
11338 break;
11340 case TRUTH_ANDIF_EXPR:
11341 case TRUTH_ORIF_EXPR:
11343 /* Preserve the original type of the expression and the
11344 source location of the outer expression. */
11345 tree org_type = TREE_TYPE (*expr_p);
11346 *expr_p = gimple_boolify (*expr_p);
11347 *expr_p = build3_loc (input_location, COND_EXPR,
11348 org_type, *expr_p,
11349 fold_convert_loc
11350 (input_location,
11351 org_type, boolean_true_node),
11352 fold_convert_loc
11353 (input_location,
11354 org_type, boolean_false_node));
11355 ret = GS_OK;
11356 break;
11359 case TRUTH_NOT_EXPR:
11361 tree type = TREE_TYPE (*expr_p);
11362 /* The parsers are careful to generate TRUTH_NOT_EXPR
11363 only with operands that are always zero or one.
11364 We do not fold here but handle the only interesting case
11365 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11366 *expr_p = gimple_boolify (*expr_p);
11367 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11368 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11369 TREE_TYPE (*expr_p),
11370 TREE_OPERAND (*expr_p, 0));
11371 else
11372 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11373 TREE_TYPE (*expr_p),
11374 TREE_OPERAND (*expr_p, 0),
11375 build_int_cst (TREE_TYPE (*expr_p), 1));
11376 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11377 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11378 ret = GS_OK;
11379 break;
11382 case ADDR_EXPR:
11383 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11384 break;
11386 case ANNOTATE_EXPR:
11388 tree cond = TREE_OPERAND (*expr_p, 0);
11389 tree kind = TREE_OPERAND (*expr_p, 1);
11390 tree type = TREE_TYPE (cond);
11391 if (!INTEGRAL_TYPE_P (type))
11393 *expr_p = cond;
11394 ret = GS_OK;
11395 break;
11397 tree tmp = create_tmp_var (type);
11398 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11399 gcall *call
11400 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11401 gimple_call_set_lhs (call, tmp);
11402 gimplify_seq_add_stmt (pre_p, call);
11403 *expr_p = tmp;
11404 ret = GS_ALL_DONE;
11405 break;
11408 case VA_ARG_EXPR:
11409 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11410 break;
11412 CASE_CONVERT:
11413 if (IS_EMPTY_STMT (*expr_p))
11415 ret = GS_ALL_DONE;
11416 break;
11419 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11420 || fallback == fb_none)
11422 /* Just strip a conversion to void (or in void context) and
11423 try again. */
11424 *expr_p = TREE_OPERAND (*expr_p, 0);
11425 ret = GS_OK;
11426 break;
11429 ret = gimplify_conversion (expr_p);
11430 if (ret == GS_ERROR)
11431 break;
11432 if (*expr_p != save_expr)
11433 break;
11434 /* FALLTHRU */
11436 case FIX_TRUNC_EXPR:
11437 /* unary_expr: ... | '(' cast ')' val | ... */
11438 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11439 is_gimple_val, fb_rvalue);
11440 recalculate_side_effects (*expr_p);
11441 break;
11443 case INDIRECT_REF:
11445 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11446 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11447 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11449 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11450 if (*expr_p != save_expr)
11452 ret = GS_OK;
11453 break;
11456 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11457 is_gimple_reg, fb_rvalue);
11458 if (ret == GS_ERROR)
11459 break;
11461 recalculate_side_effects (*expr_p);
11462 *expr_p = fold_build2_loc (input_location, MEM_REF,
11463 TREE_TYPE (*expr_p),
11464 TREE_OPERAND (*expr_p, 0),
11465 build_int_cst (saved_ptr_type, 0));
11466 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11467 TREE_THIS_NOTRAP (*expr_p) = notrap;
11468 ret = GS_OK;
11469 break;
11472 /* We arrive here through the various re-gimplifcation paths. */
11473 case MEM_REF:
11474 /* First try re-folding the whole thing. */
11475 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11476 TREE_OPERAND (*expr_p, 0),
11477 TREE_OPERAND (*expr_p, 1));
11478 if (tmp)
11480 REF_REVERSE_STORAGE_ORDER (tmp)
11481 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11482 *expr_p = tmp;
11483 recalculate_side_effects (*expr_p);
11484 ret = GS_OK;
11485 break;
11487 /* Avoid re-gimplifying the address operand if it is already
11488 in suitable form. Re-gimplifying would mark the address
11489 operand addressable. Always gimplify when not in SSA form
11490 as we still may have to gimplify decls with value-exprs. */
11491 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11492 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11494 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11495 is_gimple_mem_ref_addr, fb_rvalue);
11496 if (ret == GS_ERROR)
11497 break;
11499 recalculate_side_effects (*expr_p);
11500 ret = GS_ALL_DONE;
11501 break;
11503 /* Constants need not be gimplified. */
11504 case INTEGER_CST:
11505 case REAL_CST:
11506 case FIXED_CST:
11507 case STRING_CST:
11508 case COMPLEX_CST:
11509 case VECTOR_CST:
11510 /* Drop the overflow flag on constants, we do not want
11511 that in the GIMPLE IL. */
11512 if (TREE_OVERFLOW_P (*expr_p))
11513 *expr_p = drop_tree_overflow (*expr_p);
11514 ret = GS_ALL_DONE;
11515 break;
11517 case CONST_DECL:
11518 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11519 CONST_DECL node. Otherwise the decl is replaceable by its
11520 value. */
11521 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11522 if (fallback & fb_lvalue)
11523 ret = GS_ALL_DONE;
11524 else
11526 *expr_p = DECL_INITIAL (*expr_p);
11527 ret = GS_OK;
11529 break;
11531 case DECL_EXPR:
11532 ret = gimplify_decl_expr (expr_p, pre_p);
11533 break;
11535 case BIND_EXPR:
11536 ret = gimplify_bind_expr (expr_p, pre_p);
11537 break;
11539 case LOOP_EXPR:
11540 ret = gimplify_loop_expr (expr_p, pre_p);
11541 break;
11543 case SWITCH_EXPR:
11544 ret = gimplify_switch_expr (expr_p, pre_p);
11545 break;
11547 case EXIT_EXPR:
11548 ret = gimplify_exit_expr (expr_p);
11549 break;
11551 case GOTO_EXPR:
11552 /* If the target is not LABEL, then it is a computed jump
11553 and the target needs to be gimplified. */
11554 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11556 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11557 NULL, is_gimple_val, fb_rvalue);
11558 if (ret == GS_ERROR)
11559 break;
11561 gimplify_seq_add_stmt (pre_p,
11562 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11563 ret = GS_ALL_DONE;
11564 break;
11566 case PREDICT_EXPR:
11567 gimplify_seq_add_stmt (pre_p,
11568 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11569 PREDICT_EXPR_OUTCOME (*expr_p)));
11570 ret = GS_ALL_DONE;
11571 break;
11573 case LABEL_EXPR:
11574 ret = gimplify_label_expr (expr_p, pre_p);
11575 label = LABEL_EXPR_LABEL (*expr_p);
11576 gcc_assert (decl_function_context (label) == current_function_decl);
11578 /* If the label is used in a goto statement, or address of the label
11579 is taken, we need to unpoison all variables that were seen so far.
11580 Doing so would prevent us from reporting a false positives. */
11581 if (asan_poisoned_variables
11582 && asan_used_labels != NULL
11583 && asan_used_labels->contains (label))
11584 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11585 break;
11587 case CASE_LABEL_EXPR:
11588 ret = gimplify_case_label_expr (expr_p, pre_p);
11590 if (gimplify_ctxp->live_switch_vars)
11591 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11592 pre_p);
11593 break;
11595 case RETURN_EXPR:
11596 ret = gimplify_return_expr (*expr_p, pre_p);
11597 break;
11599 case CONSTRUCTOR:
11600 /* Don't reduce this in place; let gimplify_init_constructor work its
11601 magic. Buf if we're just elaborating this for side effects, just
11602 gimplify any element that has side-effects. */
11603 if (fallback == fb_none)
11605 unsigned HOST_WIDE_INT ix;
11606 tree val;
11607 tree temp = NULL_TREE;
11608 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11609 if (TREE_SIDE_EFFECTS (val))
11610 append_to_statement_list (val, &temp);
11612 *expr_p = temp;
11613 ret = temp ? GS_OK : GS_ALL_DONE;
11615 /* C99 code may assign to an array in a constructed
11616 structure or union, and this has undefined behavior only
11617 on execution, so create a temporary if an lvalue is
11618 required. */
11619 else if (fallback == fb_lvalue)
11621 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11622 mark_addressable (*expr_p);
11623 ret = GS_OK;
11625 else
11626 ret = GS_ALL_DONE;
11627 break;
11629 /* The following are special cases that are not handled by the
11630 original GIMPLE grammar. */
11632 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11633 eliminated. */
11634 case SAVE_EXPR:
11635 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11636 break;
11638 case BIT_FIELD_REF:
11639 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11640 post_p, is_gimple_lvalue, fb_either);
11641 recalculate_side_effects (*expr_p);
11642 break;
11644 case TARGET_MEM_REF:
11646 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11648 if (TMR_BASE (*expr_p))
11649 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11650 post_p, is_gimple_mem_ref_addr, fb_either);
11651 if (TMR_INDEX (*expr_p))
11652 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11653 post_p, is_gimple_val, fb_rvalue);
11654 if (TMR_INDEX2 (*expr_p))
11655 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11656 post_p, is_gimple_val, fb_rvalue);
11657 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11658 ret = MIN (r0, r1);
11660 break;
11662 case NON_LVALUE_EXPR:
11663 /* This should have been stripped above. */
11664 gcc_unreachable ();
11666 case ASM_EXPR:
11667 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11668 break;
11670 case TRY_FINALLY_EXPR:
11671 case TRY_CATCH_EXPR:
11673 gimple_seq eval, cleanup;
11674 gtry *try_;
11676 /* Calls to destructors are generated automatically in FINALLY/CATCH
11677 block. They should have location as UNKNOWN_LOCATION. However,
11678 gimplify_call_expr will reset these call stmts to input_location
11679 if it finds stmt's location is unknown. To prevent resetting for
11680 destructors, we set the input_location to unknown.
11681 Note that this only affects the destructor calls in FINALLY/CATCH
11682 block, and will automatically reset to its original value by the
11683 end of gimplify_expr. */
11684 input_location = UNKNOWN_LOCATION;
11685 eval = cleanup = NULL;
11686 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11687 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11688 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11689 if (gimple_seq_empty_p (cleanup))
11691 gimple_seq_add_seq (pre_p, eval);
11692 ret = GS_ALL_DONE;
11693 break;
11695 try_ = gimple_build_try (eval, cleanup,
11696 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11697 ? GIMPLE_TRY_FINALLY
11698 : GIMPLE_TRY_CATCH);
11699 if (EXPR_HAS_LOCATION (save_expr))
11700 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11701 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11702 gimple_set_location (try_, saved_location);
11703 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11704 gimple_try_set_catch_is_cleanup (try_,
11705 TRY_CATCH_IS_CLEANUP (*expr_p));
11706 gimplify_seq_add_stmt (pre_p, try_);
11707 ret = GS_ALL_DONE;
11708 break;
11711 case CLEANUP_POINT_EXPR:
11712 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11713 break;
11715 case TARGET_EXPR:
11716 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11717 break;
11719 case CATCH_EXPR:
11721 gimple *c;
11722 gimple_seq handler = NULL;
11723 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11724 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11725 gimplify_seq_add_stmt (pre_p, c);
11726 ret = GS_ALL_DONE;
11727 break;
11730 case EH_FILTER_EXPR:
11732 gimple *ehf;
11733 gimple_seq failure = NULL;
11735 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11736 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11737 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11738 gimplify_seq_add_stmt (pre_p, ehf);
11739 ret = GS_ALL_DONE;
11740 break;
11743 case OBJ_TYPE_REF:
11745 enum gimplify_status r0, r1;
11746 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11747 post_p, is_gimple_val, fb_rvalue);
11748 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11749 post_p, is_gimple_val, fb_rvalue);
11750 TREE_SIDE_EFFECTS (*expr_p) = 0;
11751 ret = MIN (r0, r1);
11753 break;
11755 case LABEL_DECL:
11756 /* We get here when taking the address of a label. We mark
11757 the label as "forced"; meaning it can never be removed and
11758 it is a potential target for any computed goto. */
11759 FORCED_LABEL (*expr_p) = 1;
11760 ret = GS_ALL_DONE;
11761 break;
11763 case STATEMENT_LIST:
11764 ret = gimplify_statement_list (expr_p, pre_p);
11765 break;
11767 case WITH_SIZE_EXPR:
11769 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11770 post_p == &internal_post ? NULL : post_p,
11771 gimple_test_f, fallback);
11772 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11773 is_gimple_val, fb_rvalue);
11774 ret = GS_ALL_DONE;
11776 break;
11778 case VAR_DECL:
11779 case PARM_DECL:
11780 ret = gimplify_var_or_parm_decl (expr_p);
11781 break;
11783 case RESULT_DECL:
11784 /* When within an OMP context, notice uses of variables. */
11785 if (gimplify_omp_ctxp)
11786 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11787 ret = GS_ALL_DONE;
11788 break;
11790 case SSA_NAME:
11791 /* Allow callbacks into the gimplifier during optimization. */
11792 ret = GS_ALL_DONE;
11793 break;
11795 case OMP_PARALLEL:
11796 gimplify_omp_parallel (expr_p, pre_p);
11797 ret = GS_ALL_DONE;
11798 break;
11800 case OMP_TASK:
11801 gimplify_omp_task (expr_p, pre_p);
11802 ret = GS_ALL_DONE;
11803 break;
11805 case OMP_FOR:
11806 case OMP_SIMD:
11807 case CILK_SIMD:
11808 case CILK_FOR:
11809 case OMP_DISTRIBUTE:
11810 case OMP_TASKLOOP:
11811 case OACC_LOOP:
11812 ret = gimplify_omp_for (expr_p, pre_p);
11813 break;
11815 case OACC_CACHE:
11816 gimplify_oacc_cache (expr_p, pre_p);
11817 ret = GS_ALL_DONE;
11818 break;
11820 case OACC_DECLARE:
11821 gimplify_oacc_declare (expr_p, pre_p);
11822 ret = GS_ALL_DONE;
11823 break;
11825 case OACC_HOST_DATA:
11826 case OACC_DATA:
11827 case OACC_KERNELS:
11828 case OACC_PARALLEL:
11829 case OMP_SECTIONS:
11830 case OMP_SINGLE:
11831 case OMP_TARGET:
11832 case OMP_TARGET_DATA:
11833 case OMP_TEAMS:
11834 gimplify_omp_workshare (expr_p, pre_p);
11835 ret = GS_ALL_DONE;
11836 break;
11838 case OACC_ENTER_DATA:
11839 case OACC_EXIT_DATA:
11840 case OACC_UPDATE:
11841 case OMP_TARGET_UPDATE:
11842 case OMP_TARGET_ENTER_DATA:
11843 case OMP_TARGET_EXIT_DATA:
11844 gimplify_omp_target_update (expr_p, pre_p);
11845 ret = GS_ALL_DONE;
11846 break;
11848 case OMP_SECTION:
11849 case OMP_MASTER:
11850 case OMP_TASKGROUP:
11851 case OMP_ORDERED:
11852 case OMP_CRITICAL:
11854 gimple_seq body = NULL;
11855 gimple *g;
11857 gimplify_and_add (OMP_BODY (*expr_p), &body);
11858 switch (TREE_CODE (*expr_p))
11860 case OMP_SECTION:
11861 g = gimple_build_omp_section (body);
11862 break;
11863 case OMP_MASTER:
11864 g = gimple_build_omp_master (body);
11865 break;
11866 case OMP_TASKGROUP:
11868 gimple_seq cleanup = NULL;
11869 tree fn
11870 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11871 g = gimple_build_call (fn, 0);
11872 gimple_seq_add_stmt (&cleanup, g);
11873 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11874 body = NULL;
11875 gimple_seq_add_stmt (&body, g);
11876 g = gimple_build_omp_taskgroup (body);
11878 break;
11879 case OMP_ORDERED:
11880 g = gimplify_omp_ordered (*expr_p, body);
11881 break;
11882 case OMP_CRITICAL:
11883 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11884 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11885 gimplify_adjust_omp_clauses (pre_p, body,
11886 &OMP_CRITICAL_CLAUSES (*expr_p),
11887 OMP_CRITICAL);
11888 g = gimple_build_omp_critical (body,
11889 OMP_CRITICAL_NAME (*expr_p),
11890 OMP_CRITICAL_CLAUSES (*expr_p));
11891 break;
11892 default:
11893 gcc_unreachable ();
11895 gimplify_seq_add_stmt (pre_p, g);
11896 ret = GS_ALL_DONE;
11897 break;
11900 case OMP_ATOMIC:
11901 case OMP_ATOMIC_READ:
11902 case OMP_ATOMIC_CAPTURE_OLD:
11903 case OMP_ATOMIC_CAPTURE_NEW:
11904 ret = gimplify_omp_atomic (expr_p, pre_p);
11905 break;
11907 case TRANSACTION_EXPR:
11908 ret = gimplify_transaction (expr_p, pre_p);
11909 break;
11911 case TRUTH_AND_EXPR:
11912 case TRUTH_OR_EXPR:
11913 case TRUTH_XOR_EXPR:
11915 tree orig_type = TREE_TYPE (*expr_p);
11916 tree new_type, xop0, xop1;
11917 *expr_p = gimple_boolify (*expr_p);
11918 new_type = TREE_TYPE (*expr_p);
11919 if (!useless_type_conversion_p (orig_type, new_type))
11921 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11922 ret = GS_OK;
11923 break;
11926 /* Boolified binary truth expressions are semantically equivalent
11927 to bitwise binary expressions. Canonicalize them to the
11928 bitwise variant. */
11929 switch (TREE_CODE (*expr_p))
11931 case TRUTH_AND_EXPR:
11932 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11933 break;
11934 case TRUTH_OR_EXPR:
11935 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11936 break;
11937 case TRUTH_XOR_EXPR:
11938 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11939 break;
11940 default:
11941 break;
11943 /* Now make sure that operands have compatible type to
11944 expression's new_type. */
11945 xop0 = TREE_OPERAND (*expr_p, 0);
11946 xop1 = TREE_OPERAND (*expr_p, 1);
11947 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11948 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11949 new_type,
11950 xop0);
11951 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11952 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11953 new_type,
11954 xop1);
11955 /* Continue classified as tcc_binary. */
11956 goto expr_2;
11959 case VEC_COND_EXPR:
11961 enum gimplify_status r0, r1, r2;
11963 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11964 post_p, is_gimple_condexpr, fb_rvalue);
11965 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11966 post_p, is_gimple_val, fb_rvalue);
11967 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11968 post_p, is_gimple_val, fb_rvalue);
11970 ret = MIN (MIN (r0, r1), r2);
11971 recalculate_side_effects (*expr_p);
11973 break;
11975 case FMA_EXPR:
11976 case VEC_PERM_EXPR:
11977 /* Classified as tcc_expression. */
11978 goto expr_3;
11980 case BIT_INSERT_EXPR:
11981 /* Argument 3 is a constant. */
11982 goto expr_2;
11984 case POINTER_PLUS_EXPR:
11986 enum gimplify_status r0, r1;
11987 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11988 post_p, is_gimple_val, fb_rvalue);
11989 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11990 post_p, is_gimple_val, fb_rvalue);
11991 recalculate_side_effects (*expr_p);
11992 ret = MIN (r0, r1);
11993 break;
11996 case CILK_SYNC_STMT:
11998 if (!fn_contains_cilk_spawn_p (cfun))
12000 error_at (EXPR_LOCATION (*expr_p),
12001 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
12002 ret = GS_ERROR;
12004 else
12006 gimplify_cilk_sync (expr_p, pre_p);
12007 ret = GS_ALL_DONE;
12009 break;
12012 default:
12013 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12015 case tcc_comparison:
12016 /* Handle comparison of objects of non scalar mode aggregates
12017 with a call to memcmp. It would be nice to only have to do
12018 this for variable-sized objects, but then we'd have to allow
12019 the same nest of reference nodes we allow for MODIFY_EXPR and
12020 that's too complex.
12022 Compare scalar mode aggregates as scalar mode values. Using
12023 memcmp for them would be very inefficient at best, and is
12024 plain wrong if bitfields are involved. */
12026 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12028 /* Vector comparisons need no boolification. */
12029 if (TREE_CODE (type) == VECTOR_TYPE)
12030 goto expr_2;
12031 else if (!AGGREGATE_TYPE_P (type))
12033 tree org_type = TREE_TYPE (*expr_p);
12034 *expr_p = gimple_boolify (*expr_p);
12035 if (!useless_type_conversion_p (org_type,
12036 TREE_TYPE (*expr_p)))
12038 *expr_p = fold_convert_loc (input_location,
12039 org_type, *expr_p);
12040 ret = GS_OK;
12042 else
12043 goto expr_2;
12045 else if (TYPE_MODE (type) != BLKmode)
12046 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12047 else
12048 ret = gimplify_variable_sized_compare (expr_p);
12050 break;
12053 /* If *EXPR_P does not need to be special-cased, handle it
12054 according to its class. */
12055 case tcc_unary:
12056 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12057 post_p, is_gimple_val, fb_rvalue);
12058 break;
12060 case tcc_binary:
12061 expr_2:
12063 enum gimplify_status r0, r1;
12065 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12066 post_p, is_gimple_val, fb_rvalue);
12067 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12068 post_p, is_gimple_val, fb_rvalue);
12070 ret = MIN (r0, r1);
12071 break;
12074 expr_3:
12076 enum gimplify_status r0, r1, r2;
12078 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12079 post_p, is_gimple_val, fb_rvalue);
12080 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12081 post_p, is_gimple_val, fb_rvalue);
12082 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12083 post_p, is_gimple_val, fb_rvalue);
12085 ret = MIN (MIN (r0, r1), r2);
12086 break;
12089 case tcc_declaration:
12090 case tcc_constant:
12091 ret = GS_ALL_DONE;
12092 goto dont_recalculate;
12094 default:
12095 gcc_unreachable ();
12098 recalculate_side_effects (*expr_p);
12100 dont_recalculate:
12101 break;
12104 gcc_assert (*expr_p || ret != GS_OK);
12106 while (ret == GS_OK);
12108 /* If we encountered an error_mark somewhere nested inside, either
12109 stub out the statement or propagate the error back out. */
12110 if (ret == GS_ERROR)
12112 if (is_statement)
12113 *expr_p = NULL;
12114 goto out;
12117 /* This was only valid as a return value from the langhook, which
12118 we handled. Make sure it doesn't escape from any other context. */
12119 gcc_assert (ret != GS_UNHANDLED);
12121 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12123 /* We aren't looking for a value, and we don't have a valid
12124 statement. If it doesn't have side-effects, throw it away.
12125 We can also get here with code such as "*&&L;", where L is
12126 a LABEL_DECL that is marked as FORCED_LABEL. */
12127 if (TREE_CODE (*expr_p) == LABEL_DECL
12128 || !TREE_SIDE_EFFECTS (*expr_p))
12129 *expr_p = NULL;
12130 else if (!TREE_THIS_VOLATILE (*expr_p))
12132 /* This is probably a _REF that contains something nested that
12133 has side effects. Recurse through the operands to find it. */
12134 enum tree_code code = TREE_CODE (*expr_p);
12136 switch (code)
12138 case COMPONENT_REF:
12139 case REALPART_EXPR:
12140 case IMAGPART_EXPR:
12141 case VIEW_CONVERT_EXPR:
12142 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12143 gimple_test_f, fallback);
12144 break;
12146 case ARRAY_REF:
12147 case ARRAY_RANGE_REF:
12148 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12149 gimple_test_f, fallback);
12150 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12151 gimple_test_f, fallback);
12152 break;
12154 default:
12155 /* Anything else with side-effects must be converted to
12156 a valid statement before we get here. */
12157 gcc_unreachable ();
12160 *expr_p = NULL;
12162 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12163 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12165 /* Historically, the compiler has treated a bare reference
12166 to a non-BLKmode volatile lvalue as forcing a load. */
12167 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12169 /* Normally, we do not want to create a temporary for a
12170 TREE_ADDRESSABLE type because such a type should not be
12171 copied by bitwise-assignment. However, we make an
12172 exception here, as all we are doing here is ensuring that
12173 we read the bytes that make up the type. We use
12174 create_tmp_var_raw because create_tmp_var will abort when
12175 given a TREE_ADDRESSABLE type. */
12176 tree tmp = create_tmp_var_raw (type, "vol");
12177 gimple_add_tmp_var (tmp);
12178 gimplify_assign (tmp, *expr_p, pre_p);
12179 *expr_p = NULL;
12181 else
12182 /* We can't do anything useful with a volatile reference to
12183 an incomplete type, so just throw it away. Likewise for
12184 a BLKmode type, since any implicit inner load should
12185 already have been turned into an explicit one by the
12186 gimplification process. */
12187 *expr_p = NULL;
12190 /* If we are gimplifying at the statement level, we're done. Tack
12191 everything together and return. */
12192 if (fallback == fb_none || is_statement)
12194 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12195 it out for GC to reclaim it. */
12196 *expr_p = NULL_TREE;
12198 if (!gimple_seq_empty_p (internal_pre)
12199 || !gimple_seq_empty_p (internal_post))
12201 gimplify_seq_add_seq (&internal_pre, internal_post);
12202 gimplify_seq_add_seq (pre_p, internal_pre);
12205 /* The result of gimplifying *EXPR_P is going to be the last few
12206 statements in *PRE_P and *POST_P. Add location information
12207 to all the statements that were added by the gimplification
12208 helpers. */
12209 if (!gimple_seq_empty_p (*pre_p))
12210 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12212 if (!gimple_seq_empty_p (*post_p))
12213 annotate_all_with_location_after (*post_p, post_last_gsi,
12214 input_location);
12216 goto out;
12219 #ifdef ENABLE_GIMPLE_CHECKING
12220 if (*expr_p)
12222 enum tree_code code = TREE_CODE (*expr_p);
12223 /* These expressions should already be in gimple IR form. */
12224 gcc_assert (code != MODIFY_EXPR
12225 && code != ASM_EXPR
12226 && code != BIND_EXPR
12227 && code != CATCH_EXPR
12228 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12229 && code != EH_FILTER_EXPR
12230 && code != GOTO_EXPR
12231 && code != LABEL_EXPR
12232 && code != LOOP_EXPR
12233 && code != SWITCH_EXPR
12234 && code != TRY_FINALLY_EXPR
12235 && code != OACC_PARALLEL
12236 && code != OACC_KERNELS
12237 && code != OACC_DATA
12238 && code != OACC_HOST_DATA
12239 && code != OACC_DECLARE
12240 && code != OACC_UPDATE
12241 && code != OACC_ENTER_DATA
12242 && code != OACC_EXIT_DATA
12243 && code != OACC_CACHE
12244 && code != OMP_CRITICAL
12245 && code != OMP_FOR
12246 && code != OACC_LOOP
12247 && code != OMP_MASTER
12248 && code != OMP_TASKGROUP
12249 && code != OMP_ORDERED
12250 && code != OMP_PARALLEL
12251 && code != OMP_SECTIONS
12252 && code != OMP_SECTION
12253 && code != OMP_SINGLE);
12255 #endif
12257 /* Otherwise we're gimplifying a subexpression, so the resulting
12258 value is interesting. If it's a valid operand that matches
12259 GIMPLE_TEST_F, we're done. Unless we are handling some
12260 post-effects internally; if that's the case, we need to copy into
12261 a temporary before adding the post-effects to POST_P. */
12262 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12263 goto out;
12265 /* Otherwise, we need to create a new temporary for the gimplified
12266 expression. */
12268 /* We can't return an lvalue if we have an internal postqueue. The
12269 object the lvalue refers to would (probably) be modified by the
12270 postqueue; we need to copy the value out first, which means an
12271 rvalue. */
12272 if ((fallback & fb_lvalue)
12273 && gimple_seq_empty_p (internal_post)
12274 && is_gimple_addressable (*expr_p))
12276 /* An lvalue will do. Take the address of the expression, store it
12277 in a temporary, and replace the expression with an INDIRECT_REF of
12278 that temporary. */
12279 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12280 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12281 *expr_p = build_simple_mem_ref (tmp);
12283 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12285 /* An rvalue will do. Assign the gimplified expression into a
12286 new temporary TMP and replace the original expression with
12287 TMP. First, make sure that the expression has a type so that
12288 it can be assigned into a temporary. */
12289 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12290 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12292 else
12294 #ifdef ENABLE_GIMPLE_CHECKING
12295 if (!(fallback & fb_mayfail))
12297 fprintf (stderr, "gimplification failed:\n");
12298 print_generic_expr (stderr, *expr_p);
12299 debug_tree (*expr_p);
12300 internal_error ("gimplification failed");
12302 #endif
12303 gcc_assert (fallback & fb_mayfail);
12305 /* If this is an asm statement, and the user asked for the
12306 impossible, don't die. Fail and let gimplify_asm_expr
12307 issue an error. */
12308 ret = GS_ERROR;
12309 goto out;
12312 /* Make sure the temporary matches our predicate. */
12313 gcc_assert ((*gimple_test_f) (*expr_p));
12315 if (!gimple_seq_empty_p (internal_post))
12317 annotate_all_with_location (internal_post, input_location);
12318 gimplify_seq_add_seq (pre_p, internal_post);
12321 out:
12322 input_location = saved_location;
12323 return ret;
/* Like gimplify_expr but make sure the gimplified result is not itself
   a SSA name (but a decl if it were).  Temporaries required by
   evaluating *EXPR_P may be still SSA names.

   EXPR_P/PRE_P/POST_P/GIMPLE_TEST_F/FALLBACK are as for gimplify_expr;
   ALLOW_SSA, when false, forces the final result out of SSA form.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback,
	       bool allow_ssa)
{
  /* Remember whether the caller handed us an SSA name to begin with;
     that decides which replacement strategy is legal below.  */
  bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
					    gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    {
      tree name = *expr_p;
      if (was_ssa_name_p)
	/* The SSA name pre-existed this call, so its defining statement
	   is not ours to rewrite; copy the value into a fresh
	   temporary instead.  */
	*expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
      else
	{
	  /* Avoid the extra copy if possible: the name was created by
	     the gimplification above, so redirect its defining
	     statement to store into a new register and release the
	     now-unused SSA name.  */
	  *expr_p = create_tmp_reg (TREE_TYPE (name));
	  gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
	  release_ssa_name (name);
	}
    }
  return ret;
}
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.

   Works on the main variant first and then copies the gimplified
   sizes/bounds into all other variants of TYPE.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark before recursing, so self-referential types terminate.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types may have variable bounds (e.g. Ada subtypes).  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Un-ignore the artificial bound variables so the debugger
	     can see them.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify each field's offset and size, and recurse into the
	 field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We used to recurse on the pointed-to type here, which turned out to
	 be incorrect because its definition might refer to variables not
	 yet initialized at this point if a forward declaration is involved.

	 It was actually useful for anonymous pointed-to types to ensure
	 that the sizes evaluation dominates every possible later use of the
	 values.  Restricting to such types here would be safe since there
	 is no possible forward declaration around, but would introduce an
	 undesirable middle-end semantic to anonymity.  We then defer to
	 front-ends the responsibility of ensuring that the sizes are
	 evaluated both early and late enough, e.g. by attaching artificial
	 type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* The overall size and unit size apply to every type code.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
12458 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12459 a size or position, has had all of its SAVE_EXPRs evaluated.
12460 We add any required statements to *STMT_P. */
12462 void
12463 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12465 tree expr = *expr_p;
12467 /* We don't do anything if the value isn't there, is constant, or contains
12468 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12469 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12470 will want to replace it with a new variable, but that will cause problems
12471 if this type is from outside the function. It's OK to have that here. */
12472 if (is_gimple_sizepos (expr))
12473 return;
12475 *expr_p = unshare_expr (expr);
12477 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12478 if the def vanishes. */
12479 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  /* input_location is clobbered below; restore it before returning.  */
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* For nested functions, collect VLAs referenced from the enclosing
     function in NONLOCAL_VLAS; they are declared into the bind below.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; release it to GC.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OMP target context opened above.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
12609 typedef char *char_p; /* For DEF_VEC_P. */
12611 /* Return whether we should exclude FNDECL from instrumentation. */
12613 static bool
12614 flag_instrument_functions_exclude_p (tree fndecl)
12616 vec<char_p> *v;
12618 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12619 if (v && v->length () > 0)
12621 const char *name;
12622 int i;
12623 char *s;
12625 name = lang_hooks.decl_printable_name (fndecl, 0);
12626 FOR_EACH_VEC_ELT (*v, i, s)
12627 if (strstr (name, s) != NULL)
12628 return true;
12631 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12632 if (v && v->length () > 0)
12634 const char *name;
12635 int i;
12636 char *s;
12638 name = DECL_SOURCE_FILE (fndecl);
12639 FOR_EACH_VEC_ELT (*v, i, s)
12640 if (strstr (name, s) != NULL)
12641 return true;
12644 return false;
12647 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12648 node for the function we want to gimplify.
12650 Return the sequence of GIMPLE statements corresponding to the body
12651 of FNDECL. */
12653 void
12654 gimplify_function_tree (tree fndecl)
12656 tree parm, ret;
12657 gimple_seq seq;
12658 gbind *bind;
12660 gcc_assert (!gimple_body (fndecl));
12662 if (DECL_STRUCT_FUNCTION (fndecl))
12663 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12664 else
12665 push_struct_function (fndecl);
12667 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12668 if necessary. */
12669 cfun->curr_properties |= PROP_gimple_lva;
12671 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12673 /* Preliminarily mark non-addressed complex variables as eligible
12674 for promotion to gimple registers. We'll transform their uses
12675 as we find them. */
12676 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12677 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12678 && !TREE_THIS_VOLATILE (parm)
12679 && !needs_to_live_in_memory (parm))
12680 DECL_GIMPLE_REG_P (parm) = 1;
12683 ret = DECL_RESULT (fndecl);
12684 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12685 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12686 && !needs_to_live_in_memory (ret))
12687 DECL_GIMPLE_REG_P (ret) = 1;
12689 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
12690 asan_poisoned_variables = new hash_set<tree> ();
12691 bind = gimplify_body (fndecl, true);
12692 if (asan_poisoned_variables)
12694 delete asan_poisoned_variables;
12695 asan_poisoned_variables = NULL;
12698 /* The tree body of the function is no longer needed, replace it
12699 with the new GIMPLE body. */
12700 seq = NULL;
12701 gimple_seq_add_stmt (&seq, bind);
12702 gimple_set_body (fndecl, seq);
12704 /* If we're instrumenting function entry/exit, then prepend the call to
12705 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12706 catch the exit hook. */
12707 /* ??? Add some way to ignore exceptions for this TFE. */
12708 if (flag_instrument_function_entry_exit
12709 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12710 /* Do not instrument extern inline functions. */
12711 && !(DECL_DECLARED_INLINE_P (fndecl)
12712 && DECL_EXTERNAL (fndecl)
12713 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12714 && !flag_instrument_functions_exclude_p (fndecl))
12716 tree x;
12717 gbind *new_bind;
12718 gimple *tf;
12719 gimple_seq cleanup = NULL, body = NULL;
12720 tree tmp_var;
12721 gcall *call;
12723 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12724 call = gimple_build_call (x, 1, integer_zero_node);
12725 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12726 gimple_call_set_lhs (call, tmp_var);
12727 gimplify_seq_add_stmt (&cleanup, call);
12728 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12729 call = gimple_build_call (x, 2,
12730 build_fold_addr_expr (current_function_decl),
12731 tmp_var);
12732 gimplify_seq_add_stmt (&cleanup, call);
12733 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12735 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12736 call = gimple_build_call (x, 1, integer_zero_node);
12737 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12738 gimple_call_set_lhs (call, tmp_var);
12739 gimplify_seq_add_stmt (&body, call);
12740 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12741 call = gimple_build_call (x, 2,
12742 build_fold_addr_expr (current_function_decl),
12743 tmp_var);
12744 gimplify_seq_add_stmt (&body, call);
12745 gimplify_seq_add_stmt (&body, tf);
12746 new_bind = gimple_build_bind (NULL, body, NULL);
12748 /* Replace the current function body with the body
12749 wrapped in the try/finally TF. */
12750 seq = NULL;
12751 gimple_seq_add_stmt (&seq, new_bind);
12752 gimple_set_body (fndecl, seq);
12753 bind = new_bind;
12756 if (sanitize_flags_p (SANITIZE_THREAD))
12758 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12759 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12760 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12761 /* Replace the current function body with the body
12762 wrapped in the try/finally TF. */
12763 seq = NULL;
12764 gimple_seq_add_stmt (&seq, new_bind);
12765 gimple_set_body (fndecl, seq);
12768 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12769 cfun->curr_properties |= PROP_gimple_any;
12771 pop_cfun ();
12773 dump_function (TDI_gimple, fndecl);
12776 /* Return a dummy expression of type TYPE in order to keep going after an
12777 error. */
12779 static tree
12780 dummy_object (tree type)
12782 tree t = build_int_cst (build_pointer_type (type), 0);
12783 return build2 (MEM_REF, type, t, t);
12786 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12787 builtin function, but a very special sort of operator. */
12789 enum gimplify_status
12790 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12791 gimple_seq *post_p ATTRIBUTE_UNUSED)
12793 tree promoted_type, have_va_type;
12794 tree valist = TREE_OPERAND (*expr_p, 0);
12795 tree type = TREE_TYPE (*expr_p);
12796 tree t, tag, aptag;
12797 location_t loc = EXPR_LOCATION (*expr_p);
12799 /* Verify that valist is of the proper type. */
12800 have_va_type = TREE_TYPE (valist);
12801 if (have_va_type == error_mark_node)
12802 return GS_ERROR;
12803 have_va_type = targetm.canonical_va_list_type (have_va_type);
12804 if (have_va_type == NULL_TREE
12805 && POINTER_TYPE_P (TREE_TYPE (valist)))
12806 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12807 have_va_type
12808 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12809 gcc_assert (have_va_type != NULL_TREE);
12811 /* Generate a diagnostic for requesting data of a type that cannot
12812 be passed through `...' due to type promotion at the call site. */
12813 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12814 != type)
12816 static bool gave_help;
12817 bool warned;
12818 /* Use the expansion point to handle cases such as passing bool (defined
12819 in a system header) through `...'. */
12820 source_location xloc
12821 = expansion_point_location_if_in_system_header (loc);
12823 /* Unfortunately, this is merely undefined, rather than a constraint
12824 violation, so we cannot make this an error. If this call is never
12825 executed, the program is still strictly conforming. */
12826 warned = warning_at (xloc, 0,
12827 "%qT is promoted to %qT when passed through %<...%>",
12828 type, promoted_type);
12829 if (!gave_help && warned)
12831 gave_help = true;
12832 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12833 promoted_type, type);
12836 /* We can, however, treat "undefined" any way we please.
12837 Call abort to encourage the user to fix the program. */
12838 if (warned)
12839 inform (xloc, "if this code is reached, the program will abort");
12840 /* Before the abort, allow the evaluation of the va_list
12841 expression to exit or longjmp. */
12842 gimplify_and_add (valist, pre_p);
12843 t = build_call_expr_loc (loc,
12844 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12845 gimplify_and_add (t, pre_p);
12847 /* This is dead code, but go ahead and finish so that the
12848 mode of the result comes out right. */
12849 *expr_p = dummy_object (type);
12850 return GS_ALL_DONE;
12853 tag = build_int_cst (build_pointer_type (type), 0);
12854 aptag = build_int_cst (TREE_TYPE (valist), 0);
12856 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12857 valist, tag, aptag);
12859 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12860 needs to be expanded. */
12861 cfun->curr_properties &= ~PROP_gimple_lva;
12863 return GS_OK;
12866 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12868 DST/SRC are the destination and source respectively. You can pass
12869 ungimplified trees in DST or SRC, in which case they will be
12870 converted to a gimple operand if necessary.
12872 This function returns the newly created GIMPLE_ASSIGN tuple. */
12874 gimple *
12875 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12877 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12878 gimplify_and_add (t, seq_p);
12879 ggc_free (t);
12880 return gimple_seq_last_stmt (*seq_p);
12883 inline hashval_t
12884 gimplify_hasher::hash (const elt_t *p)
12886 tree t = p->val;
12887 return iterative_hash_expr (t, 0);
12890 inline bool
12891 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12893 tree t1 = p1->val;
12894 tree t2 = p2->val;
12895 enum tree_code code = TREE_CODE (t1);
12897 if (TREE_CODE (t2) != code
12898 || TREE_TYPE (t1) != TREE_TYPE (t2))
12899 return false;
12901 if (!operand_equal_p (t1, t2, 0))
12902 return false;
12904 /* Only allow them to compare equal if they also hash equal; otherwise
12905 results are nondeterminate, and we fail bootstrap comparison. */
12906 gcc_checking_assert (hash (p1) == hash (p2));
12908 return true;