gcc/ChangeLog:
[official-gcc.git] / gcc / gimplify.c
blob86623e09f5d8ab898b1e497d321717e3778606ed
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-general.h"
55 #include "omp-low.h"
56 #include "gimple-low.h"
57 #include "cilk.h"
58 #include "gomp-constants.h"
59 #include "splay-tree.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
62 #include "builtins.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "asan.h"
66 #include "dbgcnt.h"
68 /* Hash set of poisoned variables in a bind expr. */
69 static hash_set<tree> *asan_poisoned_variables = NULL;
71 enum gimplify_omp_var_data
73 GOVD_SEEN = 1,
74 GOVD_EXPLICIT = 2,
75 GOVD_SHARED = 4,
76 GOVD_PRIVATE = 8,
77 GOVD_FIRSTPRIVATE = 16,
78 GOVD_LASTPRIVATE = 32,
79 GOVD_REDUCTION = 64,
80 GOVD_LOCAL = 128,
81 GOVD_MAP = 256,
82 GOVD_DEBUG_PRIVATE = 512,
83 GOVD_PRIVATE_OUTER_REF = 1024,
84 GOVD_LINEAR = 2048,
85 GOVD_ALIGNED = 4096,
87 /* Flag for GOVD_MAP: don't copy back. */
88 GOVD_MAP_TO_ONLY = 8192,
90 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
91 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
93 GOVD_MAP_0LEN_ARRAY = 32768,
95 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
96 GOVD_MAP_ALWAYS_TO = 65536,
98 /* Flag for shared vars that are or might be stored to in the region. */
99 GOVD_WRITTEN = 131072,
101 /* Flag for GOVD_MAP, if it is a forced mapping. */
102 GOVD_MAP_FORCE = 262144,
104 /* Flag for GOVD_MAP: must be present already. */
105 GOVD_MAP_FORCE_PRESENT = 524288,
107 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
108 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
109 | GOVD_LOCAL)
113 enum omp_region_type
115 ORT_WORKSHARE = 0x00,
116 ORT_SIMD = 0x01,
118 ORT_PARALLEL = 0x02,
119 ORT_COMBINED_PARALLEL = 0x03,
121 ORT_TASK = 0x04,
122 ORT_UNTIED_TASK = 0x05,
124 ORT_TEAMS = 0x08,
125 ORT_COMBINED_TEAMS = 0x09,
127 /* Data region. */
128 ORT_TARGET_DATA = 0x10,
130 /* Data region with offloading. */
131 ORT_TARGET = 0x20,
132 ORT_COMBINED_TARGET = 0x21,
134 /* OpenACC variants. */
135 ORT_ACC = 0x40, /* A generic OpenACC region. */
136 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
137 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
138 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
139 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
141 /* Dummy OpenMP region, used to disable expansion of
142 DECL_VALUE_EXPRs in taskloop pre body. */
143 ORT_NONE = 0x100
146 /* Gimplify hashtable helper. */
148 struct gimplify_hasher : free_ptr_hash <elt_t>
150 static inline hashval_t hash (const elt_t *);
151 static inline bool equal (const elt_t *, const elt_t *);
154 struct gimplify_ctx
156 struct gimplify_ctx *prev_context;
158 vec<gbind *> bind_expr_stack;
159 tree temps;
160 gimple_seq conditional_cleanups;
161 tree exit_label;
162 tree return_temp;
164 vec<tree> case_labels;
165 hash_set<tree> *live_switch_vars;
166 /* The formal temporary table. Should this be persistent? */
167 hash_table<gimplify_hasher> *temp_htab;
169 int conditions;
170 unsigned into_ssa : 1;
171 unsigned allow_rhs_cond_expr : 1;
172 unsigned in_cleanup_point_expr : 1;
173 unsigned keep_stack : 1;
174 unsigned save_stack : 1;
175 unsigned in_switch_expr : 1;
178 struct gimplify_omp_ctx
180 struct gimplify_omp_ctx *outer_context;
181 splay_tree variables;
182 hash_set<tree> *privatized_types;
183 /* Iteration variables in an OMP_FOR. */
184 vec<tree> loop_iter_var;
185 location_t location;
186 enum omp_clause_default_kind default_kind;
187 enum omp_region_type region_type;
188 bool combined_loop;
189 bool distribute;
190 bool target_map_scalars_firstprivate;
191 bool target_map_pointers_as_0len_arrays;
192 bool target_firstprivatize_array_bases;
195 static struct gimplify_ctx *gimplify_ctxp;
196 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
198 /* Forward declaration. */
199 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
200 static hash_map<tree, tree> *oacc_declare_returns;
201 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
202 bool (*) (tree), fallback_t, bool);
204 /* Shorter alias name for the above function for use in gimplify.c
205 only. */
207 static inline void
208 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
210 gimple_seq_add_stmt_without_update (seq_p, gs);
213 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
214 NULL, a new sequence is allocated. This function is
215 similar to gimple_seq_add_seq, but does not scan the operands.
216 During gimplification, we need to manipulate statement sequences
217 before the def/use vectors have been constructed. */
219 static void
220 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
222 gimple_stmt_iterator si;
224 if (src == NULL)
225 return;
227 si = gsi_last (*dst_p);
228 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
232 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
233 and popping gimplify contexts. */
235 static struct gimplify_ctx *ctx_pool = NULL;
237 /* Return a gimplify context struct from the pool. */
239 static inline struct gimplify_ctx *
240 ctx_alloc (void)
242 struct gimplify_ctx * c = ctx_pool;
244 if (c)
245 ctx_pool = c->prev_context;
246 else
247 c = XNEW (struct gimplify_ctx);
249 memset (c, '\0', sizeof (*c));
250 return c;
253 /* Put gimplify context C back into the pool. */
255 static inline void
256 ctx_free (struct gimplify_ctx *c)
258 c->prev_context = ctx_pool;
259 ctx_pool = c;
262 /* Free allocated ctx stack memory. */
264 void
265 free_gimplify_stack (void)
267 struct gimplify_ctx *c;
269 while ((c = ctx_pool))
271 ctx_pool = c->prev_context;
272 free (c);
277 /* Set up a context for the gimplifier. */
279 void
280 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
282 struct gimplify_ctx *c = ctx_alloc ();
284 c->prev_context = gimplify_ctxp;
285 gimplify_ctxp = c;
286 gimplify_ctxp->into_ssa = in_ssa;
287 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
290 /* Tear down a context for the gimplifier. If BODY is non-null, then
291 put the temporaries into the outer BIND_EXPR. Otherwise, put them
292 in the local_decls.
294 BODY is not a sequence, but the first tuple in a sequence. */
296 void
297 pop_gimplify_context (gimple *body)
299 struct gimplify_ctx *c = gimplify_ctxp;
301 gcc_assert (c
302 && (!c->bind_expr_stack.exists ()
303 || c->bind_expr_stack.is_empty ()));
304 c->bind_expr_stack.release ();
305 gimplify_ctxp = c->prev_context;
307 if (body)
308 declare_vars (c->temps, body, false);
309 else
310 record_vars (c->temps);
312 delete c->temp_htab;
313 c->temp_htab = NULL;
314 ctx_free (c);
317 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
319 static void
320 gimple_push_bind_expr (gbind *bind_stmt)
322 gimplify_ctxp->bind_expr_stack.reserve (8);
323 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
326 /* Pop the first element off the stack of bindings. */
328 static void
329 gimple_pop_bind_expr (void)
331 gimplify_ctxp->bind_expr_stack.pop ();
334 /* Return the first element of the stack of bindings. */
336 gbind *
337 gimple_current_bind_expr (void)
339 return gimplify_ctxp->bind_expr_stack.last ();
342 /* Return the stack of bindings created during gimplification. */
344 vec<gbind *>
345 gimple_bind_expr_stack (void)
347 return gimplify_ctxp->bind_expr_stack;
350 /* Return true iff there is a COND_EXPR between us and the innermost
351 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
353 static bool
354 gimple_conditional_context (void)
356 return gimplify_ctxp->conditions > 0;
359 /* Note that we've entered a COND_EXPR. */
361 static void
362 gimple_push_condition (void)
364 #ifdef ENABLE_GIMPLE_CHECKING
365 if (gimplify_ctxp->conditions == 0)
366 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
367 #endif
368 ++(gimplify_ctxp->conditions);
371 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
372 now, add any conditional cleanups we've seen to the prequeue. */
374 static void
375 gimple_pop_condition (gimple_seq *pre_p)
377 int conds = --(gimplify_ctxp->conditions);
379 gcc_assert (conds >= 0);
380 if (conds == 0)
382 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
383 gimplify_ctxp->conditional_cleanups = NULL;
387 /* A stable comparison routine for use with splay trees and DECLs. */
389 static int
390 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
392 tree a = (tree) xa;
393 tree b = (tree) xb;
395 return DECL_UID (a) - DECL_UID (b);
398 /* Create a new omp construct that deals with variable remapping. */
400 static struct gimplify_omp_ctx *
401 new_omp_context (enum omp_region_type region_type)
403 struct gimplify_omp_ctx *c;
405 c = XCNEW (struct gimplify_omp_ctx);
406 c->outer_context = gimplify_omp_ctxp;
407 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
408 c->privatized_types = new hash_set<tree>;
409 c->location = input_location;
410 c->region_type = region_type;
411 if ((region_type & ORT_TASK) == 0)
412 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
413 else
414 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
416 return c;
419 /* Destroy an omp construct that deals with variable remapping. */
421 static void
422 delete_omp_context (struct gimplify_omp_ctx *c)
424 splay_tree_delete (c->variables);
425 delete c->privatized_types;
426 c->loop_iter_var.release ();
427 XDELETE (c);
430 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
431 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
433 /* Both gimplify the statement T and append it to *SEQ_P. This function
434 behaves exactly as gimplify_stmt, but you don't have to pass T as a
435 reference. */
437 void
438 gimplify_and_add (tree t, gimple_seq *seq_p)
440 gimplify_stmt (&t, seq_p);
443 /* Gimplify statement T into sequence *SEQ_P, and return the first
444 tuple in the sequence of generated tuples for this statement.
445 Return NULL if gimplifying T produced no tuples. */
447 static gimple *
448 gimplify_and_return_first (tree t, gimple_seq *seq_p)
450 gimple_stmt_iterator last = gsi_last (*seq_p);
452 gimplify_and_add (t, seq_p);
454 if (!gsi_end_p (last))
456 gsi_next (&last);
457 return gsi_stmt (last);
459 else
460 return gimple_seq_first_stmt (*seq_p);
463 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
464 LHS, or for a call argument. */
466 static bool
467 is_gimple_mem_rhs (tree t)
469 /* If we're dealing with a renamable type, either source or dest must be
470 a renamed variable. */
471 if (is_gimple_reg_type (TREE_TYPE (t)))
472 return is_gimple_val (t);
473 else
474 return is_gimple_val (t) || is_gimple_lvalue (t);
477 /* Return true if T is a CALL_EXPR or an expression that can be
478 assigned to a temporary. Note that this predicate should only be
479 used during gimplification. See the rationale for this in
480 gimplify_modify_expr. */
482 static bool
483 is_gimple_reg_rhs_or_call (tree t)
485 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
486 || TREE_CODE (t) == CALL_EXPR);
489 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
490 this predicate should only be used during gimplification. See the
491 rationale for this in gimplify_modify_expr. */
493 static bool
494 is_gimple_mem_rhs_or_call (tree t)
496 /* If we're dealing with a renamable type, either source or dest must be
497 a renamed variable. */
498 if (is_gimple_reg_type (TREE_TYPE (t)))
499 return is_gimple_val (t);
500 else
501 return (is_gimple_val (t)
502 || is_gimple_lvalue (t)
503 || TREE_CLOBBER_P (t)
504 || TREE_CODE (t) == CALL_EXPR);
507 /* Create a temporary with a name derived from VAL. Subroutine of
508 lookup_tmp_var; nobody else should call this function. */
510 static inline tree
511 create_tmp_from_val (tree val)
513 /* Drop all qualifiers and address-space information from the value type. */
514 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
515 tree var = create_tmp_var (type, get_name (val));
516 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
517 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
518 DECL_GIMPLE_REG_P (var) = 1;
519 return var;
522 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
523 an existing expression temporary. */
525 static tree
526 lookup_tmp_var (tree val, bool is_formal)
528 tree ret;
530 /* If not optimizing, never really reuse a temporary. local-alloc
531 won't allocate any variable that is used in more than one basic
532 block, which means it will go into memory, causing much extra
533 work in reload and final and poorer code generation, outweighing
534 the extra memory allocation here. */
535 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
536 ret = create_tmp_from_val (val);
537 else
539 elt_t elt, *elt_p;
540 elt_t **slot;
542 elt.val = val;
543 if (!gimplify_ctxp->temp_htab)
544 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
545 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
546 if (*slot == NULL)
548 elt_p = XNEW (elt_t);
549 elt_p->val = val;
550 elt_p->temp = ret = create_tmp_from_val (val);
551 *slot = elt_p;
553 else
555 elt_p = *slot;
556 ret = elt_p->temp;
560 return ret;
563 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
565 static tree
566 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
567 bool is_formal, bool allow_ssa)
569 tree t, mod;
571 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
572 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
573 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
574 fb_rvalue);
576 if (allow_ssa
577 && gimplify_ctxp->into_ssa
578 && is_gimple_reg_type (TREE_TYPE (val)))
580 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
581 if (! gimple_in_ssa_p (cfun))
583 const char *name = get_name (val);
584 if (name)
585 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
588 else
589 t = lookup_tmp_var (val, is_formal);
591 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
593 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
595 /* gimplify_modify_expr might want to reduce this further. */
596 gimplify_and_add (mod, pre_p);
597 ggc_free (mod);
599 return t;
602 /* Return a formal temporary variable initialized with VAL. PRE_P is as
603 in gimplify_expr. Only use this function if:
605 1) The value of the unfactored expression represented by VAL will not
606 change between the initialization and use of the temporary, and
607 2) The temporary will not be otherwise modified.
609 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
610 and #2 means it is inappropriate for && temps.
612 For other cases, use get_initialized_tmp_var instead. */
614 tree
615 get_formal_tmp_var (tree val, gimple_seq *pre_p)
617 return internal_get_tmp_var (val, pre_p, NULL, true, true);
620 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
621 are as in gimplify_expr. */
623 tree
624 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
625 bool allow_ssa)
627 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
630 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
631 generate debug info for them; otherwise don't. */
633 void
634 declare_vars (tree vars, gimple *gs, bool debug_info)
636 tree last = vars;
637 if (last)
639 tree temps, block;
641 gbind *scope = as_a <gbind *> (gs);
643 temps = nreverse (last);
645 block = gimple_bind_block (scope);
646 gcc_assert (!block || TREE_CODE (block) == BLOCK);
647 if (!block || !debug_info)
649 DECL_CHAIN (last) = gimple_bind_vars (scope);
650 gimple_bind_set_vars (scope, temps);
652 else
654 /* We need to attach the nodes both to the BIND_EXPR and to its
655 associated BLOCK for debugging purposes. The key point here
656 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
657 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
658 if (BLOCK_VARS (block))
659 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
660 else
662 gimple_bind_set_vars (scope,
663 chainon (gimple_bind_vars (scope), temps));
664 BLOCK_VARS (block) = temps;
670 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
671 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
672 no such upper bound can be obtained. */
674 static void
675 force_constant_size (tree var)
677 /* The only attempt we make is by querying the maximum size of objects
678 of the variable's type. */
680 HOST_WIDE_INT max_size;
682 gcc_assert (VAR_P (var));
684 max_size = max_int_size_in_bytes (TREE_TYPE (var));
686 gcc_assert (max_size >= 0);
688 DECL_SIZE_UNIT (var)
689 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
690 DECL_SIZE (var)
691 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
694 /* Push the temporary variable TMP into the current binding. */
696 void
697 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
699 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
701 /* Later processing assumes that the object size is constant, which might
702 not be true at this point. Force the use of a constant upper bound in
703 this case. */
704 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
705 force_constant_size (tmp);
707 DECL_CONTEXT (tmp) = fn->decl;
708 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
710 record_vars_into (tmp, fn->decl);
713 /* Push the temporary variable TMP into the current binding. */
715 void
716 gimple_add_tmp_var (tree tmp)
718 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
720 /* Later processing assumes that the object size is constant, which might
721 not be true at this point. Force the use of a constant upper bound in
722 this case. */
723 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
724 force_constant_size (tmp);
726 DECL_CONTEXT (tmp) = current_function_decl;
727 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
729 if (gimplify_ctxp)
731 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
732 gimplify_ctxp->temps = tmp;
734 /* Mark temporaries local within the nearest enclosing parallel. */
735 if (gimplify_omp_ctxp)
737 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
738 while (ctx
739 && (ctx->region_type == ORT_WORKSHARE
740 || ctx->region_type == ORT_SIMD
741 || ctx->region_type == ORT_ACC))
742 ctx = ctx->outer_context;
743 if (ctx)
744 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
747 else if (cfun)
748 record_vars (tmp);
749 else
751 gimple_seq body_seq;
753 /* This case is for nested functions. We need to expose the locals
754 they create. */
755 body_seq = gimple_body (current_function_decl);
756 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
762 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
763 nodes that are referenced more than once in GENERIC functions. This is
764 necessary because gimplification (translation into GIMPLE) is performed
765 by modifying tree nodes in-place, so gimplication of a shared node in a
766 first context could generate an invalid GIMPLE form in a second context.
768 This is achieved with a simple mark/copy/unmark algorithm that walks the
769 GENERIC representation top-down, marks nodes with TREE_VISITED the first
770 time it encounters them, duplicates them if they already have TREE_VISITED
771 set, and finally removes the TREE_VISITED marks it has set.
773 The algorithm works only at the function level, i.e. it generates a GENERIC
774 representation of a function with no nodes shared within the function when
775 passed a GENERIC function (except for nodes that are allowed to be shared).
777 At the global level, it is also necessary to unshare tree nodes that are
778 referenced in more than one function, for the same aforementioned reason.
779 This requires some cooperation from the front-end. There are 2 strategies:
781 1. Manual unsharing. The front-end needs to call unshare_expr on every
782 expression that might end up being shared across functions.
784 2. Deep unsharing. This is an extension of regular unsharing. Instead
785 of calling unshare_expr on expressions that might be shared across
786 functions, the front-end pre-marks them with TREE_VISITED. This will
787 ensure that they are unshared on the first reference within functions
788 when the regular unsharing algorithm runs. The counterpart is that
789 this algorithm must look deeper than for manual unsharing, which is
790 specified by LANG_HOOKS_DEEP_UNSHARING.
792 If there are only few specific cases of node sharing across functions, it is
793 probably easier for a front-end to unshare the expressions manually. On the
794 contrary, if the expressions generated at the global level are as widespread
795 as expressions generated within functions, deep unsharing is very likely the
796 way to go. */
798 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
799 These nodes model computations that must be done once. If we were to
800 unshare something like SAVE_EXPR(i++), the gimplification process would
801 create wrong code. However, if DATA is non-null, it must hold a pointer
802 set that is used to unshare the subtrees of these nodes. */
804 static tree
805 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
807 tree t = *tp;
808 enum tree_code code = TREE_CODE (t);
810 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
811 copy their subtrees if we can make sure to do it only once. */
812 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
814 if (data && !((hash_set<tree> *)data)->add (t))
816 else
817 *walk_subtrees = 0;
820 /* Stop at types, decls, constants like copy_tree_r. */
821 else if (TREE_CODE_CLASS (code) == tcc_type
822 || TREE_CODE_CLASS (code) == tcc_declaration
823 || TREE_CODE_CLASS (code) == tcc_constant)
824 *walk_subtrees = 0;
826 /* Cope with the statement expression extension. */
827 else if (code == STATEMENT_LIST)
830 /* Leave the bulk of the work to copy_tree_r itself. */
831 else
832 copy_tree_r (tp, walk_subtrees, NULL);
834 return NULL_TREE;
837 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
838 If *TP has been visited already, then *TP is deeply copied by calling
839 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
841 static tree
842 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
844 tree t = *tp;
845 enum tree_code code = TREE_CODE (t);
847 /* Skip types, decls, and constants. But we do want to look at their
848 types and the bounds of types. Mark them as visited so we properly
849 unmark their subtrees on the unmark pass. If we've already seen them,
850 don't look down further. */
851 if (TREE_CODE_CLASS (code) == tcc_type
852 || TREE_CODE_CLASS (code) == tcc_declaration
853 || TREE_CODE_CLASS (code) == tcc_constant)
855 if (TREE_VISITED (t))
856 *walk_subtrees = 0;
857 else
858 TREE_VISITED (t) = 1;
861 /* If this node has been visited already, unshare it and don't look
862 any deeper. */
863 else if (TREE_VISITED (t))
865 walk_tree (tp, mostly_copy_tree_r, data, NULL);
866 *walk_subtrees = 0;
869 /* Otherwise, mark the node as visited and keep looking. */
870 else
871 TREE_VISITED (t) = 1;
873 return NULL_TREE;
876 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
877 copy_if_shared_r callback unmodified. */
879 static inline void
880 copy_if_shared (tree *tp, void *data)
882 walk_tree (tp, copy_if_shared_r, data, NULL);
885 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
886 any nested functions. */
888 static void
889 unshare_body (tree fndecl)
891 struct cgraph_node *cgn = cgraph_node::get (fndecl);
892 /* If the language requires deep unsharing, we need a pointer set to make
893 sure we don't repeatedly unshare subtrees of unshareable nodes. */
894 hash_set<tree> *visited
895 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
897 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
898 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
899 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
901 delete visited;
903 if (cgn)
904 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
905 unshare_body (cgn->decl);
908 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
909 Subtrees are walked until the first unvisited node is encountered. */
911 static tree
912 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
914 tree t = *tp;
916 /* If this node has been visited, unmark it and keep looking. */
917 if (TREE_VISITED (t))
918 TREE_VISITED (t) = 0;
920 /* Otherwise, don't look any deeper. */
921 else
922 *walk_subtrees = 0;
924 return NULL_TREE;
927 /* Unmark the visited trees rooted at *TP. */
929 static inline void
930 unmark_visited (tree *tp)
932 walk_tree (tp, unmark_visited_r, NULL, NULL);
935 /* Likewise, but mark all trees as not visited. */
937 static void
938 unvisit_body (tree fndecl)
940 struct cgraph_node *cgn = cgraph_node::get (fndecl);
942 unmark_visited (&DECL_SAVED_TREE (fndecl));
943 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
944 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
946 if (cgn)
947 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
948 unvisit_body (cgn->decl);
951 /* Unconditionally make an unshared copy of EXPR. This is used when using
952 stored expressions which span multiple functions, such as BINFO_VTABLE,
953 as the normal unsharing process can't tell that they're shared. */
955 tree
956 unshare_expr (tree expr)
958 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
959 return expr;
962 /* Worker for unshare_expr_without_location. */
964 static tree
965 prune_expr_location (tree *tp, int *walk_subtrees, void *)
967 if (EXPR_P (*tp))
968 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
969 else
970 *walk_subtrees = 0;
971 return NULL_TREE;
974 /* Similar to unshare_expr but also prune all expression locations
975 from EXPR. */
977 tree
978 unshare_expr_without_location (tree expr)
980 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
981 if (EXPR_P (expr))
982 walk_tree (&expr, prune_expr_location, NULL, NULL);
983 return expr;
986 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
987 contain statements and have a value. Assign its value to a temporary
988 and give it void_type_node. Return the temporary, or NULL_TREE if
989 WRAPPER was already void. */
991 tree
992 voidify_wrapper_expr (tree wrapper, tree temp)
994 tree type = TREE_TYPE (wrapper);
995 if (type && !VOID_TYPE_P (type))
997 tree *p;
999 /* Set p to point to the body of the wrapper. Loop until we find
1000 something that isn't a wrapper. */
1001 for (p = &wrapper; p && *p; )
1003 switch (TREE_CODE (*p))
1005 case BIND_EXPR:
1006 TREE_SIDE_EFFECTS (*p) = 1;
1007 TREE_TYPE (*p) = void_type_node;
1008 /* For a BIND_EXPR, the body is operand 1. */
1009 p = &BIND_EXPR_BODY (*p);
1010 break;
1012 case CLEANUP_POINT_EXPR:
1013 case TRY_FINALLY_EXPR:
1014 case TRY_CATCH_EXPR:
1015 TREE_SIDE_EFFECTS (*p) = 1;
1016 TREE_TYPE (*p) = void_type_node;
1017 p = &TREE_OPERAND (*p, 0);
1018 break;
1020 case STATEMENT_LIST:
1022 tree_stmt_iterator i = tsi_last (*p);
1023 TREE_SIDE_EFFECTS (*p) = 1;
1024 TREE_TYPE (*p) = void_type_node;
1025 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1027 break;
1029 case COMPOUND_EXPR:
1030 /* Advance to the last statement. Set all container types to
1031 void. */
1032 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1034 TREE_SIDE_EFFECTS (*p) = 1;
1035 TREE_TYPE (*p) = void_type_node;
1037 break;
1039 case TRANSACTION_EXPR:
1040 TREE_SIDE_EFFECTS (*p) = 1;
1041 TREE_TYPE (*p) = void_type_node;
1042 p = &TRANSACTION_EXPR_BODY (*p);
1043 break;
1045 default:
1046 /* Assume that any tree upon which voidify_wrapper_expr is
1047 directly called is a wrapper, and that its body is op0. */
1048 if (p == &wrapper)
1050 TREE_SIDE_EFFECTS (*p) = 1;
1051 TREE_TYPE (*p) = void_type_node;
1052 p = &TREE_OPERAND (*p, 0);
1053 break;
1055 goto out;
1059 out:
1060 if (p == NULL || IS_EMPTY_STMT (*p))
1061 temp = NULL_TREE;
1062 else if (temp)
1064 /* The wrapper is on the RHS of an assignment that we're pushing
1065 down. */
1066 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1067 || TREE_CODE (temp) == MODIFY_EXPR);
1068 TREE_OPERAND (temp, 1) = *p;
1069 *p = temp;
1071 else
1073 temp = create_tmp_var (type, "retval");
1074 *p = build2 (INIT_EXPR, type, temp, *p);
1077 return temp;
1080 return NULL_TREE;
1083 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1084 a temporary through which they communicate. */
1086 static void
1087 build_stack_save_restore (gcall **save, gcall **restore)
1089 tree tmp_var;
1091 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1092 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1093 gimple_call_set_lhs (*save, tmp_var);
1095 *restore
1096 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1097 1, tmp_var);
1100 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1102 static tree
1103 build_asan_poison_call_expr (tree decl)
1105 /* Do not poison variables that have size equal to zero. */
1106 tree unit_size = DECL_SIZE_UNIT (decl);
1107 if (zerop (unit_size))
1108 return NULL_TREE;
1110 tree base = build_fold_addr_expr (decl);
1112 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1113 void_type_node, 3,
1114 build_int_cst (integer_type_node,
1115 ASAN_MARK_POISON),
1116 base, unit_size);
1119 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1120 on POISON flag, shadow memory of a DECL variable. The call will be
1121 put on location identified by IT iterator, where BEFORE flag drives
1122 position where the stmt will be put. */
1124 static void
1125 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1126 bool before)
1128 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1129 if (gimplify_omp_ctxp)
1130 return;
1132 tree unit_size = DECL_SIZE_UNIT (decl);
1133 tree base = build_fold_addr_expr (decl);
1135 /* Do not poison variables that have size equal to zero. */
1136 if (zerop (unit_size))
1137 return;
1139 /* It's necessary to have all stack variables aligned to ASAN granularity
1140 bytes. */
1141 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1142 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1144 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1146 gimple *g
1147 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1148 build_int_cst (integer_type_node, flags),
1149 base, unit_size);
1151 if (before)
1152 gsi_insert_before (it, g, GSI_NEW_STMT);
1153 else
1154 gsi_insert_after (it, g, GSI_NEW_STMT);
1157 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1158 either poisons or unpoisons a DECL. Created statement is appended
1159 to SEQ_P gimple sequence. */
1161 static void
1162 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1164 gimple_stmt_iterator it = gsi_last (*seq_p);
1165 bool before = false;
1167 if (gsi_end_p (it))
1168 before = true;
1170 asan_poison_variable (decl, poison, &it, before);
1173 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1175 static int
1176 sort_by_decl_uid (const void *a, const void *b)
1178 const tree *t1 = (const tree *)a;
1179 const tree *t2 = (const tree *)b;
1181 int uid1 = DECL_UID (*t1);
1182 int uid2 = DECL_UID (*t2);
1184 if (uid1 < uid2)
1185 return -1;
1186 else if (uid1 > uid2)
1187 return 1;
1188 else
1189 return 0;
1192 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1193 depending on POISON flag. Created statement is appended
1194 to SEQ_P gimple sequence. */
1196 static void
1197 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1199 unsigned c = variables->elements ();
1200 if (c == 0)
1201 return;
1203 auto_vec<tree> sorted_variables (c);
1205 for (hash_set<tree>::iterator it = variables->begin ();
1206 it != variables->end (); ++it)
1207 sorted_variables.safe_push (*it);
1209 sorted_variables.qsort (sort_by_decl_uid);
1211 unsigned i;
1212 tree var;
1213 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1215 asan_poison_variable (var, poison, seq_p);
1217 /* Add use_after_scope_memory attribute for the variable in order
1218 to prevent re-written into SSA. */
1219 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1220 DECL_ATTRIBUTES (var)))
1221 DECL_ATTRIBUTES (var)
1222 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1223 integer_one_node,
1224 DECL_ATTRIBUTES (var));
1228 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1230 static enum gimplify_status
1231 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1233 tree bind_expr = *expr_p;
1234 bool old_keep_stack = gimplify_ctxp->keep_stack;
1235 bool old_save_stack = gimplify_ctxp->save_stack;
1236 tree t;
1237 gbind *bind_stmt;
1238 gimple_seq body, cleanup;
1239 gcall *stack_save;
1240 location_t start_locus = 0, end_locus = 0;
1241 tree ret_clauses = NULL;
1243 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1245 /* Mark variables seen in this bind expr. */
1246 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1248 if (VAR_P (t))
1250 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1252 /* Mark variable as local. */
1253 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1254 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1255 || splay_tree_lookup (ctx->variables,
1256 (splay_tree_key) t) == NULL))
1258 if (ctx->region_type == ORT_SIMD
1259 && TREE_ADDRESSABLE (t)
1260 && !TREE_STATIC (t))
1261 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1262 else
1263 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1266 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1268 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1269 cfun->has_local_explicit_reg_vars = true;
1272 /* Preliminarily mark non-addressed complex variables as eligible
1273 for promotion to gimple registers. We'll transform their uses
1274 as we find them. */
1275 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1276 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1277 && !TREE_THIS_VOLATILE (t)
1278 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1279 && !needs_to_live_in_memory (t))
1280 DECL_GIMPLE_REG_P (t) = 1;
1283 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1284 BIND_EXPR_BLOCK (bind_expr));
1285 gimple_push_bind_expr (bind_stmt);
1287 gimplify_ctxp->keep_stack = false;
1288 gimplify_ctxp->save_stack = false;
1290 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1291 body = NULL;
1292 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1293 gimple_bind_set_body (bind_stmt, body);
1295 /* Source location wise, the cleanup code (stack_restore and clobbers)
1296 belongs to the end of the block, so propagate what we have. The
1297 stack_save operation belongs to the beginning of block, which we can
1298 infer from the bind_expr directly if the block has no explicit
1299 assignment. */
1300 if (BIND_EXPR_BLOCK (bind_expr))
1302 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1303 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1305 if (start_locus == 0)
1306 start_locus = EXPR_LOCATION (bind_expr);
1308 cleanup = NULL;
1309 stack_save = NULL;
1311 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1312 the stack space allocated to the VLAs. */
1313 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1315 gcall *stack_restore;
1317 /* Save stack on entry and restore it on exit. Add a try_finally
1318 block to achieve this. */
1319 build_stack_save_restore (&stack_save, &stack_restore);
1321 gimple_set_location (stack_save, start_locus);
1322 gimple_set_location (stack_restore, end_locus);
1324 gimplify_seq_add_stmt (&cleanup, stack_restore);
1327 /* Add clobbers for all variables that go out of scope. */
1328 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1330 if (VAR_P (t)
1331 && !is_global_var (t)
1332 && DECL_CONTEXT (t) == current_function_decl)
1334 if (!DECL_HARD_REGISTER (t)
1335 && !TREE_THIS_VOLATILE (t)
1336 && !DECL_HAS_VALUE_EXPR_P (t)
1337 /* Only care for variables that have to be in memory. Others
1338 will be rewritten into SSA names, hence moved to the
1339 top-level. */
1340 && !is_gimple_reg (t)
1341 && flag_stack_reuse != SR_NONE)
1343 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1344 gimple *clobber_stmt;
1345 TREE_THIS_VOLATILE (clobber) = 1;
1346 clobber_stmt = gimple_build_assign (t, clobber);
1347 gimple_set_location (clobber_stmt, end_locus);
1348 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1351 if (flag_openacc && oacc_declare_returns != NULL)
1353 tree *c = oacc_declare_returns->get (t);
1354 if (c != NULL)
1356 if (ret_clauses)
1357 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1359 ret_clauses = *c;
1361 oacc_declare_returns->remove (t);
1363 if (oacc_declare_returns->elements () == 0)
1365 delete oacc_declare_returns;
1366 oacc_declare_returns = NULL;
1372 if (asan_poisoned_variables != NULL
1373 && asan_poisoned_variables->contains (t))
1375 asan_poisoned_variables->remove (t);
1376 asan_poison_variable (t, true, &cleanup);
1379 if (gimplify_ctxp->live_switch_vars != NULL
1380 && gimplify_ctxp->live_switch_vars->contains (t))
1381 gimplify_ctxp->live_switch_vars->remove (t);
1384 if (ret_clauses)
1386 gomp_target *stmt;
1387 gimple_stmt_iterator si = gsi_start (cleanup);
1389 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1390 ret_clauses);
1391 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1394 if (cleanup)
1396 gtry *gs;
1397 gimple_seq new_body;
1399 new_body = NULL;
1400 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1401 GIMPLE_TRY_FINALLY);
1403 if (stack_save)
1404 gimplify_seq_add_stmt (&new_body, stack_save);
1405 gimplify_seq_add_stmt (&new_body, gs);
1406 gimple_bind_set_body (bind_stmt, new_body);
1409 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1410 if (!gimplify_ctxp->keep_stack)
1411 gimplify_ctxp->keep_stack = old_keep_stack;
1412 gimplify_ctxp->save_stack = old_save_stack;
1414 gimple_pop_bind_expr ();
1416 gimplify_seq_add_stmt (pre_p, bind_stmt);
1418 if (temp)
1420 *expr_p = temp;
1421 return GS_OK;
1424 *expr_p = NULL_TREE;
1425 return GS_ALL_DONE;
1428 /* Maybe add early return predict statement to PRE_P sequence. */
1430 static void
1431 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1433 /* If we are not in a conditional context, add PREDICT statement. */
1434 if (gimple_conditional_context ())
1436 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1437 NOT_TAKEN);
1438 gimplify_seq_add_stmt (pre_p, predict);
1442 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1443 GIMPLE value, it is assigned to a new temporary and the statement is
1444 re-written to return the temporary.
1446 PRE_P points to the sequence where side effects that must happen before
1447 STMT should be stored. */
1449 static enum gimplify_status
1450 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1452 greturn *ret;
1453 tree ret_expr = TREE_OPERAND (stmt, 0);
1454 tree result_decl, result;
1456 if (ret_expr == error_mark_node)
1457 return GS_ERROR;
1459 /* Implicit _Cilk_sync must be inserted right before any return statement
1460 if there is a _Cilk_spawn in the function. If the user has provided a
1461 _Cilk_sync, the optimizer should remove this duplicate one. */
1462 if (fn_contains_cilk_spawn_p (cfun))
1464 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1465 gimplify_and_add (impl_sync, pre_p);
1468 if (!ret_expr
1469 || TREE_CODE (ret_expr) == RESULT_DECL
1470 || ret_expr == error_mark_node)
1472 maybe_add_early_return_predict_stmt (pre_p);
1473 greturn *ret = gimple_build_return (ret_expr);
1474 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1475 gimplify_seq_add_stmt (pre_p, ret);
1476 return GS_ALL_DONE;
1479 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1480 result_decl = NULL_TREE;
1481 else
1483 result_decl = TREE_OPERAND (ret_expr, 0);
1485 /* See through a return by reference. */
1486 if (TREE_CODE (result_decl) == INDIRECT_REF)
1487 result_decl = TREE_OPERAND (result_decl, 0);
1489 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1490 || TREE_CODE (ret_expr) == INIT_EXPR)
1491 && TREE_CODE (result_decl) == RESULT_DECL);
1494 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1495 Recall that aggregate_value_p is FALSE for any aggregate type that is
1496 returned in registers. If we're returning values in registers, then
1497 we don't want to extend the lifetime of the RESULT_DECL, particularly
1498 across another call. In addition, for those aggregates for which
1499 hard_function_value generates a PARALLEL, we'll die during normal
1500 expansion of structure assignments; there's special code in expand_return
1501 to handle this case that does not exist in expand_expr. */
1502 if (!result_decl)
1503 result = NULL_TREE;
1504 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1506 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1508 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1509 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1510 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1511 should be effectively allocated by the caller, i.e. all calls to
1512 this function must be subject to the Return Slot Optimization. */
1513 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1514 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1516 result = result_decl;
1518 else if (gimplify_ctxp->return_temp)
1519 result = gimplify_ctxp->return_temp;
1520 else
1522 result = create_tmp_reg (TREE_TYPE (result_decl));
1524 /* ??? With complex control flow (usually involving abnormal edges),
1525 we can wind up warning about an uninitialized value for this. Due
1526 to how this variable is constructed and initialized, this is never
1527 true. Give up and never warn. */
1528 TREE_NO_WARNING (result) = 1;
1530 gimplify_ctxp->return_temp = result;
1533 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1534 Then gimplify the whole thing. */
1535 if (result != result_decl)
1536 TREE_OPERAND (ret_expr, 0) = result;
1538 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1540 maybe_add_early_return_predict_stmt (pre_p);
1541 ret = gimple_build_return (result);
1542 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1543 gimplify_seq_add_stmt (pre_p, ret);
1545 return GS_ALL_DONE;
1548 /* Gimplify a variable-length array DECL. */
1550 static void
1551 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1553 /* This is a variable-sized decl. Simplify its size and mark it
1554 for deferred expansion. */
1555 tree t, addr, ptr_type;
1557 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1558 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1560 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1561 if (DECL_HAS_VALUE_EXPR_P (decl))
1562 return;
1564 /* All occurrences of this decl in final gimplified code will be
1565 replaced by indirection. Setting DECL_VALUE_EXPR does two
1566 things: First, it lets the rest of the gimplifier know what
1567 replacement to use. Second, it lets the debug info know
1568 where to find the value. */
1569 ptr_type = build_pointer_type (TREE_TYPE (decl));
1570 addr = create_tmp_var (ptr_type, get_name (decl));
1571 DECL_IGNORED_P (addr) = 0;
1572 t = build_fold_indirect_ref (addr);
1573 TREE_THIS_NOTRAP (t) = 1;
1574 SET_DECL_VALUE_EXPR (decl, t);
1575 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1577 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1578 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1579 size_int (DECL_ALIGN (decl)));
1580 /* The call has been built for a variable-sized object. */
1581 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1582 t = fold_convert (ptr_type, t);
1583 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1585 gimplify_and_add (t, seq_p);
1588 /* A helper function to be called via walk_tree. Mark all labels under *TP
1589 as being forced. To be called for DECL_INITIAL of static variables. */
1591 static tree
1592 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1594 if (TYPE_P (*tp))
1595 *walk_subtrees = 0;
1596 if (TREE_CODE (*tp) == LABEL_DECL)
1598 FORCED_LABEL (*tp) = 1;
1599 cfun->has_forced_label_in_static = 1;
1602 return NULL_TREE;
1605 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1606 and initialization explicit. */
1608 static enum gimplify_status
1609 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1611 tree stmt = *stmt_p;
1612 tree decl = DECL_EXPR_DECL (stmt);
1614 *stmt_p = NULL_TREE;
1616 if (TREE_TYPE (decl) == error_mark_node)
1617 return GS_ERROR;
1619 if ((TREE_CODE (decl) == TYPE_DECL
1620 || VAR_P (decl))
1621 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1623 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1624 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1625 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1628 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1629 in case its size expressions contain problematic nodes like CALL_EXPR. */
1630 if (TREE_CODE (decl) == TYPE_DECL
1631 && DECL_ORIGINAL_TYPE (decl)
1632 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1634 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1635 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1636 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1639 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1641 tree init = DECL_INITIAL (decl);
1642 bool is_vla = false;
1644 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1645 || (!TREE_STATIC (decl)
1646 && flag_stack_check == GENERIC_STACK_CHECK
1647 && compare_tree_int (DECL_SIZE_UNIT (decl),
1648 STACK_CHECK_MAX_VAR_SIZE) > 0))
1650 gimplify_vla_decl (decl, seq_p);
1651 is_vla = true;
1654 if (asan_poisoned_variables
1655 && !is_vla
1656 && TREE_ADDRESSABLE (decl)
1657 && !TREE_STATIC (decl)
1658 && !DECL_HAS_VALUE_EXPR_P (decl)
1659 && dbg_cnt (asan_use_after_scope))
1661 asan_poisoned_variables->add (decl);
1662 asan_poison_variable (decl, false, seq_p);
1663 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1664 gimplify_ctxp->live_switch_vars->add (decl);
1667 /* Some front ends do not explicitly declare all anonymous
1668 artificial variables. We compensate here by declaring the
1669 variables, though it would be better if the front ends would
1670 explicitly declare them. */
1671 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1672 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1673 gimple_add_tmp_var (decl);
1675 if (init && init != error_mark_node)
1677 if (!TREE_STATIC (decl))
1679 DECL_INITIAL (decl) = NULL_TREE;
1680 init = build2 (INIT_EXPR, void_type_node, decl, init);
1681 gimplify_and_add (init, seq_p);
1682 ggc_free (init);
1684 else
1685 /* We must still examine initializers for static variables
1686 as they may contain a label address. */
1687 walk_tree (&init, force_labels_r, NULL, NULL);
1691 return GS_ALL_DONE;
1694 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1695 and replacing the LOOP_EXPR with goto, but if the loop contains an
1696 EXIT_EXPR, we need to append a label for it to jump to. */
1698 static enum gimplify_status
1699 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1701 tree saved_label = gimplify_ctxp->exit_label;
1702 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1704 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1706 gimplify_ctxp->exit_label = NULL_TREE;
1708 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1710 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1712 if (gimplify_ctxp->exit_label)
1713 gimplify_seq_add_stmt (pre_p,
1714 gimple_build_label (gimplify_ctxp->exit_label));
1716 gimplify_ctxp->exit_label = saved_label;
1718 *expr_p = NULL;
1719 return GS_ALL_DONE;
1722 /* Gimplify a statement list onto a sequence. These may be created either
1723 by an enlightened front-end, or by shortcut_cond_expr. */
1725 static enum gimplify_status
1726 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1728 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1730 tree_stmt_iterator i = tsi_start (*expr_p);
1732 while (!tsi_end_p (i))
1734 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1735 tsi_delink (&i);
1738 if (temp)
1740 *expr_p = temp;
1741 return GS_OK;
1744 return GS_ALL_DONE;
1747 /* Callback for walk_gimple_seq. */
1749 static tree
1750 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1751 struct walk_stmt_info *wi)
1753 gimple *stmt = gsi_stmt (*gsi_p);
1755 *handled_ops_p = true;
1756 switch (gimple_code (stmt))
1758 case GIMPLE_TRY:
1759 /* A compiler-generated cleanup or a user-written try block.
1760 If it's empty, don't dive into it--that would result in
1761 worse location info. */
1762 if (gimple_try_eval (stmt) == NULL)
1764 wi->info = stmt;
1765 return integer_zero_node;
1767 /* Fall through. */
1768 case GIMPLE_BIND:
1769 case GIMPLE_CATCH:
1770 case GIMPLE_EH_FILTER:
1771 case GIMPLE_TRANSACTION:
1772 /* Walk the sub-statements. */
1773 *handled_ops_p = false;
1774 break;
1775 case GIMPLE_CALL:
1776 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1778 *handled_ops_p = false;
1779 break;
1781 /* Fall through. */
1782 default:
1783 /* Save the first "real" statement (not a decl/lexical scope/...). */
1784 wi->info = stmt;
1785 return integer_zero_node;
1787 return NULL_TREE;
1790 /* Possibly warn about unreachable statements between switch's controlling
1791 expression and the first case. SEQ is the body of a switch expression. */
1793 static void
1794 maybe_warn_switch_unreachable (gimple_seq seq)
1796 if (!warn_switch_unreachable
1797 /* This warning doesn't play well with Fortran when optimizations
1798 are on. */
1799 || lang_GNU_Fortran ()
1800 || seq == NULL)
1801 return;
1803 struct walk_stmt_info wi;
1804 memset (&wi, 0, sizeof (wi));
1805 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1806 gimple *stmt = (gimple *) wi.info;
1808 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1810 if (gimple_code (stmt) == GIMPLE_GOTO
1811 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1812 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1813 /* Don't warn for compiler-generated gotos. These occur
1814 in Duff's devices, for example. */;
1815 else
1816 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1817 "statement will never be executed");
1822 /* A label entry that pairs label and a location. */
1823 struct label_entry
1825 tree label;
1826 location_t loc;
1829 /* Find LABEL in vector of label entries VEC. */
1831 static struct label_entry *
1832 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1834 unsigned int i;
1835 struct label_entry *l;
1837 FOR_EACH_VEC_ELT (*vec, i, l)
1838 if (l->label == label)
1839 return l;
1840 return NULL;
1843 /* Return true if LABEL, a LABEL_DECL, represents a case label
1844 in a vector of labels CASES. */
1846 static bool
1847 case_label_p (const vec<tree> *cases, tree label)
1849 unsigned int i;
1850 tree l;
1852 FOR_EACH_VEC_ELT (*cases, i, l)
1853 if (CASE_LABEL (l) == label)
1854 return true;
1855 return false;
1858 /* Find the last statement in a scope STMT. */
1860 static gimple *
1861 last_stmt_in_scope (gimple *stmt)
1863 if (!stmt)
1864 return NULL;
1866 switch (gimple_code (stmt))
1868 case GIMPLE_BIND:
1870 gbind *bind = as_a <gbind *> (stmt);
1871 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1872 return last_stmt_in_scope (stmt);
1875 case GIMPLE_TRY:
1877 gtry *try_stmt = as_a <gtry *> (stmt);
1878 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1879 gimple *last_eval = last_stmt_in_scope (stmt);
1880 if (gimple_stmt_may_fallthru (last_eval)
1881 && (last_eval == NULL
1882 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1883 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1885 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1886 return last_stmt_in_scope (stmt);
1888 else
1889 return last_eval;
1892 default:
1893 return stmt;
1897 /* Collect interesting labels in LABELS and return the statement preceding
1898 another case label, or a user-defined label. */
1900 static gimple *
1901 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1902 auto_vec <struct label_entry> *labels)
1904 gimple *prev = NULL;
1908 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1909 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1911 /* Nested scope. Only look at the last statement of
1912 the innermost scope. */
1913 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1914 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1915 if (last)
1917 prev = last;
1918 /* It might be a label without a location. Use the
1919 location of the scope then. */
1920 if (!gimple_has_location (prev))
1921 gimple_set_location (prev, bind_loc);
1923 gsi_next (gsi_p);
1924 continue;
1927 /* Ifs are tricky. */
1928 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1930 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1931 tree false_lab = gimple_cond_false_label (cond_stmt);
1932 location_t if_loc = gimple_location (cond_stmt);
1934 /* If we have e.g.
1935 if (i > 1) goto <D.2259>; else goto D;
1936 we can't do much with the else-branch. */
1937 if (!DECL_ARTIFICIAL (false_lab))
1938 break;
1940 /* Go on until the false label, then one step back. */
1941 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1943 gimple *stmt = gsi_stmt (*gsi_p);
1944 if (gimple_code (stmt) == GIMPLE_LABEL
1945 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1946 break;
1949 /* Not found? Oops. */
1950 if (gsi_end_p (*gsi_p))
1951 break;
1953 struct label_entry l = { false_lab, if_loc };
1954 labels->safe_push (l);
1956 /* Go to the last statement of the then branch. */
1957 gsi_prev (gsi_p);
1959 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1960 <D.1759>:
1961 <stmt>;
1962 goto <D.1761>;
1963 <D.1760>:
1965 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1966 && !gimple_has_location (gsi_stmt (*gsi_p)))
1968 /* Look at the statement before, it might be
1969 attribute fallthrough, in which case don't warn. */
1970 gsi_prev (gsi_p);
1971 bool fallthru_before_dest
1972 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1973 gsi_next (gsi_p);
1974 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1975 if (!fallthru_before_dest)
1977 struct label_entry l = { goto_dest, if_loc };
1978 labels->safe_push (l);
1981 /* And move back. */
1982 gsi_next (gsi_p);
1985 /* Remember the last statement. Skip labels that are of no interest
1986 to us. */
1987 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1989 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1990 if (find_label_entry (labels, label))
1991 prev = gsi_stmt (*gsi_p);
1993 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1995 else
1996 prev = gsi_stmt (*gsi_p);
1997 gsi_next (gsi_p);
1999 while (!gsi_end_p (*gsi_p)
2000 /* Stop if we find a case or a user-defined label. */
2001 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2002 || !gimple_has_location (gsi_stmt (*gsi_p))));
2004 return prev;
2007 /* Return true if the switch fallthough warning should occur. LABEL is
2008 the label statement that we're falling through to. */
2010 static bool
2011 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2013 gimple_stmt_iterator gsi = *gsi_p;
2015 /* Don't warn if the label is marked with a "falls through" comment. */
2016 if (FALLTHROUGH_LABEL_P (label))
2017 return false;
2019 /* Don't warn for non-case labels followed by a statement:
2020 case 0:
2021 foo ();
2022 label:
2023 bar ();
2024 as these are likely intentional. */
2025 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2027 tree l;
2028 while (!gsi_end_p (gsi)
2029 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2030 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2031 && !case_label_p (&gimplify_ctxp->case_labels, l))
2032 gsi_next (&gsi);
2033 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2034 return false;
2037 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2038 immediately breaks. */
2039 gsi = *gsi_p;
2041 /* Skip all immediately following labels. */
2042 while (!gsi_end_p (gsi)
2043 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2044 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2045 gsi_next (&gsi);
2047 /* { ... something; default:; } */
2048 if (gsi_end_p (gsi)
2049 /* { ... something; default: break; } or
2050 { ... something; default: goto L; } */
2051 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2052 /* { ... something; default: return; } */
2053 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2054 return false;
2056 return true;
2059 /* Callback for walk_gimple_seq. */
2061 static tree
2062 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2063 struct walk_stmt_info *)
2065 gimple *stmt = gsi_stmt (*gsi_p);
2067 *handled_ops_p = true;
2068 switch (gimple_code (stmt))
2070 case GIMPLE_TRY:
2071 case GIMPLE_BIND:
2072 case GIMPLE_CATCH:
2073 case GIMPLE_EH_FILTER:
2074 case GIMPLE_TRANSACTION:
2075 /* Walk the sub-statements. */
2076 *handled_ops_p = false;
2077 break;
2079 /* Find a sequence of form:
2081 GIMPLE_LABEL
2082 [...]
2083 <may fallthru stmt>
2084 GIMPLE_LABEL
2086 and possibly warn. */
2087 case GIMPLE_LABEL:
2089 /* Found a label. Skip all immediately following labels. */
2090 while (!gsi_end_p (*gsi_p)
2091 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2092 gsi_next (gsi_p);
2094 /* There might be no more statements. */
2095 if (gsi_end_p (*gsi_p))
2096 return integer_zero_node;
2098 /* Vector of labels that fall through. */
2099 auto_vec <struct label_entry> labels;
2100 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2102 /* There might be no more statements. */
2103 if (gsi_end_p (*gsi_p))
2104 return integer_zero_node;
2106 gimple *next = gsi_stmt (*gsi_p);
2107 tree label;
2108 /* If what follows is a label, then we may have a fallthrough. */
2109 if (gimple_code (next) == GIMPLE_LABEL
2110 && gimple_has_location (next)
2111 && (label = gimple_label_label (as_a <glabel *> (next)))
2112 && prev != NULL)
2114 struct label_entry *l;
2115 bool warned_p = false;
2116 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2117 /* Quiet. */;
2118 else if (gimple_code (prev) == GIMPLE_LABEL
2119 && (label = gimple_label_label (as_a <glabel *> (prev)))
2120 && (l = find_label_entry (&labels, label)))
2121 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2122 "this statement may fall through");
2123 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2124 /* Try to be clever and don't warn when the statement
2125 can't actually fall through. */
2126 && gimple_stmt_may_fallthru (prev)
2127 && gimple_has_location (prev))
2128 warned_p = warning_at (gimple_location (prev),
2129 OPT_Wimplicit_fallthrough_,
2130 "this statement may fall through");
2131 if (warned_p)
2132 inform (gimple_location (next), "here");
2134 /* Mark this label as processed so as to prevent multiple
2135 warnings in nested switches. */
2136 FALLTHROUGH_LABEL_P (label) = true;
2138 /* So that next warn_implicit_fallthrough_r will start looking for
2139 a new sequence starting with this label. */
2140 gsi_prev (gsi_p);
2143 break;
2144 default:
2145 break;
2147 return NULL_TREE;
2150 /* Warn when a switch case falls through. */
2152 static void
2153 maybe_warn_implicit_fallthrough (gimple_seq seq)
2155 if (!warn_implicit_fallthrough)
2156 return;
2158 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2159 if (!(lang_GNU_C ()
2160 || lang_GNU_CXX ()
2161 || lang_GNU_OBJC ()))
2162 return;
2164 struct walk_stmt_info wi;
2165 memset (&wi, 0, sizeof (wi));
2166 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2169 /* Callback for walk_gimple_seq. */
2171 static tree
2172 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2173 struct walk_stmt_info *)
2175 gimple *stmt = gsi_stmt (*gsi_p);
2177 *handled_ops_p = true;
2178 switch (gimple_code (stmt))
2180 case GIMPLE_TRY:
2181 case GIMPLE_BIND:
2182 case GIMPLE_CATCH:
2183 case GIMPLE_EH_FILTER:
2184 case GIMPLE_TRANSACTION:
2185 /* Walk the sub-statements. */
2186 *handled_ops_p = false;
2187 break;
2188 case GIMPLE_CALL:
2189 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2191 gsi_remove (gsi_p, true);
2192 if (gsi_end_p (*gsi_p))
2193 return integer_zero_node;
2195 bool found = false;
2196 location_t loc = gimple_location (stmt);
2198 gimple_stmt_iterator gsi2 = *gsi_p;
2199 stmt = gsi_stmt (gsi2);
2200 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2202 /* Go on until the artificial label. */
2203 tree goto_dest = gimple_goto_dest (stmt);
2204 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2206 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2207 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2208 == goto_dest)
2209 break;
2212 /* Not found? Stop. */
2213 if (gsi_end_p (gsi2))
2214 break;
2216 /* Look one past it. */
2217 gsi_next (&gsi2);
2220 /* We're looking for a case label or default label here. */
2221 while (!gsi_end_p (gsi2))
2223 stmt = gsi_stmt (gsi2);
2224 if (gimple_code (stmt) == GIMPLE_LABEL)
2226 tree label = gimple_label_label (as_a <glabel *> (stmt));
2227 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2229 found = true;
2230 break;
2233 else
2234 /* Something other than a label. That's not expected. */
2235 break;
2236 gsi_next (&gsi2);
2238 if (!found)
2239 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2240 "a case label or default label");
2242 break;
2243 default:
2244 break;
2246 return NULL_TREE;
2249 /* Expand all FALLTHROUGH () calls in SEQ. */
2251 static void
2252 expand_FALLTHROUGH (gimple_seq *seq_p)
2254 struct walk_stmt_info wi;
2255 memset (&wi, 0, sizeof (wi));
2256 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2260 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2261 branch to. */
2263 static enum gimplify_status
2264 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2266 tree switch_expr = *expr_p;
2267 gimple_seq switch_body_seq = NULL;
2268 enum gimplify_status ret;
2269 tree index_type = TREE_TYPE (switch_expr);
2270 if (index_type == NULL_TREE)
2271 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2273 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2274 fb_rvalue);
2275 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2276 return ret;
2278 if (SWITCH_BODY (switch_expr))
2280 vec<tree> labels;
2281 vec<tree> saved_labels;
2282 hash_set<tree> *saved_live_switch_vars = NULL;
2283 tree default_case = NULL_TREE;
2284 gswitch *switch_stmt;
2286 /* If someone can be bothered to fill in the labels, they can
2287 be bothered to null out the body too. */
2288 gcc_assert (!SWITCH_LABELS (switch_expr));
2290 /* Save old labels, get new ones from body, then restore the old
2291 labels. Save all the things from the switch body to append after. */
2292 saved_labels = gimplify_ctxp->case_labels;
2293 gimplify_ctxp->case_labels.create (8);
2295 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2296 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2297 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2298 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2299 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2300 else
2301 gimplify_ctxp->live_switch_vars = NULL;
2303 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2304 gimplify_ctxp->in_switch_expr = true;
2306 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2308 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2309 maybe_warn_switch_unreachable (switch_body_seq);
2310 maybe_warn_implicit_fallthrough (switch_body_seq);
2311 /* Only do this for the outermost GIMPLE_SWITCH. */
2312 if (!gimplify_ctxp->in_switch_expr)
2313 expand_FALLTHROUGH (&switch_body_seq);
2315 labels = gimplify_ctxp->case_labels;
2316 gimplify_ctxp->case_labels = saved_labels;
2318 if (gimplify_ctxp->live_switch_vars)
2320 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2321 delete gimplify_ctxp->live_switch_vars;
2323 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2325 preprocess_case_label_vec_for_gimple (labels, index_type,
2326 &default_case);
2328 if (!default_case)
2330 glabel *new_default;
2332 default_case
2333 = build_case_label (NULL_TREE, NULL_TREE,
2334 create_artificial_label (UNKNOWN_LOCATION));
2335 new_default = gimple_build_label (CASE_LABEL (default_case));
2336 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2339 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2340 default_case, labels);
2341 gimplify_seq_add_stmt (pre_p, switch_stmt);
2342 gimplify_seq_add_seq (pre_p, switch_body_seq);
2343 labels.release ();
2345 else
2346 gcc_assert (SWITCH_LABELS (switch_expr));
2348 return GS_ALL_DONE;
2351 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2353 static enum gimplify_status
2354 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2356 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2357 == current_function_decl);
2359 tree label = LABEL_EXPR_LABEL (*expr_p);
2360 glabel *label_stmt = gimple_build_label (label);
2361 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2362 gimplify_seq_add_stmt (pre_p, label_stmt);
2364 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2365 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2366 NOT_TAKEN));
2367 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2368 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2369 TAKEN));
2371 return GS_ALL_DONE;
2374 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2376 static enum gimplify_status
2377 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2379 struct gimplify_ctx *ctxp;
2380 glabel *label_stmt;
2382 /* Invalid programs can play Duff's Device type games with, for example,
2383 #pragma omp parallel. At least in the C front end, we don't
2384 detect such invalid branches until after gimplification, in the
2385 diagnose_omp_blocks pass. */
2386 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2387 if (ctxp->case_labels.exists ())
2388 break;
2390 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2391 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2392 ctxp->case_labels.safe_push (*expr_p);
2393 gimplify_seq_add_stmt (pre_p, label_stmt);
2395 return GS_ALL_DONE;
2398 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2399 if necessary. */
2401 tree
2402 build_and_jump (tree *label_p)
2404 if (label_p == NULL)
2405 /* If there's nowhere to jump, just fall through. */
2406 return NULL_TREE;
2408 if (*label_p == NULL_TREE)
2410 tree label = create_artificial_label (UNKNOWN_LOCATION);
2411 *label_p = label;
2414 return build1 (GOTO_EXPR, void_type_node, *label_p);
2417 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2418 This also involves building a label to jump to and communicating it to
2419 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2421 static enum gimplify_status
2422 gimplify_exit_expr (tree *expr_p)
2424 tree cond = TREE_OPERAND (*expr_p, 0);
2425 tree expr;
2427 expr = build_and_jump (&gimplify_ctxp->exit_label);
2428 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2429 *expr_p = expr;
2431 return GS_OK;
2434 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2435 different from its canonical type, wrap the whole thing inside a
2436 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2437 type.
2439 The canonical type of a COMPONENT_REF is the type of the field being
2440 referenced--unless the field is a bit-field which can be read directly
2441 in a smaller mode, in which case the canonical type is the
2442 sign-appropriate type corresponding to that mode. */
2444 static void
2445 canonicalize_component_ref (tree *expr_p)
2447 tree expr = *expr_p;
2448 tree type;
2450 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2452 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2453 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2454 else
2455 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2457 /* One could argue that all the stuff below is not necessary for
2458 the non-bitfield case and declare it a FE error if type
2459 adjustment would be needed. */
2460 if (TREE_TYPE (expr) != type)
2462 #ifdef ENABLE_TYPES_CHECKING
2463 tree old_type = TREE_TYPE (expr);
2464 #endif
2465 int type_quals;
2467 /* We need to preserve qualifiers and propagate them from
2468 operand 0. */
2469 type_quals = TYPE_QUALS (type)
2470 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2471 if (TYPE_QUALS (type) != type_quals)
2472 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2474 /* Set the type of the COMPONENT_REF to the underlying type. */
2475 TREE_TYPE (expr) = type;
2477 #ifdef ENABLE_TYPES_CHECKING
2478 /* It is now a FE error, if the conversion from the canonical
2479 type to the original expression type is not useless. */
2480 gcc_assert (useless_type_conversion_p (old_type, type));
2481 #endif
2485 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2486 to foo, embed that change in the ADDR_EXPR by converting
2487 T array[U];
2488 (T *)&array
2490 &array[L]
2491 where L is the lower bound. For simplicity, only do this for constant
2492 lower bound.
2493 The constraint is that the type of &array[L] is trivially convertible
2494 to T *. */
2496 static void
2497 canonicalize_addr_expr (tree *expr_p)
2499 tree expr = *expr_p;
2500 tree addr_expr = TREE_OPERAND (expr, 0);
2501 tree datype, ddatype, pddatype;
2503 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2504 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2505 || TREE_CODE (addr_expr) != ADDR_EXPR)
2506 return;
2508 /* The addr_expr type should be a pointer to an array. */
2509 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2510 if (TREE_CODE (datype) != ARRAY_TYPE)
2511 return;
2513 /* The pointer to element type shall be trivially convertible to
2514 the expression pointer type. */
2515 ddatype = TREE_TYPE (datype);
2516 pddatype = build_pointer_type (ddatype);
2517 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2518 pddatype))
2519 return;
2521 /* The lower bound and element sizes must be constant. */
2522 if (!TYPE_SIZE_UNIT (ddatype)
2523 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2524 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2525 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2526 return;
2528 /* All checks succeeded. Build a new node to merge the cast. */
2529 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2530 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2531 NULL_TREE, NULL_TREE);
2532 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2534 /* We can have stripped a required restrict qualifier above. */
2535 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2536 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2539 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2540 underneath as appropriate. */
2542 static enum gimplify_status
2543 gimplify_conversion (tree *expr_p)
2545 location_t loc = EXPR_LOCATION (*expr_p);
2546 gcc_assert (CONVERT_EXPR_P (*expr_p));
2548 /* Then strip away all but the outermost conversion. */
2549 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2551 /* And remove the outermost conversion if it's useless. */
2552 if (tree_ssa_useless_type_conversion (*expr_p))
2553 *expr_p = TREE_OPERAND (*expr_p, 0);
2555 /* If we still have a conversion at the toplevel,
2556 then canonicalize some constructs. */
2557 if (CONVERT_EXPR_P (*expr_p))
2559 tree sub = TREE_OPERAND (*expr_p, 0);
2561 /* If a NOP conversion is changing the type of a COMPONENT_REF
2562 expression, then canonicalize its type now in order to expose more
2563 redundant conversions. */
2564 if (TREE_CODE (sub) == COMPONENT_REF)
2565 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2567 /* If a NOP conversion is changing a pointer to array of foo
2568 to a pointer to foo, embed that change in the ADDR_EXPR. */
2569 else if (TREE_CODE (sub) == ADDR_EXPR)
2570 canonicalize_addr_expr (expr_p);
2573 /* If we have a conversion to a non-register type force the
2574 use of a VIEW_CONVERT_EXPR instead. */
2575 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2576 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2577 TREE_OPERAND (*expr_p, 0));
2579 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2580 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2581 TREE_SET_CODE (*expr_p, NOP_EXPR);
2583 return GS_OK;
2586 /* Nonlocal VLAs seen in the current function. */
2587 static hash_set<tree> *nonlocal_vlas;
2589 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2590 static tree nonlocal_vla_vars;
2592 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2593 DECL_VALUE_EXPR, and it's worth re-examining things. */
2595 static enum gimplify_status
2596 gimplify_var_or_parm_decl (tree *expr_p)
2598 tree decl = *expr_p;
2600 /* ??? If this is a local variable, and it has not been seen in any
2601 outer BIND_EXPR, then it's probably the result of a duplicate
2602 declaration, for which we've already issued an error. It would
2603 be really nice if the front end wouldn't leak these at all.
2604 Currently the only known culprit is C++ destructors, as seen
2605 in g++.old-deja/g++.jason/binding.C. */
2606 if (VAR_P (decl)
2607 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2608 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2609 && decl_function_context (decl) == current_function_decl)
2611 gcc_assert (seen_error ());
2612 return GS_ERROR;
2615 /* When within an OMP context, notice uses of variables. */
2616 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2617 return GS_ALL_DONE;
2619 /* If the decl is an alias for another expression, substitute it now. */
2620 if (DECL_HAS_VALUE_EXPR_P (decl))
2622 tree value_expr = DECL_VALUE_EXPR (decl);
2624 /* For referenced nonlocal VLAs add a decl for debugging purposes
2625 to the current function. */
2626 if (VAR_P (decl)
2627 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2628 && nonlocal_vlas != NULL
2629 && TREE_CODE (value_expr) == INDIRECT_REF
2630 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2631 && decl_function_context (decl) != current_function_decl)
2633 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2634 while (ctx
2635 && (ctx->region_type == ORT_WORKSHARE
2636 || ctx->region_type == ORT_SIMD
2637 || ctx->region_type == ORT_ACC))
2638 ctx = ctx->outer_context;
2639 if (!ctx && !nonlocal_vlas->add (decl))
2641 tree copy = copy_node (decl);
2643 lang_hooks.dup_lang_specific_decl (copy);
2644 SET_DECL_RTL (copy, 0);
2645 TREE_USED (copy) = 1;
2646 DECL_CHAIN (copy) = nonlocal_vla_vars;
2647 nonlocal_vla_vars = copy;
2648 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2649 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2653 *expr_p = unshare_expr (value_expr);
2654 return GS_OK;
2657 return GS_ALL_DONE;
2660 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2662 static void
2663 recalculate_side_effects (tree t)
2665 enum tree_code code = TREE_CODE (t);
2666 int len = TREE_OPERAND_LENGTH (t);
2667 int i;
2669 switch (TREE_CODE_CLASS (code))
2671 case tcc_expression:
2672 switch (code)
2674 case INIT_EXPR:
2675 case MODIFY_EXPR:
2676 case VA_ARG_EXPR:
2677 case PREDECREMENT_EXPR:
2678 case PREINCREMENT_EXPR:
2679 case POSTDECREMENT_EXPR:
2680 case POSTINCREMENT_EXPR:
2681 /* All of these have side-effects, no matter what their
2682 operands are. */
2683 return;
2685 default:
2686 break;
2688 /* Fall through. */
2690 case tcc_comparison: /* a comparison expression */
2691 case tcc_unary: /* a unary arithmetic expression */
2692 case tcc_binary: /* a binary arithmetic expression */
2693 case tcc_reference: /* a reference */
2694 case tcc_vl_exp: /* a function call */
2695 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2696 for (i = 0; i < len; ++i)
2698 tree op = TREE_OPERAND (t, i);
2699 if (op && TREE_SIDE_EFFECTS (op))
2700 TREE_SIDE_EFFECTS (t) = 1;
2702 break;
2704 case tcc_constant:
2705 /* No side-effects. */
2706 return;
2708 default:
2709 gcc_unreachable ();
2713 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2714 node *EXPR_P.
2716 compound_lval
2717 : min_lval '[' val ']'
2718 | min_lval '.' ID
2719 | compound_lval '[' val ']'
2720 | compound_lval '.' ID
2722 This is not part of the original SIMPLE definition, which separates
2723 array and member references, but it seems reasonable to handle them
2724 together. Also, this way we don't run into problems with union
2725 aliasing; gcc requires that for accesses through a union to alias, the
2726 union reference must be explicit, which was not always the case when we
2727 were splitting up array and member refs.
2729 PRE_P points to the sequence where side effects that must happen before
2730 *EXPR_P should be stored.
2732 POST_P points to the sequence where side effects that must happen after
2733 *EXPR_P should be stored. */
2735 static enum gimplify_status
2736 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2737 fallback_t fallback)
2739 tree *p;
2740 enum gimplify_status ret = GS_ALL_DONE, tret;
2741 int i;
2742 location_t loc = EXPR_LOCATION (*expr_p);
2743 tree expr = *expr_p;
2745 /* Create a stack of the subexpressions so later we can walk them in
2746 order from inner to outer. */
2747 auto_vec<tree, 10> expr_stack;
2749 /* We can handle anything that get_inner_reference can deal with. */
2750 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2752 restart:
2753 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2754 if (TREE_CODE (*p) == INDIRECT_REF)
2755 *p = fold_indirect_ref_loc (loc, *p);
2757 if (handled_component_p (*p))
2759 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2760 additional COMPONENT_REFs. */
2761 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2762 && gimplify_var_or_parm_decl (p) == GS_OK)
2763 goto restart;
2764 else
2765 break;
2767 expr_stack.safe_push (*p);
2770 gcc_assert (expr_stack.length ());
2772 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2773 walked through and P points to the innermost expression.
2775 Java requires that we elaborated nodes in source order. That
2776 means we must gimplify the inner expression followed by each of
2777 the indices, in order. But we can't gimplify the inner
2778 expression until we deal with any variable bounds, sizes, or
2779 positions in order to deal with PLACEHOLDER_EXPRs.
2781 So we do this in three steps. First we deal with the annotations
2782 for any variables in the components, then we gimplify the base,
2783 then we gimplify any indices, from left to right. */
2784 for (i = expr_stack.length () - 1; i >= 0; i--)
2786 tree t = expr_stack[i];
2788 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2790 /* Gimplify the low bound and element type size and put them into
2791 the ARRAY_REF. If these values are set, they have already been
2792 gimplified. */
2793 if (TREE_OPERAND (t, 2) == NULL_TREE)
2795 tree low = unshare_expr (array_ref_low_bound (t));
2796 if (!is_gimple_min_invariant (low))
2798 TREE_OPERAND (t, 2) = low;
2799 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2800 post_p, is_gimple_reg,
2801 fb_rvalue);
2802 ret = MIN (ret, tret);
2805 else
2807 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2808 is_gimple_reg, fb_rvalue);
2809 ret = MIN (ret, tret);
2812 if (TREE_OPERAND (t, 3) == NULL_TREE)
2814 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2815 tree elmt_size = unshare_expr (array_ref_element_size (t));
2816 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2818 /* Divide the element size by the alignment of the element
2819 type (above). */
2820 elmt_size
2821 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2823 if (!is_gimple_min_invariant (elmt_size))
2825 TREE_OPERAND (t, 3) = elmt_size;
2826 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2827 post_p, is_gimple_reg,
2828 fb_rvalue);
2829 ret = MIN (ret, tret);
2832 else
2834 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2835 is_gimple_reg, fb_rvalue);
2836 ret = MIN (ret, tret);
2839 else if (TREE_CODE (t) == COMPONENT_REF)
2841 /* Set the field offset into T and gimplify it. */
2842 if (TREE_OPERAND (t, 2) == NULL_TREE)
2844 tree offset = unshare_expr (component_ref_field_offset (t));
2845 tree field = TREE_OPERAND (t, 1);
2846 tree factor
2847 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2849 /* Divide the offset by its alignment. */
2850 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2852 if (!is_gimple_min_invariant (offset))
2854 TREE_OPERAND (t, 2) = offset;
2855 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2856 post_p, is_gimple_reg,
2857 fb_rvalue);
2858 ret = MIN (ret, tret);
2861 else
2863 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2864 is_gimple_reg, fb_rvalue);
2865 ret = MIN (ret, tret);
2870 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2871 so as to match the min_lval predicate. Failure to do so may result
2872 in the creation of large aggregate temporaries. */
2873 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2874 fallback | fb_lvalue);
2875 ret = MIN (ret, tret);
2877 /* And finally, the indices and operands of ARRAY_REF. During this
2878 loop we also remove any useless conversions. */
2879 for (; expr_stack.length () > 0; )
2881 tree t = expr_stack.pop ();
2883 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2885 /* Gimplify the dimension. */
2886 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2888 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2889 is_gimple_val, fb_rvalue);
2890 ret = MIN (ret, tret);
2894 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2896 /* The innermost expression P may have originally had
2897 TREE_SIDE_EFFECTS set which would have caused all the outer
2898 expressions in *EXPR_P leading to P to also have had
2899 TREE_SIDE_EFFECTS set. */
2900 recalculate_side_effects (t);
2903 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2904 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2906 canonicalize_component_ref (expr_p);
2909 expr_stack.release ();
2911 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2913 return ret;
2916 /* Gimplify the self modifying expression pointed to by EXPR_P
2917 (++, --, +=, -=).
2919 PRE_P points to the list where side effects that must happen before
2920 *EXPR_P should be stored.
2922 POST_P points to the list where side effects that must happen after
2923 *EXPR_P should be stored.
2925 WANT_VALUE is nonzero iff we want to use the value of this expression
2926 in another expression.
2928 ARITH_TYPE is the type the computation should be performed in. */
2930 enum gimplify_status
2931 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2932 bool want_value, tree arith_type)
2934 enum tree_code code;
2935 tree lhs, lvalue, rhs, t1;
2936 gimple_seq post = NULL, *orig_post_p = post_p;
2937 bool postfix;
2938 enum tree_code arith_code;
2939 enum gimplify_status ret;
2940 location_t loc = EXPR_LOCATION (*expr_p);
2942 code = TREE_CODE (*expr_p);
2944 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2945 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2947 /* Prefix or postfix? */
2948 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2949 /* Faster to treat as prefix if result is not used. */
2950 postfix = want_value;
2951 else
2952 postfix = false;
2954 /* For postfix, make sure the inner expression's post side effects
2955 are executed after side effects from this expression. */
2956 if (postfix)
2957 post_p = &post;
2959 /* Add or subtract? */
2960 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2961 arith_code = PLUS_EXPR;
2962 else
2963 arith_code = MINUS_EXPR;
2965 /* Gimplify the LHS into a GIMPLE lvalue. */
2966 lvalue = TREE_OPERAND (*expr_p, 0);
2967 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2968 if (ret == GS_ERROR)
2969 return ret;
2971 /* Extract the operands to the arithmetic operation. */
2972 lhs = lvalue;
2973 rhs = TREE_OPERAND (*expr_p, 1);
2975 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2976 that as the result value and in the postqueue operation. */
2977 if (postfix)
2979 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2980 if (ret == GS_ERROR)
2981 return ret;
2983 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2986 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2987 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2989 rhs = convert_to_ptrofftype_loc (loc, rhs);
2990 if (arith_code == MINUS_EXPR)
2991 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2992 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2994 else
2995 t1 = fold_convert (TREE_TYPE (*expr_p),
2996 fold_build2 (arith_code, arith_type,
2997 fold_convert (arith_type, lhs),
2998 fold_convert (arith_type, rhs)));
3000 if (postfix)
3002 gimplify_assign (lvalue, t1, pre_p);
3003 gimplify_seq_add_seq (orig_post_p, post);
3004 *expr_p = lhs;
3005 return GS_ALL_DONE;
3007 else
3009 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3010 return GS_OK;
3014 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3016 static void
3017 maybe_with_size_expr (tree *expr_p)
3019 tree expr = *expr_p;
3020 tree type = TREE_TYPE (expr);
3021 tree size;
3023 /* If we've already wrapped this or the type is error_mark_node, we can't do
3024 anything. */
3025 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3026 || type == error_mark_node)
3027 return;
3029 /* If the size isn't known or is a constant, we have nothing to do. */
3030 size = TYPE_SIZE_UNIT (type);
3031 if (!size || TREE_CODE (size) == INTEGER_CST)
3032 return;
3034 /* Otherwise, make a WITH_SIZE_EXPR. */
3035 size = unshare_expr (size);
3036 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3037 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3040 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3041 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3042 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3043 gimplified to an SSA name. */
3045 enum gimplify_status
3046 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3047 bool allow_ssa)
3049 bool (*test) (tree);
3050 fallback_t fb;
3052 /* In general, we allow lvalues for function arguments to avoid
3053 extra overhead of copying large aggregates out of even larger
3054 aggregates into temporaries only to copy the temporaries to
3055 the argument list. Make optimizers happy by pulling out to
3056 temporaries those types that fit in registers. */
3057 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3058 test = is_gimple_val, fb = fb_rvalue;
3059 else
3061 test = is_gimple_lvalue, fb = fb_either;
3062 /* Also strip a TARGET_EXPR that would force an extra copy. */
3063 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3065 tree init = TARGET_EXPR_INITIAL (*arg_p);
3066 if (init
3067 && !VOID_TYPE_P (TREE_TYPE (init)))
3068 *arg_p = init;
3072 /* If this is a variable sized type, we must remember the size. */
3073 maybe_with_size_expr (arg_p);
3075 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3076 /* Make sure arguments have the same location as the function call
3077 itself. */
3078 protected_set_expr_location (*arg_p, call_location);
3080 /* There is a sequence point before a function call. Side effects in
3081 the argument list must occur before the actual call. So, when
3082 gimplifying arguments, force gimplify_expr to use an internal
3083 post queue which is then appended to the end of PRE_P. */
3084 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3087 /* Don't fold inside offloading or taskreg regions: it can break code by
3088 adding decl references that weren't in the source. We'll do it during
3089 omplower pass instead. */
3091 static bool
3092 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3094 struct gimplify_omp_ctx *ctx;
3095 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3096 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3097 return false;
3098 return fold_stmt (gsi);
3101 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3102 with the pointer to the proper cilk frame. */
3103 static void
3104 gimplify_cilk_detach (gimple_seq *pre_p)
3106 tree frame = cfun->cilk_frame_decl;
3107 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3108 frame);
3109 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3110 ptrf);
3111 gimplify_seq_add_stmt(pre_p, detach);
3114 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3115 WANT_VALUE is true if the result of the call is desired. */
3117 static enum gimplify_status
3118 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3120 tree fndecl, parms, p, fnptrtype;
3121 enum gimplify_status ret;
3122 int i, nargs;
3123 gcall *call;
3124 bool builtin_va_start_p = false;
3125 location_t loc = EXPR_LOCATION (*expr_p);
3127 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3129 /* For reliable diagnostics during inlining, it is necessary that
3130 every call_expr be annotated with file and line. */
3131 if (! EXPR_HAS_LOCATION (*expr_p))
3132 SET_EXPR_LOCATION (*expr_p, input_location);
3134 /* Gimplify internal functions created in the FEs. */
3135 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3137 if (want_value)
3138 return GS_ALL_DONE;
3140 nargs = call_expr_nargs (*expr_p);
3141 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3142 auto_vec<tree> vargs (nargs);
3144 for (i = 0; i < nargs; i++)
3146 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3147 EXPR_LOCATION (*expr_p));
3148 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3151 if (EXPR_CILK_SPAWN (*expr_p))
3152 gimplify_cilk_detach (pre_p);
3153 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
3154 gimplify_seq_add_stmt (pre_p, call);
3155 return GS_ALL_DONE;
3158 /* This may be a call to a builtin function.
3160 Builtin function calls may be transformed into different
3161 (and more efficient) builtin function calls under certain
3162 circumstances. Unfortunately, gimplification can muck things
3163 up enough that the builtin expanders are not aware that certain
3164 transformations are still valid.
3166 So we attempt transformation/gimplification of the call before
3167 we gimplify the CALL_EXPR. At this time we do not manage to
3168 transform all calls in the same manner as the expanders do, but
3169 we do transform most of them. */
3170 fndecl = get_callee_fndecl (*expr_p);
3171 if (fndecl
3172 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3173 switch (DECL_FUNCTION_CODE (fndecl))
3175 case BUILT_IN_ALLOCA:
3176 case BUILT_IN_ALLOCA_WITH_ALIGN:
3177 /* If the call has been built for a variable-sized object, then we
3178 want to restore the stack level when the enclosing BIND_EXPR is
3179 exited to reclaim the allocated space; otherwise, we precisely
3180 need to do the opposite and preserve the latest stack level. */
3181 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3182 gimplify_ctxp->save_stack = true;
3183 else
3184 gimplify_ctxp->keep_stack = true;
3185 break;
3187 case BUILT_IN_VA_START:
3189 builtin_va_start_p = TRUE;
3190 if (call_expr_nargs (*expr_p) < 2)
3192 error ("too few arguments to function %<va_start%>");
3193 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3194 return GS_OK;
3197 if (fold_builtin_next_arg (*expr_p, true))
3199 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3200 return GS_OK;
3202 break;
3205 default:
3208 if (fndecl && DECL_BUILT_IN (fndecl))
3210 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3211 if (new_tree && new_tree != *expr_p)
3213 /* There was a transformation of this call which computes the
3214 same value, but in a more efficient way. Return and try
3215 again. */
3216 *expr_p = new_tree;
3217 return GS_OK;
3221 /* Remember the original function pointer type. */
3222 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3224 /* There is a sequence point before the call, so any side effects in
3225 the calling expression must occur before the actual call. Force
3226 gimplify_expr to use an internal post queue. */
3227 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3228 is_gimple_call_addr, fb_rvalue);
3230 nargs = call_expr_nargs (*expr_p);
3232 /* Get argument types for verification. */
3233 fndecl = get_callee_fndecl (*expr_p);
3234 parms = NULL_TREE;
3235 if (fndecl)
3236 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3237 else
3238 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3240 if (fndecl && DECL_ARGUMENTS (fndecl))
3241 p = DECL_ARGUMENTS (fndecl);
3242 else if (parms)
3243 p = parms;
3244 else
3245 p = NULL_TREE;
3246 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3249 /* If the last argument is __builtin_va_arg_pack () and it is not
3250 passed as a named argument, decrease the number of CALL_EXPR
3251 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3252 if (!p
3253 && i < nargs
3254 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3256 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3257 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3259 if (last_arg_fndecl
3260 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3261 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3262 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3264 tree call = *expr_p;
3266 --nargs;
3267 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3268 CALL_EXPR_FN (call),
3269 nargs, CALL_EXPR_ARGP (call));
3271 /* Copy all CALL_EXPR flags, location and block, except
3272 CALL_EXPR_VA_ARG_PACK flag. */
3273 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3274 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3275 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3276 = CALL_EXPR_RETURN_SLOT_OPT (call);
3277 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3278 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3280 /* Set CALL_EXPR_VA_ARG_PACK. */
3281 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3285 /* If the call returns twice then after building the CFG the call
3286 argument computations will no longer dominate the call because
3287 we add an abnormal incoming edge to the call. So do not use SSA
3288 vars there. */
3289 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3291 /* Gimplify the function arguments. */
3292 if (nargs > 0)
3294 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3295 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3296 PUSH_ARGS_REVERSED ? i-- : i++)
3298 enum gimplify_status t;
3300 /* Avoid gimplifying the second argument to va_start, which needs to
3301 be the plain PARM_DECL. */
3302 if ((i != 1) || !builtin_va_start_p)
3304 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3305 EXPR_LOCATION (*expr_p), ! returns_twice);
3307 if (t == GS_ERROR)
3308 ret = GS_ERROR;
3313 /* Gimplify the static chain. */
3314 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3316 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3317 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3318 else
3320 enum gimplify_status t;
3321 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3322 EXPR_LOCATION (*expr_p), ! returns_twice);
3323 if (t == GS_ERROR)
3324 ret = GS_ERROR;
3328 /* Verify the function result. */
3329 if (want_value && fndecl
3330 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3332 error_at (loc, "using result of function returning %<void%>");
3333 ret = GS_ERROR;
3336 /* Try this again in case gimplification exposed something. */
3337 if (ret != GS_ERROR)
3339 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3341 if (new_tree && new_tree != *expr_p)
3343 /* There was a transformation of this call which computes the
3344 same value, but in a more efficient way. Return and try
3345 again. */
3346 *expr_p = new_tree;
3347 return GS_OK;
3350 else
3352 *expr_p = error_mark_node;
3353 return GS_ERROR;
3356 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3357 decl. This allows us to eliminate redundant or useless
3358 calls to "const" functions. */
3359 if (TREE_CODE (*expr_p) == CALL_EXPR)
3361 int flags = call_expr_flags (*expr_p);
3362 if (flags & (ECF_CONST | ECF_PURE)
3363 /* An infinite loop is considered a side effect. */
3364 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3365 TREE_SIDE_EFFECTS (*expr_p) = 0;
3368 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3369 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3370 form and delegate the creation of a GIMPLE_CALL to
3371 gimplify_modify_expr. This is always possible because when
3372 WANT_VALUE is true, the caller wants the result of this call into
3373 a temporary, which means that we will emit an INIT_EXPR in
3374 internal_get_tmp_var which will then be handled by
3375 gimplify_modify_expr. */
3376 if (!want_value)
3378 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3379 have to do is replicate it as a GIMPLE_CALL tuple. */
3380 gimple_stmt_iterator gsi;
3381 call = gimple_build_call_from_tree (*expr_p);
3382 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
3383 notice_special_calls (call);
3384 if (EXPR_CILK_SPAWN (*expr_p))
3385 gimplify_cilk_detach (pre_p);
3386 gimplify_seq_add_stmt (pre_p, call);
3387 gsi = gsi_last (*pre_p);
3388 maybe_fold_stmt (&gsi);
3389 *expr_p = NULL_TREE;
3391 else
3392 /* Remember the original function type. */
3393 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3394 CALL_EXPR_FN (*expr_p));
3396 return ret;
3399 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3400 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3402 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3403 condition is true or false, respectively. If null, we should generate
3404 our own to skip over the evaluation of this specific expression.
3406 LOCUS is the source location of the COND_EXPR.
3408 This function is the tree equivalent of do_jump.
3410 shortcut_cond_r should only be called by shortcut_cond_expr. */
3412 static tree
3413 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3414 location_t locus)
3416 tree local_label = NULL_TREE;
3417 tree t, expr = NULL;
3419 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3420 retain the shortcut semantics. Just insert the gotos here;
3421 shortcut_cond_expr will append the real blocks later. */
3422 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3424 location_t new_locus;
3426 /* Turn if (a && b) into
3428 if (a); else goto no;
3429 if (b) goto yes; else goto no;
3430 (no:) */
3432 if (false_label_p == NULL)
3433 false_label_p = &local_label;
3435 /* Keep the original source location on the first 'if'. */
3436 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3437 append_to_statement_list (t, &expr);
3439 /* Set the source location of the && on the second 'if'. */
3440 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3441 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3442 new_locus);
3443 append_to_statement_list (t, &expr);
3445 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3447 location_t new_locus;
3449 /* Turn if (a || b) into
3451 if (a) goto yes;
3452 if (b) goto yes; else goto no;
3453 (yes:) */
3455 if (true_label_p == NULL)
3456 true_label_p = &local_label;
3458 /* Keep the original source location on the first 'if'. */
3459 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3460 append_to_statement_list (t, &expr);
3462 /* Set the source location of the || on the second 'if'. */
3463 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3464 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3465 new_locus);
3466 append_to_statement_list (t, &expr);
3468 else if (TREE_CODE (pred) == COND_EXPR
3469 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3470 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3472 location_t new_locus;
3474 /* As long as we're messing with gotos, turn if (a ? b : c) into
3475 if (a)
3476 if (b) goto yes; else goto no;
3477 else
3478 if (c) goto yes; else goto no;
3480 Don't do this if one of the arms has void type, which can happen
3481 in C++ when the arm is throw. */
3483 /* Keep the original source location on the first 'if'. Set the source
3484 location of the ? on the second 'if'. */
3485 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3486 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3487 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3488 false_label_p, locus),
3489 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3490 false_label_p, new_locus));
3492 else
3494 expr = build3 (COND_EXPR, void_type_node, pred,
3495 build_and_jump (true_label_p),
3496 build_and_jump (false_label_p));
3497 SET_EXPR_LOCATION (expr, locus);
3500 if (local_label)
3502 t = build1 (LABEL_EXPR, void_type_node, local_label);
3503 append_to_statement_list (t, &expr);
3506 return expr;
3509 /* Given a conditional expression EXPR with short-circuit boolean
3510 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3511 predicate apart into the equivalent sequence of conditionals. */
3513 static tree
3514 shortcut_cond_expr (tree expr)
3516 tree pred = TREE_OPERAND (expr, 0);
3517 tree then_ = TREE_OPERAND (expr, 1);
3518 tree else_ = TREE_OPERAND (expr, 2);
3519 tree true_label, false_label, end_label, t;
3520 tree *true_label_p;
3521 tree *false_label_p;
3522 bool emit_end, emit_false, jump_over_else;
3523 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3524 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3526 /* First do simple transformations. */
3527 if (!else_se)
3529 /* If there is no 'else', turn
3530 if (a && b) then c
3531 into
3532 if (a) if (b) then c. */
3533 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3535 /* Keep the original source location on the first 'if'. */
3536 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3537 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3538 /* Set the source location of the && on the second 'if'. */
3539 if (EXPR_HAS_LOCATION (pred))
3540 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3541 then_ = shortcut_cond_expr (expr);
3542 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3543 pred = TREE_OPERAND (pred, 0);
3544 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3545 SET_EXPR_LOCATION (expr, locus);
3549 if (!then_se)
3551 /* If there is no 'then', turn
3552 if (a || b); else d
3553 into
3554 if (a); else if (b); else d. */
3555 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3557 /* Keep the original source location on the first 'if'. */
3558 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3559 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3560 /* Set the source location of the || on the second 'if'. */
3561 if (EXPR_HAS_LOCATION (pred))
3562 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3563 else_ = shortcut_cond_expr (expr);
3564 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3565 pred = TREE_OPERAND (pred, 0);
3566 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3567 SET_EXPR_LOCATION (expr, locus);
3571 /* If we're done, great. */
3572 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3573 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3574 return expr;
3576 /* Otherwise we need to mess with gotos. Change
3577 if (a) c; else d;
3579 if (a); else goto no;
3580 c; goto end;
3581 no: d; end:
3582 and recursively gimplify the condition. */
3584 true_label = false_label = end_label = NULL_TREE;
3586 /* If our arms just jump somewhere, hijack those labels so we don't
3587 generate jumps to jumps. */
3589 if (then_
3590 && TREE_CODE (then_) == GOTO_EXPR
3591 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3593 true_label = GOTO_DESTINATION (then_);
3594 then_ = NULL;
3595 then_se = false;
3598 if (else_
3599 && TREE_CODE (else_) == GOTO_EXPR
3600 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3602 false_label = GOTO_DESTINATION (else_);
3603 else_ = NULL;
3604 else_se = false;
3607 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3608 if (true_label)
3609 true_label_p = &true_label;
3610 else
3611 true_label_p = NULL;
3613 /* The 'else' branch also needs a label if it contains interesting code. */
3614 if (false_label || else_se)
3615 false_label_p = &false_label;
3616 else
3617 false_label_p = NULL;
3619 /* If there was nothing else in our arms, just forward the label(s). */
3620 if (!then_se && !else_se)
3621 return shortcut_cond_r (pred, true_label_p, false_label_p,
3622 EXPR_LOC_OR_LOC (expr, input_location));
3624 /* If our last subexpression already has a terminal label, reuse it. */
3625 if (else_se)
3626 t = expr_last (else_);
3627 else if (then_se)
3628 t = expr_last (then_);
3629 else
3630 t = NULL;
3631 if (t && TREE_CODE (t) == LABEL_EXPR)
3632 end_label = LABEL_EXPR_LABEL (t);
3634 /* If we don't care about jumping to the 'else' branch, jump to the end
3635 if the condition is false. */
3636 if (!false_label_p)
3637 false_label_p = &end_label;
3639 /* We only want to emit these labels if we aren't hijacking them. */
3640 emit_end = (end_label == NULL_TREE);
3641 emit_false = (false_label == NULL_TREE);
3643 /* We only emit the jump over the else clause if we have to--if the
3644 then clause may fall through. Otherwise we can wind up with a
3645 useless jump and a useless label at the end of gimplified code,
3646 which will cause us to think that this conditional as a whole
3647 falls through even if it doesn't. If we then inline a function
3648 which ends with such a condition, that can cause us to issue an
3649 inappropriate warning about control reaching the end of a
3650 non-void function. */
3651 jump_over_else = block_may_fallthru (then_);
3653 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3654 EXPR_LOC_OR_LOC (expr, input_location));
3656 expr = NULL;
3657 append_to_statement_list (pred, &expr);
3659 append_to_statement_list (then_, &expr);
3660 if (else_se)
3662 if (jump_over_else)
3664 tree last = expr_last (expr);
3665 t = build_and_jump (&end_label);
3666 if (EXPR_HAS_LOCATION (last))
3667 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3668 append_to_statement_list (t, &expr);
3670 if (emit_false)
3672 t = build1 (LABEL_EXPR, void_type_node, false_label);
3673 append_to_statement_list (t, &expr);
3675 append_to_statement_list (else_, &expr);
3677 if (emit_end && end_label)
3679 t = build1 (LABEL_EXPR, void_type_node, end_label);
3680 append_to_statement_list (t, &expr);
3683 return expr;
3686 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3688 tree
3689 gimple_boolify (tree expr)
3691 tree type = TREE_TYPE (expr);
3692 location_t loc = EXPR_LOCATION (expr);
3694 if (TREE_CODE (expr) == NE_EXPR
3695 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3696 && integer_zerop (TREE_OPERAND (expr, 1)))
3698 tree call = TREE_OPERAND (expr, 0);
3699 tree fn = get_callee_fndecl (call);
3701 /* For __builtin_expect ((long) (x), y) recurse into x as well
3702 if x is truth_value_p. */
3703 if (fn
3704 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3705 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3706 && call_expr_nargs (call) == 2)
3708 tree arg = CALL_EXPR_ARG (call, 0);
3709 if (arg)
3711 if (TREE_CODE (arg) == NOP_EXPR
3712 && TREE_TYPE (arg) == TREE_TYPE (call))
3713 arg = TREE_OPERAND (arg, 0);
3714 if (truth_value_p (TREE_CODE (arg)))
3716 arg = gimple_boolify (arg);
3717 CALL_EXPR_ARG (call, 0)
3718 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3724 switch (TREE_CODE (expr))
3726 case TRUTH_AND_EXPR:
3727 case TRUTH_OR_EXPR:
3728 case TRUTH_XOR_EXPR:
3729 case TRUTH_ANDIF_EXPR:
3730 case TRUTH_ORIF_EXPR:
3731 /* Also boolify the arguments of truth exprs. */
3732 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3733 /* FALLTHRU */
3735 case TRUTH_NOT_EXPR:
3736 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3738 /* These expressions always produce boolean results. */
3739 if (TREE_CODE (type) != BOOLEAN_TYPE)
3740 TREE_TYPE (expr) = boolean_type_node;
3741 return expr;
3743 case ANNOTATE_EXPR:
3744 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3746 case annot_expr_ivdep_kind:
3747 case annot_expr_no_vector_kind:
3748 case annot_expr_vector_kind:
3749 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3750 if (TREE_CODE (type) != BOOLEAN_TYPE)
3751 TREE_TYPE (expr) = boolean_type_node;
3752 return expr;
3753 default:
3754 gcc_unreachable ();
3757 default:
3758 if (COMPARISON_CLASS_P (expr))
3760 /* There expressions always prduce boolean results. */
3761 if (TREE_CODE (type) != BOOLEAN_TYPE)
3762 TREE_TYPE (expr) = boolean_type_node;
3763 return expr;
3765 /* Other expressions that get here must have boolean values, but
3766 might need to be converted to the appropriate mode. */
3767 if (TREE_CODE (type) == BOOLEAN_TYPE)
3768 return expr;
3769 return fold_convert_loc (loc, boolean_type_node, expr);
3773 /* Given a conditional expression *EXPR_P without side effects, gimplify
3774 its operands. New statements are inserted to PRE_P. */
3776 static enum gimplify_status
3777 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3779 tree expr = *expr_p, cond;
3780 enum gimplify_status ret, tret;
3781 enum tree_code code;
3783 cond = gimple_boolify (COND_EXPR_COND (expr));
3785 /* We need to handle && and || specially, as their gimplification
3786 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3787 code = TREE_CODE (cond);
3788 if (code == TRUTH_ANDIF_EXPR)
3789 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3790 else if (code == TRUTH_ORIF_EXPR)
3791 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3792 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3793 COND_EXPR_COND (*expr_p) = cond;
3795 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3796 is_gimple_val, fb_rvalue);
3797 ret = MIN (ret, tret);
3798 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3799 is_gimple_val, fb_rvalue);
3801 return MIN (ret, tret);
3804 /* Return true if evaluating EXPR could trap.
3805 EXPR is GENERIC, while tree_could_trap_p can be called
3806 only on GIMPLE. */
3808 static bool
3809 generic_expr_could_trap_p (tree expr)
3811 unsigned i, n;
3813 if (!expr || is_gimple_val (expr))
3814 return false;
3816 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3817 return true;
3819 n = TREE_OPERAND_LENGTH (expr);
3820 for (i = 0; i < n; i++)
3821 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3822 return true;
3824 return false;
3827 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3828 into
3830 if (p) if (p)
3831 t1 = a; a;
3832 else or else
3833 t1 = b; b;
3836 The second form is used when *EXPR_P is of type void.
3838 PRE_P points to the list where side effects that must happen before
3839 *EXPR_P should be stored. */
3841 static enum gimplify_status
3842 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3844 tree expr = *expr_p;
3845 tree type = TREE_TYPE (expr);
3846 location_t loc = EXPR_LOCATION (expr);
3847 tree tmp, arm1, arm2;
3848 enum gimplify_status ret;
3849 tree label_true, label_false, label_cont;
3850 bool have_then_clause_p, have_else_clause_p;
3851 gcond *cond_stmt;
3852 enum tree_code pred_code;
3853 gimple_seq seq = NULL;
3855 /* If this COND_EXPR has a value, copy the values into a temporary within
3856 the arms. */
3857 if (!VOID_TYPE_P (type))
3859 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3860 tree result;
3862 /* If either an rvalue is ok or we do not require an lvalue, create the
3863 temporary. But we cannot do that if the type is addressable. */
3864 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3865 && !TREE_ADDRESSABLE (type))
3867 if (gimplify_ctxp->allow_rhs_cond_expr
3868 /* If either branch has side effects or could trap, it can't be
3869 evaluated unconditionally. */
3870 && !TREE_SIDE_EFFECTS (then_)
3871 && !generic_expr_could_trap_p (then_)
3872 && !TREE_SIDE_EFFECTS (else_)
3873 && !generic_expr_could_trap_p (else_))
3874 return gimplify_pure_cond_expr (expr_p, pre_p);
3876 tmp = create_tmp_var (type, "iftmp");
3877 result = tmp;
3880 /* Otherwise, only create and copy references to the values. */
3881 else
3883 type = build_pointer_type (type);
3885 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3886 then_ = build_fold_addr_expr_loc (loc, then_);
3888 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3889 else_ = build_fold_addr_expr_loc (loc, else_);
3891 expr
3892 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3894 tmp = create_tmp_var (type, "iftmp");
3895 result = build_simple_mem_ref_loc (loc, tmp);
3898 /* Build the new then clause, `tmp = then_;'. But don't build the
3899 assignment if the value is void; in C++ it can be if it's a throw. */
3900 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3901 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3903 /* Similarly, build the new else clause, `tmp = else_;'. */
3904 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3905 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3907 TREE_TYPE (expr) = void_type_node;
3908 recalculate_side_effects (expr);
3910 /* Move the COND_EXPR to the prequeue. */
3911 gimplify_stmt (&expr, pre_p);
3913 *expr_p = result;
3914 return GS_ALL_DONE;
3917 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3918 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3919 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3920 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3922 /* Make sure the condition has BOOLEAN_TYPE. */
3923 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3925 /* Break apart && and || conditions. */
3926 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3927 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3929 expr = shortcut_cond_expr (expr);
3931 if (expr != *expr_p)
3933 *expr_p = expr;
3935 /* We can't rely on gimplify_expr to re-gimplify the expanded
3936 form properly, as cleanups might cause the target labels to be
3937 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3938 set up a conditional context. */
3939 gimple_push_condition ();
3940 gimplify_stmt (expr_p, &seq);
3941 gimple_pop_condition (pre_p);
3942 gimple_seq_add_seq (pre_p, seq);
3944 return GS_ALL_DONE;
3948 /* Now do the normal gimplification. */
3950 /* Gimplify condition. */
3951 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3952 fb_rvalue);
3953 if (ret == GS_ERROR)
3954 return GS_ERROR;
3955 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3957 gimple_push_condition ();
3959 have_then_clause_p = have_else_clause_p = false;
3960 if (TREE_OPERAND (expr, 1) != NULL
3961 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3962 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3963 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3964 == current_function_decl)
3965 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3966 have different locations, otherwise we end up with incorrect
3967 location information on the branches. */
3968 && (optimize
3969 || !EXPR_HAS_LOCATION (expr)
3970 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3971 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3973 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3974 have_then_clause_p = true;
3976 else
3977 label_true = create_artificial_label (UNKNOWN_LOCATION);
3978 if (TREE_OPERAND (expr, 2) != NULL
3979 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3980 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3981 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3982 == current_function_decl)
3983 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3984 have different locations, otherwise we end up with incorrect
3985 location information on the branches. */
3986 && (optimize
3987 || !EXPR_HAS_LOCATION (expr)
3988 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3989 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3991 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3992 have_else_clause_p = true;
3994 else
3995 label_false = create_artificial_label (UNKNOWN_LOCATION);
3997 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3998 &arm2);
3999 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4000 label_false);
4001 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4002 gimplify_seq_add_stmt (&seq, cond_stmt);
4003 gimple_stmt_iterator gsi = gsi_last (seq);
4004 maybe_fold_stmt (&gsi);
4006 label_cont = NULL_TREE;
4007 if (!have_then_clause_p)
4009 /* For if (...) {} else { code; } put label_true after
4010 the else block. */
4011 if (TREE_OPERAND (expr, 1) == NULL_TREE
4012 && !have_else_clause_p
4013 && TREE_OPERAND (expr, 2) != NULL_TREE)
4014 label_cont = label_true;
4015 else
4017 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4018 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4019 /* For if (...) { code; } else {} or
4020 if (...) { code; } else goto label; or
4021 if (...) { code; return; } else { ... }
4022 label_cont isn't needed. */
4023 if (!have_else_clause_p
4024 && TREE_OPERAND (expr, 2) != NULL_TREE
4025 && gimple_seq_may_fallthru (seq))
4027 gimple *g;
4028 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4030 g = gimple_build_goto (label_cont);
4032 /* GIMPLE_COND's are very low level; they have embedded
4033 gotos. This particular embedded goto should not be marked
4034 with the location of the original COND_EXPR, as it would
4035 correspond to the COND_EXPR's condition, not the ELSE or the
4036 THEN arms. To avoid marking it with the wrong location, flag
4037 it as "no location". */
4038 gimple_set_do_not_emit_location (g);
4040 gimplify_seq_add_stmt (&seq, g);
4044 if (!have_else_clause_p)
4046 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4047 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4049 if (label_cont)
4050 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4052 gimple_pop_condition (pre_p);
4053 gimple_seq_add_seq (pre_p, seq);
4055 if (ret == GS_ERROR)
4056 ; /* Do nothing. */
4057 else if (have_then_clause_p || have_else_clause_p)
4058 ret = GS_ALL_DONE;
4059 else
4061 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4062 expr = TREE_OPERAND (expr, 0);
4063 gimplify_stmt (&expr, pre_p);
4066 *expr_p = NULL;
4067 return ret;
4070 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4071 to be marked addressable.
4073 We cannot rely on such an expression being directly markable if a temporary
4074 has been created by the gimplification. In this case, we create another
4075 temporary and initialize it with a copy, which will become a store after we
4076 mark it addressable. This can happen if the front-end passed us something
4077 that it could not mark addressable yet, like a Fortran pass-by-reference
4078 parameter (int) floatvar. */
4080 static void
4081 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4083 while (handled_component_p (*expr_p))
4084 expr_p = &TREE_OPERAND (*expr_p, 0);
4085 if (is_gimple_reg (*expr_p))
4087 /* Do not allow an SSA name as the temporary. */
4088 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4089 DECL_GIMPLE_REG_P (var) = 0;
4090 *expr_p = var;
4094 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4095 a call to __builtin_memcpy. */
4097 static enum gimplify_status
4098 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4099 gimple_seq *seq_p)
4101 tree t, to, to_ptr, from, from_ptr;
4102 gcall *gs;
4103 location_t loc = EXPR_LOCATION (*expr_p);
4105 to = TREE_OPERAND (*expr_p, 0);
4106 from = TREE_OPERAND (*expr_p, 1);
4108 /* Mark the RHS addressable. Beware that it may not be possible to do so
4109 directly if a temporary has been created by the gimplification. */
4110 prepare_gimple_addressable (&from, seq_p);
4112 mark_addressable (from);
4113 from_ptr = build_fold_addr_expr_loc (loc, from);
4114 gimplify_arg (&from_ptr, seq_p, loc);
4116 mark_addressable (to);
4117 to_ptr = build_fold_addr_expr_loc (loc, to);
4118 gimplify_arg (&to_ptr, seq_p, loc);
4120 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4122 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4124 if (want_value)
4126 /* tmp = memcpy() */
4127 t = create_tmp_var (TREE_TYPE (to_ptr));
4128 gimple_call_set_lhs (gs, t);
4129 gimplify_seq_add_stmt (seq_p, gs);
4131 *expr_p = build_simple_mem_ref (t);
4132 return GS_ALL_DONE;
4135 gimplify_seq_add_stmt (seq_p, gs);
4136 *expr_p = NULL;
4137 return GS_ALL_DONE;
4140 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4141 a call to __builtin_memset. In this case we know that the RHS is
4142 a CONSTRUCTOR with an empty element list. */
4144 static enum gimplify_status
4145 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4146 gimple_seq *seq_p)
4148 tree t, from, to, to_ptr;
4149 gcall *gs;
4150 location_t loc = EXPR_LOCATION (*expr_p);
4152 /* Assert our assumptions, to abort instead of producing wrong code
4153 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4154 not be immediately exposed. */
4155 from = TREE_OPERAND (*expr_p, 1);
4156 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4157 from = TREE_OPERAND (from, 0);
4159 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4160 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4162 /* Now proceed. */
4163 to = TREE_OPERAND (*expr_p, 0);
4165 to_ptr = build_fold_addr_expr_loc (loc, to);
4166 gimplify_arg (&to_ptr, seq_p, loc);
4167 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4169 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4171 if (want_value)
4173 /* tmp = memset() */
4174 t = create_tmp_var (TREE_TYPE (to_ptr));
4175 gimple_call_set_lhs (gs, t);
4176 gimplify_seq_add_stmt (seq_p, gs);
4178 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4179 return GS_ALL_DONE;
4182 gimplify_seq_add_stmt (seq_p, gs);
4183 *expr_p = NULL;
4184 return GS_ALL_DONE;
4187 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4188 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4189 assignment. Return non-null if we detect a potential overlap. */
4191 struct gimplify_init_ctor_preeval_data
4193 /* The base decl of the lhs object. May be NULL, in which case we
4194 have to assume the lhs is indirect. */
4195 tree lhs_base_decl;
4197 /* The alias set of the lhs object. */
4198 alias_set_type lhs_alias_set;
4201 static tree
4202 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4204 struct gimplify_init_ctor_preeval_data *data
4205 = (struct gimplify_init_ctor_preeval_data *) xdata;
4206 tree t = *tp;
4208 /* If we find the base object, obviously we have overlap. */
4209 if (data->lhs_base_decl == t)
4210 return t;
4212 /* If the constructor component is indirect, determine if we have a
4213 potential overlap with the lhs. The only bits of information we
4214 have to go on at this point are addressability and alias sets. */
4215 if ((INDIRECT_REF_P (t)
4216 || TREE_CODE (t) == MEM_REF)
4217 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4218 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4219 return t;
4221 /* If the constructor component is a call, determine if it can hide a
4222 potential overlap with the lhs through an INDIRECT_REF like above.
4223 ??? Ugh - this is completely broken. In fact this whole analysis
4224 doesn't look conservative. */
4225 if (TREE_CODE (t) == CALL_EXPR)
4227 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4229 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4230 if (POINTER_TYPE_P (TREE_VALUE (type))
4231 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4232 && alias_sets_conflict_p (data->lhs_alias_set,
4233 get_alias_set
4234 (TREE_TYPE (TREE_VALUE (type)))))
4235 return t;
4238 if (IS_TYPE_OR_DECL_P (t))
4239 *walk_subtrees = 0;
4240 return NULL;
4243 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4244 force values that overlap with the lhs (as described by *DATA)
4245 into temporaries. */
4247 static void
4248 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4249 struct gimplify_init_ctor_preeval_data *data)
4251 enum gimplify_status one;
4253 /* If the value is constant, then there's nothing to pre-evaluate. */
4254 if (TREE_CONSTANT (*expr_p))
4256 /* Ensure it does not have side effects, it might contain a reference to
4257 the object we're initializing. */
4258 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4259 return;
4262 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4263 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4264 return;
4266 /* Recurse for nested constructors. */
4267 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4269 unsigned HOST_WIDE_INT ix;
4270 constructor_elt *ce;
4271 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4273 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4274 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4276 return;
4279 /* If this is a variable sized type, we must remember the size. */
4280 maybe_with_size_expr (expr_p);
4282 /* Gimplify the constructor element to something appropriate for the rhs
4283 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4284 the gimplifier will consider this a store to memory. Doing this
4285 gimplification now means that we won't have to deal with complicated
4286 language-specific trees, nor trees like SAVE_EXPR that can induce
4287 exponential search behavior. */
4288 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4289 if (one == GS_ERROR)
4291 *expr_p = NULL;
4292 return;
4295 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4296 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4297 always be true for all scalars, since is_gimple_mem_rhs insists on a
4298 temporary variable for them. */
4299 if (DECL_P (*expr_p))
4300 return;
4302 /* If this is of variable size, we have no choice but to assume it doesn't
4303 overlap since we can't make a temporary for it. */
4304 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4305 return;
4307 /* Otherwise, we must search for overlap ... */
4308 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4309 return;
4311 /* ... and if found, force the value into a temporary. */
4312 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4315 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4316 a RANGE_EXPR in a CONSTRUCTOR for an array.
4318 var = lower;
4319 loop_entry:
4320 object[var] = value;
4321 if (var == upper)
4322 goto loop_exit;
4323 var = var + 1;
4324 goto loop_entry;
4325 loop_exit:
4327 We increment var _after_ the loop exit check because we might otherwise
4328 fail if upper == TYPE_MAX_VALUE (type for upper).
4330 Note that we never have to deal with SAVE_EXPRs here, because this has
4331 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4333 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4334 gimple_seq *, bool);
4336 static void
4337 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4338 tree value, tree array_elt_type,
4339 gimple_seq *pre_p, bool cleared)
4341 tree loop_entry_label, loop_exit_label, fall_thru_label;
4342 tree var, var_type, cref, tmp;
4344 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4345 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4346 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4348 /* Create and initialize the index variable. */
4349 var_type = TREE_TYPE (upper);
4350 var = create_tmp_var (var_type);
4351 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4353 /* Add the loop entry label. */
4354 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4356 /* Build the reference. */
4357 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4358 var, NULL_TREE, NULL_TREE);
4360 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4361 the store. Otherwise just assign value to the reference. */
4363 if (TREE_CODE (value) == CONSTRUCTOR)
4364 /* NB we might have to call ourself recursively through
4365 gimplify_init_ctor_eval if the value is a constructor. */
4366 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4367 pre_p, cleared);
4368 else
4369 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4371 /* We exit the loop when the index var is equal to the upper bound. */
4372 gimplify_seq_add_stmt (pre_p,
4373 gimple_build_cond (EQ_EXPR, var, upper,
4374 loop_exit_label, fall_thru_label));
4376 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4378 /* Otherwise, increment the index var... */
4379 tmp = build2 (PLUS_EXPR, var_type, var,
4380 fold_convert (var_type, integer_one_node));
4381 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4383 /* ...and jump back to the loop entry. */
4384 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4386 /* Add the loop exit label. */
4387 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4390 /* Return true if FDECL is accessing a field that is zero sized. */
4392 static bool
4393 zero_sized_field_decl (const_tree fdecl)
4395 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4396 && integer_zerop (DECL_SIZE (fdecl)))
4397 return true;
4398 return false;
4401 /* Return true if TYPE is zero sized. */
4403 static bool
4404 zero_sized_type (const_tree type)
4406 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4407 && integer_zerop (TYPE_SIZE (type)))
4408 return true;
4409 return false;
4412 /* A subroutine of gimplify_init_constructor. Generate individual
4413 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4414 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4415 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4416 zeroed first. */
4418 static void
4419 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4420 gimple_seq *pre_p, bool cleared)
4422 tree array_elt_type = NULL;
4423 unsigned HOST_WIDE_INT ix;
4424 tree purpose, value;
4426 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4427 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4429 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4431 tree cref;
4433 /* NULL values are created above for gimplification errors. */
4434 if (value == NULL)
4435 continue;
4437 if (cleared && initializer_zerop (value))
4438 continue;
4440 /* ??? Here's to hoping the front end fills in all of the indices,
4441 so we don't have to figure out what's missing ourselves. */
4442 gcc_assert (purpose);
4444 /* Skip zero-sized fields, unless value has side-effects. This can
4445 happen with calls to functions returning a zero-sized type, which
4446 we shouldn't discard. As a number of downstream passes don't
4447 expect sets of zero-sized fields, we rely on the gimplification of
4448 the MODIFY_EXPR we make below to drop the assignment statement. */
4449 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4450 continue;
4452 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4453 whole range. */
4454 if (TREE_CODE (purpose) == RANGE_EXPR)
4456 tree lower = TREE_OPERAND (purpose, 0);
4457 tree upper = TREE_OPERAND (purpose, 1);
4459 /* If the lower bound is equal to upper, just treat it as if
4460 upper was the index. */
4461 if (simple_cst_equal (lower, upper))
4462 purpose = upper;
4463 else
4465 gimplify_init_ctor_eval_range (object, lower, upper, value,
4466 array_elt_type, pre_p, cleared);
4467 continue;
4471 if (array_elt_type)
4473 /* Do not use bitsizetype for ARRAY_REF indices. */
4474 if (TYPE_DOMAIN (TREE_TYPE (object)))
4475 purpose
4476 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4477 purpose);
4478 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4479 purpose, NULL_TREE, NULL_TREE);
4481 else
4483 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4484 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4485 unshare_expr (object), purpose, NULL_TREE);
4488 if (TREE_CODE (value) == CONSTRUCTOR
4489 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4490 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4491 pre_p, cleared);
4492 else
4494 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4495 gimplify_and_add (init, pre_p);
4496 ggc_free (init);
4501 /* Return the appropriate RHS predicate for this LHS. */
4503 gimple_predicate
4504 rhs_predicate_for (tree lhs)
4506 if (is_gimple_reg (lhs))
4507 return is_gimple_reg_rhs_or_call;
4508 else
4509 return is_gimple_mem_rhs_or_call;
4512 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4513 before the LHS has been gimplified. */
4515 static gimple_predicate
4516 initial_rhs_predicate_for (tree lhs)
4518 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4519 return is_gimple_reg_rhs_or_call;
4520 else
4521 return is_gimple_mem_rhs_or_call;
4524 /* Gimplify a C99 compound literal expression. This just means adding
4525 the DECL_EXPR before the current statement and using its anonymous
4526 decl instead. */
4528 static enum gimplify_status
4529 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4530 bool (*gimple_test_f) (tree),
4531 fallback_t fallback)
4533 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4534 tree decl = DECL_EXPR_DECL (decl_s);
4535 tree init = DECL_INITIAL (decl);
4536 /* Mark the decl as addressable if the compound literal
4537 expression is addressable now, otherwise it is marked too late
4538 after we gimplify the initialization expression. */
4539 if (TREE_ADDRESSABLE (*expr_p))
4540 TREE_ADDRESSABLE (decl) = 1;
4541 /* Otherwise, if we don't need an lvalue and have a literal directly
4542 substitute it. Check if it matches the gimple predicate, as
4543 otherwise we'd generate a new temporary, and we can as well just
4544 use the decl we already have. */
4545 else if (!TREE_ADDRESSABLE (decl)
4546 && init
4547 && (fallback & fb_lvalue) == 0
4548 && gimple_test_f (init))
4550 *expr_p = init;
4551 return GS_OK;
4554 /* Preliminarily mark non-addressed complex variables as eligible
4555 for promotion to gimple registers. We'll transform their uses
4556 as we find them. */
4557 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4558 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4559 && !TREE_THIS_VOLATILE (decl)
4560 && !needs_to_live_in_memory (decl))
4561 DECL_GIMPLE_REG_P (decl) = 1;
4563 /* If the decl is not addressable, then it is being used in some
4564 expression or on the right hand side of a statement, and it can
4565 be put into a readonly data section. */
4566 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4567 TREE_READONLY (decl) = 1;
4569 /* This decl isn't mentioned in the enclosing block, so add it to the
4570 list of temps. FIXME it seems a bit of a kludge to say that
4571 anonymous artificial vars aren't pushed, but everything else is. */
4572 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4573 gimple_add_tmp_var (decl);
4575 gimplify_and_add (decl_s, pre_p);
4576 *expr_p = decl;
4577 return GS_OK;
4580 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4581 return a new CONSTRUCTOR if something changed. */
4583 static tree
4584 optimize_compound_literals_in_ctor (tree orig_ctor)
4586 tree ctor = orig_ctor;
4587 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4588 unsigned int idx, num = vec_safe_length (elts);
4590 for (idx = 0; idx < num; idx++)
4592 tree value = (*elts)[idx].value;
4593 tree newval = value;
4594 if (TREE_CODE (value) == CONSTRUCTOR)
4595 newval = optimize_compound_literals_in_ctor (value);
4596 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4598 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4599 tree decl = DECL_EXPR_DECL (decl_s);
4600 tree init = DECL_INITIAL (decl);
4602 if (!TREE_ADDRESSABLE (value)
4603 && !TREE_ADDRESSABLE (decl)
4604 && init
4605 && TREE_CODE (init) == CONSTRUCTOR)
4606 newval = optimize_compound_literals_in_ctor (init);
4608 if (newval == value)
4609 continue;
4611 if (ctor == orig_ctor)
4613 ctor = copy_node (orig_ctor);
4614 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4615 elts = CONSTRUCTOR_ELTS (ctor);
4617 (*elts)[idx].value = newval;
4619 return ctor;
4622 /* A subroutine of gimplify_modify_expr. Break out elements of a
4623 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4625 Note that we still need to clear any elements that don't have explicit
4626 initializers, so if not all elements are initialized we keep the
4627 original MODIFY_EXPR, we just remove all of the constructor elements.
4629 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4630 GS_ERROR if we would have to create a temporary when gimplifying
4631 this constructor. Otherwise, return GS_OK.
4633 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4635 static enum gimplify_status
4636 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4637 bool want_value, bool notify_temp_creation)
4639 tree object, ctor, type;
4640 enum gimplify_status ret;
4641 vec<constructor_elt, va_gc> *elts;
4643 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4645 if (!notify_temp_creation)
4647 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4648 is_gimple_lvalue, fb_lvalue);
4649 if (ret == GS_ERROR)
4650 return ret;
4653 object = TREE_OPERAND (*expr_p, 0);
4654 ctor = TREE_OPERAND (*expr_p, 1)
4655 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4656 type = TREE_TYPE (ctor);
4657 elts = CONSTRUCTOR_ELTS (ctor);
4658 ret = GS_ALL_DONE;
4660 switch (TREE_CODE (type))
4662 case RECORD_TYPE:
4663 case UNION_TYPE:
4664 case QUAL_UNION_TYPE:
4665 case ARRAY_TYPE:
4667 struct gimplify_init_ctor_preeval_data preeval_data;
4668 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4669 bool cleared, complete_p, valid_const_initializer;
4671 /* Aggregate types must lower constructors to initialization of
4672 individual elements. The exception is that a CONSTRUCTOR node
4673 with no elements indicates zero-initialization of the whole. */
4674 if (vec_safe_is_empty (elts))
4676 if (notify_temp_creation)
4677 return GS_OK;
4678 break;
4681 /* Fetch information about the constructor to direct later processing.
4682 We might want to make static versions of it in various cases, and
4683 can only do so if it known to be a valid constant initializer. */
4684 valid_const_initializer
4685 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4686 &num_ctor_elements, &complete_p);
4688 /* If a const aggregate variable is being initialized, then it
4689 should never be a lose to promote the variable to be static. */
4690 if (valid_const_initializer
4691 && num_nonzero_elements > 1
4692 && TREE_READONLY (object)
4693 && VAR_P (object)
4694 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4696 if (notify_temp_creation)
4697 return GS_ERROR;
4698 DECL_INITIAL (object) = ctor;
4699 TREE_STATIC (object) = 1;
4700 if (!DECL_NAME (object))
4701 DECL_NAME (object) = create_tmp_var_name ("C");
4702 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4704 /* ??? C++ doesn't automatically append a .<number> to the
4705 assembler name, and even when it does, it looks at FE private
4706 data structures to figure out what that number should be,
4707 which are not set for this variable. I suppose this is
4708 important for local statics for inline functions, which aren't
4709 "local" in the object file sense. So in order to get a unique
4710 TU-local symbol, we must invoke the lhd version now. */
4711 lhd_set_decl_assembler_name (object);
4713 *expr_p = NULL_TREE;
4714 break;
4717 /* If there are "lots" of initialized elements, even discounting
4718 those that are not address constants (and thus *must* be
4719 computed at runtime), then partition the constructor into
4720 constant and non-constant parts. Block copy the constant
4721 parts in, then generate code for the non-constant parts. */
4722 /* TODO. There's code in cp/typeck.c to do this. */
4724 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4725 /* store_constructor will ignore the clearing of variable-sized
4726 objects. Initializers for such objects must explicitly set
4727 every field that needs to be set. */
4728 cleared = false;
4729 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4730 /* If the constructor isn't complete, clear the whole object
4731 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4733 ??? This ought not to be needed. For any element not present
4734 in the initializer, we should simply set them to zero. Except
4735 we'd need to *find* the elements that are not present, and that
4736 requires trickery to avoid quadratic compile-time behavior in
4737 large cases or excessive memory use in small cases. */
4738 cleared = true;
4739 else if (num_ctor_elements - num_nonzero_elements
4740 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4741 && num_nonzero_elements < num_ctor_elements / 4)
4742 /* If there are "lots" of zeros, it's more efficient to clear
4743 the memory and then set the nonzero elements. */
4744 cleared = true;
4745 else
4746 cleared = false;
4748 /* If there are "lots" of initialized elements, and all of them
4749 are valid address constants, then the entire initializer can
4750 be dropped to memory, and then memcpy'd out. Don't do this
4751 for sparse arrays, though, as it's more efficient to follow
4752 the standard CONSTRUCTOR behavior of memset followed by
4753 individual element initialization. Also don't do this for small
4754 all-zero initializers (which aren't big enough to merit
4755 clearing), and don't try to make bitwise copies of
4756 TREE_ADDRESSABLE types.
4758 We cannot apply such transformation when compiling chkp static
4759 initializer because creation of initializer image in the memory
4760 will require static initialization of bounds for it. It should
4761 result in another gimplification of similar initializer and we
4762 may fall into infinite loop. */
4763 if (valid_const_initializer
4764 && !(cleared || num_nonzero_elements == 0)
4765 && !TREE_ADDRESSABLE (type)
4766 && (!current_function_decl
4767 || !lookup_attribute ("chkp ctor",
4768 DECL_ATTRIBUTES (current_function_decl))))
4770 HOST_WIDE_INT size = int_size_in_bytes (type);
4771 unsigned int align;
4773 /* ??? We can still get unbounded array types, at least
4774 from the C++ front end. This seems wrong, but attempt
4775 to work around it for now. */
4776 if (size < 0)
4778 size = int_size_in_bytes (TREE_TYPE (object));
4779 if (size >= 0)
4780 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4783 /* Find the maximum alignment we can assume for the object. */
4784 /* ??? Make use of DECL_OFFSET_ALIGN. */
4785 if (DECL_P (object))
4786 align = DECL_ALIGN (object);
4787 else
4788 align = TYPE_ALIGN (type);
4790 /* Do a block move either if the size is so small as to make
4791 each individual move a sub-unit move on average, or if it
4792 is so large as to make individual moves inefficient. */
4793 if (size > 0
4794 && num_nonzero_elements > 1
4795 && (size < num_nonzero_elements
4796 || !can_move_by_pieces (size, align)))
4798 if (notify_temp_creation)
4799 return GS_ERROR;
4801 walk_tree (&ctor, force_labels_r, NULL, NULL);
4802 ctor = tree_output_constant_def (ctor);
4803 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4804 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4805 TREE_OPERAND (*expr_p, 1) = ctor;
4807 /* This is no longer an assignment of a CONSTRUCTOR, but
4808 we still may have processing to do on the LHS. So
4809 pretend we didn't do anything here to let that happen. */
4810 return GS_UNHANDLED;
4814 /* If the target is volatile, we have non-zero elements and more than
4815 one field to assign, initialize the target from a temporary. */
4816 if (TREE_THIS_VOLATILE (object)
4817 && !TREE_ADDRESSABLE (type)
4818 && num_nonzero_elements > 0
4819 && vec_safe_length (elts) > 1)
4821 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4822 TREE_OPERAND (*expr_p, 0) = temp;
4823 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4824 *expr_p,
4825 build2 (MODIFY_EXPR, void_type_node,
4826 object, temp));
4827 return GS_OK;
4830 if (notify_temp_creation)
4831 return GS_OK;
4833 /* If there are nonzero elements and if needed, pre-evaluate to capture
4834 elements overlapping with the lhs into temporaries. We must do this
4835 before clearing to fetch the values before they are zeroed-out. */
4836 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4838 preeval_data.lhs_base_decl = get_base_address (object);
4839 if (!DECL_P (preeval_data.lhs_base_decl))
4840 preeval_data.lhs_base_decl = NULL;
4841 preeval_data.lhs_alias_set = get_alias_set (object);
4843 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4844 pre_p, post_p, &preeval_data);
4847 bool ctor_has_side_effects_p
4848 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4850 if (cleared)
4852 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4853 Note that we still have to gimplify, in order to handle the
4854 case of variable sized types. Avoid shared tree structures. */
4855 CONSTRUCTOR_ELTS (ctor) = NULL;
4856 TREE_SIDE_EFFECTS (ctor) = 0;
4857 object = unshare_expr (object);
4858 gimplify_stmt (expr_p, pre_p);
4861 /* If we have not block cleared the object, or if there are nonzero
4862 elements in the constructor, or if the constructor has side effects,
4863 add assignments to the individual scalar fields of the object. */
4864 if (!cleared
4865 || num_nonzero_elements > 0
4866 || ctor_has_side_effects_p)
4867 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4869 *expr_p = NULL_TREE;
4871 break;
4873 case COMPLEX_TYPE:
4875 tree r, i;
4877 if (notify_temp_creation)
4878 return GS_OK;
4880 /* Extract the real and imaginary parts out of the ctor. */
4881 gcc_assert (elts->length () == 2);
4882 r = (*elts)[0].value;
4883 i = (*elts)[1].value;
4884 if (r == NULL || i == NULL)
4886 tree zero = build_zero_cst (TREE_TYPE (type));
4887 if (r == NULL)
4888 r = zero;
4889 if (i == NULL)
4890 i = zero;
4893 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4894 represent creation of a complex value. */
4895 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4897 ctor = build_complex (type, r, i);
4898 TREE_OPERAND (*expr_p, 1) = ctor;
4900 else
4902 ctor = build2 (COMPLEX_EXPR, type, r, i);
4903 TREE_OPERAND (*expr_p, 1) = ctor;
4904 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4905 pre_p,
4906 post_p,
4907 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4908 fb_rvalue);
4911 break;
4913 case VECTOR_TYPE:
4915 unsigned HOST_WIDE_INT ix;
4916 constructor_elt *ce;
4918 if (notify_temp_creation)
4919 return GS_OK;
4921 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4922 if (TREE_CONSTANT (ctor))
4924 bool constant_p = true;
4925 tree value;
4927 /* Even when ctor is constant, it might contain non-*_CST
4928 elements, such as addresses or trapping values like
4929 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4930 in VECTOR_CST nodes. */
4931 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4932 if (!CONSTANT_CLASS_P (value))
4934 constant_p = false;
4935 break;
4938 if (constant_p)
4940 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4941 break;
4944 TREE_CONSTANT (ctor) = 0;
4947 /* Vector types use CONSTRUCTOR all the way through gimple
4948 compilation as a general initializer. */
4949 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4951 enum gimplify_status tret;
4952 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4953 fb_rvalue);
4954 if (tret == GS_ERROR)
4955 ret = GS_ERROR;
4956 else if (TREE_STATIC (ctor)
4957 && !initializer_constant_valid_p (ce->value,
4958 TREE_TYPE (ce->value)))
4959 TREE_STATIC (ctor) = 0;
4961 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4962 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4964 break;
4966 default:
4967 /* So how did we get a CONSTRUCTOR for a scalar type? */
4968 gcc_unreachable ();
4971 if (ret == GS_ERROR)
4972 return GS_ERROR;
4973 /* If we have gimplified both sides of the initializer but have
4974 not emitted an assignment, do so now. */
4975 if (*expr_p)
4977 tree lhs = TREE_OPERAND (*expr_p, 0);
4978 tree rhs = TREE_OPERAND (*expr_p, 1);
4979 if (want_value && object == lhs)
4980 lhs = unshare_expr (lhs);
4981 gassign *init = gimple_build_assign (lhs, rhs);
4982 gimplify_seq_add_stmt (pre_p, init);
4984 if (want_value)
4986 *expr_p = object;
4987 return GS_OK;
4989 else
4991 *expr_p = NULL;
4992 return GS_ALL_DONE;
4996 /* Given a pointer value OP0, return a simplified version of an
4997 indirection through OP0, or NULL_TREE if no simplification is
4998 possible. This may only be applied to a rhs of an expression.
4999 Note that the resulting type may be different from the type pointed
5000 to in the sense that it is still compatible from the langhooks
5001 point of view. */
5003 static tree
5004 gimple_fold_indirect_ref_rhs (tree t)
5006 return gimple_fold_indirect_ref (t);
5009 /* Subroutine of gimplify_modify_expr to do simplifications of
5010 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5011 something changes. */
5013 static enum gimplify_status
5014 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5015 gimple_seq *pre_p, gimple_seq *post_p,
5016 bool want_value)
5018 enum gimplify_status ret = GS_UNHANDLED;
5019 bool changed;
5023 changed = false;
5024 switch (TREE_CODE (*from_p))
5026 case VAR_DECL:
5027 /* If we're assigning from a read-only variable initialized with
5028 a constructor, do the direct assignment from the constructor,
5029 but only if neither source nor target are volatile since this
5030 latter assignment might end up being done on a per-field basis. */
5031 if (DECL_INITIAL (*from_p)
5032 && TREE_READONLY (*from_p)
5033 && !TREE_THIS_VOLATILE (*from_p)
5034 && !TREE_THIS_VOLATILE (*to_p)
5035 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5037 tree old_from = *from_p;
5038 enum gimplify_status subret;
5040 /* Move the constructor into the RHS. */
5041 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5043 /* Let's see if gimplify_init_constructor will need to put
5044 it in memory. */
5045 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5046 false, true);
5047 if (subret == GS_ERROR)
5049 /* If so, revert the change. */
5050 *from_p = old_from;
5052 else
5054 ret = GS_OK;
5055 changed = true;
5058 break;
5059 case INDIRECT_REF:
5061 /* If we have code like
5063 *(const A*)(A*)&x
5065 where the type of "x" is a (possibly cv-qualified variant
5066 of "A"), treat the entire expression as identical to "x".
5067 This kind of code arises in C++ when an object is bound
5068 to a const reference, and if "x" is a TARGET_EXPR we want
5069 to take advantage of the optimization below. */
5070 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5071 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5072 if (t)
5074 if (TREE_THIS_VOLATILE (t) != volatile_p)
5076 if (DECL_P (t))
5077 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5078 build_fold_addr_expr (t));
5079 if (REFERENCE_CLASS_P (t))
5080 TREE_THIS_VOLATILE (t) = volatile_p;
5082 *from_p = t;
5083 ret = GS_OK;
5084 changed = true;
5086 break;
5089 case TARGET_EXPR:
5091 /* If we are initializing something from a TARGET_EXPR, strip the
5092 TARGET_EXPR and initialize it directly, if possible. This can't
5093 be done if the initializer is void, since that implies that the
5094 temporary is set in some non-trivial way.
5096 ??? What about code that pulls out the temp and uses it
5097 elsewhere? I think that such code never uses the TARGET_EXPR as
5098 an initializer. If I'm wrong, we'll die because the temp won't
5099 have any RTL. In that case, I guess we'll need to replace
5100 references somehow. */
5101 tree init = TARGET_EXPR_INITIAL (*from_p);
5103 if (init
5104 && !VOID_TYPE_P (TREE_TYPE (init)))
5106 *from_p = init;
5107 ret = GS_OK;
5108 changed = true;
5111 break;
5113 case COMPOUND_EXPR:
5114 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5115 caught. */
5116 gimplify_compound_expr (from_p, pre_p, true);
5117 ret = GS_OK;
5118 changed = true;
5119 break;
5121 case CONSTRUCTOR:
5122 /* If we already made some changes, let the front end have a
5123 crack at this before we break it down. */
5124 if (ret != GS_UNHANDLED)
5125 break;
5126 /* If we're initializing from a CONSTRUCTOR, break this into
5127 individual MODIFY_EXPRs. */
5128 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5129 false);
5131 case COND_EXPR:
5132 /* If we're assigning to a non-register type, push the assignment
5133 down into the branches. This is mandatory for ADDRESSABLE types,
5134 since we cannot generate temporaries for such, but it saves a
5135 copy in other cases as well. */
5136 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5138 /* This code should mirror the code in gimplify_cond_expr. */
5139 enum tree_code code = TREE_CODE (*expr_p);
5140 tree cond = *from_p;
5141 tree result = *to_p;
5143 ret = gimplify_expr (&result, pre_p, post_p,
5144 is_gimple_lvalue, fb_lvalue);
5145 if (ret != GS_ERROR)
5146 ret = GS_OK;
5148 /* If we are going to write RESULT more than once, clear
5149 TREE_READONLY flag, otherwise we might incorrectly promote
5150 the variable to static const and initialize it at compile
5151 time in one of the branches. */
5152 if (VAR_P (result)
5153 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5154 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5155 TREE_READONLY (result) = 0;
5156 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5157 TREE_OPERAND (cond, 1)
5158 = build2 (code, void_type_node, result,
5159 TREE_OPERAND (cond, 1));
5160 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5161 TREE_OPERAND (cond, 2)
5162 = build2 (code, void_type_node, unshare_expr (result),
5163 TREE_OPERAND (cond, 2));
5165 TREE_TYPE (cond) = void_type_node;
5166 recalculate_side_effects (cond);
5168 if (want_value)
5170 gimplify_and_add (cond, pre_p);
5171 *expr_p = unshare_expr (result);
5173 else
5174 *expr_p = cond;
5175 return ret;
5177 break;
5179 case CALL_EXPR:
5180 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5181 return slot so that we don't generate a temporary. */
5182 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5183 && aggregate_value_p (*from_p, *from_p))
5185 bool use_target;
5187 if (!(rhs_predicate_for (*to_p))(*from_p))
5188 /* If we need a temporary, *to_p isn't accurate. */
5189 use_target = false;
5190 /* It's OK to use the return slot directly unless it's an NRV. */
5191 else if (TREE_CODE (*to_p) == RESULT_DECL
5192 && DECL_NAME (*to_p) == NULL_TREE
5193 && needs_to_live_in_memory (*to_p))
5194 use_target = true;
5195 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5196 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5197 /* Don't force regs into memory. */
5198 use_target = false;
5199 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5200 /* It's OK to use the target directly if it's being
5201 initialized. */
5202 use_target = true;
5203 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5204 != INTEGER_CST)
5205 /* Always use the target and thus RSO for variable-sized types.
5206 GIMPLE cannot deal with a variable-sized assignment
5207 embedded in a call statement. */
5208 use_target = true;
5209 else if (TREE_CODE (*to_p) != SSA_NAME
5210 && (!is_gimple_variable (*to_p)
5211 || needs_to_live_in_memory (*to_p)))
5212 /* Don't use the original target if it's already addressable;
5213 if its address escapes, and the called function uses the
5214 NRV optimization, a conforming program could see *to_p
5215 change before the called function returns; see c++/19317.
5216 When optimizing, the return_slot pass marks more functions
5217 as safe after we have escape info. */
5218 use_target = false;
5219 else
5220 use_target = true;
5222 if (use_target)
5224 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5225 mark_addressable (*to_p);
5228 break;
5230 case WITH_SIZE_EXPR:
5231 /* Likewise for calls that return an aggregate of non-constant size,
5232 since we would not be able to generate a temporary at all. */
5233 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5235 *from_p = TREE_OPERAND (*from_p, 0);
5236 /* We don't change ret in this case because the
5237 WITH_SIZE_EXPR might have been added in
5238 gimplify_modify_expr, so returning GS_OK would lead to an
5239 infinite loop. */
5240 changed = true;
5242 break;
5244 /* If we're initializing from a container, push the initialization
5245 inside it. */
5246 case CLEANUP_POINT_EXPR:
5247 case BIND_EXPR:
5248 case STATEMENT_LIST:
5250 tree wrap = *from_p;
5251 tree t;
5253 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5254 fb_lvalue);
5255 if (ret != GS_ERROR)
5256 ret = GS_OK;
5258 t = voidify_wrapper_expr (wrap, *expr_p);
5259 gcc_assert (t == *expr_p);
5261 if (want_value)
5263 gimplify_and_add (wrap, pre_p);
5264 *expr_p = unshare_expr (*to_p);
5266 else
5267 *expr_p = wrap;
5268 return GS_OK;
5271 case COMPOUND_LITERAL_EXPR:
5273 tree complit = TREE_OPERAND (*expr_p, 1);
5274 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5275 tree decl = DECL_EXPR_DECL (decl_s);
5276 tree init = DECL_INITIAL (decl);
5278 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5279 into struct T x = { 0, 1, 2 } if the address of the
5280 compound literal has never been taken. */
5281 if (!TREE_ADDRESSABLE (complit)
5282 && !TREE_ADDRESSABLE (decl)
5283 && init)
5285 *expr_p = copy_node (*expr_p);
5286 TREE_OPERAND (*expr_p, 1) = init;
5287 return GS_OK;
5291 default:
5292 break;
5295 while (changed);
5297 return ret;
5301 /* Return true if T looks like a valid GIMPLE statement. */
5303 static bool
5304 is_gimple_stmt (tree t)
5306 const enum tree_code code = TREE_CODE (t);
5308 switch (code)
5310 case NOP_EXPR:
5311 /* The only valid NOP_EXPR is the empty statement. */
5312 return IS_EMPTY_STMT (t);
5314 case BIND_EXPR:
5315 case COND_EXPR:
5316 /* These are only valid if they're void. */
5317 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5319 case SWITCH_EXPR:
5320 case GOTO_EXPR:
5321 case RETURN_EXPR:
5322 case LABEL_EXPR:
5323 case CASE_LABEL_EXPR:
5324 case TRY_CATCH_EXPR:
5325 case TRY_FINALLY_EXPR:
5326 case EH_FILTER_EXPR:
5327 case CATCH_EXPR:
5328 case ASM_EXPR:
5329 case STATEMENT_LIST:
5330 case OACC_PARALLEL:
5331 case OACC_KERNELS:
5332 case OACC_DATA:
5333 case OACC_HOST_DATA:
5334 case OACC_DECLARE:
5335 case OACC_UPDATE:
5336 case OACC_ENTER_DATA:
5337 case OACC_EXIT_DATA:
5338 case OACC_CACHE:
5339 case OMP_PARALLEL:
5340 case OMP_FOR:
5341 case OMP_SIMD:
5342 case CILK_SIMD:
5343 case OMP_DISTRIBUTE:
5344 case OACC_LOOP:
5345 case OMP_SECTIONS:
5346 case OMP_SECTION:
5347 case OMP_SINGLE:
5348 case OMP_MASTER:
5349 case OMP_TASKGROUP:
5350 case OMP_ORDERED:
5351 case OMP_CRITICAL:
5352 case OMP_TASK:
5353 case OMP_TARGET:
5354 case OMP_TARGET_DATA:
5355 case OMP_TARGET_UPDATE:
5356 case OMP_TARGET_ENTER_DATA:
5357 case OMP_TARGET_EXIT_DATA:
5358 case OMP_TASKLOOP:
5359 case OMP_TEAMS:
5360 /* These are always void. */
5361 return true;
5363 case CALL_EXPR:
5364 case MODIFY_EXPR:
5365 case PREDICT_EXPR:
5366 /* These are valid regardless of their type. */
5367 return true;
5369 default:
5370 return false;
5375 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5376 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5377 DECL_GIMPLE_REG_P set.
5379 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5380 other, unmodified part of the complex object just before the total store.
5381 As a consequence, if the object is still uninitialized, an undefined value
5382 will be loaded into a register, which may result in a spurious exception
5383 if the register is floating-point and the value happens to be a signaling
5384 NaN for example. Then the fully-fledged complex operations lowering pass
5385 followed by a DCE pass are necessary in order to fix things up. */
5387 static enum gimplify_status
5388 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5389 bool want_value)
5391 enum tree_code code, ocode;
5392 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5394 lhs = TREE_OPERAND (*expr_p, 0);
5395 rhs = TREE_OPERAND (*expr_p, 1);
5396 code = TREE_CODE (lhs);
5397 lhs = TREE_OPERAND (lhs, 0);
5399 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5400 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5401 TREE_NO_WARNING (other) = 1;
5402 other = get_formal_tmp_var (other, pre_p);
5404 realpart = code == REALPART_EXPR ? rhs : other;
5405 imagpart = code == REALPART_EXPR ? other : rhs;
5407 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5408 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5409 else
5410 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5412 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5413 *expr_p = (want_value) ? rhs : NULL_TREE;
5415 return GS_ALL_DONE;
5418 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5420 modify_expr
5421 : varname '=' rhs
5422 | '*' ID '=' rhs
5424 PRE_P points to the list where side effects that must happen before
5425 *EXPR_P should be stored.
5427 POST_P points to the list where side effects that must happen after
5428 *EXPR_P should be stored.
5430 WANT_VALUE is nonzero iff we want to use the value of this expression
5431 in another expression. */
5433 static enum gimplify_status
5434 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5435 bool want_value)
5437 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5438 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5439 enum gimplify_status ret = GS_UNHANDLED;
5440 gimple *assign;
5441 location_t loc = EXPR_LOCATION (*expr_p);
5442 gimple_stmt_iterator gsi;
5444 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5445 || TREE_CODE (*expr_p) == INIT_EXPR);
5447 /* Trying to simplify a clobber using normal logic doesn't work,
5448 so handle it here. */
5449 if (TREE_CLOBBER_P (*from_p))
5451 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5452 if (ret == GS_ERROR)
5453 return ret;
5454 gcc_assert (!want_value
5455 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5456 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5457 *expr_p = NULL;
5458 return GS_ALL_DONE;
5461 /* Insert pointer conversions required by the middle-end that are not
5462 required by the frontend. This fixes middle-end type checking for
5463 for example gcc.dg/redecl-6.c. */
5464 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5466 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5467 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5468 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5471 /* See if any simplifications can be done based on what the RHS is. */
5472 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5473 want_value);
5474 if (ret != GS_UNHANDLED)
5475 return ret;
5477 /* For zero sized types only gimplify the left hand side and right hand
5478 side as statements and throw away the assignment. Do this after
5479 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5480 types properly. */
5481 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
5483 gimplify_stmt (from_p, pre_p);
5484 gimplify_stmt (to_p, pre_p);
5485 *expr_p = NULL_TREE;
5486 return GS_ALL_DONE;
5489 /* If the value being copied is of variable width, compute the length
5490 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5491 before gimplifying any of the operands so that we can resolve any
5492 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5493 the size of the expression to be copied, not of the destination, so
5494 that is what we must do here. */
5495 maybe_with_size_expr (from_p);
5497 /* As a special case, we have to temporarily allow for assignments
5498 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5499 a toplevel statement, when gimplifying the GENERIC expression
5500 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5501 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5503 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5504 prevent gimplify_expr from trying to create a new temporary for
5505 foo's LHS, we tell it that it should only gimplify until it
5506 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5507 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5508 and all we need to do here is set 'a' to be its LHS. */
5510 /* Gimplify the RHS first for C++17 and bug 71104. */
5511 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5512 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5513 if (ret == GS_ERROR)
5514 return ret;
5516 /* Then gimplify the LHS. */
5517 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5518 twice we have to make sure to gimplify into non-SSA as otherwise
5519 the abnormal edge added later will make those defs not dominate
5520 their uses.
5521 ??? Technically this applies only to the registers used in the
5522 resulting non-register *TO_P. */
5523 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5524 if (saved_into_ssa
5525 && TREE_CODE (*from_p) == CALL_EXPR
5526 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5527 gimplify_ctxp->into_ssa = false;
5528 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5529 gimplify_ctxp->into_ssa = saved_into_ssa;
5530 if (ret == GS_ERROR)
5531 return ret;
5533 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5534 guess for the predicate was wrong. */
5535 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5536 if (final_pred != initial_pred)
5538 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5539 if (ret == GS_ERROR)
5540 return ret;
5543 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5544 size as argument to the call. */
5545 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5547 tree call = TREE_OPERAND (*from_p, 0);
5548 tree vlasize = TREE_OPERAND (*from_p, 1);
5550 if (TREE_CODE (call) == CALL_EXPR
5551 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5553 int nargs = call_expr_nargs (call);
5554 tree type = TREE_TYPE (call);
5555 tree ap = CALL_EXPR_ARG (call, 0);
5556 tree tag = CALL_EXPR_ARG (call, 1);
5557 tree aptag = CALL_EXPR_ARG (call, 2);
5558 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5559 IFN_VA_ARG, type,
5560 nargs + 1, ap, tag,
5561 aptag, vlasize);
5562 TREE_OPERAND (*from_p, 0) = newcall;
5566 /* Now see if the above changed *from_p to something we handle specially. */
5567 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5568 want_value);
5569 if (ret != GS_UNHANDLED)
5570 return ret;
5572 /* If we've got a variable sized assignment between two lvalues (i.e. does
5573 not involve a call), then we can make things a bit more straightforward
5574 by converting the assignment to memcpy or memset. */
5575 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5577 tree from = TREE_OPERAND (*from_p, 0);
5578 tree size = TREE_OPERAND (*from_p, 1);
5580 if (TREE_CODE (from) == CONSTRUCTOR)
5581 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5583 if (is_gimple_addressable (from))
5585 *from_p = from;
5586 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5587 pre_p);
5591 /* Transform partial stores to non-addressable complex variables into
5592 total stores. This allows us to use real instead of virtual operands
5593 for these variables, which improves optimization. */
5594 if ((TREE_CODE (*to_p) == REALPART_EXPR
5595 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5596 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5597 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5599 /* Try to alleviate the effects of the gimplification creating artificial
5600 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5601 make sure not to create DECL_DEBUG_EXPR links across functions. */
5602 if (!gimplify_ctxp->into_ssa
5603 && VAR_P (*from_p)
5604 && DECL_IGNORED_P (*from_p)
5605 && DECL_P (*to_p)
5606 && !DECL_IGNORED_P (*to_p)
5607 && decl_function_context (*to_p) == current_function_decl
5608 && decl_function_context (*from_p) == current_function_decl)
5610 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5611 DECL_NAME (*from_p)
5612 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5613 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5614 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5617 if (want_value && TREE_THIS_VOLATILE (*to_p))
5618 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5620 if (TREE_CODE (*from_p) == CALL_EXPR)
5622 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5623 instead of a GIMPLE_ASSIGN. */
5624 gcall *call_stmt;
5625 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5627 /* Gimplify internal functions created in the FEs. */
5628 int nargs = call_expr_nargs (*from_p), i;
5629 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5630 auto_vec<tree> vargs (nargs);
5632 for (i = 0; i < nargs; i++)
5634 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5635 EXPR_LOCATION (*from_p));
5636 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5638 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5639 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5641 else
5643 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5644 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5645 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5646 tree fndecl = get_callee_fndecl (*from_p);
5647 if (fndecl
5648 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5649 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5650 && call_expr_nargs (*from_p) == 3)
5651 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5652 CALL_EXPR_ARG (*from_p, 0),
5653 CALL_EXPR_ARG (*from_p, 1),
5654 CALL_EXPR_ARG (*from_p, 2));
5655 else
5657 call_stmt = gimple_build_call_from_tree (*from_p);
5658 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
5661 notice_special_calls (call_stmt);
5662 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5663 gimple_call_set_lhs (call_stmt, *to_p);
5664 else if (TREE_CODE (*to_p) == SSA_NAME)
5665 /* The above is somewhat premature, avoid ICEing later for a
5666 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5667 ??? This doesn't make it a default-def. */
5668 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5670 if (EXPR_CILK_SPAWN (*from_p))
5671 gimplify_cilk_detach (pre_p);
5672 assign = call_stmt;
5674 else
5676 assign = gimple_build_assign (*to_p, *from_p);
5677 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5678 if (COMPARISON_CLASS_P (*from_p))
5679 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5682 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5684 /* We should have got an SSA name from the start. */
5685 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5686 || ! gimple_in_ssa_p (cfun));
5689 gimplify_seq_add_stmt (pre_p, assign);
5690 gsi = gsi_last (*pre_p);
5691 maybe_fold_stmt (&gsi);
5693 if (want_value)
5695 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5696 return GS_OK;
5698 else
5699 *expr_p = NULL;
5701 return GS_ALL_DONE;
5704 /* Gimplify a comparison between two variable-sized objects. Do this
5705 with a call to BUILT_IN_MEMCMP. */
5707 static enum gimplify_status
5708 gimplify_variable_sized_compare (tree *expr_p)
5710 location_t loc = EXPR_LOCATION (*expr_p);
5711 tree op0 = TREE_OPERAND (*expr_p, 0);
5712 tree op1 = TREE_OPERAND (*expr_p, 1);
5713 tree t, arg, dest, src, expr;
5715 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5716 arg = unshare_expr (arg);
5717 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5718 src = build_fold_addr_expr_loc (loc, op1);
5719 dest = build_fold_addr_expr_loc (loc, op0);
5720 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5721 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5723 expr
5724 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5725 SET_EXPR_LOCATION (expr, loc);
5726 *expr_p = expr;
5728 return GS_OK;
5731 /* Gimplify a comparison between two aggregate objects of integral scalar
5732 mode as a comparison between the bitwise equivalent scalar values. */
5734 static enum gimplify_status
5735 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5737 location_t loc = EXPR_LOCATION (*expr_p);
5738 tree op0 = TREE_OPERAND (*expr_p, 0);
5739 tree op1 = TREE_OPERAND (*expr_p, 1);
5741 tree type = TREE_TYPE (op0);
5742 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5744 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5745 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5747 *expr_p
5748 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5750 return GS_OK;
5753 /* Gimplify an expression sequence. This function gimplifies each
5754 expression and rewrites the original expression with the last
5755 expression of the sequence in GIMPLE form.
5757 PRE_P points to the list where the side effects for all the
5758 expressions in the sequence will be emitted.
5760 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5762 static enum gimplify_status
5763 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5765 tree t = *expr_p;
5769 tree *sub_p = &TREE_OPERAND (t, 0);
5771 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5772 gimplify_compound_expr (sub_p, pre_p, false);
5773 else
5774 gimplify_stmt (sub_p, pre_p);
5776 t = TREE_OPERAND (t, 1);
5778 while (TREE_CODE (t) == COMPOUND_EXPR);
5780 *expr_p = t;
5781 if (want_value)
5782 return GS_OK;
5783 else
5785 gimplify_stmt (expr_p, pre_p);
5786 return GS_ALL_DONE;
5790 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5791 gimplify. After gimplification, EXPR_P will point to a new temporary
5792 that holds the original value of the SAVE_EXPR node.
5794 PRE_P points to the list where side effects that must happen before
5795 *EXPR_P should be stored. */
5797 static enum gimplify_status
5798 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5800 enum gimplify_status ret = GS_ALL_DONE;
5801 tree val;
5803 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5804 val = TREE_OPERAND (*expr_p, 0);
5806 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5807 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5809 /* The operand may be a void-valued expression. It is
5810 being executed only for its side-effects. */
5811 if (TREE_TYPE (val) == void_type_node)
5813 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5814 is_gimple_stmt, fb_none);
5815 val = NULL;
5817 else
5818 /* The temporary may not be an SSA name as later abnormal and EH
5819 control flow may invalidate use/def domination. */
5820 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5822 TREE_OPERAND (*expr_p, 0) = val;
5823 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5826 *expr_p = val;
5828 return ret;
5831 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5833 unary_expr
5834 : ...
5835 | '&' varname
5838 PRE_P points to the list where side effects that must happen before
5839 *EXPR_P should be stored.
5841 POST_P points to the list where side effects that must happen after
5842 *EXPR_P should be stored. */
5844 static enum gimplify_status
5845 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5847 tree expr = *expr_p;
5848 tree op0 = TREE_OPERAND (expr, 0);
5849 enum gimplify_status ret;
5850 location_t loc = EXPR_LOCATION (*expr_p);
5852 switch (TREE_CODE (op0))
5854 case INDIRECT_REF:
5855 do_indirect_ref:
5856 /* Check if we are dealing with an expression of the form '&*ptr'.
5857 While the front end folds away '&*ptr' into 'ptr', these
5858 expressions may be generated internally by the compiler (e.g.,
5859 builtins like __builtin_va_end). */
5860 /* Caution: the silent array decomposition semantics we allow for
5861 ADDR_EXPR means we can't always discard the pair. */
5862 /* Gimplification of the ADDR_EXPR operand may drop
5863 cv-qualification conversions, so make sure we add them if
5864 needed. */
5866 tree op00 = TREE_OPERAND (op0, 0);
5867 tree t_expr = TREE_TYPE (expr);
5868 tree t_op00 = TREE_TYPE (op00);
5870 if (!useless_type_conversion_p (t_expr, t_op00))
5871 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5872 *expr_p = op00;
5873 ret = GS_OK;
5875 break;
5877 case VIEW_CONVERT_EXPR:
5878 /* Take the address of our operand and then convert it to the type of
5879 this ADDR_EXPR.
5881 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5882 all clear. The impact of this transformation is even less clear. */
5884 /* If the operand is a useless conversion, look through it. Doing so
5885 guarantees that the ADDR_EXPR and its operand will remain of the
5886 same type. */
5887 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5888 op0 = TREE_OPERAND (op0, 0);
5890 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5891 build_fold_addr_expr_loc (loc,
5892 TREE_OPERAND (op0, 0)));
5893 ret = GS_OK;
5894 break;
5896 case MEM_REF:
5897 if (integer_zerop (TREE_OPERAND (op0, 1)))
5898 goto do_indirect_ref;
5900 /* fall through */
5902 default:
5903 /* If we see a call to a declared builtin or see its address
5904 being taken (we can unify those cases here) then we can mark
5905 the builtin for implicit generation by GCC. */
5906 if (TREE_CODE (op0) == FUNCTION_DECL
5907 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5908 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5909 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5911 /* We use fb_either here because the C frontend sometimes takes
5912 the address of a call that returns a struct; see
5913 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5914 the implied temporary explicit. */
5916 /* Make the operand addressable. */
5917 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5918 is_gimple_addressable, fb_either);
5919 if (ret == GS_ERROR)
5920 break;
5922 /* Then mark it. Beware that it may not be possible to do so directly
5923 if a temporary has been created by the gimplification. */
5924 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5926 op0 = TREE_OPERAND (expr, 0);
5928 /* For various reasons, the gimplification of the expression
5929 may have made a new INDIRECT_REF. */
5930 if (TREE_CODE (op0) == INDIRECT_REF)
5931 goto do_indirect_ref;
5933 mark_addressable (TREE_OPERAND (expr, 0));
5935 /* The FEs may end up building ADDR_EXPRs early on a decl with
5936 an incomplete type. Re-build ADDR_EXPRs in canonical form
5937 here. */
5938 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5939 *expr_p = build_fold_addr_expr (op0);
5941 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5942 recompute_tree_invariant_for_addr_expr (*expr_p);
5944 /* If we re-built the ADDR_EXPR add a conversion to the original type
5945 if required. */
5946 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5947 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5949 break;
5952 return ret;
5955 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5956 value; output operands should be a gimple lvalue. */
5958 static enum gimplify_status
5959 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5961 tree expr;
5962 int noutputs;
5963 const char **oconstraints;
5964 int i;
5965 tree link;
5966 const char *constraint;
5967 bool allows_mem, allows_reg, is_inout;
5968 enum gimplify_status ret, tret;
5969 gasm *stmt;
5970 vec<tree, va_gc> *inputs;
5971 vec<tree, va_gc> *outputs;
5972 vec<tree, va_gc> *clobbers;
5973 vec<tree, va_gc> *labels;
5974 tree link_next;
5976 expr = *expr_p;
5977 noutputs = list_length (ASM_OUTPUTS (expr));
5978 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5980 inputs = NULL;
5981 outputs = NULL;
5982 clobbers = NULL;
5983 labels = NULL;
5985 ret = GS_ALL_DONE;
5986 link_next = NULL_TREE;
5987 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5989 bool ok;
5990 size_t constraint_len;
5992 link_next = TREE_CHAIN (link);
5994 oconstraints[i]
5995 = constraint
5996 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5997 constraint_len = strlen (constraint);
5998 if (constraint_len == 0)
5999 continue;
6001 ok = parse_output_constraint (&constraint, i, 0, 0,
6002 &allows_mem, &allows_reg, &is_inout);
6003 if (!ok)
6005 ret = GS_ERROR;
6006 is_inout = false;
6009 if (!allows_reg && allows_mem)
6010 mark_addressable (TREE_VALUE (link));
6012 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6013 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6014 fb_lvalue | fb_mayfail);
6015 if (tret == GS_ERROR)
6017 error ("invalid lvalue in asm output %d", i);
6018 ret = tret;
6021 /* If the constraint does not allow memory make sure we gimplify
6022 it to a register if it is not already but its base is. This
6023 happens for complex and vector components. */
6024 if (!allows_mem)
6026 tree op = TREE_VALUE (link);
6027 if (! is_gimple_val (op)
6028 && is_gimple_reg_type (TREE_TYPE (op))
6029 && is_gimple_reg (get_base_address (op)))
6031 tree tem = create_tmp_reg (TREE_TYPE (op));
6032 tree ass;
6033 if (is_inout)
6035 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6036 tem, unshare_expr (op));
6037 gimplify_and_add (ass, pre_p);
6039 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6040 gimplify_and_add (ass, post_p);
6042 TREE_VALUE (link) = tem;
6043 tret = GS_OK;
6047 vec_safe_push (outputs, link);
6048 TREE_CHAIN (link) = NULL_TREE;
6050 if (is_inout)
6052 /* An input/output operand. To give the optimizers more
6053 flexibility, split it into separate input and output
6054 operands. */
6055 tree input;
6056 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6057 char buf[11];
6059 /* Turn the in/out constraint into an output constraint. */
6060 char *p = xstrdup (constraint);
6061 p[0] = '=';
6062 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6064 /* And add a matching input constraint. */
6065 if (allows_reg)
6067 sprintf (buf, "%u", i);
6069 /* If there are multiple alternatives in the constraint,
6070 handle each of them individually. Those that allow register
6071 will be replaced with operand number, the others will stay
6072 unchanged. */
6073 if (strchr (p, ',') != NULL)
6075 size_t len = 0, buflen = strlen (buf);
6076 char *beg, *end, *str, *dst;
6078 for (beg = p + 1;;)
6080 end = strchr (beg, ',');
6081 if (end == NULL)
6082 end = strchr (beg, '\0');
6083 if ((size_t) (end - beg) < buflen)
6084 len += buflen + 1;
6085 else
6086 len += end - beg + 1;
6087 if (*end)
6088 beg = end + 1;
6089 else
6090 break;
6093 str = (char *) alloca (len);
6094 for (beg = p + 1, dst = str;;)
6096 const char *tem;
6097 bool mem_p, reg_p, inout_p;
6099 end = strchr (beg, ',');
6100 if (end)
6101 *end = '\0';
6102 beg[-1] = '=';
6103 tem = beg - 1;
6104 parse_output_constraint (&tem, i, 0, 0,
6105 &mem_p, &reg_p, &inout_p);
6106 if (dst != str)
6107 *dst++ = ',';
6108 if (reg_p)
6110 memcpy (dst, buf, buflen);
6111 dst += buflen;
6113 else
6115 if (end)
6116 len = end - beg;
6117 else
6118 len = strlen (beg);
6119 memcpy (dst, beg, len);
6120 dst += len;
6122 if (end)
6123 beg = end + 1;
6124 else
6125 break;
6127 *dst = '\0';
6128 input = build_string (dst - str, str);
6130 else
6131 input = build_string (strlen (buf), buf);
6133 else
6134 input = build_string (constraint_len - 1, constraint + 1);
6136 free (p);
6138 input = build_tree_list (build_tree_list (NULL_TREE, input),
6139 unshare_expr (TREE_VALUE (link)));
6140 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6144 link_next = NULL_TREE;
6145 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6147 link_next = TREE_CHAIN (link);
6148 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6149 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6150 oconstraints, &allows_mem, &allows_reg);
6152 /* If we can't make copies, we can only accept memory. */
6153 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6155 if (allows_mem)
6156 allows_reg = 0;
6157 else
6159 error ("impossible constraint in %<asm%>");
6160 error ("non-memory input %d must stay in memory", i);
6161 return GS_ERROR;
6165 /* If the operand is a memory input, it should be an lvalue. */
6166 if (!allows_reg && allows_mem)
6168 tree inputv = TREE_VALUE (link);
6169 STRIP_NOPS (inputv);
6170 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6171 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6172 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6173 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6174 || TREE_CODE (inputv) == MODIFY_EXPR)
6175 TREE_VALUE (link) = error_mark_node;
6176 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6177 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6178 if (tret != GS_ERROR)
6180 /* Unlike output operands, memory inputs are not guaranteed
6181 to be lvalues by the FE, and while the expressions are
6182 marked addressable there, if it is e.g. a statement
6183 expression, temporaries in it might not end up being
6184 addressable. They might be already used in the IL and thus
6185 it is too late to make them addressable now though. */
6186 tree x = TREE_VALUE (link);
6187 while (handled_component_p (x))
6188 x = TREE_OPERAND (x, 0);
6189 if (TREE_CODE (x) == MEM_REF
6190 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6191 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6192 if ((VAR_P (x)
6193 || TREE_CODE (x) == PARM_DECL
6194 || TREE_CODE (x) == RESULT_DECL)
6195 && !TREE_ADDRESSABLE (x)
6196 && is_gimple_reg (x))
6198 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6199 input_location), 0,
6200 "memory input %d is not directly addressable",
6202 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6205 mark_addressable (TREE_VALUE (link));
6206 if (tret == GS_ERROR)
6208 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6209 "memory input %d is not directly addressable", i);
6210 ret = tret;
6213 else
6215 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6216 is_gimple_asm_val, fb_rvalue);
6217 if (tret == GS_ERROR)
6218 ret = tret;
6221 TREE_CHAIN (link) = NULL_TREE;
6222 vec_safe_push (inputs, link);
6225 link_next = NULL_TREE;
6226 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6228 link_next = TREE_CHAIN (link);
6229 TREE_CHAIN (link) = NULL_TREE;
6230 vec_safe_push (clobbers, link);
6233 link_next = NULL_TREE;
6234 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6236 link_next = TREE_CHAIN (link);
6237 TREE_CHAIN (link) = NULL_TREE;
6238 vec_safe_push (labels, link);
6241 /* Do not add ASMs with errors to the gimple IL stream. */
6242 if (ret != GS_ERROR)
6244 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6245 inputs, outputs, clobbers, labels);
6247 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6248 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6250 gimplify_seq_add_stmt (pre_p, stmt);
6253 return ret;
6256 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6257 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6258 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6259 return to this function.
6261 FIXME should we complexify the prequeue handling instead? Or use flags
6262 for all the cleanups and let the optimizer tighten them up? The current
6263 code seems pretty fragile; it will break on a cleanup within any
6264 non-conditional nesting. But any such nesting would be broken, anyway;
6265 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6266 and continues out of it. We can do that at the RTL level, though, so
6267 having an optimizer to tighten up try/finally regions would be a Good
6268 Thing. */
6270 static enum gimplify_status
6271 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6273 gimple_stmt_iterator iter;
6274 gimple_seq body_sequence = NULL;
6276 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6278 /* We only care about the number of conditions between the innermost
6279 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6280 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6281 int old_conds = gimplify_ctxp->conditions;
6282 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6283 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6284 gimplify_ctxp->conditions = 0;
6285 gimplify_ctxp->conditional_cleanups = NULL;
6286 gimplify_ctxp->in_cleanup_point_expr = true;
6288 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6290 gimplify_ctxp->conditions = old_conds;
6291 gimplify_ctxp->conditional_cleanups = old_cleanups;
6292 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6294 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6296 gimple *wce = gsi_stmt (iter);
6298 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6300 if (gsi_one_before_end_p (iter))
6302 /* Note that gsi_insert_seq_before and gsi_remove do not
6303 scan operands, unlike some other sequence mutators. */
6304 if (!gimple_wce_cleanup_eh_only (wce))
6305 gsi_insert_seq_before_without_update (&iter,
6306 gimple_wce_cleanup (wce),
6307 GSI_SAME_STMT);
6308 gsi_remove (&iter, true);
6309 break;
6311 else
6313 gtry *gtry;
6314 gimple_seq seq;
6315 enum gimple_try_flags kind;
6317 if (gimple_wce_cleanup_eh_only (wce))
6318 kind = GIMPLE_TRY_CATCH;
6319 else
6320 kind = GIMPLE_TRY_FINALLY;
6321 seq = gsi_split_seq_after (iter);
6323 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6324 /* Do not use gsi_replace here, as it may scan operands.
6325 We want to do a simple structural modification only. */
6326 gsi_set_stmt (&iter, gtry);
6327 iter = gsi_start (gtry->eval);
6330 else
6331 gsi_next (&iter);
6334 gimplify_seq_add_seq (pre_p, body_sequence);
6335 if (temp)
6337 *expr_p = temp;
6338 return GS_OK;
6340 else
6342 *expr_p = NULL;
6343 return GS_ALL_DONE;
6347 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6348 is the cleanup action required. EH_ONLY is true if the cleanup should
6349 only be executed if an exception is thrown, not on normal exit.
6350 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6351 only valid for clobbers. */
6353 static void
6354 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6355 bool force_uncond = false)
6357 gimple *wce;
6358 gimple_seq cleanup_stmts = NULL;
6360 /* Errors can result in improperly nested cleanups. Which results in
6361 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6362 if (seen_error ())
6363 return;
6365 if (gimple_conditional_context ())
6367 /* If we're in a conditional context, this is more complex. We only
6368 want to run the cleanup if we actually ran the initialization that
6369 necessitates it, but we want to run it after the end of the
6370 conditional context. So we wrap the try/finally around the
6371 condition and use a flag to determine whether or not to actually
6372 run the destructor. Thus
6374 test ? f(A()) : 0
6376 becomes (approximately)
6378 flag = 0;
6379 try {
6380 if (test) { A::A(temp); flag = 1; val = f(temp); }
6381 else { val = 0; }
6382 } finally {
6383 if (flag) A::~A(temp);
6387 if (force_uncond)
6389 gimplify_stmt (&cleanup, &cleanup_stmts);
6390 wce = gimple_build_wce (cleanup_stmts);
6391 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6393 else
6395 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6396 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6397 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6399 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6400 gimplify_stmt (&cleanup, &cleanup_stmts);
6401 wce = gimple_build_wce (cleanup_stmts);
6403 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6404 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6405 gimplify_seq_add_stmt (pre_p, ftrue);
6407 /* Because of this manipulation, and the EH edges that jump
6408 threading cannot redirect, the temporary (VAR) will appear
6409 to be used uninitialized. Don't warn. */
6410 TREE_NO_WARNING (var) = 1;
6413 else
6415 gimplify_stmt (&cleanup, &cleanup_stmts);
6416 wce = gimple_build_wce (cleanup_stmts);
6417 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6418 gimplify_seq_add_stmt (pre_p, wce);
6422 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6424 static enum gimplify_status
6425 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6427 tree targ = *expr_p;
6428 tree temp = TARGET_EXPR_SLOT (targ);
6429 tree init = TARGET_EXPR_INITIAL (targ);
6430 enum gimplify_status ret;
6432 bool unpoison_empty_seq = false;
6433 gimple_stmt_iterator unpoison_it;
6435 if (init)
6437 tree cleanup = NULL_TREE;
6439 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6440 to the temps list. Handle also variable length TARGET_EXPRs. */
6441 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6443 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6444 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6445 gimplify_vla_decl (temp, pre_p);
6447 else
6449 /* Save location where we need to place unpoisoning. It's possible
6450 that a variable will be converted to needs_to_live_in_memory. */
6451 unpoison_it = gsi_last (*pre_p);
6452 unpoison_empty_seq = gsi_end_p (unpoison_it);
6454 gimple_add_tmp_var (temp);
6457 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6458 expression is supposed to initialize the slot. */
6459 if (VOID_TYPE_P (TREE_TYPE (init)))
6460 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6461 else
6463 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6464 init = init_expr;
6465 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6466 init = NULL;
6467 ggc_free (init_expr);
6469 if (ret == GS_ERROR)
6471 /* PR c++/28266 Make sure this is expanded only once. */
6472 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6473 return GS_ERROR;
6475 if (init)
6476 gimplify_and_add (init, pre_p);
6478 /* If needed, push the cleanup for the temp. */
6479 if (TARGET_EXPR_CLEANUP (targ))
6481 if (CLEANUP_EH_ONLY (targ))
6482 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6483 CLEANUP_EH_ONLY (targ), pre_p);
6484 else
6485 cleanup = TARGET_EXPR_CLEANUP (targ);
6488 /* Add a clobber for the temporary going out of scope, like
6489 gimplify_bind_expr. */
6490 if (gimplify_ctxp->in_cleanup_point_expr
6491 && needs_to_live_in_memory (temp))
6493 if (flag_stack_reuse == SR_ALL)
6495 tree clobber = build_constructor (TREE_TYPE (temp),
6496 NULL);
6497 TREE_THIS_VOLATILE (clobber) = true;
6498 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6499 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6501 if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope))
6503 tree asan_cleanup = build_asan_poison_call_expr (temp);
6504 if (asan_cleanup)
6506 if (unpoison_empty_seq)
6507 unpoison_it = gsi_start (*pre_p);
6509 asan_poison_variable (temp, false, &unpoison_it,
6510 unpoison_empty_seq);
6511 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6515 if (cleanup)
6516 gimple_push_cleanup (temp, cleanup, false, pre_p);
6518 /* Only expand this once. */
6519 TREE_OPERAND (targ, 3) = init;
6520 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6522 else
6523 /* We should have expanded this before. */
6524 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6526 *expr_p = temp;
6527 return GS_OK;
6530 /* Gimplification of expression trees. */
6532 /* Gimplify an expression which appears at statement context. The
6533 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6534 NULL, a new sequence is allocated.
6536 Return true if we actually added a statement to the queue. */
6538 bool
6539 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6541 gimple_seq_node last;
6543 last = gimple_seq_last (*seq_p);
6544 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6545 return last != gimple_seq_last (*seq_p);
6548 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6549 to CTX. If entries already exist, force them to be some flavor of private.
6550 If there is no enclosing parallel, do nothing. */
6552 void
6553 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6555 splay_tree_node n;
6557 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6558 return;
6562 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6563 if (n != NULL)
6565 if (n->value & GOVD_SHARED)
6566 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6567 else if (n->value & GOVD_MAP)
6568 n->value |= GOVD_MAP_TO_ONLY;
6569 else
6570 return;
6572 else if ((ctx->region_type & ORT_TARGET) != 0)
6574 if (ctx->target_map_scalars_firstprivate)
6575 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6576 else
6577 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6579 else if (ctx->region_type != ORT_WORKSHARE
6580 && ctx->region_type != ORT_SIMD
6581 && ctx->region_type != ORT_ACC
6582 && !(ctx->region_type & ORT_TARGET_DATA))
6583 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6585 ctx = ctx->outer_context;
6587 while (ctx);
6590 /* Similarly for each of the type sizes of TYPE. */
6592 static void
6593 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6595 if (type == NULL || type == error_mark_node)
6596 return;
6597 type = TYPE_MAIN_VARIANT (type);
6599 if (ctx->privatized_types->add (type))
6600 return;
6602 switch (TREE_CODE (type))
6604 case INTEGER_TYPE:
6605 case ENUMERAL_TYPE:
6606 case BOOLEAN_TYPE:
6607 case REAL_TYPE:
6608 case FIXED_POINT_TYPE:
6609 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6610 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6611 break;
6613 case ARRAY_TYPE:
6614 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6615 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6616 break;
6618 case RECORD_TYPE:
6619 case UNION_TYPE:
6620 case QUAL_UNION_TYPE:
6622 tree field;
6623 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6624 if (TREE_CODE (field) == FIELD_DECL)
6626 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6627 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6630 break;
6632 case POINTER_TYPE:
6633 case REFERENCE_TYPE:
6634 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6635 break;
6637 default:
6638 break;
6641 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6642 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6643 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6646 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6648 static void
6649 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6651 splay_tree_node n;
6652 unsigned int nflags;
6653 tree t;
6655 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6656 return;
6658 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6659 there are constructors involved somewhere. Exception is a shared clause,
6660 there is nothing privatized in that case. */
6661 if ((flags & GOVD_SHARED) == 0
6662 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6663 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6664 flags |= GOVD_SEEN;
6666 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6667 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6669 /* We shouldn't be re-adding the decl with the same data
6670 sharing class. */
6671 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6672 nflags = n->value | flags;
6673 /* The only combination of data sharing classes we should see is
6674 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6675 reduction variables to be used in data sharing clauses. */
6676 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6677 || ((nflags & GOVD_DATA_SHARE_CLASS)
6678 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6679 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6680 n->value = nflags;
6681 return;
6684 /* When adding a variable-sized variable, we have to handle all sorts
6685 of additional bits of data: the pointer replacement variable, and
6686 the parameters of the type. */
6687 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6689 /* Add the pointer replacement variable as PRIVATE if the variable
6690 replacement is private, else FIRSTPRIVATE since we'll need the
6691 address of the original variable either for SHARED, or for the
6692 copy into or out of the context. */
6693 if (!(flags & GOVD_LOCAL))
6695 if (flags & GOVD_MAP)
6696 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6697 else if (flags & GOVD_PRIVATE)
6698 nflags = GOVD_PRIVATE;
6699 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6700 && (flags & GOVD_FIRSTPRIVATE))
6701 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6702 else
6703 nflags = GOVD_FIRSTPRIVATE;
6704 nflags |= flags & GOVD_SEEN;
6705 t = DECL_VALUE_EXPR (decl);
6706 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6707 t = TREE_OPERAND (t, 0);
6708 gcc_assert (DECL_P (t));
6709 omp_add_variable (ctx, t, nflags);
6712 /* Add all of the variable and type parameters (which should have
6713 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6714 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6715 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6716 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6718 /* The variable-sized variable itself is never SHARED, only some form
6719 of PRIVATE. The sharing would take place via the pointer variable
6720 which we remapped above. */
6721 if (flags & GOVD_SHARED)
6722 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6723 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6725 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6726 alloca statement we generate for the variable, so make sure it
6727 is available. This isn't automatically needed for the SHARED
6728 case, since we won't be allocating local storage then.
6729 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6730 in this case omp_notice_variable will be called later
6731 on when it is gimplified. */
6732 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6733 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6734 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6736 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6737 && lang_hooks.decls.omp_privatize_by_reference (decl))
6739 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6741 /* Similar to the direct variable sized case above, we'll need the
6742 size of references being privatized. */
6743 if ((flags & GOVD_SHARED) == 0)
6745 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6746 if (DECL_P (t))
6747 omp_notice_variable (ctx, t, true);
6751 if (n != NULL)
6752 n->value |= flags;
6753 else
6754 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6756 /* For reductions clauses in OpenACC loop directives, by default create a
6757 copy clause on the enclosing parallel construct for carrying back the
6758 results. */
6759 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6761 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6762 while (outer_ctx)
6764 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6765 if (n != NULL)
6767 /* Ignore local variables and explicitly declared clauses. */
6768 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6769 break;
6770 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6772 /* According to the OpenACC spec, such a reduction variable
6773 should already have a copy map on a kernels construct,
6774 verify that here. */
6775 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6776 && (n->value & GOVD_MAP));
6778 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6780 /* Remove firstprivate and make it a copy map. */
6781 n->value &= ~GOVD_FIRSTPRIVATE;
6782 n->value |= GOVD_MAP;
6785 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6787 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6788 GOVD_MAP | GOVD_SEEN);
6789 break;
6791 outer_ctx = outer_ctx->outer_context;
6796 /* Notice a threadprivate variable DECL used in OMP context CTX.
6797 This just prints out diagnostics about threadprivate variable uses
6798 in untied tasks. If DECL2 is non-NULL, prevent this warning
6799 on that variable. */
6801 static bool
6802 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6803 tree decl2)
6805 splay_tree_node n;
6806 struct gimplify_omp_ctx *octx;
6808 for (octx = ctx; octx; octx = octx->outer_context)
6809 if ((octx->region_type & ORT_TARGET) != 0)
6811 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6812 if (n == NULL)
6814 error ("threadprivate variable %qE used in target region",
6815 DECL_NAME (decl));
6816 error_at (octx->location, "enclosing target region");
6817 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6819 if (decl2)
6820 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6823 if (ctx->region_type != ORT_UNTIED_TASK)
6824 return false;
6825 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6826 if (n == NULL)
6828 error ("threadprivate variable %qE used in untied task",
6829 DECL_NAME (decl));
6830 error_at (ctx->location, "enclosing task");
6831 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6833 if (decl2)
6834 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6835 return false;
6838 /* Return true if global var DECL is device resident. */
6840 static bool
6841 device_resident_p (tree decl)
6843 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6845 if (!attr)
6846 return false;
6848 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6850 tree c = TREE_VALUE (t);
6851 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6852 return true;
6855 return false;
6858 /* Return true if DECL has an ACC DECLARE attribute. */
6860 static bool
6861 is_oacc_declared (tree decl)
6863 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6864 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6865 return declared != NULL_TREE;
6868 /* Determine outer default flags for DECL mentioned in an OMP region
6869 but not declared in an enclosing clause.
6871 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6872 remapped firstprivate instead of shared. To some extent this is
6873 addressed in omp_firstprivatize_type_sizes, but not
6874 effectively. */
6876 static unsigned
6877 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6878 bool in_code, unsigned flags)
6880 enum omp_clause_default_kind default_kind = ctx->default_kind;
6881 enum omp_clause_default_kind kind;
6883 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6884 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6885 default_kind = kind;
6887 switch (default_kind)
6889 case OMP_CLAUSE_DEFAULT_NONE:
6891 const char *rtype;
6893 if (ctx->region_type & ORT_PARALLEL)
6894 rtype = "parallel";
6895 else if (ctx->region_type & ORT_TASK)
6896 rtype = "task";
6897 else if (ctx->region_type & ORT_TEAMS)
6898 rtype = "teams";
6899 else
6900 gcc_unreachable ();
6902 error ("%qE not specified in enclosing %qs",
6903 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6904 error_at (ctx->location, "enclosing %qs", rtype);
6906 /* FALLTHRU */
6907 case OMP_CLAUSE_DEFAULT_SHARED:
6908 flags |= GOVD_SHARED;
6909 break;
6910 case OMP_CLAUSE_DEFAULT_PRIVATE:
6911 flags |= GOVD_PRIVATE;
6912 break;
6913 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6914 flags |= GOVD_FIRSTPRIVATE;
6915 break;
6916 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6917 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6918 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6919 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6921 omp_notice_variable (octx, decl, in_code);
6922 for (; octx; octx = octx->outer_context)
6924 splay_tree_node n2;
6926 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6927 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6928 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6929 continue;
6930 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6932 flags |= GOVD_FIRSTPRIVATE;
6933 goto found_outer;
6935 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6937 flags |= GOVD_SHARED;
6938 goto found_outer;
6943 if (TREE_CODE (decl) == PARM_DECL
6944 || (!is_global_var (decl)
6945 && DECL_CONTEXT (decl) == current_function_decl))
6946 flags |= GOVD_FIRSTPRIVATE;
6947 else
6948 flags |= GOVD_SHARED;
6949 found_outer:
6950 break;
6952 default:
6953 gcc_unreachable ();
6956 return flags;
6960 /* Determine outer default flags for DECL mentioned in an OACC region
6961 but not declared in an enclosing clause. */
6963 static unsigned
6964 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6966 const char *rkind;
6967 bool on_device = false;
6968 bool declared = is_oacc_declared (decl);
6969 tree type = TREE_TYPE (decl);
6971 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6972 type = TREE_TYPE (type);
6974 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6975 && is_global_var (decl)
6976 && device_resident_p (decl))
6978 on_device = true;
6979 flags |= GOVD_MAP_TO_ONLY;
6982 switch (ctx->region_type)
6984 case ORT_ACC_KERNELS:
6985 rkind = "kernels";
6987 if (AGGREGATE_TYPE_P (type))
6989 /* Aggregates default to 'present_or_copy', or 'present'. */
6990 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6991 flags |= GOVD_MAP;
6992 else
6993 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6995 else
6996 /* Scalars default to 'copy'. */
6997 flags |= GOVD_MAP | GOVD_MAP_FORCE;
6999 break;
7001 case ORT_ACC_PARALLEL:
7002 rkind = "parallel";
7004 if (on_device || declared)
7005 flags |= GOVD_MAP;
7006 else if (AGGREGATE_TYPE_P (type))
7008 /* Aggregates default to 'present_or_copy', or 'present'. */
7009 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7010 flags |= GOVD_MAP;
7011 else
7012 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7014 else
7015 /* Scalars default to 'firstprivate'. */
7016 flags |= GOVD_FIRSTPRIVATE;
7018 break;
7020 default:
7021 gcc_unreachable ();
7024 if (DECL_ARTIFICIAL (decl))
7025 ; /* We can get compiler-generated decls, and should not complain
7026 about them. */
7027 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7029 error ("%qE not specified in enclosing OpenACC %qs construct",
7030 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7031 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7033 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7034 ; /* Handled above. */
7035 else
7036 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7038 return flags;
7041 /* Record the fact that DECL was used within the OMP context CTX.
7042 IN_CODE is true when real code uses DECL, and false when we should
7043 merely emit default(none) errors. Return true if DECL is going to
7044 be remapped and thus DECL shouldn't be gimplified into its
7045 DECL_VALUE_EXPR (if any). */
7047 static bool
7048 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7050 splay_tree_node n;
7051 unsigned flags = in_code ? GOVD_SEEN : 0;
7052 bool ret = false, shared;
7054 if (error_operand_p (decl))
7055 return false;
7057 if (ctx->region_type == ORT_NONE)
7058 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7060 if (is_global_var (decl))
7062 /* Threadprivate variables are predetermined. */
7063 if (DECL_THREAD_LOCAL_P (decl))
7064 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7066 if (DECL_HAS_VALUE_EXPR_P (decl))
7068 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7070 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7071 return omp_notice_threadprivate_variable (ctx, decl, value);
7074 if (gimplify_omp_ctxp->outer_context == NULL
7075 && VAR_P (decl)
7076 && oacc_get_fn_attrib (current_function_decl))
7078 location_t loc = DECL_SOURCE_LOCATION (decl);
7080 if (lookup_attribute ("omp declare target link",
7081 DECL_ATTRIBUTES (decl)))
7083 error_at (loc,
7084 "%qE with %<link%> clause used in %<routine%> function",
7085 DECL_NAME (decl));
7086 return false;
7088 else if (!lookup_attribute ("omp declare target",
7089 DECL_ATTRIBUTES (decl)))
7091 error_at (loc,
7092 "%qE requires a %<declare%> directive for use "
7093 "in a %<routine%> function", DECL_NAME (decl));
7094 return false;
7099 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7100 if ((ctx->region_type & ORT_TARGET) != 0)
7102 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7103 if (n == NULL)
7105 unsigned nflags = flags;
7106 if (ctx->target_map_pointers_as_0len_arrays
7107 || ctx->target_map_scalars_firstprivate)
7109 bool is_declare_target = false;
7110 bool is_scalar = false;
7111 if (is_global_var (decl)
7112 && varpool_node::get_create (decl)->offloadable)
7114 struct gimplify_omp_ctx *octx;
7115 for (octx = ctx->outer_context;
7116 octx; octx = octx->outer_context)
7118 n = splay_tree_lookup (octx->variables,
7119 (splay_tree_key)decl);
7120 if (n
7121 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7122 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7123 break;
7125 is_declare_target = octx == NULL;
7127 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7128 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7129 if (is_declare_target)
7131 else if (ctx->target_map_pointers_as_0len_arrays
7132 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7133 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7134 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7135 == POINTER_TYPE)))
7136 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7137 else if (is_scalar)
7138 nflags |= GOVD_FIRSTPRIVATE;
7141 struct gimplify_omp_ctx *octx = ctx->outer_context;
7142 if ((ctx->region_type & ORT_ACC) && octx)
7144 /* Look in outer OpenACC contexts, to see if there's a
7145 data attribute for this variable. */
7146 omp_notice_variable (octx, decl, in_code);
7148 for (; octx; octx = octx->outer_context)
7150 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7151 break;
7152 splay_tree_node n2
7153 = splay_tree_lookup (octx->variables,
7154 (splay_tree_key) decl);
7155 if (n2)
7157 if (octx->region_type == ORT_ACC_HOST_DATA)
7158 error ("variable %qE declared in enclosing "
7159 "%<host_data%> region", DECL_NAME (decl));
7160 nflags |= GOVD_MAP;
7161 if (octx->region_type == ORT_ACC_DATA
7162 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7163 nflags |= GOVD_MAP_0LEN_ARRAY;
7164 goto found_outer;
7170 tree type = TREE_TYPE (decl);
7172 if (nflags == flags
7173 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7174 && lang_hooks.decls.omp_privatize_by_reference (decl))
7175 type = TREE_TYPE (type);
7176 if (nflags == flags
7177 && !lang_hooks.types.omp_mappable_type (type))
7179 error ("%qD referenced in target region does not have "
7180 "a mappable type", decl);
7181 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7183 else if (nflags == flags)
7185 if ((ctx->region_type & ORT_ACC) != 0)
7186 nflags = oacc_default_clause (ctx, decl, flags);
7187 else
7188 nflags |= GOVD_MAP;
7191 found_outer:
7192 omp_add_variable (ctx, decl, nflags);
7194 else
7196 /* If nothing changed, there's nothing left to do. */
7197 if ((n->value & flags) == flags)
7198 return ret;
7199 flags |= n->value;
7200 n->value = flags;
7202 goto do_outer;
7205 if (n == NULL)
7207 if (ctx->region_type == ORT_WORKSHARE
7208 || ctx->region_type == ORT_SIMD
7209 || ctx->region_type == ORT_ACC
7210 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7211 goto do_outer;
7213 flags = omp_default_clause (ctx, decl, in_code, flags);
7215 if ((flags & GOVD_PRIVATE)
7216 && lang_hooks.decls.omp_private_outer_ref (decl))
7217 flags |= GOVD_PRIVATE_OUTER_REF;
7219 omp_add_variable (ctx, decl, flags);
7221 shared = (flags & GOVD_SHARED) != 0;
7222 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7223 goto do_outer;
7226 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7227 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7228 && DECL_SIZE (decl))
7230 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7232 splay_tree_node n2;
7233 tree t = DECL_VALUE_EXPR (decl);
7234 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7235 t = TREE_OPERAND (t, 0);
7236 gcc_assert (DECL_P (t));
7237 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7238 n2->value |= GOVD_SEEN;
7240 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7241 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7242 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7243 != INTEGER_CST))
7245 splay_tree_node n2;
7246 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7247 gcc_assert (DECL_P (t));
7248 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7249 if (n2)
7250 omp_notice_variable (ctx, t, true);
7254 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7255 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7257 /* If nothing changed, there's nothing left to do. */
7258 if ((n->value & flags) == flags)
7259 return ret;
7260 flags |= n->value;
7261 n->value = flags;
7263 do_outer:
7264 /* If the variable is private in the current context, then we don't
7265 need to propagate anything to an outer context. */
7266 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7267 return ret;
7268 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7269 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7270 return ret;
7271 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7272 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7273 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7274 return ret;
7275 if (ctx->outer_context
7276 && omp_notice_variable (ctx->outer_context, decl, in_code))
7277 return true;
7278 return ret;
7281 /* Verify that DECL is private within CTX. If there's specific information
7282 to the contrary in the innermost scope, generate an error. */
7284 static bool
7285 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7287 splay_tree_node n;
7289 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7290 if (n != NULL)
7292 if (n->value & GOVD_SHARED)
7294 if (ctx == gimplify_omp_ctxp)
7296 if (simd)
7297 error ("iteration variable %qE is predetermined linear",
7298 DECL_NAME (decl));
7299 else
7300 error ("iteration variable %qE should be private",
7301 DECL_NAME (decl));
7302 n->value = GOVD_PRIVATE;
7303 return true;
7305 else
7306 return false;
7308 else if ((n->value & GOVD_EXPLICIT) != 0
7309 && (ctx == gimplify_omp_ctxp
7310 || (ctx->region_type == ORT_COMBINED_PARALLEL
7311 && gimplify_omp_ctxp->outer_context == ctx)))
7313 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7314 error ("iteration variable %qE should not be firstprivate",
7315 DECL_NAME (decl));
7316 else if ((n->value & GOVD_REDUCTION) != 0)
7317 error ("iteration variable %qE should not be reduction",
7318 DECL_NAME (decl));
7319 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7320 error ("iteration variable %qE should not be linear",
7321 DECL_NAME (decl));
7322 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7323 error ("iteration variable %qE should not be lastprivate",
7324 DECL_NAME (decl));
7325 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7326 error ("iteration variable %qE should not be private",
7327 DECL_NAME (decl));
7328 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7329 error ("iteration variable %qE is predetermined linear",
7330 DECL_NAME (decl));
7332 return (ctx == gimplify_omp_ctxp
7333 || (ctx->region_type == ORT_COMBINED_PARALLEL
7334 && gimplify_omp_ctxp->outer_context == ctx));
7337 if (ctx->region_type != ORT_WORKSHARE
7338 && ctx->region_type != ORT_SIMD
7339 && ctx->region_type != ORT_ACC)
7340 return false;
7341 else if (ctx->outer_context)
7342 return omp_is_private (ctx->outer_context, decl, simd);
7343 return false;
7346 /* Return true if DECL is private within a parallel region
7347 that binds to the current construct's context or in parallel
7348 region's REDUCTION clause. */
7350 static bool
7351 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7353 splay_tree_node n;
7357 ctx = ctx->outer_context;
7358 if (ctx == NULL)
7360 if (is_global_var (decl))
7361 return false;
7363 /* References might be private, but might be shared too,
7364 when checking for copyprivate, assume they might be
7365 private, otherwise assume they might be shared. */
7366 if (copyprivate)
7367 return true;
7369 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7370 return false;
7372 /* Treat C++ privatized non-static data members outside
7373 of the privatization the same. */
7374 if (omp_member_access_dummy_var (decl))
7375 return false;
7377 return true;
7380 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7382 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7383 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7384 continue;
7386 if (n != NULL)
7388 if ((n->value & GOVD_LOCAL) != 0
7389 && omp_member_access_dummy_var (decl))
7390 return false;
7391 return (n->value & GOVD_SHARED) == 0;
7394 while (ctx->region_type == ORT_WORKSHARE
7395 || ctx->region_type == ORT_SIMD
7396 || ctx->region_type == ORT_ACC);
7397 return false;
7400 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7402 static tree
7403 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7405 tree t = *tp;
7407 /* If this node has been visited, unmark it and keep looking. */
7408 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7409 return t;
7411 if (IS_TYPE_OR_DECL_P (t))
7412 *walk_subtrees = 0;
7413 return NULL_TREE;
7416 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7417 and previous omp contexts. */
7419 static void
7420 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7421 enum omp_region_type region_type,
7422 enum tree_code code)
7424 struct gimplify_omp_ctx *ctx, *outer_ctx;
7425 tree c;
7426 hash_map<tree, tree> *struct_map_to_clause = NULL;
7427 tree *prev_list_p = NULL;
7429 ctx = new_omp_context (region_type);
7430 outer_ctx = ctx->outer_context;
7431 if (code == OMP_TARGET)
7433 if (!lang_GNU_Fortran ())
7434 ctx->target_map_pointers_as_0len_arrays = true;
7435 ctx->target_map_scalars_firstprivate = true;
7437 if (!lang_GNU_Fortran ())
7438 switch (code)
7440 case OMP_TARGET:
7441 case OMP_TARGET_DATA:
7442 case OMP_TARGET_ENTER_DATA:
7443 case OMP_TARGET_EXIT_DATA:
7444 case OACC_DECLARE:
7445 case OACC_HOST_DATA:
7446 ctx->target_firstprivatize_array_bases = true;
7447 default:
7448 break;
7451 while ((c = *list_p) != NULL)
7453 bool remove = false;
7454 bool notice_outer = true;
7455 const char *check_non_private = NULL;
7456 unsigned int flags;
7457 tree decl;
7459 switch (OMP_CLAUSE_CODE (c))
7461 case OMP_CLAUSE_PRIVATE:
7462 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7463 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7465 flags |= GOVD_PRIVATE_OUTER_REF;
7466 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7468 else
7469 notice_outer = false;
7470 goto do_add;
7471 case OMP_CLAUSE_SHARED:
7472 flags = GOVD_SHARED | GOVD_EXPLICIT;
7473 goto do_add;
7474 case OMP_CLAUSE_FIRSTPRIVATE:
7475 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7476 check_non_private = "firstprivate";
7477 goto do_add;
7478 case OMP_CLAUSE_LASTPRIVATE:
7479 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7480 check_non_private = "lastprivate";
7481 decl = OMP_CLAUSE_DECL (c);
7482 if (error_operand_p (decl))
7483 goto do_add;
7484 else if (outer_ctx
7485 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7486 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7487 && splay_tree_lookup (outer_ctx->variables,
7488 (splay_tree_key) decl) == NULL)
7490 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7491 if (outer_ctx->outer_context)
7492 omp_notice_variable (outer_ctx->outer_context, decl, true);
7494 else if (outer_ctx
7495 && (outer_ctx->region_type & ORT_TASK) != 0
7496 && outer_ctx->combined_loop
7497 && splay_tree_lookup (outer_ctx->variables,
7498 (splay_tree_key) decl) == NULL)
7500 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7501 if (outer_ctx->outer_context)
7502 omp_notice_variable (outer_ctx->outer_context, decl, true);
7504 else if (outer_ctx
7505 && (outer_ctx->region_type == ORT_WORKSHARE
7506 || outer_ctx->region_type == ORT_ACC)
7507 && outer_ctx->combined_loop
7508 && splay_tree_lookup (outer_ctx->variables,
7509 (splay_tree_key) decl) == NULL
7510 && !omp_check_private (outer_ctx, decl, false))
7512 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7513 if (outer_ctx->outer_context
7514 && (outer_ctx->outer_context->region_type
7515 == ORT_COMBINED_PARALLEL)
7516 && splay_tree_lookup (outer_ctx->outer_context->variables,
7517 (splay_tree_key) decl) == NULL)
7519 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7520 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7521 if (octx->outer_context)
7523 octx = octx->outer_context;
7524 if (octx->region_type == ORT_WORKSHARE
7525 && octx->combined_loop
7526 && splay_tree_lookup (octx->variables,
7527 (splay_tree_key) decl) == NULL
7528 && !omp_check_private (octx, decl, false))
7530 omp_add_variable (octx, decl,
7531 GOVD_LASTPRIVATE | GOVD_SEEN);
7532 octx = octx->outer_context;
7533 if (octx
7534 && octx->region_type == ORT_COMBINED_TEAMS
7535 && (splay_tree_lookup (octx->variables,
7536 (splay_tree_key) decl)
7537 == NULL))
7539 omp_add_variable (octx, decl,
7540 GOVD_SHARED | GOVD_SEEN);
7541 octx = octx->outer_context;
7544 if (octx)
7545 omp_notice_variable (octx, decl, true);
7548 else if (outer_ctx->outer_context)
7549 omp_notice_variable (outer_ctx->outer_context, decl, true);
7551 goto do_add;
7552 case OMP_CLAUSE_REDUCTION:
7553 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7554 /* OpenACC permits reductions on private variables. */
7555 if (!(region_type & ORT_ACC))
7556 check_non_private = "reduction";
7557 decl = OMP_CLAUSE_DECL (c);
7558 if (TREE_CODE (decl) == MEM_REF)
7560 tree type = TREE_TYPE (decl);
7561 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7562 NULL, is_gimple_val, fb_rvalue, false)
7563 == GS_ERROR)
7565 remove = true;
7566 break;
7568 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7569 if (DECL_P (v))
7571 omp_firstprivatize_variable (ctx, v);
7572 omp_notice_variable (ctx, v, true);
7574 decl = TREE_OPERAND (decl, 0);
7575 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7577 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7578 NULL, is_gimple_val, fb_rvalue, false)
7579 == GS_ERROR)
7581 remove = true;
7582 break;
7584 v = TREE_OPERAND (decl, 1);
7585 if (DECL_P (v))
7587 omp_firstprivatize_variable (ctx, v);
7588 omp_notice_variable (ctx, v, true);
7590 decl = TREE_OPERAND (decl, 0);
7592 if (TREE_CODE (decl) == ADDR_EXPR
7593 || TREE_CODE (decl) == INDIRECT_REF)
7594 decl = TREE_OPERAND (decl, 0);
7596 goto do_add_decl;
7597 case OMP_CLAUSE_LINEAR:
7598 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7599 is_gimple_val, fb_rvalue) == GS_ERROR)
7601 remove = true;
7602 break;
7604 else
7606 if (code == OMP_SIMD
7607 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7609 struct gimplify_omp_ctx *octx = outer_ctx;
7610 if (octx
7611 && octx->region_type == ORT_WORKSHARE
7612 && octx->combined_loop
7613 && !octx->distribute)
7615 if (octx->outer_context
7616 && (octx->outer_context->region_type
7617 == ORT_COMBINED_PARALLEL))
7618 octx = octx->outer_context->outer_context;
7619 else
7620 octx = octx->outer_context;
7622 if (octx
7623 && octx->region_type == ORT_WORKSHARE
7624 && octx->combined_loop
7625 && octx->distribute)
7627 error_at (OMP_CLAUSE_LOCATION (c),
7628 "%<linear%> clause for variable other than "
7629 "loop iterator specified on construct "
7630 "combined with %<distribute%>");
7631 remove = true;
7632 break;
7635 /* For combined #pragma omp parallel for simd, need to put
7636 lastprivate and perhaps firstprivate too on the
7637 parallel. Similarly for #pragma omp for simd. */
7638 struct gimplify_omp_ctx *octx = outer_ctx;
7639 decl = NULL_TREE;
7642 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7643 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7644 break;
7645 decl = OMP_CLAUSE_DECL (c);
7646 if (error_operand_p (decl))
7648 decl = NULL_TREE;
7649 break;
7651 flags = GOVD_SEEN;
7652 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7653 flags |= GOVD_FIRSTPRIVATE;
7654 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7655 flags |= GOVD_LASTPRIVATE;
7656 if (octx
7657 && octx->region_type == ORT_WORKSHARE
7658 && octx->combined_loop)
7660 if (octx->outer_context
7661 && (octx->outer_context->region_type
7662 == ORT_COMBINED_PARALLEL))
7663 octx = octx->outer_context;
7664 else if (omp_check_private (octx, decl, false))
7665 break;
7667 else if (octx
7668 && (octx->region_type & ORT_TASK) != 0
7669 && octx->combined_loop)
7671 else if (octx
7672 && octx->region_type == ORT_COMBINED_PARALLEL
7673 && ctx->region_type == ORT_WORKSHARE
7674 && octx == outer_ctx)
7675 flags = GOVD_SEEN | GOVD_SHARED;
7676 else if (octx
7677 && octx->region_type == ORT_COMBINED_TEAMS)
7678 flags = GOVD_SEEN | GOVD_SHARED;
7679 else if (octx
7680 && octx->region_type == ORT_COMBINED_TARGET)
7682 flags &= ~GOVD_LASTPRIVATE;
7683 if (flags == GOVD_SEEN)
7684 break;
7686 else
7687 break;
7688 splay_tree_node on
7689 = splay_tree_lookup (octx->variables,
7690 (splay_tree_key) decl);
7691 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7693 octx = NULL;
7694 break;
7696 omp_add_variable (octx, decl, flags);
7697 if (octx->outer_context == NULL)
7698 break;
7699 octx = octx->outer_context;
7701 while (1);
7702 if (octx
7703 && decl
7704 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7705 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7706 omp_notice_variable (octx, decl, true);
7708 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7709 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7710 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7712 notice_outer = false;
7713 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7715 goto do_add;
7717 case OMP_CLAUSE_MAP:
7718 decl = OMP_CLAUSE_DECL (c);
7719 if (error_operand_p (decl))
7720 remove = true;
7721 switch (code)
7723 case OMP_TARGET:
7724 break;
7725 case OACC_DATA:
7726 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7727 break;
7728 /* FALLTHRU */
7729 case OMP_TARGET_DATA:
7730 case OMP_TARGET_ENTER_DATA:
7731 case OMP_TARGET_EXIT_DATA:
7732 case OACC_ENTER_DATA:
7733 case OACC_EXIT_DATA:
7734 case OACC_HOST_DATA:
7735 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7736 || (OMP_CLAUSE_MAP_KIND (c)
7737 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7738 /* For target {,enter ,exit }data only the array slice is
7739 mapped, but not the pointer to it. */
7740 remove = true;
7741 break;
7742 default:
7743 break;
7745 if (remove)
7746 break;
7747 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7749 struct gimplify_omp_ctx *octx;
7750 for (octx = outer_ctx; octx; octx = octx->outer_context)
7752 if (octx->region_type != ORT_ACC_HOST_DATA)
7753 break;
7754 splay_tree_node n2
7755 = splay_tree_lookup (octx->variables,
7756 (splay_tree_key) decl);
7757 if (n2)
7758 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7759 "declared in enclosing %<host_data%> region",
7760 DECL_NAME (decl));
7763 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7764 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7765 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7766 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7767 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7769 remove = true;
7770 break;
7772 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7773 || (OMP_CLAUSE_MAP_KIND (c)
7774 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7775 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7777 OMP_CLAUSE_SIZE (c)
7778 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7779 false);
7780 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7781 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7783 if (!DECL_P (decl))
7785 tree d = decl, *pd;
7786 if (TREE_CODE (d) == ARRAY_REF)
7788 while (TREE_CODE (d) == ARRAY_REF)
7789 d = TREE_OPERAND (d, 0);
7790 if (TREE_CODE (d) == COMPONENT_REF
7791 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7792 decl = d;
7794 pd = &OMP_CLAUSE_DECL (c);
7795 if (d == decl
7796 && TREE_CODE (decl) == INDIRECT_REF
7797 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7798 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7799 == REFERENCE_TYPE))
7801 pd = &TREE_OPERAND (decl, 0);
7802 decl = TREE_OPERAND (decl, 0);
7804 if (TREE_CODE (decl) == COMPONENT_REF)
7806 while (TREE_CODE (decl) == COMPONENT_REF)
7807 decl = TREE_OPERAND (decl, 0);
7808 if (TREE_CODE (decl) == INDIRECT_REF
7809 && DECL_P (TREE_OPERAND (decl, 0))
7810 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7811 == REFERENCE_TYPE))
7812 decl = TREE_OPERAND (decl, 0);
7814 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7815 == GS_ERROR)
7817 remove = true;
7818 break;
7820 if (DECL_P (decl))
7822 if (error_operand_p (decl))
7824 remove = true;
7825 break;
7828 tree stype = TREE_TYPE (decl);
7829 if (TREE_CODE (stype) == REFERENCE_TYPE)
7830 stype = TREE_TYPE (stype);
7831 if (TYPE_SIZE_UNIT (stype) == NULL
7832 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7834 error_at (OMP_CLAUSE_LOCATION (c),
7835 "mapping field %qE of variable length "
7836 "structure", OMP_CLAUSE_DECL (c));
7837 remove = true;
7838 break;
7841 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7843 /* Error recovery. */
7844 if (prev_list_p == NULL)
7846 remove = true;
7847 break;
7849 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7851 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7852 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7854 remove = true;
7855 break;
7860 tree offset;
7861 HOST_WIDE_INT bitsize, bitpos;
7862 machine_mode mode;
7863 int unsignedp, reversep, volatilep = 0;
7864 tree base = OMP_CLAUSE_DECL (c);
7865 while (TREE_CODE (base) == ARRAY_REF)
7866 base = TREE_OPERAND (base, 0);
7867 if (TREE_CODE (base) == INDIRECT_REF)
7868 base = TREE_OPERAND (base, 0);
7869 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7870 &mode, &unsignedp, &reversep,
7871 &volatilep);
7872 tree orig_base = base;
7873 if ((TREE_CODE (base) == INDIRECT_REF
7874 || (TREE_CODE (base) == MEM_REF
7875 && integer_zerop (TREE_OPERAND (base, 1))))
7876 && DECL_P (TREE_OPERAND (base, 0))
7877 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7878 == REFERENCE_TYPE))
7879 base = TREE_OPERAND (base, 0);
7880 gcc_assert (base == decl
7881 && (offset == NULL_TREE
7882 || TREE_CODE (offset) == INTEGER_CST));
7884 splay_tree_node n
7885 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7886 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7887 == GOMP_MAP_ALWAYS_POINTER);
7888 if (n == NULL || (n->value & GOVD_MAP) == 0)
7890 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7891 OMP_CLAUSE_MAP);
7892 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7893 if (orig_base != base)
7894 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7895 else
7896 OMP_CLAUSE_DECL (l) = decl;
7897 OMP_CLAUSE_SIZE (l) = size_int (1);
7898 if (struct_map_to_clause == NULL)
7899 struct_map_to_clause = new hash_map<tree, tree>;
7900 struct_map_to_clause->put (decl, l);
7901 if (ptr)
7903 enum gomp_map_kind mkind
7904 = code == OMP_TARGET_EXIT_DATA
7905 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7906 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7907 OMP_CLAUSE_MAP);
7908 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7909 OMP_CLAUSE_DECL (c2)
7910 = unshare_expr (OMP_CLAUSE_DECL (c));
7911 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7912 OMP_CLAUSE_SIZE (c2)
7913 = TYPE_SIZE_UNIT (ptr_type_node);
7914 OMP_CLAUSE_CHAIN (l) = c2;
7915 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7917 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7918 tree c3
7919 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7920 OMP_CLAUSE_MAP);
7921 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7922 OMP_CLAUSE_DECL (c3)
7923 = unshare_expr (OMP_CLAUSE_DECL (c4));
7924 OMP_CLAUSE_SIZE (c3)
7925 = TYPE_SIZE_UNIT (ptr_type_node);
7926 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7927 OMP_CLAUSE_CHAIN (c2) = c3;
7929 *prev_list_p = l;
7930 prev_list_p = NULL;
7932 else
7934 OMP_CLAUSE_CHAIN (l) = c;
7935 *list_p = l;
7936 list_p = &OMP_CLAUSE_CHAIN (l);
7938 if (orig_base != base && code == OMP_TARGET)
7940 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7941 OMP_CLAUSE_MAP);
7942 enum gomp_map_kind mkind
7943 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7944 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7945 OMP_CLAUSE_DECL (c2) = decl;
7946 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7947 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7948 OMP_CLAUSE_CHAIN (l) = c2;
7950 flags = GOVD_MAP | GOVD_EXPLICIT;
7951 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7952 flags |= GOVD_SEEN;
7953 goto do_add_decl;
7955 else
7957 tree *osc = struct_map_to_clause->get (decl);
7958 tree *sc = NULL, *scp = NULL;
7959 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7960 n->value |= GOVD_SEEN;
7961 offset_int o1, o2;
7962 if (offset)
7963 o1 = wi::to_offset (offset);
7964 else
7965 o1 = 0;
7966 if (bitpos)
7967 o1 = o1 + bitpos / BITS_PER_UNIT;
7968 sc = &OMP_CLAUSE_CHAIN (*osc);
7969 if (*sc != c
7970 && (OMP_CLAUSE_MAP_KIND (*sc)
7971 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7972 sc = &OMP_CLAUSE_CHAIN (*sc);
7973 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7974 if (ptr && sc == prev_list_p)
7975 break;
7976 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7977 != COMPONENT_REF
7978 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7979 != INDIRECT_REF)
7980 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7981 != ARRAY_REF))
7982 break;
7983 else
7985 tree offset2;
7986 HOST_WIDE_INT bitsize2, bitpos2;
7987 base = OMP_CLAUSE_DECL (*sc);
7988 if (TREE_CODE (base) == ARRAY_REF)
7990 while (TREE_CODE (base) == ARRAY_REF)
7991 base = TREE_OPERAND (base, 0);
7992 if (TREE_CODE (base) != COMPONENT_REF
7993 || (TREE_CODE (TREE_TYPE (base))
7994 != ARRAY_TYPE))
7995 break;
7997 else if (TREE_CODE (base) == INDIRECT_REF
7998 && (TREE_CODE (TREE_OPERAND (base, 0))
7999 == COMPONENT_REF)
8000 && (TREE_CODE (TREE_TYPE
8001 (TREE_OPERAND (base, 0)))
8002 == REFERENCE_TYPE))
8003 base = TREE_OPERAND (base, 0);
8004 base = get_inner_reference (base, &bitsize2,
8005 &bitpos2, &offset2,
8006 &mode, &unsignedp,
8007 &reversep, &volatilep);
8008 if ((TREE_CODE (base) == INDIRECT_REF
8009 || (TREE_CODE (base) == MEM_REF
8010 && integer_zerop (TREE_OPERAND (base,
8011 1))))
8012 && DECL_P (TREE_OPERAND (base, 0))
8013 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8014 0)))
8015 == REFERENCE_TYPE))
8016 base = TREE_OPERAND (base, 0);
8017 if (base != decl)
8018 break;
8019 if (scp)
8020 continue;
8021 gcc_assert (offset == NULL_TREE
8022 || TREE_CODE (offset) == INTEGER_CST);
8023 tree d1 = OMP_CLAUSE_DECL (*sc);
8024 tree d2 = OMP_CLAUSE_DECL (c);
8025 while (TREE_CODE (d1) == ARRAY_REF)
8026 d1 = TREE_OPERAND (d1, 0);
8027 while (TREE_CODE (d2) == ARRAY_REF)
8028 d2 = TREE_OPERAND (d2, 0);
8029 if (TREE_CODE (d1) == INDIRECT_REF)
8030 d1 = TREE_OPERAND (d1, 0);
8031 if (TREE_CODE (d2) == INDIRECT_REF)
8032 d2 = TREE_OPERAND (d2, 0);
8033 while (TREE_CODE (d1) == COMPONENT_REF)
8034 if (TREE_CODE (d2) == COMPONENT_REF
8035 && TREE_OPERAND (d1, 1)
8036 == TREE_OPERAND (d2, 1))
8038 d1 = TREE_OPERAND (d1, 0);
8039 d2 = TREE_OPERAND (d2, 0);
8041 else
8042 break;
8043 if (d1 == d2)
8045 error_at (OMP_CLAUSE_LOCATION (c),
8046 "%qE appears more than once in map "
8047 "clauses", OMP_CLAUSE_DECL (c));
8048 remove = true;
8049 break;
8051 if (offset2)
8052 o2 = wi::to_offset (offset2);
8053 else
8054 o2 = 0;
8055 if (bitpos2)
8056 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8057 if (wi::ltu_p (o1, o2)
8058 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8060 if (ptr)
8061 scp = sc;
8062 else
8063 break;
8066 if (remove)
8067 break;
8068 OMP_CLAUSE_SIZE (*osc)
8069 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8070 size_one_node);
8071 if (ptr)
8073 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8074 OMP_CLAUSE_MAP);
8075 tree cl = NULL_TREE;
8076 enum gomp_map_kind mkind
8077 = code == OMP_TARGET_EXIT_DATA
8078 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8079 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8080 OMP_CLAUSE_DECL (c2)
8081 = unshare_expr (OMP_CLAUSE_DECL (c));
8082 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8083 OMP_CLAUSE_SIZE (c2)
8084 = TYPE_SIZE_UNIT (ptr_type_node);
8085 cl = scp ? *prev_list_p : c2;
8086 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8088 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8089 tree c3
8090 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8091 OMP_CLAUSE_MAP);
8092 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8093 OMP_CLAUSE_DECL (c3)
8094 = unshare_expr (OMP_CLAUSE_DECL (c4));
8095 OMP_CLAUSE_SIZE (c3)
8096 = TYPE_SIZE_UNIT (ptr_type_node);
8097 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8098 if (!scp)
8099 OMP_CLAUSE_CHAIN (c2) = c3;
8100 else
8101 cl = c3;
8103 if (scp)
8104 *scp = c2;
8105 if (sc == prev_list_p)
8107 *sc = cl;
8108 prev_list_p = NULL;
8110 else
8112 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8113 list_p = prev_list_p;
8114 prev_list_p = NULL;
8115 OMP_CLAUSE_CHAIN (c) = *sc;
8116 *sc = cl;
8117 continue;
8120 else if (*sc != c)
8122 *list_p = OMP_CLAUSE_CHAIN (c);
8123 OMP_CLAUSE_CHAIN (c) = *sc;
8124 *sc = c;
8125 continue;
8129 if (!remove
8130 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8131 && OMP_CLAUSE_CHAIN (c)
8132 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8133 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8134 == GOMP_MAP_ALWAYS_POINTER))
8135 prev_list_p = list_p;
8136 break;
8138 flags = GOVD_MAP | GOVD_EXPLICIT;
8139 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8140 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8141 flags |= GOVD_MAP_ALWAYS_TO;
8142 goto do_add;
8144 case OMP_CLAUSE_DEPEND:
8145 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8147 tree deps = OMP_CLAUSE_DECL (c);
8148 while (deps && TREE_CODE (deps) == TREE_LIST)
8150 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8151 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8152 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8153 pre_p, NULL, is_gimple_val, fb_rvalue);
8154 deps = TREE_CHAIN (deps);
8156 break;
8158 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8159 break;
8160 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8162 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8163 NULL, is_gimple_val, fb_rvalue);
8164 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8166 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8168 remove = true;
8169 break;
8171 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8172 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8173 is_gimple_val, fb_rvalue) == GS_ERROR)
8175 remove = true;
8176 break;
8178 break;
8180 case OMP_CLAUSE_TO:
8181 case OMP_CLAUSE_FROM:
8182 case OMP_CLAUSE__CACHE_:
8183 decl = OMP_CLAUSE_DECL (c);
8184 if (error_operand_p (decl))
8186 remove = true;
8187 break;
8189 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8190 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8191 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8192 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8193 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8195 remove = true;
8196 break;
8198 if (!DECL_P (decl))
8200 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8201 NULL, is_gimple_lvalue, fb_lvalue)
8202 == GS_ERROR)
8204 remove = true;
8205 break;
8207 break;
8209 goto do_notice;
8211 case OMP_CLAUSE_USE_DEVICE_PTR:
8212 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8213 goto do_add;
8214 case OMP_CLAUSE_IS_DEVICE_PTR:
8215 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8216 goto do_add;
8218 do_add:
8219 decl = OMP_CLAUSE_DECL (c);
8220 do_add_decl:
8221 if (error_operand_p (decl))
8223 remove = true;
8224 break;
8226 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8228 tree t = omp_member_access_dummy_var (decl);
8229 if (t)
8231 tree v = DECL_VALUE_EXPR (decl);
8232 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8233 if (outer_ctx)
8234 omp_notice_variable (outer_ctx, t, true);
8237 if (code == OACC_DATA
8238 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8239 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8240 flags |= GOVD_MAP_0LEN_ARRAY;
8241 omp_add_variable (ctx, decl, flags);
8242 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8243 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8245 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8246 GOVD_LOCAL | GOVD_SEEN);
8247 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8248 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8249 find_decl_expr,
8250 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8251 NULL) == NULL_TREE)
8252 omp_add_variable (ctx,
8253 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8254 GOVD_LOCAL | GOVD_SEEN);
8255 gimplify_omp_ctxp = ctx;
8256 push_gimplify_context ();
8258 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8259 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8261 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8262 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8263 pop_gimplify_context
8264 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8265 push_gimplify_context ();
8266 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8267 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8268 pop_gimplify_context
8269 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8270 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8271 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8273 gimplify_omp_ctxp = outer_ctx;
8275 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8276 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8278 gimplify_omp_ctxp = ctx;
8279 push_gimplify_context ();
8280 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8282 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8283 NULL, NULL);
8284 TREE_SIDE_EFFECTS (bind) = 1;
8285 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8286 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8288 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8289 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8290 pop_gimplify_context
8291 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8292 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8294 gimplify_omp_ctxp = outer_ctx;
8296 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8297 && OMP_CLAUSE_LINEAR_STMT (c))
8299 gimplify_omp_ctxp = ctx;
8300 push_gimplify_context ();
8301 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8303 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8304 NULL, NULL);
8305 TREE_SIDE_EFFECTS (bind) = 1;
8306 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8307 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8309 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8310 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8311 pop_gimplify_context
8312 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8313 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8315 gimplify_omp_ctxp = outer_ctx;
8317 if (notice_outer)
8318 goto do_notice;
8319 break;
8321 case OMP_CLAUSE_COPYIN:
8322 case OMP_CLAUSE_COPYPRIVATE:
8323 decl = OMP_CLAUSE_DECL (c);
8324 if (error_operand_p (decl))
8326 remove = true;
8327 break;
8329 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8330 && !remove
8331 && !omp_check_private (ctx, decl, true))
8333 remove = true;
8334 if (is_global_var (decl))
8336 if (DECL_THREAD_LOCAL_P (decl))
8337 remove = false;
8338 else if (DECL_HAS_VALUE_EXPR_P (decl))
8340 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8342 if (value
8343 && DECL_P (value)
8344 && DECL_THREAD_LOCAL_P (value))
8345 remove = false;
8348 if (remove)
8349 error_at (OMP_CLAUSE_LOCATION (c),
8350 "copyprivate variable %qE is not threadprivate"
8351 " or private in outer context", DECL_NAME (decl));
8353 do_notice:
8354 if (outer_ctx)
8355 omp_notice_variable (outer_ctx, decl, true);
8356 if (check_non_private
8357 && region_type == ORT_WORKSHARE
8358 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8359 || decl == OMP_CLAUSE_DECL (c)
8360 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8361 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8362 == ADDR_EXPR
8363 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8364 == POINTER_PLUS_EXPR
8365 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8366 (OMP_CLAUSE_DECL (c), 0), 0))
8367 == ADDR_EXPR)))))
8368 && omp_check_private (ctx, decl, false))
8370 error ("%s variable %qE is private in outer context",
8371 check_non_private, DECL_NAME (decl));
8372 remove = true;
8374 break;
8376 case OMP_CLAUSE_IF:
8377 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8378 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8380 const char *p[2];
8381 for (int i = 0; i < 2; i++)
8382 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8384 case OMP_PARALLEL: p[i] = "parallel"; break;
8385 case OMP_TASK: p[i] = "task"; break;
8386 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8387 case OMP_TARGET_DATA: p[i] = "target data"; break;
8388 case OMP_TARGET: p[i] = "target"; break;
8389 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8390 case OMP_TARGET_ENTER_DATA:
8391 p[i] = "target enter data"; break;
8392 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8393 default: gcc_unreachable ();
8395 error_at (OMP_CLAUSE_LOCATION (c),
8396 "expected %qs %<if%> clause modifier rather than %qs",
8397 p[0], p[1]);
8398 remove = true;
8400 /* Fall through. */
8402 case OMP_CLAUSE_FINAL:
8403 OMP_CLAUSE_OPERAND (c, 0)
8404 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8405 /* Fall through. */
8407 case OMP_CLAUSE_SCHEDULE:
8408 case OMP_CLAUSE_NUM_THREADS:
8409 case OMP_CLAUSE_NUM_TEAMS:
8410 case OMP_CLAUSE_THREAD_LIMIT:
8411 case OMP_CLAUSE_DIST_SCHEDULE:
8412 case OMP_CLAUSE_DEVICE:
8413 case OMP_CLAUSE_PRIORITY:
8414 case OMP_CLAUSE_GRAINSIZE:
8415 case OMP_CLAUSE_NUM_TASKS:
8416 case OMP_CLAUSE_HINT:
8417 case OMP_CLAUSE__CILK_FOR_COUNT_:
8418 case OMP_CLAUSE_ASYNC:
8419 case OMP_CLAUSE_WAIT:
8420 case OMP_CLAUSE_NUM_GANGS:
8421 case OMP_CLAUSE_NUM_WORKERS:
8422 case OMP_CLAUSE_VECTOR_LENGTH:
8423 case OMP_CLAUSE_WORKER:
8424 case OMP_CLAUSE_VECTOR:
8425 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8426 is_gimple_val, fb_rvalue) == GS_ERROR)
8427 remove = true;
8428 break;
8430 case OMP_CLAUSE_GANG:
8431 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8432 is_gimple_val, fb_rvalue) == GS_ERROR)
8433 remove = true;
8434 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8435 is_gimple_val, fb_rvalue) == GS_ERROR)
8436 remove = true;
8437 break;
8439 case OMP_CLAUSE_NOWAIT:
8440 case OMP_CLAUSE_ORDERED:
8441 case OMP_CLAUSE_UNTIED:
8442 case OMP_CLAUSE_COLLAPSE:
8443 case OMP_CLAUSE_TILE:
8444 case OMP_CLAUSE_AUTO:
8445 case OMP_CLAUSE_SEQ:
8446 case OMP_CLAUSE_INDEPENDENT:
8447 case OMP_CLAUSE_MERGEABLE:
8448 case OMP_CLAUSE_PROC_BIND:
8449 case OMP_CLAUSE_SAFELEN:
8450 case OMP_CLAUSE_SIMDLEN:
8451 case OMP_CLAUSE_NOGROUP:
8452 case OMP_CLAUSE_THREADS:
8453 case OMP_CLAUSE_SIMD:
8454 break;
8456 case OMP_CLAUSE_DEFAULTMAP:
8457 ctx->target_map_scalars_firstprivate = false;
8458 break;
8460 case OMP_CLAUSE_ALIGNED:
8461 decl = OMP_CLAUSE_DECL (c);
8462 if (error_operand_p (decl))
8464 remove = true;
8465 break;
8467 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8468 is_gimple_val, fb_rvalue) == GS_ERROR)
8470 remove = true;
8471 break;
8473 if (!is_global_var (decl)
8474 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8475 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8476 break;
8478 case OMP_CLAUSE_DEFAULT:
8479 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8480 break;
8482 default:
8483 gcc_unreachable ();
8486 if (code == OACC_DATA
8487 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8488 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8489 remove = true;
8490 if (remove)
8491 *list_p = OMP_CLAUSE_CHAIN (c);
8492 else
8493 list_p = &OMP_CLAUSE_CHAIN (c);
8496 gimplify_omp_ctxp = ctx;
8497 if (struct_map_to_clause)
8498 delete struct_map_to_clause;
8501 /* Return true if DECL is a candidate for shared to firstprivate
8502 optimization. We only consider non-addressable scalars, not
8503 too big, and not references. */
8505 static bool
8506 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8508 if (TREE_ADDRESSABLE (decl))
8509 return false;
8510 tree type = TREE_TYPE (decl);
8511 if (!is_gimple_reg_type (type)
8512 || TREE_CODE (type) == REFERENCE_TYPE
8513 || TREE_ADDRESSABLE (type))
8514 return false;
8515 /* Don't optimize too large decls, as each thread/task will have
8516 its own. */
8517 HOST_WIDE_INT len = int_size_in_bytes (type);
8518 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8519 return false;
8520 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8521 return false;
8522 return true;
8525 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8526 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8527 GOVD_WRITTEN in outer contexts. */
8529 static void
8530 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8532 for (; ctx; ctx = ctx->outer_context)
8534 splay_tree_node n = splay_tree_lookup (ctx->variables,
8535 (splay_tree_key) decl);
8536 if (n == NULL)
8537 continue;
8538 else if (n->value & GOVD_SHARED)
8540 n->value |= GOVD_WRITTEN;
8541 return;
8543 else if (n->value & GOVD_DATA_SHARE_CLASS)
8544 return;
8548 /* Helper callback for walk_gimple_seq to discover possible stores
8549 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8550 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8551 for those. */
8553 static tree
8554 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8556 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8558 *walk_subtrees = 0;
8559 if (!wi->is_lhs)
8560 return NULL_TREE;
8562 tree op = *tp;
8565 if (handled_component_p (op))
8566 op = TREE_OPERAND (op, 0);
8567 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8568 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8569 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8570 else
8571 break;
8573 while (1);
8574 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8575 return NULL_TREE;
8577 omp_mark_stores (gimplify_omp_ctxp, op);
8578 return NULL_TREE;
8581 /* Helper callback for walk_gimple_seq to discover possible stores
8582 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8583 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8584 for those. */
8586 static tree
8587 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8588 bool *handled_ops_p,
8589 struct walk_stmt_info *wi)
8591 gimple *stmt = gsi_stmt (*gsi_p);
8592 switch (gimple_code (stmt))
8594 /* Don't recurse on OpenMP constructs for which
8595 gimplify_adjust_omp_clauses already handled the bodies,
8596 except handle gimple_omp_for_pre_body. */
8597 case GIMPLE_OMP_FOR:
8598 *handled_ops_p = true;
8599 if (gimple_omp_for_pre_body (stmt))
8600 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8601 omp_find_stores_stmt, omp_find_stores_op, wi);
8602 break;
8603 case GIMPLE_OMP_PARALLEL:
8604 case GIMPLE_OMP_TASK:
8605 case GIMPLE_OMP_SECTIONS:
8606 case GIMPLE_OMP_SINGLE:
8607 case GIMPLE_OMP_TARGET:
8608 case GIMPLE_OMP_TEAMS:
8609 case GIMPLE_OMP_CRITICAL:
8610 *handled_ops_p = true;
8611 break;
8612 default:
8613 break;
8615 return NULL_TREE;
8618 struct gimplify_adjust_omp_clauses_data
8620 tree *list_p;
8621 gimple_seq *pre_p;
8624 /* For all variables that were not actually used within the context,
8625 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8627 static int
8628 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8630 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8631 gimple_seq *pre_p
8632 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8633 tree decl = (tree) n->key;
8634 unsigned flags = n->value;
8635 enum omp_clause_code code;
8636 tree clause;
8637 bool private_debug;
8639 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8640 return 0;
8641 if ((flags & GOVD_SEEN) == 0)
8642 return 0;
8643 if (flags & GOVD_DEBUG_PRIVATE)
8645 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8646 private_debug = true;
8648 else if (flags & GOVD_MAP)
8649 private_debug = false;
8650 else
8651 private_debug
8652 = lang_hooks.decls.omp_private_debug_clause (decl,
8653 !!(flags & GOVD_SHARED));
8654 if (private_debug)
8655 code = OMP_CLAUSE_PRIVATE;
8656 else if (flags & GOVD_MAP)
8658 code = OMP_CLAUSE_MAP;
8659 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8660 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8662 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8663 return 0;
8666 else if (flags & GOVD_SHARED)
8668 if (is_global_var (decl))
8670 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8671 while (ctx != NULL)
8673 splay_tree_node on
8674 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8675 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8676 | GOVD_PRIVATE | GOVD_REDUCTION
8677 | GOVD_LINEAR | GOVD_MAP)) != 0)
8678 break;
8679 ctx = ctx->outer_context;
8681 if (ctx == NULL)
8682 return 0;
8684 code = OMP_CLAUSE_SHARED;
8686 else if (flags & GOVD_PRIVATE)
8687 code = OMP_CLAUSE_PRIVATE;
8688 else if (flags & GOVD_FIRSTPRIVATE)
8690 code = OMP_CLAUSE_FIRSTPRIVATE;
8691 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8692 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8693 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8695 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8696 "%<target%> construct", decl);
8697 return 0;
8700 else if (flags & GOVD_LASTPRIVATE)
8701 code = OMP_CLAUSE_LASTPRIVATE;
8702 else if (flags & GOVD_ALIGNED)
8703 return 0;
8704 else
8705 gcc_unreachable ();
8707 if (((flags & GOVD_LASTPRIVATE)
8708 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8709 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8710 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8712 tree chain = *list_p;
8713 clause = build_omp_clause (input_location, code);
8714 OMP_CLAUSE_DECL (clause) = decl;
8715 OMP_CLAUSE_CHAIN (clause) = chain;
8716 if (private_debug)
8717 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8718 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8719 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8720 else if (code == OMP_CLAUSE_SHARED
8721 && (flags & GOVD_WRITTEN) == 0
8722 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8723 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8724 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8725 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8726 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8728 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8729 OMP_CLAUSE_DECL (nc) = decl;
8730 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8731 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8732 OMP_CLAUSE_DECL (clause)
8733 = build_simple_mem_ref_loc (input_location, decl);
8734 OMP_CLAUSE_DECL (clause)
8735 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8736 build_int_cst (build_pointer_type (char_type_node), 0));
8737 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8738 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8739 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8740 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8741 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8742 OMP_CLAUSE_CHAIN (nc) = chain;
8743 OMP_CLAUSE_CHAIN (clause) = nc;
8744 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8745 gimplify_omp_ctxp = ctx->outer_context;
8746 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8747 pre_p, NULL, is_gimple_val, fb_rvalue);
8748 gimplify_omp_ctxp = ctx;
8750 else if (code == OMP_CLAUSE_MAP)
8752 int kind;
8753 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8754 switch (flags & (GOVD_MAP_TO_ONLY
8755 | GOVD_MAP_FORCE
8756 | GOVD_MAP_FORCE_PRESENT))
8758 case 0:
8759 kind = GOMP_MAP_TOFROM;
8760 break;
8761 case GOVD_MAP_FORCE:
8762 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8763 break;
8764 case GOVD_MAP_TO_ONLY:
8765 kind = GOMP_MAP_TO;
8766 break;
8767 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8768 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8769 break;
8770 case GOVD_MAP_FORCE_PRESENT:
8771 kind = GOMP_MAP_FORCE_PRESENT;
8772 break;
8773 default:
8774 gcc_unreachable ();
8776 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8777 if (DECL_SIZE (decl)
8778 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8780 tree decl2 = DECL_VALUE_EXPR (decl);
8781 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8782 decl2 = TREE_OPERAND (decl2, 0);
8783 gcc_assert (DECL_P (decl2));
8784 tree mem = build_simple_mem_ref (decl2);
8785 OMP_CLAUSE_DECL (clause) = mem;
8786 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8787 if (gimplify_omp_ctxp->outer_context)
8789 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8790 omp_notice_variable (ctx, decl2, true);
8791 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8793 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8794 OMP_CLAUSE_MAP);
8795 OMP_CLAUSE_DECL (nc) = decl;
8796 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8797 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8798 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8799 else
8800 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8801 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8802 OMP_CLAUSE_CHAIN (clause) = nc;
8804 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8805 && lang_hooks.decls.omp_privatize_by_reference (decl))
8807 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8808 OMP_CLAUSE_SIZE (clause)
8809 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8810 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8811 gimplify_omp_ctxp = ctx->outer_context;
8812 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8813 pre_p, NULL, is_gimple_val, fb_rvalue);
8814 gimplify_omp_ctxp = ctx;
8815 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8816 OMP_CLAUSE_MAP);
8817 OMP_CLAUSE_DECL (nc) = decl;
8818 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8819 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8820 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8821 OMP_CLAUSE_CHAIN (clause) = nc;
8823 else
8824 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8826 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8828 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8829 OMP_CLAUSE_DECL (nc) = decl;
8830 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8831 OMP_CLAUSE_CHAIN (nc) = chain;
8832 OMP_CLAUSE_CHAIN (clause) = nc;
8833 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8834 gimplify_omp_ctxp = ctx->outer_context;
8835 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8836 gimplify_omp_ctxp = ctx;
8838 *list_p = clause;
8839 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8840 gimplify_omp_ctxp = ctx->outer_context;
8841 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8842 if (gimplify_omp_ctxp)
8843 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8844 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8845 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8846 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8847 true);
8848 gimplify_omp_ctxp = ctx;
8849 return 0;
8852 static void
8853 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8854 enum tree_code code)
8856 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8857 tree c, decl;
8859 if (body)
8861 struct gimplify_omp_ctx *octx;
8862 for (octx = ctx; octx; octx = octx->outer_context)
8863 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8864 break;
8865 if (octx)
8867 struct walk_stmt_info wi;
8868 memset (&wi, 0, sizeof (wi));
8869 walk_gimple_seq (body, omp_find_stores_stmt,
8870 omp_find_stores_op, &wi);
8873 while ((c = *list_p) != NULL)
8875 splay_tree_node n;
8876 bool remove = false;
8878 switch (OMP_CLAUSE_CODE (c))
8880 case OMP_CLAUSE_FIRSTPRIVATE:
8881 if ((ctx->region_type & ORT_TARGET)
8882 && (ctx->region_type & ORT_ACC) == 0
8883 && TYPE_ATOMIC (strip_array_types
8884 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8886 error_at (OMP_CLAUSE_LOCATION (c),
8887 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8888 "%<target%> construct", OMP_CLAUSE_DECL (c));
8889 remove = true;
8890 break;
8892 /* FALLTHRU */
8893 case OMP_CLAUSE_PRIVATE:
8894 case OMP_CLAUSE_SHARED:
8895 case OMP_CLAUSE_LINEAR:
8896 decl = OMP_CLAUSE_DECL (c);
8897 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8898 remove = !(n->value & GOVD_SEEN);
8899 if (! remove)
8901 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8902 if ((n->value & GOVD_DEBUG_PRIVATE)
8903 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8905 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8906 || ((n->value & GOVD_DATA_SHARE_CLASS)
8907 == GOVD_SHARED));
8908 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8909 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8911 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8912 && (n->value & GOVD_WRITTEN) == 0
8913 && DECL_P (decl)
8914 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8915 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8916 else if (DECL_P (decl)
8917 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8918 && (n->value & GOVD_WRITTEN) != 1)
8919 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8920 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8921 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8922 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8924 break;
8926 case OMP_CLAUSE_LASTPRIVATE:
8927 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8928 accurately reflect the presence of a FIRSTPRIVATE clause. */
8929 decl = OMP_CLAUSE_DECL (c);
8930 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8931 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8932 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8933 if (code == OMP_DISTRIBUTE
8934 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8936 remove = true;
8937 error_at (OMP_CLAUSE_LOCATION (c),
8938 "same variable used in %<firstprivate%> and "
8939 "%<lastprivate%> clauses on %<distribute%> "
8940 "construct");
8942 if (!remove
8943 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8944 && DECL_P (decl)
8945 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8946 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8947 break;
8949 case OMP_CLAUSE_ALIGNED:
8950 decl = OMP_CLAUSE_DECL (c);
8951 if (!is_global_var (decl))
8953 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8954 remove = n == NULL || !(n->value & GOVD_SEEN);
8955 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8957 struct gimplify_omp_ctx *octx;
8958 if (n != NULL
8959 && (n->value & (GOVD_DATA_SHARE_CLASS
8960 & ~GOVD_FIRSTPRIVATE)))
8961 remove = true;
8962 else
8963 for (octx = ctx->outer_context; octx;
8964 octx = octx->outer_context)
8966 n = splay_tree_lookup (octx->variables,
8967 (splay_tree_key) decl);
8968 if (n == NULL)
8969 continue;
8970 if (n->value & GOVD_LOCAL)
8971 break;
8972 /* We have to avoid assigning a shared variable
8973 to itself when trying to add
8974 __builtin_assume_aligned. */
8975 if (n->value & GOVD_SHARED)
8977 remove = true;
8978 break;
8983 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8985 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8986 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8987 remove = true;
8989 break;
8991 case OMP_CLAUSE_MAP:
8992 if (code == OMP_TARGET_EXIT_DATA
8993 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8995 remove = true;
8996 break;
8998 decl = OMP_CLAUSE_DECL (c);
8999 /* Data clauses associated with acc parallel reductions must be
9000 compatible with present_or_copy. Warn and adjust the clause
9001 if that is not the case. */
9002 if (ctx->region_type == ORT_ACC_PARALLEL)
9004 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9005 n = NULL;
9007 if (DECL_P (t))
9008 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9010 if (n && (n->value & GOVD_REDUCTION))
9012 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9014 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9015 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9016 && kind != GOMP_MAP_FORCE_PRESENT
9017 && kind != GOMP_MAP_POINTER)
9019 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9020 "incompatible data clause with reduction "
9021 "on %qE; promoting to present_or_copy",
9022 DECL_NAME (t));
9023 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9027 if (!DECL_P (decl))
9029 if ((ctx->region_type & ORT_TARGET) != 0
9030 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9032 if (TREE_CODE (decl) == INDIRECT_REF
9033 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9034 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9035 == REFERENCE_TYPE))
9036 decl = TREE_OPERAND (decl, 0);
9037 if (TREE_CODE (decl) == COMPONENT_REF)
9039 while (TREE_CODE (decl) == COMPONENT_REF)
9040 decl = TREE_OPERAND (decl, 0);
9041 if (DECL_P (decl))
9043 n = splay_tree_lookup (ctx->variables,
9044 (splay_tree_key) decl);
9045 if (!(n->value & GOVD_SEEN))
9046 remove = true;
9050 break;
9052 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9053 if ((ctx->region_type & ORT_TARGET) != 0
9054 && !(n->value & GOVD_SEEN)
9055 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9056 && (!is_global_var (decl)
9057 || !lookup_attribute ("omp declare target link",
9058 DECL_ATTRIBUTES (decl))))
9060 remove = true;
9061 /* For struct element mapping, if struct is never referenced
9062 in target block and none of the mapping has always modifier,
9063 remove all the struct element mappings, which immediately
9064 follow the GOMP_MAP_STRUCT map clause. */
9065 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9067 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9068 while (cnt--)
9069 OMP_CLAUSE_CHAIN (c)
9070 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9073 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9074 && code == OMP_TARGET_EXIT_DATA)
9075 remove = true;
9076 else if (DECL_SIZE (decl)
9077 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9078 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9079 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9080 && (OMP_CLAUSE_MAP_KIND (c)
9081 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9083 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9084 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9085 INTEGER_CST. */
9086 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9088 tree decl2 = DECL_VALUE_EXPR (decl);
9089 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9090 decl2 = TREE_OPERAND (decl2, 0);
9091 gcc_assert (DECL_P (decl2));
9092 tree mem = build_simple_mem_ref (decl2);
9093 OMP_CLAUSE_DECL (c) = mem;
9094 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9095 if (ctx->outer_context)
9097 omp_notice_variable (ctx->outer_context, decl2, true);
9098 omp_notice_variable (ctx->outer_context,
9099 OMP_CLAUSE_SIZE (c), true);
9101 if (((ctx->region_type & ORT_TARGET) != 0
9102 || !ctx->target_firstprivatize_array_bases)
9103 && ((n->value & GOVD_SEEN) == 0
9104 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9106 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9107 OMP_CLAUSE_MAP);
9108 OMP_CLAUSE_DECL (nc) = decl;
9109 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9110 if (ctx->target_firstprivatize_array_bases)
9111 OMP_CLAUSE_SET_MAP_KIND (nc,
9112 GOMP_MAP_FIRSTPRIVATE_POINTER);
9113 else
9114 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9115 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9116 OMP_CLAUSE_CHAIN (c) = nc;
9117 c = nc;
9120 else
9122 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9123 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9124 gcc_assert ((n->value & GOVD_SEEN) == 0
9125 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9126 == 0));
9128 break;
9130 case OMP_CLAUSE_TO:
9131 case OMP_CLAUSE_FROM:
9132 case OMP_CLAUSE__CACHE_:
9133 decl = OMP_CLAUSE_DECL (c);
9134 if (!DECL_P (decl))
9135 break;
9136 if (DECL_SIZE (decl)
9137 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9139 tree decl2 = DECL_VALUE_EXPR (decl);
9140 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9141 decl2 = TREE_OPERAND (decl2, 0);
9142 gcc_assert (DECL_P (decl2));
9143 tree mem = build_simple_mem_ref (decl2);
9144 OMP_CLAUSE_DECL (c) = mem;
9145 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9146 if (ctx->outer_context)
9148 omp_notice_variable (ctx->outer_context, decl2, true);
9149 omp_notice_variable (ctx->outer_context,
9150 OMP_CLAUSE_SIZE (c), true);
9153 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9154 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9155 break;
9157 case OMP_CLAUSE_REDUCTION:
9158 decl = OMP_CLAUSE_DECL (c);
9159 /* OpenACC reductions need a present_or_copy data clause.
9160 Add one if necessary. Error is the reduction is private. */
9161 if (ctx->region_type == ORT_ACC_PARALLEL)
9163 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9164 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9165 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9166 "reduction on %qE", DECL_NAME (decl));
9167 else if ((n->value & GOVD_MAP) == 0)
9169 tree next = OMP_CLAUSE_CHAIN (c);
9170 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9171 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9172 OMP_CLAUSE_DECL (nc) = decl;
9173 OMP_CLAUSE_CHAIN (c) = nc;
9174 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9175 while (1)
9177 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9178 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9179 break;
9180 nc = OMP_CLAUSE_CHAIN (nc);
9182 OMP_CLAUSE_CHAIN (nc) = next;
9183 n->value |= GOVD_MAP;
9186 if (DECL_P (decl)
9187 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9188 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9189 break;
9190 case OMP_CLAUSE_COPYIN:
9191 case OMP_CLAUSE_COPYPRIVATE:
9192 case OMP_CLAUSE_IF:
9193 case OMP_CLAUSE_NUM_THREADS:
9194 case OMP_CLAUSE_NUM_TEAMS:
9195 case OMP_CLAUSE_THREAD_LIMIT:
9196 case OMP_CLAUSE_DIST_SCHEDULE:
9197 case OMP_CLAUSE_DEVICE:
9198 case OMP_CLAUSE_SCHEDULE:
9199 case OMP_CLAUSE_NOWAIT:
9200 case OMP_CLAUSE_ORDERED:
9201 case OMP_CLAUSE_DEFAULT:
9202 case OMP_CLAUSE_UNTIED:
9203 case OMP_CLAUSE_COLLAPSE:
9204 case OMP_CLAUSE_FINAL:
9205 case OMP_CLAUSE_MERGEABLE:
9206 case OMP_CLAUSE_PROC_BIND:
9207 case OMP_CLAUSE_SAFELEN:
9208 case OMP_CLAUSE_SIMDLEN:
9209 case OMP_CLAUSE_DEPEND:
9210 case OMP_CLAUSE_PRIORITY:
9211 case OMP_CLAUSE_GRAINSIZE:
9212 case OMP_CLAUSE_NUM_TASKS:
9213 case OMP_CLAUSE_NOGROUP:
9214 case OMP_CLAUSE_THREADS:
9215 case OMP_CLAUSE_SIMD:
9216 case OMP_CLAUSE_HINT:
9217 case OMP_CLAUSE_DEFAULTMAP:
9218 case OMP_CLAUSE_USE_DEVICE_PTR:
9219 case OMP_CLAUSE_IS_DEVICE_PTR:
9220 case OMP_CLAUSE__CILK_FOR_COUNT_:
9221 case OMP_CLAUSE_ASYNC:
9222 case OMP_CLAUSE_WAIT:
9223 case OMP_CLAUSE_INDEPENDENT:
9224 case OMP_CLAUSE_NUM_GANGS:
9225 case OMP_CLAUSE_NUM_WORKERS:
9226 case OMP_CLAUSE_VECTOR_LENGTH:
9227 case OMP_CLAUSE_GANG:
9228 case OMP_CLAUSE_WORKER:
9229 case OMP_CLAUSE_VECTOR:
9230 case OMP_CLAUSE_AUTO:
9231 case OMP_CLAUSE_SEQ:
9232 case OMP_CLAUSE_TILE:
9233 break;
9235 default:
9236 gcc_unreachable ();
9239 if (remove)
9240 *list_p = OMP_CLAUSE_CHAIN (c);
9241 else
9242 list_p = &OMP_CLAUSE_CHAIN (c);
9245 /* Add in any implicit data sharing. */
9246 struct gimplify_adjust_omp_clauses_data data;
9247 data.list_p = list_p;
9248 data.pre_p = pre_p;
9249 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9251 gimplify_omp_ctxp = ctx->outer_context;
9252 delete_omp_context (ctx);
9255 /* Gimplify OACC_CACHE. */
9257 static void
9258 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9260 tree expr = *expr_p;
9262 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9263 OACC_CACHE);
9264 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9265 OACC_CACHE);
9267 /* TODO: Do something sensible with this information. */
9269 *expr_p = NULL_TREE;
9272 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9273 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9274 kind. The entry kind will replace the one in CLAUSE, while the exit
9275 kind will be used in a new omp_clause and returned to the caller. */
9277 static tree
9278 gimplify_oacc_declare_1 (tree clause)
9280 HOST_WIDE_INT kind, new_op;
9281 bool ret = false;
9282 tree c = NULL;
9284 kind = OMP_CLAUSE_MAP_KIND (clause);
9286 switch (kind)
9288 case GOMP_MAP_ALLOC:
9289 case GOMP_MAP_FORCE_ALLOC:
9290 case GOMP_MAP_FORCE_TO:
9291 new_op = GOMP_MAP_DELETE;
9292 ret = true;
9293 break;
9295 case GOMP_MAP_FORCE_FROM:
9296 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9297 new_op = GOMP_MAP_FORCE_FROM;
9298 ret = true;
9299 break;
9301 case GOMP_MAP_FORCE_TOFROM:
9302 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9303 new_op = GOMP_MAP_FORCE_FROM;
9304 ret = true;
9305 break;
9307 case GOMP_MAP_FROM:
9308 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9309 new_op = GOMP_MAP_FROM;
9310 ret = true;
9311 break;
9313 case GOMP_MAP_TOFROM:
9314 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9315 new_op = GOMP_MAP_FROM;
9316 ret = true;
9317 break;
9319 case GOMP_MAP_DEVICE_RESIDENT:
9320 case GOMP_MAP_FORCE_DEVICEPTR:
9321 case GOMP_MAP_FORCE_PRESENT:
9322 case GOMP_MAP_LINK:
9323 case GOMP_MAP_POINTER:
9324 case GOMP_MAP_TO:
9325 break;
9327 default:
9328 gcc_unreachable ();
9329 break;
9332 if (ret)
9334 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9335 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9336 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9339 return c;
9342 /* Gimplify OACC_DECLARE. */
9344 static void
9345 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9347 tree expr = *expr_p;
9348 gomp_target *stmt;
9349 tree clauses, t, decl;
9351 clauses = OACC_DECLARE_CLAUSES (expr);
9353 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9354 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9356 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9358 decl = OMP_CLAUSE_DECL (t);
9360 if (TREE_CODE (decl) == MEM_REF)
9361 decl = TREE_OPERAND (decl, 0);
9363 if (VAR_P (decl) && !is_oacc_declared (decl))
9365 tree attr = get_identifier ("oacc declare target");
9366 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9367 DECL_ATTRIBUTES (decl));
9370 if (VAR_P (decl)
9371 && !is_global_var (decl)
9372 && DECL_CONTEXT (decl) == current_function_decl)
9374 tree c = gimplify_oacc_declare_1 (t);
9375 if (c)
9377 if (oacc_declare_returns == NULL)
9378 oacc_declare_returns = new hash_map<tree, tree>;
9380 oacc_declare_returns->put (decl, c);
9384 if (gimplify_omp_ctxp)
9385 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9388 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9389 clauses);
9391 gimplify_seq_add_stmt (pre_p, stmt);
9393 *expr_p = NULL_TREE;
9396 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9397 gimplification of the body, as well as scanning the body for used
9398 variables. We need to do this scan now, because variable-sized
9399 decls will be decomposed during gimplification. */
9401 static void
9402 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9404 tree expr = *expr_p;
9405 gimple *g;
9406 gimple_seq body = NULL;
9408 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9409 OMP_PARALLEL_COMBINED (expr)
9410 ? ORT_COMBINED_PARALLEL
9411 : ORT_PARALLEL, OMP_PARALLEL);
9413 push_gimplify_context ();
9415 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9416 if (gimple_code (g) == GIMPLE_BIND)
9417 pop_gimplify_context (g);
9418 else
9419 pop_gimplify_context (NULL);
9421 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9422 OMP_PARALLEL);
9424 g = gimple_build_omp_parallel (body,
9425 OMP_PARALLEL_CLAUSES (expr),
9426 NULL_TREE, NULL_TREE);
9427 if (OMP_PARALLEL_COMBINED (expr))
9428 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9429 gimplify_seq_add_stmt (pre_p, g);
9430 *expr_p = NULL_TREE;
9433 /* Gimplify the contents of an OMP_TASK statement. This involves
9434 gimplification of the body, as well as scanning the body for used
9435 variables. We need to do this scan now, because variable-sized
9436 decls will be decomposed during gimplification. */
9438 static void
9439 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9441 tree expr = *expr_p;
9442 gimple *g;
9443 gimple_seq body = NULL;
9445 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9446 omp_find_clause (OMP_TASK_CLAUSES (expr),
9447 OMP_CLAUSE_UNTIED)
9448 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9450 push_gimplify_context ();
9452 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9453 if (gimple_code (g) == GIMPLE_BIND)
9454 pop_gimplify_context (g);
9455 else
9456 pop_gimplify_context (NULL);
9458 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9459 OMP_TASK);
9461 g = gimple_build_omp_task (body,
9462 OMP_TASK_CLAUSES (expr),
9463 NULL_TREE, NULL_TREE,
9464 NULL_TREE, NULL_TREE, NULL_TREE);
9465 gimplify_seq_add_stmt (pre_p, g);
9466 *expr_p = NULL_TREE;
9469 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9470 with non-NULL OMP_FOR_INIT. */
9472 static tree
9473 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9475 *walk_subtrees = 0;
9476 switch (TREE_CODE (*tp))
9478 case OMP_FOR:
9479 *walk_subtrees = 1;
9480 /* FALLTHRU */
9481 case OMP_SIMD:
9482 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9483 return *tp;
9484 break;
9485 case BIND_EXPR:
9486 case STATEMENT_LIST:
9487 case OMP_PARALLEL:
9488 *walk_subtrees = 1;
9489 break;
9490 default:
9491 break;
9493 return NULL_TREE;
9496 /* Gimplify the gross structure of an OMP_FOR statement. */
9498 static enum gimplify_status
9499 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9501 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9502 enum gimplify_status ret = GS_ALL_DONE;
9503 enum gimplify_status tret;
9504 gomp_for *gfor;
9505 gimple_seq for_body, for_pre_body;
9506 int i;
9507 bitmap has_decl_expr = NULL;
9508 enum omp_region_type ort = ORT_WORKSHARE;
9510 orig_for_stmt = for_stmt = *expr_p;
9512 switch (TREE_CODE (for_stmt))
9514 case OMP_FOR:
9515 case CILK_FOR:
9516 case OMP_DISTRIBUTE:
9517 break;
9518 case OACC_LOOP:
9519 ort = ORT_ACC;
9520 break;
9521 case OMP_TASKLOOP:
9522 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9523 ort = ORT_UNTIED_TASK;
9524 else
9525 ort = ORT_TASK;
9526 break;
9527 case OMP_SIMD:
9528 case CILK_SIMD:
9529 ort = ORT_SIMD;
9530 break;
9531 default:
9532 gcc_unreachable ();
9535 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9536 clause for the IV. */
9537 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9539 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9540 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9541 decl = TREE_OPERAND (t, 0);
9542 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9543 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9544 && OMP_CLAUSE_DECL (c) == decl)
9546 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9547 break;
9551 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9553 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9554 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9555 find_combined_omp_for, NULL, NULL);
9556 if (inner_for_stmt == NULL_TREE)
9558 gcc_assert (seen_error ());
9559 *expr_p = NULL_TREE;
9560 return GS_ERROR;
9564 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9565 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9566 TREE_CODE (for_stmt));
9568 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9569 gimplify_omp_ctxp->distribute = true;
9571 /* Handle OMP_FOR_INIT. */
9572 for_pre_body = NULL;
9573 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9575 has_decl_expr = BITMAP_ALLOC (NULL);
9576 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9577 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9578 == VAR_DECL)
9580 t = OMP_FOR_PRE_BODY (for_stmt);
9581 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9583 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9585 tree_stmt_iterator si;
9586 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9587 tsi_next (&si))
9589 t = tsi_stmt (si);
9590 if (TREE_CODE (t) == DECL_EXPR
9591 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9592 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9596 if (OMP_FOR_PRE_BODY (for_stmt))
9598 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9599 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9600 else
9602 struct gimplify_omp_ctx ctx;
9603 memset (&ctx, 0, sizeof (ctx));
9604 ctx.region_type = ORT_NONE;
9605 gimplify_omp_ctxp = &ctx;
9606 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9607 gimplify_omp_ctxp = NULL;
9610 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9612 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9613 for_stmt = inner_for_stmt;
9615 /* For taskloop, need to gimplify the start, end and step before the
9616 taskloop, outside of the taskloop omp context. */
9617 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9619 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9621 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9622 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9624 TREE_OPERAND (t, 1)
9625 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9626 pre_p, NULL, false);
9627 tree c = build_omp_clause (input_location,
9628 OMP_CLAUSE_FIRSTPRIVATE);
9629 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9630 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9631 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9634 /* Handle OMP_FOR_COND. */
9635 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9636 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9638 TREE_OPERAND (t, 1)
9639 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9640 gimple_seq_empty_p (for_pre_body)
9641 ? pre_p : &for_pre_body, NULL,
9642 false);
9643 tree c = build_omp_clause (input_location,
9644 OMP_CLAUSE_FIRSTPRIVATE);
9645 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9646 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9647 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9650 /* Handle OMP_FOR_INCR. */
9651 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9652 if (TREE_CODE (t) == MODIFY_EXPR)
9654 decl = TREE_OPERAND (t, 0);
9655 t = TREE_OPERAND (t, 1);
9656 tree *tp = &TREE_OPERAND (t, 1);
9657 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9658 tp = &TREE_OPERAND (t, 0);
9660 if (!is_gimple_constant (*tp))
9662 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9663 ? pre_p : &for_pre_body;
9664 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9665 tree c = build_omp_clause (input_location,
9666 OMP_CLAUSE_FIRSTPRIVATE);
9667 OMP_CLAUSE_DECL (c) = *tp;
9668 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9669 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9674 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9675 OMP_TASKLOOP);
9678 if (orig_for_stmt != for_stmt)
9679 gimplify_omp_ctxp->combined_loop = true;
9681 for_body = NULL;
9682 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9683 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9684 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9685 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9687 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9688 bool is_doacross = false;
9689 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9691 is_doacross = true;
9692 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9693 (OMP_FOR_INIT (for_stmt))
9694 * 2);
9696 int collapse = 1, tile = 0;
9697 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9698 if (c)
9699 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9700 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9701 if (c)
9702 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9703 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9705 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9706 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9707 decl = TREE_OPERAND (t, 0);
9708 gcc_assert (DECL_P (decl));
9709 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9710 || POINTER_TYPE_P (TREE_TYPE (decl)));
9711 if (is_doacross)
9713 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9714 gimplify_omp_ctxp->loop_iter_var.quick_push
9715 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9716 else
9717 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9718 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9721 /* Make sure the iteration variable is private. */
9722 tree c = NULL_TREE;
9723 tree c2 = NULL_TREE;
9724 if (orig_for_stmt != for_stmt)
9725 /* Do this only on innermost construct for combined ones. */;
9726 else if (ort == ORT_SIMD)
9728 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9729 (splay_tree_key) decl);
9730 omp_is_private (gimplify_omp_ctxp, decl,
9731 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9732 != 1));
9733 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9734 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9735 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9737 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9738 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9739 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9740 if (has_decl_expr
9741 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9743 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9744 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9746 struct gimplify_omp_ctx *outer
9747 = gimplify_omp_ctxp->outer_context;
9748 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9750 if (outer->region_type == ORT_WORKSHARE
9751 && outer->combined_loop)
9753 n = splay_tree_lookup (outer->variables,
9754 (splay_tree_key)decl);
9755 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9757 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9758 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9760 else
9762 struct gimplify_omp_ctx *octx = outer->outer_context;
9763 if (octx
9764 && octx->region_type == ORT_COMBINED_PARALLEL
9765 && octx->outer_context
9766 && (octx->outer_context->region_type
9767 == ORT_WORKSHARE)
9768 && octx->outer_context->combined_loop)
9770 octx = octx->outer_context;
9771 n = splay_tree_lookup (octx->variables,
9772 (splay_tree_key)decl);
9773 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9775 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9776 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9783 OMP_CLAUSE_DECL (c) = decl;
9784 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9785 OMP_FOR_CLAUSES (for_stmt) = c;
9786 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9787 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9789 if (outer->region_type == ORT_WORKSHARE
9790 && outer->combined_loop)
9792 if (outer->outer_context
9793 && (outer->outer_context->region_type
9794 == ORT_COMBINED_PARALLEL))
9795 outer = outer->outer_context;
9796 else if (omp_check_private (outer, decl, false))
9797 outer = NULL;
9799 else if (((outer->region_type & ORT_TASK) != 0)
9800 && outer->combined_loop
9801 && !omp_check_private (gimplify_omp_ctxp,
9802 decl, false))
9804 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9806 omp_notice_variable (outer, decl, true);
9807 outer = NULL;
9809 if (outer)
9811 n = splay_tree_lookup (outer->variables,
9812 (splay_tree_key)decl);
9813 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9815 omp_add_variable (outer, decl,
9816 GOVD_LASTPRIVATE | GOVD_SEEN);
9817 if (outer->region_type == ORT_COMBINED_PARALLEL
9818 && outer->outer_context
9819 && (outer->outer_context->region_type
9820 == ORT_WORKSHARE)
9821 && outer->outer_context->combined_loop)
9823 outer = outer->outer_context;
9824 n = splay_tree_lookup (outer->variables,
9825 (splay_tree_key)decl);
9826 if (omp_check_private (outer, decl, false))
9827 outer = NULL;
9828 else if (n == NULL
9829 || ((n->value & GOVD_DATA_SHARE_CLASS)
9830 == 0))
9831 omp_add_variable (outer, decl,
9832 GOVD_LASTPRIVATE
9833 | GOVD_SEEN);
9834 else
9835 outer = NULL;
9837 if (outer && outer->outer_context
9838 && (outer->outer_context->region_type
9839 == ORT_COMBINED_TEAMS))
9841 outer = outer->outer_context;
9842 n = splay_tree_lookup (outer->variables,
9843 (splay_tree_key)decl);
9844 if (n == NULL
9845 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9846 omp_add_variable (outer, decl,
9847 GOVD_SHARED | GOVD_SEEN);
9848 else
9849 outer = NULL;
9851 if (outer && outer->outer_context)
9852 omp_notice_variable (outer->outer_context, decl,
9853 true);
9858 else
9860 bool lastprivate
9861 = (!has_decl_expr
9862 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9863 struct gimplify_omp_ctx *outer
9864 = gimplify_omp_ctxp->outer_context;
9865 if (outer && lastprivate)
9867 if (outer->region_type == ORT_WORKSHARE
9868 && outer->combined_loop)
9870 n = splay_tree_lookup (outer->variables,
9871 (splay_tree_key)decl);
9872 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9874 lastprivate = false;
9875 outer = NULL;
9877 else if (outer->outer_context
9878 && (outer->outer_context->region_type
9879 == ORT_COMBINED_PARALLEL))
9880 outer = outer->outer_context;
9881 else if (omp_check_private (outer, decl, false))
9882 outer = NULL;
9884 else if (((outer->region_type & ORT_TASK) != 0)
9885 && outer->combined_loop
9886 && !omp_check_private (gimplify_omp_ctxp,
9887 decl, false))
9889 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9891 omp_notice_variable (outer, decl, true);
9892 outer = NULL;
9894 if (outer)
9896 n = splay_tree_lookup (outer->variables,
9897 (splay_tree_key)decl);
9898 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9900 omp_add_variable (outer, decl,
9901 GOVD_LASTPRIVATE | GOVD_SEEN);
9902 if (outer->region_type == ORT_COMBINED_PARALLEL
9903 && outer->outer_context
9904 && (outer->outer_context->region_type
9905 == ORT_WORKSHARE)
9906 && outer->outer_context->combined_loop)
9908 outer = outer->outer_context;
9909 n = splay_tree_lookup (outer->variables,
9910 (splay_tree_key)decl);
9911 if (omp_check_private (outer, decl, false))
9912 outer = NULL;
9913 else if (n == NULL
9914 || ((n->value & GOVD_DATA_SHARE_CLASS)
9915 == 0))
9916 omp_add_variable (outer, decl,
9917 GOVD_LASTPRIVATE
9918 | GOVD_SEEN);
9919 else
9920 outer = NULL;
9922 if (outer && outer->outer_context
9923 && (outer->outer_context->region_type
9924 == ORT_COMBINED_TEAMS))
9926 outer = outer->outer_context;
9927 n = splay_tree_lookup (outer->variables,
9928 (splay_tree_key)decl);
9929 if (n == NULL
9930 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9931 omp_add_variable (outer, decl,
9932 GOVD_SHARED | GOVD_SEEN);
9933 else
9934 outer = NULL;
9936 if (outer && outer->outer_context)
9937 omp_notice_variable (outer->outer_context, decl,
9938 true);
9943 c = build_omp_clause (input_location,
9944 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9945 : OMP_CLAUSE_PRIVATE);
9946 OMP_CLAUSE_DECL (c) = decl;
9947 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9948 OMP_FOR_CLAUSES (for_stmt) = c;
9949 omp_add_variable (gimplify_omp_ctxp, decl,
9950 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9951 | GOVD_EXPLICIT | GOVD_SEEN);
9952 c = NULL_TREE;
9955 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9956 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9957 else
9958 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9960 /* If DECL is not a gimple register, create a temporary variable to act
9961 as an iteration counter. This is valid, since DECL cannot be
9962 modified in the body of the loop. Similarly for any iteration vars
9963 in simd with collapse > 1 where the iterator vars must be
9964 lastprivate. */
9965 if (orig_for_stmt != for_stmt)
9966 var = decl;
9967 else if (!is_gimple_reg (decl)
9968 || (ort == ORT_SIMD
9969 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9971 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9972 /* Make sure omp_add_variable is not called on it prematurely.
9973 We call it ourselves a few lines later. */
9974 gimplify_omp_ctxp = NULL;
9975 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9976 gimplify_omp_ctxp = ctx;
9977 TREE_OPERAND (t, 0) = var;
9979 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9981 if (ort == ORT_SIMD
9982 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9984 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9985 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9986 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9987 OMP_CLAUSE_DECL (c2) = var;
9988 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9989 OMP_FOR_CLAUSES (for_stmt) = c2;
9990 omp_add_variable (gimplify_omp_ctxp, var,
9991 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9992 if (c == NULL_TREE)
9994 c = c2;
9995 c2 = NULL_TREE;
9998 else
9999 omp_add_variable (gimplify_omp_ctxp, var,
10000 GOVD_PRIVATE | GOVD_SEEN);
10002 else
10003 var = decl;
10005 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10006 is_gimple_val, fb_rvalue, false);
10007 ret = MIN (ret, tret);
10008 if (ret == GS_ERROR)
10009 return ret;
10011 /* Handle OMP_FOR_COND. */
10012 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10013 gcc_assert (COMPARISON_CLASS_P (t));
10014 gcc_assert (TREE_OPERAND (t, 0) == decl);
10016 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10017 is_gimple_val, fb_rvalue, false);
10018 ret = MIN (ret, tret);
10020 /* Handle OMP_FOR_INCR. */
10021 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10022 switch (TREE_CODE (t))
10024 case PREINCREMENT_EXPR:
10025 case POSTINCREMENT_EXPR:
10027 tree decl = TREE_OPERAND (t, 0);
10028 /* c_omp_for_incr_canonicalize_ptr() should have been
10029 called to massage things appropriately. */
10030 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10032 if (orig_for_stmt != for_stmt)
10033 break;
10034 t = build_int_cst (TREE_TYPE (decl), 1);
10035 if (c)
10036 OMP_CLAUSE_LINEAR_STEP (c) = t;
10037 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10038 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10039 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10040 break;
10043 case PREDECREMENT_EXPR:
10044 case POSTDECREMENT_EXPR:
10045 /* c_omp_for_incr_canonicalize_ptr() should have been
10046 called to massage things appropriately. */
10047 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10048 if (orig_for_stmt != for_stmt)
10049 break;
10050 t = build_int_cst (TREE_TYPE (decl), -1);
10051 if (c)
10052 OMP_CLAUSE_LINEAR_STEP (c) = t;
10053 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10054 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10055 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10056 break;
10058 case MODIFY_EXPR:
10059 gcc_assert (TREE_OPERAND (t, 0) == decl);
10060 TREE_OPERAND (t, 0) = var;
10062 t = TREE_OPERAND (t, 1);
10063 switch (TREE_CODE (t))
10065 case PLUS_EXPR:
10066 if (TREE_OPERAND (t, 1) == decl)
10068 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10069 TREE_OPERAND (t, 0) = var;
10070 break;
10073 /* Fallthru. */
10074 case MINUS_EXPR:
10075 case POINTER_PLUS_EXPR:
10076 gcc_assert (TREE_OPERAND (t, 0) == decl);
10077 TREE_OPERAND (t, 0) = var;
10078 break;
10079 default:
10080 gcc_unreachable ();
10083 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10084 is_gimple_val, fb_rvalue, false);
10085 ret = MIN (ret, tret);
10086 if (c)
10088 tree step = TREE_OPERAND (t, 1);
10089 tree stept = TREE_TYPE (decl);
10090 if (POINTER_TYPE_P (stept))
10091 stept = sizetype;
10092 step = fold_convert (stept, step);
10093 if (TREE_CODE (t) == MINUS_EXPR)
10094 step = fold_build1 (NEGATE_EXPR, stept, step);
10095 OMP_CLAUSE_LINEAR_STEP (c) = step;
10096 if (step != TREE_OPERAND (t, 1))
10098 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10099 &for_pre_body, NULL,
10100 is_gimple_val, fb_rvalue, false);
10101 ret = MIN (ret, tret);
10104 break;
10106 default:
10107 gcc_unreachable ();
10110 if (c2)
10112 gcc_assert (c);
10113 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10116 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10118 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10119 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10120 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10121 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10122 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10123 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10124 && OMP_CLAUSE_DECL (c) == decl)
10126 if (is_doacross && (collapse == 1 || i >= collapse))
10127 t = var;
10128 else
10130 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10131 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10132 gcc_assert (TREE_OPERAND (t, 0) == var);
10133 t = TREE_OPERAND (t, 1);
10134 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10135 || TREE_CODE (t) == MINUS_EXPR
10136 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10137 gcc_assert (TREE_OPERAND (t, 0) == var);
10138 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10139 is_doacross ? var : decl,
10140 TREE_OPERAND (t, 1));
10142 gimple_seq *seq;
10143 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10144 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10145 else
10146 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10147 gimplify_assign (decl, t, seq);
10152 BITMAP_FREE (has_decl_expr);
10154 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10156 push_gimplify_context ();
10157 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10159 OMP_FOR_BODY (orig_for_stmt)
10160 = build3 (BIND_EXPR, void_type_node, NULL,
10161 OMP_FOR_BODY (orig_for_stmt), NULL);
10162 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10166 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10167 &for_body);
10169 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10171 if (gimple_code (g) == GIMPLE_BIND)
10172 pop_gimplify_context (g);
10173 else
10174 pop_gimplify_context (NULL);
10177 if (orig_for_stmt != for_stmt)
10178 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10180 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10181 decl = TREE_OPERAND (t, 0);
10182 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10183 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10184 gimplify_omp_ctxp = ctx->outer_context;
10185 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10186 gimplify_omp_ctxp = ctx;
10187 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10188 TREE_OPERAND (t, 0) = var;
10189 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10190 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10191 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10194 gimplify_adjust_omp_clauses (pre_p, for_body,
10195 &OMP_FOR_CLAUSES (orig_for_stmt),
10196 TREE_CODE (orig_for_stmt));
10198 int kind;
10199 switch (TREE_CODE (orig_for_stmt))
10201 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10202 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10203 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10204 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10205 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10206 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10207 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10208 default:
10209 gcc_unreachable ();
10211 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10212 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10213 for_pre_body);
10214 if (orig_for_stmt != for_stmt)
10215 gimple_omp_for_set_combined_p (gfor, true);
10216 if (gimplify_omp_ctxp
10217 && (gimplify_omp_ctxp->combined_loop
10218 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10219 && gimplify_omp_ctxp->outer_context
10220 && gimplify_omp_ctxp->outer_context->combined_loop)))
10222 gimple_omp_for_set_combined_into_p (gfor, true);
10223 if (gimplify_omp_ctxp->combined_loop)
10224 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10225 else
10226 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10229 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10231 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10232 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10233 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10234 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10235 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10236 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10237 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10238 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10241 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10242 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10243 The outer taskloop stands for computing the number of iterations,
10244 counts for collapsed loops and holding taskloop specific clauses.
10245 The task construct stands for the effect of data sharing on the
10246 explicit task it creates and the inner taskloop stands for expansion
10247 of the static loop inside of the explicit task construct. */
10248 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10250 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10251 tree task_clauses = NULL_TREE;
10252 tree c = *gfor_clauses_ptr;
10253 tree *gtask_clauses_ptr = &task_clauses;
10254 tree outer_for_clauses = NULL_TREE;
10255 tree *gforo_clauses_ptr = &outer_for_clauses;
10256 for (; c; c = OMP_CLAUSE_CHAIN (c))
10257 switch (OMP_CLAUSE_CODE (c))
10259 /* These clauses are allowed on task, move them there. */
10260 case OMP_CLAUSE_SHARED:
10261 case OMP_CLAUSE_FIRSTPRIVATE:
10262 case OMP_CLAUSE_DEFAULT:
10263 case OMP_CLAUSE_IF:
10264 case OMP_CLAUSE_UNTIED:
10265 case OMP_CLAUSE_FINAL:
10266 case OMP_CLAUSE_MERGEABLE:
10267 case OMP_CLAUSE_PRIORITY:
10268 *gtask_clauses_ptr = c;
10269 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10270 break;
10271 case OMP_CLAUSE_PRIVATE:
10272 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10274 /* We want private on outer for and firstprivate
10275 on task. */
10276 *gtask_clauses_ptr
10277 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10278 OMP_CLAUSE_FIRSTPRIVATE);
10279 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10280 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10281 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10282 *gforo_clauses_ptr = c;
10283 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10285 else
10287 *gtask_clauses_ptr = c;
10288 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10290 break;
10291 /* These clauses go into outer taskloop clauses. */
10292 case OMP_CLAUSE_GRAINSIZE:
10293 case OMP_CLAUSE_NUM_TASKS:
10294 case OMP_CLAUSE_NOGROUP:
10295 *gforo_clauses_ptr = c;
10296 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10297 break;
10298 /* Taskloop clause we duplicate on both taskloops. */
10299 case OMP_CLAUSE_COLLAPSE:
10300 *gfor_clauses_ptr = c;
10301 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10302 *gforo_clauses_ptr = copy_node (c);
10303 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10304 break;
10305 /* For lastprivate, keep the clause on inner taskloop, and add
10306 a shared clause on task. If the same decl is also firstprivate,
10307 add also firstprivate clause on the inner taskloop. */
10308 case OMP_CLAUSE_LASTPRIVATE:
10309 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10311 /* For taskloop C++ lastprivate IVs, we want:
10312 1) private on outer taskloop
10313 2) firstprivate and shared on task
10314 3) lastprivate on inner taskloop */
10315 *gtask_clauses_ptr
10316 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10317 OMP_CLAUSE_FIRSTPRIVATE);
10318 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10319 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10320 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10321 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10322 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10323 OMP_CLAUSE_PRIVATE);
10324 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10325 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10326 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10327 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10329 *gfor_clauses_ptr = c;
10330 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10331 *gtask_clauses_ptr
10332 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10333 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10334 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10335 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10336 gtask_clauses_ptr
10337 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10338 break;
10339 default:
10340 gcc_unreachable ();
10342 *gfor_clauses_ptr = NULL_TREE;
10343 *gtask_clauses_ptr = NULL_TREE;
10344 *gforo_clauses_ptr = NULL_TREE;
10345 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10346 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10347 NULL_TREE, NULL_TREE, NULL_TREE);
10348 gimple_omp_task_set_taskloop_p (g, true);
10349 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10350 gomp_for *gforo
10351 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10352 gimple_omp_for_collapse (gfor),
10353 gimple_omp_for_pre_body (gfor));
10354 gimple_omp_for_set_pre_body (gfor, NULL);
10355 gimple_omp_for_set_combined_p (gforo, true);
10356 gimple_omp_for_set_combined_into_p (gfor, true);
10357 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10359 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10360 tree v = create_tmp_var (type);
10361 gimple_omp_for_set_index (gforo, i, v);
10362 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10363 gimple_omp_for_set_initial (gforo, i, t);
10364 gimple_omp_for_set_cond (gforo, i,
10365 gimple_omp_for_cond (gfor, i));
10366 t = unshare_expr (gimple_omp_for_final (gfor, i));
10367 gimple_omp_for_set_final (gforo, i, t);
10368 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10369 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10370 TREE_OPERAND (t, 0) = v;
10371 gimple_omp_for_set_incr (gforo, i, t);
10372 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10373 OMP_CLAUSE_DECL (t) = v;
10374 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10375 gimple_omp_for_set_clauses (gforo, t);
10377 gimplify_seq_add_stmt (pre_p, gforo);
10379 else
10380 gimplify_seq_add_stmt (pre_p, gfor);
10381 if (ret != GS_ALL_DONE)
10382 return GS_ERROR;
10383 *expr_p = NULL_TREE;
10384 return GS_ALL_DONE;
10387 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10388 of OMP_TARGET's body. */
10390 static tree
10391 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10393 *walk_subtrees = 0;
10394 switch (TREE_CODE (*tp))
10396 case OMP_TEAMS:
10397 return *tp;
10398 case BIND_EXPR:
10399 case STATEMENT_LIST:
10400 *walk_subtrees = 1;
10401 break;
10402 default:
10403 break;
10405 return NULL_TREE;
10408 /* Helper function of optimize_target_teams, determine if the expression
10409 can be computed safely before the target construct on the host. */
10411 static tree
10412 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10414 splay_tree_node n;
10416 if (TYPE_P (*tp))
10418 *walk_subtrees = 0;
10419 return NULL_TREE;
10421 switch (TREE_CODE (*tp))
10423 case VAR_DECL:
10424 case PARM_DECL:
10425 case RESULT_DECL:
10426 *walk_subtrees = 0;
10427 if (error_operand_p (*tp)
10428 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10429 || DECL_HAS_VALUE_EXPR_P (*tp)
10430 || DECL_THREAD_LOCAL_P (*tp)
10431 || TREE_SIDE_EFFECTS (*tp)
10432 || TREE_THIS_VOLATILE (*tp))
10433 return *tp;
10434 if (is_global_var (*tp)
10435 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10436 || lookup_attribute ("omp declare target link",
10437 DECL_ATTRIBUTES (*tp))))
10438 return *tp;
10439 if (VAR_P (*tp)
10440 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10441 && !is_global_var (*tp)
10442 && decl_function_context (*tp) == current_function_decl)
10443 return *tp;
10444 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10445 (splay_tree_key) *tp);
10446 if (n == NULL)
10448 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10449 return NULL_TREE;
10450 return *tp;
10452 else if (n->value & GOVD_LOCAL)
10453 return *tp;
10454 else if (n->value & GOVD_FIRSTPRIVATE)
10455 return NULL_TREE;
10456 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10457 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10458 return NULL_TREE;
10459 return *tp;
10460 case INTEGER_CST:
10461 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10462 return *tp;
10463 return NULL_TREE;
10464 case TARGET_EXPR:
10465 if (TARGET_EXPR_INITIAL (*tp)
10466 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10467 return *tp;
10468 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10469 walk_subtrees, NULL);
10470 /* Allow some reasonable subset of integral arithmetics. */
10471 case PLUS_EXPR:
10472 case MINUS_EXPR:
10473 case MULT_EXPR:
10474 case TRUNC_DIV_EXPR:
10475 case CEIL_DIV_EXPR:
10476 case FLOOR_DIV_EXPR:
10477 case ROUND_DIV_EXPR:
10478 case TRUNC_MOD_EXPR:
10479 case CEIL_MOD_EXPR:
10480 case FLOOR_MOD_EXPR:
10481 case ROUND_MOD_EXPR:
10482 case RDIV_EXPR:
10483 case EXACT_DIV_EXPR:
10484 case MIN_EXPR:
10485 case MAX_EXPR:
10486 case LSHIFT_EXPR:
10487 case RSHIFT_EXPR:
10488 case BIT_IOR_EXPR:
10489 case BIT_XOR_EXPR:
10490 case BIT_AND_EXPR:
10491 case NEGATE_EXPR:
10492 case ABS_EXPR:
10493 case BIT_NOT_EXPR:
10494 case NON_LVALUE_EXPR:
10495 CASE_CONVERT:
10496 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10497 return *tp;
10498 return NULL_TREE;
10499 /* And disallow anything else, except for comparisons. */
10500 default:
10501 if (COMPARISON_CLASS_P (*tp))
10502 return NULL_TREE;
10503 return *tp;
10507 /* Try to determine if the num_teams and/or thread_limit expressions
10508 can have their values determined already before entering the
10509 target construct.
10510 INTEGER_CSTs trivially are,
10511 integral decls that are firstprivate (explicitly or implicitly)
10512 or explicitly map(always, to:) or map(always, tofrom:) on the target
10513 region too, and expressions involving simple arithmetics on those
10514 too, function calls are not ok, dereferencing something neither etc.
10515 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10516 EXPR based on what we find:
10517 0 stands for clause not specified at all, use implementation default
10518 -1 stands for value that can't be determined easily before entering
10519 the target construct.
10520 If teams construct is not present at all, use 1 for num_teams
10521 and 0 for thread_limit (only one team is involved, and the thread
10522 limit is implementation defined. */
10524 static void
10525 optimize_target_teams (tree target, gimple_seq *pre_p)
10527 tree body = OMP_BODY (target);
10528 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10529 tree num_teams = integer_zero_node;
10530 tree thread_limit = integer_zero_node;
10531 location_t num_teams_loc = EXPR_LOCATION (target);
10532 location_t thread_limit_loc = EXPR_LOCATION (target);
10533 tree c, *p, expr;
10534 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10536 if (teams == NULL_TREE)
10537 num_teams = integer_one_node;
10538 else
10539 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10541 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10543 p = &num_teams;
10544 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10546 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10548 p = &thread_limit;
10549 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10551 else
10552 continue;
10553 expr = OMP_CLAUSE_OPERAND (c, 0);
10554 if (TREE_CODE (expr) == INTEGER_CST)
10556 *p = expr;
10557 continue;
10559 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10561 *p = integer_minus_one_node;
10562 continue;
10564 *p = expr;
10565 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10566 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10567 == GS_ERROR)
10569 gimplify_omp_ctxp = target_ctx;
10570 *p = integer_minus_one_node;
10571 continue;
10573 gimplify_omp_ctxp = target_ctx;
10574 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10575 OMP_CLAUSE_OPERAND (c, 0) = *p;
10577 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10578 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10579 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10580 OMP_TARGET_CLAUSES (target) = c;
10581 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10582 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10583 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10584 OMP_TARGET_CLAUSES (target) = c;
10587 /* Gimplify the gross structure of several OMP constructs. */
10589 static void
10590 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10592 tree expr = *expr_p;
10593 gimple *stmt;
10594 gimple_seq body = NULL;
10595 enum omp_region_type ort;
10597 switch (TREE_CODE (expr))
10599 case OMP_SECTIONS:
10600 case OMP_SINGLE:
10601 ort = ORT_WORKSHARE;
10602 break;
10603 case OMP_TARGET:
10604 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10605 break;
10606 case OACC_KERNELS:
10607 ort = ORT_ACC_KERNELS;
10608 break;
10609 case OACC_PARALLEL:
10610 ort = ORT_ACC_PARALLEL;
10611 break;
10612 case OACC_DATA:
10613 ort = ORT_ACC_DATA;
10614 break;
10615 case OMP_TARGET_DATA:
10616 ort = ORT_TARGET_DATA;
10617 break;
10618 case OMP_TEAMS:
10619 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10620 break;
10621 case OACC_HOST_DATA:
10622 ort = ORT_ACC_HOST_DATA;
10623 break;
10624 default:
10625 gcc_unreachable ();
10627 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10628 TREE_CODE (expr));
10629 if (TREE_CODE (expr) == OMP_TARGET)
10630 optimize_target_teams (expr, pre_p);
10631 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10633 push_gimplify_context ();
10634 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10635 if (gimple_code (g) == GIMPLE_BIND)
10636 pop_gimplify_context (g);
10637 else
10638 pop_gimplify_context (NULL);
10639 if ((ort & ORT_TARGET_DATA) != 0)
10641 enum built_in_function end_ix;
10642 switch (TREE_CODE (expr))
10644 case OACC_DATA:
10645 case OACC_HOST_DATA:
10646 end_ix = BUILT_IN_GOACC_DATA_END;
10647 break;
10648 case OMP_TARGET_DATA:
10649 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10650 break;
10651 default:
10652 gcc_unreachable ();
10654 tree fn = builtin_decl_explicit (end_ix);
10655 g = gimple_build_call (fn, 0);
10656 gimple_seq cleanup = NULL;
10657 gimple_seq_add_stmt (&cleanup, g);
10658 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10659 body = NULL;
10660 gimple_seq_add_stmt (&body, g);
10663 else
10664 gimplify_and_add (OMP_BODY (expr), &body);
10665 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10666 TREE_CODE (expr));
10668 switch (TREE_CODE (expr))
10670 case OACC_DATA:
10671 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10672 OMP_CLAUSES (expr));
10673 break;
10674 case OACC_KERNELS:
10675 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10676 OMP_CLAUSES (expr));
10677 break;
10678 case OACC_HOST_DATA:
10679 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10680 OMP_CLAUSES (expr));
10681 break;
10682 case OACC_PARALLEL:
10683 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10684 OMP_CLAUSES (expr));
10685 break;
10686 case OMP_SECTIONS:
10687 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10688 break;
10689 case OMP_SINGLE:
10690 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10691 break;
10692 case OMP_TARGET:
10693 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10694 OMP_CLAUSES (expr));
10695 break;
10696 case OMP_TARGET_DATA:
10697 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10698 OMP_CLAUSES (expr));
10699 break;
10700 case OMP_TEAMS:
10701 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10702 break;
10703 default:
10704 gcc_unreachable ();
10707 gimplify_seq_add_stmt (pre_p, stmt);
10708 *expr_p = NULL_TREE;
10711 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10712 target update constructs. */
10714 static void
10715 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10717 tree expr = *expr_p;
10718 int kind;
10719 gomp_target *stmt;
10720 enum omp_region_type ort = ORT_WORKSHARE;
10722 switch (TREE_CODE (expr))
10724 case OACC_ENTER_DATA:
10725 case OACC_EXIT_DATA:
10726 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10727 ort = ORT_ACC;
10728 break;
10729 case OACC_UPDATE:
10730 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10731 ort = ORT_ACC;
10732 break;
10733 case OMP_TARGET_UPDATE:
10734 kind = GF_OMP_TARGET_KIND_UPDATE;
10735 break;
10736 case OMP_TARGET_ENTER_DATA:
10737 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10738 break;
10739 case OMP_TARGET_EXIT_DATA:
10740 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10741 break;
10742 default:
10743 gcc_unreachable ();
10745 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10746 ort, TREE_CODE (expr));
10747 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10748 TREE_CODE (expr));
10749 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10751 gimplify_seq_add_stmt (pre_p, stmt);
10752 *expr_p = NULL_TREE;
10755 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10756 stabilized the lhs of the atomic operation as *ADDR. Return true if
10757 EXPR is this stabilized form. */
10759 static bool
10760 goa_lhs_expr_p (tree expr, tree addr)
10762 /* Also include casts to other type variants. The C front end is fond
10763 of adding these for e.g. volatile variables. This is like
10764 STRIP_TYPE_NOPS but includes the main variant lookup. */
10765 STRIP_USELESS_TYPE_CONVERSION (expr);
10767 if (TREE_CODE (expr) == INDIRECT_REF)
10769 expr = TREE_OPERAND (expr, 0);
10770 while (expr != addr
10771 && (CONVERT_EXPR_P (expr)
10772 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10773 && TREE_CODE (expr) == TREE_CODE (addr)
10774 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10776 expr = TREE_OPERAND (expr, 0);
10777 addr = TREE_OPERAND (addr, 0);
10779 if (expr == addr)
10780 return true;
10781 return (TREE_CODE (addr) == ADDR_EXPR
10782 && TREE_CODE (expr) == ADDR_EXPR
10783 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10785 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10786 return true;
10787 return false;
10790 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10791 expression does not involve the lhs, evaluate it into a temporary.
10792 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10793 or -1 if an error was encountered. */
10795 static int
10796 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10797 tree lhs_var)
10799 tree expr = *expr_p;
10800 int saw_lhs;
10802 if (goa_lhs_expr_p (expr, lhs_addr))
10804 *expr_p = lhs_var;
10805 return 1;
10807 if (is_gimple_val (expr))
10808 return 0;
10810 saw_lhs = 0;
10811 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10813 case tcc_binary:
10814 case tcc_comparison:
10815 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10816 lhs_var);
10817 /* FALLTHRU */
10818 case tcc_unary:
10819 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10820 lhs_var);
10821 break;
10822 case tcc_expression:
10823 switch (TREE_CODE (expr))
10825 case TRUTH_ANDIF_EXPR:
10826 case TRUTH_ORIF_EXPR:
10827 case TRUTH_AND_EXPR:
10828 case TRUTH_OR_EXPR:
10829 case TRUTH_XOR_EXPR:
10830 case BIT_INSERT_EXPR:
10831 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10832 lhs_addr, lhs_var);
10833 /* FALLTHRU */
10834 case TRUTH_NOT_EXPR:
10835 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10836 lhs_addr, lhs_var);
10837 break;
10838 case COMPOUND_EXPR:
10839 /* Break out any preevaluations from cp_build_modify_expr. */
10840 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10841 expr = TREE_OPERAND (expr, 1))
10842 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10843 *expr_p = expr;
10844 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10845 default:
10846 break;
10848 break;
10849 case tcc_reference:
10850 if (TREE_CODE (expr) == BIT_FIELD_REF)
10851 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10852 lhs_addr, lhs_var);
10853 break;
10854 default:
10855 break;
10858 if (saw_lhs == 0)
10860 enum gimplify_status gs;
10861 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10862 if (gs != GS_ALL_DONE)
10863 saw_lhs = -1;
10866 return saw_lhs;
10869 /* Gimplify an OMP_ATOMIC statement. */
10871 static enum gimplify_status
10872 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10874 tree addr = TREE_OPERAND (*expr_p, 0);
10875 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10876 ? NULL : TREE_OPERAND (*expr_p, 1);
10877 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10878 tree tmp_load;
10879 gomp_atomic_load *loadstmt;
10880 gomp_atomic_store *storestmt;
10882 tmp_load = create_tmp_reg (type);
10883 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10884 return GS_ERROR;
10886 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10887 != GS_ALL_DONE)
10888 return GS_ERROR;
10890 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10891 gimplify_seq_add_stmt (pre_p, loadstmt);
10892 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10893 != GS_ALL_DONE)
10894 return GS_ERROR;
10896 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10897 rhs = tmp_load;
10898 storestmt = gimple_build_omp_atomic_store (rhs);
10899 gimplify_seq_add_stmt (pre_p, storestmt);
10900 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10902 gimple_omp_atomic_set_seq_cst (loadstmt);
10903 gimple_omp_atomic_set_seq_cst (storestmt);
10905 switch (TREE_CODE (*expr_p))
10907 case OMP_ATOMIC_READ:
10908 case OMP_ATOMIC_CAPTURE_OLD:
10909 *expr_p = tmp_load;
10910 gimple_omp_atomic_set_need_value (loadstmt);
10911 break;
10912 case OMP_ATOMIC_CAPTURE_NEW:
10913 *expr_p = rhs;
10914 gimple_omp_atomic_set_need_value (storestmt);
10915 break;
10916 default:
10917 *expr_p = NULL;
10918 break;
10921 return GS_ALL_DONE;
10924 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10925 body, and adding some EH bits. */
10927 static enum gimplify_status
10928 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10930 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10931 gimple *body_stmt;
10932 gtransaction *trans_stmt;
10933 gimple_seq body = NULL;
10934 int subcode = 0;
10936 /* Wrap the transaction body in a BIND_EXPR so we have a context
10937 where to put decls for OMP. */
10938 if (TREE_CODE (tbody) != BIND_EXPR)
10940 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10941 TREE_SIDE_EFFECTS (bind) = 1;
10942 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10943 TRANSACTION_EXPR_BODY (expr) = bind;
10946 push_gimplify_context ();
10947 temp = voidify_wrapper_expr (*expr_p, NULL);
10949 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10950 pop_gimplify_context (body_stmt);
10952 trans_stmt = gimple_build_transaction (body);
10953 if (TRANSACTION_EXPR_OUTER (expr))
10954 subcode = GTMA_IS_OUTER;
10955 else if (TRANSACTION_EXPR_RELAXED (expr))
10956 subcode = GTMA_IS_RELAXED;
10957 gimple_transaction_set_subcode (trans_stmt, subcode);
10959 gimplify_seq_add_stmt (pre_p, trans_stmt);
10961 if (temp)
10963 *expr_p = temp;
10964 return GS_OK;
10967 *expr_p = NULL_TREE;
10968 return GS_ALL_DONE;
10971 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10972 is the OMP_BODY of the original EXPR (which has already been
10973 gimplified so it's not present in the EXPR).
10975 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10977 static gimple *
10978 gimplify_omp_ordered (tree expr, gimple_seq body)
10980 tree c, decls;
10981 int failures = 0;
10982 unsigned int i;
10983 tree source_c = NULL_TREE;
10984 tree sink_c = NULL_TREE;
10986 if (gimplify_omp_ctxp)
10988 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10989 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10990 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10991 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10992 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10994 error_at (OMP_CLAUSE_LOCATION (c),
10995 "%<ordered%> construct with %<depend%> clause must be "
10996 "closely nested inside a loop with %<ordered%> clause "
10997 "with a parameter");
10998 failures++;
11000 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11001 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
11003 bool fail = false;
11004 for (decls = OMP_CLAUSE_DECL (c), i = 0;
11005 decls && TREE_CODE (decls) == TREE_LIST;
11006 decls = TREE_CHAIN (decls), ++i)
11007 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
11008 continue;
11009 else if (TREE_VALUE (decls)
11010 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11012 error_at (OMP_CLAUSE_LOCATION (c),
11013 "variable %qE is not an iteration "
11014 "of outermost loop %d, expected %qE",
11015 TREE_VALUE (decls), i + 1,
11016 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11017 fail = true;
11018 failures++;
11020 else
11021 TREE_VALUE (decls)
11022 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
11023 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11025 error_at (OMP_CLAUSE_LOCATION (c),
11026 "number of variables in %<depend(sink)%> "
11027 "clause does not match number of "
11028 "iteration variables");
11029 failures++;
11031 sink_c = c;
11033 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11034 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11036 if (source_c)
11038 error_at (OMP_CLAUSE_LOCATION (c),
11039 "more than one %<depend(source)%> clause on an "
11040 "%<ordered%> construct");
11041 failures++;
11043 else
11044 source_c = c;
11047 if (source_c && sink_c)
11049 error_at (OMP_CLAUSE_LOCATION (source_c),
11050 "%<depend(source)%> clause specified together with "
11051 "%<depend(sink:)%> clauses on the same construct");
11052 failures++;
11055 if (failures)
11056 return gimple_build_nop ();
11057 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11060 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11061 expression produces a value to be used as an operand inside a GIMPLE
11062 statement, the value will be stored back in *EXPR_P. This value will
11063 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11064 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11065 emitted in PRE_P and POST_P.
11067 Additionally, this process may overwrite parts of the input
11068 expression during gimplification. Ideally, it should be
11069 possible to do non-destructive gimplification.
11071 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11072 the expression needs to evaluate to a value to be used as
11073 an operand in a GIMPLE statement, this value will be stored in
11074 *EXPR_P on exit. This happens when the caller specifies one
11075 of fb_lvalue or fb_rvalue fallback flags.
11077 PRE_P will contain the sequence of GIMPLE statements corresponding
11078 to the evaluation of EXPR and all the side-effects that must
11079 be executed before the main expression. On exit, the last
11080 statement of PRE_P is the core statement being gimplified. For
11081 instance, when gimplifying 'if (++a)' the last statement in
11082 PRE_P will be 'if (t.1)' where t.1 is the result of
11083 pre-incrementing 'a'.
11085 POST_P will contain the sequence of GIMPLE statements corresponding
11086 to the evaluation of all the side-effects that must be executed
11087 after the main expression. If this is NULL, the post
11088 side-effects are stored at the end of PRE_P.
11090 The reason why the output is split in two is to handle post
11091 side-effects explicitly. In some cases, an expression may have
11092 inner and outer post side-effects which need to be emitted in
11093 an order different from the one given by the recursive
11094 traversal. For instance, for the expression (*p--)++ the post
11095 side-effects of '--' must actually occur *after* the post
11096 side-effects of '++'. However, gimplification will first visit
11097 the inner expression, so if a separate POST sequence was not
11098 used, the resulting sequence would be:
11100 1 t.1 = *p
11101 2 p = p - 1
11102 3 t.2 = t.1 + 1
11103 4 *p = t.2
11105 However, the post-decrement operation in line #2 must not be
11106 evaluated until after the store to *p at line #4, so the
11107 correct sequence should be:
11109 1 t.1 = *p
11110 2 t.2 = t.1 + 1
11111 3 *p = t.2
11112 4 p = p - 1
11114 So, by specifying a separate post queue, it is possible
11115 to emit the post side-effects in the correct order.
11116 If POST_P is NULL, an internal queue will be used. Before
11117 returning to the caller, the sequence POST_P is appended to
11118 the main output sequence PRE_P.
11120 GIMPLE_TEST_F points to a function that takes a tree T and
11121 returns nonzero if T is in the GIMPLE form requested by the
11122 caller. The GIMPLE predicates are in gimple.c.
11124 FALLBACK tells the function what sort of a temporary we want if
11125 gimplification cannot produce an expression that complies with
11126 GIMPLE_TEST_F.
11128 fb_none means that no temporary should be generated
11129 fb_rvalue means that an rvalue is OK to generate
11130 fb_lvalue means that an lvalue is OK to generate
11131 fb_either means that either is OK, but an lvalue is preferable.
11132 fb_mayfail means that gimplification may fail (in which case
11133 GS_ERROR will be returned)
11135 The return value is either GS_ERROR or GS_ALL_DONE, since this
11136 function iterates until EXPR is completely gimplified or an error
11137 occurs. */
11139 enum gimplify_status
11140 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11141 bool (*gimple_test_f) (tree), fallback_t fallback)
11143 tree tmp;
11144 gimple_seq internal_pre = NULL;
11145 gimple_seq internal_post = NULL;
11146 tree save_expr;
11147 bool is_statement;
11148 location_t saved_location;
11149 enum gimplify_status ret;
11150 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11151 tree label;
11153 save_expr = *expr_p;
11154 if (save_expr == NULL_TREE)
11155 return GS_ALL_DONE;
11157 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11158 is_statement = gimple_test_f == is_gimple_stmt;
11159 if (is_statement)
11160 gcc_assert (pre_p);
11162 /* Consistency checks. */
11163 if (gimple_test_f == is_gimple_reg)
11164 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11165 else if (gimple_test_f == is_gimple_val
11166 || gimple_test_f == is_gimple_call_addr
11167 || gimple_test_f == is_gimple_condexpr
11168 || gimple_test_f == is_gimple_mem_rhs
11169 || gimple_test_f == is_gimple_mem_rhs_or_call
11170 || gimple_test_f == is_gimple_reg_rhs
11171 || gimple_test_f == is_gimple_reg_rhs_or_call
11172 || gimple_test_f == is_gimple_asm_val
11173 || gimple_test_f == is_gimple_mem_ref_addr)
11174 gcc_assert (fallback & fb_rvalue);
11175 else if (gimple_test_f == is_gimple_min_lval
11176 || gimple_test_f == is_gimple_lvalue)
11177 gcc_assert (fallback & fb_lvalue);
11178 else if (gimple_test_f == is_gimple_addressable)
11179 gcc_assert (fallback & fb_either);
11180 else if (gimple_test_f == is_gimple_stmt)
11181 gcc_assert (fallback == fb_none);
11182 else
11184 /* We should have recognized the GIMPLE_TEST_F predicate to
11185 know what kind of fallback to use in case a temporary is
11186 needed to hold the value or address of *EXPR_P. */
11187 gcc_unreachable ();
11190 /* We used to check the predicate here and return immediately if it
11191 succeeds. This is wrong; the design is for gimplification to be
11192 idempotent, and for the predicates to only test for valid forms, not
11193 whether they are fully simplified. */
11194 if (pre_p == NULL)
11195 pre_p = &internal_pre;
11197 if (post_p == NULL)
11198 post_p = &internal_post;
11200 /* Remember the last statements added to PRE_P and POST_P. Every
11201 new statement added by the gimplification helpers needs to be
11202 annotated with location information. To centralize the
11203 responsibility, we remember the last statement that had been
11204 added to both queues before gimplifying *EXPR_P. If
11205 gimplification produces new statements in PRE_P and POST_P, those
11206 statements will be annotated with the same location information
11207 as *EXPR_P. */
11208 pre_last_gsi = gsi_last (*pre_p);
11209 post_last_gsi = gsi_last (*post_p);
11211 saved_location = input_location;
11212 if (save_expr != error_mark_node
11213 && EXPR_HAS_LOCATION (*expr_p))
11214 input_location = EXPR_LOCATION (*expr_p);
11216 /* Loop over the specific gimplifiers until the toplevel node
11217 remains the same. */
11220 /* Strip away as many useless type conversions as possible
11221 at the toplevel. */
11222 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11224 /* Remember the expr. */
11225 save_expr = *expr_p;
11227 /* Die, die, die, my darling. */
11228 if (save_expr == error_mark_node
11229 || (TREE_TYPE (save_expr)
11230 && TREE_TYPE (save_expr) == error_mark_node))
11232 ret = GS_ERROR;
11233 break;
11236 /* Do any language-specific gimplification. */
11237 ret = ((enum gimplify_status)
11238 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11239 if (ret == GS_OK)
11241 if (*expr_p == NULL_TREE)
11242 break;
11243 if (*expr_p != save_expr)
11244 continue;
11246 else if (ret != GS_UNHANDLED)
11247 break;
11249 /* Make sure that all the cases set 'ret' appropriately. */
11250 ret = GS_UNHANDLED;
11251 switch (TREE_CODE (*expr_p))
11253 /* First deal with the special cases. */
11255 case POSTINCREMENT_EXPR:
11256 case POSTDECREMENT_EXPR:
11257 case PREINCREMENT_EXPR:
11258 case PREDECREMENT_EXPR:
11259 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11260 fallback != fb_none,
11261 TREE_TYPE (*expr_p));
11262 break;
11264 case VIEW_CONVERT_EXPR:
11265 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11266 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11268 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11269 post_p, is_gimple_val, fb_rvalue);
11270 recalculate_side_effects (*expr_p);
11271 break;
11273 /* Fallthru. */
11275 case ARRAY_REF:
11276 case ARRAY_RANGE_REF:
11277 case REALPART_EXPR:
11278 case IMAGPART_EXPR:
11279 case COMPONENT_REF:
11280 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11281 fallback ? fallback : fb_rvalue);
11282 break;
11284 case COND_EXPR:
11285 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11287 /* C99 code may assign to an array in a structure value of a
11288 conditional expression, and this has undefined behavior
11289 only on execution, so create a temporary if an lvalue is
11290 required. */
11291 if (fallback == fb_lvalue)
11293 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11294 mark_addressable (*expr_p);
11295 ret = GS_OK;
11297 break;
11299 case CALL_EXPR:
11300 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11302 /* C99 code may assign to an array in a structure returned
11303 from a function, and this has undefined behavior only on
11304 execution, so create a temporary if an lvalue is
11305 required. */
11306 if (fallback == fb_lvalue)
11308 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11309 mark_addressable (*expr_p);
11310 ret = GS_OK;
11312 break;
11314 case TREE_LIST:
11315 gcc_unreachable ();
11317 case COMPOUND_EXPR:
11318 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11319 break;
11321 case COMPOUND_LITERAL_EXPR:
11322 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11323 gimple_test_f, fallback);
11324 break;
11326 case MODIFY_EXPR:
11327 case INIT_EXPR:
11328 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11329 fallback != fb_none);
11330 break;
11332 case TRUTH_ANDIF_EXPR:
11333 case TRUTH_ORIF_EXPR:
11335 /* Preserve the original type of the expression and the
11336 source location of the outer expression. */
11337 tree org_type = TREE_TYPE (*expr_p);
11338 *expr_p = gimple_boolify (*expr_p);
11339 *expr_p = build3_loc (input_location, COND_EXPR,
11340 org_type, *expr_p,
11341 fold_convert_loc
11342 (input_location,
11343 org_type, boolean_true_node),
11344 fold_convert_loc
11345 (input_location,
11346 org_type, boolean_false_node));
11347 ret = GS_OK;
11348 break;
11351 case TRUTH_NOT_EXPR:
11353 tree type = TREE_TYPE (*expr_p);
11354 /* The parsers are careful to generate TRUTH_NOT_EXPR
11355 only with operands that are always zero or one.
11356 We do not fold here but handle the only interesting case
11357 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11358 *expr_p = gimple_boolify (*expr_p);
11359 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11360 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11361 TREE_TYPE (*expr_p),
11362 TREE_OPERAND (*expr_p, 0));
11363 else
11364 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11365 TREE_TYPE (*expr_p),
11366 TREE_OPERAND (*expr_p, 0),
11367 build_int_cst (TREE_TYPE (*expr_p), 1));
11368 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11369 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11370 ret = GS_OK;
11371 break;
11374 case ADDR_EXPR:
11375 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11376 break;
11378 case ANNOTATE_EXPR:
11380 tree cond = TREE_OPERAND (*expr_p, 0);
11381 tree kind = TREE_OPERAND (*expr_p, 1);
11382 tree type = TREE_TYPE (cond);
11383 if (!INTEGRAL_TYPE_P (type))
11385 *expr_p = cond;
11386 ret = GS_OK;
11387 break;
11389 tree tmp = create_tmp_var (type);
11390 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11391 gcall *call
11392 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11393 gimple_call_set_lhs (call, tmp);
11394 gimplify_seq_add_stmt (pre_p, call);
11395 *expr_p = tmp;
11396 ret = GS_ALL_DONE;
11397 break;
11400 case VA_ARG_EXPR:
11401 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11402 break;
11404 CASE_CONVERT:
11405 if (IS_EMPTY_STMT (*expr_p))
11407 ret = GS_ALL_DONE;
11408 break;
11411 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11412 || fallback == fb_none)
11414 /* Just strip a conversion to void (or in void context) and
11415 try again. */
11416 *expr_p = TREE_OPERAND (*expr_p, 0);
11417 ret = GS_OK;
11418 break;
11421 ret = gimplify_conversion (expr_p);
11422 if (ret == GS_ERROR)
11423 break;
11424 if (*expr_p != save_expr)
11425 break;
11426 /* FALLTHRU */
11428 case FIX_TRUNC_EXPR:
11429 /* unary_expr: ... | '(' cast ')' val | ... */
11430 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11431 is_gimple_val, fb_rvalue);
11432 recalculate_side_effects (*expr_p);
11433 break;
11435 case INDIRECT_REF:
11437 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11438 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11439 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11441 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11442 if (*expr_p != save_expr)
11444 ret = GS_OK;
11445 break;
11448 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11449 is_gimple_reg, fb_rvalue);
11450 if (ret == GS_ERROR)
11451 break;
11453 recalculate_side_effects (*expr_p);
11454 *expr_p = fold_build2_loc (input_location, MEM_REF,
11455 TREE_TYPE (*expr_p),
11456 TREE_OPERAND (*expr_p, 0),
11457 build_int_cst (saved_ptr_type, 0));
11458 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11459 TREE_THIS_NOTRAP (*expr_p) = notrap;
11460 ret = GS_OK;
11461 break;
11464 /* We arrive here through the various re-gimplifcation paths. */
11465 case MEM_REF:
11466 /* First try re-folding the whole thing. */
11467 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11468 TREE_OPERAND (*expr_p, 0),
11469 TREE_OPERAND (*expr_p, 1));
11470 if (tmp)
11472 REF_REVERSE_STORAGE_ORDER (tmp)
11473 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11474 *expr_p = tmp;
11475 recalculate_side_effects (*expr_p);
11476 ret = GS_OK;
11477 break;
11479 /* Avoid re-gimplifying the address operand if it is already
11480 in suitable form. Re-gimplifying would mark the address
11481 operand addressable. Always gimplify when not in SSA form
11482 as we still may have to gimplify decls with value-exprs. */
11483 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11484 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11486 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11487 is_gimple_mem_ref_addr, fb_rvalue);
11488 if (ret == GS_ERROR)
11489 break;
11491 recalculate_side_effects (*expr_p);
11492 ret = GS_ALL_DONE;
11493 break;
11495 /* Constants need not be gimplified. */
11496 case INTEGER_CST:
11497 case REAL_CST:
11498 case FIXED_CST:
11499 case STRING_CST:
11500 case COMPLEX_CST:
11501 case VECTOR_CST:
11502 /* Drop the overflow flag on constants, we do not want
11503 that in the GIMPLE IL. */
11504 if (TREE_OVERFLOW_P (*expr_p))
11505 *expr_p = drop_tree_overflow (*expr_p);
11506 ret = GS_ALL_DONE;
11507 break;
11509 case CONST_DECL:
11510 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11511 CONST_DECL node. Otherwise the decl is replaceable by its
11512 value. */
11513 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11514 if (fallback & fb_lvalue)
11515 ret = GS_ALL_DONE;
11516 else
11518 *expr_p = DECL_INITIAL (*expr_p);
11519 ret = GS_OK;
11521 break;
11523 case DECL_EXPR:
11524 ret = gimplify_decl_expr (expr_p, pre_p);
11525 break;
11527 case BIND_EXPR:
11528 ret = gimplify_bind_expr (expr_p, pre_p);
11529 break;
11531 case LOOP_EXPR:
11532 ret = gimplify_loop_expr (expr_p, pre_p);
11533 break;
11535 case SWITCH_EXPR:
11536 ret = gimplify_switch_expr (expr_p, pre_p);
11537 break;
11539 case EXIT_EXPR:
11540 ret = gimplify_exit_expr (expr_p);
11541 break;
11543 case GOTO_EXPR:
11544 /* If the target is not LABEL, then it is a computed jump
11545 and the target needs to be gimplified. */
11546 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11548 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11549 NULL, is_gimple_val, fb_rvalue);
11550 if (ret == GS_ERROR)
11551 break;
11553 gimplify_seq_add_stmt (pre_p,
11554 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11555 ret = GS_ALL_DONE;
11556 break;
11558 case PREDICT_EXPR:
11559 gimplify_seq_add_stmt (pre_p,
11560 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11561 PREDICT_EXPR_OUTCOME (*expr_p)));
11562 ret = GS_ALL_DONE;
11563 break;
11565 case LABEL_EXPR:
11566 ret = gimplify_label_expr (expr_p, pre_p);
11567 label = LABEL_EXPR_LABEL (*expr_p);
11568 gcc_assert (decl_function_context (label) == current_function_decl);
11570 /* If the label is used in a goto statement, or address of the label
11571 is taken, we need to unpoison all variables that were seen so far.
11572 Doing so would prevent us from reporting a false positives. */
11573 if (asan_poisoned_variables
11574 && asan_used_labels != NULL
11575 && asan_used_labels->contains (label))
11576 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11577 break;
11579 case CASE_LABEL_EXPR:
11580 ret = gimplify_case_label_expr (expr_p, pre_p);
11582 if (gimplify_ctxp->live_switch_vars)
11583 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11584 pre_p);
11585 break;
11587 case RETURN_EXPR:
11588 ret = gimplify_return_expr (*expr_p, pre_p);
11589 break;
11591 case CONSTRUCTOR:
11592 /* Don't reduce this in place; let gimplify_init_constructor work its
11593 magic. Buf if we're just elaborating this for side effects, just
11594 gimplify any element that has side-effects. */
11595 if (fallback == fb_none)
11597 unsigned HOST_WIDE_INT ix;
11598 tree val;
11599 tree temp = NULL_TREE;
11600 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11601 if (TREE_SIDE_EFFECTS (val))
11602 append_to_statement_list (val, &temp);
11604 *expr_p = temp;
11605 ret = temp ? GS_OK : GS_ALL_DONE;
11607 /* C99 code may assign to an array in a constructed
11608 structure or union, and this has undefined behavior only
11609 on execution, so create a temporary if an lvalue is
11610 required. */
11611 else if (fallback == fb_lvalue)
11613 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11614 mark_addressable (*expr_p);
11615 ret = GS_OK;
11617 else
11618 ret = GS_ALL_DONE;
11619 break;
11621 /* The following are special cases that are not handled by the
11622 original GIMPLE grammar. */
11624 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11625 eliminated. */
11626 case SAVE_EXPR:
11627 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11628 break;
11630 case BIT_FIELD_REF:
11631 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11632 post_p, is_gimple_lvalue, fb_either);
11633 recalculate_side_effects (*expr_p);
11634 break;
11636 case TARGET_MEM_REF:
11638 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11640 if (TMR_BASE (*expr_p))
11641 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11642 post_p, is_gimple_mem_ref_addr, fb_either);
11643 if (TMR_INDEX (*expr_p))
11644 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11645 post_p, is_gimple_val, fb_rvalue);
11646 if (TMR_INDEX2 (*expr_p))
11647 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11648 post_p, is_gimple_val, fb_rvalue);
11649 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11650 ret = MIN (r0, r1);
11652 break;
11654 case NON_LVALUE_EXPR:
11655 /* This should have been stripped above. */
11656 gcc_unreachable ();
11658 case ASM_EXPR:
11659 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11660 break;
11662 case TRY_FINALLY_EXPR:
11663 case TRY_CATCH_EXPR:
11665 gimple_seq eval, cleanup;
11666 gtry *try_;
11668 /* Calls to destructors are generated automatically in FINALLY/CATCH
11669 block. They should have location as UNKNOWN_LOCATION. However,
11670 gimplify_call_expr will reset these call stmts to input_location
11671 if it finds stmt's location is unknown. To prevent resetting for
11672 destructors, we set the input_location to unknown.
11673 Note that this only affects the destructor calls in FINALLY/CATCH
11674 block, and will automatically reset to its original value by the
11675 end of gimplify_expr. */
11676 input_location = UNKNOWN_LOCATION;
11677 eval = cleanup = NULL;
11678 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11679 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11680 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11681 if (gimple_seq_empty_p (cleanup))
11683 gimple_seq_add_seq (pre_p, eval);
11684 ret = GS_ALL_DONE;
11685 break;
11687 try_ = gimple_build_try (eval, cleanup,
11688 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11689 ? GIMPLE_TRY_FINALLY
11690 : GIMPLE_TRY_CATCH);
11691 if (EXPR_HAS_LOCATION (save_expr))
11692 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11693 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11694 gimple_set_location (try_, saved_location);
11695 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11696 gimple_try_set_catch_is_cleanup (try_,
11697 TRY_CATCH_IS_CLEANUP (*expr_p));
11698 gimplify_seq_add_stmt (pre_p, try_);
11699 ret = GS_ALL_DONE;
11700 break;
11703 case CLEANUP_POINT_EXPR:
11704 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11705 break;
11707 case TARGET_EXPR:
11708 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11709 break;
11711 case CATCH_EXPR:
11713 gimple *c;
11714 gimple_seq handler = NULL;
11715 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11716 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11717 gimplify_seq_add_stmt (pre_p, c);
11718 ret = GS_ALL_DONE;
11719 break;
11722 case EH_FILTER_EXPR:
11724 gimple *ehf;
11725 gimple_seq failure = NULL;
11727 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11728 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11729 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11730 gimplify_seq_add_stmt (pre_p, ehf);
11731 ret = GS_ALL_DONE;
11732 break;
11735 case OBJ_TYPE_REF:
11737 enum gimplify_status r0, r1;
11738 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11739 post_p, is_gimple_val, fb_rvalue);
11740 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11741 post_p, is_gimple_val, fb_rvalue);
11742 TREE_SIDE_EFFECTS (*expr_p) = 0;
11743 ret = MIN (r0, r1);
11745 break;
11747 case LABEL_DECL:
11748 /* We get here when taking the address of a label. We mark
11749 the label as "forced"; meaning it can never be removed and
11750 it is a potential target for any computed goto. */
11751 FORCED_LABEL (*expr_p) = 1;
11752 ret = GS_ALL_DONE;
11753 break;
11755 case STATEMENT_LIST:
11756 ret = gimplify_statement_list (expr_p, pre_p);
11757 break;
11759 case WITH_SIZE_EXPR:
11761 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11762 post_p == &internal_post ? NULL : post_p,
11763 gimple_test_f, fallback);
11764 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11765 is_gimple_val, fb_rvalue);
11766 ret = GS_ALL_DONE;
11768 break;
11770 case VAR_DECL:
11771 case PARM_DECL:
11772 ret = gimplify_var_or_parm_decl (expr_p);
11773 break;
11775 case RESULT_DECL:
11776 /* When within an OMP context, notice uses of variables. */
11777 if (gimplify_omp_ctxp)
11778 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11779 ret = GS_ALL_DONE;
11780 break;
11782 case SSA_NAME:
11783 /* Allow callbacks into the gimplifier during optimization. */
11784 ret = GS_ALL_DONE;
11785 break;
11787 case OMP_PARALLEL:
11788 gimplify_omp_parallel (expr_p, pre_p);
11789 ret = GS_ALL_DONE;
11790 break;
11792 case OMP_TASK:
11793 gimplify_omp_task (expr_p, pre_p);
11794 ret = GS_ALL_DONE;
11795 break;
11797 case OMP_FOR:
11798 case OMP_SIMD:
11799 case CILK_SIMD:
11800 case CILK_FOR:
11801 case OMP_DISTRIBUTE:
11802 case OMP_TASKLOOP:
11803 case OACC_LOOP:
11804 ret = gimplify_omp_for (expr_p, pre_p);
11805 break;
11807 case OACC_CACHE:
11808 gimplify_oacc_cache (expr_p, pre_p);
11809 ret = GS_ALL_DONE;
11810 break;
11812 case OACC_DECLARE:
11813 gimplify_oacc_declare (expr_p, pre_p);
11814 ret = GS_ALL_DONE;
11815 break;
11817 case OACC_HOST_DATA:
11818 case OACC_DATA:
11819 case OACC_KERNELS:
11820 case OACC_PARALLEL:
11821 case OMP_SECTIONS:
11822 case OMP_SINGLE:
11823 case OMP_TARGET:
11824 case OMP_TARGET_DATA:
11825 case OMP_TEAMS:
11826 gimplify_omp_workshare (expr_p, pre_p);
11827 ret = GS_ALL_DONE;
11828 break;
11830 case OACC_ENTER_DATA:
11831 case OACC_EXIT_DATA:
11832 case OACC_UPDATE:
11833 case OMP_TARGET_UPDATE:
11834 case OMP_TARGET_ENTER_DATA:
11835 case OMP_TARGET_EXIT_DATA:
11836 gimplify_omp_target_update (expr_p, pre_p);
11837 ret = GS_ALL_DONE;
11838 break;
11840 case OMP_SECTION:
11841 case OMP_MASTER:
11842 case OMP_TASKGROUP:
11843 case OMP_ORDERED:
11844 case OMP_CRITICAL:
11846 gimple_seq body = NULL;
11847 gimple *g;
11849 gimplify_and_add (OMP_BODY (*expr_p), &body);
11850 switch (TREE_CODE (*expr_p))
11852 case OMP_SECTION:
11853 g = gimple_build_omp_section (body);
11854 break;
11855 case OMP_MASTER:
11856 g = gimple_build_omp_master (body);
11857 break;
11858 case OMP_TASKGROUP:
11860 gimple_seq cleanup = NULL;
11861 tree fn
11862 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11863 g = gimple_build_call (fn, 0);
11864 gimple_seq_add_stmt (&cleanup, g);
11865 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11866 body = NULL;
11867 gimple_seq_add_stmt (&body, g);
11868 g = gimple_build_omp_taskgroup (body);
11870 break;
11871 case OMP_ORDERED:
11872 g = gimplify_omp_ordered (*expr_p, body);
11873 break;
11874 case OMP_CRITICAL:
11875 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11876 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11877 gimplify_adjust_omp_clauses (pre_p, body,
11878 &OMP_CRITICAL_CLAUSES (*expr_p),
11879 OMP_CRITICAL);
11880 g = gimple_build_omp_critical (body,
11881 OMP_CRITICAL_NAME (*expr_p),
11882 OMP_CRITICAL_CLAUSES (*expr_p));
11883 break;
11884 default:
11885 gcc_unreachable ();
11887 gimplify_seq_add_stmt (pre_p, g);
11888 ret = GS_ALL_DONE;
11889 break;
11892 case OMP_ATOMIC:
11893 case OMP_ATOMIC_READ:
11894 case OMP_ATOMIC_CAPTURE_OLD:
11895 case OMP_ATOMIC_CAPTURE_NEW:
11896 ret = gimplify_omp_atomic (expr_p, pre_p);
11897 break;
11899 case TRANSACTION_EXPR:
11900 ret = gimplify_transaction (expr_p, pre_p);
11901 break;
11903 case TRUTH_AND_EXPR:
11904 case TRUTH_OR_EXPR:
11905 case TRUTH_XOR_EXPR:
11907 tree orig_type = TREE_TYPE (*expr_p);
11908 tree new_type, xop0, xop1;
11909 *expr_p = gimple_boolify (*expr_p);
11910 new_type = TREE_TYPE (*expr_p);
11911 if (!useless_type_conversion_p (orig_type, new_type))
11913 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11914 ret = GS_OK;
11915 break;
11918 /* Boolified binary truth expressions are semantically equivalent
11919 to bitwise binary expressions. Canonicalize them to the
11920 bitwise variant. */
11921 switch (TREE_CODE (*expr_p))
11923 case TRUTH_AND_EXPR:
11924 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11925 break;
11926 case TRUTH_OR_EXPR:
11927 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11928 break;
11929 case TRUTH_XOR_EXPR:
11930 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11931 break;
11932 default:
11933 break;
11935 /* Now make sure that operands have compatible type to
11936 expression's new_type. */
11937 xop0 = TREE_OPERAND (*expr_p, 0);
11938 xop1 = TREE_OPERAND (*expr_p, 1);
11939 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11940 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11941 new_type,
11942 xop0);
11943 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11944 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11945 new_type,
11946 xop1);
11947 /* Continue classified as tcc_binary. */
11948 goto expr_2;
11951 case VEC_COND_EXPR:
11953 enum gimplify_status r0, r1, r2;
11955 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11956 post_p, is_gimple_condexpr, fb_rvalue);
11957 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11958 post_p, is_gimple_val, fb_rvalue);
11959 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11960 post_p, is_gimple_val, fb_rvalue);
11962 ret = MIN (MIN (r0, r1), r2);
11963 recalculate_side_effects (*expr_p);
11965 break;
11967 case FMA_EXPR:
11968 case VEC_PERM_EXPR:
11969 /* Classified as tcc_expression. */
11970 goto expr_3;
11972 case BIT_INSERT_EXPR:
11973 /* Argument 3 is a constant. */
11974 goto expr_2;
11976 case POINTER_PLUS_EXPR:
11978 enum gimplify_status r0, r1;
11979 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11980 post_p, is_gimple_val, fb_rvalue);
11981 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11982 post_p, is_gimple_val, fb_rvalue);
11983 recalculate_side_effects (*expr_p);
11984 ret = MIN (r0, r1);
11985 break;
11988 case CILK_SYNC_STMT:
11990 if (!fn_contains_cilk_spawn_p (cfun))
11992 error_at (EXPR_LOCATION (*expr_p),
11993 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11994 ret = GS_ERROR;
11996 else
11998 gimplify_cilk_sync (expr_p, pre_p);
11999 ret = GS_ALL_DONE;
12001 break;
12004 default:
12005 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12007 case tcc_comparison:
12008 /* Handle comparison of objects of non scalar mode aggregates
12009 with a call to memcmp. It would be nice to only have to do
12010 this for variable-sized objects, but then we'd have to allow
12011 the same nest of reference nodes we allow for MODIFY_EXPR and
12012 that's too complex.
12014 Compare scalar mode aggregates as scalar mode values. Using
12015 memcmp for them would be very inefficient at best, and is
12016 plain wrong if bitfields are involved. */
12018 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12020 /* Vector comparisons need no boolification. */
12021 if (TREE_CODE (type) == VECTOR_TYPE)
12022 goto expr_2;
12023 else if (!AGGREGATE_TYPE_P (type))
12025 tree org_type = TREE_TYPE (*expr_p);
12026 *expr_p = gimple_boolify (*expr_p);
12027 if (!useless_type_conversion_p (org_type,
12028 TREE_TYPE (*expr_p)))
12030 *expr_p = fold_convert_loc (input_location,
12031 org_type, *expr_p);
12032 ret = GS_OK;
12034 else
12035 goto expr_2;
12037 else if (TYPE_MODE (type) != BLKmode)
12038 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12039 else
12040 ret = gimplify_variable_sized_compare (expr_p);
12042 break;
12045 /* If *EXPR_P does not need to be special-cased, handle it
12046 according to its class. */
12047 case tcc_unary:
12048 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12049 post_p, is_gimple_val, fb_rvalue);
12050 break;
12052 case tcc_binary:
12053 expr_2:
12055 enum gimplify_status r0, r1;
12057 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12058 post_p, is_gimple_val, fb_rvalue);
12059 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12060 post_p, is_gimple_val, fb_rvalue);
12062 ret = MIN (r0, r1);
12063 break;
12066 expr_3:
12068 enum gimplify_status r0, r1, r2;
12070 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12071 post_p, is_gimple_val, fb_rvalue);
12072 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12073 post_p, is_gimple_val, fb_rvalue);
12074 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12075 post_p, is_gimple_val, fb_rvalue);
12077 ret = MIN (MIN (r0, r1), r2);
12078 break;
12081 case tcc_declaration:
12082 case tcc_constant:
12083 ret = GS_ALL_DONE;
12084 goto dont_recalculate;
12086 default:
12087 gcc_unreachable ();
12090 recalculate_side_effects (*expr_p);
12092 dont_recalculate:
12093 break;
12096 gcc_assert (*expr_p || ret != GS_OK);
12098 while (ret == GS_OK);
12100 /* If we encountered an error_mark somewhere nested inside, either
12101 stub out the statement or propagate the error back out. */
12102 if (ret == GS_ERROR)
12104 if (is_statement)
12105 *expr_p = NULL;
12106 goto out;
12109 /* This was only valid as a return value from the langhook, which
12110 we handled. Make sure it doesn't escape from any other context. */
12111 gcc_assert (ret != GS_UNHANDLED);
12113 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12115 /* We aren't looking for a value, and we don't have a valid
12116 statement. If it doesn't have side-effects, throw it away.
12117 We can also get here with code such as "*&&L;", where L is
12118 a LABEL_DECL that is marked as FORCED_LABEL. */
12119 if (TREE_CODE (*expr_p) == LABEL_DECL
12120 || !TREE_SIDE_EFFECTS (*expr_p))
12121 *expr_p = NULL;
12122 else if (!TREE_THIS_VOLATILE (*expr_p))
12124 /* This is probably a _REF that contains something nested that
12125 has side effects. Recurse through the operands to find it. */
12126 enum tree_code code = TREE_CODE (*expr_p);
12128 switch (code)
12130 case COMPONENT_REF:
12131 case REALPART_EXPR:
12132 case IMAGPART_EXPR:
12133 case VIEW_CONVERT_EXPR:
12134 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12135 gimple_test_f, fallback);
12136 break;
12138 case ARRAY_REF:
12139 case ARRAY_RANGE_REF:
12140 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12141 gimple_test_f, fallback);
12142 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12143 gimple_test_f, fallback);
12144 break;
12146 default:
12147 /* Anything else with side-effects must be converted to
12148 a valid statement before we get here. */
12149 gcc_unreachable ();
12152 *expr_p = NULL;
12154 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12155 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12157 /* Historically, the compiler has treated a bare reference
12158 to a non-BLKmode volatile lvalue as forcing a load. */
12159 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12161 /* Normally, we do not want to create a temporary for a
12162 TREE_ADDRESSABLE type because such a type should not be
12163 copied by bitwise-assignment. However, we make an
12164 exception here, as all we are doing here is ensuring that
12165 we read the bytes that make up the type. We use
12166 create_tmp_var_raw because create_tmp_var will abort when
12167 given a TREE_ADDRESSABLE type. */
12168 tree tmp = create_tmp_var_raw (type, "vol");
12169 gimple_add_tmp_var (tmp);
12170 gimplify_assign (tmp, *expr_p, pre_p);
12171 *expr_p = NULL;
12173 else
12174 /* We can't do anything useful with a volatile reference to
12175 an incomplete type, so just throw it away. Likewise for
12176 a BLKmode type, since any implicit inner load should
12177 already have been turned into an explicit one by the
12178 gimplification process. */
12179 *expr_p = NULL;
12182 /* If we are gimplifying at the statement level, we're done. Tack
12183 everything together and return. */
12184 if (fallback == fb_none || is_statement)
12186 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12187 it out for GC to reclaim it. */
12188 *expr_p = NULL_TREE;
12190 if (!gimple_seq_empty_p (internal_pre)
12191 || !gimple_seq_empty_p (internal_post))
12193 gimplify_seq_add_seq (&internal_pre, internal_post);
12194 gimplify_seq_add_seq (pre_p, internal_pre);
12197 /* The result of gimplifying *EXPR_P is going to be the last few
12198 statements in *PRE_P and *POST_P. Add location information
12199 to all the statements that were added by the gimplification
12200 helpers. */
12201 if (!gimple_seq_empty_p (*pre_p))
12202 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12204 if (!gimple_seq_empty_p (*post_p))
12205 annotate_all_with_location_after (*post_p, post_last_gsi,
12206 input_location);
12208 goto out;
12211 #ifdef ENABLE_GIMPLE_CHECKING
12212 if (*expr_p)
12214 enum tree_code code = TREE_CODE (*expr_p);
12215 /* These expressions should already be in gimple IR form. */
12216 gcc_assert (code != MODIFY_EXPR
12217 && code != ASM_EXPR
12218 && code != BIND_EXPR
12219 && code != CATCH_EXPR
12220 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12221 && code != EH_FILTER_EXPR
12222 && code != GOTO_EXPR
12223 && code != LABEL_EXPR
12224 && code != LOOP_EXPR
12225 && code != SWITCH_EXPR
12226 && code != TRY_FINALLY_EXPR
12227 && code != OACC_PARALLEL
12228 && code != OACC_KERNELS
12229 && code != OACC_DATA
12230 && code != OACC_HOST_DATA
12231 && code != OACC_DECLARE
12232 && code != OACC_UPDATE
12233 && code != OACC_ENTER_DATA
12234 && code != OACC_EXIT_DATA
12235 && code != OACC_CACHE
12236 && code != OMP_CRITICAL
12237 && code != OMP_FOR
12238 && code != OACC_LOOP
12239 && code != OMP_MASTER
12240 && code != OMP_TASKGROUP
12241 && code != OMP_ORDERED
12242 && code != OMP_PARALLEL
12243 && code != OMP_SECTIONS
12244 && code != OMP_SECTION
12245 && code != OMP_SINGLE);
12247 #endif
12249 /* Otherwise we're gimplifying a subexpression, so the resulting
12250 value is interesting. If it's a valid operand that matches
12251 GIMPLE_TEST_F, we're done. Unless we are handling some
12252 post-effects internally; if that's the case, we need to copy into
12253 a temporary before adding the post-effects to POST_P. */
12254 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12255 goto out;
12257 /* Otherwise, we need to create a new temporary for the gimplified
12258 expression. */
12260 /* We can't return an lvalue if we have an internal postqueue. The
12261 object the lvalue refers to would (probably) be modified by the
12262 postqueue; we need to copy the value out first, which means an
12263 rvalue. */
12264 if ((fallback & fb_lvalue)
12265 && gimple_seq_empty_p (internal_post)
12266 && is_gimple_addressable (*expr_p))
12268 /* An lvalue will do. Take the address of the expression, store it
12269 in a temporary, and replace the expression with an INDIRECT_REF of
12270 that temporary. */
12271 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12272 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12273 *expr_p = build_simple_mem_ref (tmp);
12275 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12277 /* An rvalue will do. Assign the gimplified expression into a
12278 new temporary TMP and replace the original expression with
12279 TMP. First, make sure that the expression has a type so that
12280 it can be assigned into a temporary. */
12281 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12282 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12284 else
12286 #ifdef ENABLE_GIMPLE_CHECKING
12287 if (!(fallback & fb_mayfail))
12289 fprintf (stderr, "gimplification failed:\n");
12290 print_generic_expr (stderr, *expr_p);
12291 debug_tree (*expr_p);
12292 internal_error ("gimplification failed");
12294 #endif
12295 gcc_assert (fallback & fb_mayfail);
12297 /* If this is an asm statement, and the user asked for the
12298 impossible, don't die. Fail and let gimplify_asm_expr
12299 issue an error. */
12300 ret = GS_ERROR;
12301 goto out;
12304 /* Make sure the temporary matches our predicate. */
12305 gcc_assert ((*gimple_test_f) (*expr_p));
12307 if (!gimple_seq_empty_p (internal_post))
12309 annotate_all_with_location (internal_post, input_location);
12310 gimplify_seq_add_seq (pre_p, internal_post);
12313 out:
12314 input_location = saved_location;
12315 return ret;
12318 /* Like gimplify_expr but make sure the gimplified result is not itself
12319 a SSA name (but a decl if it were). Temporaries required by
12320 evaluating *EXPR_P may be still SSA names. */
12322 static enum gimplify_status
12323 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12324 bool (*gimple_test_f) (tree), fallback_t fallback,
12325 bool allow_ssa)
12327 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12328 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12329 gimple_test_f, fallback);
12330 if (! allow_ssa
12331 && TREE_CODE (*expr_p) == SSA_NAME)
12333 tree name = *expr_p;
12334 if (was_ssa_name_p)
12335 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12336 else
12338 /* Avoid the extra copy if possible. */
12339 *expr_p = create_tmp_reg (TREE_TYPE (name));
12340 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12341 release_ssa_name (name);
12344 return ret;
12347 /* Look through TYPE for variable-sized objects and gimplify each such
12348 size that we find. Add to LIST_P any statements generated. */
12350 void
12351 gimplify_type_sizes (tree type, gimple_seq *list_p)
12353 tree field, t;
12355 if (type == NULL || type == error_mark_node)
12356 return;
12358 /* We first do the main variant, then copy into any other variants. */
12359 type = TYPE_MAIN_VARIANT (type);
12361 /* Avoid infinite recursion. */
12362 if (TYPE_SIZES_GIMPLIFIED (type))
12363 return;
12365 TYPE_SIZES_GIMPLIFIED (type) = 1;
12367 switch (TREE_CODE (type))
12369 case INTEGER_TYPE:
12370 case ENUMERAL_TYPE:
12371 case BOOLEAN_TYPE:
12372 case REAL_TYPE:
12373 case FIXED_POINT_TYPE:
12374 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12375 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12377 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12379 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12380 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12382 break;
12384 case ARRAY_TYPE:
12385 /* These types may not have declarations, so handle them here. */
12386 gimplify_type_sizes (TREE_TYPE (type), list_p);
12387 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12388 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12389 with assigned stack slots, for -O1+ -g they should be tracked
12390 by VTA. */
12391 if (!(TYPE_NAME (type)
12392 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12393 && DECL_IGNORED_P (TYPE_NAME (type)))
12394 && TYPE_DOMAIN (type)
12395 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12397 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12398 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12399 DECL_IGNORED_P (t) = 0;
12400 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12401 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12402 DECL_IGNORED_P (t) = 0;
12404 break;
12406 case RECORD_TYPE:
12407 case UNION_TYPE:
12408 case QUAL_UNION_TYPE:
12409 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12410 if (TREE_CODE (field) == FIELD_DECL)
12412 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12413 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12414 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12415 gimplify_type_sizes (TREE_TYPE (field), list_p);
12417 break;
12419 case POINTER_TYPE:
12420 case REFERENCE_TYPE:
12421 /* We used to recurse on the pointed-to type here, which turned out to
12422 be incorrect because its definition might refer to variables not
12423 yet initialized at this point if a forward declaration is involved.
12425 It was actually useful for anonymous pointed-to types to ensure
12426 that the sizes evaluation dominates every possible later use of the
12427 values. Restricting to such types here would be safe since there
12428 is no possible forward declaration around, but would introduce an
12429 undesirable middle-end semantic to anonymity. We then defer to
12430 front-ends the responsibility of ensuring that the sizes are
12431 evaluated both early and late enough, e.g. by attaching artificial
12432 type declarations to the tree. */
12433 break;
12435 default:
12436 break;
12439 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12440 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12442 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12444 TYPE_SIZE (t) = TYPE_SIZE (type);
12445 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12446 TYPE_SIZES_GIMPLIFIED (t) = 1;
12450 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12451 a size or position, has had all of its SAVE_EXPRs evaluated.
12452 We add any required statements to *STMT_P. */
12454 void
12455 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12457 tree expr = *expr_p;
12459 /* We don't do anything if the value isn't there, is constant, or contains
12460 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12461 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12462 will want to replace it with a new variable, but that will cause problems
12463 if this type is from outside the function. It's OK to have that here. */
12464 if (is_gimple_sizepos (expr))
12465 return;
12467 *expr_p = unshare_expr (expr);
12469 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12470 if the def vanishes. */
12471 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12474 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12475 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12476 is true, also gimplify the parameters. */
12478 gbind *
12479 gimplify_body (tree fndecl, bool do_parms)
12481 location_t saved_location = input_location;
12482 gimple_seq parm_stmts, seq;
12483 gimple *outer_stmt;
12484 gbind *outer_bind;
12485 struct cgraph_node *cgn;
12487 timevar_push (TV_TREE_GIMPLIFY);
12489 init_tree_ssa (cfun);
12491 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12492 gimplification. */
12493 default_rtl_profile ();
12495 gcc_assert (gimplify_ctxp == NULL);
12496 push_gimplify_context (true);
12498 if (flag_openacc || flag_openmp)
12500 gcc_assert (gimplify_omp_ctxp == NULL);
12501 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12502 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12505 /* Unshare most shared trees in the body and in that of any nested functions.
12506 It would seem we don't have to do this for nested functions because
12507 they are supposed to be output and then the outer function gimplified
12508 first, but the g++ front end doesn't always do it that way. */
12509 unshare_body (fndecl);
12510 unvisit_body (fndecl);
12512 cgn = cgraph_node::get (fndecl);
12513 if (cgn && cgn->origin)
12514 nonlocal_vlas = new hash_set<tree>;
12516 /* Make sure input_location isn't set to something weird. */
12517 input_location = DECL_SOURCE_LOCATION (fndecl);
12519 /* Resolve callee-copies. This has to be done before processing
12520 the body so that DECL_VALUE_EXPR gets processed correctly. */
12521 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12523 /* Gimplify the function's body. */
12524 seq = NULL;
12525 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12526 outer_stmt = gimple_seq_first_stmt (seq);
12527 if (!outer_stmt)
12529 outer_stmt = gimple_build_nop ();
12530 gimplify_seq_add_stmt (&seq, outer_stmt);
12533 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12534 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12535 if (gimple_code (outer_stmt) == GIMPLE_BIND
12536 && gimple_seq_first (seq) == gimple_seq_last (seq))
12537 outer_bind = as_a <gbind *> (outer_stmt);
12538 else
12539 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12541 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12543 /* If we had callee-copies statements, insert them at the beginning
12544 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12545 if (!gimple_seq_empty_p (parm_stmts))
12547 tree parm;
12549 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12550 gimple_bind_set_body (outer_bind, parm_stmts);
12552 for (parm = DECL_ARGUMENTS (current_function_decl);
12553 parm; parm = DECL_CHAIN (parm))
12554 if (DECL_HAS_VALUE_EXPR_P (parm))
12556 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12557 DECL_IGNORED_P (parm) = 0;
12561 if (nonlocal_vlas)
12563 if (nonlocal_vla_vars)
12565 /* tree-nested.c may later on call declare_vars (..., true);
12566 which relies on BLOCK_VARS chain to be the tail of the
12567 gimple_bind_vars chain. Ensure we don't violate that
12568 assumption. */
12569 if (gimple_bind_block (outer_bind)
12570 == DECL_INITIAL (current_function_decl))
12571 declare_vars (nonlocal_vla_vars, outer_bind, true);
12572 else
12573 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12574 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12575 nonlocal_vla_vars);
12576 nonlocal_vla_vars = NULL_TREE;
12578 delete nonlocal_vlas;
12579 nonlocal_vlas = NULL;
12582 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12583 && gimplify_omp_ctxp)
12585 delete_omp_context (gimplify_omp_ctxp);
12586 gimplify_omp_ctxp = NULL;
12589 pop_gimplify_context (outer_bind);
12590 gcc_assert (gimplify_ctxp == NULL);
12592 if (flag_checking && !seen_error ())
12593 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12595 timevar_pop (TV_TREE_GIMPLIFY);
12596 input_location = saved_location;
12598 return outer_bind;
12601 typedef char *char_p; /* For DEF_VEC_P. */
12603 /* Return whether we should exclude FNDECL from instrumentation. */
12605 static bool
12606 flag_instrument_functions_exclude_p (tree fndecl)
12608 vec<char_p> *v;
12610 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12611 if (v && v->length () > 0)
12613 const char *name;
12614 int i;
12615 char *s;
12617 name = lang_hooks.decl_printable_name (fndecl, 0);
12618 FOR_EACH_VEC_ELT (*v, i, s)
12619 if (strstr (name, s) != NULL)
12620 return true;
12623 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12624 if (v && v->length () > 0)
12626 const char *name;
12627 int i;
12628 char *s;
12630 name = DECL_SOURCE_FILE (fndecl);
12631 FOR_EACH_VEC_ELT (*v, i, s)
12632 if (strstr (name, s) != NULL)
12633 return true;
12636 return false;
12639 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12640 node for the function we want to gimplify.
12642 Return the sequence of GIMPLE statements corresponding to the body
12643 of FNDECL. */
12645 void
12646 gimplify_function_tree (tree fndecl)
12648 tree parm, ret;
12649 gimple_seq seq;
12650 gbind *bind;
12652 gcc_assert (!gimple_body (fndecl));
12654 if (DECL_STRUCT_FUNCTION (fndecl))
12655 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12656 else
12657 push_struct_function (fndecl);
12659 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12660 if necessary. */
12661 cfun->curr_properties |= PROP_gimple_lva;
12663 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12665 /* Preliminarily mark non-addressed complex variables as eligible
12666 for promotion to gimple registers. We'll transform their uses
12667 as we find them. */
12668 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12669 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12670 && !TREE_THIS_VOLATILE (parm)
12671 && !needs_to_live_in_memory (parm))
12672 DECL_GIMPLE_REG_P (parm) = 1;
12675 ret = DECL_RESULT (fndecl);
12676 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12677 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12678 && !needs_to_live_in_memory (ret))
12679 DECL_GIMPLE_REG_P (ret) = 1;
12681 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
12682 asan_poisoned_variables = new hash_set<tree> ();
12683 bind = gimplify_body (fndecl, true);
12684 if (asan_poisoned_variables)
12686 delete asan_poisoned_variables;
12687 asan_poisoned_variables = NULL;
12690 /* The tree body of the function is no longer needed, replace it
12691 with the new GIMPLE body. */
12692 seq = NULL;
12693 gimple_seq_add_stmt (&seq, bind);
12694 gimple_set_body (fndecl, seq);
12696 /* If we're instrumenting function entry/exit, then prepend the call to
12697 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12698 catch the exit hook. */
12699 /* ??? Add some way to ignore exceptions for this TFE. */
12700 if (flag_instrument_function_entry_exit
12701 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12702 /* Do not instrument extern inline functions. */
12703 && !(DECL_DECLARED_INLINE_P (fndecl)
12704 && DECL_EXTERNAL (fndecl)
12705 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12706 && !flag_instrument_functions_exclude_p (fndecl))
12708 tree x;
12709 gbind *new_bind;
12710 gimple *tf;
12711 gimple_seq cleanup = NULL, body = NULL;
12712 tree tmp_var;
12713 gcall *call;
12715 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12716 call = gimple_build_call (x, 1, integer_zero_node);
12717 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12718 gimple_call_set_lhs (call, tmp_var);
12719 gimplify_seq_add_stmt (&cleanup, call);
12720 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12721 call = gimple_build_call (x, 2,
12722 build_fold_addr_expr (current_function_decl),
12723 tmp_var);
12724 gimplify_seq_add_stmt (&cleanup, call);
12725 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12727 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12728 call = gimple_build_call (x, 1, integer_zero_node);
12729 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12730 gimple_call_set_lhs (call, tmp_var);
12731 gimplify_seq_add_stmt (&body, call);
12732 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12733 call = gimple_build_call (x, 2,
12734 build_fold_addr_expr (current_function_decl),
12735 tmp_var);
12736 gimplify_seq_add_stmt (&body, call);
12737 gimplify_seq_add_stmt (&body, tf);
12738 new_bind = gimple_build_bind (NULL, body, NULL);
12740 /* Replace the current function body with the body
12741 wrapped in the try/finally TF. */
12742 seq = NULL;
12743 gimple_seq_add_stmt (&seq, new_bind);
12744 gimple_set_body (fndecl, seq);
12745 bind = new_bind;
12748 if (sanitize_flags_p (SANITIZE_THREAD))
12750 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12751 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12752 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12753 /* Replace the current function body with the body
12754 wrapped in the try/finally TF. */
12755 seq = NULL;
12756 gimple_seq_add_stmt (&seq, new_bind);
12757 gimple_set_body (fndecl, seq);
12760 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12761 cfun->curr_properties |= PROP_gimple_any;
12763 pop_cfun ();
12765 dump_function (TDI_gimple, fndecl);
12768 /* Return a dummy expression of type TYPE in order to keep going after an
12769 error. */
12771 static tree
12772 dummy_object (tree type)
12774 tree t = build_int_cst (build_pointer_type (type), 0);
12775 return build2 (MEM_REF, type, t, t);
12778 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12779 builtin function, but a very special sort of operator. */
12781 enum gimplify_status
12782 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12783 gimple_seq *post_p ATTRIBUTE_UNUSED)
12785 tree promoted_type, have_va_type;
12786 tree valist = TREE_OPERAND (*expr_p, 0);
12787 tree type = TREE_TYPE (*expr_p);
12788 tree t, tag, aptag;
12789 location_t loc = EXPR_LOCATION (*expr_p);
12791 /* Verify that valist is of the proper type. */
12792 have_va_type = TREE_TYPE (valist);
12793 if (have_va_type == error_mark_node)
12794 return GS_ERROR;
12795 have_va_type = targetm.canonical_va_list_type (have_va_type);
12796 if (have_va_type == NULL_TREE
12797 && POINTER_TYPE_P (TREE_TYPE (valist)))
12798 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12799 have_va_type
12800 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12801 gcc_assert (have_va_type != NULL_TREE);
12803 /* Generate a diagnostic for requesting data of a type that cannot
12804 be passed through `...' due to type promotion at the call site. */
12805 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12806 != type)
12808 static bool gave_help;
12809 bool warned;
12810 /* Use the expansion point to handle cases such as passing bool (defined
12811 in a system header) through `...'. */
12812 source_location xloc
12813 = expansion_point_location_if_in_system_header (loc);
12815 /* Unfortunately, this is merely undefined, rather than a constraint
12816 violation, so we cannot make this an error. If this call is never
12817 executed, the program is still strictly conforming. */
12818 warned = warning_at (xloc, 0,
12819 "%qT is promoted to %qT when passed through %<...%>",
12820 type, promoted_type);
12821 if (!gave_help && warned)
12823 gave_help = true;
12824 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12825 promoted_type, type);
12828 /* We can, however, treat "undefined" any way we please.
12829 Call abort to encourage the user to fix the program. */
12830 if (warned)
12831 inform (xloc, "if this code is reached, the program will abort");
12832 /* Before the abort, allow the evaluation of the va_list
12833 expression to exit or longjmp. */
12834 gimplify_and_add (valist, pre_p);
12835 t = build_call_expr_loc (loc,
12836 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12837 gimplify_and_add (t, pre_p);
12839 /* This is dead code, but go ahead and finish so that the
12840 mode of the result comes out right. */
12841 *expr_p = dummy_object (type);
12842 return GS_ALL_DONE;
12845 tag = build_int_cst (build_pointer_type (type), 0);
12846 aptag = build_int_cst (TREE_TYPE (valist), 0);
12848 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12849 valist, tag, aptag);
12851 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12852 needs to be expanded. */
12853 cfun->curr_properties &= ~PROP_gimple_lva;
12855 return GS_OK;
12858 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12860 DST/SRC are the destination and source respectively. You can pass
12861 ungimplified trees in DST or SRC, in which case they will be
12862 converted to a gimple operand if necessary.
12864 This function returns the newly created GIMPLE_ASSIGN tuple. */
12866 gimple *
12867 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12869 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12870 gimplify_and_add (t, seq_p);
12871 ggc_free (t);
12872 return gimple_seq_last_stmt (*seq_p);
12875 inline hashval_t
12876 gimplify_hasher::hash (const elt_t *p)
12878 tree t = p->val;
12879 return iterative_hash_expr (t, 0);
12882 inline bool
12883 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12885 tree t1 = p1->val;
12886 tree t2 = p2->val;
12887 enum tree_code code = TREE_CODE (t1);
12889 if (TREE_CODE (t2) != code
12890 || TREE_TYPE (t1) != TREE_TYPE (t2))
12891 return false;
12893 if (!operand_equal_p (t1, t2, 0))
12894 return false;
12896 /* Only allow them to compare equal if they also hash equal; otherwise
12897 results are nondeterminate, and we fail bootstrap comparison. */
12898 gcc_checking_assert (hash (p1) == hash (p2));
12900 return true;