/* Provenance: gcc/gimplify.c from official-gcc.git,
   blob 87082ad10d2a907b2b309ad16debecd9917d16dc (PR rtl-optimization/88018
   era).  The web-viewer extraction embedded original line numbers into the
   text and dropped blank and brace-only lines; the code below has been
   reconstructed accordingly.  */
/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
69 /* Hash set of poisoned variables in a bind expr. */
70 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Per-variable data-sharing attributes tracked while gimplifying an
   OpenMP/OpenACC region.  The low bits are sharing classes (masked by
   GOVD_DATA_SHARE_CLASS below); the higher bits are modifier flags.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 1048576,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 2097152,

  GOVD_NONTEMPORAL = 4194304,

  /* Mask of the mutually exclusive data-sharing classes.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OpenMP/OpenACC region currently being gimplified.  Values are
   bit patterns: the low bit (and bit 1) distinguish "combined"/variant
   forms of the same base region kind.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
160 /* Gimplify hashtable helper. */
162 struct gimplify_hasher : free_ptr_hash <elt_t>
164 static inline hashval_t hash (const elt_t *);
165 static inline bool equal (const elt_t *, const elt_t *);
168 struct gimplify_ctx
170 struct gimplify_ctx *prev_context;
172 vec<gbind *> bind_expr_stack;
173 tree temps;
174 gimple_seq conditional_cleanups;
175 tree exit_label;
176 tree return_temp;
178 vec<tree> case_labels;
179 hash_set<tree> *live_switch_vars;
180 /* The formal temporary table. Should this be persistent? */
181 hash_table<gimplify_hasher> *temp_htab;
183 int conditions;
184 unsigned into_ssa : 1;
185 unsigned allow_rhs_cond_expr : 1;
186 unsigned in_cleanup_point_expr : 1;
187 unsigned keep_stack : 1;
188 unsigned save_stack : 1;
189 unsigned in_switch_expr : 1;
/* Index into gimplify_omp_ctx::defaultmap, by category of variable.  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
200 struct gimplify_omp_ctx
202 struct gimplify_omp_ctx *outer_context;
203 splay_tree variables;
204 hash_set<tree> *privatized_types;
205 /* Iteration variables in an OMP_FOR. */
206 vec<tree> loop_iter_var;
207 location_t location;
208 enum omp_clause_default_kind default_kind;
209 enum omp_region_type region_type;
210 bool combined_loop;
211 bool distribute;
212 bool target_firstprivatize_array_bases;
213 int defaultmap[4];
216 static struct gimplify_ctx *gimplify_ctxp;
217 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
219 /* Forward declaration. */
220 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
221 static hash_map<tree, tree> *oacc_declare_returns;
222 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
223 bool (*) (tree), fallback_t, bool);
225 /* Shorter alias name for the above function for use in gimplify.c
226 only. */
228 static inline void
229 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
231 gimple_seq_add_stmt_without_update (seq_p, gs);
234 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
235 NULL, a new sequence is allocated. This function is
236 similar to gimple_seq_add_seq, but does not scan the operands.
237 During gimplification, we need to manipulate statement sequences
238 before the def/use vectors have been constructed. */
240 static void
241 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
243 gimple_stmt_iterator si;
245 if (src == NULL)
246 return;
248 si = gsi_last (*dst_p);
249 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
253 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
254 and popping gimplify contexts. */
256 static struct gimplify_ctx *ctx_pool = NULL;
258 /* Return a gimplify context struct from the pool. */
260 static inline struct gimplify_ctx *
261 ctx_alloc (void)
263 struct gimplify_ctx * c = ctx_pool;
265 if (c)
266 ctx_pool = c->prev_context;
267 else
268 c = XNEW (struct gimplify_ctx);
270 memset (c, '\0', sizeof (*c));
271 return c;
274 /* Put gimplify context C back into the pool. */
276 static inline void
277 ctx_free (struct gimplify_ctx *c)
279 c->prev_context = ctx_pool;
280 ctx_pool = c;
283 /* Free allocated ctx stack memory. */
285 void
286 free_gimplify_stack (void)
288 struct gimplify_ctx *c;
290 while ((c = ctx_pool))
292 ctx_pool = c->prev_context;
293 free (c);
298 /* Set up a context for the gimplifier. */
300 void
301 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
303 struct gimplify_ctx *c = ctx_alloc ();
305 c->prev_context = gimplify_ctxp;
306 gimplify_ctxp = c;
307 gimplify_ctxp->into_ssa = in_ssa;
308 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
311 /* Tear down a context for the gimplifier. If BODY is non-null, then
312 put the temporaries into the outer BIND_EXPR. Otherwise, put them
313 in the local_decls.
315 BODY is not a sequence, but the first tuple in a sequence. */
317 void
318 pop_gimplify_context (gimple *body)
320 struct gimplify_ctx *c = gimplify_ctxp;
322 gcc_assert (c
323 && (!c->bind_expr_stack.exists ()
324 || c->bind_expr_stack.is_empty ()));
325 c->bind_expr_stack.release ();
326 gimplify_ctxp = c->prev_context;
328 if (body)
329 declare_vars (c->temps, body, false);
330 else
331 record_vars (c->temps);
333 delete c->temp_htab;
334 c->temp_htab = NULL;
335 ctx_free (c);
338 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
340 static void
341 gimple_push_bind_expr (gbind *bind_stmt)
343 gimplify_ctxp->bind_expr_stack.reserve (8);
344 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
347 /* Pop the first element off the stack of bindings. */
349 static void
350 gimple_pop_bind_expr (void)
352 gimplify_ctxp->bind_expr_stack.pop ();
355 /* Return the first element of the stack of bindings. */
357 gbind *
358 gimple_current_bind_expr (void)
360 return gimplify_ctxp->bind_expr_stack.last ();
363 /* Return the stack of bindings created during gimplification. */
365 vec<gbind *>
366 gimple_bind_expr_stack (void)
368 return gimplify_ctxp->bind_expr_stack;
371 /* Return true iff there is a COND_EXPR between us and the innermost
372 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
374 static bool
375 gimple_conditional_context (void)
377 return gimplify_ctxp->conditions > 0;
380 /* Note that we've entered a COND_EXPR. */
382 static void
383 gimple_push_condition (void)
385 #ifdef ENABLE_GIMPLE_CHECKING
386 if (gimplify_ctxp->conditions == 0)
387 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
388 #endif
389 ++(gimplify_ctxp->conditions);
392 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
393 now, add any conditional cleanups we've seen to the prequeue. */
395 static void
396 gimple_pop_condition (gimple_seq *pre_p)
398 int conds = --(gimplify_ctxp->conditions);
400 gcc_assert (conds >= 0);
401 if (conds == 0)
403 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
404 gimplify_ctxp->conditional_cleanups = NULL;
408 /* A stable comparison routine for use with splay trees and DECLs. */
410 static int
411 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
413 tree a = (tree) xa;
414 tree b = (tree) xb;
416 return DECL_UID (a) - DECL_UID (b);
419 /* Create a new omp construct that deals with variable remapping. */
421 static struct gimplify_omp_ctx *
422 new_omp_context (enum omp_region_type region_type)
424 struct gimplify_omp_ctx *c;
426 c = XCNEW (struct gimplify_omp_ctx);
427 c->outer_context = gimplify_omp_ctxp;
428 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
429 c->privatized_types = new hash_set<tree>;
430 c->location = input_location;
431 c->region_type = region_type;
432 if ((region_type & ORT_TASK) == 0)
433 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
434 else
435 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
436 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
437 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
438 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
439 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
441 return c;
444 /* Destroy an omp construct that deals with variable remapping. */
446 static void
447 delete_omp_context (struct gimplify_omp_ctx *c)
449 splay_tree_delete (c->variables);
450 delete c->privatized_types;
451 c->loop_iter_var.release ();
452 XDELETE (c);
455 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
456 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
458 /* Both gimplify the statement T and append it to *SEQ_P. This function
459 behaves exactly as gimplify_stmt, but you don't have to pass T as a
460 reference. */
462 void
463 gimplify_and_add (tree t, gimple_seq *seq_p)
465 gimplify_stmt (&t, seq_p);
468 /* Gimplify statement T into sequence *SEQ_P, and return the first
469 tuple in the sequence of generated tuples for this statement.
470 Return NULL if gimplifying T produced no tuples. */
472 static gimple *
473 gimplify_and_return_first (tree t, gimple_seq *seq_p)
475 gimple_stmt_iterator last = gsi_last (*seq_p);
477 gimplify_and_add (t, seq_p);
479 if (!gsi_end_p (last))
481 gsi_next (&last);
482 return gsi_stmt (last);
484 else
485 return gimple_seq_first_stmt (*seq_p);
488 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
489 LHS, or for a call argument. */
491 static bool
492 is_gimple_mem_rhs (tree t)
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return is_gimple_val (t) || is_gimple_lvalue (t);
502 /* Return true if T is a CALL_EXPR or an expression that can be
503 assigned to a temporary. Note that this predicate should only be
504 used during gimplification. See the rationale for this in
505 gimplify_modify_expr. */
507 static bool
508 is_gimple_reg_rhs_or_call (tree t)
510 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
511 || TREE_CODE (t) == CALL_EXPR);
514 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
515 this predicate should only be used during gimplification. See the
516 rationale for this in gimplify_modify_expr. */
518 static bool
519 is_gimple_mem_rhs_or_call (tree t)
521 /* If we're dealing with a renamable type, either source or dest must be
522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t)))
524 return is_gimple_val (t);
525 else
526 return (is_gimple_val (t)
527 || is_gimple_lvalue (t)
528 || TREE_CLOBBER_P (t)
529 || TREE_CODE (t) == CALL_EXPR);
532 /* Create a temporary with a name derived from VAL. Subroutine of
533 lookup_tmp_var; nobody else should call this function. */
535 static inline tree
536 create_tmp_from_val (tree val)
538 /* Drop all qualifiers and address-space information from the value type. */
539 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
540 tree var = create_tmp_var (type, get_name (val));
541 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
542 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
543 DECL_GIMPLE_REG_P (var) = 1;
544 return var;
547 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
548 an existing expression temporary. */
550 static tree
551 lookup_tmp_var (tree val, bool is_formal)
553 tree ret;
555 /* If not optimizing, never really reuse a temporary. local-alloc
556 won't allocate any variable that is used in more than one basic
557 block, which means it will go into memory, causing much extra
558 work in reload and final and poorer code generation, outweighing
559 the extra memory allocation here. */
560 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
561 ret = create_tmp_from_val (val);
562 else
564 elt_t elt, *elt_p;
565 elt_t **slot;
567 elt.val = val;
568 if (!gimplify_ctxp->temp_htab)
569 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
570 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
571 if (*slot == NULL)
573 elt_p = XNEW (elt_t);
574 elt_p->val = val;
575 elt_p->temp = ret = create_tmp_from_val (val);
576 *slot = elt_p;
578 else
580 elt_p = *slot;
581 ret = elt_p->temp;
585 return ret;
588 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
590 static tree
591 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
592 bool is_formal, bool allow_ssa)
594 tree t, mod;
596 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
597 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
598 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
599 fb_rvalue);
601 if (allow_ssa
602 && gimplify_ctxp->into_ssa
603 && is_gimple_reg_type (TREE_TYPE (val)))
605 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
606 if (! gimple_in_ssa_p (cfun))
608 const char *name = get_name (val);
609 if (name)
610 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
613 else
614 t = lookup_tmp_var (val, is_formal);
616 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
618 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622 ggc_free (mod);
624 return t;
627 /* Return a formal temporary variable initialized with VAL. PRE_P is as
628 in gimplify_expr. Only use this function if:
630 1) The value of the unfactored expression represented by VAL will not
631 change between the initialization and use of the temporary, and
632 2) The temporary will not be otherwise modified.
634 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
635 and #2 means it is inappropriate for && temps.
637 For other cases, use get_initialized_tmp_var instead. */
639 tree
640 get_formal_tmp_var (tree val, gimple_seq *pre_p)
642 return internal_get_tmp_var (val, pre_p, NULL, true, true);
645 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
646 are as in gimplify_expr. */
648 tree
649 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
650 bool allow_ssa)
652 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
655 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
656 generate debug info for them; otherwise don't. */
658 void
659 declare_vars (tree vars, gimple *gs, bool debug_info)
661 tree last = vars;
662 if (last)
664 tree temps, block;
666 gbind *scope = as_a <gbind *> (gs);
668 temps = nreverse (last);
670 block = gimple_bind_block (scope);
671 gcc_assert (!block || TREE_CODE (block) == BLOCK);
672 if (!block || !debug_info)
674 DECL_CHAIN (last) = gimple_bind_vars (scope);
675 gimple_bind_set_vars (scope, temps);
677 else
679 /* We need to attach the nodes both to the BIND_EXPR and to its
680 associated BLOCK for debugging purposes. The key point here
681 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
682 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
683 if (BLOCK_VARS (block))
684 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
685 else
687 gimple_bind_set_vars (scope,
688 chainon (gimple_bind_vars (scope), temps));
689 BLOCK_VARS (block) = temps;
695 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
696 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
697 no such upper bound can be obtained. */
699 static void
700 force_constant_size (tree var)
702 /* The only attempt we make is by querying the maximum size of objects
703 of the variable's type. */
705 HOST_WIDE_INT max_size;
707 gcc_assert (VAR_P (var));
709 max_size = max_int_size_in_bytes (TREE_TYPE (var));
711 gcc_assert (max_size >= 0);
713 DECL_SIZE_UNIT (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
715 DECL_SIZE (var)
716 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
719 /* Push the temporary variable TMP into the current binding. */
721 void
722 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
724 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
726 /* Later processing assumes that the object size is constant, which might
727 not be true at this point. Force the use of a constant upper bound in
728 this case. */
729 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
730 force_constant_size (tmp);
732 DECL_CONTEXT (tmp) = fn->decl;
733 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
735 record_vars_into (tmp, fn->decl);
738 /* Push the temporary variable TMP into the current binding. */
740 void
741 gimple_add_tmp_var (tree tmp)
743 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
749 force_constant_size (tmp);
751 DECL_CONTEXT (tmp) = current_function_decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
754 if (gimplify_ctxp)
756 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
757 gimplify_ctxp->temps = tmp;
759 /* Mark temporaries local within the nearest enclosing parallel. */
760 if (gimplify_omp_ctxp)
762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
763 while (ctx
764 && (ctx->region_type == ORT_WORKSHARE
765 || ctx->region_type == ORT_TASKGROUP
766 || ctx->region_type == ORT_SIMD
767 || ctx->region_type == ORT_ACC))
768 ctx = ctx->outer_context;
769 if (ctx)
770 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
773 else if (cfun)
774 record_vars (tmp);
775 else
777 gimple_seq body_seq;
779 /* This case is for nested functions. We need to expose the locals
780 they create. */
781 body_seq = gimple_body (current_function_decl);
782 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
824 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
825 These nodes model computations that must be done once. If we were to
826 unshare something like SAVE_EXPR(i++), the gimplification process would
827 create wrong code. However, if DATA is non-null, it must hold a pointer
828 set that is used to unshare the subtrees of these nodes. */
830 static tree
831 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
833 tree t = *tp;
834 enum tree_code code = TREE_CODE (t);
836 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
837 copy their subtrees if we can make sure to do it only once. */
838 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
840 if (data && !((hash_set<tree> *)data)->add (t))
842 else
843 *walk_subtrees = 0;
846 /* Stop at types, decls, constants like copy_tree_r. */
847 else if (TREE_CODE_CLASS (code) == tcc_type
848 || TREE_CODE_CLASS (code) == tcc_declaration
849 || TREE_CODE_CLASS (code) == tcc_constant)
850 *walk_subtrees = 0;
852 /* Cope with the statement expression extension. */
853 else if (code == STATEMENT_LIST)
856 /* Leave the bulk of the work to copy_tree_r itself. */
857 else
858 copy_tree_r (tp, walk_subtrees, NULL);
860 return NULL_TREE;
863 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
864 If *TP has been visited already, then *TP is deeply copied by calling
865 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
867 static tree
868 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
870 tree t = *tp;
871 enum tree_code code = TREE_CODE (t);
873 /* Skip types, decls, and constants. But we do want to look at their
874 types and the bounds of types. Mark them as visited so we properly
875 unmark their subtrees on the unmark pass. If we've already seen them,
876 don't look down further. */
877 if (TREE_CODE_CLASS (code) == tcc_type
878 || TREE_CODE_CLASS (code) == tcc_declaration
879 || TREE_CODE_CLASS (code) == tcc_constant)
881 if (TREE_VISITED (t))
882 *walk_subtrees = 0;
883 else
884 TREE_VISITED (t) = 1;
887 /* If this node has been visited already, unshare it and don't look
888 any deeper. */
889 else if (TREE_VISITED (t))
891 walk_tree (tp, mostly_copy_tree_r, data, NULL);
892 *walk_subtrees = 0;
895 /* Otherwise, mark the node as visited and keep looking. */
896 else
897 TREE_VISITED (t) = 1;
899 return NULL_TREE;
902 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
903 copy_if_shared_r callback unmodified. */
905 static inline void
906 copy_if_shared (tree *tp, void *data)
908 walk_tree (tp, copy_if_shared_r, data, NULL);
911 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
912 any nested functions. */
914 static void
915 unshare_body (tree fndecl)
917 struct cgraph_node *cgn = cgraph_node::get (fndecl);
918 /* If the language requires deep unsharing, we need a pointer set to make
919 sure we don't repeatedly unshare subtrees of unshareable nodes. */
920 hash_set<tree> *visited
921 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
923 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
924 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
925 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
927 delete visited;
929 if (cgn)
930 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
931 unshare_body (cgn->decl);
934 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
935 Subtrees are walked until the first unvisited node is encountered. */
937 static tree
938 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
940 tree t = *tp;
942 /* If this node has been visited, unmark it and keep looking. */
943 if (TREE_VISITED (t))
944 TREE_VISITED (t) = 0;
946 /* Otherwise, don't look any deeper. */
947 else
948 *walk_subtrees = 0;
950 return NULL_TREE;
953 /* Unmark the visited trees rooted at *TP. */
955 static inline void
956 unmark_visited (tree *tp)
958 walk_tree (tp, unmark_visited_r, NULL, NULL);
961 /* Likewise, but mark all trees as not visited. */
963 static void
964 unvisit_body (tree fndecl)
966 struct cgraph_node *cgn = cgraph_node::get (fndecl);
968 unmark_visited (&DECL_SAVED_TREE (fndecl));
969 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
970 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
972 if (cgn)
973 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
974 unvisit_body (cgn->decl);
977 /* Unconditionally make an unshared copy of EXPR. This is used when using
978 stored expressions which span multiple functions, such as BINFO_VTABLE,
979 as the normal unsharing process can't tell that they're shared. */
981 tree
982 unshare_expr (tree expr)
984 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
985 return expr;
988 /* Worker for unshare_expr_without_location. */
990 static tree
991 prune_expr_location (tree *tp, int *walk_subtrees, void *)
993 if (EXPR_P (*tp))
994 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
995 else
996 *walk_subtrees = 0;
997 return NULL_TREE;
1000 /* Similar to unshare_expr but also prune all expression locations
1001 from EXPR. */
1003 tree
1004 unshare_expr_without_location (tree expr)
1006 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1007 if (EXPR_P (expr))
1008 walk_tree (&expr, prune_expr_location, NULL, NULL);
1009 return expr;
1012 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1013 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1014 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1015 EXPR is the location of the EXPR. */
1017 static location_t
1018 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1020 if (!expr)
1021 return or_else;
1023 if (EXPR_HAS_LOCATION (expr))
1024 return EXPR_LOCATION (expr);
1026 if (TREE_CODE (expr) != STATEMENT_LIST)
1027 return or_else;
1029 tree_stmt_iterator i = tsi_start (expr);
1031 bool found = false;
1032 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1034 found = true;
1035 tsi_next (&i);
1038 if (!found || !tsi_one_before_end_p (i))
1039 return or_else;
1041 return rexpr_location (tsi_stmt (i), or_else);
1044 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1045 rexpr_location for the potential recursion. */
1047 static inline bool
1048 rexpr_has_location (tree expr)
1050 return rexpr_location (expr) != UNKNOWN_LOCATION;
1054 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1055    contain statements and have a value.  Assign its value to a temporary
1056    and give it void_type_node.  Return the temporary, or NULL_TREE if
1057    WRAPPER was already void.  */
/* TEMP, if non-NULL, is an INIT_EXPR or MODIFY_EXPR whose RHS is WRAPPER;
   the wrapper's value is then assigned through TEMP rather than through a
   freshly created temporary.  */
1059 tree
1060 voidify_wrapper_expr (tree wrapper, tree temp)
1062   tree type = TREE_TYPE (wrapper);
1063   if (type && !VOID_TYPE_P (type))
1065       tree *p;
1067       /* Set p to point to the body of the wrapper.  Loop until we find
1068 	 something that isn't a wrapper.  */
1069       for (p = &wrapper; p && *p; )
1071 	  switch (TREE_CODE (*p))
1073 	    case BIND_EXPR:
1074 	      TREE_SIDE_EFFECTS (*p) = 1;
1075 	      TREE_TYPE (*p) = void_type_node;
1076 	      /* For a BIND_EXPR, the body is operand 1.  */
1077 	      p = &BIND_EXPR_BODY (*p);
1078 	      break;
1080 	    case CLEANUP_POINT_EXPR:
1081 	    case TRY_FINALLY_EXPR:
1082 	    case TRY_CATCH_EXPR:
1083 	      TREE_SIDE_EFFECTS (*p) = 1;
1084 	      TREE_TYPE (*p) = void_type_node;
1085 	      p = &TREE_OPERAND (*p, 0);
1086 	      break;
1088 	    case STATEMENT_LIST:
1090 		tree_stmt_iterator i = tsi_last (*p);
1091 		TREE_SIDE_EFFECTS (*p) = 1;
1092 		TREE_TYPE (*p) = void_type_node;
		/* An empty statement list has no value-producing statement;
		   signal that by setting P to NULL so we return NULL_TREE.  */
1093 		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1095 	      break;
1097 	    case COMPOUND_EXPR:
1098 	      /* Advance to the last statement.  Set all container types to
1099 		 void.  */
1100 	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1102 		  TREE_SIDE_EFFECTS (*p) = 1;
1103 		  TREE_TYPE (*p) = void_type_node;
1105 	      break;
1107 	    case TRANSACTION_EXPR:
1108 	      TREE_SIDE_EFFECTS (*p) = 1;
1109 	      TREE_TYPE (*p) = void_type_node;
1110 	      p = &TRANSACTION_EXPR_BODY (*p);
1111 	      break;
1113 	    default:
1114 	      /* Assume that any tree upon which voidify_wrapper_expr is
1115 		 directly called is a wrapper, and that its body is op0.  */
1116 	      if (p == &wrapper)
1118 		  TREE_SIDE_EFFECTS (*p) = 1;
1119 		  TREE_TYPE (*p) = void_type_node;
1120 		  p = &TREE_OPERAND (*p, 0);
1121 		  break;
	      /* Reached a non-wrapper expression: this is the wrapper's
		 value; stop descending.  */
1123 	      goto out;
1127     out:
1128       if (p == NULL || IS_EMPTY_STMT (*p))
1129 	temp = NULL_TREE;
1130       else if (temp)
1132 	  /* The wrapper is on the RHS of an assignment that we're pushing
1133 	     down.  */
1134 	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
1135 		      || TREE_CODE (temp) == MODIFY_EXPR);
1136 	  TREE_OPERAND (temp, 1) = *p;
1137 	  *p = temp;
1139       else
1141 	  temp = create_tmp_var (type, "retval");
1142 	  *p = build2 (INIT_EXPR, type, temp, *p);
1145       return temp;
1148   return NULL_TREE;
1151 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1152 a temporary through which they communicate. */
1154 static void
1155 build_stack_save_restore (gcall **save, gcall **restore)
1157 tree tmp_var;
1159 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1160 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1161 gimple_call_set_lhs (*save, tmp_var);
1163 *restore
1164 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1165 1, tmp_var);
1168 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1170 static tree
1171 build_asan_poison_call_expr (tree decl)
1173 /* Do not poison variables that have size equal to zero. */
1174 tree unit_size = DECL_SIZE_UNIT (decl);
1175 if (zerop (unit_size))
1176 return NULL_TREE;
1178 tree base = build_fold_addr_expr (decl);
1180 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1181 void_type_node, 3,
1182 build_int_cst (integer_type_node,
1183 ASAN_MARK_POISON),
1184 base, unit_size);
1187 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1188 on POISON flag, shadow memory of a DECL variable. The call will be
1189 put on location identified by IT iterator, where BEFORE flag drives
1190 position where the stmt will be put. */
1192 static void
1193 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1194 bool before)
1196 tree unit_size = DECL_SIZE_UNIT (decl);
1197 tree base = build_fold_addr_expr (decl);
1199 /* Do not poison variables that have size equal to zero. */
1200 if (zerop (unit_size))
1201 return;
1203 /* It's necessary to have all stack variables aligned to ASAN granularity
1204 bytes. */
1205 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1206 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1208 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1210 gimple *g
1211 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1212 build_int_cst (integer_type_node, flags),
1213 base, unit_size);
1215 if (before)
1216 gsi_insert_before (it, g, GSI_NEW_STMT);
1217 else
1218 gsi_insert_after (it, g, GSI_NEW_STMT);
1221 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1222 either poisons or unpoisons a DECL. Created statement is appended
1223 to SEQ_P gimple sequence. */
1225 static void
1226 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1228 gimple_stmt_iterator it = gsi_last (*seq_p);
1229 bool before = false;
1231 if (gsi_end_p (it))
1232 before = true;
1234 asan_poison_variable (decl, poison, &it, before);
1237 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1239 static int
1240 sort_by_decl_uid (const void *a, const void *b)
1242 const tree *t1 = (const tree *)a;
1243 const tree *t2 = (const tree *)b;
1245 int uid1 = DECL_UID (*t1);
1246 int uid2 = DECL_UID (*t2);
1248 if (uid1 < uid2)
1249 return -1;
1250 else if (uid1 > uid2)
1251 return 1;
1252 else
1253 return 0;
1256 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1257 depending on POISON flag. Created statement is appended
1258 to SEQ_P gimple sequence. */
1260 static void
1261 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1263 unsigned c = variables->elements ();
1264 if (c == 0)
1265 return;
1267 auto_vec<tree> sorted_variables (c);
1269 for (hash_set<tree>::iterator it = variables->begin ();
1270 it != variables->end (); ++it)
1271 sorted_variables.safe_push (*it);
1273 sorted_variables.qsort (sort_by_decl_uid);
1275 unsigned i;
1276 tree var;
1277 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1279 asan_poison_variable (var, poison, seq_p);
1281 /* Add use_after_scope_memory attribute for the variable in order
1282 to prevent re-written into SSA. */
1283 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1284 DECL_ATTRIBUTES (var)))
1285 DECL_ATTRIBUTES (var)
1286 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1287 integer_one_node,
1288 DECL_ATTRIBUTES (var));
1292 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
/* *EXPR_P is the BIND_EXPR; the resulting GIMPLE_BIND (plus any needed
   stack save/restore and clobber cleanups) is appended to PRE_P.  */
1294 static enum gimplify_status
1295 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1297   tree bind_expr = *expr_p;
1298   bool old_keep_stack = gimplify_ctxp->keep_stack;
1299   bool old_save_stack = gimplify_ctxp->save_stack;
1300   tree t;
1301   gbind *bind_stmt;
1302   gimple_seq body, cleanup;
1303   gcall *stack_save;
1304   location_t start_locus = 0, end_locus = 0;
1305   tree ret_clauses = NULL;
1307   tree temp = voidify_wrapper_expr (bind_expr, NULL);
1309   /* Mark variables seen in this bind expr.  */
1310   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1312       if (VAR_P (t))
1314 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1316 	  /* Mark variable as local.  */
1317 	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1318 	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1319 		  || splay_tree_lookup (ctx->variables,
1320 					(splay_tree_key) t) == NULL))
1322 	      if (ctx->region_type == ORT_SIMD
1323 		  && TREE_ADDRESSABLE (t)
1324 		  && !TREE_STATIC (t))
1325 		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1326 	      else
1327 		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1330 	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1332 	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1333 	    cfun->has_local_explicit_reg_vars = true;
1336       /* Preliminarily mark non-addressed complex variables as eligible
1337 	 for promotion to gimple registers.  We'll transform their uses
1338 	 as we find them.  */
1339       if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1340 	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1341 	  && !TREE_THIS_VOLATILE (t)
1342 	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1343 	  && !needs_to_live_in_memory (t))
1344 	DECL_GIMPLE_REG_P (t) = 1;
1347   bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1348 				 BIND_EXPR_BLOCK (bind_expr));
1349   gimple_push_bind_expr (bind_stmt);
/* Reset the flags so we can detect save_stack/keep_stack requests made
   while gimplifying this bind's own body; the outer values are restored
   below.  */
1351   gimplify_ctxp->keep_stack = false;
1352   gimplify_ctxp->save_stack = false;
1354   /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1355   body = NULL;
1356   gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1357   gimple_bind_set_body (bind_stmt, body);
1359   /* Source location wise, the cleanup code (stack_restore and clobbers)
1360      belongs to the end of the block, so propagate what we have.  The
1361      stack_save operation belongs to the beginning of block, which we can
1362      infer from the bind_expr directly if the block has no explicit
1363      assignment.  */
1364   if (BIND_EXPR_BLOCK (bind_expr))
1366       end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1367       start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1369   if (start_locus == 0)
1370     start_locus = EXPR_LOCATION (bind_expr);
1372   cleanup = NULL;
1373   stack_save = NULL;
1375   /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1376      the stack space allocated to the VLAs.  */
1377   if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1379       gcall *stack_restore;
1381       /* Save stack on entry and restore it on exit.  Add a try_finally
1382 	 block to achieve this.  */
1383       build_stack_save_restore (&stack_save, &stack_restore);
1385       gimple_set_location (stack_save, start_locus);
1386       gimple_set_location (stack_restore, end_locus);
1388       gimplify_seq_add_stmt (&cleanup, stack_restore);
1391   /* Add clobbers for all variables that go out of scope.  */
1392   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1394       if (VAR_P (t)
1395 	  && !is_global_var (t)
1396 	  && DECL_CONTEXT (t) == current_function_decl)
1398 	  if (!DECL_HARD_REGISTER (t)
1399 	      && !TREE_THIS_VOLATILE (t)
1400 	      && !DECL_HAS_VALUE_EXPR_P (t)
1401 	      /* Only care for variables that have to be in memory.  Others
1402 		 will be rewritten into SSA names, hence moved to the
1403 		 top-level.  */
1404 	      && !is_gimple_reg (t)
1405 	      && flag_stack_reuse != SR_NONE)
1407 	      tree clobber = build_clobber (TREE_TYPE (t));
1408 	      gimple *clobber_stmt;
1409 	      clobber_stmt = gimple_build_assign (t, clobber);
1410 	      gimple_set_location (clobber_stmt, end_locus);
1411 	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
/* OpenACC 'declare' returns: collect the clauses recorded for this
   variable so a matching OACC_DECLARE region can be emitted below.  */
1414 	  if (flag_openacc && oacc_declare_returns != NULL)
1416 	      tree *c = oacc_declare_returns->get (t);
1417 	      if (c != NULL)
1419 		  if (ret_clauses)
1420 		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1422 		  ret_clauses = *c;
1424 		  oacc_declare_returns->remove (t);
1426 		  if (oacc_declare_returns->elements () == 0)
1428 		      delete oacc_declare_returns;
1429 		      oacc_declare_returns = NULL;
/* Variables tracked for use-after-scope: poison them as they go out of
   scope.  */
1435       if (asan_poisoned_variables != NULL
1436 	  && asan_poisoned_variables->contains (t))
1438 	  asan_poisoned_variables->remove (t);
1439 	  asan_poison_variable (t, true, &cleanup);
1442       if (gimplify_ctxp->live_switch_vars != NULL
1443 	  && gimplify_ctxp->live_switch_vars->contains (t))
1444 	gimplify_ctxp->live_switch_vars->remove (t);
1447   if (ret_clauses)
1449       gomp_target *stmt;
1450       gimple_stmt_iterator si = gsi_start (cleanup);
1452       stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1453 				      ret_clauses);
1454       gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1457   if (cleanup)
1459       gtry *gs;
1460       gimple_seq new_body;
1462       new_body = NULL;
1463       gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1464 			     GIMPLE_TRY_FINALLY);
1466       if (stack_save)
1467 	gimplify_seq_add_stmt (&new_body, stack_save);
1468       gimplify_seq_add_stmt (&new_body, gs);
1469       gimple_bind_set_body (bind_stmt, new_body);
1472   /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
1473   if (!gimplify_ctxp->keep_stack)
1474     gimplify_ctxp->keep_stack = old_keep_stack;
1475   gimplify_ctxp->save_stack = old_save_stack;
1477   gimple_pop_bind_expr ();
1479   gimplify_seq_add_stmt (pre_p, bind_stmt);
1481   if (temp)
1483       *expr_p = temp;
1484       return GS_OK;
/* The BIND_EXPR had no value; it is fully consumed.  */
1487   *expr_p = NULL_TREE;
1488   return GS_ALL_DONE;
1491 /* Maybe add early return predict statement to PRE_P sequence. */
1493 static void
1494 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1496 /* If we are not in a conditional context, add PREDICT statement. */
1497 if (gimple_conditional_context ())
1499 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1500 NOT_TAKEN);
1501 gimplify_seq_add_stmt (pre_p, predict);
1505 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1506 GIMPLE value, it is assigned to a new temporary and the statement is
1507 re-written to return the temporary.
1509 PRE_P points to the sequence where side effects that must happen before
1510 STMT should be stored. */
1512 static enum gimplify_status
1513 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1515 greturn *ret;
1516 tree ret_expr = TREE_OPERAND (stmt, 0);
1517 tree result_decl, result;
1519 if (ret_expr == error_mark_node)
1520 return GS_ERROR;
1522 if (!ret_expr
1523 || TREE_CODE (ret_expr) == RESULT_DECL)
1525 maybe_add_early_return_predict_stmt (pre_p);
1526 greturn *ret = gimple_build_return (ret_expr);
1527 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1528 gimplify_seq_add_stmt (pre_p, ret);
1529 return GS_ALL_DONE;
1532 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1533 result_decl = NULL_TREE;
1534 else
1536 result_decl = TREE_OPERAND (ret_expr, 0);
1538 /* See through a return by reference. */
1539 if (TREE_CODE (result_decl) == INDIRECT_REF)
1540 result_decl = TREE_OPERAND (result_decl, 0);
1542 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1543 || TREE_CODE (ret_expr) == INIT_EXPR)
1544 && TREE_CODE (result_decl) == RESULT_DECL);
1547 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1548 Recall that aggregate_value_p is FALSE for any aggregate type that is
1549 returned in registers. If we're returning values in registers, then
1550 we don't want to extend the lifetime of the RESULT_DECL, particularly
1551 across another call. In addition, for those aggregates for which
1552 hard_function_value generates a PARALLEL, we'll die during normal
1553 expansion of structure assignments; there's special code in expand_return
1554 to handle this case that does not exist in expand_expr. */
1555 if (!result_decl)
1556 result = NULL_TREE;
1557 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1559 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1561 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1562 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1563 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1564 should be effectively allocated by the caller, i.e. all calls to
1565 this function must be subject to the Return Slot Optimization. */
1566 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1567 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1569 result = result_decl;
1571 else if (gimplify_ctxp->return_temp)
1572 result = gimplify_ctxp->return_temp;
1573 else
1575 result = create_tmp_reg (TREE_TYPE (result_decl));
1577 /* ??? With complex control flow (usually involving abnormal edges),
1578 we can wind up warning about an uninitialized value for this. Due
1579 to how this variable is constructed and initialized, this is never
1580 true. Give up and never warn. */
1581 TREE_NO_WARNING (result) = 1;
1583 gimplify_ctxp->return_temp = result;
1586 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1587 Then gimplify the whole thing. */
1588 if (result != result_decl)
1589 TREE_OPERAND (ret_expr, 0) = result;
1591 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1593 maybe_add_early_return_predict_stmt (pre_p);
1594 ret = gimple_build_return (result);
1595 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1596 gimplify_seq_add_stmt (pre_p, ret);
1598 return GS_ALL_DONE;
1601 /* Gimplify a variable-length array DECL. */
1603 static void
1604 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1606 /* This is a variable-sized decl. Simplify its size and mark it
1607 for deferred expansion. */
1608 tree t, addr, ptr_type;
1610 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1611 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1613 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1614 if (DECL_HAS_VALUE_EXPR_P (decl))
1615 return;
1617 /* All occurrences of this decl in final gimplified code will be
1618 replaced by indirection. Setting DECL_VALUE_EXPR does two
1619 things: First, it lets the rest of the gimplifier know what
1620 replacement to use. Second, it lets the debug info know
1621 where to find the value. */
1622 ptr_type = build_pointer_type (TREE_TYPE (decl));
1623 addr = create_tmp_var (ptr_type, get_name (decl));
1624 DECL_IGNORED_P (addr) = 0;
1625 t = build_fold_indirect_ref (addr);
1626 TREE_THIS_NOTRAP (t) = 1;
1627 SET_DECL_VALUE_EXPR (decl, t);
1628 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1630 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1631 max_int_size_in_bytes (TREE_TYPE (decl)));
1632 /* The call has been built for a variable-sized object. */
1633 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1634 t = fold_convert (ptr_type, t);
1635 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1637 gimplify_and_add (t, seq_p);
1640 /* A helper function to be called via walk_tree. Mark all labels under *TP
1641 as being forced. To be called for DECL_INITIAL of static variables. */
1643 static tree
1644 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1646 if (TYPE_P (*tp))
1647 *walk_subtrees = 0;
1648 if (TREE_CODE (*tp) == LABEL_DECL)
1650 FORCED_LABEL (*tp) = 1;
1651 cfun->has_forced_label_in_static = 1;
1654 return NULL_TREE;
1657 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1658    and initialization explicit.  */
/* Emits any needed size gimplification, VLA allocation, ASan poisoning
   and initializer code into SEQ_P; *STMT_P is consumed (set to NULL).  */
1660 static enum gimplify_status
1661 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1663   tree stmt = *stmt_p;
1664   tree decl = DECL_EXPR_DECL (stmt);
1666   *stmt_p = NULL_TREE;
1668   if (TREE_TYPE (decl) == error_mark_node)
1669     return GS_ERROR;
1671   if ((TREE_CODE (decl) == TYPE_DECL
1672        || VAR_P (decl))
1673       && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1675       gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1676       if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1677 	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1680   /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1681      in case its size expressions contain problematic nodes like CALL_EXPR.  */
1682   if (TREE_CODE (decl) == TYPE_DECL
1683       && DECL_ORIGINAL_TYPE (decl)
1684       && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1686       gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1687       if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1688 	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1691   if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1693       tree init = DECL_INITIAL (decl);
1694       bool is_vla = false;
/* Treat as a VLA when the size is not a compile-time constant, or when
   generic stack checking wants large fixed-size locals allocated
   dynamically as well.  */
1696       if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1697 	  || (!TREE_STATIC (decl)
1698 	      && flag_stack_check == GENERIC_STACK_CHECK
1699 	      && compare_tree_int (DECL_SIZE_UNIT (decl),
1700 				   STACK_CHECK_MAX_VAR_SIZE) > 0))
1702 	  gimplify_vla_decl (decl, seq_p);
1703 	  is_vla = true;
/* Track addressable, non-static locals for use-after-scope sanitizing;
   unpoison here (start of scope), re-poison at end of the enclosing
   bind.  */
1706       if (asan_poisoned_variables
1707 	  && !is_vla
1708 	  && TREE_ADDRESSABLE (decl)
1709 	  && !TREE_STATIC (decl)
1710 	  && !DECL_HAS_VALUE_EXPR_P (decl)
1711 	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1712 	  && dbg_cnt (asan_use_after_scope)
1713 	  && !gimplify_omp_ctxp)
1715 	  asan_poisoned_variables->add (decl);
1716 	  asan_poison_variable (decl, false, seq_p);
1717 	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1718 	    gimplify_ctxp->live_switch_vars->add (decl);
1721       /* Some front ends do not explicitly declare all anonymous
1722 	 artificial variables.  We compensate here by declaring the
1723 	 variables, though it would be better if the front ends would
1724 	 explicitly declare them.  */
1725       if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1726 	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1727 	gimple_add_tmp_var (decl);
1729       if (init && init != error_mark_node)
1731 	  if (!TREE_STATIC (decl))
1733 	      DECL_INITIAL (decl) = NULL_TREE;
1734 	      init = build2 (INIT_EXPR, void_type_node, decl, init);
1735 	      gimplify_and_add (init, seq_p);
1736 	      ggc_free (init);
1738 	  else
1739 	    /* We must still examine initializers for static variables
1740 	       as they may contain a label address.  */
1741 	    walk_tree (&init, force_labels_r, NULL, NULL);
1745   return GS_ALL_DONE;
1748 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1749 and replacing the LOOP_EXPR with goto, but if the loop contains an
1750 EXIT_EXPR, we need to append a label for it to jump to. */
1752 static enum gimplify_status
1753 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1755 tree saved_label = gimplify_ctxp->exit_label;
1756 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1758 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1760 gimplify_ctxp->exit_label = NULL_TREE;
1762 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1764 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1766 if (gimplify_ctxp->exit_label)
1767 gimplify_seq_add_stmt (pre_p,
1768 gimple_build_label (gimplify_ctxp->exit_label));
1770 gimplify_ctxp->exit_label = saved_label;
1772 *expr_p = NULL;
1773 return GS_ALL_DONE;
1776 /* Gimplify a statement list onto a sequence. These may be created either
1777 by an enlightened front-end, or by shortcut_cond_expr. */
1779 static enum gimplify_status
1780 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1782 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1784 tree_stmt_iterator i = tsi_start (*expr_p);
1786 while (!tsi_end_p (i))
1788 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1789 tsi_delink (&i);
1792 if (temp)
1794 *expr_p = temp;
1795 return GS_OK;
1798 return GS_ALL_DONE;
1801 /* Callback for walk_gimple_seq.  */
/* Records in WI->info the first "real" statement found, then stops the
   walk by returning integer_zero_node; returns NULL_TREE to continue.  */
1803 static tree
1804 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1805 			   struct walk_stmt_info *wi)
1807   gimple *stmt = gsi_stmt (*gsi_p);
1809   *handled_ops_p = true;
1810   switch (gimple_code (stmt))
1812     case GIMPLE_TRY:
1813       /* A compiler-generated cleanup or a user-written try block.
1814 	 If it's empty, don't dive into it--that would result in
1815 	 worse location info.  */
1816       if (gimple_try_eval (stmt) == NULL)
1818 	  wi->info = stmt;
1819 	  return integer_zero_node;
1821       /* Fall through.  */
1822     case GIMPLE_BIND:
1823     case GIMPLE_CATCH:
1824     case GIMPLE_EH_FILTER:
1825     case GIMPLE_TRANSACTION:
1826       /* Walk the sub-statements.  */
1827       *handled_ops_p = false;
1828       break;
1830     case GIMPLE_DEBUG:
1831       /* Ignore these.  We may generate them before declarations that
1832 	 are never executed.  If there's something to warn about,
1833 	 there will be non-debug stmts too, and we'll catch those.  */
1834       break;
1836     case GIMPLE_CALL:
1837       if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
/* ASAN_MARK calls are instrumentation, not user code; walk past them.  */
1839 	  *handled_ops_p = false;
1840 	  break;
1842       /* Fall through.  */
1843     default:
1844       /* Save the first "real" statement (not a decl/lexical scope/...).  */
1845       wi->info = stmt;
1846       return integer_zero_node;
1848   return NULL_TREE;
1851 /* Possibly warn about unreachable statements between switch's controlling
1852    expression and the first case.  SEQ is the body of a switch expression.  */
1854 static void
1855 maybe_warn_switch_unreachable (gimple_seq seq)
1857   if (!warn_switch_unreachable
1858       /* This warning doesn't play well with Fortran when optimizations
1859 	 are on.  */
1860       || lang_GNU_Fortran ()
1861       || seq == NULL)
1862     return;
/* Find the first real statement before the first case label (stored
   in wi.info by the callback).  */
1864   struct walk_stmt_info wi;
1865   memset (&wi, 0, sizeof (wi));
1866   walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1867   gimple *stmt = (gimple *) wi.info;
1869   if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1871       if (gimple_code (stmt) == GIMPLE_GOTO
1872 	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1873 	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1874 	/* Don't warn for compiler-generated gotos.  These occur
1875 	   in Duff's devices, for example.  */;
1876       else
1877 	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1878 		    "statement will never be executed");
1883 /* A label entry that pairs label and a location.  */
1884 struct label_entry
1886   tree label;	/* The LABEL_DECL.  */
1887   location_t loc;	/* Location to report diagnostics at.  */
1890 /* Find LABEL in vector of label entries VEC. */
1892 static struct label_entry *
1893 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1895 unsigned int i;
1896 struct label_entry *l;
1898 FOR_EACH_VEC_ELT (*vec, i, l)
1899 if (l->label == label)
1900 return l;
1901 return NULL;
1904 /* Return true if LABEL, a LABEL_DECL, represents a case label
1905 in a vector of labels CASES. */
1907 static bool
1908 case_label_p (const vec<tree> *cases, tree label)
1910 unsigned int i;
1911 tree l;
1913 FOR_EACH_VEC_ELT (*cases, i, l)
1914 if (CASE_LABEL (l) == label)
1915 return true;
1916 return false;
1919 /* Find the last nondebug statement in a scope STMT.  */
/* Recurses through GIMPLE_BIND/GIMPLE_TRY wrappers; may return NULL for
   an empty scope.  */
1921 static gimple *
1922 last_stmt_in_scope (gimple *stmt)
1924   if (!stmt)
1925     return NULL;
1927   switch (gimple_code (stmt))
1929     case GIMPLE_BIND:
1931 	gbind *bind = as_a <gbind *> (stmt);
1932 	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
1933 	return last_stmt_in_scope (stmt);
1936     case GIMPLE_TRY:
1938 	gtry *try_stmt = as_a <gtry *> (stmt);
1939 	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
1940 	gimple *last_eval = last_stmt_in_scope (stmt);
/* NOTE(review): gimple_stmt_may_fallthru (last_eval) is evaluated before
   the last_eval == NULL check below — presumably that helper tolerates a
   NULL argument; verify against gimple.c.  */
1941 	if (gimple_stmt_may_fallthru (last_eval)
1942 	    && (last_eval == NULL
1943 		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1944 	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
/* A TRY_FINALLY whose body can fall through ends in its cleanup.  */
1946 	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
1947 	    return last_stmt_in_scope (stmt);
1949 	else
1950 	  return last_eval;
1953     case GIMPLE_DEBUG:
1954       gcc_unreachable ();
1956     default:
1957       return stmt;
1961 /* Collect interesting labels in LABELS and return the statement preceding
1962    another case label, or a user-defined label.  Store a location useful
1963    to give warnings at *PREVLOC (usually the location of the returned
1964    statement or of its surrounding scope).  */
/* GSI_P is advanced past everything examined; on return it points at the
   label that stopped the scan (or the end of the sequence).  */
1966 static gimple *
1967 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1968 			    auto_vec <struct label_entry> *labels,
1969 			    location_t *prevloc)
1971   gimple *prev = NULL;
1973   *prevloc = UNKNOWN_LOCATION;
1976       if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
1978 	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
1979 	     which starts on a GIMPLE_SWITCH and ends with a break label.
1980 	     Handle that as a single statement that can fall through.  */
1981 	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
1982 	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
1983 	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
1984 	  if (last
1985 	      && gimple_code (first) == GIMPLE_SWITCH
1986 	      && gimple_code (last) == GIMPLE_LABEL)
1988 	      tree label = gimple_label_label (as_a <glabel *> (last));
1989 	      if (SWITCH_BREAK_LABEL_P (label))
1991 		  prev = bind;
1992 		  gsi_next (gsi_p);
1993 		  continue;
1997       if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1998 	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2000 	  /* Nested scope.  Only look at the last statement of
2001 	     the innermost scope.  */
2002 	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2003 	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2004 	  if (last)
2006 	      prev = last;
2007 	      /* It might be a label without a location.  Use the
2008 		 location of the scope then.  */
2009 	      if (!gimple_has_location (prev))
2010 		*prevloc = bind_loc;
2012 	  gsi_next (gsi_p);
2013 	  continue;
2016       /* Ifs are tricky.  */
2017       if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2019 	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2020 	  tree false_lab = gimple_cond_false_label (cond_stmt);
2021 	  location_t if_loc = gimple_location (cond_stmt);
2023 	  /* If we have e.g.
2024 	       if (i > 1) goto <D.2259>; else goto D;
2025 	     we can't do much with the else-branch.  */
2026 	  if (!DECL_ARTIFICIAL (false_lab))
2027 	    break;
2029 	  /* Go on until the false label, then one step back.  */
2030 	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2032 	      gimple *stmt = gsi_stmt (*gsi_p);
2033 	      if (gimple_code (stmt) == GIMPLE_LABEL
2034 		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2035 		break;
2038 	  /* Not found?  Oops.  */
2039 	  if (gsi_end_p (*gsi_p))
2040 	    break;
2042 	  struct label_entry l = { false_lab, if_loc };
2043 	  labels->safe_push (l);
2045 	  /* Go to the last statement of the then branch.  */
2046 	  gsi_prev (gsi_p);
2048 	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2049 	     <D.1759>:
2050 	     <stmt>;
2051 	     goto <D.1761>;
2052 	     <D.1760>:
2054 	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2055 	      && !gimple_has_location (gsi_stmt (*gsi_p)))
2057 	      /* Look at the statement before, it might be
2058 		 attribute fallthrough, in which case don't warn.  */
2059 	      gsi_prev (gsi_p);
2060 	      bool fallthru_before_dest
2061 		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2062 	      gsi_next (gsi_p);
2063 	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2064 	      if (!fallthru_before_dest)
2066 		  struct label_entry l = { goto_dest, if_loc };
2067 		  labels->safe_push (l);
2070 	  /* And move back.  */
2071 	  gsi_next (gsi_p);
2074       /* Remember the last statement.  Skip labels that are of no interest
2075 	 to us.  */
2076       if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2078 	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2079 	  if (find_label_entry (labels, label))
2080 	    prev = gsi_stmt (*gsi_p);
/* ASAN_MARK instrumentation calls are not "real" statements; skip.  */
2082       else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2084       else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2085 	prev = gsi_stmt (*gsi_p);
2086       gsi_next (gsi_p);
2088   while (!gsi_end_p (*gsi_p)
2089 	 /* Stop if we find a case or a user-defined label.  */
2090 	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2091 	     || !gimple_has_location (gsi_stmt (*gsi_p))));
2093   if (prev && gimple_has_location (prev))
2094     *prevloc = gimple_location (prev);
2095   return prev;
2098 /* Return true if the switch fallthough warning should occur.  LABEL is
2099    the label statement that we're falling through to.  */
/* GSI_P points at LABEL within the switch body; it is only read through
   local copies and not advanced.  */
2101 static bool
2102 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2104   gimple_stmt_iterator gsi = *gsi_p;
2106   /* Don't warn if the label is marked with a "falls through" comment.  */
2107   if (FALLTHROUGH_LABEL_P (label))
2108     return false;
2110   /* Don't warn for non-case labels followed by a statement:
2111        case 0:
2112 	 foo ();
2113        label:
2114 	 bar ();
2115      as these are likely intentional.  */
2116   if (!case_label_p (&gimplify_ctxp->case_labels, label))
2118       tree l;
2119       while (!gsi_end_p (gsi)
2120 	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2121 	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2122 	     && !case_label_p (&gimplify_ctxp->case_labels, l))
2123 	gsi_next_nondebug (&gsi);
/* Only warn when the run of labels ends without reaching a case label
   and without running off the end.  */
2124       if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2125 	return false;
2128   /* Don't warn for terminated branches, i.e. when the subsequent case labels
2129      immediately breaks.  */
2130   gsi = *gsi_p;
2132   /* Skip all immediately following labels.  */
2133   while (!gsi_end_p (gsi)
2134 	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2135 	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2136     gsi_next_nondebug (&gsi);
2138   /* { ... something; default:; } */
2139   if (gsi_end_p (gsi)
2140       /* { ... something; default: break; } or
2141 	 { ... something; default: goto L; } */
2142       || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2143       /* { ... something; default: return; } */
2144       || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2145     return false;
2147   return true;
2150 /* Callback for walk_gimple_seq.  */
/* Scans for a label, collects the statements that may fall through to
   the next located label, and emits -Wimplicit-fallthrough diagnostics.
   Returns integer_zero_node to stop the walk early, NULL_TREE to
   continue.  */
2152 static tree
2153 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2154 			     struct walk_stmt_info *)
2156   gimple *stmt = gsi_stmt (*gsi_p);
2158   *handled_ops_p = true;
2159   switch (gimple_code (stmt))
2161     case GIMPLE_TRY:
2162     case GIMPLE_BIND:
2163     case GIMPLE_CATCH:
2164     case GIMPLE_EH_FILTER:
2165     case GIMPLE_TRANSACTION:
2166       /* Walk the sub-statements.  */
2167       *handled_ops_p = false;
2168       break;
2170     /* Find a sequence of form:
2172        GIMPLE_LABEL
2173        [...]
2174        <may fallthru stmt>
2175        GIMPLE_LABEL
2177        and possibly warn.  */
2178     case GIMPLE_LABEL:
2180 	/* Found a label.  Skip all immediately following labels.  */
2181 	while (!gsi_end_p (*gsi_p)
2182 	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2183 	  gsi_next_nondebug (gsi_p);
2185 	/* There might be no more statements.  */
2186 	if (gsi_end_p (*gsi_p))
2187 	  return integer_zero_node;
2189 	/* Vector of labels that fall through.  */
2190 	auto_vec <struct label_entry> labels;
2191 	location_t prevloc;
2192 	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2194 	/* There might be no more statements.  */
2195 	if (gsi_end_p (*gsi_p))
2196 	  return integer_zero_node;
2198 	gimple *next = gsi_stmt (*gsi_p);
2199 	tree label;
2200 	/* If what follows is a label, then we may have a fallthrough.  */
2201 	if (gimple_code (next) == GIMPLE_LABEL
2202 	    && gimple_has_location (next)
2203 	    && (label = gimple_label_label (as_a <glabel *> (next)))
2204 	    && prev != NULL)
2206 	    struct label_entry *l;
2207 	    bool warned_p = false;
2208 	    auto_diagnostic_group d;
2209 	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2210 	      /* Quiet.  */;
2211 	    else if (gimple_code (prev) == GIMPLE_LABEL
2212 		     && (label = gimple_label_label (as_a <glabel *> (prev)))
2213 		     && (l = find_label_entry (&labels, label)))
/* Fallthrough out of an if-branch: warn at the branch's location.  */
2214 	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2215 				     "this statement may fall through");
2216 	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2217 		     /* Try to be clever and don't warn when the statement
2218 			can't actually fall through.  */
2219 		     && gimple_stmt_may_fallthru (prev)
2220 		     && prevloc != UNKNOWN_LOCATION)
2221 	      warned_p = warning_at (prevloc,
2222 				     OPT_Wimplicit_fallthrough_,
2223 				     "this statement may fall through");
2224 	    if (warned_p)
2225 	      inform (gimple_location (next), "here");
2227 	    /* Mark this label as processed so as to prevent multiple
2228 	       warnings in nested switches.  */
2229 	    FALLTHROUGH_LABEL_P (label) = true;
2231 	    /* So that next warn_implicit_fallthrough_r will start looking for
2232 	       a new sequence starting with this label.  */
2233 	    gsi_prev (gsi_p);
2236       break;
2237    default:
2238       break;
2240   return NULL_TREE;
2243 /* Warn when a switch case falls through. */
2245 static void
2246 maybe_warn_implicit_fallthrough (gimple_seq seq)
2248 if (!warn_implicit_fallthrough)
2249 return;
2251 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2252 if (!(lang_GNU_C ()
2253 || lang_GNU_CXX ()
2254 || lang_GNU_OBJC ()))
2255 return;
2257 struct walk_stmt_info wi;
2258 memset (&wi, 0, sizeof (wi));
2259 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2262 /* Callback for walk_gimple_seq. */
2264 static tree
2265 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2266 struct walk_stmt_info *)
2268 gimple *stmt = gsi_stmt (*gsi_p);
2270 *handled_ops_p = true;
2271 switch (gimple_code (stmt))
2273 case GIMPLE_TRY:
2274 case GIMPLE_BIND:
2275 case GIMPLE_CATCH:
2276 case GIMPLE_EH_FILTER:
2277 case GIMPLE_TRANSACTION:
2278 /* Walk the sub-statements. */
2279 *handled_ops_p = false;
2280 break;
2281 case GIMPLE_CALL:
2282 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2284 gsi_remove (gsi_p, true);
2285 if (gsi_end_p (*gsi_p))
2286 return integer_zero_node;
2288 bool found = false;
2289 location_t loc = gimple_location (stmt);
2291 gimple_stmt_iterator gsi2 = *gsi_p;
2292 stmt = gsi_stmt (gsi2);
2293 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2295 /* Go on until the artificial label. */
2296 tree goto_dest = gimple_goto_dest (stmt);
2297 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2299 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2300 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2301 == goto_dest)
2302 break;
2305 /* Not found? Stop. */
2306 if (gsi_end_p (gsi2))
2307 break;
2309 /* Look one past it. */
2310 gsi_next (&gsi2);
2313 /* We're looking for a case label or default label here. */
2314 while (!gsi_end_p (gsi2))
2316 stmt = gsi_stmt (gsi2);
2317 if (gimple_code (stmt) == GIMPLE_LABEL)
2319 tree label = gimple_label_label (as_a <glabel *> (stmt));
2320 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2322 found = true;
2323 break;
2326 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2328 else if (!is_gimple_debug (stmt))
2329 /* Anything else is not expected. */
2330 break;
2331 gsi_next (&gsi2);
2333 if (!found)
2334 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2335 "a case label or default label");
2337 break;
2338 default:
2339 break;
2341 return NULL_TREE;
2344 /* Expand all FALLTHROUGH () calls in SEQ. */
2346 static void
2347 expand_FALLTHROUGH (gimple_seq *seq_p)
2349 struct walk_stmt_info wi;
2350 memset (&wi, 0, sizeof (wi));
2351 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2355 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2356 branch to. */
2358 static enum gimplify_status
2359 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2361 tree switch_expr = *expr_p;
2362 gimple_seq switch_body_seq = NULL;
2363 enum gimplify_status ret;
2364 tree index_type = TREE_TYPE (switch_expr);
2365 if (index_type == NULL_TREE)
2366 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2368 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2369 fb_rvalue);
2370 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2371 return ret;
2373 if (SWITCH_BODY (switch_expr))
2375 vec<tree> labels;
2376 vec<tree> saved_labels;
2377 hash_set<tree> *saved_live_switch_vars = NULL;
2378 tree default_case = NULL_TREE;
2379 gswitch *switch_stmt;
2381 /* Save old labels, get new ones from body, then restore the old
2382 labels. Save all the things from the switch body to append after. */
2383 saved_labels = gimplify_ctxp->case_labels;
2384 gimplify_ctxp->case_labels.create (8);
2386 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2387 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2388 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2389 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2390 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2391 else
2392 gimplify_ctxp->live_switch_vars = NULL;
2394 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2395 gimplify_ctxp->in_switch_expr = true;
2397 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2399 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2400 maybe_warn_switch_unreachable (switch_body_seq);
2401 maybe_warn_implicit_fallthrough (switch_body_seq);
2402 /* Only do this for the outermost GIMPLE_SWITCH. */
2403 if (!gimplify_ctxp->in_switch_expr)
2404 expand_FALLTHROUGH (&switch_body_seq);
2406 labels = gimplify_ctxp->case_labels;
2407 gimplify_ctxp->case_labels = saved_labels;
2409 if (gimplify_ctxp->live_switch_vars)
2411 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2412 delete gimplify_ctxp->live_switch_vars;
2414 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2416 preprocess_case_label_vec_for_gimple (labels, index_type,
2417 &default_case);
2419 bool add_bind = false;
2420 if (!default_case)
2422 glabel *new_default;
2424 default_case
2425 = build_case_label (NULL_TREE, NULL_TREE,
2426 create_artificial_label (UNKNOWN_LOCATION));
2427 if (old_in_switch_expr)
2429 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2430 add_bind = true;
2432 new_default = gimple_build_label (CASE_LABEL (default_case));
2433 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2435 else if (old_in_switch_expr)
2437 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2438 if (last && gimple_code (last) == GIMPLE_LABEL)
2440 tree label = gimple_label_label (as_a <glabel *> (last));
2441 if (SWITCH_BREAK_LABEL_P (label))
2442 add_bind = true;
2446 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2447 default_case, labels);
2448 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2449 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2450 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2451 so that we can easily find the start and end of the switch
2452 statement. */
2453 if (add_bind)
2455 gimple_seq bind_body = NULL;
2456 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2457 gimple_seq_add_seq (&bind_body, switch_body_seq);
2458 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2459 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2460 gimplify_seq_add_stmt (pre_p, bind);
2462 else
2464 gimplify_seq_add_stmt (pre_p, switch_stmt);
2465 gimplify_seq_add_seq (pre_p, switch_body_seq);
2467 labels.release ();
2469 else
2470 gcc_unreachable ();
2472 return GS_ALL_DONE;
2475 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2477 static enum gimplify_status
2478 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2480 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2481 == current_function_decl);
2483 tree label = LABEL_EXPR_LABEL (*expr_p);
2484 glabel *label_stmt = gimple_build_label (label);
2485 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2486 gimplify_seq_add_stmt (pre_p, label_stmt);
2488 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2489 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2490 NOT_TAKEN));
2491 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2492 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2493 TAKEN));
2495 return GS_ALL_DONE;
2498 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2500 static enum gimplify_status
2501 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2503 struct gimplify_ctx *ctxp;
2504 glabel *label_stmt;
2506 /* Invalid programs can play Duff's Device type games with, for example,
2507 #pragma omp parallel. At least in the C front end, we don't
2508 detect such invalid branches until after gimplification, in the
2509 diagnose_omp_blocks pass. */
2510 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2511 if (ctxp->case_labels.exists ())
2512 break;
2514 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2515 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2516 ctxp->case_labels.safe_push (*expr_p);
2517 gimplify_seq_add_stmt (pre_p, label_stmt);
2519 return GS_ALL_DONE;
2522 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2523 if necessary. */
2525 tree
2526 build_and_jump (tree *label_p)
2528 if (label_p == NULL)
2529 /* If there's nowhere to jump, just fall through. */
2530 return NULL_TREE;
2532 if (*label_p == NULL_TREE)
2534 tree label = create_artificial_label (UNKNOWN_LOCATION);
2535 *label_p = label;
2538 return build1 (GOTO_EXPR, void_type_node, *label_p);
2541 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2542 This also involves building a label to jump to and communicating it to
2543 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2545 static enum gimplify_status
2546 gimplify_exit_expr (tree *expr_p)
2548 tree cond = TREE_OPERAND (*expr_p, 0);
2549 tree expr;
2551 expr = build_and_jump (&gimplify_ctxp->exit_label);
2552 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2553 *expr_p = expr;
2555 return GS_OK;
2558 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2559 different from its canonical type, wrap the whole thing inside a
2560 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2561 type.
2563 The canonical type of a COMPONENT_REF is the type of the field being
2564 referenced--unless the field is a bit-field which can be read directly
2565 in a smaller mode, in which case the canonical type is the
2566 sign-appropriate type corresponding to that mode. */
2568 static void
2569 canonicalize_component_ref (tree *expr_p)
2571 tree expr = *expr_p;
2572 tree type;
2574 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2576 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2577 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2578 else
2579 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2581 /* One could argue that all the stuff below is not necessary for
2582 the non-bitfield case and declare it a FE error if type
2583 adjustment would be needed. */
2584 if (TREE_TYPE (expr) != type)
2586 #ifdef ENABLE_TYPES_CHECKING
2587 tree old_type = TREE_TYPE (expr);
2588 #endif
2589 int type_quals;
2591 /* We need to preserve qualifiers and propagate them from
2592 operand 0. */
2593 type_quals = TYPE_QUALS (type)
2594 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2595 if (TYPE_QUALS (type) != type_quals)
2596 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2598 /* Set the type of the COMPONENT_REF to the underlying type. */
2599 TREE_TYPE (expr) = type;
2601 #ifdef ENABLE_TYPES_CHECKING
2602 /* It is now a FE error, if the conversion from the canonical
2603 type to the original expression type is not useless. */
2604 gcc_assert (useless_type_conversion_p (old_type, type));
2605 #endif
2609 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2610 to foo, embed that change in the ADDR_EXPR by converting
2611 T array[U];
2612 (T *)&array
2614 &array[L]
2615 where L is the lower bound. For simplicity, only do this for constant
2616 lower bound.
2617 The constraint is that the type of &array[L] is trivially convertible
2618 to T *. */
2620 static void
2621 canonicalize_addr_expr (tree *expr_p)
2623 tree expr = *expr_p;
2624 tree addr_expr = TREE_OPERAND (expr, 0);
2625 tree datype, ddatype, pddatype;
2627 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2628 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2629 || TREE_CODE (addr_expr) != ADDR_EXPR)
2630 return;
2632 /* The addr_expr type should be a pointer to an array. */
2633 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2634 if (TREE_CODE (datype) != ARRAY_TYPE)
2635 return;
2637 /* The pointer to element type shall be trivially convertible to
2638 the expression pointer type. */
2639 ddatype = TREE_TYPE (datype);
2640 pddatype = build_pointer_type (ddatype);
2641 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2642 pddatype))
2643 return;
2645 /* The lower bound and element sizes must be constant. */
2646 if (!TYPE_SIZE_UNIT (ddatype)
2647 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2648 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2649 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2650 return;
2652 /* All checks succeeded. Build a new node to merge the cast. */
2653 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2654 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2655 NULL_TREE, NULL_TREE);
2656 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2658 /* We can have stripped a required restrict qualifier above. */
2659 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2660 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2663 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2664 underneath as appropriate. */
2666 static enum gimplify_status
2667 gimplify_conversion (tree *expr_p)
2669 location_t loc = EXPR_LOCATION (*expr_p);
2670 gcc_assert (CONVERT_EXPR_P (*expr_p));
2672 /* Then strip away all but the outermost conversion. */
2673 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2675 /* And remove the outermost conversion if it's useless. */
2676 if (tree_ssa_useless_type_conversion (*expr_p))
2677 *expr_p = TREE_OPERAND (*expr_p, 0);
2679 /* If we still have a conversion at the toplevel,
2680 then canonicalize some constructs. */
2681 if (CONVERT_EXPR_P (*expr_p))
2683 tree sub = TREE_OPERAND (*expr_p, 0);
2685 /* If a NOP conversion is changing the type of a COMPONENT_REF
2686 expression, then canonicalize its type now in order to expose more
2687 redundant conversions. */
2688 if (TREE_CODE (sub) == COMPONENT_REF)
2689 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2691 /* If a NOP conversion is changing a pointer to array of foo
2692 to a pointer to foo, embed that change in the ADDR_EXPR. */
2693 else if (TREE_CODE (sub) == ADDR_EXPR)
2694 canonicalize_addr_expr (expr_p);
2697 /* If we have a conversion to a non-register type force the
2698 use of a VIEW_CONVERT_EXPR instead. */
2699 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2700 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2701 TREE_OPERAND (*expr_p, 0));
2703 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2704 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2705 TREE_SET_CODE (*expr_p, NOP_EXPR);
2707 return GS_OK;
2710 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2711 DECL_VALUE_EXPR, and it's worth re-examining things. */
2713 static enum gimplify_status
2714 gimplify_var_or_parm_decl (tree *expr_p)
2716 tree decl = *expr_p;
2718 /* ??? If this is a local variable, and it has not been seen in any
2719 outer BIND_EXPR, then it's probably the result of a duplicate
2720 declaration, for which we've already issued an error. It would
2721 be really nice if the front end wouldn't leak these at all.
2722 Currently the only known culprit is C++ destructors, as seen
2723 in g++.old-deja/g++.jason/binding.C. */
2724 if (VAR_P (decl)
2725 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2726 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2727 && decl_function_context (decl) == current_function_decl)
2729 gcc_assert (seen_error ());
2730 return GS_ERROR;
2733 /* When within an OMP context, notice uses of variables. */
2734 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2735 return GS_ALL_DONE;
2737 /* If the decl is an alias for another expression, substitute it now. */
2738 if (DECL_HAS_VALUE_EXPR_P (decl))
2740 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2741 return GS_OK;
2744 return GS_ALL_DONE;
2747 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2749 static void
2750 recalculate_side_effects (tree t)
2752 enum tree_code code = TREE_CODE (t);
2753 int len = TREE_OPERAND_LENGTH (t);
2754 int i;
2756 switch (TREE_CODE_CLASS (code))
2758 case tcc_expression:
2759 switch (code)
2761 case INIT_EXPR:
2762 case MODIFY_EXPR:
2763 case VA_ARG_EXPR:
2764 case PREDECREMENT_EXPR:
2765 case PREINCREMENT_EXPR:
2766 case POSTDECREMENT_EXPR:
2767 case POSTINCREMENT_EXPR:
2768 /* All of these have side-effects, no matter what their
2769 operands are. */
2770 return;
2772 default:
2773 break;
2775 /* Fall through. */
2777 case tcc_comparison: /* a comparison expression */
2778 case tcc_unary: /* a unary arithmetic expression */
2779 case tcc_binary: /* a binary arithmetic expression */
2780 case tcc_reference: /* a reference */
2781 case tcc_vl_exp: /* a function call */
2782 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2783 for (i = 0; i < len; ++i)
2785 tree op = TREE_OPERAND (t, i);
2786 if (op && TREE_SIDE_EFFECTS (op))
2787 TREE_SIDE_EFFECTS (t) = 1;
2789 break;
2791 case tcc_constant:
2792 /* No side-effects. */
2793 return;
2795 default:
2796 gcc_unreachable ();
2800 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2801 node *EXPR_P.
2803 compound_lval
2804 : min_lval '[' val ']'
2805 | min_lval '.' ID
2806 | compound_lval '[' val ']'
2807 | compound_lval '.' ID
2809 This is not part of the original SIMPLE definition, which separates
2810 array and member references, but it seems reasonable to handle them
2811 together. Also, this way we don't run into problems with union
2812 aliasing; gcc requires that for accesses through a union to alias, the
2813 union reference must be explicit, which was not always the case when we
2814 were splitting up array and member refs.
2816 PRE_P points to the sequence where side effects that must happen before
2817 *EXPR_P should be stored.
2819 POST_P points to the sequence where side effects that must happen after
2820 *EXPR_P should be stored. */
2822 static enum gimplify_status
2823 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2824 fallback_t fallback)
2826 tree *p;
2827 enum gimplify_status ret = GS_ALL_DONE, tret;
2828 int i;
2829 location_t loc = EXPR_LOCATION (*expr_p);
2830 tree expr = *expr_p;
2832 /* Create a stack of the subexpressions so later we can walk them in
2833 order from inner to outer. */
2834 auto_vec<tree, 10> expr_stack;
2836 /* We can handle anything that get_inner_reference can deal with. */
2837 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2839 restart:
2840 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2841 if (TREE_CODE (*p) == INDIRECT_REF)
2842 *p = fold_indirect_ref_loc (loc, *p);
2844 if (handled_component_p (*p))
2846 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2847 additional COMPONENT_REFs. */
2848 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2849 && gimplify_var_or_parm_decl (p) == GS_OK)
2850 goto restart;
2851 else
2852 break;
2854 expr_stack.safe_push (*p);
2857 gcc_assert (expr_stack.length ());
2859 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2860 walked through and P points to the innermost expression.
2862 Java requires that we elaborated nodes in source order. That
2863 means we must gimplify the inner expression followed by each of
2864 the indices, in order. But we can't gimplify the inner
2865 expression until we deal with any variable bounds, sizes, or
2866 positions in order to deal with PLACEHOLDER_EXPRs.
2868 So we do this in three steps. First we deal with the annotations
2869 for any variables in the components, then we gimplify the base,
2870 then we gimplify any indices, from left to right. */
2871 for (i = expr_stack.length () - 1; i >= 0; i--)
2873 tree t = expr_stack[i];
2875 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2877 /* Gimplify the low bound and element type size and put them into
2878 the ARRAY_REF. If these values are set, they have already been
2879 gimplified. */
2880 if (TREE_OPERAND (t, 2) == NULL_TREE)
2882 tree low = unshare_expr (array_ref_low_bound (t));
2883 if (!is_gimple_min_invariant (low))
2885 TREE_OPERAND (t, 2) = low;
2886 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2887 post_p, is_gimple_reg,
2888 fb_rvalue);
2889 ret = MIN (ret, tret);
2892 else
2894 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2895 is_gimple_reg, fb_rvalue);
2896 ret = MIN (ret, tret);
2899 if (TREE_OPERAND (t, 3) == NULL_TREE)
2901 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2902 tree elmt_size = unshare_expr (array_ref_element_size (t));
2903 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2905 /* Divide the element size by the alignment of the element
2906 type (above). */
2907 elmt_size
2908 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2910 if (!is_gimple_min_invariant (elmt_size))
2912 TREE_OPERAND (t, 3) = elmt_size;
2913 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2914 post_p, is_gimple_reg,
2915 fb_rvalue);
2916 ret = MIN (ret, tret);
2919 else
2921 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2922 is_gimple_reg, fb_rvalue);
2923 ret = MIN (ret, tret);
2926 else if (TREE_CODE (t) == COMPONENT_REF)
2928 /* Set the field offset into T and gimplify it. */
2929 if (TREE_OPERAND (t, 2) == NULL_TREE)
2931 tree offset = unshare_expr (component_ref_field_offset (t));
2932 tree field = TREE_OPERAND (t, 1);
2933 tree factor
2934 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2936 /* Divide the offset by its alignment. */
2937 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2939 if (!is_gimple_min_invariant (offset))
2941 TREE_OPERAND (t, 2) = offset;
2942 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2943 post_p, is_gimple_reg,
2944 fb_rvalue);
2945 ret = MIN (ret, tret);
2948 else
2950 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2951 is_gimple_reg, fb_rvalue);
2952 ret = MIN (ret, tret);
2957 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2958 so as to match the min_lval predicate. Failure to do so may result
2959 in the creation of large aggregate temporaries. */
2960 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2961 fallback | fb_lvalue);
2962 ret = MIN (ret, tret);
2964 /* And finally, the indices and operands of ARRAY_REF. During this
2965 loop we also remove any useless conversions. */
2966 for (; expr_stack.length () > 0; )
2968 tree t = expr_stack.pop ();
2970 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2972 /* Gimplify the dimension. */
2973 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2975 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2976 is_gimple_val, fb_rvalue);
2977 ret = MIN (ret, tret);
2981 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2983 /* The innermost expression P may have originally had
2984 TREE_SIDE_EFFECTS set which would have caused all the outer
2985 expressions in *EXPR_P leading to P to also have had
2986 TREE_SIDE_EFFECTS set. */
2987 recalculate_side_effects (t);
2990 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2991 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2993 canonicalize_component_ref (expr_p);
2996 expr_stack.release ();
2998 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3000 return ret;
3003 /* Gimplify the self modifying expression pointed to by EXPR_P
3004 (++, --, +=, -=).
3006 PRE_P points to the list where side effects that must happen before
3007 *EXPR_P should be stored.
3009 POST_P points to the list where side effects that must happen after
3010 *EXPR_P should be stored.
3012 WANT_VALUE is nonzero iff we want to use the value of this expression
3013 in another expression.
3015 ARITH_TYPE is the type the computation should be performed in. */
3017 enum gimplify_status
3018 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3019 bool want_value, tree arith_type)
3021 enum tree_code code;
3022 tree lhs, lvalue, rhs, t1;
3023 gimple_seq post = NULL, *orig_post_p = post_p;
3024 bool postfix;
3025 enum tree_code arith_code;
3026 enum gimplify_status ret;
3027 location_t loc = EXPR_LOCATION (*expr_p);
3029 code = TREE_CODE (*expr_p);
3031 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3032 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3034 /* Prefix or postfix? */
3035 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3036 /* Faster to treat as prefix if result is not used. */
3037 postfix = want_value;
3038 else
3039 postfix = false;
3041 /* For postfix, make sure the inner expression's post side effects
3042 are executed after side effects from this expression. */
3043 if (postfix)
3044 post_p = &post;
3046 /* Add or subtract? */
3047 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3048 arith_code = PLUS_EXPR;
3049 else
3050 arith_code = MINUS_EXPR;
3052 /* Gimplify the LHS into a GIMPLE lvalue. */
3053 lvalue = TREE_OPERAND (*expr_p, 0);
3054 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3055 if (ret == GS_ERROR)
3056 return ret;
3058 /* Extract the operands to the arithmetic operation. */
3059 lhs = lvalue;
3060 rhs = TREE_OPERAND (*expr_p, 1);
3062 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3063 that as the result value and in the postqueue operation. */
3064 if (postfix)
3066 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3067 if (ret == GS_ERROR)
3068 return ret;
3070 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3073 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3074 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3076 rhs = convert_to_ptrofftype_loc (loc, rhs);
3077 if (arith_code == MINUS_EXPR)
3078 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3079 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3081 else
3082 t1 = fold_convert (TREE_TYPE (*expr_p),
3083 fold_build2 (arith_code, arith_type,
3084 fold_convert (arith_type, lhs),
3085 fold_convert (arith_type, rhs)));
3087 if (postfix)
3089 gimplify_assign (lvalue, t1, pre_p);
3090 gimplify_seq_add_seq (orig_post_p, post);
3091 *expr_p = lhs;
3092 return GS_ALL_DONE;
3094 else
3096 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3097 return GS_OK;
3101 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3103 static void
3104 maybe_with_size_expr (tree *expr_p)
3106 tree expr = *expr_p;
3107 tree type = TREE_TYPE (expr);
3108 tree size;
3110 /* If we've already wrapped this or the type is error_mark_node, we can't do
3111 anything. */
3112 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3113 || type == error_mark_node)
3114 return;
3116 /* If the size isn't known or is a constant, we have nothing to do. */
3117 size = TYPE_SIZE_UNIT (type);
3118 if (!size || poly_int_tree_p (size))
3119 return;
3121 /* Otherwise, make a WITH_SIZE_EXPR. */
3122 size = unshare_expr (size);
3123 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3124 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3127 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3128 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3129 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3130 gimplified to an SSA name. */
3132 enum gimplify_status
3133 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3134 bool allow_ssa)
3136 bool (*test) (tree);
3137 fallback_t fb;
3139 /* In general, we allow lvalues for function arguments to avoid
3140 extra overhead of copying large aggregates out of even larger
3141 aggregates into temporaries only to copy the temporaries to
3142 the argument list. Make optimizers happy by pulling out to
3143 temporaries those types that fit in registers. */
3144 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3145 test = is_gimple_val, fb = fb_rvalue;
3146 else
3148 test = is_gimple_lvalue, fb = fb_either;
3149 /* Also strip a TARGET_EXPR that would force an extra copy. */
3150 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3152 tree init = TARGET_EXPR_INITIAL (*arg_p);
3153 if (init
3154 && !VOID_TYPE_P (TREE_TYPE (init)))
3155 *arg_p = init;
3159 /* If this is a variable sized type, we must remember the size. */
3160 maybe_with_size_expr (arg_p);
3162 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3163 /* Make sure arguments have the same location as the function call
3164 itself. */
3165 protected_set_expr_location (*arg_p, call_location);
3167 /* There is a sequence point before a function call. Side effects in
3168 the argument list must occur before the actual call. So, when
3169 gimplifying arguments, force gimplify_expr to use an internal
3170 post queue which is then appended to the end of PRE_P. */
3171 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3174 /* Don't fold inside offloading or taskreg regions: it can break code by
3175 adding decl references that weren't in the source. We'll do it during
3176 omplower pass instead. */
3178 static bool
3179 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3181 struct gimplify_omp_ctx *ctx;
3182 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3183 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3184 return false;
3185 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3186 return false;
3187 return fold_stmt (gsi);
3190 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3191 WANT_VALUE is true if the result of the call is desired. */
3193 static enum gimplify_status
3194 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3196 tree fndecl, parms, p, fnptrtype;
3197 enum gimplify_status ret;
3198 int i, nargs;
3199 gcall *call;
3200 bool builtin_va_start_p = false;
3201 location_t loc = EXPR_LOCATION (*expr_p);
3203 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3205 /* For reliable diagnostics during inlining, it is necessary that
3206 every call_expr be annotated with file and line. */
3207 if (! EXPR_HAS_LOCATION (*expr_p))
3208 SET_EXPR_LOCATION (*expr_p, input_location);
3210 /* Gimplify internal functions created in the FEs. */
3211 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3213 if (want_value)
3214 return GS_ALL_DONE;
3216 nargs = call_expr_nargs (*expr_p);
3217 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3218 auto_vec<tree> vargs (nargs);
3220 for (i = 0; i < nargs; i++)
3222 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3223 EXPR_LOCATION (*expr_p));
3224 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3227 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3228 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3229 gimplify_seq_add_stmt (pre_p, call);
3230 return GS_ALL_DONE;
3233 /* This may be a call to a builtin function.
3235 Builtin function calls may be transformed into different
3236 (and more efficient) builtin function calls under certain
3237 circumstances. Unfortunately, gimplification can muck things
3238 up enough that the builtin expanders are not aware that certain
3239 transformations are still valid.
3241 So we attempt transformation/gimplification of the call before
3242 we gimplify the CALL_EXPR. At this time we do not manage to
3243 transform all calls in the same manner as the expanders do, but
3244 we do transform most of them. */
3245 fndecl = get_callee_fndecl (*expr_p);
3246 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3247 switch (DECL_FUNCTION_CODE (fndecl))
3249 CASE_BUILT_IN_ALLOCA:
3250 /* If the call has been built for a variable-sized object, then we
3251 want to restore the stack level when the enclosing BIND_EXPR is
3252 exited to reclaim the allocated space; otherwise, we precisely
3253 need to do the opposite and preserve the latest stack level. */
3254 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3255 gimplify_ctxp->save_stack = true;
3256 else
3257 gimplify_ctxp->keep_stack = true;
3258 break;
3260 case BUILT_IN_VA_START:
3262 builtin_va_start_p = TRUE;
3263 if (call_expr_nargs (*expr_p) < 2)
3265 error ("too few arguments to function %<va_start%>");
3266 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3267 return GS_OK;
3270 if (fold_builtin_next_arg (*expr_p, true))
3272 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3273 return GS_OK;
3275 break;
3278 default:
3281 if (fndecl && fndecl_built_in_p (fndecl))
3283 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3284 if (new_tree && new_tree != *expr_p)
3286 /* There was a transformation of this call which computes the
3287 same value, but in a more efficient way. Return and try
3288 again. */
3289 *expr_p = new_tree;
3290 return GS_OK;
3294 /* Remember the original function pointer type. */
3295 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3297 /* There is a sequence point before the call, so any side effects in
3298 the calling expression must occur before the actual call. Force
3299 gimplify_expr to use an internal post queue. */
3300 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3301 is_gimple_call_addr, fb_rvalue);
3303 nargs = call_expr_nargs (*expr_p);
3305 /* Get argument types for verification. */
3306 fndecl = get_callee_fndecl (*expr_p);
3307 parms = NULL_TREE;
3308 if (fndecl)
3309 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3310 else
3311 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3313 if (fndecl && DECL_ARGUMENTS (fndecl))
3314 p = DECL_ARGUMENTS (fndecl);
3315 else if (parms)
3316 p = parms;
3317 else
3318 p = NULL_TREE;
3319 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3322 /* If the last argument is __builtin_va_arg_pack () and it is not
3323 passed as a named argument, decrease the number of CALL_EXPR
3324 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3325 if (!p
3326 && i < nargs
3327 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3329 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3330 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3332 if (last_arg_fndecl
3333 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3335 tree call = *expr_p;
3337 --nargs;
3338 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3339 CALL_EXPR_FN (call),
3340 nargs, CALL_EXPR_ARGP (call));
3342 /* Copy all CALL_EXPR flags, location and block, except
3343 CALL_EXPR_VA_ARG_PACK flag. */
3344 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3345 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3346 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3347 = CALL_EXPR_RETURN_SLOT_OPT (call);
3348 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3349 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3351 /* Set CALL_EXPR_VA_ARG_PACK. */
3352 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3356 /* If the call returns twice then after building the CFG the call
3357 argument computations will no longer dominate the call because
3358 we add an abnormal incoming edge to the call. So do not use SSA
3359 vars there. */
3360 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3362 /* Gimplify the function arguments. */
3363 if (nargs > 0)
3365 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3366 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3367 PUSH_ARGS_REVERSED ? i-- : i++)
3369 enum gimplify_status t;
3371 /* Avoid gimplifying the second argument to va_start, which needs to
3372 be the plain PARM_DECL. */
3373 if ((i != 1) || !builtin_va_start_p)
3375 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3376 EXPR_LOCATION (*expr_p), ! returns_twice);
3378 if (t == GS_ERROR)
3379 ret = GS_ERROR;
3384 /* Gimplify the static chain. */
3385 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3387 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3388 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3389 else
3391 enum gimplify_status t;
3392 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3393 EXPR_LOCATION (*expr_p), ! returns_twice);
3394 if (t == GS_ERROR)
3395 ret = GS_ERROR;
3399 /* Verify the function result. */
3400 if (want_value && fndecl
3401 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3403 error_at (loc, "using result of function returning %<void%>");
3404 ret = GS_ERROR;
3407 /* Try this again in case gimplification exposed something. */
3408 if (ret != GS_ERROR)
3410 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3412 if (new_tree && new_tree != *expr_p)
3414 /* There was a transformation of this call which computes the
3415 same value, but in a more efficient way. Return and try
3416 again. */
3417 *expr_p = new_tree;
3418 return GS_OK;
3421 else
3423 *expr_p = error_mark_node;
3424 return GS_ERROR;
3427 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3428 decl. This allows us to eliminate redundant or useless
3429 calls to "const" functions. */
3430 if (TREE_CODE (*expr_p) == CALL_EXPR)
3432 int flags = call_expr_flags (*expr_p);
3433 if (flags & (ECF_CONST | ECF_PURE)
3434 /* An infinite loop is considered a side effect. */
3435 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3436 TREE_SIDE_EFFECTS (*expr_p) = 0;
3439 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3440 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3441 form and delegate the creation of a GIMPLE_CALL to
3442 gimplify_modify_expr. This is always possible because when
3443 WANT_VALUE is true, the caller wants the result of this call into
3444 a temporary, which means that we will emit an INIT_EXPR in
3445 internal_get_tmp_var which will then be handled by
3446 gimplify_modify_expr. */
3447 if (!want_value)
3449 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3450 have to do is replicate it as a GIMPLE_CALL tuple. */
3451 gimple_stmt_iterator gsi;
3452 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3453 notice_special_calls (call);
3454 gimplify_seq_add_stmt (pre_p, call);
3455 gsi = gsi_last (*pre_p);
3456 maybe_fold_stmt (&gsi);
3457 *expr_p = NULL_TREE;
3459 else
3460 /* Remember the original function type. */
3461 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3462 CALL_EXPR_FN (*expr_p));
3464 return ret;
3467 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3468 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3470 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3471 condition is true or false, respectively. If null, we should generate
3472 our own to skip over the evaluation of this specific expression.
3474 LOCUS is the source location of the COND_EXPR.
3476 This function is the tree equivalent of do_jump.
3478 shortcut_cond_r should only be called by shortcut_cond_expr. */
3480 static tree
3481 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3482 location_t locus)
3484 tree local_label = NULL_TREE;
3485 tree t, expr = NULL;
3487 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3488 retain the shortcut semantics. Just insert the gotos here;
3489 shortcut_cond_expr will append the real blocks later. */
3490 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3492 location_t new_locus;
3494 /* Turn if (a && b) into
3496 if (a); else goto no;
3497 if (b) goto yes; else goto no;
3498 (no:) */
3500 if (false_label_p == NULL)
3501 false_label_p = &local_label;
3503 /* Keep the original source location on the first 'if'. */
3504 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3505 append_to_statement_list (t, &expr);
3507 /* Set the source location of the && on the second 'if'. */
3508 new_locus = rexpr_location (pred, locus);
3509 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3510 new_locus);
3511 append_to_statement_list (t, &expr);
3513 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3515 location_t new_locus;
3517 /* Turn if (a || b) into
3519 if (a) goto yes;
3520 if (b) goto yes; else goto no;
3521 (yes:) */
3523 if (true_label_p == NULL)
3524 true_label_p = &local_label;
3526 /* Keep the original source location on the first 'if'. */
3527 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3528 append_to_statement_list (t, &expr);
3530 /* Set the source location of the || on the second 'if'. */
3531 new_locus = rexpr_location (pred, locus);
3532 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3533 new_locus);
3534 append_to_statement_list (t, &expr);
3536 else if (TREE_CODE (pred) == COND_EXPR
3537 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3538 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3540 location_t new_locus;
3542 /* As long as we're messing with gotos, turn if (a ? b : c) into
3543 if (a)
3544 if (b) goto yes; else goto no;
3545 else
3546 if (c) goto yes; else goto no;
3548 Don't do this if one of the arms has void type, which can happen
3549 in C++ when the arm is throw. */
3551 /* Keep the original source location on the first 'if'. Set the source
3552 location of the ? on the second 'if'. */
3553 new_locus = rexpr_location (pred, locus);
3554 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3555 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3556 false_label_p, locus),
3557 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3558 false_label_p, new_locus));
3560 else
3562 expr = build3 (COND_EXPR, void_type_node, pred,
3563 build_and_jump (true_label_p),
3564 build_and_jump (false_label_p));
3565 SET_EXPR_LOCATION (expr, locus);
3568 if (local_label)
3570 t = build1 (LABEL_EXPR, void_type_node, local_label);
3571 append_to_statement_list (t, &expr);
3574 return expr;
3577 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3578 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3579 statement, if it is the last one. Otherwise, return NULL. */
3581 static tree
3582 find_goto (tree expr)
3584 if (!expr)
3585 return NULL_TREE;
3587 if (TREE_CODE (expr) == GOTO_EXPR)
3588 return expr;
3590 if (TREE_CODE (expr) != STATEMENT_LIST)
3591 return NULL_TREE;
3593 tree_stmt_iterator i = tsi_start (expr);
3595 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3596 tsi_next (&i);
3598 if (!tsi_one_before_end_p (i))
3599 return NULL_TREE;
3601 return find_goto (tsi_stmt (i));
3604 /* Same as find_goto, except that it returns NULL if the destination
3605 is not a LABEL_DECL. */
3607 static inline tree
3608 find_goto_label (tree expr)
3610 tree dest = find_goto (expr);
3611 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3612 return dest;
3613 return NULL_TREE;
3616 /* Given a conditional expression EXPR with short-circuit boolean
3617 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3618 predicate apart into the equivalent sequence of conditionals. */
3620 static tree
3621 shortcut_cond_expr (tree expr)
3623 tree pred = TREE_OPERAND (expr, 0);
3624 tree then_ = TREE_OPERAND (expr, 1);
3625 tree else_ = TREE_OPERAND (expr, 2);
3626 tree true_label, false_label, end_label, t;
3627 tree *true_label_p;
3628 tree *false_label_p;
3629 bool emit_end, emit_false, jump_over_else;
/* THEN_SE / ELSE_SE track whether the corresponding arm contains code
   with side effects, i.e. code that actually has to be emitted.  */
3630 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3631 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3633 /* First do simple transformations. */
3634 if (!else_se)
3636 /* If there is no 'else', turn
3637 if (a && b) then c
3638 into
3639 if (a) if (b) then c. */
3640 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3642 /* Keep the original source location on the first 'if'. */
3643 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3644 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3645 /* Set the source location of the && on the second 'if'. */
3646 if (rexpr_has_location (pred))
3647 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3648 then_ = shortcut_cond_expr (expr);
3649 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3650 pred = TREE_OPERAND (pred, 0);
3651 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3652 SET_EXPR_LOCATION (expr, locus);
3656 if (!then_se)
3658 /* If there is no 'then', turn
3659 if (a || b); else d
3660 into
3661 if (a); else if (b); else d. */
3662 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3664 /* Keep the original source location on the first 'if'. */
3665 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3666 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3667 /* Set the source location of the || on the second 'if'. */
3668 if (rexpr_has_location (pred))
3669 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3670 else_ = shortcut_cond_expr (expr);
3671 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3672 pred = TREE_OPERAND (pred, 0);
3673 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3674 SET_EXPR_LOCATION (expr, locus);
3678 /* If we're done, great. */
3679 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3680 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3681 return expr;
3683 /* Otherwise we need to mess with gotos. Change
3684 if (a) c; else d;
3686 if (a); else goto no;
3687 c; goto end;
3688 no: d; end:
3689 and recursively gimplify the condition. */
3691 true_label = false_label = end_label = NULL_TREE;
3693 /* If our arms just jump somewhere, hijack those labels so we don't
3694 generate jumps to jumps. */
3696 if (tree then_goto = find_goto_label (then_))
3698 true_label = GOTO_DESTINATION (then_goto);
3699 then_ = NULL;
3700 then_se = false;
3703 if (tree else_goto = find_goto_label (else_))
3705 false_label = GOTO_DESTINATION (else_goto);
3706 else_ = NULL;
3707 else_se = false;
3710 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3711 if (true_label)
3712 true_label_p = &true_label;
3713 else
3714 true_label_p = NULL;
3716 /* The 'else' branch also needs a label if it contains interesting code. */
3717 if (false_label || else_se)
3718 false_label_p = &false_label;
3719 else
3720 false_label_p = NULL;
3722 /* If there was nothing else in our arms, just forward the label(s). */
3723 if (!then_se && !else_se)
3724 return shortcut_cond_r (pred, true_label_p, false_label_p,
3725 EXPR_LOC_OR_LOC (expr, input_location));
3727 /* If our last subexpression already has a terminal label, reuse it. */
3728 if (else_se)
3729 t = expr_last (else_);
3730 else if (then_se)
3731 t = expr_last (then_);
3732 else
3733 t = NULL;
3734 if (t && TREE_CODE (t) == LABEL_EXPR)
3735 end_label = LABEL_EXPR_LABEL (t);
3737 /* If we don't care about jumping to the 'else' branch, jump to the end
3738 if the condition is false. */
3739 if (!false_label_p)
3740 false_label_p = &end_label;
3742 /* We only want to emit these labels if we aren't hijacking them. */
3743 emit_end = (end_label == NULL_TREE);
3744 emit_false = (false_label == NULL_TREE);
3746 /* We only emit the jump over the else clause if we have to--if the
3747 then clause may fall through. Otherwise we can wind up with a
3748 useless jump and a useless label at the end of gimplified code,
3749 which will cause us to think that this conditional as a whole
3750 falls through even if it doesn't. If we then inline a function
3751 which ends with such a condition, that can cause us to issue an
3752 inappropriate warning about control reaching the end of a
3753 non-void function. */
3754 jump_over_else = block_may_fallthru (then_);
3756 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3757 EXPR_LOC_OR_LOC (expr, input_location));
/* Rebuild EXPR as a flat statement list: PRED (now a series of
   conditional gotos), THEN_, then optionally a jump over the else,
   the false label, ELSE_, and finally the end label.  */
3759 expr = NULL;
3760 append_to_statement_list (pred, &expr);
3762 append_to_statement_list (then_, &expr);
3763 if (else_se)
3765 if (jump_over_else)
3767 tree last = expr_last (expr);
3768 t = build_and_jump (&end_label);
3769 if (rexpr_has_location (last))
3770 SET_EXPR_LOCATION (t, rexpr_location (last));
3771 append_to_statement_list (t, &expr);
3773 if (emit_false)
3775 t = build1 (LABEL_EXPR, void_type_node, false_label);
3776 append_to_statement_list (t, &expr);
3778 append_to_statement_list (else_, &expr);
3780 if (emit_end && end_label)
3782 t = build1 (LABEL_EXPR, void_type_node, end_label);
3783 append_to_statement_list (t, &expr);
3786 return expr;
3789 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3791 tree
3792 gimple_boolify (tree expr)
3794 tree type = TREE_TYPE (expr);
3795 location_t loc = EXPR_LOCATION (expr);
3797 if (TREE_CODE (expr) == NE_EXPR
3798 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3799 && integer_zerop (TREE_OPERAND (expr, 1)))
3801 tree call = TREE_OPERAND (expr, 0);
3802 tree fn = get_callee_fndecl (call);
3804 /* For __builtin_expect ((long) (x), y) recurse into x as well
3805 if x is truth_value_p. */
3806 if (fn
3807 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3808 && call_expr_nargs (call) == 2)
3810 tree arg = CALL_EXPR_ARG (call, 0);
3811 if (arg)
3813 if (TREE_CODE (arg) == NOP_EXPR
3814 && TREE_TYPE (arg) == TREE_TYPE (call))
3815 arg = TREE_OPERAND (arg, 0);
3816 if (truth_value_p (TREE_CODE (arg)))
3818 arg = gimple_boolify (arg);
3819 CALL_EXPR_ARG (call, 0)
3820 = fold_convert_loc (loc, TREE_TYPE (call), arg);
/* Now boolify EXPR itself, dispatching on its tree code.  */
3826 switch (TREE_CODE (expr))
3828 case TRUTH_AND_EXPR:
3829 case TRUTH_OR_EXPR:
3830 case TRUTH_XOR_EXPR:
3831 case TRUTH_ANDIF_EXPR:
3832 case TRUTH_ORIF_EXPR:
3833 /* Also boolify the arguments of truth exprs. */
3834 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3835 /* FALLTHRU */
3837 case TRUTH_NOT_EXPR:
3838 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3840 /* These expressions always produce boolean results. */
3841 if (TREE_CODE (type) != BOOLEAN_TYPE)
3842 TREE_TYPE (expr) = boolean_type_node;
3843 return expr;
3845 case ANNOTATE_EXPR:
3846 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3848 case annot_expr_ivdep_kind:
3849 case annot_expr_unroll_kind:
3850 case annot_expr_no_vector_kind:
3851 case annot_expr_vector_kind:
3852 case annot_expr_parallel_kind:
3853 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3854 if (TREE_CODE (type) != BOOLEAN_TYPE)
3855 TREE_TYPE (expr) = boolean_type_node;
3856 return expr;
3857 default:
3858 gcc_unreachable ();
3861 default:
3862 if (COMPARISON_CLASS_P (expr))
3864 /* These expressions always produce boolean results. */
3865 if (TREE_CODE (type) != BOOLEAN_TYPE)
3866 TREE_TYPE (expr) = boolean_type_node;
3867 return expr;
3869 /* Other expressions that get here must have boolean values, but
3870 might need to be converted to the appropriate mode. */
3871 if (TREE_CODE (type) == BOOLEAN_TYPE)
3872 return expr;
3873 return fold_convert_loc (loc, boolean_type_node, expr);
3877 /* Given a conditional expression *EXPR_P without side effects, gimplify
3878 its operands. New statements are inserted to PRE_P. */
3880 static enum gimplify_status
3881 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3883 tree expr = *expr_p, cond;
3884 enum gimplify_status ret, tret;
3885 enum tree_code code;
3887 cond = gimple_boolify (COND_EXPR_COND (expr));
3889 /* We need to handle && and || specially, as their gimplification
3890 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3891 code = TREE_CODE (cond);
3892 if (code == TRUTH_ANDIF_EXPR)
3893 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3894 else if (code == TRUTH_ORIF_EXPR)
3895 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3896 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3897 COND_EXPR_COND (*expr_p) = cond;
3899 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3900 is_gimple_val, fb_rvalue);
3901 ret = MIN (ret, tret);
3902 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3903 is_gimple_val, fb_rvalue);
3905 return MIN (ret, tret);
3908 /* Return true if evaluating EXPR could trap.
3909 EXPR is GENERIC, while tree_could_trap_p can be called
3910 only on GIMPLE. */
3912 bool
3913 generic_expr_could_trap_p (tree expr)
3915 unsigned i, n;
3917 if (!expr || is_gimple_val (expr))
3918 return false;
3920 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3921 return true;
3923 n = TREE_OPERAND_LENGTH (expr);
3924 for (i = 0; i < n; i++)
3925 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3926 return true;
3928 return false;
3931 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3932 into
3934 if (p) if (p)
3935 t1 = a; a;
3936 else or else
3937 t1 = b; b;
3940 The second form is used when *EXPR_P is of type void.
3942 PRE_P points to the list where side effects that must happen before
3943 *EXPR_P should be stored. */
3945 static enum gimplify_status
3946 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3948 tree expr = *expr_p;
3949 tree type = TREE_TYPE (expr);
3950 location_t loc = EXPR_LOCATION (expr);
3951 tree tmp, arm1, arm2;
3952 enum gimplify_status ret;
3953 tree label_true, label_false, label_cont;
3954 bool have_then_clause_p, have_else_clause_p;
3955 gcond *cond_stmt;
3956 enum tree_code pred_code;
/* SEQ accumulates the gimplified conditional; it is spliced into PRE_P
   once the whole construct has been lowered.  */
3957 gimple_seq seq = NULL;
3959 /* If this COND_EXPR has a value, copy the values into a temporary within
3960 the arms. */
3961 if (!VOID_TYPE_P (type))
3963 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3964 tree result;
3966 /* If either an rvalue is ok or we do not require an lvalue, create the
3967 temporary. But we cannot do that if the type is addressable. */
3968 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3969 && !TREE_ADDRESSABLE (type))
3971 if (gimplify_ctxp->allow_rhs_cond_expr
3972 /* If either branch has side effects or could trap, it can't be
3973 evaluated unconditionally. */
3974 && !TREE_SIDE_EFFECTS (then_)
3975 && !generic_expr_could_trap_p (then_)
3976 && !TREE_SIDE_EFFECTS (else_)
3977 && !generic_expr_could_trap_p (else_))
3978 return gimplify_pure_cond_expr (expr_p, pre_p);
3980 tmp = create_tmp_var (type, "iftmp");
3981 result = tmp;
3984 /* Otherwise, only create and copy references to the values. */
3985 else
3987 type = build_pointer_type (type);
3989 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3990 then_ = build_fold_addr_expr_loc (loc, then_);
3992 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3993 else_ = build_fold_addr_expr_loc (loc, else_);
3995 expr
3996 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3998 tmp = create_tmp_var (type, "iftmp");
3999 result = build_simple_mem_ref_loc (loc, tmp);
4002 /* Build the new then clause, `tmp = then_;'. But don't build the
4003 assignment if the value is void; in C++ it can be if it's a throw. */
4004 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4005 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
4007 /* Similarly, build the new else clause, `tmp = else_;'. */
4008 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4009 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
4011 TREE_TYPE (expr) = void_type_node;
4012 recalculate_side_effects (expr);
4014 /* Move the COND_EXPR to the prequeue. */
4015 gimplify_stmt (&expr, pre_p);
4017 *expr_p = result;
4018 return GS_ALL_DONE;
4021 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4022 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4023 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4024 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4026 /* Make sure the condition has BOOLEAN_TYPE. */
4027 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4029 /* Break apart && and || conditions. */
4030 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4031 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4033 expr = shortcut_cond_expr (expr);
4035 if (expr != *expr_p)
4037 *expr_p = expr;
4039 /* We can't rely on gimplify_expr to re-gimplify the expanded
4040 form properly, as cleanups might cause the target labels to be
4041 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4042 set up a conditional context. */
4043 gimple_push_condition ();
4044 gimplify_stmt (expr_p, &seq);
4045 gimple_pop_condition (pre_p);
4046 gimple_seq_add_seq (pre_p, seq);
4048 return GS_ALL_DONE;
4052 /* Now do the normal gimplification. */
4054 /* Gimplify condition. */
4055 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
4056 fb_rvalue);
4057 if (ret == GS_ERROR)
4058 return GS_ERROR;
4059 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4061 gimple_push_condition ();
/* If the arms are plain gotos, reuse their destination labels so we do
   not generate jumps to jumps.  */
4063 have_then_clause_p = have_else_clause_p = false;
4064 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4065 if (label_true
4066 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4067 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4068 have different locations, otherwise we end up with incorrect
4069 location information on the branches. */
4070 && (optimize
4071 || !EXPR_HAS_LOCATION (expr)
4072 || !rexpr_has_location (label_true)
4073 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4075 have_then_clause_p = true;
4076 label_true = GOTO_DESTINATION (label_true);
4078 else
4079 label_true = create_artificial_label (UNKNOWN_LOCATION);
4080 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4081 if (label_false
4082 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4083 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4084 have different locations, otherwise we end up with incorrect
4085 location information on the branches. */
4086 && (optimize
4087 || !EXPR_HAS_LOCATION (expr)
4088 || !rexpr_has_location (label_false)
4089 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4091 have_else_clause_p = true;
4092 label_false = GOTO_DESTINATION (label_false);
4094 else
4095 label_false = create_artificial_label (UNKNOWN_LOCATION);
4097 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4098 &arm2);
4099 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4100 label_false);
4101 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4102 gimplify_seq_add_stmt (&seq, cond_stmt);
4103 gimple_stmt_iterator gsi = gsi_last (seq);
4104 maybe_fold_stmt (&gsi);
4106 label_cont = NULL_TREE;
4107 if (!have_then_clause_p)
4109 /* For if (...) {} else { code; } put label_true after
4110 the else block. */
4111 if (TREE_OPERAND (expr, 1) == NULL_TREE
4112 && !have_else_clause_p
4113 && TREE_OPERAND (expr, 2) != NULL_TREE)
4114 label_cont = label_true;
4115 else
4117 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4118 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4119 /* For if (...) { code; } else {} or
4120 if (...) { code; } else goto label; or
4121 if (...) { code; return; } else { ... }
4122 label_cont isn't needed. */
4123 if (!have_else_clause_p
4124 && TREE_OPERAND (expr, 2) != NULL_TREE
4125 && gimple_seq_may_fallthru (seq))
4127 gimple *g;
4128 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4130 g = gimple_build_goto (label_cont);
4132 /* GIMPLE_COND's are very low level; they have embedded
4133 gotos. This particular embedded goto should not be marked
4134 with the location of the original COND_EXPR, as it would
4135 correspond to the COND_EXPR's condition, not the ELSE or the
4136 THEN arms. To avoid marking it with the wrong location, flag
4137 it as "no location". */
4138 gimple_set_do_not_emit_location (g);
4140 gimplify_seq_add_stmt (&seq, g);
4144 if (!have_else_clause_p)
4146 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4147 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4149 if (label_cont)
4150 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4152 gimple_pop_condition (pre_p);
4153 gimple_seq_add_seq (pre_p, seq);
4155 if (ret == GS_ERROR)
4156 ; /* Do nothing. */
4157 else if (have_then_clause_p || have_else_clause_p)
4158 ret = GS_ALL_DONE;
4159 else
4161 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4162 expr = TREE_OPERAND (expr, 0);
4163 gimplify_stmt (&expr, pre_p);
4166 *expr_p = NULL;
4167 return ret;
4170 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4171 to be marked addressable.
4173 We cannot rely on such an expression being directly markable if a temporary
4174 has been created by the gimplification. In this case, we create another
4175 temporary and initialize it with a copy, which will become a store after we
4176 mark it addressable. This can happen if the front-end passed us something
4177 that it could not mark addressable yet, like a Fortran pass-by-reference
4178 parameter (int) floatvar. */
4180 static void
4181 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4183 while (handled_component_p (*expr_p))
4184 expr_p = &TREE_OPERAND (*expr_p, 0);
4185 if (is_gimple_reg (*expr_p))
4187 /* Do not allow an SSA name as the temporary. */
4188 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4189 DECL_GIMPLE_REG_P (var) = 0;
4190 *expr_p = var;
4194 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4195 a call to __builtin_memcpy. */
4197 static enum gimplify_status
4198 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4199 gimple_seq *seq_p)
4201 tree t, to, to_ptr, from, from_ptr;
4202 gcall *gs;
4203 location_t loc = EXPR_LOCATION (*expr_p);
4205 to = TREE_OPERAND (*expr_p, 0);
4206 from = TREE_OPERAND (*expr_p, 1);
4208 /* Mark the RHS addressable. Beware that it may not be possible to do so
4209 directly if a temporary has been created by the gimplification. */
4210 prepare_gimple_addressable (&from, seq_p);
4212 mark_addressable (from);
4213 from_ptr = build_fold_addr_expr_loc (loc, from);
4214 gimplify_arg (&from_ptr, seq_p, loc);
4216 mark_addressable (to);
4217 to_ptr = build_fold_addr_expr_loc (loc, to);
4218 gimplify_arg (&to_ptr, seq_p, loc);
4220 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4222 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4224 if (want_value)
4226 /* tmp = memcpy() */
4227 t = create_tmp_var (TREE_TYPE (to_ptr));
4228 gimple_call_set_lhs (gs, t);
4229 gimplify_seq_add_stmt (seq_p, gs);
4231 *expr_p = build_simple_mem_ref (t);
4232 return GS_ALL_DONE;
4235 gimplify_seq_add_stmt (seq_p, gs);
4236 *expr_p = NULL;
4237 return GS_ALL_DONE;
4240 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4241 a call to __builtin_memset. In this case we know that the RHS is
4242 a CONSTRUCTOR with an empty element list. */
4244 static enum gimplify_status
4245 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4246 gimple_seq *seq_p)
4248 tree t, from, to, to_ptr;
4249 gcall *gs;
4250 location_t loc = EXPR_LOCATION (*expr_p);
4252 /* Assert our assumptions, to abort instead of producing wrong code
4253 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4254 not be immediately exposed. */
4255 from = TREE_OPERAND (*expr_p, 1);
4256 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4257 from = TREE_OPERAND (from, 0);
4259 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4260 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4262 /* Now proceed. */
4263 to = TREE_OPERAND (*expr_p, 0);
4265 to_ptr = build_fold_addr_expr_loc (loc, to);
4266 gimplify_arg (&to_ptr, seq_p, loc);
4267 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4269 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4271 if (want_value)
4273 /* tmp = memset() */
4274 t = create_tmp_var (TREE_TYPE (to_ptr));
4275 gimple_call_set_lhs (gs, t);
4276 gimplify_seq_add_stmt (seq_p, gs);
4278 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4279 return GS_ALL_DONE;
4282 gimplify_seq_add_stmt (seq_p, gs);
4283 *expr_p = NULL;
4284 return GS_ALL_DONE;
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

/* walk_tree callback.  TP is the subtree being visited, WALK_SUBTREES
   is cleared to prune the walk below types and decls, and XDATA is the
   gimplify_init_ctor_preeval_data describing the lhs.  Returns the
   offending tree (stopping the walk) on potential overlap, NULL
   otherwise.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer-typed parameter of the callee: the callee
	 could write through it into storage aliasing the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   *EXPR_P is a constructor element value (possibly itself a nested
   CONSTRUCTOR).  New statements go to PRE_P/POST_P.  On gimplification
   error *EXPR_P is set to NULL so the caller can skip the element.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* Signal the error to the caller via a NULL element value.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

    var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

/* OBJECT is the array being initialized, [LOWER, UPPER] the inclusive
   index range, VALUE the (shared) initializer for every element, and
   ARRAY_ELT_TYPE the element type.  The loop statements are appended
   to PRE_P; CLEARED is forwarded to recursive constructor stores.  */

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4490 /* Return true if FDECL is accessing a field that is zero sized. */
4492 static bool
4493 zero_sized_field_decl (const_tree fdecl)
4495 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4496 && integer_zerop (DECL_SIZE (fdecl)))
4497 return true;
4498 return false;
4501 /* Return true if TYPE is zero sized. */
4503 static bool
4504 zero_sized_type (const_tree type)
4506 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4507 && integer_zerop (TYPE_SIZE (type)))
4508 return true;
4509 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, cache the element type; it also flags array handling
     below (NULL means record/union-style COMPONENT_REF stores).  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* When the whole object was block-cleared, zero elements are
	 already in place.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested aggregate constructors; vectors keep their
	 CONSTRUCTOR form and are assigned whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  /* The INIT_EXPR node is fully consumed by gimplification;
	     release it eagerly.  */
	  ggc_free (init);
	}
    }
}
4601 /* Return the appropriate RHS predicate for this LHS. */
4603 gimple_predicate
4604 rhs_predicate_for (tree lhs)
4606 if (is_gimple_reg (lhs))
4607 return is_gimple_reg_rhs_or_call;
4608 else
4609 return is_gimple_mem_rhs_or_call;
4612 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4613 before the LHS has been gimplified. */
4615 static gimple_predicate
4616 initial_rhs_predicate_for (tree lhs)
4618 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4619 return is_gimple_reg_rhs_or_call;
4620 else
4621 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   *EXPR_P is the COMPOUND_LITERAL_EXPR; GIMPLE_TEST_F and FALLBACK are
   the predicate/fallback the caller is gimplifying against, used to
   decide whether the initializer can be substituted directly.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  The original
   ORIG_CTOR is left untouched; copies are made lazily (copy-on-write)
   only when a replacement actually happens.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only substitute the literal's initializer when neither the
	     literal nor its decl has its address taken.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: unshare the node and its element vector before
	 mutating, so ORIG_CTOR stays intact for other users.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.

   *EXPR_P is the MODIFY_EXPR/INIT_EXPR whose RHS is a CONSTRUCTOR;
   WANT_VALUE indicates the caller needs the assignment's value, in
   which case *EXPR_P is set to the (possibly unshared) lhs object.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */

	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	/* Record side effects before the CLEARED path below zaps
	   TREE_SIDE_EFFECTS on the ctor.  */
	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    /* An element no longer constant after gimplification makes
	       the whole ctor unsuitable as a static initializer.  */
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      /* Unshare the lhs when it is also returned as the value, to
	 avoid sharing the tree between the store and the use.  */
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5088 /* Given a pointer value OP0, return a simplified version of an
5089 indirection through OP0, or NULL_TREE if no simplification is
5090 possible. This may only be applied to a rhs of an expression.
5091 Note that the resulting type may be different from the type pointed
5092 to in the sense that it is still compatible from the langhooks
5093 point of view. */
5095 static tree
5096 gimple_fold_indirect_ref_rhs (tree t)
5098 return gimple_fold_indirect_ref (t);
5101 /* Subroutine of gimplify_modify_expr to do simplifications of
5102 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5103 something changes. */
5105 static enum gimplify_status
5106 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5107 gimple_seq *pre_p, gimple_seq *post_p,
5108 bool want_value)
5110 enum gimplify_status ret = GS_UNHANDLED;
5111 bool changed;
5115 changed = false;
5116 switch (TREE_CODE (*from_p))
5118 case VAR_DECL:
5119 /* If we're assigning from a read-only variable initialized with
5120 a constructor, do the direct assignment from the constructor,
5121 but only if neither source nor target are volatile since this
5122 latter assignment might end up being done on a per-field basis. */
5123 if (DECL_INITIAL (*from_p)
5124 && TREE_READONLY (*from_p)
5125 && !TREE_THIS_VOLATILE (*from_p)
5126 && !TREE_THIS_VOLATILE (*to_p)
5127 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5129 tree old_from = *from_p;
5130 enum gimplify_status subret;
5132 /* Move the constructor into the RHS. */
5133 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5135 /* Let's see if gimplify_init_constructor will need to put
5136 it in memory. */
5137 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5138 false, true);
5139 if (subret == GS_ERROR)
5141 /* If so, revert the change. */
5142 *from_p = old_from;
5144 else
5146 ret = GS_OK;
5147 changed = true;
5150 break;
5151 case INDIRECT_REF:
5153 /* If we have code like
5155 *(const A*)(A*)&x
5157 where the type of "x" is a (possibly cv-qualified variant
5158 of "A"), treat the entire expression as identical to "x".
5159 This kind of code arises in C++ when an object is bound
5160 to a const reference, and if "x" is a TARGET_EXPR we want
5161 to take advantage of the optimization below. */
5162 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5163 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5164 if (t)
5166 if (TREE_THIS_VOLATILE (t) != volatile_p)
5168 if (DECL_P (t))
5169 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5170 build_fold_addr_expr (t));
5171 if (REFERENCE_CLASS_P (t))
5172 TREE_THIS_VOLATILE (t) = volatile_p;
5174 *from_p = t;
5175 ret = GS_OK;
5176 changed = true;
5178 break;
5181 case TARGET_EXPR:
5183 /* If we are initializing something from a TARGET_EXPR, strip the
5184 TARGET_EXPR and initialize it directly, if possible. This can't
5185 be done if the initializer is void, since that implies that the
5186 temporary is set in some non-trivial way.
5188 ??? What about code that pulls out the temp and uses it
5189 elsewhere? I think that such code never uses the TARGET_EXPR as
5190 an initializer. If I'm wrong, we'll die because the temp won't
5191 have any RTL. In that case, I guess we'll need to replace
5192 references somehow. */
5193 tree init = TARGET_EXPR_INITIAL (*from_p);
5195 if (init
5196 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5197 || !TARGET_EXPR_NO_ELIDE (*from_p))
5198 && !VOID_TYPE_P (TREE_TYPE (init)))
5200 *from_p = init;
5201 ret = GS_OK;
5202 changed = true;
5205 break;
5207 case COMPOUND_EXPR:
5208 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5209 caught. */
5210 gimplify_compound_expr (from_p, pre_p, true);
5211 ret = GS_OK;
5212 changed = true;
5213 break;
5215 case CONSTRUCTOR:
5216 /* If we already made some changes, let the front end have a
5217 crack at this before we break it down. */
5218 if (ret != GS_UNHANDLED)
5219 break;
5220 /* If we're initializing from a CONSTRUCTOR, break this into
5221 individual MODIFY_EXPRs. */
5222 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5223 false);
5225 case COND_EXPR:
5226 /* If we're assigning to a non-register type, push the assignment
5227 down into the branches. This is mandatory for ADDRESSABLE types,
5228 since we cannot generate temporaries for such, but it saves a
5229 copy in other cases as well. */
5230 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5232 /* This code should mirror the code in gimplify_cond_expr. */
5233 enum tree_code code = TREE_CODE (*expr_p);
5234 tree cond = *from_p;
5235 tree result = *to_p;
5237 ret = gimplify_expr (&result, pre_p, post_p,
5238 is_gimple_lvalue, fb_lvalue);
5239 if (ret != GS_ERROR)
5240 ret = GS_OK;
5242 /* If we are going to write RESULT more than once, clear
5243 TREE_READONLY flag, otherwise we might incorrectly promote
5244 the variable to static const and initialize it at compile
5245 time in one of the branches. */
5246 if (VAR_P (result)
5247 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5248 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5249 TREE_READONLY (result) = 0;
5250 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5251 TREE_OPERAND (cond, 1)
5252 = build2 (code, void_type_node, result,
5253 TREE_OPERAND (cond, 1));
5254 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5255 TREE_OPERAND (cond, 2)
5256 = build2 (code, void_type_node, unshare_expr (result),
5257 TREE_OPERAND (cond, 2));
5259 TREE_TYPE (cond) = void_type_node;
5260 recalculate_side_effects (cond);
5262 if (want_value)
5264 gimplify_and_add (cond, pre_p);
5265 *expr_p = unshare_expr (result);
5267 else
5268 *expr_p = cond;
5269 return ret;
5271 break;
5273 case CALL_EXPR:
5274 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5275 return slot so that we don't generate a temporary. */
5276 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5277 && aggregate_value_p (*from_p, *from_p))
5279 bool use_target;
5281 if (!(rhs_predicate_for (*to_p))(*from_p))
5282 /* If we need a temporary, *to_p isn't accurate. */
5283 use_target = false;
5284 /* It's OK to use the return slot directly unless it's an NRV. */
5285 else if (TREE_CODE (*to_p) == RESULT_DECL
5286 && DECL_NAME (*to_p) == NULL_TREE
5287 && needs_to_live_in_memory (*to_p))
5288 use_target = true;
5289 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5290 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5291 /* Don't force regs into memory. */
5292 use_target = false;
5293 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5294 /* It's OK to use the target directly if it's being
5295 initialized. */
5296 use_target = true;
5297 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5298 != INTEGER_CST)
5299 /* Always use the target and thus RSO for variable-sized types.
5300 GIMPLE cannot deal with a variable-sized assignment
5301 embedded in a call statement. */
5302 use_target = true;
5303 else if (TREE_CODE (*to_p) != SSA_NAME
5304 && (!is_gimple_variable (*to_p)
5305 || needs_to_live_in_memory (*to_p)))
5306 /* Don't use the original target if it's already addressable;
5307 if its address escapes, and the called function uses the
5308 NRV optimization, a conforming program could see *to_p
5309 change before the called function returns; see c++/19317.
5310 When optimizing, the return_slot pass marks more functions
5311 as safe after we have escape info. */
5312 use_target = false;
5313 else
5314 use_target = true;
5316 if (use_target)
5318 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5319 mark_addressable (*to_p);
5322 break;
5324 case WITH_SIZE_EXPR:
5325 /* Likewise for calls that return an aggregate of non-constant size,
5326 since we would not be able to generate a temporary at all. */
5327 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5329 *from_p = TREE_OPERAND (*from_p, 0);
5330 /* We don't change ret in this case because the
5331 WITH_SIZE_EXPR might have been added in
5332 gimplify_modify_expr, so returning GS_OK would lead to an
5333 infinite loop. */
5334 changed = true;
5336 break;
5338 /* If we're initializing from a container, push the initialization
5339 inside it. */
5340 case CLEANUP_POINT_EXPR:
5341 case BIND_EXPR:
5342 case STATEMENT_LIST:
5344 tree wrap = *from_p;
5345 tree t;
5347 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5348 fb_lvalue);
5349 if (ret != GS_ERROR)
5350 ret = GS_OK;
5352 t = voidify_wrapper_expr (wrap, *expr_p);
5353 gcc_assert (t == *expr_p);
5355 if (want_value)
5357 gimplify_and_add (wrap, pre_p);
5358 *expr_p = unshare_expr (*to_p);
5360 else
5361 *expr_p = wrap;
5362 return GS_OK;
5365 case COMPOUND_LITERAL_EXPR:
5367 tree complit = TREE_OPERAND (*expr_p, 1);
5368 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5369 tree decl = DECL_EXPR_DECL (decl_s);
5370 tree init = DECL_INITIAL (decl);
5372 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5373 into struct T x = { 0, 1, 2 } if the address of the
5374 compound literal has never been taken. */
5375 if (!TREE_ADDRESSABLE (complit)
5376 && !TREE_ADDRESSABLE (decl)
5377 && init)
5379 *expr_p = copy_node (*expr_p);
5380 TREE_OPERAND (*expr_p, 1) = init;
5381 return GS_OK;
5385 default:
5386 break;
5389 while (changed);
5391 return ret;
5395 /* Return true if T looks like a valid GIMPLE statement. */
5397 static bool
5398 is_gimple_stmt (tree t)
5400 const enum tree_code code = TREE_CODE (t);
5402 switch (code)
5404 case NOP_EXPR:
5405 /* The only valid NOP_EXPR is the empty statement. */
5406 return IS_EMPTY_STMT (t);
5408 case BIND_EXPR:
5409 case COND_EXPR:
5410 /* These are only valid if they're void. */
5411 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5413 case SWITCH_EXPR:
5414 case GOTO_EXPR:
5415 case RETURN_EXPR:
5416 case LABEL_EXPR:
5417 case CASE_LABEL_EXPR:
5418 case TRY_CATCH_EXPR:
5419 case TRY_FINALLY_EXPR:
5420 case EH_FILTER_EXPR:
5421 case CATCH_EXPR:
5422 case ASM_EXPR:
5423 case STATEMENT_LIST:
5424 case OACC_PARALLEL:
5425 case OACC_KERNELS:
5426 case OACC_DATA:
5427 case OACC_HOST_DATA:
5428 case OACC_DECLARE:
5429 case OACC_UPDATE:
5430 case OACC_ENTER_DATA:
5431 case OACC_EXIT_DATA:
5432 case OACC_CACHE:
5433 case OMP_PARALLEL:
5434 case OMP_FOR:
5435 case OMP_SIMD:
5436 case OMP_DISTRIBUTE:
5437 case OACC_LOOP:
5438 case OMP_SECTIONS:
5439 case OMP_SECTION:
5440 case OMP_SINGLE:
5441 case OMP_MASTER:
5442 case OMP_TASKGROUP:
5443 case OMP_ORDERED:
5444 case OMP_CRITICAL:
5445 case OMP_TASK:
5446 case OMP_TARGET:
5447 case OMP_TARGET_DATA:
5448 case OMP_TARGET_UPDATE:
5449 case OMP_TARGET_ENTER_DATA:
5450 case OMP_TARGET_EXIT_DATA:
5451 case OMP_TASKLOOP:
5452 case OMP_TEAMS:
5453 /* These are always void. */
5454 return true;
5456 case CALL_EXPR:
5457 case MODIFY_EXPR:
5458 case PREDICT_EXPR:
5459 /* These are valid regardless of their type. */
5460 return true;
5462 default:
5463 return false;
5468 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5469 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5470 DECL_GIMPLE_REG_P set.
5472 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5473 other, unmodified part of the complex object just before the total store.
5474 As a consequence, if the object is still uninitialized, an undefined value
5475 will be loaded into a register, which may result in a spurious exception
5476 if the register is floating-point and the value happens to be a signaling
5477 NaN for example. Then the fully-fledged complex operations lowering pass
5478 followed by a DCE pass are necessary in order to fix things up. */
5480 static enum gimplify_status
5481 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5482 bool want_value)
5484 enum tree_code code, ocode;
5485 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5487 lhs = TREE_OPERAND (*expr_p, 0);
5488 rhs = TREE_OPERAND (*expr_p, 1);
5489 code = TREE_CODE (lhs);
5490 lhs = TREE_OPERAND (lhs, 0);
5492 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5493 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5494 TREE_NO_WARNING (other) = 1;
5495 other = get_formal_tmp_var (other, pre_p);
5497 realpart = code == REALPART_EXPR ? rhs : other;
5498 imagpart = code == REALPART_EXPR ? other : rhs;
5500 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5501 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5502 else
5503 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5505 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5506 *expr_p = (want_value) ? rhs : NULL_TREE;
5508 return GS_ALL_DONE;
5511 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5513 modify_expr
5514 : varname '=' rhs
5515 | '*' ID '=' rhs
5517 PRE_P points to the list where side effects that must happen before
5518 *EXPR_P should be stored.
5520 POST_P points to the list where side effects that must happen after
5521 *EXPR_P should be stored.
5523 WANT_VALUE is nonzero iff we want to use the value of this expression
5524 in another expression. */
5526 static enum gimplify_status
5527 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5528 bool want_value)
5530 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5531 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5532 enum gimplify_status ret = GS_UNHANDLED;
5533 gimple *assign;
5534 location_t loc = EXPR_LOCATION (*expr_p);
5535 gimple_stmt_iterator gsi;
5537 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5538 || TREE_CODE (*expr_p) == INIT_EXPR);
5540 /* Trying to simplify a clobber using normal logic doesn't work,
5541 so handle it here. */
5542 if (TREE_CLOBBER_P (*from_p))
5544 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5545 if (ret == GS_ERROR)
5546 return ret;
5547 gcc_assert (!want_value);
5548 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5550 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5551 pre_p, post_p);
5552 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5554 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5555 *expr_p = NULL;
5556 return GS_ALL_DONE;
5559 /* Insert pointer conversions required by the middle-end that are not
5560 required by the frontend. This fixes middle-end type checking for
5561 for example gcc.dg/redecl-6.c. */
5562 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5564 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5565 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5566 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5569 /* See if any simplifications can be done based on what the RHS is. */
5570 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5571 want_value);
5572 if (ret != GS_UNHANDLED)
5573 return ret;
5575 /* For zero sized types only gimplify the left hand side and right hand
5576 side as statements and throw away the assignment. Do this after
5577 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5578 types properly. */
5579 if (zero_sized_type (TREE_TYPE (*from_p))
5580 && !want_value
5581 /* Don't do this for calls that return addressable types, expand_call
5582 relies on those having a lhs. */
5583 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5584 && TREE_CODE (*from_p) == CALL_EXPR))
5586 gimplify_stmt (from_p, pre_p);
5587 gimplify_stmt (to_p, pre_p);
5588 *expr_p = NULL_TREE;
5589 return GS_ALL_DONE;
5592 /* If the value being copied is of variable width, compute the length
5593 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5594 before gimplifying any of the operands so that we can resolve any
5595 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5596 the size of the expression to be copied, not of the destination, so
5597 that is what we must do here. */
5598 maybe_with_size_expr (from_p);
5600 /* As a special case, we have to temporarily allow for assignments
5601 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5602 a toplevel statement, when gimplifying the GENERIC expression
5603 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5604 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5606 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5607 prevent gimplify_expr from trying to create a new temporary for
5608 foo's LHS, we tell it that it should only gimplify until it
5609 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5610 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5611 and all we need to do here is set 'a' to be its LHS. */
5613 /* Gimplify the RHS first for C++17 and bug 71104. */
5614 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5615 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5616 if (ret == GS_ERROR)
5617 return ret;
5619 /* Then gimplify the LHS. */
5620 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5621 twice we have to make sure to gimplify into non-SSA as otherwise
5622 the abnormal edge added later will make those defs not dominate
5623 their uses.
5624 ??? Technically this applies only to the registers used in the
5625 resulting non-register *TO_P. */
5626 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5627 if (saved_into_ssa
5628 && TREE_CODE (*from_p) == CALL_EXPR
5629 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5630 gimplify_ctxp->into_ssa = false;
5631 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5632 gimplify_ctxp->into_ssa = saved_into_ssa;
5633 if (ret == GS_ERROR)
5634 return ret;
5636 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5637 guess for the predicate was wrong. */
5638 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5639 if (final_pred != initial_pred)
5641 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5642 if (ret == GS_ERROR)
5643 return ret;
5646 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5647 size as argument to the call. */
5648 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5650 tree call = TREE_OPERAND (*from_p, 0);
5651 tree vlasize = TREE_OPERAND (*from_p, 1);
5653 if (TREE_CODE (call) == CALL_EXPR
5654 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5656 int nargs = call_expr_nargs (call);
5657 tree type = TREE_TYPE (call);
5658 tree ap = CALL_EXPR_ARG (call, 0);
5659 tree tag = CALL_EXPR_ARG (call, 1);
5660 tree aptag = CALL_EXPR_ARG (call, 2);
5661 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5662 IFN_VA_ARG, type,
5663 nargs + 1, ap, tag,
5664 aptag, vlasize);
5665 TREE_OPERAND (*from_p, 0) = newcall;
5669 /* Now see if the above changed *from_p to something we handle specially. */
5670 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5671 want_value);
5672 if (ret != GS_UNHANDLED)
5673 return ret;
5675 /* If we've got a variable sized assignment between two lvalues (i.e. does
5676 not involve a call), then we can make things a bit more straightforward
5677 by converting the assignment to memcpy or memset. */
5678 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5680 tree from = TREE_OPERAND (*from_p, 0);
5681 tree size = TREE_OPERAND (*from_p, 1);
5683 if (TREE_CODE (from) == CONSTRUCTOR)
5684 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5686 if (is_gimple_addressable (from))
5688 *from_p = from;
5689 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5690 pre_p);
5694 /* Transform partial stores to non-addressable complex variables into
5695 total stores. This allows us to use real instead of virtual operands
5696 for these variables, which improves optimization. */
5697 if ((TREE_CODE (*to_p) == REALPART_EXPR
5698 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5699 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5700 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5702 /* Try to alleviate the effects of the gimplification creating artificial
5703 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5704 make sure not to create DECL_DEBUG_EXPR links across functions. */
5705 if (!gimplify_ctxp->into_ssa
5706 && VAR_P (*from_p)
5707 && DECL_IGNORED_P (*from_p)
5708 && DECL_P (*to_p)
5709 && !DECL_IGNORED_P (*to_p)
5710 && decl_function_context (*to_p) == current_function_decl
5711 && decl_function_context (*from_p) == current_function_decl)
5713 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5714 DECL_NAME (*from_p)
5715 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5716 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5717 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5720 if (want_value && TREE_THIS_VOLATILE (*to_p))
5721 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5723 if (TREE_CODE (*from_p) == CALL_EXPR)
5725 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5726 instead of a GIMPLE_ASSIGN. */
5727 gcall *call_stmt;
5728 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5730 /* Gimplify internal functions created in the FEs. */
5731 int nargs = call_expr_nargs (*from_p), i;
5732 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5733 auto_vec<tree> vargs (nargs);
5735 for (i = 0; i < nargs; i++)
5737 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5738 EXPR_LOCATION (*from_p));
5739 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5741 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5742 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5743 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5745 else
5747 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5748 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5749 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5750 tree fndecl = get_callee_fndecl (*from_p);
5751 if (fndecl
5752 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5753 && call_expr_nargs (*from_p) == 3)
5754 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5755 CALL_EXPR_ARG (*from_p, 0),
5756 CALL_EXPR_ARG (*from_p, 1),
5757 CALL_EXPR_ARG (*from_p, 2));
5758 else
5760 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5763 notice_special_calls (call_stmt);
5764 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5765 gimple_call_set_lhs (call_stmt, *to_p);
5766 else if (TREE_CODE (*to_p) == SSA_NAME)
5767 /* The above is somewhat premature, avoid ICEing later for a
5768 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5769 ??? This doesn't make it a default-def. */
5770 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5772 assign = call_stmt;
5774 else
5776 assign = gimple_build_assign (*to_p, *from_p);
5777 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5778 if (COMPARISON_CLASS_P (*from_p))
5779 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5782 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5784 /* We should have got an SSA name from the start. */
5785 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5786 || ! gimple_in_ssa_p (cfun));
5789 gimplify_seq_add_stmt (pre_p, assign);
5790 gsi = gsi_last (*pre_p);
5791 maybe_fold_stmt (&gsi);
5793 if (want_value)
5795 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5796 return GS_OK;
5798 else
5799 *expr_p = NULL;
5801 return GS_ALL_DONE;
5804 /* Gimplify a comparison between two variable-sized objects. Do this
5805 with a call to BUILT_IN_MEMCMP. */
5807 static enum gimplify_status
5808 gimplify_variable_sized_compare (tree *expr_p)
5810 location_t loc = EXPR_LOCATION (*expr_p);
5811 tree op0 = TREE_OPERAND (*expr_p, 0);
5812 tree op1 = TREE_OPERAND (*expr_p, 1);
5813 tree t, arg, dest, src, expr;
5815 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5816 arg = unshare_expr (arg);
5817 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5818 src = build_fold_addr_expr_loc (loc, op1);
5819 dest = build_fold_addr_expr_loc (loc, op0);
5820 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5821 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5823 expr
5824 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5825 SET_EXPR_LOCATION (expr, loc);
5826 *expr_p = expr;
5828 return GS_OK;
5831 /* Gimplify a comparison between two aggregate objects of integral scalar
5832 mode as a comparison between the bitwise equivalent scalar values. */
5834 static enum gimplify_status
5835 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5837 location_t loc = EXPR_LOCATION (*expr_p);
5838 tree op0 = TREE_OPERAND (*expr_p, 0);
5839 tree op1 = TREE_OPERAND (*expr_p, 1);
5841 tree type = TREE_TYPE (op0);
5842 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5844 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5845 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5847 *expr_p
5848 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5850 return GS_OK;
5853 /* Gimplify an expression sequence. This function gimplifies each
5854 expression and rewrites the original expression with the last
5855 expression of the sequence in GIMPLE form.
5857 PRE_P points to the list where the side effects for all the
5858 expressions in the sequence will be emitted.
5860 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5862 static enum gimplify_status
5863 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5865 tree t = *expr_p;
5869 tree *sub_p = &TREE_OPERAND (t, 0);
5871 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5872 gimplify_compound_expr (sub_p, pre_p, false);
5873 else
5874 gimplify_stmt (sub_p, pre_p);
5876 t = TREE_OPERAND (t, 1);
5878 while (TREE_CODE (t) == COMPOUND_EXPR);
5880 *expr_p = t;
5881 if (want_value)
5882 return GS_OK;
5883 else
5885 gimplify_stmt (expr_p, pre_p);
5886 return GS_ALL_DONE;
5890 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5891 gimplify. After gimplification, EXPR_P will point to a new temporary
5892 that holds the original value of the SAVE_EXPR node.
5894 PRE_P points to the list where side effects that must happen before
5895 *EXPR_P should be stored. */
5897 static enum gimplify_status
5898 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5900 enum gimplify_status ret = GS_ALL_DONE;
5901 tree val;
5903 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5904 val = TREE_OPERAND (*expr_p, 0);
5906 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5907 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5909 /* The operand may be a void-valued expression. It is
5910 being executed only for its side-effects. */
5911 if (TREE_TYPE (val) == void_type_node)
5913 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5914 is_gimple_stmt, fb_none);
5915 val = NULL;
5917 else
5918 /* The temporary may not be an SSA name as later abnormal and EH
5919 control flow may invalidate use/def domination. When in SSA
5920 form then assume there are no such issues and SAVE_EXPRs only
5921 appear via GENERIC foldings. */
5922 val = get_initialized_tmp_var (val, pre_p, post_p,
5923 gimple_in_ssa_p (cfun));
5925 TREE_OPERAND (*expr_p, 0) = val;
5926 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5929 *expr_p = val;
5931 return ret;
5934 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5936 unary_expr
5937 : ...
5938 | '&' varname
5941 PRE_P points to the list where side effects that must happen before
5942 *EXPR_P should be stored.
5944 POST_P points to the list where side effects that must happen after
5945 *EXPR_P should be stored. */
5947 static enum gimplify_status
5948 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5950 tree expr = *expr_p;
5951 tree op0 = TREE_OPERAND (expr, 0);
5952 enum gimplify_status ret;
5953 location_t loc = EXPR_LOCATION (*expr_p);
5955 switch (TREE_CODE (op0))
5957 case INDIRECT_REF:
5958 do_indirect_ref:
5959 /* Check if we are dealing with an expression of the form '&*ptr'.
5960 While the front end folds away '&*ptr' into 'ptr', these
5961 expressions may be generated internally by the compiler (e.g.,
5962 builtins like __builtin_va_end). */
5963 /* Caution: the silent array decomposition semantics we allow for
5964 ADDR_EXPR means we can't always discard the pair. */
5965 /* Gimplification of the ADDR_EXPR operand may drop
5966 cv-qualification conversions, so make sure we add them if
5967 needed. */
5969 tree op00 = TREE_OPERAND (op0, 0);
5970 tree t_expr = TREE_TYPE (expr);
5971 tree t_op00 = TREE_TYPE (op00);
5973 if (!useless_type_conversion_p (t_expr, t_op00))
5974 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5975 *expr_p = op00;
5976 ret = GS_OK;
5978 break;
5980 case VIEW_CONVERT_EXPR:
5981 /* Take the address of our operand and then convert it to the type of
5982 this ADDR_EXPR.
5984 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5985 all clear. The impact of this transformation is even less clear. */
5987 /* If the operand is a useless conversion, look through it. Doing so
5988 guarantees that the ADDR_EXPR and its operand will remain of the
5989 same type. */
5990 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5991 op0 = TREE_OPERAND (op0, 0);
5993 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5994 build_fold_addr_expr_loc (loc,
5995 TREE_OPERAND (op0, 0)));
5996 ret = GS_OK;
5997 break;
5999 case MEM_REF:
6000 if (integer_zerop (TREE_OPERAND (op0, 1)))
6001 goto do_indirect_ref;
6003 /* fall through */
6005 default:
6006 /* If we see a call to a declared builtin or see its address
6007 being taken (we can unify those cases here) then we can mark
6008 the builtin for implicit generation by GCC. */
6009 if (TREE_CODE (op0) == FUNCTION_DECL
6010 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6011 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6012 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6014 /* We use fb_either here because the C frontend sometimes takes
6015 the address of a call that returns a struct; see
6016 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6017 the implied temporary explicit. */
6019 /* Make the operand addressable. */
6020 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6021 is_gimple_addressable, fb_either);
6022 if (ret == GS_ERROR)
6023 break;
6025 /* Then mark it. Beware that it may not be possible to do so directly
6026 if a temporary has been created by the gimplification. */
6027 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6029 op0 = TREE_OPERAND (expr, 0);
6031 /* For various reasons, the gimplification of the expression
6032 may have made a new INDIRECT_REF. */
6033 if (TREE_CODE (op0) == INDIRECT_REF)
6034 goto do_indirect_ref;
6036 mark_addressable (TREE_OPERAND (expr, 0));
6038 /* The FEs may end up building ADDR_EXPRs early on a decl with
6039 an incomplete type. Re-build ADDR_EXPRs in canonical form
6040 here. */
6041 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6042 *expr_p = build_fold_addr_expr (op0);
6044 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6045 recompute_tree_invariant_for_addr_expr (*expr_p);
6047 /* If we re-built the ADDR_EXPR add a conversion to the original type
6048 if required. */
6049 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6050 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6052 break;
6055 return ret;
6058 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6059 value; output operands should be a gimple lvalue. */
6061 static enum gimplify_status
6062 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6064 tree expr;
6065 int noutputs;
6066 const char **oconstraints;
6067 int i;
6068 tree link;
6069 const char *constraint;
6070 bool allows_mem, allows_reg, is_inout;
6071 enum gimplify_status ret, tret;
6072 gasm *stmt;
6073 vec<tree, va_gc> *inputs;
6074 vec<tree, va_gc> *outputs;
6075 vec<tree, va_gc> *clobbers;
6076 vec<tree, va_gc> *labels;
6077 tree link_next;
6079 expr = *expr_p;
6080 noutputs = list_length (ASM_OUTPUTS (expr));
6081 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6083 inputs = NULL;
6084 outputs = NULL;
6085 clobbers = NULL;
6086 labels = NULL;
6088 ret = GS_ALL_DONE;
6089 link_next = NULL_TREE;
6090 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6092 bool ok;
6093 size_t constraint_len;
6095 link_next = TREE_CHAIN (link);
6097 oconstraints[i]
6098 = constraint
6099 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6100 constraint_len = strlen (constraint);
6101 if (constraint_len == 0)
6102 continue;
6104 ok = parse_output_constraint (&constraint, i, 0, 0,
6105 &allows_mem, &allows_reg, &is_inout);
6106 if (!ok)
6108 ret = GS_ERROR;
6109 is_inout = false;
6112 if (!allows_reg && allows_mem)
6113 mark_addressable (TREE_VALUE (link));
6115 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6116 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6117 fb_lvalue | fb_mayfail);
6118 if (tret == GS_ERROR)
6120 error ("invalid lvalue in asm output %d", i);
6121 ret = tret;
6124 /* If the constraint does not allow memory make sure we gimplify
6125 it to a register if it is not already but its base is. This
6126 happens for complex and vector components. */
6127 if (!allows_mem)
6129 tree op = TREE_VALUE (link);
6130 if (! is_gimple_val (op)
6131 && is_gimple_reg_type (TREE_TYPE (op))
6132 && is_gimple_reg (get_base_address (op)))
6134 tree tem = create_tmp_reg (TREE_TYPE (op));
6135 tree ass;
6136 if (is_inout)
6138 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6139 tem, unshare_expr (op));
6140 gimplify_and_add (ass, pre_p);
6142 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6143 gimplify_and_add (ass, post_p);
6145 TREE_VALUE (link) = tem;
6146 tret = GS_OK;
6150 vec_safe_push (outputs, link);
6151 TREE_CHAIN (link) = NULL_TREE;
6153 if (is_inout)
6155 /* An input/output operand. To give the optimizers more
6156 flexibility, split it into separate input and output
6157 operands. */
6158 tree input;
6159 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6160 char buf[11];
6162 /* Turn the in/out constraint into an output constraint. */
6163 char *p = xstrdup (constraint);
6164 p[0] = '=';
6165 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6167 /* And add a matching input constraint. */
6168 if (allows_reg)
6170 sprintf (buf, "%u", i);
6172 /* If there are multiple alternatives in the constraint,
6173 handle each of them individually. Those that allow register
6174 will be replaced with operand number, the others will stay
6175 unchanged. */
6176 if (strchr (p, ',') != NULL)
6178 size_t len = 0, buflen = strlen (buf);
6179 char *beg, *end, *str, *dst;
6181 for (beg = p + 1;;)
6183 end = strchr (beg, ',');
6184 if (end == NULL)
6185 end = strchr (beg, '\0');
6186 if ((size_t) (end - beg) < buflen)
6187 len += buflen + 1;
6188 else
6189 len += end - beg + 1;
6190 if (*end)
6191 beg = end + 1;
6192 else
6193 break;
6196 str = (char *) alloca (len);
6197 for (beg = p + 1, dst = str;;)
6199 const char *tem;
6200 bool mem_p, reg_p, inout_p;
6202 end = strchr (beg, ',');
6203 if (end)
6204 *end = '\0';
6205 beg[-1] = '=';
6206 tem = beg - 1;
6207 parse_output_constraint (&tem, i, 0, 0,
6208 &mem_p, &reg_p, &inout_p);
6209 if (dst != str)
6210 *dst++ = ',';
6211 if (reg_p)
6213 memcpy (dst, buf, buflen);
6214 dst += buflen;
6216 else
6218 if (end)
6219 len = end - beg;
6220 else
6221 len = strlen (beg);
6222 memcpy (dst, beg, len);
6223 dst += len;
6225 if (end)
6226 beg = end + 1;
6227 else
6228 break;
6230 *dst = '\0';
6231 input = build_string (dst - str, str);
6233 else
6234 input = build_string (strlen (buf), buf);
6236 else
6237 input = build_string (constraint_len - 1, constraint + 1);
6239 free (p);
6241 input = build_tree_list (build_tree_list (NULL_TREE, input),
6242 unshare_expr (TREE_VALUE (link)));
6243 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6247 link_next = NULL_TREE;
6248 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6250 link_next = TREE_CHAIN (link);
6251 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6252 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6253 oconstraints, &allows_mem, &allows_reg);
6255 /* If we can't make copies, we can only accept memory. */
6256 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6258 if (allows_mem)
6259 allows_reg = 0;
6260 else
6262 error ("impossible constraint in %<asm%>");
6263 error ("non-memory input %d must stay in memory", i);
6264 return GS_ERROR;
6268 /* If the operand is a memory input, it should be an lvalue. */
6269 if (!allows_reg && allows_mem)
6271 tree inputv = TREE_VALUE (link);
6272 STRIP_NOPS (inputv);
6273 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6274 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6275 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6276 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6277 || TREE_CODE (inputv) == MODIFY_EXPR)
6278 TREE_VALUE (link) = error_mark_node;
6279 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6280 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6281 if (tret != GS_ERROR)
6283 /* Unlike output operands, memory inputs are not guaranteed
6284 to be lvalues by the FE, and while the expressions are
6285 marked addressable there, if it is e.g. a statement
6286 expression, temporaries in it might not end up being
6287 addressable. They might be already used in the IL and thus
6288 it is too late to make them addressable now though. */
6289 tree x = TREE_VALUE (link);
6290 while (handled_component_p (x))
6291 x = TREE_OPERAND (x, 0);
6292 if (TREE_CODE (x) == MEM_REF
6293 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6294 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6295 if ((VAR_P (x)
6296 || TREE_CODE (x) == PARM_DECL
6297 || TREE_CODE (x) == RESULT_DECL)
6298 && !TREE_ADDRESSABLE (x)
6299 && is_gimple_reg (x))
6301 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6302 input_location), 0,
6303 "memory input %d is not directly addressable",
6305 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6308 mark_addressable (TREE_VALUE (link));
6309 if (tret == GS_ERROR)
6311 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6312 "memory input %d is not directly addressable", i);
6313 ret = tret;
6316 else
6318 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6319 is_gimple_asm_val, fb_rvalue);
6320 if (tret == GS_ERROR)
6321 ret = tret;
6324 TREE_CHAIN (link) = NULL_TREE;
6325 vec_safe_push (inputs, link);
6328 link_next = NULL_TREE;
6329 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6331 link_next = TREE_CHAIN (link);
6332 TREE_CHAIN (link) = NULL_TREE;
6333 vec_safe_push (clobbers, link);
6336 link_next = NULL_TREE;
6337 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6339 link_next = TREE_CHAIN (link);
6340 TREE_CHAIN (link) = NULL_TREE;
6341 vec_safe_push (labels, link);
6344 /* Do not add ASMs with errors to the gimple IL stream. */
6345 if (ret != GS_ERROR)
6347 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6348 inputs, outputs, clobbers, labels);
6350 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6351 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6353 gimplify_seq_add_stmt (pre_p, stmt);
6356 return ret;
6359 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6360 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6361 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6362 return to this function.
6364 FIXME should we complexify the prequeue handling instead? Or use flags
6365 for all the cleanups and let the optimizer tighten them up? The current
6366 code seems pretty fragile; it will break on a cleanup within any
6367 non-conditional nesting. But any such nesting would be broken, anyway;
6368 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6369 and continues out of it. We can do that at the RTL level, though, so
6370 having an optimizer to tighten up try/finally regions would be a Good
6371 Thing. */
6373 static enum gimplify_status
6374 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6376 gimple_stmt_iterator iter;
6377 gimple_seq body_sequence = NULL;
/* If *EXPR_P is used in value context, TEMP is a temporary holding the
   result; it replaces *EXPR_P at the end of this function.  */
6379 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6381 /* We only care about the number of conditions between the innermost
6382 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6383 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6384 int old_conds = gimplify_ctxp->conditions;
6385 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6386 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6387 gimplify_ctxp->conditions = 0;
6388 gimplify_ctxp->conditional_cleanups = NULL;
6389 gimplify_ctxp->in_cleanup_point_expr = true;
/* Gimplify the wrapped body into a private sequence; cleanups inside it
   show up as GIMPLE_WITH_CLEANUP_EXPR markers.  */
6391 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
/* Restore the condition count and cleanup state saved above.  */
6393 gimplify_ctxp->conditions = old_conds;
6394 gimplify_ctxp->conditional_cleanups = old_cleanups;
6395 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Convert each cleanup marker (WCE) into a GIMPLE_TRY whose body is the
   rest of the sequence after the marker.  */
6397 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6399 gimple *wce = gsi_stmt (iter);
6401 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6403 if (gsi_one_before_end_p (iter))
/* The WCE is the last statement: there is nothing left to guard, so
   splice the cleanup in directly — unless it is EH-only, in which
   case it can never run here and is simply dropped.  */
6405 /* Note that gsi_insert_seq_before and gsi_remove do not
6406 scan operands, unlike some other sequence mutators. */
6407 if (!gimple_wce_cleanup_eh_only (wce))
6408 gsi_insert_seq_before_without_update (&iter,
6409 gimple_wce_cleanup (wce),
6410 GSI_SAME_STMT);
6411 gsi_remove (&iter, true);
6412 break;
6414 else
6416 gtry *gtry;
6417 gimple_seq seq;
6418 enum gimple_try_flags kind;
/* EH-only cleanups become TRY_CATCH, normal ones TRY_FINALLY.  */
6420 if (gimple_wce_cleanup_eh_only (wce))
6421 kind = GIMPLE_TRY_CATCH;
6422 else
6423 kind = GIMPLE_TRY_FINALLY;
6424 seq = gsi_split_seq_after (iter);
6426 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6427 /* Do not use gsi_replace here, as it may scan operands.
6428 We want to do a simple structural modification only. */
6429 gsi_set_stmt (&iter, gtry);
/* Continue the scan inside the new try body, so nested WCEs are
   converted too.  */
6430 iter = gsi_start (gtry->eval);
6433 else
6434 gsi_next (&iter);
6437 gimplify_seq_add_seq (pre_p, body_sequence);
6438 if (temp)
6440 *expr_p = temp;
6441 return GS_OK;
6443 else
6445 *expr_p = NULL;
6446 return GS_ALL_DONE;
6450 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6451 is the cleanup action required. EH_ONLY is true if the cleanup should
6452 only be executed if an exception is thrown, not on normal exit.
6453 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6454 only valid for clobbers. */
6456 static void
6457 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6458 bool force_uncond = false)
6460 gimple *wce;
6461 gimple_seq cleanup_stmts = NULL;
6463 /* Errors can result in improperly nested cleanups. Which results in
6464 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6465 if (seen_error ())
6466 return;
6468 if (gimple_conditional_context ())
6470 /* If we're in a conditional context, this is more complex. We only
6471 want to run the cleanup if we actually ran the initialization that
6472 necessitates it, but we want to run it after the end of the
6473 conditional context. So we wrap the try/finally around the
6474 condition and use a flag to determine whether or not to actually
6475 run the destructor. Thus
6477 test ? f(A()) : 0
6479 becomes (approximately)
6481 flag = 0;
6482 try {
6483 if (test) { A::A(temp); flag = 1; val = f(temp); }
6484 else { val = 0; }
6485 } finally {
6486 if (flag) A::~A(temp);
/* FORCE_UNCOND (clobbers only) skips the flag: running a clobber for an
   object that was never initialized is harmless, so it is queued on the
   conditional-cleanups sequence unguarded.  */
6490 if (force_uncond)
6492 gimplify_stmt (&cleanup, &cleanup_stmts);
6493 wce = gimple_build_wce (cleanup_stmts);
6494 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6496 else
/* Guard the cleanup with a boolean flag: FFALSE initializes it before
   the try region, FTRUE is emitted at the current (conditional) point,
   and the cleanup itself is wrapped in "if (flag)".  */
6498 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6499 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6500 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6502 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6503 gimplify_stmt (&cleanup, &cleanup_stmts);
6504 wce = gimple_build_wce (cleanup_stmts);
6506 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6507 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6508 gimplify_seq_add_stmt (pre_p, ftrue);
6510 /* Because of this manipulation, and the EH edges that jump
6511 threading cannot redirect, the temporary (VAR) will appear
6512 to be used uninitialized. Don't warn. */
6513 TREE_NO_WARNING (var) = 1;
6516 else
/* Unconditional context: emit the cleanup marker directly into the
   pre-queue, tagging it EH-only as requested.  */
6518 gimplify_stmt (&cleanup, &cleanup_stmts);
6519 wce = gimple_build_wce (cleanup_stmts);
6520 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6521 gimplify_seq_add_stmt (pre_p, wce);
6525 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6527 static enum gimplify_status
6528 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6530 tree targ = *expr_p;
6531 tree temp = TARGET_EXPR_SLOT (targ);
6532 tree init = TARGET_EXPR_INITIAL (targ);
6533 enum gimplify_status ret;
/* Bookkeeping for ASAN use-after-scope: where to insert the unpoison
   call once we know the slot lives in memory.  */
6535 bool unpoison_empty_seq = false;
6536 gimple_stmt_iterator unpoison_it;
6538 if (init)
6540 tree cleanup = NULL_TREE;
6542 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6543 to the temps list. Handle also variable length TARGET_EXPRs. */
6544 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6546 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6547 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6548 gimplify_vla_decl (temp, pre_p);
6550 else
6552 /* Save location where we need to place unpoisoning. It's possible
6553 that a variable will be converted to needs_to_live_in_memory. */
6554 unpoison_it = gsi_last (*pre_p);
6555 unpoison_empty_seq = gsi_end_p (unpoison_it);
6557 gimple_add_tmp_var (temp);
6560 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6561 expression is supposed to initialize the slot. */
6562 if (VOID_TYPE_P (TREE_TYPE (init)))
6563 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6564 else
/* Otherwise build an explicit INIT_EXPR storing INIT into the slot,
   gimplify it, then return the tree node to the GC pool.  */
6566 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6567 init = init_expr;
6568 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6569 init = NULL;
6570 ggc_free (init_expr);
6572 if (ret == GS_ERROR)
6574 /* PR c++/28266 Make sure this is expanded only once. */
6575 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6576 return GS_ERROR;
6578 if (init)
6579 gimplify_and_add (init, pre_p);
6581 /* If needed, push the cleanup for the temp. */
6582 if (TARGET_EXPR_CLEANUP (targ))
/* An EH-only cleanup is pushed immediately; a normal one is deferred
   so it is pushed after the clobber/ASAN cleanups below and hence runs
   before them (cleanups run in reverse push order).  */
6584 if (CLEANUP_EH_ONLY (targ))
6585 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6586 CLEANUP_EH_ONLY (targ), pre_p);
6587 else
6588 cleanup = TARGET_EXPR_CLEANUP (targ);
6591 /* Add a clobber for the temporary going out of scope, like
6592 gimplify_bind_expr. */
6593 if (gimplify_ctxp->in_cleanup_point_expr
6594 && needs_to_live_in_memory (temp))
6596 if (flag_stack_reuse == SR_ALL)
6598 tree clobber = build_clobber (TREE_TYPE (temp));
6599 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6600 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6602 if (asan_poisoned_variables
6603 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6604 && dbg_cnt (asan_use_after_scope)
6605 && !gimplify_omp_ctxp)
6607 tree asan_cleanup = build_asan_poison_call_expr (temp);
6608 if (asan_cleanup)
6610 if (unpoison_empty_seq)
6611 unpoison_it = gsi_start (*pre_p);
/* Unpoison at the saved insertion point (before first use) and
   re-poison when the temporary goes out of scope.  */
6613 asan_poison_variable (temp, false, &unpoison_it,
6614 unpoison_empty_seq);
6615 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6619 if (cleanup)
6620 gimple_push_cleanup (temp, cleanup, false, pre_p);
6622 /* Only expand this once. */
6623 TREE_OPERAND (targ, 3) = init;
6624 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6626 else
6627 /* We should have expanded this before. */
6628 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6630 *expr_p = temp;
6631 return GS_OK;
6634 /* Gimplification of expression trees. */
6636 /* Gimplify an expression which appears at statement context. The
6637 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6638 NULL, a new sequence is allocated.
6640 Return true if we actually added a statement to the queue. */
6642 bool
6643 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6645 gimple_seq_node last;
6647 last = gimple_seq_last (*seq_p);
6648 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6649 return last != gimple_seq_last (*seq_p);
6652 /* Add FIRSTPRIVATE entries for DECL in the OpenMP contexts surrounding
6653 CTX. If entries already exist, force them to be some flavor of private.
6654 If there is no enclosing parallel, do nothing. */
6656 void
6657 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6659 splay_tree_node n;
6661 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6662 return;
/* Loop body: walk from CTX outward (the enclosing do/while advances CTX
   via ctx->outer_context below) until the variable is handled.  */
6666 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6667 if (n != NULL)
/* Already known here: demote SHARED to FIRSTPRIVATE, restrict MAP to
   to-only; any other existing class already covers DECL, stop.  */
6669 if (n->value & GOVD_SHARED)
6670 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6671 else if (n->value & GOVD_MAP)
6672 n->value |= GOVD_MAP_TO_ONLY;
6673 else
6674 return;
6676 else if ((ctx->region_type & ORT_TARGET) != 0)
/* Target region: honor the defaultmap for scalars.  */
6678 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6679 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6680 else
6681 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
/* Region kinds without data-sharing of their own are skipped; other
   regions get an explicit FIRSTPRIVATE entry.  */
6683 else if (ctx->region_type != ORT_WORKSHARE
6684 && ctx->region_type != ORT_TASKGROUP
6685 && ctx->region_type != ORT_SIMD
6686 && ctx->region_type != ORT_ACC
6687 && !(ctx->region_type & ORT_TARGET_DATA))
6688 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6690 ctx = ctx->outer_context;
6692 while (ctx);
6695 /* Similarly for each of the type sizes of TYPE. */
6697 static void
6698 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6700 if (type == NULL || type == error_mark_node)
6701 return;
6702 type = TYPE_MAIN_VARIANT (type);
6704 if (ctx->privatized_types->add (type))
6705 return;
6707 switch (TREE_CODE (type))
6709 case INTEGER_TYPE:
6710 case ENUMERAL_TYPE:
6711 case BOOLEAN_TYPE:
6712 case REAL_TYPE:
6713 case FIXED_POINT_TYPE:
6714 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6715 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6716 break;
6718 case ARRAY_TYPE:
6719 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6720 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6721 break;
6723 case RECORD_TYPE:
6724 case UNION_TYPE:
6725 case QUAL_UNION_TYPE:
6727 tree field;
6728 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6729 if (TREE_CODE (field) == FIELD_DECL)
6731 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6732 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6735 break;
6737 case POINTER_TYPE:
6738 case REFERENCE_TYPE:
6739 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6740 break;
6742 default:
6743 break;
6746 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6747 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6748 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6751 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6753 static void
6754 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6756 splay_tree_node n;
6757 unsigned int nflags;
6758 tree t;
6760 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6761 return;
6763 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6764 there are constructors involved somewhere. Exception is a shared clause,
6765 there is nothing privatized in that case. */
6766 if ((flags & GOVD_SHARED) == 0
6767 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6768 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6769 flags |= GOVD_SEEN;
6771 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6772 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
/* DECL already has a data-sharing class in this context: merge the new
   flags into the existing entry and return.  */
6774 /* We shouldn't be re-adding the decl with the same data
6775 sharing class. */
6776 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6777 nflags = n->value | flags;
6778 /* The only combination of data sharing classes we should see is
6779 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6780 reduction variables to be used in data sharing clauses. */
6781 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6782 || ((nflags & GOVD_DATA_SHARE_CLASS)
6783 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6784 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6785 n->value = nflags;
6786 return;
6789 /* When adding a variable-sized variable, we have to handle all sorts
6790 of additional bits of data: the pointer replacement variable, and
6791 the parameters of the type. */
6792 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6794 /* Add the pointer replacement variable as PRIVATE if the variable
6795 replacement is private, else FIRSTPRIVATE since we'll need the
6796 address of the original variable either for SHARED, or for the
6797 copy into or out of the context. */
6798 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6800 if (flags & GOVD_MAP)
6801 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6802 else if (flags & GOVD_PRIVATE)
6803 nflags = GOVD_PRIVATE;
6804 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6805 && (flags & GOVD_FIRSTPRIVATE))
6806 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6807 else
6808 nflags = GOVD_FIRSTPRIVATE;
6809 nflags |= flags & GOVD_SEEN;
/* The VLA decl's DECL_VALUE_EXPR is *ptr; recursively register the
   underlying pointer variable with the class computed above.  */
6810 t = DECL_VALUE_EXPR (decl);
6811 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6812 t = TREE_OPERAND (t, 0);
6813 gcc_assert (DECL_P (t));
6814 omp_add_variable (ctx, t, nflags);
6817 /* Add all of the variable and type parameters (which should have
6818 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6819 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6820 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6821 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6823 /* The variable-sized variable itself is never SHARED, only some form
6824 of PRIVATE. The sharing would take place via the pointer variable
6825 which we remapped above. */
6826 if (flags & GOVD_SHARED)
6827 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6828 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6830 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6831 alloca statement we generate for the variable, so make sure it
6832 is available. This isn't automatically needed for the SHARED
6833 case, since we won't be allocating local storage then.
6834 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6835 in this case omp_notice_variable will be called later
6836 on when it is gimplified. */
6837 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6838 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6839 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6841 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6842 && lang_hooks.decls.omp_privatize_by_reference (decl))
6844 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6846 /* Similar to the direct variable sized case above, we'll need the
6847 size of references being privatized. */
6848 if ((flags & GOVD_SHARED) == 0)
6850 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6851 if (DECL_P (t))
6852 omp_notice_variable (ctx, t, true);
/* Record (or update) the final flags for DECL in this context.  */
6856 if (n != NULL)
6857 n->value |= flags;
6858 else
6859 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6861 /* For reductions clauses in OpenACC loop directives, by default create a
6862 copy clause on the enclosing parallel construct for carrying back the
6863 results. */
6864 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6866 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6867 while (outer_ctx)
6869 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6870 if (n != NULL)
6872 /* Ignore local variables and explicitly declared clauses. */
6873 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6874 break;
6875 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6877 /* According to the OpenACC spec, such a reduction variable
6878 should already have a copy map on a kernels construct,
6879 verify that here. */
6880 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6881 && (n->value & GOVD_MAP));
6883 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6885 /* Remove firstprivate and make it a copy map. */
6886 n->value &= ~GOVD_FIRSTPRIVATE;
6887 n->value |= GOVD_MAP;
6890 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
/* No entry yet on the enclosing parallel: create the copy map.  */
6892 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6893 GOVD_MAP | GOVD_SEEN);
6894 break;
6896 outer_ctx = outer_ctx->outer_context;
6901 /* Notice a threadprivate variable DECL used in OMP context CTX.
6902 This just prints out diagnostics about threadprivate variable uses
6903 in untied tasks. If DECL2 is non-NULL, prevent this warning
6904 on that variable. */
6906 static bool
6907 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6908 tree decl2)
6910 splay_tree_node n;
6911 struct gimplify_omp_ctx *octx;
/* Threadprivate variables are not allowed in target regions: walk the
   enclosing contexts and diagnose the first target region that has not
   already reported DECL (the splay-tree insert suppresses repeats).  */
6913 for (octx = ctx; octx; octx = octx->outer_context)
6914 if ((octx->region_type & ORT_TARGET) != 0)
6916 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6917 if (n == NULL)
6919 error ("threadprivate variable %qE used in target region",
6920 DECL_NAME (decl));
6921 error_at (octx->location, "enclosing target region");
6922 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6924 if (decl2)
6925 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* Outside of untied tasks, threadprivate use needs no diagnostic.  */
6928 if (ctx->region_type != ORT_UNTIED_TASK)
6929 return false;
6930 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6931 if (n == NULL)
6933 error ("threadprivate variable %qE used in untied task",
6934 DECL_NAME (decl));
6935 error_at (ctx->location, "enclosing task");
6936 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6938 if (decl2)
6939 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
/* Threadprivate variables are never remapped.  */
6940 return false;
6943 /* Return true if global var DECL is device resident. */
6945 static bool
6946 device_resident_p (tree decl)
6948 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6950 if (!attr)
6951 return false;
6953 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6955 tree c = TREE_VALUE (t);
6956 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6957 return true;
6960 return false;
6963 /* Return true if DECL has an ACC DECLARE attribute. */
6965 static bool
6966 is_oacc_declared (tree decl)
6968 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6969 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6970 return declared != NULL_TREE;
6973 /* Determine outer default flags for DECL mentioned in an OMP region
6974 but not declared in an enclosing clause.
6976 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6977 remapped firstprivate instead of shared. To some extent this is
6978 addressed in omp_firstprivatize_type_sizes, but not
6979 effectively. */
6981 static unsigned
6982 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6983 bool in_code, unsigned flags)
6985 enum omp_clause_default_kind default_kind = ctx->default_kind;
6986 enum omp_clause_default_kind kind;
/* A predetermined sharing from the frontend overrides the region's
   default clause.  */
6988 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6989 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6990 default_kind = kind;
6992 switch (default_kind)
6994 case OMP_CLAUSE_DEFAULT_NONE:
/* default(none): diagnose the missing clause, naming the region kind,
   then fall through and treat DECL as shared to limit cascades.  */
6996 const char *rtype;
6998 if (ctx->region_type & ORT_PARALLEL)
6999 rtype = "parallel";
7000 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7001 rtype = "taskloop";
7002 else if (ctx->region_type & ORT_TASK)
7003 rtype = "task";
7004 else if (ctx->region_type & ORT_TEAMS)
7005 rtype = "teams";
7006 else
7007 gcc_unreachable ();
7009 error ("%qE not specified in enclosing %qs",
7010 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7011 error_at (ctx->location, "enclosing %qs", rtype);
7013 /* FALLTHRU */
7014 case OMP_CLAUSE_DEFAULT_SHARED:
7015 flags |= GOVD_SHARED;
7016 break;
7017 case OMP_CLAUSE_DEFAULT_PRIVATE:
7018 flags |= GOVD_PRIVATE;
7019 break;
7020 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7021 flags |= GOVD_FIRSTPRIVATE;
7022 break;
7023 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7024 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7025 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
/* Implicit task rule: search outer contexts for an existing sharing of
   DECL; a non-shared outer sharing makes DECL firstprivate here, a
   parallel/teams region without one makes it shared.  */
7026 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7028 omp_notice_variable (octx, decl, in_code);
7029 for (; octx; octx = octx->outer_context)
7031 splay_tree_node n2;
7033 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7034 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7035 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7036 continue;
7037 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7039 flags |= GOVD_FIRSTPRIVATE;
7040 goto found_outer;
7042 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7044 flags |= GOVD_SHARED;
7045 goto found_outer;
/* No outer context decided: parameters and function-local variables
   default to firstprivate, globals to shared.  */
7050 if (TREE_CODE (decl) == PARM_DECL
7051 || (!is_global_var (decl)
7052 && DECL_CONTEXT (decl) == current_function_decl))
7053 flags |= GOVD_FIRSTPRIVATE;
7054 else
7055 flags |= GOVD_SHARED;
7056 found_outer:
7057 break;
7059 default:
7060 gcc_unreachable ();
7063 return flags;
7067 /* Determine outer default flags for DECL mentioned in an OACC region
7068 but not declared in an enclosing clause. */
7070 static unsigned
7071 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7073 const char *rkind;
7074 bool on_device = false;
7075 bool declared = is_oacc_declared (decl);
7076 tree type = TREE_TYPE (decl);
/* For by-reference privatization, classify by the referenced type.  */
7078 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7079 type = TREE_TYPE (type);
7081 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7082 && is_global_var (decl)
7083 && device_resident_p (decl))
7085 on_device = true;
7086 flags |= GOVD_MAP_TO_ONLY;
7089 switch (ctx->region_type)
7091 case ORT_ACC_KERNELS:
7092 rkind = "kernels";
7094 if (AGGREGATE_TYPE_P (type))
7096 /* Aggregates default to 'present_or_copy', or 'present'. */
7097 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7098 flags |= GOVD_MAP;
7099 else
7100 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7102 else
7103 /* Scalars default to 'copy'. */
7104 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7106 break;
7108 case ORT_ACC_PARALLEL:
7109 rkind = "parallel";
/* Device-resident or 'declare'd variables are simply mapped.  */
7111 if (on_device || declared)
7112 flags |= GOVD_MAP;
7113 else if (AGGREGATE_TYPE_P (type))
7115 /* Aggregates default to 'present_or_copy', or 'present'. */
7116 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7117 flags |= GOVD_MAP;
7118 else
7119 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7121 else
7122 /* Scalars default to 'firstprivate'. */
7123 flags |= GOVD_FIRSTPRIVATE;
7125 break;
7127 default:
7128 gcc_unreachable ();
/* Under default(none), an implicitly used user variable is an error;
   RKIND (set above) names the construct in the diagnostic.  */
7131 if (DECL_ARTIFICIAL (decl))
7132 ; /* We can get compiler-generated decls, and should not complain
7133 about them. */
7134 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7136 error ("%qE not specified in enclosing OpenACC %qs construct",
7137 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7138 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7140 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7141 ; /* Handled above. */
7142 else
7143 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7145 return flags;
7148 /* Record the fact that DECL was used within the OMP context CTX.
7149 IN_CODE is true when real code uses DECL, and false when we should
7150 merely emit default(none) errors. Return true if DECL is going to
7151 be remapped and thus DECL shouldn't be gimplified into its
7152 DECL_VALUE_EXPR (if any). */
7154 static bool
7155 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7157 splay_tree_node n;
7158 unsigned flags = in_code ? GOVD_SEEN : 0;
7159 bool ret = false, shared;
7161 if (error_operand_p (decl))
7162 return false;
7164 if (ctx->region_type == ORT_NONE)
7165 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7167 if (is_global_var (decl))
7169 /* Threadprivate variables are predetermined. */
7170 if (DECL_THREAD_LOCAL_P (decl))
7171 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7173 if (DECL_HAS_VALUE_EXPR_P (decl))
7175 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7177 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7178 return omp_notice_threadprivate_variable (ctx, decl, value);
/* Inside an OpenACC 'routine' function, a global may only be used if
   it has a suitable 'declare' directive; diagnose otherwise.  */
7181 if (gimplify_omp_ctxp->outer_context == NULL
7182 && VAR_P (decl)
7183 && oacc_get_fn_attrib (current_function_decl))
7185 location_t loc = DECL_SOURCE_LOCATION (decl);
7187 if (lookup_attribute ("omp declare target link",
7188 DECL_ATTRIBUTES (decl)))
7190 error_at (loc,
7191 "%qE with %<link%> clause used in %<routine%> function",
7192 DECL_NAME (decl));
7193 return false;
7195 else if (!lookup_attribute ("omp declare target",
7196 DECL_ATTRIBUTES (decl)))
7198 error_at (loc,
7199 "%qE requires a %<declare%> directive for use "
7200 "in a %<routine%> function", DECL_NAME (decl));
7201 return false;
7206 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7207 if ((ctx->region_type & ORT_TARGET) != 0)
/* Target (OpenMP or OpenACC offload) region: compute an implicit
   mapping for DECL when no clause mentioned it.  */
7209 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7210 if (n == NULL)
7212 unsigned nflags = flags;
7213 if ((ctx->region_type & ORT_ACC) == 0)
7215 bool is_declare_target = false;
/* 'declare target' globals without an intervening sharing need no
   implicit map at all.  */
7216 if (is_global_var (decl)
7217 && varpool_node::get_create (decl)->offloadable)
7219 struct gimplify_omp_ctx *octx;
7220 for (octx = ctx->outer_context;
7221 octx; octx = octx->outer_context)
7223 n = splay_tree_lookup (octx->variables,
7224 (splay_tree_key)decl);
7225 if (n
7226 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7227 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7228 break;
7230 is_declare_target = octx == NULL;
7232 if (!is_declare_target)
/* Pick the defaultmap category: pointer, scalar or aggregate.  */
7234 int gdmk;
7235 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7236 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7237 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7238 == POINTER_TYPE)))
7239 gdmk = GDMK_POINTER;
7240 else if (lang_hooks.decls.omp_scalar_p (decl))
7241 gdmk = GDMK_SCALAR;
7242 else
7243 gdmk = GDMK_AGGREGATE;
7244 if (ctx->defaultmap[gdmk] == 0)
/* defaultmap(none) for this category: implicit use is an error.  */
7246 tree d = lang_hooks.decls.omp_report_decl (decl);
7247 error ("%qE not specified in enclosing %<target%>",
7248 DECL_NAME (d));
7249 error_at (ctx->location, "enclosing %<target%>");
7251 else if (ctx->defaultmap[gdmk]
7252 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7253 nflags |= ctx->defaultmap[gdmk];
7254 else
7256 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7257 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7262 struct gimplify_omp_ctx *octx = ctx->outer_context;
7263 if ((ctx->region_type & ORT_ACC) && octx)
7265 /* Look in outer OpenACC contexts, to see if there's a
7266 data attribute for this variable. */
7267 omp_notice_variable (octx, decl, in_code);
7269 for (; octx; octx = octx->outer_context)
7271 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7272 break;
7273 splay_tree_node n2
7274 = splay_tree_lookup (octx->variables,
7275 (splay_tree_key) decl);
7276 if (n2)
7278 if (octx->region_type == ORT_ACC_HOST_DATA)
7279 error ("variable %qE declared in enclosing "
7280 "%<host_data%> region", DECL_NAME (decl));
7281 nflags |= GOVD_MAP;
7282 if (octx->region_type == ORT_ACC_DATA
7283 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7284 nflags |= GOVD_MAP_0LEN_ARRAY;
7285 goto found_outer;
/* No outer data clause: fall back to region defaults, but first
   verify the type is mappable at all.  */
7290 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7291 | GOVD_MAP_ALLOC_ONLY)) == flags)
7293 tree type = TREE_TYPE (decl);
7295 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7296 && lang_hooks.decls.omp_privatize_by_reference (decl))
7297 type = TREE_TYPE (type);
7298 if (!lang_hooks.types.omp_mappable_type (type))
7300 error ("%qD referenced in target region does not have "
7301 "a mappable type", decl);
7302 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7304 else
7306 if ((ctx->region_type & ORT_ACC) != 0)
7307 nflags = oacc_default_clause (ctx, decl, flags);
7308 else
7309 nflags |= GOVD_MAP;
7312 found_outer:
7313 omp_add_variable (ctx, decl, nflags);
7315 else
7317 /* If nothing changed, there's nothing left to do. */
7318 if ((n->value & flags) == flags)
7319 return ret;
7320 flags |= n->value;
7321 n->value = flags;
7323 goto do_outer;
/* Non-target region: apply the default clause when DECL is unknown.  */
7326 if (n == NULL)
7328 if (ctx->region_type == ORT_WORKSHARE
7329 || ctx->region_type == ORT_TASKGROUP
7330 || ctx->region_type == ORT_SIMD
7331 || ctx->region_type == ORT_ACC
7332 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7333 goto do_outer;
7335 flags = omp_default_clause (ctx, decl, in_code, flags);
7337 if ((flags & GOVD_PRIVATE)
7338 && lang_hooks.decls.omp_private_outer_ref (decl))
7339 flags |= GOVD_PRIVATE_OUTER_REF;
7341 omp_add_variable (ctx, decl, flags);
7343 shared = (flags & GOVD_SHARED) != 0;
7344 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7345 goto do_outer;
/* First real use of a known, non-local DECL: also mark the auxiliary
   size decls (VLA pointer replacement, reference size) as seen.  */
7348 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7349 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7350 && DECL_SIZE (decl))
7352 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7354 splay_tree_node n2;
7355 tree t = DECL_VALUE_EXPR (decl);
7356 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7357 t = TREE_OPERAND (t, 0);
7358 gcc_assert (DECL_P (t));
7359 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7360 n2->value |= GOVD_SEEN;
7362 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7363 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7364 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7365 != INTEGER_CST))
7367 splay_tree_node n2;
7368 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7369 gcc_assert (DECL_P (t));
7370 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7371 if (n2)
7372 omp_notice_variable (ctx, t, true);
7376 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7377 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7379 /* If nothing changed, there's nothing left to do. */
7380 if ((n->value & flags) == flags)
7381 return ret;
7382 flags |= n->value;
7383 n->value = flags;
7385 do_outer:
7386 /* If the variable is private in the current context, then we don't
7387 need to propagate anything to an outer context. */
7388 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7389 return ret;
7390 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7391 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7392 return ret;
7393 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7394 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7395 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7396 return ret;
/* Otherwise propagate the use outward, recursively.  */
7397 if (ctx->outer_context
7398 && omp_notice_variable (ctx->outer_context, decl, in_code))
7399 return true;
7400 return ret;
7403 /* Verify that DECL is private within CTX. If there's specific information
7404 to the contrary in the innermost scope, generate an error. */
7406 static bool
7407 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7409 splay_tree_node n;
7411 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7412 if (n != NULL)
7414 if (n->value & GOVD_SHARED)
7416 if (ctx == gimplify_omp_ctxp)
7418 if (simd)
7419 error ("iteration variable %qE is predetermined linear",
7420 DECL_NAME (decl));
7421 else
7422 error ("iteration variable %qE should be private",
7423 DECL_NAME (decl));
7424 n->value = GOVD_PRIVATE;
7425 return true;
7427 else
7428 return false;
7430 else if ((n->value & GOVD_EXPLICIT) != 0
7431 && (ctx == gimplify_omp_ctxp
7432 || (ctx->region_type == ORT_COMBINED_PARALLEL
7433 && gimplify_omp_ctxp->outer_context == ctx)))
7435 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7436 error ("iteration variable %qE should not be firstprivate",
7437 DECL_NAME (decl));
7438 else if ((n->value & GOVD_REDUCTION) != 0)
7439 error ("iteration variable %qE should not be reduction",
7440 DECL_NAME (decl));
7441 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7442 error ("iteration variable %qE should not be linear",
7443 DECL_NAME (decl));
7445 return (ctx == gimplify_omp_ctxp
7446 || (ctx->region_type == ORT_COMBINED_PARALLEL
7447 && gimplify_omp_ctxp->outer_context == ctx));
7450 if (ctx->region_type != ORT_WORKSHARE
7451 && ctx->region_type != ORT_TASKGROUP
7452 && ctx->region_type != ORT_SIMD
7453 && ctx->region_type != ORT_ACC)
7454 return false;
7455 else if (ctx->outer_context)
7456 return omp_is_private (ctx->outer_context, decl, simd);
7457 return false;
7460 /* Return true if DECL is private within a parallel region
7461 that binds to the current construct's context or in parallel
7462 region's REDUCTION clause. */
7464 static bool
7465 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7467 splay_tree_node n;
7471 ctx = ctx->outer_context;
7472 if (ctx == NULL)
7474 if (is_global_var (decl))
7475 return false;
7477 /* References might be private, but might be shared too,
7478 when checking for copyprivate, assume they might be
7479 private, otherwise assume they might be shared. */
7480 if (copyprivate)
7481 return true;
7483 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7484 return false;
7486 /* Treat C++ privatized non-static data members outside
7487 of the privatization the same. */
7488 if (omp_member_access_dummy_var (decl))
7489 return false;
7491 return true;
7494 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7496 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7497 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7498 continue;
7500 if (n != NULL)
7502 if ((n->value & GOVD_LOCAL) != 0
7503 && omp_member_access_dummy_var (decl))
7504 return false;
7505 return (n->value & GOVD_SHARED) == 0;
7508 while (ctx->region_type == ORT_WORKSHARE
7509 || ctx->region_type == ORT_TASKGROUP
7510 || ctx->region_type == ORT_SIMD
7511 || ctx->region_type == ORT_ACC);
7512 return false;
7515 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7517 static tree
7518 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7520 tree t = *tp;
7522 /* If this node has been visited, unmark it and keep looking. */
7523 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7524 return t;
7526 if (IS_TYPE_OR_DECL_P (t))
7527 *walk_subtrees = 0;
7528 return NULL_TREE;
/* If *LIST_P contains any OpenMP depend clauses with iterators,
   lower all the depend clauses by populating a corresponding depend
   array that holds the addresses of all dependences.  Returns 0 if
   there are no such depend clauses, 2 if all depend clauses should be
   removed (a gimplification error occurred), and 1 otherwise, in which
   case a single clause referring to the array has been prepended to
   *LIST_P.  */
7536 static int
7537 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7539 tree c;
7540 gimple *g;
/* Per-kind bookkeeping.  Kind index, as assigned by the switches
   below: 0 = out/inout, 1 = mutexinoutset, 2 = in, 3 = depobj.
   n[i] counts plain (non-iterator) clauses of kind i; counts[i]
   accumulates the (possibly runtime) number of addresses contributed
   by iterator clauses of kind i.  */
7541 size_t n[4] = { 0, 0, 0, 0 };
7542 bool unused[4];
7543 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7544 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7545 size_t i, j;
7546 location_t first_loc = UNKNOWN_LOCATION;
/* First pass: classify every depend clause by kind and compute how
   many addresses each will contribute.  Iterator bounds/steps are
   gimplified here; consecutive clauses sharing the same iterator
   TREE_VEC reuse last_count.  */
7548 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7551 switch (OMP_CLAUSE_DEPEND_KIND (c))
7553 case OMP_CLAUSE_DEPEND_IN:
7554 i = 2;
7555 break;
7556 case OMP_CLAUSE_DEPEND_OUT:
7557 case OMP_CLAUSE_DEPEND_INOUT:
7558 i = 0;
7559 break;
7560 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7561 i = 1;
7562 break;
7563 case OMP_CLAUSE_DEPEND_DEPOBJ:
7564 i = 3;
7565 break;
7566 case OMP_CLAUSE_DEPEND_SOURCE:
7567 case OMP_CLAUSE_DEPEND_SINK:
7568 continue;
7569 default:
7570 gcc_unreachable ();
7572 tree t = OMP_CLAUSE_DECL (c);
7573 if (first_loc == UNKNOWN_LOCATION)
7574 first_loc = OMP_CLAUSE_LOCATION (c);
/* A TREE_LIST whose TREE_PURPOSE is a TREE_VEC is an iterator
   depend clause; TREE_VEC elements are
   (var, begin, end, step, orig_step, block).  */
7575 if (TREE_CODE (t) == TREE_LIST
7576 && TREE_PURPOSE (t)
7577 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7579 if (TREE_PURPOSE (t) != last_iter)
7581 tree tcnt = size_one_node;
7582 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7584 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7585 is_gimple_val, fb_rvalue) == GS_ERROR
7586 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7587 is_gimple_val, fb_rvalue) == GS_ERROR
7588 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7589 is_gimple_val, fb_rvalue) == GS_ERROR
7590 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7591 is_gimple_val, fb_rvalue)
7592 == GS_ERROR))
7593 return 2;
7594 tree var = TREE_VEC_ELT (it, 0);
7595 tree begin = TREE_VEC_ELT (it, 1);
7596 tree end = TREE_VEC_ELT (it, 2);
7597 tree step = TREE_VEC_ELT (it, 3);
7598 tree orig_step = TREE_VEC_ELT (it, 4);
7599 tree type = TREE_TYPE (var);
7600 tree stype = TREE_TYPE (step);
7601 location_t loc = DECL_SOURCE_LOCATION (var);
7602 tree endmbegin;
7603 /* Compute count for this iterator as
7604 orig_step > 0
7605 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7606 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7607 and compute product of those for the entire depend
7608 clause. */
7609 if (POINTER_TYPE_P (type))
7610 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7611 stype, end, begin);
7612 else
7613 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7614 end, begin);
7615 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7616 step,
7617 build_int_cst (stype, 1));
7618 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7619 build_int_cst (stype, 1));
7620 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7621 unshare_expr (endmbegin),
7622 stepm1);
7623 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7624 pos, step);
7625 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7626 endmbegin, stepp1);
/* For unsigned step types, negate both the numerator and the
   (negative-direction) step so the TRUNC_DIV below divides
   nonnegative quantities.  */
7627 if (TYPE_UNSIGNED (stype))
7629 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7630 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7632 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7633 neg, step);
7634 step = NULL_TREE;
7635 tree cond = fold_build2_loc (loc, LT_EXPR,
7636 boolean_type_node,
7637 begin, end);
7638 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7639 build_int_cst (stype, 0));
7640 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7641 end, begin);
7642 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7643 build_int_cst (stype, 0));
7644 tree osteptype = TREE_TYPE (orig_step);
7645 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7646 orig_step,
7647 build_int_cst (osteptype, 0));
7648 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7649 cond, pos, neg);
7650 cnt = fold_convert_loc (loc, sizetype, cnt);
7651 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7652 fb_rvalue) == GS_ERROR)
7653 return 2;
7654 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7656 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7657 fb_rvalue) == GS_ERROR)
7658 return 2;
7659 last_iter = TREE_PURPOSE (t);
7660 last_count = tcnt;
7662 if (counts[i] == NULL_TREE)
7663 counts[i] = last_count;
7664 else
7665 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7666 PLUS_EXPR, counts[i], last_count);
7668 else
7669 n[i]++;
/* No iterator clauses at all: nothing to lower, bail out.  */
7671 for (i = 0; i < 4; i++)
7672 if (counts[i])
7673 break;
7674 if (i == 4)
7675 return 0;
/* Fold the plain-clause counts into counts[] and compute the total
   number of addresses; record in unused[] which kinds never occur.  */
7677 tree total = size_zero_node;
7678 for (i = 0; i < 4; i++)
7680 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7681 if (counts[i] == NULL_TREE)
7682 counts[i] = size_zero_node;
7683 if (n[i])
7684 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7685 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7686 fb_rvalue) == GS_ERROR)
7687 return 2;
7688 total = size_binop (PLUS_EXPR, total, counts[i]);
7691 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7692 == GS_ERROR)
7693 return 2;
/* is_old: no mutexinoutset nor depobj dependences were seen, so the
   array only needs 1 extra header slot instead of 4 (presumably the
   pre-OpenMP-5.0 runtime layout -- TODO confirm against libgomp).  */
7694 bool is_old = unused[1] && unused[3];
7695 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7696 size_int (is_old ? 1 : 4));
7697 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7698 tree array = create_tmp_var_raw (type);
7699 TREE_ADDRESSABLE (array) = 1;
/* Runtime-sized array: gimplify its type, register it as local in the
   nearest enclosing OMP context that owns a data environment, and
   declare it as a VLA; otherwise a plain temporary suffices.  */
7700 if (TREE_CODE (totalpx) != INTEGER_CST)
7702 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7703 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7704 if (gimplify_omp_ctxp)
7706 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7707 while (ctx
7708 && (ctx->region_type == ORT_WORKSHARE
7709 || ctx->region_type == ORT_TASKGROUP
7710 || ctx->region_type == ORT_SIMD
7711 || ctx->region_type == ORT_ACC))
7712 ctx = ctx->outer_context;
7713 if (ctx)
7714 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7716 gimplify_vla_decl (array, pre_p);
7718 else
7719 gimple_add_tmp_var (array);
/* Fill in the header: for the new layout array[0] = 0 and
   array[1] = total; the per-kind counts follow.  For the old layout
   array[0] = total and array[1] = counts[0].  */
7720 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7721 NULL_TREE);
7722 tree tem;
7723 if (!is_old)
7725 tem = build2 (MODIFY_EXPR, void_type_node, r,
7726 build_int_cst (ptr_type_node, 0));
7727 gimplify_and_add (tem, pre_p);
7728 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7729 NULL_TREE);
7731 tem = build2 (MODIFY_EXPR, void_type_node, r,
7732 fold_convert (ptr_type_node, total));
7733 gimplify_and_add (tem, pre_p);
7734 for (i = 1; i < (is_old ? 2 : 4); i++)
7736 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7737 NULL_TREE, NULL_TREE);
7738 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7739 gimplify_and_add (tem, pre_p);
/* cnts[i] are the running insertion cursors into the array, one per
   kind; cnts[0] starts just past the header (index 2 old layout,
   5 new layout) and each next kind starts where the previous kind's
   addresses end.  Unused kinds alias the previous cursor.  */
7742 tree cnts[4];
7743 for (j = 4; j; j--)
7744 if (!unused[j - 1])
7745 break;
7746 for (i = 0; i < 4; i++)
7748 if (i && (i >= j || unused[i - 1]))
7750 cnts[i] = cnts[i - 1];
7751 continue;
7753 cnts[i] = create_tmp_var (sizetype);
7754 if (i == 0)
7755 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7756 else
7758 tree t;
7759 if (is_old)
7760 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7761 else
7762 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7763 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7764 == GS_ERROR)
7765 return 2;
7766 g = gimple_build_assign (cnts[i], t);
7768 gimple_seq_add_stmt (pre_p, g);
/* Second pass: store the address of every dependence into the array
   at its kind's cursor, bumping the cursor.  For iterator clauses,
   rebuild the iterator loop nest (as gotos inside nested BIND_EXPRs)
   so the stores execute once per iteration.  */
7771 last_iter = NULL_TREE;
7772 tree last_bind = NULL_TREE;
7773 tree *last_body = NULL;
7774 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7777 switch (OMP_CLAUSE_DEPEND_KIND (c))
7779 case OMP_CLAUSE_DEPEND_IN:
7780 i = 2;
7781 break;
7782 case OMP_CLAUSE_DEPEND_OUT:
7783 case OMP_CLAUSE_DEPEND_INOUT:
7784 i = 0;
7785 break;
7786 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7787 i = 1;
7788 break;
7789 case OMP_CLAUSE_DEPEND_DEPOBJ:
7790 i = 3;
7791 break;
7792 case OMP_CLAUSE_DEPEND_SOURCE:
7793 case OMP_CLAUSE_DEPEND_SINK:
7794 continue;
7795 default:
7796 gcc_unreachable ();
7798 tree t = OMP_CLAUSE_DECL (c);
7799 if (TREE_CODE (t) == TREE_LIST
7800 && TREE_PURPOSE (t)
7801 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7803 if (TREE_PURPOSE (t) != last_iter)
7805 if (last_bind)
7806 gimplify_and_add (last_bind, pre_p);
7807 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5)
7808 last_bind = build3 (BIND_EXPR, void_type_node,
7809 BLOCK_VARS (block), NULL, block);
7810 TREE_SIDE_EFFECTS (last_bind) = 1;
7811 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7812 tree *p = &BIND_EXPR_BODY (last_bind);
7813 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7815 tree var = TREE_VEC_ELT (it, 0);
7816 tree begin = TREE_VEC_ELT (it, 1);
7817 tree end = TREE_VEC_ELT (it, 2);
7818 tree step = TREE_VEC_ELT (it, 3);
7819 tree orig_step = TREE_VEC_ELT (it, 4);
7820 tree type = TREE_TYPE (var);
7821 location_t loc = DECL_SOURCE_LOCATION (var);
7822 /* Emit:
7823 var = begin;
7824 goto cond_label;
7825 beg_label:
7827 var = var + step;
7828 cond_label:
7829 if (orig_step > 0) {
7830 if (var < end) goto beg_label;
7831 } else {
7832 if (var > end) goto beg_label;
7834 for each iterator, with inner iterators added to
7835 the ... above. */
7836 tree beg_label = create_artificial_label (loc);
7837 tree cond_label = NULL_TREE;
7838 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7839 var, begin);
7840 append_to_statement_list_force (tem, p);
7841 tem = build_and_jump (&cond_label);
7842 append_to_statement_list_force (tem, p);
7843 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7844 append_to_statement_list (tem, p);
7845 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7846 NULL_TREE, NULL_TREE);
7847 TREE_SIDE_EFFECTS (bind) = 1;
7848 SET_EXPR_LOCATION (bind, loc);
7849 append_to_statement_list_force (bind, p);
7850 if (POINTER_TYPE_P (type))
7851 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7852 var, fold_convert_loc (loc, sizetype,
7853 step));
7854 else
7855 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7856 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7857 var, tem);
7858 append_to_statement_list_force (tem, p);
7859 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
7860 append_to_statement_list (tem, p);
7861 tree cond = fold_build2_loc (loc, LT_EXPR,
7862 boolean_type_node,
7863 var, end);
7864 tree pos
7865 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7866 cond, build_and_jump (&beg_label),
7867 void_node);
7868 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7869 var, end);
7870 tree neg
7871 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7872 cond, build_and_jump (&beg_label),
7873 void_node);
7874 tree osteptype = TREE_TYPE (orig_step);
7875 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7876 orig_step,
7877 build_int_cst (osteptype, 0));
7878 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
7879 cond, pos, neg);
7880 append_to_statement_list_force (tem, p);
7881 p = &BIND_EXPR_BODY (bind);
7883 last_body = p;
7885 last_iter = TREE_PURPOSE (t);
7886 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7888 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
7889 0), last_body);
7890 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7892 if (error_operand_p (TREE_VALUE (t)))
7893 return 2;
7894 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
7895 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7896 NULL_TREE, NULL_TREE);
7897 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7898 void_type_node, r, TREE_VALUE (t));
7899 append_to_statement_list_force (tem, last_body);
7900 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7901 void_type_node, cnts[i],
7902 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
7903 append_to_statement_list_force (tem, last_body);
7904 TREE_VALUE (t) = null_pointer_node;
7906 else
7908 if (last_bind)
7910 gimplify_and_add (last_bind, pre_p);
7911 last_bind = NULL_TREE;
7913 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7915 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7916 NULL, is_gimple_val, fb_rvalue);
7917 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7919 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7920 return 2;
7921 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7922 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7923 is_gimple_val, fb_rvalue) == GS_ERROR)
7924 return 2;
7925 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7926 NULL_TREE, NULL_TREE);
7927 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
7928 gimplify_and_add (tem, pre_p);
7929 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
7930 size_int (1)));
7931 gimple_seq_add_stmt (pre_p, g);
7934 if (last_bind)
7935 gimplify_and_add (last_bind, pre_p);
/* Consistency check: each cursor must have ended exactly where the
   first-pass counts predicted; emit a runtime __builtin_trap ()
   otherwise.  */
7936 tree cond = boolean_false_node;
7937 if (is_old)
7939 if (!unused[0])
7940 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
7941 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
7942 size_int (2)));
7943 if (!unused[2])
7944 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
7945 build2_loc (first_loc, NE_EXPR, boolean_type_node,
7946 cnts[2],
7947 size_binop_loc (first_loc, PLUS_EXPR,
7948 totalpx,
7949 size_int (1))));
7951 else
7953 tree prev = size_int (5);
7954 for (i = 0; i < 4; i++)
7956 if (unused[i])
7957 continue;
7958 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
7959 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
7960 build2_loc (first_loc, NE_EXPR, boolean_type_node,
7961 cnts[i], unshare_expr (prev)));
7964 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
7965 build_call_expr_loc (first_loc,
7966 builtin_decl_explicit (BUILT_IN_TRAP),
7967 0), void_node);
7968 gimplify_and_add (tem, pre_p);
/* Prepend a single OMP_CLAUSE_DEPEND_LAST clause whose decl is the
   address of the lowered array; the caller handles the original
   clauses based on the return value of 1.  */
7969 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7970 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
7971 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7972 OMP_CLAUSE_CHAIN (c) = *list_p;
7973 *list_p = c;
7974 return 1;
7977 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7978 and previous omp contexts. */
7980 static void
7981 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7982 enum omp_region_type region_type,
7983 enum tree_code code)
7985 struct gimplify_omp_ctx *ctx, *outer_ctx;
7986 tree c;
7987 hash_map<tree, tree> *struct_map_to_clause = NULL;
7988 tree *prev_list_p = NULL;
7989 int handled_depend_iterators = -1;
7990 int nowait = -1;
7992 ctx = new_omp_context (region_type);
7993 outer_ctx = ctx->outer_context;
7994 if (code == OMP_TARGET)
7996 if (!lang_GNU_Fortran ())
7997 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7998 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8000 if (!lang_GNU_Fortran ())
8001 switch (code)
8003 case OMP_TARGET:
8004 case OMP_TARGET_DATA:
8005 case OMP_TARGET_ENTER_DATA:
8006 case OMP_TARGET_EXIT_DATA:
8007 case OACC_DECLARE:
8008 case OACC_HOST_DATA:
8009 case OACC_PARALLEL:
8010 case OACC_KERNELS:
8011 ctx->target_firstprivatize_array_bases = true;
8012 default:
8013 break;
8016 while ((c = *list_p) != NULL)
8018 bool remove = false;
8019 bool notice_outer = true;
8020 const char *check_non_private = NULL;
8021 unsigned int flags;
8022 tree decl;
8024 switch (OMP_CLAUSE_CODE (c))
8026 case OMP_CLAUSE_PRIVATE:
8027 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8028 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8030 flags |= GOVD_PRIVATE_OUTER_REF;
8031 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8033 else
8034 notice_outer = false;
8035 goto do_add;
8036 case OMP_CLAUSE_SHARED:
8037 flags = GOVD_SHARED | GOVD_EXPLICIT;
8038 goto do_add;
8039 case OMP_CLAUSE_FIRSTPRIVATE:
8040 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8041 check_non_private = "firstprivate";
8042 goto do_add;
8043 case OMP_CLAUSE_LASTPRIVATE:
8044 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8045 switch (code)
8047 case OMP_DISTRIBUTE:
8048 error_at (OMP_CLAUSE_LOCATION (c),
8049 "conditional %<lastprivate%> clause on "
8050 "%<distribute%> construct");
8051 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8052 break;
8053 case OMP_TASKLOOP:
8054 error_at (OMP_CLAUSE_LOCATION (c),
8055 "conditional %<lastprivate%> clause on "
8056 "%<taskloop%> construct");
8057 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8058 break;
8059 default:
8060 break;
8062 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8063 check_non_private = "lastprivate";
8064 decl = OMP_CLAUSE_DECL (c);
8065 if (error_operand_p (decl))
8066 goto do_add;
8067 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8068 && !lang_hooks.decls.omp_scalar_p (decl))
8070 error_at (OMP_CLAUSE_LOCATION (c),
8071 "non-scalar variable %qD in conditional "
8072 "%<lastprivate%> clause", decl);
8073 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8075 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8076 sorry_at (OMP_CLAUSE_LOCATION (c),
8077 "%<conditional%> modifier on %<lastprivate%> clause "
8078 "not supported yet");
8079 if (outer_ctx
8080 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8081 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8082 == ORT_COMBINED_TEAMS))
8083 && splay_tree_lookup (outer_ctx->variables,
8084 (splay_tree_key) decl) == NULL)
8086 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8087 if (outer_ctx->outer_context)
8088 omp_notice_variable (outer_ctx->outer_context, decl, true);
8090 else if (outer_ctx
8091 && (outer_ctx->region_type & ORT_TASK) != 0
8092 && outer_ctx->combined_loop
8093 && splay_tree_lookup (outer_ctx->variables,
8094 (splay_tree_key) decl) == NULL)
8096 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8097 if (outer_ctx->outer_context)
8098 omp_notice_variable (outer_ctx->outer_context, decl, true);
8100 else if (outer_ctx
8101 && (outer_ctx->region_type == ORT_WORKSHARE
8102 || outer_ctx->region_type == ORT_ACC)
8103 && outer_ctx->combined_loop
8104 && splay_tree_lookup (outer_ctx->variables,
8105 (splay_tree_key) decl) == NULL
8106 && !omp_check_private (outer_ctx, decl, false))
8108 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8109 if (outer_ctx->outer_context
8110 && (outer_ctx->outer_context->region_type
8111 == ORT_COMBINED_PARALLEL)
8112 && splay_tree_lookup (outer_ctx->outer_context->variables,
8113 (splay_tree_key) decl) == NULL)
8115 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8116 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8117 if (octx->outer_context)
8119 octx = octx->outer_context;
8120 if (octx->region_type == ORT_WORKSHARE
8121 && octx->combined_loop
8122 && splay_tree_lookup (octx->variables,
8123 (splay_tree_key) decl) == NULL
8124 && !omp_check_private (octx, decl, false))
8126 omp_add_variable (octx, decl,
8127 GOVD_LASTPRIVATE | GOVD_SEEN);
8128 octx = octx->outer_context;
8129 if (octx
8130 && ((octx->region_type & ORT_COMBINED_TEAMS)
8131 == ORT_COMBINED_TEAMS)
8132 && (splay_tree_lookup (octx->variables,
8133 (splay_tree_key) decl)
8134 == NULL))
8136 omp_add_variable (octx, decl,
8137 GOVD_SHARED | GOVD_SEEN);
8138 octx = octx->outer_context;
8141 if (octx)
8142 omp_notice_variable (octx, decl, true);
8145 else if (outer_ctx->outer_context)
8146 omp_notice_variable (outer_ctx->outer_context, decl, true);
8148 goto do_add;
8149 case OMP_CLAUSE_REDUCTION:
8150 if (OMP_CLAUSE_REDUCTION_TASK (c))
8152 if (region_type == ORT_WORKSHARE)
8154 if (nowait == -1)
8155 nowait = omp_find_clause (*list_p,
8156 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8157 if (nowait
8158 && (outer_ctx == NULL
8159 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8161 error_at (OMP_CLAUSE_LOCATION (c),
8162 "%<task%> reduction modifier on a construct "
8163 "with a %<nowait%> clause");
8164 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8167 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8169 error_at (OMP_CLAUSE_LOCATION (c),
8170 "invalid %<task%> reduction modifier on construct "
8171 "other than %<parallel%>, %<for%> or %<sections%>");
8172 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8175 /* FALLTHRU */
8176 case OMP_CLAUSE_IN_REDUCTION:
8177 case OMP_CLAUSE_TASK_REDUCTION:
8178 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8179 /* OpenACC permits reductions on private variables. */
8180 if (!(region_type & ORT_ACC)
8181 /* taskgroup is actually not a worksharing region. */
8182 && code != OMP_TASKGROUP)
8183 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8184 decl = OMP_CLAUSE_DECL (c);
8185 if (TREE_CODE (decl) == MEM_REF)
8187 tree type = TREE_TYPE (decl);
8188 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8189 NULL, is_gimple_val, fb_rvalue, false)
8190 == GS_ERROR)
8192 remove = true;
8193 break;
8195 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8196 if (DECL_P (v))
8198 omp_firstprivatize_variable (ctx, v);
8199 omp_notice_variable (ctx, v, true);
8201 decl = TREE_OPERAND (decl, 0);
8202 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8204 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8205 NULL, is_gimple_val, fb_rvalue, false)
8206 == GS_ERROR)
8208 remove = true;
8209 break;
8211 v = TREE_OPERAND (decl, 1);
8212 if (DECL_P (v))
8214 omp_firstprivatize_variable (ctx, v);
8215 omp_notice_variable (ctx, v, true);
8217 decl = TREE_OPERAND (decl, 0);
8219 if (TREE_CODE (decl) == ADDR_EXPR
8220 || TREE_CODE (decl) == INDIRECT_REF)
8221 decl = TREE_OPERAND (decl, 0);
8223 goto do_add_decl;
8224 case OMP_CLAUSE_LINEAR:
8225 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8226 is_gimple_val, fb_rvalue) == GS_ERROR)
8228 remove = true;
8229 break;
8231 else
8233 if (code == OMP_SIMD
8234 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8236 struct gimplify_omp_ctx *octx = outer_ctx;
8237 if (octx
8238 && octx->region_type == ORT_WORKSHARE
8239 && octx->combined_loop
8240 && !octx->distribute)
8242 if (octx->outer_context
8243 && (octx->outer_context->region_type
8244 == ORT_COMBINED_PARALLEL))
8245 octx = octx->outer_context->outer_context;
8246 else
8247 octx = octx->outer_context;
8249 if (octx
8250 && octx->region_type == ORT_WORKSHARE
8251 && octx->combined_loop
8252 && octx->distribute)
8254 error_at (OMP_CLAUSE_LOCATION (c),
8255 "%<linear%> clause for variable other than "
8256 "loop iterator specified on construct "
8257 "combined with %<distribute%>");
8258 remove = true;
8259 break;
8262 /* For combined #pragma omp parallel for simd, need to put
8263 lastprivate and perhaps firstprivate too on the
8264 parallel. Similarly for #pragma omp for simd. */
8265 struct gimplify_omp_ctx *octx = outer_ctx;
8266 decl = NULL_TREE;
8269 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8270 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8271 break;
8272 decl = OMP_CLAUSE_DECL (c);
8273 if (error_operand_p (decl))
8275 decl = NULL_TREE;
8276 break;
8278 flags = GOVD_SEEN;
8279 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8280 flags |= GOVD_FIRSTPRIVATE;
8281 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8282 flags |= GOVD_LASTPRIVATE;
8283 if (octx
8284 && octx->region_type == ORT_WORKSHARE
8285 && octx->combined_loop)
8287 if (octx->outer_context
8288 && (octx->outer_context->region_type
8289 == ORT_COMBINED_PARALLEL))
8290 octx = octx->outer_context;
8291 else if (omp_check_private (octx, decl, false))
8292 break;
8294 else if (octx
8295 && (octx->region_type & ORT_TASK) != 0
8296 && octx->combined_loop)
8298 else if (octx
8299 && octx->region_type == ORT_COMBINED_PARALLEL
8300 && ctx->region_type == ORT_WORKSHARE
8301 && octx == outer_ctx)
8302 flags = GOVD_SEEN | GOVD_SHARED;
8303 else if (octx
8304 && ((octx->region_type & ORT_COMBINED_TEAMS)
8305 == ORT_COMBINED_TEAMS))
8306 flags = GOVD_SEEN | GOVD_SHARED;
8307 else if (octx
8308 && octx->region_type == ORT_COMBINED_TARGET)
8310 flags &= ~GOVD_LASTPRIVATE;
8311 if (flags == GOVD_SEEN)
8312 break;
8314 else
8315 break;
8316 splay_tree_node on
8317 = splay_tree_lookup (octx->variables,
8318 (splay_tree_key) decl);
8319 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8321 octx = NULL;
8322 break;
8324 omp_add_variable (octx, decl, flags);
8325 if (octx->outer_context == NULL)
8326 break;
8327 octx = octx->outer_context;
8329 while (1);
8330 if (octx
8331 && decl
8332 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8333 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8334 omp_notice_variable (octx, decl, true);
8336 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8337 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8338 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8340 notice_outer = false;
8341 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8343 goto do_add;
8345 case OMP_CLAUSE_MAP:
8346 decl = OMP_CLAUSE_DECL (c);
8347 if (error_operand_p (decl))
8348 remove = true;
8349 switch (code)
8351 case OMP_TARGET:
8352 break;
8353 case OACC_DATA:
8354 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8355 break;
8356 /* FALLTHRU */
8357 case OMP_TARGET_DATA:
8358 case OMP_TARGET_ENTER_DATA:
8359 case OMP_TARGET_EXIT_DATA:
8360 case OACC_ENTER_DATA:
8361 case OACC_EXIT_DATA:
8362 case OACC_HOST_DATA:
8363 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8364 || (OMP_CLAUSE_MAP_KIND (c)
8365 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8366 /* For target {,enter ,exit }data only the array slice is
8367 mapped, but not the pointer to it. */
8368 remove = true;
8369 break;
8370 default:
8371 break;
8373 if (remove)
8374 break;
8375 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8377 struct gimplify_omp_ctx *octx;
8378 for (octx = outer_ctx; octx; octx = octx->outer_context)
8380 if (octx->region_type != ORT_ACC_HOST_DATA)
8381 break;
8382 splay_tree_node n2
8383 = splay_tree_lookup (octx->variables,
8384 (splay_tree_key) decl);
8385 if (n2)
8386 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8387 "declared in enclosing %<host_data%> region",
8388 DECL_NAME (decl));
8391 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8392 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8393 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8394 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8395 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8397 remove = true;
8398 break;
8400 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8401 || (OMP_CLAUSE_MAP_KIND (c)
8402 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8403 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8405 OMP_CLAUSE_SIZE (c)
8406 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8407 false);
8408 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8409 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8411 if (!DECL_P (decl))
8413 tree d = decl, *pd;
8414 if (TREE_CODE (d) == ARRAY_REF)
8416 while (TREE_CODE (d) == ARRAY_REF)
8417 d = TREE_OPERAND (d, 0);
8418 if (TREE_CODE (d) == COMPONENT_REF
8419 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8420 decl = d;
8422 pd = &OMP_CLAUSE_DECL (c);
8423 if (d == decl
8424 && TREE_CODE (decl) == INDIRECT_REF
8425 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8426 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8427 == REFERENCE_TYPE))
8429 pd = &TREE_OPERAND (decl, 0);
8430 decl = TREE_OPERAND (decl, 0);
8432 if (TREE_CODE (decl) == COMPONENT_REF)
8434 while (TREE_CODE (decl) == COMPONENT_REF)
8435 decl = TREE_OPERAND (decl, 0);
8436 if (TREE_CODE (decl) == INDIRECT_REF
8437 && DECL_P (TREE_OPERAND (decl, 0))
8438 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8439 == REFERENCE_TYPE))
8440 decl = TREE_OPERAND (decl, 0);
8442 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8443 == GS_ERROR)
8445 remove = true;
8446 break;
8448 if (DECL_P (decl))
8450 if (error_operand_p (decl))
8452 remove = true;
8453 break;
8456 tree stype = TREE_TYPE (decl);
8457 if (TREE_CODE (stype) == REFERENCE_TYPE)
8458 stype = TREE_TYPE (stype);
8459 if (TYPE_SIZE_UNIT (stype) == NULL
8460 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8462 error_at (OMP_CLAUSE_LOCATION (c),
8463 "mapping field %qE of variable length "
8464 "structure", OMP_CLAUSE_DECL (c));
8465 remove = true;
8466 break;
8469 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8471 /* Error recovery. */
8472 if (prev_list_p == NULL)
8474 remove = true;
8475 break;
8477 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8479 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8480 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8482 remove = true;
8483 break;
8488 tree offset;
8489 poly_int64 bitsize, bitpos;
8490 machine_mode mode;
8491 int unsignedp, reversep, volatilep = 0;
8492 tree base = OMP_CLAUSE_DECL (c);
8493 while (TREE_CODE (base) == ARRAY_REF)
8494 base = TREE_OPERAND (base, 0);
8495 if (TREE_CODE (base) == INDIRECT_REF)
8496 base = TREE_OPERAND (base, 0);
8497 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8498 &mode, &unsignedp, &reversep,
8499 &volatilep);
8500 tree orig_base = base;
8501 if ((TREE_CODE (base) == INDIRECT_REF
8502 || (TREE_CODE (base) == MEM_REF
8503 && integer_zerop (TREE_OPERAND (base, 1))))
8504 && DECL_P (TREE_OPERAND (base, 0))
8505 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8506 == REFERENCE_TYPE))
8507 base = TREE_OPERAND (base, 0);
8508 gcc_assert (base == decl
8509 && (offset == NULL_TREE
8510 || poly_int_tree_p (offset)));
8512 splay_tree_node n
8513 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8514 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8515 == GOMP_MAP_ALWAYS_POINTER);
8516 if (n == NULL || (n->value & GOVD_MAP) == 0)
8518 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8519 OMP_CLAUSE_MAP);
8520 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8521 if (orig_base != base)
8522 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8523 else
8524 OMP_CLAUSE_DECL (l) = decl;
8525 OMP_CLAUSE_SIZE (l) = size_int (1);
8526 if (struct_map_to_clause == NULL)
8527 struct_map_to_clause = new hash_map<tree, tree>;
8528 struct_map_to_clause->put (decl, l);
8529 if (ptr)
8531 enum gomp_map_kind mkind
8532 = code == OMP_TARGET_EXIT_DATA
8533 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8534 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8535 OMP_CLAUSE_MAP);
8536 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8537 OMP_CLAUSE_DECL (c2)
8538 = unshare_expr (OMP_CLAUSE_DECL (c));
8539 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8540 OMP_CLAUSE_SIZE (c2)
8541 = TYPE_SIZE_UNIT (ptr_type_node);
8542 OMP_CLAUSE_CHAIN (l) = c2;
8543 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8545 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8546 tree c3
8547 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8548 OMP_CLAUSE_MAP);
8549 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8550 OMP_CLAUSE_DECL (c3)
8551 = unshare_expr (OMP_CLAUSE_DECL (c4));
8552 OMP_CLAUSE_SIZE (c3)
8553 = TYPE_SIZE_UNIT (ptr_type_node);
8554 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8555 OMP_CLAUSE_CHAIN (c2) = c3;
8557 *prev_list_p = l;
8558 prev_list_p = NULL;
8560 else
8562 OMP_CLAUSE_CHAIN (l) = c;
8563 *list_p = l;
8564 list_p = &OMP_CLAUSE_CHAIN (l);
8566 if (orig_base != base && code == OMP_TARGET)
8568 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8569 OMP_CLAUSE_MAP);
8570 enum gomp_map_kind mkind
8571 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8572 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8573 OMP_CLAUSE_DECL (c2) = decl;
8574 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8575 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8576 OMP_CLAUSE_CHAIN (l) = c2;
8578 flags = GOVD_MAP | GOVD_EXPLICIT;
8579 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8580 flags |= GOVD_SEEN;
8581 goto do_add_decl;
8583 else
8585 tree *osc = struct_map_to_clause->get (decl);
8586 tree *sc = NULL, *scp = NULL;
8587 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8588 n->value |= GOVD_SEEN;
8589 poly_offset_int o1, o2;
8590 if (offset)
8591 o1 = wi::to_poly_offset (offset);
8592 else
8593 o1 = 0;
8594 if (maybe_ne (bitpos, 0))
8595 o1 += bits_to_bytes_round_down (bitpos);
8596 sc = &OMP_CLAUSE_CHAIN (*osc);
8597 if (*sc != c
8598 && (OMP_CLAUSE_MAP_KIND (*sc)
8599 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8600 sc = &OMP_CLAUSE_CHAIN (*sc);
8601 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8602 if (ptr && sc == prev_list_p)
8603 break;
8604 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8605 != COMPONENT_REF
8606 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8607 != INDIRECT_REF)
8608 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8609 != ARRAY_REF))
8610 break;
8611 else
8613 tree offset2;
8614 poly_int64 bitsize2, bitpos2;
8615 base = OMP_CLAUSE_DECL (*sc);
8616 if (TREE_CODE (base) == ARRAY_REF)
8618 while (TREE_CODE (base) == ARRAY_REF)
8619 base = TREE_OPERAND (base, 0);
8620 if (TREE_CODE (base) != COMPONENT_REF
8621 || (TREE_CODE (TREE_TYPE (base))
8622 != ARRAY_TYPE))
8623 break;
8625 else if (TREE_CODE (base) == INDIRECT_REF
8626 && (TREE_CODE (TREE_OPERAND (base, 0))
8627 == COMPONENT_REF)
8628 && (TREE_CODE (TREE_TYPE
8629 (TREE_OPERAND (base, 0)))
8630 == REFERENCE_TYPE))
8631 base = TREE_OPERAND (base, 0);
8632 base = get_inner_reference (base, &bitsize2,
8633 &bitpos2, &offset2,
8634 &mode, &unsignedp,
8635 &reversep, &volatilep);
8636 if ((TREE_CODE (base) == INDIRECT_REF
8637 || (TREE_CODE (base) == MEM_REF
8638 && integer_zerop (TREE_OPERAND (base,
8639 1))))
8640 && DECL_P (TREE_OPERAND (base, 0))
8641 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8642 0)))
8643 == REFERENCE_TYPE))
8644 base = TREE_OPERAND (base, 0);
8645 if (base != decl)
8646 break;
8647 if (scp)
8648 continue;
8649 gcc_assert (offset == NULL_TREE
8650 || poly_int_tree_p (offset));
8651 tree d1 = OMP_CLAUSE_DECL (*sc);
8652 tree d2 = OMP_CLAUSE_DECL (c);
8653 while (TREE_CODE (d1) == ARRAY_REF)
8654 d1 = TREE_OPERAND (d1, 0);
8655 while (TREE_CODE (d2) == ARRAY_REF)
8656 d2 = TREE_OPERAND (d2, 0);
8657 if (TREE_CODE (d1) == INDIRECT_REF)
8658 d1 = TREE_OPERAND (d1, 0);
8659 if (TREE_CODE (d2) == INDIRECT_REF)
8660 d2 = TREE_OPERAND (d2, 0);
8661 while (TREE_CODE (d1) == COMPONENT_REF)
8662 if (TREE_CODE (d2) == COMPONENT_REF
8663 && TREE_OPERAND (d1, 1)
8664 == TREE_OPERAND (d2, 1))
8666 d1 = TREE_OPERAND (d1, 0);
8667 d2 = TREE_OPERAND (d2, 0);
8669 else
8670 break;
8671 if (d1 == d2)
8673 error_at (OMP_CLAUSE_LOCATION (c),
8674 "%qE appears more than once in map "
8675 "clauses", OMP_CLAUSE_DECL (c));
8676 remove = true;
8677 break;
8679 if (offset2)
8680 o2 = wi::to_poly_offset (offset2);
8681 else
8682 o2 = 0;
8683 o2 += bits_to_bytes_round_down (bitpos2);
8684 if (maybe_lt (o1, o2)
8685 || (known_eq (o1, 2)
8686 && maybe_lt (bitpos, bitpos2)))
8688 if (ptr)
8689 scp = sc;
8690 else
8691 break;
8694 if (remove)
8695 break;
8696 OMP_CLAUSE_SIZE (*osc)
8697 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8698 size_one_node);
8699 if (ptr)
8701 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8702 OMP_CLAUSE_MAP);
8703 tree cl = NULL_TREE;
8704 enum gomp_map_kind mkind
8705 = code == OMP_TARGET_EXIT_DATA
8706 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8707 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8708 OMP_CLAUSE_DECL (c2)
8709 = unshare_expr (OMP_CLAUSE_DECL (c));
8710 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8711 OMP_CLAUSE_SIZE (c2)
8712 = TYPE_SIZE_UNIT (ptr_type_node);
8713 cl = scp ? *prev_list_p : c2;
8714 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8716 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8717 tree c3
8718 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8719 OMP_CLAUSE_MAP);
8720 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8721 OMP_CLAUSE_DECL (c3)
8722 = unshare_expr (OMP_CLAUSE_DECL (c4));
8723 OMP_CLAUSE_SIZE (c3)
8724 = TYPE_SIZE_UNIT (ptr_type_node);
8725 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8726 if (!scp)
8727 OMP_CLAUSE_CHAIN (c2) = c3;
8728 else
8729 cl = c3;
8731 if (scp)
8732 *scp = c2;
8733 if (sc == prev_list_p)
8735 *sc = cl;
8736 prev_list_p = NULL;
8738 else
8740 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8741 list_p = prev_list_p;
8742 prev_list_p = NULL;
8743 OMP_CLAUSE_CHAIN (c) = *sc;
8744 *sc = cl;
8745 continue;
8748 else if (*sc != c)
8750 *list_p = OMP_CLAUSE_CHAIN (c);
8751 OMP_CLAUSE_CHAIN (c) = *sc;
8752 *sc = c;
8753 continue;
8757 if (!remove
8758 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8759 && OMP_CLAUSE_CHAIN (c)
8760 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8761 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8762 == GOMP_MAP_ALWAYS_POINTER))
8763 prev_list_p = list_p;
8764 break;
8766 flags = GOVD_MAP | GOVD_EXPLICIT;
8767 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8768 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8769 flags |= GOVD_MAP_ALWAYS_TO;
8770 goto do_add;
8772 case OMP_CLAUSE_DEPEND:
8773 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8775 tree deps = OMP_CLAUSE_DECL (c);
8776 while (deps && TREE_CODE (deps) == TREE_LIST)
8778 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8779 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8780 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8781 pre_p, NULL, is_gimple_val, fb_rvalue);
8782 deps = TREE_CHAIN (deps);
8784 break;
8786 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8787 break;
8788 if (handled_depend_iterators == -1)
8789 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8790 if (handled_depend_iterators)
8792 if (handled_depend_iterators == 2)
8793 remove = true;
8794 break;
8796 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8798 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8799 NULL, is_gimple_val, fb_rvalue);
8800 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8802 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8804 remove = true;
8805 break;
8807 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8808 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8809 is_gimple_val, fb_rvalue) == GS_ERROR)
8811 remove = true;
8812 break;
8814 break;
8816 case OMP_CLAUSE_TO:
8817 case OMP_CLAUSE_FROM:
8818 case OMP_CLAUSE__CACHE_:
8819 decl = OMP_CLAUSE_DECL (c);
8820 if (error_operand_p (decl))
8822 remove = true;
8823 break;
8825 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8826 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8827 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8828 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8829 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8831 remove = true;
8832 break;
8834 if (!DECL_P (decl))
8836 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8837 NULL, is_gimple_lvalue, fb_lvalue)
8838 == GS_ERROR)
8840 remove = true;
8841 break;
8843 break;
8845 goto do_notice;
8847 case OMP_CLAUSE_USE_DEVICE_PTR:
8848 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8849 goto do_add;
8850 case OMP_CLAUSE_IS_DEVICE_PTR:
8851 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8852 goto do_add;
8854 do_add:
8855 decl = OMP_CLAUSE_DECL (c);
8856 do_add_decl:
8857 if (error_operand_p (decl))
8859 remove = true;
8860 break;
8862 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8864 tree t = omp_member_access_dummy_var (decl);
8865 if (t)
8867 tree v = DECL_VALUE_EXPR (decl);
8868 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8869 if (outer_ctx)
8870 omp_notice_variable (outer_ctx, t, true);
8873 if (code == OACC_DATA
8874 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8875 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8876 flags |= GOVD_MAP_0LEN_ARRAY;
8877 omp_add_variable (ctx, decl, flags);
8878 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8879 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
8880 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8881 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8883 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8884 GOVD_LOCAL | GOVD_SEEN);
8885 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8886 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8887 find_decl_expr,
8888 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8889 NULL) == NULL_TREE)
8890 omp_add_variable (ctx,
8891 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8892 GOVD_LOCAL | GOVD_SEEN);
8893 gimplify_omp_ctxp = ctx;
8894 push_gimplify_context ();
8896 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8897 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8899 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8900 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8901 pop_gimplify_context
8902 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8903 push_gimplify_context ();
8904 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8905 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8906 pop_gimplify_context
8907 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8908 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8909 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8911 gimplify_omp_ctxp = outer_ctx;
8913 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8914 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8916 gimplify_omp_ctxp = ctx;
8917 push_gimplify_context ();
8918 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8920 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8921 NULL, NULL);
8922 TREE_SIDE_EFFECTS (bind) = 1;
8923 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8924 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8926 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8927 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8928 pop_gimplify_context
8929 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8930 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8932 gimplify_omp_ctxp = outer_ctx;
8934 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8935 && OMP_CLAUSE_LINEAR_STMT (c))
8937 gimplify_omp_ctxp = ctx;
8938 push_gimplify_context ();
8939 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8941 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8942 NULL, NULL);
8943 TREE_SIDE_EFFECTS (bind) = 1;
8944 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8945 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8947 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8948 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8949 pop_gimplify_context
8950 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8951 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8953 gimplify_omp_ctxp = outer_ctx;
8955 if (notice_outer)
8956 goto do_notice;
8957 break;
8959 case OMP_CLAUSE_COPYIN:
8960 case OMP_CLAUSE_COPYPRIVATE:
8961 decl = OMP_CLAUSE_DECL (c);
8962 if (error_operand_p (decl))
8964 remove = true;
8965 break;
8967 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8968 && !remove
8969 && !omp_check_private (ctx, decl, true))
8971 remove = true;
8972 if (is_global_var (decl))
8974 if (DECL_THREAD_LOCAL_P (decl))
8975 remove = false;
8976 else if (DECL_HAS_VALUE_EXPR_P (decl))
8978 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8980 if (value
8981 && DECL_P (value)
8982 && DECL_THREAD_LOCAL_P (value))
8983 remove = false;
8986 if (remove)
8987 error_at (OMP_CLAUSE_LOCATION (c),
8988 "copyprivate variable %qE is not threadprivate"
8989 " or private in outer context", DECL_NAME (decl));
8991 do_notice:
8992 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
8993 && outer_ctx
8994 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
8995 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8996 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
8997 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE))
8999 splay_tree_node on
9000 = splay_tree_lookup (outer_ctx->variables,
9001 (splay_tree_key)decl);
9002 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9004 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9005 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9006 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9007 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9008 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9009 == POINTER_TYPE))))
9010 omp_firstprivatize_variable (outer_ctx, decl);
9011 else
9012 omp_add_variable (outer_ctx, decl,
9013 GOVD_SEEN | GOVD_SHARED);
9014 omp_notice_variable (outer_ctx, decl, true);
9017 if (outer_ctx)
9018 omp_notice_variable (outer_ctx, decl, true);
9019 if (check_non_private
9020 && region_type == ORT_WORKSHARE
9021 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9022 || decl == OMP_CLAUSE_DECL (c)
9023 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9024 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9025 == ADDR_EXPR
9026 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9027 == POINTER_PLUS_EXPR
9028 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9029 (OMP_CLAUSE_DECL (c), 0), 0))
9030 == ADDR_EXPR)))))
9031 && omp_check_private (ctx, decl, false))
9033 error ("%s variable %qE is private in outer context",
9034 check_non_private, DECL_NAME (decl));
9035 remove = true;
9037 break;
9039 case OMP_CLAUSE_IF:
9040 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9041 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9043 const char *p[2];
9044 for (int i = 0; i < 2; i++)
9045 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9047 case VOID_CST: p[i] = "cancel"; break;
9048 case OMP_PARALLEL: p[i] = "parallel"; break;
9049 case OMP_SIMD: p[i] = "simd"; break;
9050 case OMP_TASK: p[i] = "task"; break;
9051 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9052 case OMP_TARGET_DATA: p[i] = "target data"; break;
9053 case OMP_TARGET: p[i] = "target"; break;
9054 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9055 case OMP_TARGET_ENTER_DATA:
9056 p[i] = "target enter data"; break;
9057 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9058 default: gcc_unreachable ();
9060 error_at (OMP_CLAUSE_LOCATION (c),
9061 "expected %qs %<if%> clause modifier rather than %qs",
9062 p[0], p[1]);
9063 remove = true;
9065 /* Fall through. */
9067 case OMP_CLAUSE_FINAL:
9068 OMP_CLAUSE_OPERAND (c, 0)
9069 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9070 /* Fall through. */
9072 case OMP_CLAUSE_SCHEDULE:
9073 case OMP_CLAUSE_NUM_THREADS:
9074 case OMP_CLAUSE_NUM_TEAMS:
9075 case OMP_CLAUSE_THREAD_LIMIT:
9076 case OMP_CLAUSE_DIST_SCHEDULE:
9077 case OMP_CLAUSE_DEVICE:
9078 case OMP_CLAUSE_PRIORITY:
9079 case OMP_CLAUSE_GRAINSIZE:
9080 case OMP_CLAUSE_NUM_TASKS:
9081 case OMP_CLAUSE_HINT:
9082 case OMP_CLAUSE_ASYNC:
9083 case OMP_CLAUSE_WAIT:
9084 case OMP_CLAUSE_NUM_GANGS:
9085 case OMP_CLAUSE_NUM_WORKERS:
9086 case OMP_CLAUSE_VECTOR_LENGTH:
9087 case OMP_CLAUSE_WORKER:
9088 case OMP_CLAUSE_VECTOR:
9089 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9090 is_gimple_val, fb_rvalue) == GS_ERROR)
9091 remove = true;
9092 break;
9094 case OMP_CLAUSE_GANG:
9095 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9096 is_gimple_val, fb_rvalue) == GS_ERROR)
9097 remove = true;
9098 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9099 is_gimple_val, fb_rvalue) == GS_ERROR)
9100 remove = true;
9101 break;
9103 case OMP_CLAUSE_NOWAIT:
9104 nowait = 1;
9105 break;
9107 case OMP_CLAUSE_ORDERED:
9108 case OMP_CLAUSE_UNTIED:
9109 case OMP_CLAUSE_COLLAPSE:
9110 case OMP_CLAUSE_TILE:
9111 case OMP_CLAUSE_AUTO:
9112 case OMP_CLAUSE_SEQ:
9113 case OMP_CLAUSE_INDEPENDENT:
9114 case OMP_CLAUSE_MERGEABLE:
9115 case OMP_CLAUSE_PROC_BIND:
9116 case OMP_CLAUSE_SAFELEN:
9117 case OMP_CLAUSE_SIMDLEN:
9118 case OMP_CLAUSE_NOGROUP:
9119 case OMP_CLAUSE_THREADS:
9120 case OMP_CLAUSE_SIMD:
9121 case OMP_CLAUSE_IF_PRESENT:
9122 case OMP_CLAUSE_FINALIZE:
9123 break;
9125 case OMP_CLAUSE_DEFAULTMAP:
9126 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9127 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9129 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9130 gdmkmin = GDMK_SCALAR;
9131 gdmkmax = GDMK_POINTER;
9132 break;
9133 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9134 gdmkmin = gdmkmax = GDMK_SCALAR;
9135 break;
9136 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9137 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9138 break;
9139 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9140 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9141 break;
9142 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9143 gdmkmin = gdmkmax = GDMK_POINTER;
9144 break;
9145 default:
9146 gcc_unreachable ();
9148 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9149 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9151 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9152 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9153 break;
9154 case OMP_CLAUSE_DEFAULTMAP_TO:
9155 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9156 break;
9157 case OMP_CLAUSE_DEFAULTMAP_FROM:
9158 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9159 break;
9160 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9161 ctx->defaultmap[gdmk] = GOVD_MAP;
9162 break;
9163 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9164 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9165 break;
9166 case OMP_CLAUSE_DEFAULTMAP_NONE:
9167 ctx->defaultmap[gdmk] = 0;
9168 break;
9169 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9170 switch (gdmk)
9172 case GDMK_SCALAR:
9173 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9174 break;
9175 case GDMK_AGGREGATE:
9176 case GDMK_ALLOCATABLE:
9177 ctx->defaultmap[gdmk] = GOVD_MAP;
9178 break;
9179 case GDMK_POINTER:
9180 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9181 break;
9182 default:
9183 gcc_unreachable ();
9185 break;
9186 default:
9187 gcc_unreachable ();
9189 break;
9191 case OMP_CLAUSE_ALIGNED:
9192 decl = OMP_CLAUSE_DECL (c);
9193 if (error_operand_p (decl))
9195 remove = true;
9196 break;
9198 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9199 is_gimple_val, fb_rvalue) == GS_ERROR)
9201 remove = true;
9202 break;
9204 if (!is_global_var (decl)
9205 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9206 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9207 break;
9209 case OMP_CLAUSE_NONTEMPORAL:
9210 decl = OMP_CLAUSE_DECL (c);
9211 if (error_operand_p (decl))
9213 remove = true;
9214 break;
9216 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9217 break;
9219 case OMP_CLAUSE_DEFAULT:
9220 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9221 break;
9223 default:
9224 gcc_unreachable ();
9227 if (code == OACC_DATA
9228 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9229 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9230 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9231 remove = true;
9232 if (remove)
9233 *list_p = OMP_CLAUSE_CHAIN (c);
9234 else
9235 list_p = &OMP_CLAUSE_CHAIN (c);
9238 gimplify_omp_ctxp = ctx;
9239 if (struct_map_to_clause)
9240 delete struct_map_to_clause;
9243 /* Return true if DECL is a candidate for shared to firstprivate
9244 optimization. We only consider non-addressable scalars, not
9245 too big, and not references. */
9247 static bool
9248 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9250 if (TREE_ADDRESSABLE (decl))
9251 return false;
9252 tree type = TREE_TYPE (decl);
9253 if (!is_gimple_reg_type (type)
9254 || TREE_CODE (type) == REFERENCE_TYPE
9255 || TREE_ADDRESSABLE (type))
9256 return false;
9257 /* Don't optimize too large decls, as each thread/task will have
9258 its own. */
9259 HOST_WIDE_INT len = int_size_in_bytes (type);
9260 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9261 return false;
9262 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9263 return false;
9264 return true;
9267 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9268 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9269 GOVD_WRITTEN in outer contexts. */
9271 static void
9272 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9274 for (; ctx; ctx = ctx->outer_context)
9276 splay_tree_node n = splay_tree_lookup (ctx->variables,
9277 (splay_tree_key) decl);
9278 if (n == NULL)
9279 continue;
9280 else if (n->value & GOVD_SHARED)
9282 n->value |= GOVD_WRITTEN;
9283 return;
9285 else if (n->value & GOVD_DATA_SHARE_CLASS)
9286 return;
9290 /* Helper callback for walk_gimple_seq to discover possible stores
9291 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9292 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9293 for those. */
9295 static tree
9296 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9298 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9300 *walk_subtrees = 0;
9301 if (!wi->is_lhs)
9302 return NULL_TREE;
9304 tree op = *tp;
9307 if (handled_component_p (op))
9308 op = TREE_OPERAND (op, 0);
9309 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9310 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9311 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9312 else
9313 break;
9315 while (1);
9316 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9317 return NULL_TREE;
9319 omp_mark_stores (gimplify_omp_ctxp, op);
9320 return NULL_TREE;
9323 /* Helper callback for walk_gimple_seq to discover possible stores
9324 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9325 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9326 for those. */
9328 static tree
9329 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9330 bool *handled_ops_p,
9331 struct walk_stmt_info *wi)
9333 gimple *stmt = gsi_stmt (*gsi_p);
9334 switch (gimple_code (stmt))
9336 /* Don't recurse on OpenMP constructs for which
9337 gimplify_adjust_omp_clauses already handled the bodies,
9338 except handle gimple_omp_for_pre_body. */
9339 case GIMPLE_OMP_FOR:
9340 *handled_ops_p = true;
9341 if (gimple_omp_for_pre_body (stmt))
9342 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9343 omp_find_stores_stmt, omp_find_stores_op, wi);
9344 break;
9345 case GIMPLE_OMP_PARALLEL:
9346 case GIMPLE_OMP_TASK:
9347 case GIMPLE_OMP_SECTIONS:
9348 case GIMPLE_OMP_SINGLE:
9349 case GIMPLE_OMP_TARGET:
9350 case GIMPLE_OMP_TEAMS:
9351 case GIMPLE_OMP_CRITICAL:
9352 *handled_ops_p = true;
9353 break;
9354 default:
9355 break;
9357 return NULL_TREE;
/* Bundle of state threaded through splay_tree_foreach into
   gimplify_adjust_omp_clauses_1 via its single void* DATA argument.  */

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Head of the clause chain being built.  */
  gimple_seq *pre_p;	/* Sequence receiving gimplified side effects.  */
};
9366 /* For all variables that were not actually used within the context,
9367 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9369 static int
9370 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9372 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9373 gimple_seq *pre_p
9374 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9375 tree decl = (tree) n->key;
9376 unsigned flags = n->value;
9377 enum omp_clause_code code;
9378 tree clause;
9379 bool private_debug;
9381 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9382 return 0;
9383 if ((flags & GOVD_SEEN) == 0)
9384 return 0;
9385 if (flags & GOVD_DEBUG_PRIVATE)
9387 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9388 private_debug = true;
9390 else if (flags & GOVD_MAP)
9391 private_debug = false;
9392 else
9393 private_debug
9394 = lang_hooks.decls.omp_private_debug_clause (decl,
9395 !!(flags & GOVD_SHARED));
9396 if (private_debug)
9397 code = OMP_CLAUSE_PRIVATE;
9398 else if (flags & GOVD_MAP)
9400 code = OMP_CLAUSE_MAP;
9401 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9402 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9404 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9405 return 0;
9408 else if (flags & GOVD_SHARED)
9410 if (is_global_var (decl))
9412 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9413 while (ctx != NULL)
9415 splay_tree_node on
9416 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9417 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9418 | GOVD_PRIVATE | GOVD_REDUCTION
9419 | GOVD_LINEAR | GOVD_MAP)) != 0)
9420 break;
9421 ctx = ctx->outer_context;
9423 if (ctx == NULL)
9424 return 0;
9426 code = OMP_CLAUSE_SHARED;
9428 else if (flags & GOVD_PRIVATE)
9429 code = OMP_CLAUSE_PRIVATE;
9430 else if (flags & GOVD_FIRSTPRIVATE)
9432 code = OMP_CLAUSE_FIRSTPRIVATE;
9433 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9434 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9435 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9437 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9438 "%<target%> construct", decl);
9439 return 0;
9442 else if (flags & GOVD_LASTPRIVATE)
9443 code = OMP_CLAUSE_LASTPRIVATE;
9444 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9445 return 0;
9446 else
9447 gcc_unreachable ();
9449 if (((flags & GOVD_LASTPRIVATE)
9450 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9451 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9452 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9454 tree chain = *list_p;
9455 clause = build_omp_clause (input_location, code);
9456 OMP_CLAUSE_DECL (clause) = decl;
9457 OMP_CLAUSE_CHAIN (clause) = chain;
9458 if (private_debug)
9459 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9460 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9461 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9462 else if (code == OMP_CLAUSE_SHARED
9463 && (flags & GOVD_WRITTEN) == 0
9464 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9465 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9466 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9467 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9468 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9470 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9471 OMP_CLAUSE_DECL (nc) = decl;
9472 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9473 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9474 OMP_CLAUSE_DECL (clause)
9475 = build_simple_mem_ref_loc (input_location, decl);
9476 OMP_CLAUSE_DECL (clause)
9477 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9478 build_int_cst (build_pointer_type (char_type_node), 0));
9479 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9480 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9481 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9482 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9483 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9484 OMP_CLAUSE_CHAIN (nc) = chain;
9485 OMP_CLAUSE_CHAIN (clause) = nc;
9486 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9487 gimplify_omp_ctxp = ctx->outer_context;
9488 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9489 pre_p, NULL, is_gimple_val, fb_rvalue);
9490 gimplify_omp_ctxp = ctx;
9492 else if (code == OMP_CLAUSE_MAP)
9494 int kind;
9495 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9496 switch (flags & (GOVD_MAP_TO_ONLY
9497 | GOVD_MAP_FORCE
9498 | GOVD_MAP_FORCE_PRESENT
9499 | GOVD_MAP_ALLOC_ONLY
9500 | GOVD_MAP_FROM_ONLY))
9502 case 0:
9503 kind = GOMP_MAP_TOFROM;
9504 break;
9505 case GOVD_MAP_FORCE:
9506 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
9507 break;
9508 case GOVD_MAP_TO_ONLY:
9509 kind = GOMP_MAP_TO;
9510 break;
9511 case GOVD_MAP_FROM_ONLY:
9512 kind = GOMP_MAP_FROM;
9513 break;
9514 case GOVD_MAP_ALLOC_ONLY:
9515 kind = GOMP_MAP_ALLOC;
9516 break;
9517 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
9518 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
9519 break;
9520 case GOVD_MAP_FORCE_PRESENT:
9521 kind = GOMP_MAP_FORCE_PRESENT;
9522 break;
9523 default:
9524 gcc_unreachable ();
9526 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
9527 if (DECL_SIZE (decl)
9528 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9530 tree decl2 = DECL_VALUE_EXPR (decl);
9531 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9532 decl2 = TREE_OPERAND (decl2, 0);
9533 gcc_assert (DECL_P (decl2));
9534 tree mem = build_simple_mem_ref (decl2);
9535 OMP_CLAUSE_DECL (clause) = mem;
9536 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9537 if (gimplify_omp_ctxp->outer_context)
9539 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9540 omp_notice_variable (ctx, decl2, true);
9541 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
9543 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9544 OMP_CLAUSE_MAP);
9545 OMP_CLAUSE_DECL (nc) = decl;
9546 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9547 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
9548 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9549 else
9550 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9551 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9552 OMP_CLAUSE_CHAIN (clause) = nc;
9554 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9555 && lang_hooks.decls.omp_privatize_by_reference (decl))
9557 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
9558 OMP_CLAUSE_SIZE (clause)
9559 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
9560 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9561 gimplify_omp_ctxp = ctx->outer_context;
9562 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
9563 pre_p, NULL, is_gimple_val, fb_rvalue);
9564 gimplify_omp_ctxp = ctx;
9565 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9566 OMP_CLAUSE_MAP);
9567 OMP_CLAUSE_DECL (nc) = decl;
9568 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9569 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
9570 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9571 OMP_CLAUSE_CHAIN (clause) = nc;
9573 else
9574 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
9576 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
9578 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
9579 OMP_CLAUSE_DECL (nc) = decl;
9580 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
9581 OMP_CLAUSE_CHAIN (nc) = chain;
9582 OMP_CLAUSE_CHAIN (clause) = nc;
9583 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9584 gimplify_omp_ctxp = ctx->outer_context;
9585 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9586 gimplify_omp_ctxp = ctx;
9588 *list_p = clause;
9589 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9590 gimplify_omp_ctxp = ctx->outer_context;
9591 lang_hooks.decls.omp_finish_clause (clause, pre_p);
9592 if (gimplify_omp_ctxp)
9593 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
9594 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
9595 && DECL_P (OMP_CLAUSE_SIZE (clause)))
9596 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
9597 true);
9598 gimplify_omp_ctxp = ctx;
9599 return 0;
/* Post-gimplification fixup of the OpenMP/OpenACC clause chain at *LIST_P
   for the construct with tree code CODE, using the data-sharing state
   recorded in the current gimplify_omp_ctx:
   - remove clauses whose variable was never actually referenced in the
     region (no GOVD_SEEN bit),
   - rewrite clauses for variable-sized decls in terms of the base decl
     of their DECL_VALUE_EXPR,
   - diagnose invalid clause combinations,
   - append implicit data-sharing clauses via gimplify_adjust_omp_clauses_1,
   then pop and delete the current gimplify omp context.
   BODY, if non-NULL, is the already-gimplified body of the construct and
   is scanned for stores so that SHARED clauses of never-written variables
   can be marked read-only.  */
9602 static void
9603 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
9604 enum tree_code code)
9606 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9607 tree c, decl;
/* If there is an enclosing parallel/task/teams region, walk BODY looking
   for stores (omp_find_stores_*; presumably this sets GOVD_WRITTEN on the
   stored-to variables — the GOVD_WRITTEN tests below rely on it).  */
9609 if (body)
9611 struct gimplify_omp_ctx *octx;
9612 for (octx = ctx; octx; octx = octx->outer_context)
9613 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9614 break;
9615 if (octx)
9617 struct walk_stmt_info wi;
9618 memset (&wi, 0, sizeof (wi));
9619 walk_gimple_seq (body, omp_find_stores_stmt,
9620 omp_find_stores_op, &wi);
/* Walk every explicit clause, deciding whether to keep, adjust or drop
   it.  LIST_P always points at the link holding the current clause, so
   removal is a single pointer update.  */
9623 while ((c = *list_p) != NULL)
9625 splay_tree_node n;
9626 bool remove = false;
9628 switch (OMP_CLAUSE_CODE (c))
9630 case OMP_CLAUSE_FIRSTPRIVATE:
/* Reject _Atomic variables in firstprivate on a (non-OpenACC)
   target construct.  */
9631 if ((ctx->region_type & ORT_TARGET)
9632 && (ctx->region_type & ORT_ACC) == 0
9633 && TYPE_ATOMIC (strip_array_types
9634 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9636 error_at (OMP_CLAUSE_LOCATION (c),
9637 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9638 "%<target%> construct", OMP_CLAUSE_DECL (c));
9639 remove = true;
9640 break;
9642 /* FALLTHRU */
9643 case OMP_CLAUSE_PRIVATE:
9644 case OMP_CLAUSE_SHARED:
9645 case OMP_CLAUSE_LINEAR:
9646 decl = OMP_CLAUSE_DECL (c);
9647 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* Clauses for variables never referenced in the region are dropped.  */
9648 remove = !(n->value & GOVD_SEEN);
9649 if (! remove)
9651 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
/* The frontend may want a debug-only private copy instead;
   convert the clause and mark it OMP_CLAUSE_PRIVATE_DEBUG.  */
9652 if ((n->value & GOVD_DEBUG_PRIVATE)
9653 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9655 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9656 || ((n->value & GOVD_DATA_SHARE_CLASS)
9657 == GOVD_SHARED));
9658 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
9659 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
/* SHARED vars never written inside the region can be flagged
   read-only (enables a shared->firstprivate optimization);
   otherwise propagate the store into outer contexts.  */
9661 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9662 && (n->value & GOVD_WRITTEN) == 0
9663 && DECL_P (decl)
9664 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9665 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9666 else if (DECL_P (decl)
9667 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9668 && (n->value & GOVD_WRITTEN) != 0)
9669 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9670 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9671 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9672 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9674 break;
9676 case OMP_CLAUSE_LASTPRIVATE:
9677 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9678 accurately reflect the presence of a FIRSTPRIVATE clause. */
9679 decl = OMP_CLAUSE_DECL (c);
9680 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9681 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9682 = (n->value & GOVD_FIRSTPRIVATE) != 0;
/* firstprivate+lastprivate of the same variable is invalid on
   distribute.  */
9683 if (code == OMP_DISTRIBUTE
9684 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9686 remove = true;
9687 error_at (OMP_CLAUSE_LOCATION (c),
9688 "same variable used in %<firstprivate%> and "
9689 "%<lastprivate%> clauses on %<distribute%> "
9690 "construct");
9692 if (!remove
9693 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9694 && DECL_P (decl)
9695 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9696 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9697 break;
9699 case OMP_CLAUSE_ALIGNED:
9700 decl = OMP_CLAUSE_DECL (c);
9701 if (!is_global_var (decl))
9703 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9704 remove = n == NULL || !(n->value & GOVD_SEEN);
9705 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9707 struct gimplify_omp_ctx *octx;
9708 if (n != NULL
9709 && (n->value & (GOVD_DATA_SHARE_CLASS
9710 & ~GOVD_FIRSTPRIVATE)))
9711 remove = true;
9712 else
9713 for (octx = ctx->outer_context; octx;
9714 octx = octx->outer_context)
9716 n = splay_tree_lookup (octx->variables,
9717 (splay_tree_key) decl);
9718 if (n == NULL)
9719 continue;
9720 if (n->value & GOVD_LOCAL)
9721 break;
9722 /* We have to avoid assigning a shared variable
9723 to itself when trying to add
9724 __builtin_assume_aligned. */
9725 if (n->value & GOVD_SHARED)
9727 remove = true;
9728 break;
9733 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9735 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9736 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9737 remove = true;
9739 break;
9741 case OMP_CLAUSE_NONTEMPORAL:
9742 decl = OMP_CLAUSE_DECL (c);
9743 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9744 remove = n == NULL || !(n->value & GOVD_SEEN);
9745 break;
9747 case OMP_CLAUSE_MAP:
/* ALWAYS_POINTER maps make no sense on target exit data.  */
9748 if (code == OMP_TARGET_EXIT_DATA
9749 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9751 remove = true;
9752 break;
9754 decl = OMP_CLAUSE_DECL (c);
9755 /* Data clauses associated with acc parallel reductions must be
9756 compatible with present_or_copy. Warn and adjust the clause
9757 if that is not the case. */
9758 if (ctx->region_type == ORT_ACC_PARALLEL)
9760 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9761 n = NULL;
9763 if (DECL_P (t))
9764 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9766 if (n && (n->value & GOVD_REDUCTION))
9768 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9770 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9771 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9772 && kind != GOMP_MAP_FORCE_PRESENT
9773 && kind != GOMP_MAP_POINTER)
9775 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9776 "incompatible data clause with reduction "
9777 "on %qE; promoting to present_or_copy",
9778 DECL_NAME (t));
9779 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
/* Non-decl map operands (e.g. component references): only check
   FIRSTPRIVATE_POINTER maps of unreferenced structure bases.  */
9783 if (!DECL_P (decl))
9785 if ((ctx->region_type & ORT_TARGET) != 0
9786 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9788 if (TREE_CODE (decl) == INDIRECT_REF
9789 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9790 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9791 == REFERENCE_TYPE))
9792 decl = TREE_OPERAND (decl, 0);
9793 if (TREE_CODE (decl) == COMPONENT_REF)
9795 while (TREE_CODE (decl) == COMPONENT_REF)
9796 decl = TREE_OPERAND (decl, 0);
9797 if (DECL_P (decl))
9799 n = splay_tree_lookup (ctx->variables,
9800 (splay_tree_key) decl);
9801 if (!(n->value & GOVD_SEEN))
9802 remove = true;
9806 break;
9808 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9809 if ((ctx->region_type & ORT_TARGET) != 0
9810 && !(n->value & GOVD_SEEN)
9811 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9812 && (!is_global_var (decl)
9813 || !lookup_attribute ("omp declare target link",
9814 DECL_ATTRIBUTES (decl))))
9816 remove = true;
9817 /* For struct element mapping, if struct is never referenced
9818 in target block and none of the mapping has always modifier,
9819 remove all the struct element mappings, which immediately
9820 follow the GOMP_MAP_STRUCT map clause. */
9821 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9823 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9824 while (cnt--)
9825 OMP_CLAUSE_CHAIN (c)
9826 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9829 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9830 && code == OMP_TARGET_EXIT_DATA)
9831 remove = true;
/* Variable-sized decl: map the storage its DECL_VALUE_EXPR points
   to and chain a pointer map for the decl itself.  */
9832 else if (DECL_SIZE (decl)
9833 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9834 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9835 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9836 && (OMP_CLAUSE_MAP_KIND (c)
9837 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9839 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9840 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9841 INTEGER_CST. */
9842 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9844 tree decl2 = DECL_VALUE_EXPR (decl);
9845 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9846 decl2 = TREE_OPERAND (decl2, 0);
9847 gcc_assert (DECL_P (decl2));
9848 tree mem = build_simple_mem_ref (decl2);
9849 OMP_CLAUSE_DECL (c) = mem;
9850 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9851 if (ctx->outer_context)
9853 omp_notice_variable (ctx->outer_context, decl2, true);
9854 omp_notice_variable (ctx->outer_context,
9855 OMP_CLAUSE_SIZE (c), true);
9857 if (((ctx->region_type & ORT_TARGET) != 0
9858 || !ctx->target_firstprivatize_array_bases)
9859 && ((n->value & GOVD_SEEN) == 0
9860 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9862 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9863 OMP_CLAUSE_MAP);
9864 OMP_CLAUSE_DECL (nc) = decl;
9865 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9866 if (ctx->target_firstprivatize_array_bases)
9867 OMP_CLAUSE_SET_MAP_KIND (nc,
9868 GOMP_MAP_FIRSTPRIVATE_POINTER);
9869 else
9870 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9871 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9872 OMP_CLAUSE_CHAIN (c) = nc;
/* Skip over the just-inserted pointer clause.  */
9873 c = nc;
9876 else
9878 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9879 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9880 gcc_assert ((n->value & GOVD_SEEN) == 0
9881 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9882 == 0));
9884 break;
9886 case OMP_CLAUSE_TO:
9887 case OMP_CLAUSE_FROM:
9888 case OMP_CLAUSE__CACHE_:
9889 decl = OMP_CLAUSE_DECL (c);
9890 if (!DECL_P (decl))
9891 break;
/* Same variable-sized-decl rewrite as for map clauses above.  */
9892 if (DECL_SIZE (decl)
9893 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9895 tree decl2 = DECL_VALUE_EXPR (decl);
9896 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9897 decl2 = TREE_OPERAND (decl2, 0);
9898 gcc_assert (DECL_P (decl2));
9899 tree mem = build_simple_mem_ref (decl2);
9900 OMP_CLAUSE_DECL (c) = mem;
9901 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9902 if (ctx->outer_context)
9904 omp_notice_variable (ctx->outer_context, decl2, true);
9905 omp_notice_variable (ctx->outer_context,
9906 OMP_CLAUSE_SIZE (c), true);
9909 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9910 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9911 break;
9913 case OMP_CLAUSE_REDUCTION:
9914 case OMP_CLAUSE_IN_REDUCTION:
9915 case OMP_CLAUSE_TASK_REDUCTION:
9916 decl = OMP_CLAUSE_DECL (c);
9917 /* OpenACC reductions need a present_or_copy data clause.
9918 Add one if necessary. Emit error when the reduction is private. */
9919 if (ctx->region_type == ORT_ACC_PARALLEL)
9921 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9922 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9924 remove = true;
9925 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9926 "reduction on %qE", DECL_NAME (decl));
9928 else if ((n->value & GOVD_MAP) == 0)
9930 tree next = OMP_CLAUSE_CHAIN (c);
9931 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9932 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9933 OMP_CLAUSE_DECL (nc) = decl;
9934 OMP_CLAUSE_CHAIN (c) = nc;
9935 lang_hooks.decls.omp_finish_clause (nc, pre_p);
/* omp_finish_clause may have chained extra clauses after NC;
   flag them all as being part of the reduction mapping.  */
9936 while (1)
9938 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9939 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9940 break;
9941 nc = OMP_CLAUSE_CHAIN (nc);
9943 OMP_CLAUSE_CHAIN (nc) = next;
9944 n->value |= GOVD_MAP;
9947 if (DECL_P (decl)
9948 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9949 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9950 break;
/* The remaining clause kinds pass through untouched.  */
9951 case OMP_CLAUSE_COPYIN:
9952 case OMP_CLAUSE_COPYPRIVATE:
9953 case OMP_CLAUSE_IF:
9954 case OMP_CLAUSE_NUM_THREADS:
9955 case OMP_CLAUSE_NUM_TEAMS:
9956 case OMP_CLAUSE_THREAD_LIMIT:
9957 case OMP_CLAUSE_DIST_SCHEDULE:
9958 case OMP_CLAUSE_DEVICE:
9959 case OMP_CLAUSE_SCHEDULE:
9960 case OMP_CLAUSE_NOWAIT:
9961 case OMP_CLAUSE_ORDERED:
9962 case OMP_CLAUSE_DEFAULT:
9963 case OMP_CLAUSE_UNTIED:
9964 case OMP_CLAUSE_COLLAPSE:
9965 case OMP_CLAUSE_FINAL:
9966 case OMP_CLAUSE_MERGEABLE:
9967 case OMP_CLAUSE_PROC_BIND:
9968 case OMP_CLAUSE_SAFELEN:
9969 case OMP_CLAUSE_SIMDLEN:
9970 case OMP_CLAUSE_DEPEND:
9971 case OMP_CLAUSE_PRIORITY:
9972 case OMP_CLAUSE_GRAINSIZE:
9973 case OMP_CLAUSE_NUM_TASKS:
9974 case OMP_CLAUSE_NOGROUP:
9975 case OMP_CLAUSE_THREADS:
9976 case OMP_CLAUSE_SIMD:
9977 case OMP_CLAUSE_HINT:
9978 case OMP_CLAUSE_DEFAULTMAP:
9979 case OMP_CLAUSE_USE_DEVICE_PTR:
9980 case OMP_CLAUSE_IS_DEVICE_PTR:
9981 case OMP_CLAUSE_ASYNC:
9982 case OMP_CLAUSE_WAIT:
9983 case OMP_CLAUSE_INDEPENDENT:
9984 case OMP_CLAUSE_NUM_GANGS:
9985 case OMP_CLAUSE_NUM_WORKERS:
9986 case OMP_CLAUSE_VECTOR_LENGTH:
9987 case OMP_CLAUSE_GANG:
9988 case OMP_CLAUSE_WORKER:
9989 case OMP_CLAUSE_VECTOR:
9990 case OMP_CLAUSE_AUTO:
9991 case OMP_CLAUSE_SEQ:
9992 case OMP_CLAUSE_TILE:
9993 case OMP_CLAUSE_IF_PRESENT:
9994 case OMP_CLAUSE_FINALIZE:
9995 break;
9997 default:
9998 gcc_unreachable ();
/* Unlink the clause, or advance to the next link.  */
10001 if (remove)
10002 *list_p = OMP_CLAUSE_CHAIN (c);
10003 else
10004 list_p = &OMP_CLAUSE_CHAIN (c);
10007 /* Add in any implicit data sharing. */
10008 struct gimplify_adjust_omp_clauses_data data;
10009 data.list_p = list_p;
10010 data.pre_p = pre_p;
10011 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
/* Pop and release the omp context for this construct.  */
10013 gimplify_omp_ctxp = ctx->outer_context;
10014 delete_omp_context (ctx);
10017 /* Gimplify OACC_CACHE. */
10019 static void
10020 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10022 tree expr = *expr_p;
10024 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10025 OACC_CACHE);
10026 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10027 OACC_CACHE);
10029 /* TODO: Do something sensible with this information. */
10031 *expr_p = NULL_TREE;
10034 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10035 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10036 kind. The entry kind will replace the one in CLAUSE, while the exit
10037 kind will be used in a new omp_clause and returned to the caller. */
10039 static tree
10040 gimplify_oacc_declare_1 (tree clause)
10042 HOST_WIDE_INT kind, new_op;
10043 bool ret = false;
10044 tree c = NULL;
10046 kind = OMP_CLAUSE_MAP_KIND (clause);
10048 switch (kind)
10050 case GOMP_MAP_ALLOC:
10051 new_op = GOMP_MAP_RELEASE;
10052 ret = true;
10053 break;
10055 case GOMP_MAP_FROM:
10056 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10057 new_op = GOMP_MAP_FROM;
10058 ret = true;
10059 break;
10061 case GOMP_MAP_TOFROM:
10062 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10063 new_op = GOMP_MAP_FROM;
10064 ret = true;
10065 break;
10067 case GOMP_MAP_DEVICE_RESIDENT:
10068 case GOMP_MAP_FORCE_DEVICEPTR:
10069 case GOMP_MAP_FORCE_PRESENT:
10070 case GOMP_MAP_LINK:
10071 case GOMP_MAP_POINTER:
10072 case GOMP_MAP_TO:
10073 break;
10075 default:
10076 gcc_unreachable ();
10077 break;
10080 if (ret)
10082 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10083 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10084 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10087 return c;
10090 /* Gimplify OACC_DECLARE. */
10092 static void
10093 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10095 tree expr = *expr_p;
10096 gomp_target *stmt;
10097 tree clauses, t, decl;
10099 clauses = OACC_DECLARE_CLAUSES (expr);
/* Run the declare clauses through the usual scan/adjust pair.  */
10101 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10102 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10104 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10106 decl = OMP_CLAUSE_DECL (t);
/* Look through a MEM_REF wrapper to get at the underlying decl.  */
10108 if (TREE_CODE (decl) == MEM_REF)
10109 decl = TREE_OPERAND (decl, 0);
/* Tag the variable so later code can tell it was the subject of an
   'acc declare' (queried via is_oacc_declared).  */
10111 if (VAR_P (decl) && !is_oacc_declared (decl))
10113 tree attr = get_identifier ("oacc declare target");
10114 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10115 DECL_ATTRIBUTES (decl));
/* For function-local variables, compute the matching 'exit' map kind
   (if any) and remember it in oacc_declare_returns, presumably so it
   can be emitted when the function returns — TODO confirm against the
   consumer of oacc_declare_returns.  */
10118 if (VAR_P (decl)
10119 && !is_global_var (decl)
10120 && DECL_CONTEXT (decl) == current_function_decl)
10122 tree c = gimplify_oacc_declare_1 (t);
10123 if (c)
10125 if (oacc_declare_returns == NULL)
10126 oacc_declare_returns = new hash_map<tree, tree>;
10128 oacc_declare_returns->put (decl, c);
/* Record the decl as seen in the enclosing omp context, if any.  */
10132 if (gimplify_omp_ctxp)
10133 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
/* Emit the declare as a bodyless GIMPLE_OMP_TARGET of OACC_DECLARE
   kind carrying the processed clauses.  */
10136 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10137 clauses);
10139 gimplify_seq_add_stmt (pre_p, stmt);
10141 *expr_p = NULL_TREE;
10144 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10145 gimplification of the body, as well as scanning the body for used
10146 variables. We need to do this scan now, because variable-sized
10147 decls will be decomposed during gimplification. */
10149 static void
10150 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10152 tree expr = *expr_p;
10153 gimple *g;
10154 gimple_seq body = NULL;
10156 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10157 OMP_PARALLEL_COMBINED (expr)
10158 ? ORT_COMBINED_PARALLEL
10159 : ORT_PARALLEL, OMP_PARALLEL);
10161 push_gimplify_context ();
10163 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10164 if (gimple_code (g) == GIMPLE_BIND)
10165 pop_gimplify_context (g);
10166 else
10167 pop_gimplify_context (NULL);
10169 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10170 OMP_PARALLEL);
10172 g = gimple_build_omp_parallel (body,
10173 OMP_PARALLEL_CLAUSES (expr),
10174 NULL_TREE, NULL_TREE);
10175 if (OMP_PARALLEL_COMBINED (expr))
10176 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10177 gimplify_seq_add_stmt (pre_p, g);
10178 *expr_p = NULL_TREE;
10181 /* Gimplify the contents of an OMP_TASK statement. This involves
10182 gimplification of the body, as well as scanning the body for used
10183 variables. We need to do this scan now, because variable-sized
10184 decls will be decomposed during gimplification. */
10186 static void
10187 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10189 tree expr = *expr_p;
10190 gimple *g;
10191 gimple_seq body = NULL;
/* A NULL body means this OMP_TASK actually represents a 'taskwait'
   with 'depend' clauses (see gimple_omp_task_set_taskwait_p below);
   the mutexinoutset dependence kind is not allowed there.  */
10193 if (OMP_TASK_BODY (expr) == NULL_TREE)
10194 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10195 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10196 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10198 error_at (OMP_CLAUSE_LOCATION (c),
10199 "%<mutexinoutset%> kind in %<depend%> clause on a "
10200 "%<taskwait%> construct");
10201 break;
/* Scan the clauses; an 'untied' clause selects the untied task
   region type.  */
10204 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10205 omp_find_clause (OMP_TASK_CLAUSES (expr),
10206 OMP_CLAUSE_UNTIED)
10207 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
/* Gimplify the task body, if present, in its own context.  */
10209 if (OMP_TASK_BODY (expr))
10211 push_gimplify_context ();
10213 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10214 if (gimple_code (g) == GIMPLE_BIND)
10215 pop_gimplify_context (g)
10216 else
10217 pop_gimplify_context (NULL);
10220 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10221 OMP_TASK);
10223 g = gimple_build_omp_task (body,
10224 OMP_TASK_CLAUSES (expr),
10225 NULL_TREE, NULL_TREE,
10226 NULL_TREE, NULL_TREE, NULL_TREE);
/* Flag the degenerate, bodyless task as a taskwait construct.  */
10227 if (OMP_TASK_BODY (expr) == NULL_TREE)
10228 gimple_omp_task_set_taskwait_p (g, true);
10229 gimplify_seq_add_stmt (pre_p, g);
10230 *expr_p = NULL_TREE;
10233 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
10234 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10235 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10236 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10237 OMP_FOR in between if any and pdata[3] is address of the inner
10238 OMP_FOR/OMP_SIMD. */
10240 static tree
10241 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10243 tree **pdata = (tree **) data;
10244 *walk_subtrees = 0;
10245 switch (TREE_CODE (*tp))
10247 case OMP_FOR:
10248 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10250 pdata[3] = tp;
10251 return *tp;
10253 pdata[2] = tp;
10254 *walk_subtrees = 1;
10255 break;
10256 case OMP_SIMD:
10257 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10259 pdata[3] = tp;
10260 return *tp;
10262 break;
10263 case BIND_EXPR:
10264 if (BIND_EXPR_VARS (*tp)
10265 || (BIND_EXPR_BLOCK (*tp)
10266 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10267 pdata[0] = tp;
10268 *walk_subtrees = 1;
10269 break;
10270 case STATEMENT_LIST:
10271 if (!tsi_one_before_end_p (tsi_start (*tp)))
10272 pdata[0] = tp;
10273 *walk_subtrees = 1;
10274 break;
10275 case TRY_FINALLY_EXPR:
10276 pdata[0] = tp;
10277 *walk_subtrees = 1;
10278 break;
10279 case OMP_PARALLEL:
10280 pdata[1] = tp;
10281 *walk_subtrees = 1;
10282 break;
10283 default:
10284 break;
10286 return NULL_TREE;
10289 /* Gimplify the gross structure of an OMP_FOR statement. */
10291 static enum gimplify_status
10292 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10294 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10295 enum gimplify_status ret = GS_ALL_DONE;
10296 enum gimplify_status tret;
10297 gomp_for *gfor;
10298 gimple_seq for_body, for_pre_body;
10299 int i;
10300 bitmap has_decl_expr = NULL;
10301 enum omp_region_type ort = ORT_WORKSHARE;
10303 orig_for_stmt = for_stmt = *expr_p;
10305 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10307 tree *data[4] = { NULL, NULL, NULL, NULL };
10308 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10309 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10310 find_combined_omp_for, data, NULL);
10311 if (inner_for_stmt == NULL_TREE)
10313 gcc_assert (seen_error ());
10314 *expr_p = NULL_TREE;
10315 return GS_ERROR;
10317 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10319 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10320 &OMP_FOR_PRE_BODY (for_stmt));
10321 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10323 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10325 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10326 &OMP_FOR_PRE_BODY (for_stmt));
10327 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10330 if (data[0])
10332 /* We have some statements or variable declarations in between
10333 the composite construct directives. Move them around the
10334 inner_for_stmt. */
10335 data[0] = expr_p;
10336 for (i = 0; i < 3; i++)
10337 if (data[i])
10339 tree t = *data[i];
10340 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10341 data[i + 1] = data[i];
10342 *data[i] = OMP_BODY (t);
10343 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10344 NULL_TREE, make_node (BLOCK));
10345 OMP_BODY (t) = body;
10346 append_to_statement_list_force (inner_for_stmt,
10347 &BIND_EXPR_BODY (body));
10348 *data[3] = t;
10349 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10350 gcc_assert (*data[3] == inner_for_stmt);
10352 return GS_OK;
10355 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10356 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10357 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10358 i)) == TREE_LIST
10359 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10360 i)))
10362 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10363 /* Class iterators aren't allowed on OMP_SIMD, so the only
10364 case we need to solve is distribute parallel for. */
10365 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10366 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10367 && data[1]);
10368 tree orig_decl = TREE_PURPOSE (orig);
10369 tree last = TREE_VALUE (orig);
10370 tree *pc;
10371 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10372 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10373 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10374 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10375 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10376 break;
10377 if (*pc == NULL_TREE)
10379 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10381 /* private clause will appear only on inner_for_stmt.
10382 Change it into firstprivate, and add private clause
10383 on for_stmt. */
10384 tree c = copy_node (*pc);
10385 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10386 OMP_FOR_CLAUSES (for_stmt) = c;
10387 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10388 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10390 else
10392 /* lastprivate clause will appear on both inner_for_stmt
10393 and for_stmt. Add firstprivate clause to
10394 inner_for_stmt. */
10395 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10396 OMP_CLAUSE_FIRSTPRIVATE);
10397 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10398 OMP_CLAUSE_CHAIN (c) = *pc;
10399 *pc = c;
10400 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10402 tree c = build_omp_clause (UNKNOWN_LOCATION,
10403 OMP_CLAUSE_FIRSTPRIVATE);
10404 OMP_CLAUSE_DECL (c) = last;
10405 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10406 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10407 c = build_omp_clause (UNKNOWN_LOCATION,
10408 *pc ? OMP_CLAUSE_SHARED
10409 : OMP_CLAUSE_FIRSTPRIVATE);
10410 OMP_CLAUSE_DECL (c) = orig_decl;
10411 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10412 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10414 /* Similarly, take care of C++ range for temporaries, those should
10415 be firstprivate on OMP_PARALLEL if any. */
10416 if (data[1])
10417 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10418 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10419 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10420 i)) == TREE_LIST
10421 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10422 i)))
10424 tree orig
10425 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10426 tree v = TREE_CHAIN (orig);
10427 tree c = build_omp_clause (UNKNOWN_LOCATION,
10428 OMP_CLAUSE_FIRSTPRIVATE);
10429 /* First add firstprivate clause for the __for_end artificial
10430 decl. */
10431 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10432 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10433 == REFERENCE_TYPE)
10434 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10435 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10436 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10437 if (TREE_VEC_ELT (v, 0))
10439 /* And now the same for __for_range artificial decl if it
10440 exists. */
10441 c = build_omp_clause (UNKNOWN_LOCATION,
10442 OMP_CLAUSE_FIRSTPRIVATE);
10443 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10444 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10445 == REFERENCE_TYPE)
10446 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10447 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10448 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10453 switch (TREE_CODE (for_stmt))
10455 case OMP_FOR:
10456 case OMP_DISTRIBUTE:
10457 break;
10458 case OACC_LOOP:
10459 ort = ORT_ACC;
10460 break;
10461 case OMP_TASKLOOP:
10462 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
10463 ort = ORT_UNTIED_TASKLOOP;
10464 else
10465 ort = ORT_TASKLOOP;
10466 break;
10467 case OMP_SIMD:
10468 ort = ORT_SIMD;
10469 break;
10470 default:
10471 gcc_unreachable ();
10474 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10475 clause for the IV. */
10476 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10478 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10479 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10480 decl = TREE_OPERAND (t, 0);
10481 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10482 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10483 && OMP_CLAUSE_DECL (c) == decl)
10485 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10486 break;
10490 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10491 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10492 TREE_CODE (for_stmt));
10494 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10495 gimplify_omp_ctxp->distribute = true;
10497 /* Handle OMP_FOR_INIT. */
10498 for_pre_body = NULL;
10499 if ((ort == ORT_SIMD
10500 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10501 && OMP_FOR_PRE_BODY (for_stmt))
10503 has_decl_expr = BITMAP_ALLOC (NULL);
10504 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10505 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
10506 == VAR_DECL)
10508 t = OMP_FOR_PRE_BODY (for_stmt);
10509 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10511 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10513 tree_stmt_iterator si;
10514 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10515 tsi_next (&si))
10517 t = tsi_stmt (si);
10518 if (TREE_CODE (t) == DECL_EXPR
10519 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10520 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10524 if (OMP_FOR_PRE_BODY (for_stmt))
10526 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10527 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10528 else
10530 struct gimplify_omp_ctx ctx;
10531 memset (&ctx, 0, sizeof (ctx));
10532 ctx.region_type = ORT_NONE;
10533 gimplify_omp_ctxp = &ctx;
10534 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10535 gimplify_omp_ctxp = NULL;
10538 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
10540 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10541 for_stmt = inner_for_stmt;
10543 /* For taskloop, need to gimplify the start, end and step before the
10544 taskloop, outside of the taskloop omp context. */
10545 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10547 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10549 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10550 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10552 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10553 TREE_OPERAND (t, 1)
10554 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10555 gimple_seq_empty_p (for_pre_body)
10556 ? pre_p : &for_pre_body, NULL,
10557 false);
10558 /* Reference to pointer conversion is considered useless,
10559 but is significant for firstprivate clause. Force it
10560 here. */
10561 if (TREE_CODE (type) == POINTER_TYPE
10562 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10563 == REFERENCE_TYPE))
10565 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10566 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10567 TREE_OPERAND (t, 1));
10568 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10569 ? pre_p : &for_pre_body);
10570 TREE_OPERAND (t, 1) = v;
10572 tree c = build_omp_clause (input_location,
10573 OMP_CLAUSE_FIRSTPRIVATE);
10574 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10575 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10576 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10579 /* Handle OMP_FOR_COND. */
10580 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10581 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10583 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10584 TREE_OPERAND (t, 1)
10585 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10586 gimple_seq_empty_p (for_pre_body)
10587 ? pre_p : &for_pre_body, NULL,
10588 false);
10589 /* Reference to pointer conversion is considered useless,
10590 but is significant for firstprivate clause. Force it
10591 here. */
10592 if (TREE_CODE (type) == POINTER_TYPE
10593 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10594 == REFERENCE_TYPE))
10596 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10597 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10598 TREE_OPERAND (t, 1));
10599 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10600 ? pre_p : &for_pre_body);
10601 TREE_OPERAND (t, 1) = v;
10603 tree c = build_omp_clause (input_location,
10604 OMP_CLAUSE_FIRSTPRIVATE);
10605 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10606 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10607 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10610 /* Handle OMP_FOR_INCR. */
10611 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10612 if (TREE_CODE (t) == MODIFY_EXPR)
10614 decl = TREE_OPERAND (t, 0);
10615 t = TREE_OPERAND (t, 1);
10616 tree *tp = &TREE_OPERAND (t, 1);
10617 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10618 tp = &TREE_OPERAND (t, 0);
10620 if (!is_gimple_constant (*tp))
10622 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10623 ? pre_p : &for_pre_body;
10624 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
10625 tree c = build_omp_clause (input_location,
10626 OMP_CLAUSE_FIRSTPRIVATE);
10627 OMP_CLAUSE_DECL (c) = *tp;
10628 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10629 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10634 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10635 OMP_TASKLOOP);
10638 if (orig_for_stmt != for_stmt)
10639 gimplify_omp_ctxp->combined_loop = true;
10641 for_body = NULL;
10642 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10643 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10644 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10645 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
10647 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
10648 bool is_doacross = false;
10649 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10651 is_doacross = true;
10652 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10653 (OMP_FOR_INIT (for_stmt))
10654 * 2);
10656 int collapse = 1, tile = 0;
10657 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
10658 if (c)
10659 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10660 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10661 if (c)
10662 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
10663 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10665 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10666 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10667 decl = TREE_OPERAND (t, 0);
10668 gcc_assert (DECL_P (decl));
10669 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10670 || POINTER_TYPE_P (TREE_TYPE (decl)));
10671 if (is_doacross)
10673 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
10675 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
10676 if (TREE_CODE (orig_decl) == TREE_LIST)
10678 orig_decl = TREE_PURPOSE (orig_decl);
10679 if (!orig_decl)
10680 orig_decl = decl;
10682 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
10684 else
10685 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10686 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10689 /* Make sure the iteration variable is private. */
10690 tree c = NULL_TREE;
10691 tree c2 = NULL_TREE;
10692 if (orig_for_stmt != for_stmt)
10694 /* Preserve this information until we gimplify the inner simd. */
10695 if (has_decl_expr
10696 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10697 TREE_PRIVATE (t) = 1;
10699 else if (ort == ORT_SIMD)
10701 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10702 (splay_tree_key) decl);
10703 omp_is_private (gimplify_omp_ctxp, decl,
10704 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10705 != 1));
10706 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10707 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10708 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10710 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10711 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10712 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
10713 if ((has_decl_expr
10714 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10715 || TREE_PRIVATE (t))
10717 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10718 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10720 struct gimplify_omp_ctx *outer
10721 = gimplify_omp_ctxp->outer_context;
10722 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10724 if (outer->region_type == ORT_WORKSHARE
10725 && outer->combined_loop)
10727 n = splay_tree_lookup (outer->variables,
10728 (splay_tree_key)decl);
10729 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10731 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10732 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10734 else
10736 struct gimplify_omp_ctx *octx = outer->outer_context;
10737 if (octx
10738 && octx->region_type == ORT_COMBINED_PARALLEL
10739 && octx->outer_context
10740 && (octx->outer_context->region_type
10741 == ORT_WORKSHARE)
10742 && octx->outer_context->combined_loop)
10744 octx = octx->outer_context;
10745 n = splay_tree_lookup (octx->variables,
10746 (splay_tree_key)decl);
10747 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10749 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10750 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10757 OMP_CLAUSE_DECL (c) = decl;
10758 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10759 OMP_FOR_CLAUSES (for_stmt) = c;
10760 omp_add_variable (gimplify_omp_ctxp, decl, flags);
10761 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10763 if (outer->region_type == ORT_WORKSHARE
10764 && outer->combined_loop)
10766 if (outer->outer_context
10767 && (outer->outer_context->region_type
10768 == ORT_COMBINED_PARALLEL))
10769 outer = outer->outer_context;
10770 else if (omp_check_private (outer, decl, false))
10771 outer = NULL;
10773 else if (((outer->region_type & ORT_TASKLOOP)
10774 == ORT_TASKLOOP)
10775 && outer->combined_loop
10776 && !omp_check_private (gimplify_omp_ctxp,
10777 decl, false))
10779 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10781 omp_notice_variable (outer, decl, true);
10782 outer = NULL;
10784 if (outer)
10786 n = splay_tree_lookup (outer->variables,
10787 (splay_tree_key)decl);
10788 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10790 omp_add_variable (outer, decl,
10791 GOVD_LASTPRIVATE | GOVD_SEEN);
10792 if (outer->region_type == ORT_COMBINED_PARALLEL
10793 && outer->outer_context
10794 && (outer->outer_context->region_type
10795 == ORT_WORKSHARE)
10796 && outer->outer_context->combined_loop)
10798 outer = outer->outer_context;
10799 n = splay_tree_lookup (outer->variables,
10800 (splay_tree_key)decl);
10801 if (omp_check_private (outer, decl, false))
10802 outer = NULL;
10803 else if (n == NULL
10804 || ((n->value & GOVD_DATA_SHARE_CLASS)
10805 == 0))
10806 omp_add_variable (outer, decl,
10807 GOVD_LASTPRIVATE
10808 | GOVD_SEEN);
10809 else
10810 outer = NULL;
10812 if (outer && outer->outer_context
10813 && ((outer->outer_context->region_type
10814 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10815 || (((outer->region_type & ORT_TASKLOOP)
10816 == ORT_TASKLOOP)
10817 && (outer->outer_context->region_type
10818 == ORT_COMBINED_PARALLEL))))
10820 outer = outer->outer_context;
10821 n = splay_tree_lookup (outer->variables,
10822 (splay_tree_key)decl);
10823 if (n == NULL
10824 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10825 omp_add_variable (outer, decl,
10826 GOVD_SHARED | GOVD_SEEN);
10827 else
10828 outer = NULL;
10830 if (outer && outer->outer_context)
10831 omp_notice_variable (outer->outer_context, decl,
10832 true);
10837 else
10839 bool lastprivate
10840 = (!has_decl_expr
10841 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
10842 if (TREE_PRIVATE (t))
10843 lastprivate = false;
10844 struct gimplify_omp_ctx *outer
10845 = gimplify_omp_ctxp->outer_context;
10846 if (outer && lastprivate)
10848 if (outer->region_type == ORT_WORKSHARE
10849 && outer->combined_loop)
10851 n = splay_tree_lookup (outer->variables,
10852 (splay_tree_key)decl);
10853 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10855 lastprivate = false;
10856 outer = NULL;
10858 else if (outer->outer_context
10859 && (outer->outer_context->region_type
10860 == ORT_COMBINED_PARALLEL))
10861 outer = outer->outer_context;
10862 else if (omp_check_private (outer, decl, false))
10863 outer = NULL;
10865 else if (((outer->region_type & ORT_TASKLOOP)
10866 == ORT_TASKLOOP)
10867 && outer->combined_loop
10868 && !omp_check_private (gimplify_omp_ctxp,
10869 decl, false))
10871 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10873 omp_notice_variable (outer, decl, true);
10874 outer = NULL;
10876 if (outer)
10878 n = splay_tree_lookup (outer->variables,
10879 (splay_tree_key)decl);
10880 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10882 omp_add_variable (outer, decl,
10883 GOVD_LASTPRIVATE | GOVD_SEEN);
10884 if (outer->region_type == ORT_COMBINED_PARALLEL
10885 && outer->outer_context
10886 && (outer->outer_context->region_type
10887 == ORT_WORKSHARE)
10888 && outer->outer_context->combined_loop)
10890 outer = outer->outer_context;
10891 n = splay_tree_lookup (outer->variables,
10892 (splay_tree_key)decl);
10893 if (omp_check_private (outer, decl, false))
10894 outer = NULL;
10895 else if (n == NULL
10896 || ((n->value & GOVD_DATA_SHARE_CLASS)
10897 == 0))
10898 omp_add_variable (outer, decl,
10899 GOVD_LASTPRIVATE
10900 | GOVD_SEEN);
10901 else
10902 outer = NULL;
10904 if (outer && outer->outer_context
10905 && ((outer->outer_context->region_type
10906 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10907 || (((outer->region_type & ORT_TASKLOOP)
10908 == ORT_TASKLOOP)
10909 && (outer->outer_context->region_type
10910 == ORT_COMBINED_PARALLEL))))
10912 outer = outer->outer_context;
10913 n = splay_tree_lookup (outer->variables,
10914 (splay_tree_key)decl);
10915 if (n == NULL
10916 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10917 omp_add_variable (outer, decl,
10918 GOVD_SHARED | GOVD_SEEN);
10919 else
10920 outer = NULL;
10922 if (outer && outer->outer_context)
10923 omp_notice_variable (outer->outer_context, decl,
10924 true);
10929 c = build_omp_clause (input_location,
10930 lastprivate ? OMP_CLAUSE_LASTPRIVATE
10931 : OMP_CLAUSE_PRIVATE);
10932 OMP_CLAUSE_DECL (c) = decl;
10933 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10934 OMP_FOR_CLAUSES (for_stmt) = c;
10935 omp_add_variable (gimplify_omp_ctxp, decl,
10936 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
10937 | GOVD_EXPLICIT | GOVD_SEEN);
10938 c = NULL_TREE;
10941 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
10942 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10943 else
10944 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
10946 /* If DECL is not a gimple register, create a temporary variable to act
10947 as an iteration counter. This is valid, since DECL cannot be
10948 modified in the body of the loop. Similarly for any iteration vars
10949 in simd with collapse > 1 where the iterator vars must be
10950 lastprivate. */
10951 if (orig_for_stmt != for_stmt)
10952 var = decl;
10953 else if (!is_gimple_reg (decl)
10954 || (ort == ORT_SIMD
10955 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
10957 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10958 /* Make sure omp_add_variable is not called on it prematurely.
10959 We call it ourselves a few lines later. */
10960 gimplify_omp_ctxp = NULL;
10961 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10962 gimplify_omp_ctxp = ctx;
10963 TREE_OPERAND (t, 0) = var;
10965 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
10967 if (ort == ORT_SIMD
10968 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10970 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10971 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
10972 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
10973 OMP_CLAUSE_DECL (c2) = var;
10974 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
10975 OMP_FOR_CLAUSES (for_stmt) = c2;
10976 omp_add_variable (gimplify_omp_ctxp, var,
10977 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10978 if (c == NULL_TREE)
10980 c = c2;
10981 c2 = NULL_TREE;
10984 else
10985 omp_add_variable (gimplify_omp_ctxp, var,
10986 GOVD_PRIVATE | GOVD_SEEN);
10988 else
10989 var = decl;
10991 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10992 is_gimple_val, fb_rvalue, false);
10993 ret = MIN (ret, tret);
10994 if (ret == GS_ERROR)
10995 return ret;
10997 /* Handle OMP_FOR_COND. */
10998 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10999 gcc_assert (COMPARISON_CLASS_P (t));
11000 gcc_assert (TREE_OPERAND (t, 0) == decl);
11002 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11003 is_gimple_val, fb_rvalue, false);
11004 ret = MIN (ret, tret);
11006 /* Handle OMP_FOR_INCR. */
11007 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11008 switch (TREE_CODE (t))
11010 case PREINCREMENT_EXPR:
11011 case POSTINCREMENT_EXPR:
11013 tree decl = TREE_OPERAND (t, 0);
11014 /* c_omp_for_incr_canonicalize_ptr() should have been
11015 called to massage things appropriately. */
11016 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11018 if (orig_for_stmt != for_stmt)
11019 break;
11020 t = build_int_cst (TREE_TYPE (decl), 1);
11021 if (c)
11022 OMP_CLAUSE_LINEAR_STEP (c) = t;
11023 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11024 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11025 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11026 break;
11029 case PREDECREMENT_EXPR:
11030 case POSTDECREMENT_EXPR:
11031 /* c_omp_for_incr_canonicalize_ptr() should have been
11032 called to massage things appropriately. */
11033 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11034 if (orig_for_stmt != for_stmt)
11035 break;
11036 t = build_int_cst (TREE_TYPE (decl), -1);
11037 if (c)
11038 OMP_CLAUSE_LINEAR_STEP (c) = t;
11039 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11040 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11041 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11042 break;
11044 case MODIFY_EXPR:
11045 gcc_assert (TREE_OPERAND (t, 0) == decl);
11046 TREE_OPERAND (t, 0) = var;
11048 t = TREE_OPERAND (t, 1);
11049 switch (TREE_CODE (t))
11051 case PLUS_EXPR:
11052 if (TREE_OPERAND (t, 1) == decl)
11054 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11055 TREE_OPERAND (t, 0) = var;
11056 break;
11059 /* Fallthru. */
11060 case MINUS_EXPR:
11061 case POINTER_PLUS_EXPR:
11062 gcc_assert (TREE_OPERAND (t, 0) == decl);
11063 TREE_OPERAND (t, 0) = var;
11064 break;
11065 default:
11066 gcc_unreachable ();
11069 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11070 is_gimple_val, fb_rvalue, false);
11071 ret = MIN (ret, tret);
11072 if (c)
11074 tree step = TREE_OPERAND (t, 1);
11075 tree stept = TREE_TYPE (decl);
11076 if (POINTER_TYPE_P (stept))
11077 stept = sizetype;
11078 step = fold_convert (stept, step);
11079 if (TREE_CODE (t) == MINUS_EXPR)
11080 step = fold_build1 (NEGATE_EXPR, stept, step);
11081 OMP_CLAUSE_LINEAR_STEP (c) = step;
11082 if (step != TREE_OPERAND (t, 1))
11084 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11085 &for_pre_body, NULL,
11086 is_gimple_val, fb_rvalue, false);
11087 ret = MIN (ret, tret);
11090 break;
11092 default:
11093 gcc_unreachable ();
11096 if (c2)
11098 gcc_assert (c);
11099 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11102 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11104 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11105 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11106 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11107 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11108 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11109 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11110 && OMP_CLAUSE_DECL (c) == decl)
11112 if (is_doacross && (collapse == 1 || i >= collapse))
11113 t = var;
11114 else
11116 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11117 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11118 gcc_assert (TREE_OPERAND (t, 0) == var);
11119 t = TREE_OPERAND (t, 1);
11120 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11121 || TREE_CODE (t) == MINUS_EXPR
11122 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11123 gcc_assert (TREE_OPERAND (t, 0) == var);
11124 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11125 is_doacross ? var : decl,
11126 TREE_OPERAND (t, 1));
11128 gimple_seq *seq;
11129 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11130 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11131 else
11132 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11133 gimplify_assign (decl, t, seq);
11138 BITMAP_FREE (has_decl_expr);
11140 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11142 push_gimplify_context ();
11143 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11145 OMP_FOR_BODY (orig_for_stmt)
11146 = build3 (BIND_EXPR, void_type_node, NULL,
11147 OMP_FOR_BODY (orig_for_stmt), NULL);
11148 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11152 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11153 &for_body);
11155 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11157 if (gimple_code (g) == GIMPLE_BIND)
11158 pop_gimplify_context (g);
11159 else
11160 pop_gimplify_context (NULL);
11163 if (orig_for_stmt != for_stmt)
11164 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11166 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11167 decl = TREE_OPERAND (t, 0);
11168 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11169 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11170 gimplify_omp_ctxp = ctx->outer_context;
11171 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11172 gimplify_omp_ctxp = ctx;
11173 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11174 TREE_OPERAND (t, 0) = var;
11175 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11176 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11177 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11180 gimplify_adjust_omp_clauses (pre_p, for_body,
11181 &OMP_FOR_CLAUSES (orig_for_stmt),
11182 TREE_CODE (orig_for_stmt));
11184 int kind;
11185 switch (TREE_CODE (orig_for_stmt))
11187 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11188 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11189 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11190 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11191 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11192 default:
11193 gcc_unreachable ();
11195 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11196 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11197 for_pre_body);
11198 if (orig_for_stmt != for_stmt)
11199 gimple_omp_for_set_combined_p (gfor, true);
11200 if (gimplify_omp_ctxp
11201 && (gimplify_omp_ctxp->combined_loop
11202 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11203 && gimplify_omp_ctxp->outer_context
11204 && gimplify_omp_ctxp->outer_context->combined_loop)))
11206 gimple_omp_for_set_combined_into_p (gfor, true);
11207 if (gimplify_omp_ctxp->combined_loop)
11208 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11209 else
11210 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11213 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11215 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11216 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11217 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11218 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11219 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11220 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11221 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11222 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11225 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11226 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11227 The outer taskloop stands for computing the number of iterations,
11228 counts for collapsed loops and holding taskloop specific clauses.
11229 The task construct stands for the effect of data sharing on the
11230 explicit task it creates and the inner taskloop stands for expansion
11231 of the static loop inside of the explicit task construct. */
11232 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11234 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11235 tree task_clauses = NULL_TREE;
11236 tree c = *gfor_clauses_ptr;
11237 tree *gtask_clauses_ptr = &task_clauses;
11238 tree outer_for_clauses = NULL_TREE;
11239 tree *gforo_clauses_ptr = &outer_for_clauses;
11240 for (; c; c = OMP_CLAUSE_CHAIN (c))
11241 switch (OMP_CLAUSE_CODE (c))
11243 /* These clauses are allowed on task, move them there. */
11244 case OMP_CLAUSE_SHARED:
11245 case OMP_CLAUSE_FIRSTPRIVATE:
11246 case OMP_CLAUSE_DEFAULT:
11247 case OMP_CLAUSE_IF:
11248 case OMP_CLAUSE_UNTIED:
11249 case OMP_CLAUSE_FINAL:
11250 case OMP_CLAUSE_MERGEABLE:
11251 case OMP_CLAUSE_PRIORITY:
11252 case OMP_CLAUSE_REDUCTION:
11253 case OMP_CLAUSE_IN_REDUCTION:
11254 *gtask_clauses_ptr = c;
11255 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11256 break;
11257 case OMP_CLAUSE_PRIVATE:
11258 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11260 /* We want private on outer for and firstprivate
11261 on task. */
11262 *gtask_clauses_ptr
11263 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11264 OMP_CLAUSE_FIRSTPRIVATE);
11265 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11266 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11267 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11268 *gforo_clauses_ptr = c;
11269 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11271 else
11273 *gtask_clauses_ptr = c;
11274 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11276 break;
11277 /* These clauses go into outer taskloop clauses. */
11278 case OMP_CLAUSE_GRAINSIZE:
11279 case OMP_CLAUSE_NUM_TASKS:
11280 case OMP_CLAUSE_NOGROUP:
11281 *gforo_clauses_ptr = c;
11282 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11283 break;
11284 /* Taskloop clause we duplicate on both taskloops. */
11285 case OMP_CLAUSE_COLLAPSE:
11286 *gfor_clauses_ptr = c;
11287 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11288 *gforo_clauses_ptr = copy_node (c);
11289 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11290 break;
11291 /* For lastprivate, keep the clause on inner taskloop, and add
11292 a shared clause on task. If the same decl is also firstprivate,
11293 add also firstprivate clause on the inner taskloop. */
11294 case OMP_CLAUSE_LASTPRIVATE:
11295 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
11297 /* For taskloop C++ lastprivate IVs, we want:
11298 1) private on outer taskloop
11299 2) firstprivate and shared on task
11300 3) lastprivate on inner taskloop */
11301 *gtask_clauses_ptr
11302 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11303 OMP_CLAUSE_FIRSTPRIVATE);
11304 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11305 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11306 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11307 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11308 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11309 OMP_CLAUSE_PRIVATE);
11310 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11311 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11312 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11313 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11315 *gfor_clauses_ptr = c;
11316 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11317 *gtask_clauses_ptr
11318 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11319 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11320 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11321 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11322 gtask_clauses_ptr
11323 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11324 break;
11325 default:
11326 gcc_unreachable ();
11328 *gfor_clauses_ptr = NULL_TREE;
11329 *gtask_clauses_ptr = NULL_TREE;
11330 *gforo_clauses_ptr = NULL_TREE;
11331 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11332 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11333 NULL_TREE, NULL_TREE, NULL_TREE);
11334 gimple_omp_task_set_taskloop_p (g, true);
11335 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11336 gomp_for *gforo
11337 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11338 gimple_omp_for_collapse (gfor),
11339 gimple_omp_for_pre_body (gfor));
11340 gimple_omp_for_set_pre_body (gfor, NULL);
11341 gimple_omp_for_set_combined_p (gforo, true);
11342 gimple_omp_for_set_combined_into_p (gfor, true);
11343 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11345 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11346 tree v = create_tmp_var (type);
11347 gimple_omp_for_set_index (gforo, i, v);
11348 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11349 gimple_omp_for_set_initial (gforo, i, t);
11350 gimple_omp_for_set_cond (gforo, i,
11351 gimple_omp_for_cond (gfor, i));
11352 t = unshare_expr (gimple_omp_for_final (gfor, i));
11353 gimple_omp_for_set_final (gforo, i, t);
11354 t = unshare_expr (gimple_omp_for_incr (gfor, i));
11355 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11356 TREE_OPERAND (t, 0) = v;
11357 gimple_omp_for_set_incr (gforo, i, t);
11358 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11359 OMP_CLAUSE_DECL (t) = v;
11360 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11361 gimple_omp_for_set_clauses (gforo, t);
11363 gimplify_seq_add_stmt (pre_p, gforo);
11365 else
11366 gimplify_seq_add_stmt (pre_p, gfor);
11367 if (ret != GS_ALL_DONE)
11368 return GS_ERROR;
11369 *expr_p = NULL_TREE;
11370 return GS_ALL_DONE;
11373 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
11374 of OMP_TARGET's body. */
11376 static tree
11377 find_omp_teams (tree *tp, int *walk_subtrees, void *)
11379 *walk_subtrees = 0;
11380 switch (TREE_CODE (*tp))
11382 case OMP_TEAMS:
11383 return *tp;
11384 case BIND_EXPR:
11385 case STATEMENT_LIST:
11386 *walk_subtrees = 1;
11387 break;
11388 default:
11389 break;
11391 return NULL_TREE;
11394 /* Helper function of optimize_target_teams, determine if the expression
11395 can be computed safely before the target construct on the host. */
11397 static tree
11398 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
11400 splay_tree_node n;
11402 if (TYPE_P (*tp))
11404 *walk_subtrees = 0;
11405 return NULL_TREE;
11407 switch (TREE_CODE (*tp))
11409 case VAR_DECL:
11410 case PARM_DECL:
11411 case RESULT_DECL:
11412 *walk_subtrees = 0;
11413 if (error_operand_p (*tp)
11414 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
11415 || DECL_HAS_VALUE_EXPR_P (*tp)
11416 || DECL_THREAD_LOCAL_P (*tp)
11417 || TREE_SIDE_EFFECTS (*tp)
11418 || TREE_THIS_VOLATILE (*tp))
11419 return *tp;
11420 if (is_global_var (*tp)
11421 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
11422 || lookup_attribute ("omp declare target link",
11423 DECL_ATTRIBUTES (*tp))))
11424 return *tp;
11425 if (VAR_P (*tp)
11426 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
11427 && !is_global_var (*tp)
11428 && decl_function_context (*tp) == current_function_decl)
11429 return *tp;
11430 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11431 (splay_tree_key) *tp);
11432 if (n == NULL)
11434 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
11435 return NULL_TREE;
11436 return *tp;
11438 else if (n->value & GOVD_LOCAL)
11439 return *tp;
11440 else if (n->value & GOVD_FIRSTPRIVATE)
11441 return NULL_TREE;
11442 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11443 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11444 return NULL_TREE;
11445 return *tp;
11446 case INTEGER_CST:
11447 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11448 return *tp;
11449 return NULL_TREE;
11450 case TARGET_EXPR:
11451 if (TARGET_EXPR_INITIAL (*tp)
11452 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
11453 return *tp;
11454 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
11455 walk_subtrees, NULL);
11456 /* Allow some reasonable subset of integral arithmetics. */
11457 case PLUS_EXPR:
11458 case MINUS_EXPR:
11459 case MULT_EXPR:
11460 case TRUNC_DIV_EXPR:
11461 case CEIL_DIV_EXPR:
11462 case FLOOR_DIV_EXPR:
11463 case ROUND_DIV_EXPR:
11464 case TRUNC_MOD_EXPR:
11465 case CEIL_MOD_EXPR:
11466 case FLOOR_MOD_EXPR:
11467 case ROUND_MOD_EXPR:
11468 case RDIV_EXPR:
11469 case EXACT_DIV_EXPR:
11470 case MIN_EXPR:
11471 case MAX_EXPR:
11472 case LSHIFT_EXPR:
11473 case RSHIFT_EXPR:
11474 case BIT_IOR_EXPR:
11475 case BIT_XOR_EXPR:
11476 case BIT_AND_EXPR:
11477 case NEGATE_EXPR:
11478 case ABS_EXPR:
11479 case BIT_NOT_EXPR:
11480 case NON_LVALUE_EXPR:
11481 CASE_CONVERT:
11482 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11483 return *tp;
11484 return NULL_TREE;
11485 /* And disallow anything else, except for comparisons. */
11486 default:
11487 if (COMPARISON_CLASS_P (*tp))
11488 return NULL_TREE;
11489 return *tp;
11493 /* Try to determine if the num_teams and/or thread_limit expressions
11494 can have their values determined already before entering the
11495 target construct.
11496 INTEGER_CSTs trivially are,
11497 integral decls that are firstprivate (explicitly or implicitly)
11498 or explicitly map(always, to:) or map(always, tofrom:) on the target
11499 region too, and expressions involving simple arithmetics on those
11500 too, function calls are not ok, dereferencing something neither etc.
11501 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11502 EXPR based on what we find:
11503 0 stands for clause not specified at all, use implementation default
11504 -1 stands for value that can't be determined easily before entering
11505 the target construct.
11506 If teams construct is not present at all, use 1 for num_teams
11507 and 0 for thread_limit (only one team is involved, and the thread
11508 limit is implementation defined. */
11510 static void
11511 optimize_target_teams (tree target, gimple_seq *pre_p)
11513 tree body = OMP_BODY (target);
11514 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
/* Defaults: 0 means "clause not specified, use implementation default".  */
11515 tree num_teams = integer_zero_node;
11516 tree thread_limit = integer_zero_node;
11517 location_t num_teams_loc = EXPR_LOCATION (target);
11518 location_t thread_limit_loc = EXPR_LOCATION (target);
11519 tree c, *p, expr;
11520 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
/* No teams construct inside the target body: only one team is involved.  */
11522 if (teams == NULL_TREE)
11523 num_teams = integer_one_node;
11524 else
11525 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
11527 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
11529 p = &num_teams;
11530 num_teams_loc = OMP_CLAUSE_LOCATION (c);
11532 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
11534 p = &thread_limit;
11535 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
11537 else
11538 continue;
/* EXPR is the num_teams or thread_limit expression of clause C.  */
11539 expr = OMP_CLAUSE_OPERAND (c, 0);
11540 if (TREE_CODE (expr) == INTEGER_CST)
11542 *p = expr;
11543 continue;
/* Anything that can't be computed before entering the target region
   is represented by -1 (see the function comment).  */
11545 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
11547 *p = integer_minus_one_node;
11548 continue;
11550 *p = expr;
/* Gimplify the expression in the context enclosing the target, since
   it is evaluated before entering the construct; restore the target
   context afterwards on every path.  */
11551 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
11552 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
11553 == GS_ERROR)
11555 gimplify_omp_ctxp = target_ctx;
11556 *p = integer_minus_one_node;
11557 continue;
11559 gimplify_omp_ctxp = target_ctx;
/* Also store the gimplified value back into the teams clause itself,
   unless EXPR was already a decl or TARGET_EXPR.  */
11560 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
11561 OMP_CLAUSE_OPERAND (c, 0) = *p;
/* Prepend the computed THREAD_LIMIT and NUM_TEAMS clauses to the
   target construct's clause chain.  */
11563 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
11564 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
11565 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11566 OMP_TARGET_CLAUSES (target) = c;
11567 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
11568 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
11569 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11570 OMP_TARGET_CLAUSES (target) = c;
11573 /* Gimplify the gross structure of several OMP constructs. */
11575 static void
11576 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
11578 tree expr = *expr_p;
11579 gimple *stmt;
11580 gimple_seq body = NULL;
11581 enum omp_region_type ort;
/* Choose the OMP region type from the construct's tree code; combined
   constructs get the *_COMBINED variants.  */
11583 switch (TREE_CODE (expr))
11585 case OMP_SECTIONS:
11586 case OMP_SINGLE:
11587 ort = ORT_WORKSHARE;
11588 break;
11589 case OMP_TARGET:
11590 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
11591 break;
11592 case OACC_KERNELS:
11593 ort = ORT_ACC_KERNELS;
11594 break;
11595 case OACC_PARALLEL:
11596 ort = ORT_ACC_PARALLEL;
11597 break;
11598 case OACC_DATA:
11599 ort = ORT_ACC_DATA;
11600 break;
11601 case OMP_TARGET_DATA:
11602 ort = ORT_TARGET_DATA;
11603 break;
11604 case OMP_TEAMS:
11605 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
/* A teams construct with no enclosing OMP context at all, or nested
   directly inside the outermost target region of an "omp declare
   target" function, is flagged as host teams.  */
11606 if (gimplify_omp_ctxp == NULL
11607 || (gimplify_omp_ctxp->region_type == ORT_TARGET
11608 && gimplify_omp_ctxp->outer_context == NULL
11609 && lookup_attribute ("omp declare target",
11610 DECL_ATTRIBUTES (current_function_decl))))
11611 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
11612 break;
11613 case OACC_HOST_DATA:
11614 ort = ORT_ACC_HOST_DATA;
11615 break;
11616 default:
11617 gcc_unreachable ();
11619 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
11620 TREE_CODE (expr));
11621 if (TREE_CODE (expr) == OMP_TARGET)
11622 optimize_target_teams (expr, pre_p);
/* Target, target data and host teams regions gimplify their body in a
   fresh gimplify context of their own.  */
11623 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
11624 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11626 push_gimplify_context ();
11627 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
11628 if (gimple_code (g) == GIMPLE_BIND)
11629 pop_gimplify_context (g);
11630 else
11631 pop_gimplify_context (NULL);
/* Data regions wrap the body in a GIMPLE_TRY_FINALLY whose cleanup
   calls the matching data-end builtin.  */
11632 if ((ort & ORT_TARGET_DATA) != 0)
11634 enum built_in_function end_ix;
11635 switch (TREE_CODE (expr))
11637 case OACC_DATA:
11638 case OACC_HOST_DATA:
11639 end_ix = BUILT_IN_GOACC_DATA_END;
11640 break;
11641 case OMP_TARGET_DATA:
11642 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
11643 break;
11644 default:
11645 gcc_unreachable ();
11647 tree fn = builtin_decl_explicit (end_ix);
11648 g = gimple_build_call (fn, 0);
11649 gimple_seq cleanup = NULL;
11650 gimple_seq_add_stmt (&cleanup, g);
11651 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11652 body = NULL;
11653 gimple_seq_add_stmt (&body, g);
11656 else
11657 gimplify_and_add (OMP_BODY (expr), &body);
11658 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
11659 TREE_CODE (expr));
/* Build the GIMPLE statement corresponding to the construct.  */
11661 switch (TREE_CODE (expr))
11663 case OACC_DATA:
11664 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
11665 OMP_CLAUSES (expr));
11666 break;
11667 case OACC_KERNELS:
11668 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
11669 OMP_CLAUSES (expr));
11670 break;
11671 case OACC_HOST_DATA:
11672 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
11673 OMP_CLAUSES (expr));
11674 break;
11675 case OACC_PARALLEL:
11676 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
11677 OMP_CLAUSES (expr));
11678 break;
11679 case OMP_SECTIONS:
11680 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
11681 break;
11682 case OMP_SINGLE:
11683 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
11684 break;
11685 case OMP_TARGET:
11686 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
11687 OMP_CLAUSES (expr));
11688 break;
11689 case OMP_TARGET_DATA:
11690 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
11691 OMP_CLAUSES (expr));
11692 break;
11693 case OMP_TEAMS:
11694 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
11695 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11696 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
11697 break;
11698 default:
11699 gcc_unreachable ();
11702 gimplify_seq_add_stmt (pre_p, stmt);
11703 *expr_p = NULL_TREE;
11706 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
11707 target update constructs. */
11709 static void
11710 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
11712 tree expr = *expr_p;
11713 int kind;
11714 gomp_target *stmt;
11715 enum omp_region_type ort = ORT_WORKSHARE;
/* Map the tree code to the GIMPLE_OMP_TARGET kind; OpenACC variants
   also switch the region type to ORT_ACC.  */
11717 switch (TREE_CODE (expr))
11719 case OACC_ENTER_DATA:
11720 case OACC_EXIT_DATA:
11721 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
11722 ort = ORT_ACC;
11723 break;
11724 case OACC_UPDATE:
11725 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
11726 ort = ORT_ACC;
11727 break;
11728 case OMP_TARGET_UPDATE:
11729 kind = GF_OMP_TARGET_KIND_UPDATE;
11730 break;
11731 case OMP_TARGET_ENTER_DATA:
11732 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
11733 break;
11734 case OMP_TARGET_EXIT_DATA:
11735 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
11736 break;
11737 default:
11738 gcc_unreachable ();
11740 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
11741 ort, TREE_CODE (expr));
11742 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
11743 TREE_CODE (expr));
/* OpenACC "update if_present": demote FORCE_TO/FORCE_FROM map kinds.  */
11744 if (TREE_CODE (expr) == OACC_UPDATE
11745 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11746 OMP_CLAUSE_IF_PRESENT))
11748 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
11749 clause. */
11750 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11751 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11752 switch (OMP_CLAUSE_MAP_KIND (c))
11754 case GOMP_MAP_FORCE_TO:
11755 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
11756 break;
11757 case GOMP_MAP_FORCE_FROM:
11758 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
11759 break;
11760 default:
11761 break;
11764 else if (TREE_CODE (expr) == OACC_EXIT_DATA
11765 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11766 OMP_CLAUSE_FINALIZE))
11768 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
11769 semantics apply to all mappings of this OpenACC directive. */
11770 bool finalize_marked = false;
11771 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11772 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11773 switch (OMP_CLAUSE_MAP_KIND (c))
11775 case GOMP_MAP_FROM:
11776 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
11777 finalize_marked = true;
11778 break;
11779 case GOMP_MAP_RELEASE:
11780 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
11781 finalize_marked = true;
11782 break;
11783 default:
11784 /* Check consistency: libgomp relies on the very first data
11785 mapping clause being marked, so make sure we did that before
11786 any other mapping clauses. */
11787 gcc_assert (finalize_marked);
11788 break;
/* Standalone constructs carry no body.  */
11791 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
11793 gimplify_seq_add_stmt (pre_p, stmt);
11794 *expr_p = NULL_TREE;
11797 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
11798 stabilized the lhs of the atomic operation as *ADDR. Return true if
11799 EXPR is this stabilized form. */
11801 static bool
11802 goa_lhs_expr_p (tree expr, tree addr)
11804 /* Also include casts to other type variants. The C front end is fond
11805 of adding these for e.g. volatile variables. This is like
11806 STRIP_TYPE_NOPS but includes the main variant lookup. */
11807 STRIP_USELESS_TYPE_CONVERSION (expr);
11809 if (TREE_CODE (expr) == INDIRECT_REF)
11811 expr = TREE_OPERAND (expr, 0);
/* Peel identical, type-compatible conversions off EXPR and ADDR in
   lock-step until they either match or diverge.  */
11812 while (expr != addr
11813 && (CONVERT_EXPR_P (expr)
11814 || TREE_CODE (expr) == NON_LVALUE_EXPR)
11815 && TREE_CODE (expr) == TREE_CODE (addr)
11816 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
11818 expr = TREE_OPERAND (expr, 0);
11819 addr = TREE_OPERAND (addr, 0);
11821 if (expr == addr)
11822 return true;
/* Two ADDR_EXPRs of the same object are also considered equal.  */
11823 return (TREE_CODE (addr) == ADDR_EXPR
11824 && TREE_CODE (expr) == ADDR_EXPR
11825 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
/* EXPR may also be the object ADDR takes the address of, directly.  */
11827 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
11828 return true;
11829 return false;
11832 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
11833 expression does not involve the lhs, evaluate it into a temporary.
11834 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
11835 or -1 if an error was encountered. */
11837 static int
11838 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
11839 tree lhs_var)
11841 tree expr = *expr_p;
11842 int saw_lhs;
/* The whole expression is the stabilized lhs: substitute LHS_VAR.  */
11844 if (goa_lhs_expr_p (expr, lhs_addr))
11846 *expr_p = lhs_var;
11847 return 1;
11849 if (is_gimple_val (expr))
11850 return 0;
/* Recurse into the subexpressions that may contain the lhs.  */
11852 saw_lhs = 0;
11853 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
11855 case tcc_binary:
11856 case tcc_comparison:
11857 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
11858 lhs_var);
11859 /* FALLTHRU */
11860 case tcc_unary:
11861 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
11862 lhs_var);
11863 break;
11864 case tcc_expression:
11865 switch (TREE_CODE (expr))
11867 case TRUTH_ANDIF_EXPR:
11868 case TRUTH_ORIF_EXPR:
11869 case TRUTH_AND_EXPR:
11870 case TRUTH_OR_EXPR:
11871 case TRUTH_XOR_EXPR:
11872 case BIT_INSERT_EXPR:
11873 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
11874 lhs_addr, lhs_var);
11875 /* FALLTHRU */
11876 case TRUTH_NOT_EXPR:
11877 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11878 lhs_addr, lhs_var);
11879 break;
11880 case COMPOUND_EXPR:
11881 /* Break out any preevaluations from cp_build_modify_expr. */
11882 for (; TREE_CODE (expr) == COMPOUND_EXPR;
11883 expr = TREE_OPERAND (expr, 1))
11884 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
11885 *expr_p = expr;
11886 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
11887 default:
11888 break;
11890 break;
11891 case tcc_reference:
11892 if (TREE_CODE (expr) == BIT_FIELD_REF)
11893 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11894 lhs_addr, lhs_var);
11895 break;
11896 default:
11897 break;
/* The lhs does not appear anywhere in EXPR: evaluate the expression
   into a temporary so the atomic expansion only sees a gimple value;
   a gimplification failure yields -1.  */
11900 if (saw_lhs == 0)
11902 enum gimplify_status gs;
11903 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
11904 if (gs != GS_ALL_DONE)
11905 saw_lhs = -1;
11908 return saw_lhs;
11911 /* Gimplify an OMP_ATOMIC statement. */
11913 static enum gimplify_status
11914 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
11916 tree addr = TREE_OPERAND (*expr_p, 0);
11917 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
11918 ? NULL : TREE_OPERAND (*expr_p, 1);
11919 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
11920 tree tmp_load;
11921 gomp_atomic_load *loadstmt;
11922 gomp_atomic_store *storestmt;
/* Temporary that receives the atomically loaded value.  */
11924 tmp_load = create_tmp_reg (type);
/* Replace appearances of the lhs in RHS with TMP_LOAD, evaluating
   everything else into temporaries; < 0 means an error occurred.  */
11925 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
11926 return GS_ERROR;
11928 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
11929 != GS_ALL_DONE)
11930 return GS_ERROR;
/* Emit the GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair,
   propagating the construct's memory order.  */
11932 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
11933 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
11934 gimplify_seq_add_stmt (pre_p, loadstmt);
11935 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
11936 != GS_ALL_DONE)
11937 return GS_ERROR;
/* An atomic read just stores the loaded value back.  */
11939 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
11940 rhs = tmp_load;
11941 storestmt
11942 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
11943 gimplify_seq_add_stmt (pre_p, storestmt);
/* Decide what value, if any, the whole expression yields: the old
   value for reads/capture-old, the new value for capture-new.  */
11944 switch (TREE_CODE (*expr_p))
11946 case OMP_ATOMIC_READ:
11947 case OMP_ATOMIC_CAPTURE_OLD:
11948 *expr_p = tmp_load;
11949 gimple_omp_atomic_set_need_value (loadstmt);
11950 break;
11951 case OMP_ATOMIC_CAPTURE_NEW:
11952 *expr_p = rhs;
11953 gimple_omp_atomic_set_need_value (storestmt);
11954 break;
11955 default:
11956 *expr_p = NULL;
11957 break;
11960 return GS_ALL_DONE;
11963 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
11964 body, and adding some EH bits. */
11966 static enum gimplify_status
11967 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
11969 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
11970 gimple *body_stmt;
11971 gtransaction *trans_stmt;
11972 gimple_seq body = NULL;
11973 int subcode = 0;
11975 /* Wrap the transaction body in a BIND_EXPR so we have a context
11976 where to put decls for OMP. */
11977 if (TREE_CODE (tbody) != BIND_EXPR)
11979 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
11980 TREE_SIDE_EFFECTS (bind) = 1;
11981 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
11982 TRANSACTION_EXPR_BODY (expr) = bind;
11985 push_gimplify_context ();
11986 temp = voidify_wrapper_expr (*expr_p, NULL);
11988 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
11989 pop_gimplify_context (body_stmt);
11991 trans_stmt = gimple_build_transaction (body);
/* Record the outer/relaxed transaction flavor in the subcode.  */
11992 if (TRANSACTION_EXPR_OUTER (expr))
11993 subcode = GTMA_IS_OUTER;
11994 else if (TRANSACTION_EXPR_RELAXED (expr))
11995 subcode = GTMA_IS_RELAXED;
11996 gimple_transaction_set_subcode (trans_stmt, subcode);
11998 gimplify_seq_add_stmt (pre_p, trans_stmt);
/* If voidify_wrapper_expr produced a temporary for the value of the
   transaction, keep gimplifying it.  */
12000 if (temp)
12002 *expr_p = temp;
12003 return GS_OK;
12006 *expr_p = NULL_TREE;
12007 return GS_ALL_DONE;
12010 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12011 is the OMP_BODY of the original EXPR (which has already been
12012 gimplified so it's not present in the EXPR).
12014 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12016 static gimple *
12017 gimplify_omp_ordered (tree expr, gimple_seq body)
12019 tree c, decls;
12020 int failures = 0;
12021 unsigned int i;
12022 tree source_c = NULL_TREE;
12023 tree sink_c = NULL_TREE;
/* Diagnose invalid depend(sink:)/depend(source) clauses.  */
12025 if (gimplify_omp_ctxp)
12027 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12028 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12029 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
12030 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
12031 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
12033 error_at (OMP_CLAUSE_LOCATION (c),
12034 "%<ordered%> construct with %<depend%> clause must be "
12035 "closely nested inside a loop with %<ordered%> clause "
12036 "with a parameter");
12037 failures++;
12039 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12040 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
12042 bool fail = false;
/* Check every depend(sink:) variable against the recorded loop
   iteration variables; loop_iter_var holds pairs, so element 2*i is
   the expected variable and 2*i+1 the value substituted for it.  */
12043 for (decls = OMP_CLAUSE_DECL (c), i = 0;
12044 decls && TREE_CODE (decls) == TREE_LIST;
12045 decls = TREE_CHAIN (decls), ++i)
12046 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
12047 continue;
12048 else if (TREE_VALUE (decls)
12049 != gimplify_omp_ctxp->loop_iter_var[2 * i])
12051 error_at (OMP_CLAUSE_LOCATION (c),
12052 "variable %qE is not an iteration "
12053 "of outermost loop %d, expected %qE",
12054 TREE_VALUE (decls), i + 1,
12055 gimplify_omp_ctxp->loop_iter_var[2 * i]);
12056 fail = true;
12057 failures++;
12059 else
12060 TREE_VALUE (decls)
12061 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
12062 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
12064 error_at (OMP_CLAUSE_LOCATION (c),
12065 "number of variables in %<depend(sink)%> "
12066 "clause does not match number of "
12067 "iteration variables");
12068 failures++;
12070 sink_c = c;
12072 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12073 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
12075 if (source_c)
12077 error_at (OMP_CLAUSE_LOCATION (c),
12078 "more than one %<depend(source)%> clause on an "
12079 "%<ordered%> construct");
12080 failures++;
12082 else
12083 source_c = c;
/* depend(source) and depend(sink:) cannot both appear on one
   construct.  */
12086 if (source_c && sink_c)
12088 error_at (OMP_CLAUSE_LOCATION (source_c),
12089 "%<depend(source)%> clause specified together with "
12090 "%<depend(sink:)%> clauses on the same construct");
12091 failures++;
/* On any diagnosed error drop the construct, emitting a nop instead.  */
12094 if (failures)
12095 return gimple_build_nop ();
12096 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
12099 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12100 expression produces a value to be used as an operand inside a GIMPLE
12101 statement, the value will be stored back in *EXPR_P. This value will
12102 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12103 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12104 emitted in PRE_P and POST_P.
12106 Additionally, this process may overwrite parts of the input
12107 expression during gimplification. Ideally, it should be
12108 possible to do non-destructive gimplification.
12110 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12111 the expression needs to evaluate to a value to be used as
12112 an operand in a GIMPLE statement, this value will be stored in
12113 *EXPR_P on exit. This happens when the caller specifies one
12114 of fb_lvalue or fb_rvalue fallback flags.
12116 PRE_P will contain the sequence of GIMPLE statements corresponding
12117 to the evaluation of EXPR and all the side-effects that must
12118 be executed before the main expression. On exit, the last
12119 statement of PRE_P is the core statement being gimplified. For
12120 instance, when gimplifying 'if (++a)' the last statement in
12121 PRE_P will be 'if (t.1)' where t.1 is the result of
12122 pre-incrementing 'a'.
12124 POST_P will contain the sequence of GIMPLE statements corresponding
12125 to the evaluation of all the side-effects that must be executed
12126 after the main expression. If this is NULL, the post
12127 side-effects are stored at the end of PRE_P.
12129 The reason why the output is split in two is to handle post
12130 side-effects explicitly. In some cases, an expression may have
12131 inner and outer post side-effects which need to be emitted in
12132 an order different from the one given by the recursive
12133 traversal. For instance, for the expression (*p--)++ the post
12134 side-effects of '--' must actually occur *after* the post
12135 side-effects of '++'. However, gimplification will first visit
12136 the inner expression, so if a separate POST sequence was not
12137 used, the resulting sequence would be:
12139 1 t.1 = *p
12140 2 p = p - 1
12141 3 t.2 = t.1 + 1
12142 4 *p = t.2
12144 However, the post-decrement operation in line #2 must not be
12145 evaluated until after the store to *p at line #4, so the
12146 correct sequence should be:
12148 1 t.1 = *p
12149 2 t.2 = t.1 + 1
12150 3 *p = t.2
12151 4 p = p - 1
12153 So, by specifying a separate post queue, it is possible
12154 to emit the post side-effects in the correct order.
12155 If POST_P is NULL, an internal queue will be used. Before
12156 returning to the caller, the sequence POST_P is appended to
12157 the main output sequence PRE_P.
12159 GIMPLE_TEST_F points to a function that takes a tree T and
12160 returns nonzero if T is in the GIMPLE form requested by the
12161 caller. The GIMPLE predicates are in gimple.c.
12163 FALLBACK tells the function what sort of a temporary we want if
12164 gimplification cannot produce an expression that complies with
12165 GIMPLE_TEST_F.
12167 fb_none means that no temporary should be generated
12168 fb_rvalue means that an rvalue is OK to generate
12169 fb_lvalue means that an lvalue is OK to generate
12170 fb_either means that either is OK, but an lvalue is preferable.
12171 fb_mayfail means that gimplification may fail (in which case
12172 GS_ERROR will be returned)
12174 The return value is either GS_ERROR or GS_ALL_DONE, since this
12175 function iterates until EXPR is completely gimplified or an error
12176 occurs. */
12178 enum gimplify_status
12179 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12180 bool (*gimple_test_f) (tree), fallback_t fallback)
12182 tree tmp;
12183 gimple_seq internal_pre = NULL;
12184 gimple_seq internal_post = NULL;
12185 tree save_expr;
12186 bool is_statement;
12187 location_t saved_location;
12188 enum gimplify_status ret;
12189 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
12190 tree label;
12192 save_expr = *expr_p;
12193 if (save_expr == NULL_TREE)
12194 return GS_ALL_DONE;
12196 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12197 is_statement = gimple_test_f == is_gimple_stmt;
12198 if (is_statement)
12199 gcc_assert (pre_p);
12201 /* Consistency checks. */
12202 if (gimple_test_f == is_gimple_reg)
12203 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12204 else if (gimple_test_f == is_gimple_val
12205 || gimple_test_f == is_gimple_call_addr
12206 || gimple_test_f == is_gimple_condexpr
12207 || gimple_test_f == is_gimple_mem_rhs
12208 || gimple_test_f == is_gimple_mem_rhs_or_call
12209 || gimple_test_f == is_gimple_reg_rhs
12210 || gimple_test_f == is_gimple_reg_rhs_or_call
12211 || gimple_test_f == is_gimple_asm_val
12212 || gimple_test_f == is_gimple_mem_ref_addr)
12213 gcc_assert (fallback & fb_rvalue);
12214 else if (gimple_test_f == is_gimple_min_lval
12215 || gimple_test_f == is_gimple_lvalue)
12216 gcc_assert (fallback & fb_lvalue);
12217 else if (gimple_test_f == is_gimple_addressable)
12218 gcc_assert (fallback & fb_either);
12219 else if (gimple_test_f == is_gimple_stmt)
12220 gcc_assert (fallback == fb_none);
12221 else
12223 /* We should have recognized the GIMPLE_TEST_F predicate to
12224 know what kind of fallback to use in case a temporary is
12225 needed to hold the value or address of *EXPR_P. */
12226 gcc_unreachable ();
12229 /* We used to check the predicate here and return immediately if it
12230 succeeds. This is wrong; the design is for gimplification to be
12231 idempotent, and for the predicates to only test for valid forms, not
12232 whether they are fully simplified. */
12233 if (pre_p == NULL)
12234 pre_p = &internal_pre;
12236 if (post_p == NULL)
12237 post_p = &internal_post;
12239 /* Remember the last statements added to PRE_P and POST_P. Every
12240 new statement added by the gimplification helpers needs to be
12241 annotated with location information. To centralize the
12242 responsibility, we remember the last statement that had been
12243 added to both queues before gimplifying *EXPR_P. If
12244 gimplification produces new statements in PRE_P and POST_P, those
12245 statements will be annotated with the same location information
12246 as *EXPR_P. */
12247 pre_last_gsi = gsi_last (*pre_p);
12248 post_last_gsi = gsi_last (*post_p);
12250 saved_location = input_location;
12251 if (save_expr != error_mark_node
12252 && EXPR_HAS_LOCATION (*expr_p))
12253 input_location = EXPR_LOCATION (*expr_p);
12255 /* Loop over the specific gimplifiers until the toplevel node
12256 remains the same. */
12259 /* Strip away as many useless type conversions as possible
12260 at the toplevel. */
12261 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
12263 /* Remember the expr. */
12264 save_expr = *expr_p;
12266 /* Die, die, die, my darling. */
12267 if (error_operand_p (save_expr))
12269 ret = GS_ERROR;
12270 break;
12273 /* Do any language-specific gimplification. */
12274 ret = ((enum gimplify_status)
12275 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
12276 if (ret == GS_OK)
12278 if (*expr_p == NULL_TREE)
12279 break;
12280 if (*expr_p != save_expr)
12281 continue;
12283 else if (ret != GS_UNHANDLED)
12284 break;
12286 /* Make sure that all the cases set 'ret' appropriately. */
12287 ret = GS_UNHANDLED;
12288 switch (TREE_CODE (*expr_p))
12290 /* First deal with the special cases. */
12292 case POSTINCREMENT_EXPR:
12293 case POSTDECREMENT_EXPR:
12294 case PREINCREMENT_EXPR:
12295 case PREDECREMENT_EXPR:
12296 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
12297 fallback != fb_none,
12298 TREE_TYPE (*expr_p));
12299 break;
12301 case VIEW_CONVERT_EXPR:
12302 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
12303 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
12305 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12306 post_p, is_gimple_val, fb_rvalue);
12307 recalculate_side_effects (*expr_p);
12308 break;
12310 /* Fallthru. */
12312 case ARRAY_REF:
12313 case ARRAY_RANGE_REF:
12314 case REALPART_EXPR:
12315 case IMAGPART_EXPR:
12316 case COMPONENT_REF:
12317 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
12318 fallback ? fallback : fb_rvalue);
12319 break;
12321 case COND_EXPR:
12322 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
12324 /* C99 code may assign to an array in a structure value of a
12325 conditional expression, and this has undefined behavior
12326 only on execution, so create a temporary if an lvalue is
12327 required. */
12328 if (fallback == fb_lvalue)
12330 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12331 mark_addressable (*expr_p);
12332 ret = GS_OK;
12334 break;
12336 case CALL_EXPR:
12337 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
12339 /* C99 code may assign to an array in a structure returned
12340 from a function, and this has undefined behavior only on
12341 execution, so create a temporary if an lvalue is
12342 required. */
12343 if (fallback == fb_lvalue)
12345 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12346 mark_addressable (*expr_p);
12347 ret = GS_OK;
12349 break;
12351 case TREE_LIST:
12352 gcc_unreachable ();
12354 case COMPOUND_EXPR:
12355 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
12356 break;
12358 case COMPOUND_LITERAL_EXPR:
12359 ret = gimplify_compound_literal_expr (expr_p, pre_p,
12360 gimple_test_f, fallback);
12361 break;
12363 case MODIFY_EXPR:
12364 case INIT_EXPR:
12365 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
12366 fallback != fb_none);
12367 break;
12369 case TRUTH_ANDIF_EXPR:
12370 case TRUTH_ORIF_EXPR:
12372 /* Preserve the original type of the expression and the
12373 source location of the outer expression. */
12374 tree org_type = TREE_TYPE (*expr_p);
12375 *expr_p = gimple_boolify (*expr_p);
12376 *expr_p = build3_loc (input_location, COND_EXPR,
12377 org_type, *expr_p,
12378 fold_convert_loc
12379 (input_location,
12380 org_type, boolean_true_node),
12381 fold_convert_loc
12382 (input_location,
12383 org_type, boolean_false_node));
12384 ret = GS_OK;
12385 break;
12388 case TRUTH_NOT_EXPR:
12390 tree type = TREE_TYPE (*expr_p);
12391 /* The parsers are careful to generate TRUTH_NOT_EXPR
12392 only with operands that are always zero or one.
12393 We do not fold here but handle the only interesting case
12394 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
12395 *expr_p = gimple_boolify (*expr_p);
12396 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
12397 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
12398 TREE_TYPE (*expr_p),
12399 TREE_OPERAND (*expr_p, 0));
12400 else
12401 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
12402 TREE_TYPE (*expr_p),
12403 TREE_OPERAND (*expr_p, 0),
12404 build_int_cst (TREE_TYPE (*expr_p), 1));
12405 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
12406 *expr_p = fold_convert_loc (input_location, type, *expr_p);
12407 ret = GS_OK;
12408 break;
12411 case ADDR_EXPR:
12412 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
12413 break;
12415 case ANNOTATE_EXPR:
12417 tree cond = TREE_OPERAND (*expr_p, 0);
12418 tree kind = TREE_OPERAND (*expr_p, 1);
12419 tree data = TREE_OPERAND (*expr_p, 2);
12420 tree type = TREE_TYPE (cond);
12421 if (!INTEGRAL_TYPE_P (type))
12423 *expr_p = cond;
12424 ret = GS_OK;
12425 break;
12427 tree tmp = create_tmp_var (type);
12428 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
12429 gcall *call
12430 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
12431 gimple_call_set_lhs (call, tmp);
12432 gimplify_seq_add_stmt (pre_p, call);
12433 *expr_p = tmp;
12434 ret = GS_ALL_DONE;
12435 break;
12438 case VA_ARG_EXPR:
12439 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
12440 break;
12442 CASE_CONVERT:
12443 if (IS_EMPTY_STMT (*expr_p))
12445 ret = GS_ALL_DONE;
12446 break;
12449 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
12450 || fallback == fb_none)
12452 /* Just strip a conversion to void (or in void context) and
12453 try again. */
12454 *expr_p = TREE_OPERAND (*expr_p, 0);
12455 ret = GS_OK;
12456 break;
12459 ret = gimplify_conversion (expr_p);
12460 if (ret == GS_ERROR)
12461 break;
12462 if (*expr_p != save_expr)
12463 break;
12464 /* FALLTHRU */
12466 case FIX_TRUNC_EXPR:
12467 /* unary_expr: ... | '(' cast ')' val | ... */
12468 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12469 is_gimple_val, fb_rvalue);
12470 recalculate_side_effects (*expr_p);
12471 break;
12473 case INDIRECT_REF:
12475 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
12476 bool notrap = TREE_THIS_NOTRAP (*expr_p);
12477 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12479 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12480 if (*expr_p != save_expr)
12482 ret = GS_OK;
12483 break;
12486 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12487 is_gimple_reg, fb_rvalue);
12488 if (ret == GS_ERROR)
12489 break;
12491 recalculate_side_effects (*expr_p);
12492 *expr_p = fold_build2_loc (input_location, MEM_REF,
12493 TREE_TYPE (*expr_p),
12494 TREE_OPERAND (*expr_p, 0),
12495 build_int_cst (saved_ptr_type, 0));
12496 TREE_THIS_VOLATILE (*expr_p) = volatilep;
12497 TREE_THIS_NOTRAP (*expr_p) = notrap;
12498 ret = GS_OK;
12499 break;
12502 /* We arrive here through the various re-gimplifcation paths. */
12503 case MEM_REF:
12504 /* First try re-folding the whole thing. */
12505 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12506 TREE_OPERAND (*expr_p, 0),
12507 TREE_OPERAND (*expr_p, 1));
12508 if (tmp)
12510 REF_REVERSE_STORAGE_ORDER (tmp)
12511 = REF_REVERSE_STORAGE_ORDER (*expr_p);
12512 *expr_p = tmp;
12513 recalculate_side_effects (*expr_p);
12514 ret = GS_OK;
12515 break;
12517 /* Avoid re-gimplifying the address operand if it is already
12518 in suitable form. Re-gimplifying would mark the address
12519 operand addressable. Always gimplify when not in SSA form
12520 as we still may have to gimplify decls with value-exprs. */
12521 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
12522 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12524 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12525 is_gimple_mem_ref_addr, fb_rvalue);
12526 if (ret == GS_ERROR)
12527 break;
12529 recalculate_side_effects (*expr_p);
12530 ret = GS_ALL_DONE;
12531 break;
12533 /* Constants need not be gimplified. */
12534 case INTEGER_CST:
12535 case REAL_CST:
12536 case FIXED_CST:
12537 case STRING_CST:
12538 case COMPLEX_CST:
12539 case VECTOR_CST:
12540 /* Drop the overflow flag on constants, we do not want
12541 that in the GIMPLE IL. */
12542 if (TREE_OVERFLOW_P (*expr_p))
12543 *expr_p = drop_tree_overflow (*expr_p);
12544 ret = GS_ALL_DONE;
12545 break;
12547 case CONST_DECL:
12548 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12549 CONST_DECL node. Otherwise the decl is replaceable by its
12550 value. */
12551 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12552 if (fallback & fb_lvalue)
12553 ret = GS_ALL_DONE;
12554 else
12556 *expr_p = DECL_INITIAL (*expr_p);
12557 ret = GS_OK;
12559 break;
12561 case DECL_EXPR:
12562 ret = gimplify_decl_expr (expr_p, pre_p);
12563 break;
12565 case BIND_EXPR:
12566 ret = gimplify_bind_expr (expr_p, pre_p);
12567 break;
12569 case LOOP_EXPR:
12570 ret = gimplify_loop_expr (expr_p, pre_p);
12571 break;
12573 case SWITCH_EXPR:
12574 ret = gimplify_switch_expr (expr_p, pre_p);
12575 break;
12577 case EXIT_EXPR:
12578 ret = gimplify_exit_expr (expr_p);
12579 break;
12581 case GOTO_EXPR:
12582 /* If the target is not LABEL, then it is a computed jump
12583 and the target needs to be gimplified. */
12584 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
12586 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12587 NULL, is_gimple_val, fb_rvalue);
12588 if (ret == GS_ERROR)
12589 break;
12591 gimplify_seq_add_stmt (pre_p,
12592 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
12593 ret = GS_ALL_DONE;
12594 break;
12596 case PREDICT_EXPR:
12597 gimplify_seq_add_stmt (pre_p,
12598 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12599 PREDICT_EXPR_OUTCOME (*expr_p)));
12600 ret = GS_ALL_DONE;
12601 break;
12603 case LABEL_EXPR:
12604 ret = gimplify_label_expr (expr_p, pre_p);
12605 label = LABEL_EXPR_LABEL (*expr_p);
12606 gcc_assert (decl_function_context (label) == current_function_decl);
12608 /* If the label is used in a goto statement, or address of the label
12609 is taken, we need to unpoison all variables that were seen so far.
12610 Doing so would prevent us from reporting false positives. */
12611 if (asan_poisoned_variables
12612 && asan_used_labels != NULL
12613 && asan_used_labels->contains (label))
12614 asan_poison_variables (asan_poisoned_variables, false, pre_p);
12615 break;
12617 case CASE_LABEL_EXPR:
12618 ret = gimplify_case_label_expr (expr_p, pre_p);
12620 if (gimplify_ctxp->live_switch_vars)
12621 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12622 pre_p);
12623 break;
12625 case RETURN_EXPR:
12626 ret = gimplify_return_expr (*expr_p, pre_p);
12627 break;
12629 case CONSTRUCTOR:
12630 /* Don't reduce this in place; let gimplify_init_constructor work its
12631 magic. But if we're just elaborating this for side effects, just
12632 gimplify any element that has side-effects. */
12633 if (fallback == fb_none)
12635 unsigned HOST_WIDE_INT ix;
12636 tree val;
12637 tree temp = NULL_TREE;
12638 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12639 if (TREE_SIDE_EFFECTS (val))
12640 append_to_statement_list (val, &temp);
12642 *expr_p = temp;
12643 ret = temp ? GS_OK : GS_ALL_DONE;
12645 /* C99 code may assign to an array in a constructed
12646 structure or union, and this has undefined behavior only
12647 on execution, so create a temporary if an lvalue is
12648 required. */
12649 else if (fallback == fb_lvalue)
12651 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12652 mark_addressable (*expr_p);
12653 ret = GS_OK;
12655 else
12656 ret = GS_ALL_DONE;
12657 break;
12659 /* The following are special cases that are not handled by the
12660 original GIMPLE grammar. */
12662 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
12663 eliminated. */
12664 case SAVE_EXPR:
12665 ret = gimplify_save_expr (expr_p, pre_p, post_p);
12666 break;
12668 case BIT_FIELD_REF:
12669 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12670 post_p, is_gimple_lvalue, fb_either);
12671 recalculate_side_effects (*expr_p);
12672 break;
12674 case TARGET_MEM_REF:
12676 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
12678 if (TMR_BASE (*expr_p))
12679 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
12680 post_p, is_gimple_mem_ref_addr, fb_either);
12681 if (TMR_INDEX (*expr_p))
12682 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
12683 post_p, is_gimple_val, fb_rvalue);
12684 if (TMR_INDEX2 (*expr_p))
12685 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
12686 post_p, is_gimple_val, fb_rvalue);
12687 /* TMR_STEP and TMR_OFFSET are always integer constants. */
12688 ret = MIN (r0, r1);
12690 break;
12692 case NON_LVALUE_EXPR:
12693 /* This should have been stripped above. */
12694 gcc_unreachable ();
12696 case ASM_EXPR:
12697 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
12698 break;
12700 case TRY_FINALLY_EXPR:
12701 case TRY_CATCH_EXPR:
12703 gimple_seq eval, cleanup;
12704 gtry *try_;
12706 /* Calls to destructors are generated automatically in FINALLY/CATCH
12707 block. They should have location as UNKNOWN_LOCATION. However,
12708 gimplify_call_expr will reset these call stmts to input_location
12709 if it finds stmt's location is unknown. To prevent resetting for
12710 destructors, we set the input_location to unknown.
12711 Note that this only affects the destructor calls in FINALLY/CATCH
12712 block, and will automatically reset to its original value by the
12713 end of gimplify_expr. */
12714 input_location = UNKNOWN_LOCATION;
12715 eval = cleanup = NULL;
12716 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
12717 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
12718 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
12719 if (gimple_seq_empty_p (cleanup))
12721 gimple_seq_add_seq (pre_p, eval);
12722 ret = GS_ALL_DONE;
12723 break;
12725 try_ = gimple_build_try (eval, cleanup,
12726 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
12727 ? GIMPLE_TRY_FINALLY
12728 : GIMPLE_TRY_CATCH);
12729 if (EXPR_HAS_LOCATION (save_expr))
12730 gimple_set_location (try_, EXPR_LOCATION (save_expr));
12731 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
12732 gimple_set_location (try_, saved_location);
12733 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
12734 gimple_try_set_catch_is_cleanup (try_,
12735 TRY_CATCH_IS_CLEANUP (*expr_p));
12736 gimplify_seq_add_stmt (pre_p, try_);
12737 ret = GS_ALL_DONE;
12738 break;
12741 case CLEANUP_POINT_EXPR:
12742 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
12743 break;
12745 case TARGET_EXPR:
12746 ret = gimplify_target_expr (expr_p, pre_p, post_p);
12747 break;
12749 case CATCH_EXPR:
12751 gimple *c;
12752 gimple_seq handler = NULL;
12753 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
12754 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
12755 gimplify_seq_add_stmt (pre_p, c);
12756 ret = GS_ALL_DONE;
12757 break;
12760 case EH_FILTER_EXPR:
12762 gimple *ehf;
12763 gimple_seq failure = NULL;
12765 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
12766 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
12767 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
12768 gimplify_seq_add_stmt (pre_p, ehf);
12769 ret = GS_ALL_DONE;
12770 break;
12773 case OBJ_TYPE_REF:
12775 enum gimplify_status r0, r1;
12776 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
12777 post_p, is_gimple_val, fb_rvalue);
12778 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
12779 post_p, is_gimple_val, fb_rvalue);
12780 TREE_SIDE_EFFECTS (*expr_p) = 0;
12781 ret = MIN (r0, r1);
12783 break;
12785 case LABEL_DECL:
12786 /* We get here when taking the address of a label. We mark
12787 the label as "forced"; meaning it can never be removed and
12788 it is a potential target for any computed goto. */
12789 FORCED_LABEL (*expr_p) = 1;
12790 ret = GS_ALL_DONE;
12791 break;
12793 case STATEMENT_LIST:
12794 ret = gimplify_statement_list (expr_p, pre_p);
12795 break;
12797 case WITH_SIZE_EXPR:
12799 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12800 post_p == &internal_post ? NULL : post_p,
12801 gimple_test_f, fallback);
12802 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12803 is_gimple_val, fb_rvalue);
12804 ret = GS_ALL_DONE;
12806 break;
12808 case VAR_DECL:
12809 case PARM_DECL:
12810 ret = gimplify_var_or_parm_decl (expr_p);
12811 break;
12813 case RESULT_DECL:
12814 /* When within an OMP context, notice uses of variables. */
12815 if (gimplify_omp_ctxp)
12816 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12817 ret = GS_ALL_DONE;
12818 break;
12820 case DEBUG_EXPR_DECL:
12821 gcc_unreachable ();
12823 case DEBUG_BEGIN_STMT:
12824 gimplify_seq_add_stmt (pre_p,
12825 gimple_build_debug_begin_stmt
12826 (TREE_BLOCK (*expr_p),
12827 EXPR_LOCATION (*expr_p)));
12828 ret = GS_ALL_DONE;
12829 *expr_p = NULL;
12830 break;
12832 case SSA_NAME:
12833 /* Allow callbacks into the gimplifier during optimization. */
12834 ret = GS_ALL_DONE;
12835 break;
12837 case OMP_PARALLEL:
12838 gimplify_omp_parallel (expr_p, pre_p);
12839 ret = GS_ALL_DONE;
12840 break;
12842 case OMP_TASK:
12843 gimplify_omp_task (expr_p, pre_p);
12844 ret = GS_ALL_DONE;
12845 break;
12847 case OMP_FOR:
12848 case OMP_SIMD:
12849 case OMP_DISTRIBUTE:
12850 case OMP_TASKLOOP:
12851 case OACC_LOOP:
12852 ret = gimplify_omp_for (expr_p, pre_p);
12853 break;
12855 case OACC_CACHE:
12856 gimplify_oacc_cache (expr_p, pre_p);
12857 ret = GS_ALL_DONE;
12858 break;
12860 case OACC_DECLARE:
12861 gimplify_oacc_declare (expr_p, pre_p);
12862 ret = GS_ALL_DONE;
12863 break;
12865 case OACC_HOST_DATA:
12866 case OACC_DATA:
12867 case OACC_KERNELS:
12868 case OACC_PARALLEL:
12869 case OMP_SECTIONS:
12870 case OMP_SINGLE:
12871 case OMP_TARGET:
12872 case OMP_TARGET_DATA:
12873 case OMP_TEAMS:
12874 gimplify_omp_workshare (expr_p, pre_p);
12875 ret = GS_ALL_DONE;
12876 break;
12878 case OACC_ENTER_DATA:
12879 case OACC_EXIT_DATA:
12880 case OACC_UPDATE:
12881 case OMP_TARGET_UPDATE:
12882 case OMP_TARGET_ENTER_DATA:
12883 case OMP_TARGET_EXIT_DATA:
12884 gimplify_omp_target_update (expr_p, pre_p);
12885 ret = GS_ALL_DONE;
12886 break;
12888 case OMP_SECTION:
12889 case OMP_MASTER:
12890 case OMP_ORDERED:
12891 case OMP_CRITICAL:
12893 gimple_seq body = NULL;
12894 gimple *g;
12896 gimplify_and_add (OMP_BODY (*expr_p), &body);
12897 switch (TREE_CODE (*expr_p))
12899 case OMP_SECTION:
12900 g = gimple_build_omp_section (body);
12901 break;
12902 case OMP_MASTER:
12903 g = gimple_build_omp_master (body);
12904 break;
12905 case OMP_ORDERED:
12906 g = gimplify_omp_ordered (*expr_p, body);
12907 break;
12908 case OMP_CRITICAL:
12909 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
12910 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
12911 gimplify_adjust_omp_clauses (pre_p, body,
12912 &OMP_CRITICAL_CLAUSES (*expr_p),
12913 OMP_CRITICAL);
12914 g = gimple_build_omp_critical (body,
12915 OMP_CRITICAL_NAME (*expr_p),
12916 OMP_CRITICAL_CLAUSES (*expr_p));
12917 break;
12918 default:
12919 gcc_unreachable ();
12921 gimplify_seq_add_stmt (pre_p, g);
12922 ret = GS_ALL_DONE;
12923 break;
12926 case OMP_TASKGROUP:
12928 gimple_seq body = NULL;
12930 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
12931 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
12932 OMP_TASKGROUP);
12933 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
12934 gimplify_and_add (OMP_BODY (*expr_p), &body);
12935 gimple_seq cleanup = NULL;
12936 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
12937 gimple *g = gimple_build_call (fn, 0);
12938 gimple_seq_add_stmt (&cleanup, g);
12939 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12940 body = NULL;
12941 gimple_seq_add_stmt (&body, g);
12942 g = gimple_build_omp_taskgroup (body, *pclauses);
12943 gimplify_seq_add_stmt (pre_p, g);
12944 ret = GS_ALL_DONE;
12945 break;
12948 case OMP_ATOMIC:
12949 case OMP_ATOMIC_READ:
12950 case OMP_ATOMIC_CAPTURE_OLD:
12951 case OMP_ATOMIC_CAPTURE_NEW:
12952 ret = gimplify_omp_atomic (expr_p, pre_p);
12953 break;
12955 case TRANSACTION_EXPR:
12956 ret = gimplify_transaction (expr_p, pre_p);
12957 break;
12959 case TRUTH_AND_EXPR:
12960 case TRUTH_OR_EXPR:
12961 case TRUTH_XOR_EXPR:
12963 tree orig_type = TREE_TYPE (*expr_p);
12964 tree new_type, xop0, xop1;
12965 *expr_p = gimple_boolify (*expr_p);
12966 new_type = TREE_TYPE (*expr_p);
12967 if (!useless_type_conversion_p (orig_type, new_type))
12969 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
12970 ret = GS_OK;
12971 break;
12974 /* Boolified binary truth expressions are semantically equivalent
12975 to bitwise binary expressions. Canonicalize them to the
12976 bitwise variant. */
12977 switch (TREE_CODE (*expr_p))
12979 case TRUTH_AND_EXPR:
12980 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
12981 break;
12982 case TRUTH_OR_EXPR:
12983 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
12984 break;
12985 case TRUTH_XOR_EXPR:
12986 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
12987 break;
12988 default:
12989 break;
12991 /* Now make sure that operands have compatible type to
12992 expression's new_type. */
12993 xop0 = TREE_OPERAND (*expr_p, 0);
12994 xop1 = TREE_OPERAND (*expr_p, 1);
12995 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
12996 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
12997 new_type,
12998 xop0);
12999 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
13000 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
13001 new_type,
13002 xop1);
13003 /* Continue classified as tcc_binary. */
13004 goto expr_2;
13007 case VEC_COND_EXPR:
13009 enum gimplify_status r0, r1, r2;
13011 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13012 post_p, is_gimple_condexpr, fb_rvalue);
13013 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13014 post_p, is_gimple_val, fb_rvalue);
13015 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13016 post_p, is_gimple_val, fb_rvalue);
13018 ret = MIN (MIN (r0, r1), r2);
13019 recalculate_side_effects (*expr_p);
13021 break;
13023 case VEC_PERM_EXPR:
13024 /* Classified as tcc_expression. */
13025 goto expr_3;
13027 case BIT_INSERT_EXPR:
13028 /* Argument 3 is a constant. */
13029 goto expr_2;
13031 case POINTER_PLUS_EXPR:
13033 enum gimplify_status r0, r1;
13034 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13035 post_p, is_gimple_val, fb_rvalue);
13036 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13037 post_p, is_gimple_val, fb_rvalue);
13038 recalculate_side_effects (*expr_p);
13039 ret = MIN (r0, r1);
13040 break;
13043 default:
13044 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
13046 case tcc_comparison:
13047 /* Handle comparison of objects of non scalar mode aggregates
13048 with a call to memcmp. It would be nice to only have to do
13049 this for variable-sized objects, but then we'd have to allow
13050 the same nest of reference nodes we allow for MODIFY_EXPR and
13051 that's too complex.
13053 Compare scalar mode aggregates as scalar mode values. Using
13054 memcmp for them would be very inefficient at best, and is
13055 plain wrong if bitfields are involved. */
13057 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
13059 /* Vector comparisons need no boolification. */
13060 if (TREE_CODE (type) == VECTOR_TYPE)
13061 goto expr_2;
13062 else if (!AGGREGATE_TYPE_P (type))
13064 tree org_type = TREE_TYPE (*expr_p);
13065 *expr_p = gimple_boolify (*expr_p);
13066 if (!useless_type_conversion_p (org_type,
13067 TREE_TYPE (*expr_p)))
13069 *expr_p = fold_convert_loc (input_location,
13070 org_type, *expr_p);
13071 ret = GS_OK;
13073 else
13074 goto expr_2;
13076 else if (TYPE_MODE (type) != BLKmode)
13077 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13078 else
13079 ret = gimplify_variable_sized_compare (expr_p);
13081 break;
13084 /* If *EXPR_P does not need to be special-cased, handle it
13085 according to its class. */
13086 case tcc_unary:
13087 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13088 post_p, is_gimple_val, fb_rvalue);
13089 break;
13091 case tcc_binary:
13092 expr_2:
13094 enum gimplify_status r0, r1;
13096 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13097 post_p, is_gimple_val, fb_rvalue);
13098 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13099 post_p, is_gimple_val, fb_rvalue);
13101 ret = MIN (r0, r1);
13102 break;
13105 expr_3:
13107 enum gimplify_status r0, r1, r2;
13109 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13110 post_p, is_gimple_val, fb_rvalue);
13111 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13112 post_p, is_gimple_val, fb_rvalue);
13113 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13114 post_p, is_gimple_val, fb_rvalue);
13116 ret = MIN (MIN (r0, r1), r2);
13117 break;
13120 case tcc_declaration:
13121 case tcc_constant:
13122 ret = GS_ALL_DONE;
13123 goto dont_recalculate;
13125 default:
13126 gcc_unreachable ();
13129 recalculate_side_effects (*expr_p);
13131 dont_recalculate:
13132 break;
13135 gcc_assert (*expr_p || ret != GS_OK);
13137 while (ret == GS_OK);
13139 /* If we encountered an error_mark somewhere nested inside, either
13140 stub out the statement or propagate the error back out. */
13141 if (ret == GS_ERROR)
13143 if (is_statement)
13144 *expr_p = NULL;
13145 goto out;
13148 /* This was only valid as a return value from the langhook, which
13149 we handled. Make sure it doesn't escape from any other context. */
13150 gcc_assert (ret != GS_UNHANDLED);
13152 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
13154 /* We aren't looking for a value, and we don't have a valid
13155 statement. If it doesn't have side-effects, throw it away.
13156 We can also get here with code such as "*&&L;", where L is
13157 a LABEL_DECL that is marked as FORCED_LABEL. */
13158 if (TREE_CODE (*expr_p) == LABEL_DECL
13159 || !TREE_SIDE_EFFECTS (*expr_p))
13160 *expr_p = NULL;
13161 else if (!TREE_THIS_VOLATILE (*expr_p))
13163 /* This is probably a _REF that contains something nested that
13164 has side effects. Recurse through the operands to find it. */
13165 enum tree_code code = TREE_CODE (*expr_p);
13167 switch (code)
13169 case COMPONENT_REF:
13170 case REALPART_EXPR:
13171 case IMAGPART_EXPR:
13172 case VIEW_CONVERT_EXPR:
13173 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13174 gimple_test_f, fallback);
13175 break;
13177 case ARRAY_REF:
13178 case ARRAY_RANGE_REF:
13179 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13180 gimple_test_f, fallback);
13181 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13182 gimple_test_f, fallback);
13183 break;
13185 default:
13186 /* Anything else with side-effects must be converted to
13187 a valid statement before we get here. */
13188 gcc_unreachable ();
13191 *expr_p = NULL;
13193 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13194 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
13196 /* Historically, the compiler has treated a bare reference
13197 to a non-BLKmode volatile lvalue as forcing a load. */
13198 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
13200 /* Normally, we do not want to create a temporary for a
13201 TREE_ADDRESSABLE type because such a type should not be
13202 copied by bitwise-assignment. However, we make an
13203 exception here, as all we are doing here is ensuring that
13204 we read the bytes that make up the type. We use
13205 create_tmp_var_raw because create_tmp_var will abort when
13206 given a TREE_ADDRESSABLE type. */
13207 tree tmp = create_tmp_var_raw (type, "vol");
13208 gimple_add_tmp_var (tmp);
13209 gimplify_assign (tmp, *expr_p, pre_p);
13210 *expr_p = NULL;
13212 else
13213 /* We can't do anything useful with a volatile reference to
13214 an incomplete type, so just throw it away. Likewise for
13215 a BLKmode type, since any implicit inner load should
13216 already have been turned into an explicit one by the
13217 gimplification process. */
13218 *expr_p = NULL;
13221 /* If we are gimplifying at the statement level, we're done. Tack
13222 everything together and return. */
13223 if (fallback == fb_none || is_statement)
13225 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13226 it out for GC to reclaim it. */
13227 *expr_p = NULL_TREE;
13229 if (!gimple_seq_empty_p (internal_pre)
13230 || !gimple_seq_empty_p (internal_post))
13232 gimplify_seq_add_seq (&internal_pre, internal_post);
13233 gimplify_seq_add_seq (pre_p, internal_pre);
13236 /* The result of gimplifying *EXPR_P is going to be the last few
13237 statements in *PRE_P and *POST_P. Add location information
13238 to all the statements that were added by the gimplification
13239 helpers. */
13240 if (!gimple_seq_empty_p (*pre_p))
13241 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
13243 if (!gimple_seq_empty_p (*post_p))
13244 annotate_all_with_location_after (*post_p, post_last_gsi,
13245 input_location);
13247 goto out;
13250 #ifdef ENABLE_GIMPLE_CHECKING
13251 if (*expr_p)
13253 enum tree_code code = TREE_CODE (*expr_p);
13254 /* These expressions should already be in gimple IR form. */
13255 gcc_assert (code != MODIFY_EXPR
13256 && code != ASM_EXPR
13257 && code != BIND_EXPR
13258 && code != CATCH_EXPR
13259 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
13260 && code != EH_FILTER_EXPR
13261 && code != GOTO_EXPR
13262 && code != LABEL_EXPR
13263 && code != LOOP_EXPR
13264 && code != SWITCH_EXPR
13265 && code != TRY_FINALLY_EXPR
13266 && code != OACC_PARALLEL
13267 && code != OACC_KERNELS
13268 && code != OACC_DATA
13269 && code != OACC_HOST_DATA
13270 && code != OACC_DECLARE
13271 && code != OACC_UPDATE
13272 && code != OACC_ENTER_DATA
13273 && code != OACC_EXIT_DATA
13274 && code != OACC_CACHE
13275 && code != OMP_CRITICAL
13276 && code != OMP_FOR
13277 && code != OACC_LOOP
13278 && code != OMP_MASTER
13279 && code != OMP_TASKGROUP
13280 && code != OMP_ORDERED
13281 && code != OMP_PARALLEL
13282 && code != OMP_SECTIONS
13283 && code != OMP_SECTION
13284 && code != OMP_SINGLE);
13286 #endif
13288 /* Otherwise we're gimplifying a subexpression, so the resulting
13289 value is interesting. If it's a valid operand that matches
13290 GIMPLE_TEST_F, we're done. Unless we are handling some
13291 post-effects internally; if that's the case, we need to copy into
13292 a temporary before adding the post-effects to POST_P. */
13293 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
13294 goto out;
13296 /* Otherwise, we need to create a new temporary for the gimplified
13297 expression. */
13299 /* We can't return an lvalue if we have an internal postqueue. The
13300 object the lvalue refers to would (probably) be modified by the
13301 postqueue; we need to copy the value out first, which means an
13302 rvalue. */
13303 if ((fallback & fb_lvalue)
13304 && gimple_seq_empty_p (internal_post)
13305 && is_gimple_addressable (*expr_p))
13307 /* An lvalue will do. Take the address of the expression, store it
13308 in a temporary, and replace the expression with an INDIRECT_REF of
13309 that temporary. */
13310 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
13311 unsigned int ref_align = get_object_alignment (*expr_p);
13312 tree ref_type = TREE_TYPE (*expr_p);
13313 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
13314 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
13315 if (TYPE_ALIGN (ref_type) != ref_align)
13316 ref_type = build_aligned_type (ref_type, ref_align);
13317 *expr_p = build2 (MEM_REF, ref_type,
13318 tmp, build_zero_cst (ref_alias_type));
13320 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
13322 /* An rvalue will do. Assign the gimplified expression into a
13323 new temporary TMP and replace the original expression with
13324 TMP. First, make sure that the expression has a type so that
13325 it can be assigned into a temporary. */
13326 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
13327 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
13329 else
13331 #ifdef ENABLE_GIMPLE_CHECKING
13332 if (!(fallback & fb_mayfail))
13334 fprintf (stderr, "gimplification failed:\n");
13335 print_generic_expr (stderr, *expr_p);
13336 debug_tree (*expr_p);
13337 internal_error ("gimplification failed");
13339 #endif
13340 gcc_assert (fallback & fb_mayfail);
13342 /* If this is an asm statement, and the user asked for the
13343 impossible, don't die. Fail and let gimplify_asm_expr
13344 issue an error. */
13345 ret = GS_ERROR;
13346 goto out;
13349 /* Make sure the temporary matches our predicate. */
13350 gcc_assert ((*gimple_test_f) (*expr_p));
13352 if (!gimple_seq_empty_p (internal_post))
13354 annotate_all_with_location (internal_post, input_location);
13355 gimplify_seq_add_seq (pre_p, internal_post);
13358 out:
13359 input_location = saved_location;
13360 return ret;
13363 /* Like gimplify_expr but make sure the gimplified result is not itself
13364 a SSA name (but a decl if it were). Temporaries required by
13365 evaluating *EXPR_P may be still SSA names. */
13367 static enum gimplify_status
13368 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13369 bool (*gimple_test_f) (tree), fallback_t fallback,
13370 bool allow_ssa)
13372 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13373 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13374 gimple_test_f, fallback);
13375 if (! allow_ssa
13376 && TREE_CODE (*expr_p) == SSA_NAME)
13378 tree name = *expr_p;
13379 if (was_ssa_name_p)
13380 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13381 else
13383 /* Avoid the extra copy if possible. */
13384 *expr_p = create_tmp_reg (TREE_TYPE (name));
13385 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13386 release_ssa_name (name);
13389 return ret;
13392 /* Look through TYPE for variable-sized objects and gimplify each such
13393 size that we find. Add to LIST_P any statements generated. */
13395 void
13396 gimplify_type_sizes (tree type, gimple_seq *list_p)
13398 tree field, t;
13400 if (type == NULL || type == error_mark_node)
13401 return;
13403 /* We first do the main variant, then copy into any other variants. */
13404 type = TYPE_MAIN_VARIANT (type);
13406 /* Avoid infinite recursion. */
13407 if (TYPE_SIZES_GIMPLIFIED (type))
13408 return;
13410 TYPE_SIZES_GIMPLIFIED (type) = 1;
13412 switch (TREE_CODE (type))
13414 case INTEGER_TYPE:
13415 case ENUMERAL_TYPE:
13416 case BOOLEAN_TYPE:
13417 case REAL_TYPE:
13418 case FIXED_POINT_TYPE:
13419 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
13420 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
13422 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13424 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
13425 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
13427 break;
13429 case ARRAY_TYPE:
13430 /* These types may not have declarations, so handle them here. */
13431 gimplify_type_sizes (TREE_TYPE (type), list_p);
13432 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
13433 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
13434 with assigned stack slots, for -O1+ -g they should be tracked
13435 by VTA. */
13436 if (!(TYPE_NAME (type)
13437 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13438 && DECL_IGNORED_P (TYPE_NAME (type)))
13439 && TYPE_DOMAIN (type)
13440 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
13442 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
13443 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13444 DECL_IGNORED_P (t) = 0;
13445 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
13446 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13447 DECL_IGNORED_P (t) = 0;
13449 break;
13451 case RECORD_TYPE:
13452 case UNION_TYPE:
13453 case QUAL_UNION_TYPE:
13454 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
13455 if (TREE_CODE (field) == FIELD_DECL)
13457 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
13458 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
13459 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
13460 gimplify_type_sizes (TREE_TYPE (field), list_p);
13462 break;
13464 case POINTER_TYPE:
13465 case REFERENCE_TYPE:
13466 /* We used to recurse on the pointed-to type here, which turned out to
13467 be incorrect because its definition might refer to variables not
13468 yet initialized at this point if a forward declaration is involved.
13470 It was actually useful for anonymous pointed-to types to ensure
13471 that the sizes evaluation dominates every possible later use of the
13472 values. Restricting to such types here would be safe since there
13473 is no possible forward declaration around, but would introduce an
13474 undesirable middle-end semantic to anonymity. We then defer to
13475 front-ends the responsibility of ensuring that the sizes are
13476 evaluated both early and late enough, e.g. by attaching artificial
13477 type declarations to the tree. */
13478 break;
13480 default:
13481 break;
13484 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
13485 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
13487 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13489 TYPE_SIZE (t) = TYPE_SIZE (type);
13490 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
13491 TYPE_SIZES_GIMPLIFIED (t) = 1;
13495 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13496 a size or position, has had all of its SAVE_EXPRs evaluated.
13497 We add any required statements to *STMT_P. */
13499 void
13500 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
13502 tree expr = *expr_p;
13504 /* We don't do anything if the value isn't there, is constant, or contains
13505 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
13506 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
13507 will want to replace it with a new variable, but that will cause problems
13508 if this type is from outside the function. It's OK to have that here. */
13509 if (expr == NULL_TREE
13510 || is_gimple_constant (expr)
13511 || TREE_CODE (expr) == VAR_DECL
13512 || CONTAINS_PLACEHOLDER_P (expr))
13513 return;
13515 *expr_p = unshare_expr (expr);
13517 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13518 if the def vanishes. */
13519 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
13521 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
13522 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
13523 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
13524 if (is_gimple_constant (*expr_p))
13525 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* Context must not be nested: a fresh one is pushed here and must be the
     only one live until the matching pop_gimplify_context below.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement so the wrapping logic
	 below has something to look at.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Cleanups from gimplify_parameters must run on every exit path,
	     so guard the whole body with a GIMPLE_TRY_FINALLY.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Tear down any OMP context created above (or during gimplification).  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
13636 typedef char *char_p; /* For DEF_VEC_P. */
13638 /* Return whether we should exclude FNDECL from instrumentation. */
13640 static bool
13641 flag_instrument_functions_exclude_p (tree fndecl)
13643 vec<char_p> *v;
13645 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
13646 if (v && v->length () > 0)
13648 const char *name;
13649 int i;
13650 char *s;
13652 name = lang_hooks.decl_printable_name (fndecl, 0);
13653 FOR_EACH_VEC_ELT (*v, i, s)
13654 if (strstr (name, s) != NULL)
13655 return true;
13658 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
13659 if (v && v->length () > 0)
13661 const char *name;
13662 int i;
13663 char *s;
13665 name = DECL_SOURCE_FILE (fndecl);
13666 FOR_EACH_VEC_ELT (*v, i, s)
13667 if (strstr (name, s) != NULL)
13668 return true;
13671 return false;
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* ASan use-after-scope tracking is only live across the gimplify_body
     call; the set is created here and torn down right after.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;

      /* The instrumentation hooks aren't going to call the instrumented
	 function and the address they receive is expected to be matchable
	 against symbol addresses.  Make sure we don't create a trampoline,
	 in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      /* Exit hook: __builtin_return_address (0) then
	 __cyg_profile_func_exit, run as the TRY_FINALLY cleanup.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Entry hook: same shape, prepended before the guarded body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
13806 /* Return a dummy expression of type TYPE in order to keep going after an
13807 error. */
13809 static tree
13810 dummy_object (tree type)
13812 tree t = build_int_cst (build_pointer_type (type), 0);
13813 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Static so the "so you should pass ..." hint is only printed once
	 per compilation, not once per offending va_arg.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Type tags carry the requested type and the va_list pointer type through
     to the IFN_VA_ARG expansion.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
13897 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
13899 DST/SRC are the destination and source respectively. You can pass
13900 ungimplified trees in DST or SRC, in which case they will be
13901 converted to a gimple operand if necessary.
13903 This function returns the newly created GIMPLE_ASSIGN tuple. */
13905 gimple *
13906 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
13908 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
13909 gimplify_and_add (t, seq_p);
13910 ggc_free (t);
13911 return gimple_seq_last_stmt (*seq_p);
13914 inline hashval_t
13915 gimplify_hasher::hash (const elt_t *p)
13917 tree t = p->val;
13918 return iterative_hash_expr (t, 0);
13921 inline bool
13922 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
13924 tree t1 = p1->val;
13925 tree t2 = p2->val;
13926 enum tree_code code = TREE_CODE (t1);
13928 if (TREE_CODE (t2) != code
13929 || TREE_TYPE (t1) != TREE_TYPE (t2))
13930 return false;
13932 if (!operand_equal_p (t1, t2, 0))
13933 return false;
13935 /* Only allow them to compare equal if they also hash equal; otherwise
13936 results are nondeterminate, and we fail bootstrap comparison. */
13937 gcc_checking_assert (hash (p1) == hash (p2));
13939 return true;