/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "diagnostic.h"		/* For errorcount.  */
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an "always, to" or "always, tofrom"
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				/* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	/* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,	/* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,	/* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.cc only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx *c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
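
/* A minimal sketch of how a caller typically pairs these entry points
   (illustrative only; the default arguments come from the declaration
   in gimplify.h, and real call sites differ):

     push_gimplify_context ();
     gimplify_stmt (&body, &seq);
     pop_gimplify_context (bind_stmt);  */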

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the top element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the top element of the stack of bindings, i.e. the innermost
   binding.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
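
/* For example (illustrative, C++-flavoured): in "x = b ? f (T ()) : 0;"
   the destructor cleanup for T's temporary must run only on the branch
   that constructed it.  While gimplifying inside the COND_EXPR such
   cleanups accumulate in conditional_cleanups; gimple_pop_condition
   flushes them to the prequeue once we are back at unconditional
   scope.  */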

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
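
/* For instance, with optimization enabled, two formal-temporary requests
   for structurally identical VAL trees can end up sharing one temporary:
   the second lookup_tmp_var finds the first entry in temp_htab and
   returns its recorded temporary instead of creating a second
   "D.1235 = a + b".  (Names illustrative.)  */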

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding of function FN.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions, it
   is probably easier for a front-end to unshare the expressions manually.  On
   the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
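
/* As an illustrative example of the problem solved here: if a front end
   (or fold) reuses one MULT_EXPR node for two occurrences of "n * 4",
   gimplifying the first occurrence rewrites that node in place into a
   reference to a temporary, so the second occurrence would then see the
   already-lowered node instead of the original expression.  Unsharing
   gives each occurrence its own copy before that can happen.  */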

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */
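
/* For example (illustrative only): given a GNU statement expression used
   as an initializer,

     x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper has type int; this function gives it (and any
   nested wrappers) void type and pushes the value into a temporary,
   producing roughly

     ({ int i = f (); retval = i + 1; }); x = retval;  */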

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
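
/* In GIMPLE dumps the resulting pair looks roughly like

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   (temporary name illustrative).  */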

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   DECL variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of a DECL variable.  The call is
   inserted at the position identified by the iterator IT; the BEFORE flag
   selects whether the stmt goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
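
/* In GIMPLE dumps the emitted internal call looks roughly like

     .ASAN_MARK (POISON, &x, 4);

   (variable and size illustrative).  */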

/* Generate an IFN_ASAN_MARK internal call that either poisons or unpoisons
   a DECL, depending on the POISON flag.  The created statement is appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *) a;
  const tree *t2 = (const tree *) b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
   poisoning or unpoisoning it depending on the POISON flag.  The created
   statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable to
	 prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must
		 specify an allocator clause unless a requires directive with
		 the dynamic_allocators clause is present in the same
		 compilation unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and type(C_funptr);
		     note that normal proc pointers are rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
		     hence, for some decls, a size variable is saved in the
		     attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr)) is set
		     and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
		     is set, used in a condition much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not have properly
			 nested declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the GOMP_alloc
			 at the top, unless an allocator or size expression
			 requires to put it afterward; note that the size is
			 always later in generated code; for strings, no
			 size expr but still an expr might be available.
			 As LTO does not handle a statement list, 'sl' has
			 to be removed; done so by removing the attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);

		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
			 here; for C/C++ it will be added in the 'cleanup'
			 section after gimplification.  But Fortran already has
			 a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      ++e;
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 causes that the GOMP_free call is already added above;
		 and "omp allocate" is removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_STORAGE_END);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Maybe add an early return predict statement to the PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are inside a conditional context, this return is an early
     return; add a PREDICT statement marking it as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
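
/* For example, for "if (err) return -1;" the return inside the
   conditional is annotated in GIMPLE dumps roughly as

     // predicted unlikely by early return (on trees) predictor.
     return -1;  */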

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
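
/* For example, "return a + b;" in an int-returning function, which the
   front end hands us as "return <retval> = a + b", becomes roughly

     D.1234 = a + b;
     return D.1234;

   (temporary name illustrative).  */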

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
1936 /* A helper function to be called via walk_tree. Mark all labels under *TP
1937 as being forced. To be called for DECL_INITIAL of static variables. */
1939 static tree
1940 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1942 if (TYPE_P (*tp))
1943 *walk_subtrees = 0;
1944 if (TREE_CODE (*tp) == LABEL_DECL)
1946 FORCED_LABEL (*tp) = 1;
1947 cfun->has_forced_label_in_static = 1;
1950 return NULL_TREE;
1953 /* Generate an initialization for automatic variable DECL based on INIT_TYPE.
1954 Build a call to internal const function DEFERRED_INIT:
1955 1st argument: SIZE of the DECL;
1956 2nd argument: INIT_TYPE;
1957 3rd argument: NAME of the DECL;
1959 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
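/* For instance, with -ftrivial-auto-var-init=zero on a typical target,
   "int i;" is initialized as roughly
     i = .DEFERRED_INIT (4, 2, &"i"[0]);
   where 4 is the size in bytes and 2 the numeric INIT_TYPE. */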
1961 static void
1962 gimple_add_init_for_auto_var (tree decl,
1963 enum auto_init_type init_type,
1964 gimple_seq *seq_p)
1966 gcc_assert (auto_var_p (decl));
1967 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1968 location_t loc = EXPR_LOCATION (decl);
1969 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1971 tree init_type_node
1972 = build_int_cst (integer_type_node, (int) init_type);
1974 tree decl_name = NULL_TREE;
1975 if (DECL_NAME (decl))
1977 decl_name = build_string_literal (DECL_NAME (decl));
1979 else
1981 char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
1982 sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
1983 decl_name = build_string_literal (decl_name_anonymous);
1986 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
1987 TREE_TYPE (decl), 3,
1988 decl_size, init_type_node,
1989 decl_name);
1991 gimplify_assign (decl, call, seq_p);
1994 /* Generate padding initialization for automatic variable DECL.
1995 C guarantees that brace-initialization of an aggregate with fewer
1996 initializers than members will initialize the rest of the aggregate
1997 as if it were static initialization. In turn, static initialization
1998 guarantees that padding is initialized to zero. So, we always
1999 initialize paddings to zeroes regardless of INIT_TYPE.
2000 To do the padding initialization, we insert a call to
2001 __builtin_clear_padding (&decl, 0, for_auto_init = true).
2002 Note, we add an additional dummy argument, 'for_auto_init', to
2003 __builtin_clear_padding to distinguish whether this call is for
2004 automatic variable initialization or not. */
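/* As a concrete sketch, for "struct { char c; int i; } s;" on a typical
   target this zeroes the three padding bytes after "c", so pattern init
   does not leave 0xFE bytes in the padding. */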
2006 static void
2007 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
2008 gimple_seq *seq_p)
2010 tree addr_of_decl = NULL_TREE;
2011 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
2013 if (is_vla)
2015 /* The temporary address variable for this vla should be
2016 created in gimplify_vla_decl. */
2017 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
2018 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
2019 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
2021 else
2023 mark_addressable (decl);
2024 addr_of_decl = build_fold_addr_expr (decl);
2027 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
2028 build_one_cst (TREE_TYPE (addr_of_decl)));
2029 gimplify_seq_add_stmt (seq_p, call);
2032 /* Return true if DECL needs to be automatically initialized by the
2033 compiler. */
2034 static bool
2035 is_var_need_auto_init (tree decl)
2037 if (auto_var_p (decl)
2038 && (TREE_CODE (decl) != VAR_DECL
2039 || !DECL_HARD_REGISTER (decl))
2040 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2041 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
2042 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
2043 && !is_empty_type (TREE_TYPE (decl)))
2044 return true;
2045 return false;
2048 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2049 and initialization explicit. */
2051 static enum gimplify_status
2052 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
2054 tree stmt = *stmt_p;
2055 tree decl = DECL_EXPR_DECL (stmt);
2057 *stmt_p = NULL_TREE;
2059 if (TREE_TYPE (decl) == error_mark_node)
2060 return GS_ERROR;
2062 if ((TREE_CODE (decl) == TYPE_DECL
2063 || VAR_P (decl))
2064 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
2066 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
2067 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2068 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
2071 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2072 in case its size expressions contain problematic nodes like CALL_EXPR. */
2073 if (TREE_CODE (decl) == TYPE_DECL
2074 && DECL_ORIGINAL_TYPE (decl)
2075 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
2077 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
2078 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
2079 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
2082 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
2084 tree init = DECL_INITIAL (decl);
2085 bool is_vla = false;
2086 /* Check whether a decl has an FE-created VALUE_EXPR here, BEFORE
2087 gimplify_vla_decl creates a VALUE_EXPR for a vla decl.
2088 If the decl has a VALUE_EXPR that was created by the FE (usually
2089 the C++ FE), it's a proxy variable whose VALUE_EXPR the FE has
2090 already initialized, so we should not initialize it again. */
2091 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
2093 poly_uint64 size;
2094 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
2095 || (!TREE_STATIC (decl)
2096 && flag_stack_check == GENERIC_STACK_CHECK
2097 && maybe_gt (size,
2098 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
2100 gimplify_vla_decl (decl, seq_p);
2101 is_vla = true;
2104 if (asan_poisoned_variables
2105 && !is_vla
2106 && TREE_ADDRESSABLE (decl)
2107 && !TREE_STATIC (decl)
2108 && !DECL_HAS_VALUE_EXPR_P (decl)
2109 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
2110 && dbg_cnt (asan_use_after_scope)
2111 && !gimplify_omp_ctxp
2112 /* GNAT introduces temporaries to hold return values of calls in
2113 initializers of variables defined in other units, so the
2114 declaration of the variable is discarded completely. We do not
2115 want to issue poison calls for such dropped variables. */
2116 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
2117 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
2119 asan_poisoned_variables->add (decl);
2120 asan_poison_variable (decl, false, seq_p);
2121 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
2122 gimplify_ctxp->live_switch_vars->add (decl);
2125 /* Some front ends do not explicitly declare all anonymous
2126 artificial variables. We compensate here by declaring the
2127 variables, though it would be better if the front ends would
2128 explicitly declare them. */
2129 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
2130 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
2131 gimple_add_tmp_var (decl);
2133 if (init && init != error_mark_node)
2135 if (!TREE_STATIC (decl))
2137 DECL_INITIAL (decl) = NULL_TREE;
2138 init = build2 (INIT_EXPR, void_type_node, decl, init);
2139 gimplify_and_add (init, seq_p);
2140 ggc_free (init);
2141 /* Clear TREE_READONLY if we really have an initialization. */
2142 if (!DECL_INITIAL (decl)
2143 && !omp_privatize_by_reference (decl))
2144 TREE_READONLY (decl) = 0;
2146 else
2147 /* We must still examine initializers for static variables
2148 as they may contain a label address. */
2149 walk_tree (&init, force_labels_r, NULL, NULL);
2151 /* When there is no explicit initializer, insert an artificial
2152 initializer for this automatic variable if the user requested
2153 one. */
2154 else if (is_var_need_auto_init (decl)
2155 && !decl_had_value_expr_p)
2157 gimple_add_init_for_auto_var (decl,
2158 flag_auto_var_init,
2159 seq_p);
2160 /* The expansion of a call to the above .DEFERRED_INIT will apply
2161 block initialization to the whole space covered by this variable.
2162 As a result, all the paddings will be initialized to zeroes
2163 for zero initialization and 0xFE byte-repeatable patterns for
2164 pattern initialization.
2165 In order to make the paddings zeroes for pattern init, we
2166 should add a call to __builtin_clear_padding to clear the
2167 paddings to zero, compatible with Clang.
2168 We cannot insert this call if the variable is a gimple register
2169 since __builtin_clear_padding will take the address of the
2170 variable. As a result, if a long double/_Complex long double
2171 variable is later spilled onto the stack, its padding is 0xFE. */
2172 if (flag_auto_var_init == AUTO_INIT_PATTERN
2173 && !is_gimple_reg (decl)
2174 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
2175 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
2179 return GS_ALL_DONE;
2182 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2183 and replacing the LOOP_EXPR with a goto, but if the loop contains an
2184 EXIT_EXPR, we need to append a label for it to jump to. */
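/* E.g. LOOP_EXPR <body> is lowered to roughly
     start_label:
     body;
     goto start_label;
     exit_label:
   where exit_label is only emitted if the body contained an EXIT_EXPR. */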
2186 static enum gimplify_status
2187 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2189 tree saved_label = gimplify_ctxp->exit_label;
2190 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2192 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
2194 gimplify_ctxp->exit_label = NULL_TREE;
2196 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
2198 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
2200 if (gimplify_ctxp->exit_label)
2201 gimplify_seq_add_stmt (pre_p,
2202 gimple_build_label (gimplify_ctxp->exit_label));
2204 gimplify_ctxp->exit_label = saved_label;
2206 *expr_p = NULL;
2207 return GS_ALL_DONE;
2210 /* Gimplify a statement list onto a sequence. These may be created either
2211 by an enlightened front-end, or by shortcut_cond_expr. */
2213 static enum gimplify_status
2214 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2216 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2218 tree_stmt_iterator i = tsi_start (*expr_p);
2220 while (!tsi_end_p (i))
2222 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2223 tsi_delink (&i);
2226 if (temp)
2228 *expr_p = temp;
2229 return GS_OK;
2232 return GS_ALL_DONE;
2236 /* Emit a warning for the unreachable statement STMT if needed.
2237 Return the statement when the warning is emitted, otherwise
2238 return NULL. */
2239 static gimple *
2240 emit_warn_switch_unreachable (gimple *stmt)
2242 if (gimple_code (stmt) == GIMPLE_GOTO
2243 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2244 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2245 /* Don't warn for compiler-generated gotos. These occur
2246 in Duff's devices, for example. */
2247 return NULL;
2248 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2249 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2250 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2251 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2252 || (is_gimple_assign (stmt)
2253 && gimple_assign_single_p (stmt)
2254 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2255 && gimple_call_internal_p (
2256 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2257 IFN_DEFERRED_INIT))))
2258 /* Don't warn for compiler-generated initializations for
2259 -ftrivial-auto-var-init.
2260 There are 3 cases:
2261 case 1: a call to .DEFERRED_INIT;
2262 case 2: a call to __builtin_clear_padding with the 2nd argument
2263 present and non-zero;
2264 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2265 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2266 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2267 i1 = _1; */
2268 return NULL;
2269 else
2270 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2271 "statement will never be executed");
2272 return stmt;
2275 /* Callback for walk_gimple_seq. */
2277 static tree
2278 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2279 bool *handled_ops_p,
2280 struct walk_stmt_info *wi)
2282 gimple *stmt = gsi_stmt (*gsi_p);
2283 bool unreachable_issued = wi->info != NULL;
2285 *handled_ops_p = true;
2286 switch (gimple_code (stmt))
2288 case GIMPLE_TRY:
2289 /* A compiler-generated cleanup or a user-written try block.
2290 If it's empty, don't dive into it--that would result in
2291 worse location info. */
2292 if (gimple_try_eval (stmt) == NULL)
2294 if (warn_switch_unreachable && !unreachable_issued)
2295 wi->info = emit_warn_switch_unreachable (stmt);
2297 /* Stop when the auto var init warning is not on. */
2298 if (!warn_trivial_auto_var_init)
2299 return integer_zero_node;
2301 /* Fall through. */
2302 case GIMPLE_BIND:
2303 case GIMPLE_CATCH:
2304 case GIMPLE_EH_FILTER:
2305 case GIMPLE_TRANSACTION:
2306 /* Walk the sub-statements. */
2307 *handled_ops_p = false;
2308 break;
2310 case GIMPLE_DEBUG:
2311 /* Ignore these. We may generate them before declarations that
2312 are never executed. If there's something to warn about,
2313 there will be non-debug stmts too, and we'll catch those. */
2314 break;
2316 case GIMPLE_LABEL:
2317 /* Stop at the first label. */
2318 return integer_zero_node;
2319 case GIMPLE_CALL:
2320 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2322 *handled_ops_p = false;
2323 break;
2325 if (warn_trivial_auto_var_init
2326 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2327 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2329 /* Get the variable name from the 3rd argument of the call. */
2330 tree var_name = gimple_call_arg (stmt, 2);
2331 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2332 const char *var_name_str = TREE_STRING_POINTER (var_name);
2334 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2335 "%qs cannot be initialized with"
2336 " %<-ftrivial-auto-var-init%>",
2337 var_name_str);
2338 break;
2341 /* Fall through. */
2342 default:
2343 /* Check the first "real" statement (not a decl/lexical scope/...), and
2344 issue a warning if needed. */
2345 if (warn_switch_unreachable && !unreachable_issued)
2346 wi->info = emit_warn_switch_unreachable (stmt);
2347 /* Stop when the auto var init warning is not on. */
2348 if (!warn_trivial_auto_var_init)
2349 return integer_zero_node;
2350 break;
2352 return NULL_TREE;
2356 /* Possibly warn about unreachable statements between a switch's
2357 controlling expression and the first case. Also warn when
2358 -ftrivial-auto-var-init cannot initialize the auto variable in
2359 such a situation. SEQ is the body of a switch expression. */
2361 static void
2362 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2364 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2365 /* This warning doesn't play well with Fortran when optimizations
2366 are on. */
2367 || lang_GNU_Fortran ()
2368 || seq == NULL)
2369 return;
2371 struct walk_stmt_info wi;
2373 memset (&wi, 0, sizeof (wi));
2374 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2378 /* A label entry that pairs a label with a location. */
2379 struct label_entry
2381 tree label;
2382 location_t loc;
2385 /* Find LABEL in vector of label entries VEC. */
2387 static struct label_entry *
2388 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2390 unsigned int i;
2391 struct label_entry *l;
2393 FOR_EACH_VEC_ELT (*vec, i, l)
2394 if (l->label == label)
2395 return l;
2396 return NULL;
2399 /* Return true if LABEL, a LABEL_DECL, represents a case label
2400 in a vector of labels CASES. */
2402 static bool
2403 case_label_p (const vec<tree> *cases, tree label)
2405 unsigned int i;
2406 tree l;
2408 FOR_EACH_VEC_ELT (*cases, i, l)
2409 if (CASE_LABEL (l) == label)
2410 return true;
2411 return false;
2414 /* Find the last nondebug statement in a scope STMT. */
2416 static gimple *
2417 last_stmt_in_scope (gimple *stmt)
2419 if (!stmt)
2420 return NULL;
2422 switch (gimple_code (stmt))
2424 case GIMPLE_BIND:
2426 gbind *bind = as_a <gbind *> (stmt);
2427 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2428 return last_stmt_in_scope (stmt);
2431 case GIMPLE_TRY:
2433 gtry *try_stmt = as_a <gtry *> (stmt);
2434 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2435 gimple *last_eval = last_stmt_in_scope (stmt);
2436 if (gimple_stmt_may_fallthru (last_eval)
2437 && (last_eval == NULL
2438 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2439 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2441 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2442 return last_stmt_in_scope (stmt);
2444 else
2445 return last_eval;
2448 case GIMPLE_DEBUG:
2449 gcc_unreachable ();
2451 default:
2452 return stmt;
2456 /* Collect labels that may fall through into LABELS and return the statement
2457 preceding another case label, or a user-defined label. Store a location
2458 useful to give warnings at *PREVLOC (usually the location of the returned
2459 statement or of its surrounding scope). */
2461 static gimple *
2462 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2463 auto_vec <struct label_entry> *labels,
2464 location_t *prevloc)
2466 gimple *prev = NULL;
2468 *prevloc = UNKNOWN_LOCATION;
2471 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2473 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2474 which starts on a GIMPLE_SWITCH and ends with a break label.
2475 Handle that as a single statement that can fall through. */
2476 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2477 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2478 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2479 if (last
2480 && gimple_code (first) == GIMPLE_SWITCH
2481 && gimple_code (last) == GIMPLE_LABEL)
2483 tree label = gimple_label_label (as_a <glabel *> (last));
2484 if (SWITCH_BREAK_LABEL_P (label))
2486 prev = bind;
2487 gsi_next (gsi_p);
2488 continue;
2492 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2493 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2495 /* Nested scope. Only look at the last statement of
2496 the innermost scope. */
2497 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2498 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2499 if (last)
2501 prev = last;
2502 /* It might be a label without a location. Use the
2503 location of the scope then. */
2504 if (!gimple_has_location (prev))
2505 *prevloc = bind_loc;
2507 gsi_next (gsi_p);
2508 continue;
2511 /* Ifs are tricky. */
2512 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2514 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2515 tree false_lab = gimple_cond_false_label (cond_stmt);
2516 location_t if_loc = gimple_location (cond_stmt);
2518 /* If we have e.g.
2519 if (i > 1) goto <D.2259>; else goto D;
2520 we can't do much with the else-branch. */
2521 if (!DECL_ARTIFICIAL (false_lab))
2522 break;
2524 /* Go on until the false label, then one step back. */
2525 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2527 gimple *stmt = gsi_stmt (*gsi_p);
2528 if (gimple_code (stmt) == GIMPLE_LABEL
2529 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2530 break;
2533 /* Not found? Oops. */
2534 if (gsi_end_p (*gsi_p))
2535 break;
2537 /* A dead label can't fall through. */
2538 if (!UNUSED_LABEL_P (false_lab))
2540 struct label_entry l = { false_lab, if_loc };
2541 labels->safe_push (l);
2544 /* Go to the last statement of the then branch. */
2545 gsi_prev (gsi_p);
2547 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2548 <D.1759>:
2549 <stmt>;
2550 goto <D.1761>;
2551 <D.1760>: */
2553 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2554 && !gimple_has_location (gsi_stmt (*gsi_p)))
2556 /* Look at the statement before, it might be
2557 attribute fallthrough, in which case don't warn. */
2558 gsi_prev (gsi_p);
2559 bool fallthru_before_dest
2560 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2561 gsi_next (gsi_p);
2562 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2563 if (!fallthru_before_dest)
2565 struct label_entry l = { goto_dest, if_loc };
2566 labels->safe_push (l);
2569 /* This case is about
2570 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2571 <D.2022>:
2572 n = n + 1; // #1
2573 <D.2023>: // #2
2574 <D.1988>: // #3
2575 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2576 through to #3. So set PREV to #1. */
2577 else if (UNUSED_LABEL_P (false_lab))
2578 prev = gsi_stmt (*gsi_p);
2580 /* And move back. */
2581 gsi_next (gsi_p);
2584 /* Remember the last statement. Skip labels that are of no interest
2585 to us. */
2586 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2588 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2589 if (find_label_entry (labels, label))
2590 prev = gsi_stmt (*gsi_p);
2592 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2594 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2596 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2597 prev = gsi_stmt (*gsi_p);
2598 gsi_next (gsi_p);
2600 while (!gsi_end_p (*gsi_p)
2601 /* Stop if we find a case or a user-defined label. */
2602 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2603 || !gimple_has_location (gsi_stmt (*gsi_p))));
2605 if (prev && gimple_has_location (prev))
2606 *prevloc = gimple_location (prev);
2607 return prev;
2610 /* Return true if the switch fallthrough warning should occur. LABEL is
2611 the label statement that we're falling through to. */
2613 static bool
2614 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2616 gimple_stmt_iterator gsi = *gsi_p;
2618 /* Don't warn if the label is marked with a "falls through" comment. */
2619 if (FALLTHROUGH_LABEL_P (label))
2620 return false;
2622 /* Don't warn for non-case labels followed by a statement:
2623 case 0:
2624 foo ();
2625 label:
2626 bar ();
2627 as these are likely intentional. */
2628 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2630 tree l;
2631 while (!gsi_end_p (gsi)
2632 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2633 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2634 && !case_label_p (&gimplify_ctxp->case_labels, l))
2635 gsi_next_nondebug (&gsi);
2636 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2637 return false;
2640 /* Don't warn for terminated branches, i.e. when the subsequent case
2641 label immediately breaks. */
2642 gsi = *gsi_p;
2644 /* Skip all immediately following labels. */
2645 while (!gsi_end_p (gsi)
2646 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2647 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2648 gsi_next_nondebug (&gsi);
2650 /* { ... something; default:; } */
2651 if (gsi_end_p (gsi)
2652 /* { ... something; default: break; } or
2653 { ... something; default: goto L; } */
2654 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2655 /* { ... something; default: return; } */
2656 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2657 return false;
2659 return true;
2662 /* Callback for walk_gimple_seq. */
2664 static tree
2665 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2666 struct walk_stmt_info *)
2668 gimple *stmt = gsi_stmt (*gsi_p);
2670 *handled_ops_p = true;
2671 switch (gimple_code (stmt))
2673 case GIMPLE_TRY:
2674 case GIMPLE_BIND:
2675 case GIMPLE_CATCH:
2676 case GIMPLE_EH_FILTER:
2677 case GIMPLE_TRANSACTION:
2678 /* Walk the sub-statements. */
2679 *handled_ops_p = false;
2680 break;
2682 /* Find a sequence of form:
2684 GIMPLE_LABEL
2685 [...]
2686 <may fallthru stmt>
2687 GIMPLE_LABEL
2689 and possibly warn. */
2690 case GIMPLE_LABEL:
2692 /* Found a label. Skip all immediately following labels. */
2693 while (!gsi_end_p (*gsi_p)
2694 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2695 gsi_next_nondebug (gsi_p);
2697 /* There might be no more statements. */
2698 if (gsi_end_p (*gsi_p))
2699 return integer_zero_node;
2701 /* Vector of labels that fall through. */
2702 auto_vec <struct label_entry> labels;
2703 location_t prevloc;
2704 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2706 /* There might be no more statements. */
2707 if (gsi_end_p (*gsi_p))
2708 return integer_zero_node;
2710 gimple *next = gsi_stmt (*gsi_p);
2711 tree label;
2712 /* If what follows is a label, then we may have a fallthrough. */
2713 if (gimple_code (next) == GIMPLE_LABEL
2714 && gimple_has_location (next)
2715 && (label = gimple_label_label (as_a <glabel *> (next)))
2716 && prev != NULL)
2718 struct label_entry *l;
2719 bool warned_p = false;
2720 auto_diagnostic_group d;
2721 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2722 /* Quiet. */;
2723 else if (gimple_code (prev) == GIMPLE_LABEL
2724 && (label = gimple_label_label (as_a <glabel *> (prev)))
2725 && (l = find_label_entry (&labels, label)))
2726 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2727 "this statement may fall through");
2728 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2729 /* Try to be clever and don't warn when the statement
2730 can't actually fall through. */
2731 && gimple_stmt_may_fallthru (prev)
2732 && prevloc != UNKNOWN_LOCATION)
2733 warned_p = warning_at (prevloc,
2734 OPT_Wimplicit_fallthrough_,
2735 "this statement may fall through");
2736 if (warned_p)
2737 inform (gimple_location (next), "here");
2739 /* Mark this label as processed so as to prevent multiple
2740 warnings in nested switches. */
2741 FALLTHROUGH_LABEL_P (label) = true;
2743 /* So that next warn_implicit_fallthrough_r will start looking for
2744 a new sequence starting with this label. */
2745 gsi_prev (gsi_p);
2748 break;
2749 default:
2750 break;
2752 return NULL_TREE;
2755 /* Warn when a switch case falls through. */
2757 static void
2758 maybe_warn_implicit_fallthrough (gimple_seq seq)
2760 if (!warn_implicit_fallthrough)
2761 return;
2763 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2764 if (!(lang_GNU_C ()
2765 || lang_GNU_CXX ()
2766 || lang_GNU_OBJC ()))
2767 return;
2769 struct walk_stmt_info wi;
2770 memset (&wi, 0, sizeof (wi));
2771 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2774 /* Callback for walk_gimple_seq. */
2776 static tree
2777 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2778 struct walk_stmt_info *wi)
2780 gimple *stmt = gsi_stmt (*gsi_p);
2782 *handled_ops_p = true;
2783 switch (gimple_code (stmt))
2785 case GIMPLE_TRY:
2786 case GIMPLE_BIND:
2787 case GIMPLE_CATCH:
2788 case GIMPLE_EH_FILTER:
2789 case GIMPLE_TRANSACTION:
2790 /* Walk the sub-statements. */
2791 *handled_ops_p = false;
2792 break;
2793 case GIMPLE_CALL:
2794 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2795 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2797 location_t loc = gimple_location (stmt);
2798 gsi_remove (gsi_p, true);
2799 wi->removed_stmt = true;
2801 /* The nothrow flag is added by genericize_c_loop to mark a fallthrough
2802 statement at the end of some loop's body. Those should always be
2803 diagnosed, either because they indeed don't precede
2804 a case label or default label, or because the next statement
2805 is not within the same iteration statement. */
2806 if ((stmt->subcode & GF_CALL_NOTHROW) != 0)
2808 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2809 "a case label or default label");
2810 break;
2813 if (gsi_end_p (*gsi_p))
2815 static_cast<location_t *>(wi->info)[0] = BUILTINS_LOCATION;
2816 static_cast<location_t *>(wi->info)[1] = loc;
2817 break;
2820 bool found = false;
2822 gimple_stmt_iterator gsi2 = *gsi_p;
2823 stmt = gsi_stmt (gsi2);
2824 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2826 /* Go on until the artificial label. */
2827 tree goto_dest = gimple_goto_dest (stmt);
2828 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2830 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2831 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2832 == goto_dest)
2833 break;
2836 /* Not found? Stop. */
2837 if (gsi_end_p (gsi2))
2838 break;
2840 /* Look one past it. */
2841 gsi_next (&gsi2);
2844 /* We're looking for a case label or default label here. */
2845 while (!gsi_end_p (gsi2))
2847 stmt = gsi_stmt (gsi2);
2848 if (gimple_code (stmt) == GIMPLE_LABEL)
2850 tree label = gimple_label_label (as_a <glabel *> (stmt));
2851 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2853 found = true;
2854 break;
2857 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2859 else if (!is_gimple_debug (stmt))
2860 /* Anything else is not expected. */
2861 break;
2862 gsi_next (&gsi2);
2864 if (!found)
2865 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2866 "a case label or default label");
2868 break;
2869 default:
2870 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2871 break;
2873 return NULL_TREE;
2876 /* Expand all FALLTHROUGH () calls in SEQ. */
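/* E.g. in
     case 1: ...; __attribute__ ((fallthrough));
     case 2: ...;
   the IFN_FALLTHROUGH call emitted for the attribute is simply removed,
   while a FALLTHROUGH () that does not precede a case or default label
   is diagnosed with a pedwarn. */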
2878 static void
2879 expand_FALLTHROUGH (gimple_seq *seq_p)
2881 struct walk_stmt_info wi;
2882 location_t loc[2];
2883 memset (&wi, 0, sizeof (wi));
2884 loc[0] = UNKNOWN_LOCATION;
2885 loc[1] = UNKNOWN_LOCATION;
2886 wi.info = (void *) &loc[0];
2887 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2888 if (loc[0] != UNKNOWN_LOCATION)
2889 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2890 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2891 pedwarn (loc[1], 0, "attribute %<fallthrough%> not preceding "
2892 "a case label or default label");
2896 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2897 branch to. */
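/* E.g. "switch (x) { case 1: ...; }" is lowered to roughly
     switch (x) <default: D.L2, case 1: D.L1>
     D.L1: ...
     D.L2:
   with an artificial default label appended when the source has none
   (the label names here are hypothetical). */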
2899 static enum gimplify_status
2900 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2902 tree switch_expr = *expr_p;
2903 gimple_seq switch_body_seq = NULL;
2904 enum gimplify_status ret;
2905 tree index_type = TREE_TYPE (switch_expr);
2906 if (index_type == NULL_TREE)
2907 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2909 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2910 fb_rvalue);
2911 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2912 return ret;
2914 if (SWITCH_BODY (switch_expr))
2916 vec<tree> labels;
2917 vec<tree> saved_labels;
2918 hash_set<tree> *saved_live_switch_vars = NULL;
2919 tree default_case = NULL_TREE;
2920 gswitch *switch_stmt;
2922 /* Save old labels, get new ones from body, then restore the old
2923 labels. Save all the things from the switch body to append after. */
2924 saved_labels = gimplify_ctxp->case_labels;
2925 gimplify_ctxp->case_labels.create (8);
2927 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2928 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2929 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2930 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2931 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2932 else
2933 gimplify_ctxp->live_switch_vars = NULL;
2935 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2936 gimplify_ctxp->in_switch_expr = true;
2938 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2940 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2941 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2942 maybe_warn_implicit_fallthrough (switch_body_seq);
2943 /* Only do this for the outermost GIMPLE_SWITCH. */
2944 if (!gimplify_ctxp->in_switch_expr)
2945 expand_FALLTHROUGH (&switch_body_seq);
2947 labels = gimplify_ctxp->case_labels;
2948 gimplify_ctxp->case_labels = saved_labels;
2950 if (gimplify_ctxp->live_switch_vars)
2952 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2953 delete gimplify_ctxp->live_switch_vars;
2955 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2957 preprocess_case_label_vec_for_gimple (labels, index_type,
2958 &default_case);
2960 bool add_bind = false;
2961 if (!default_case)
2963 glabel *new_default;
2965 default_case
2966 = build_case_label (NULL_TREE, NULL_TREE,
2967 create_artificial_label (UNKNOWN_LOCATION));
2968 if (old_in_switch_expr)
2970 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2971 add_bind = true;
2973 new_default = gimple_build_label (CASE_LABEL (default_case));
2974 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2976 else if (old_in_switch_expr)
2978 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2979 if (last && gimple_code (last) == GIMPLE_LABEL)
2981 tree label = gimple_label_label (as_a <glabel *> (last));
2982 if (SWITCH_BREAK_LABEL_P (label))
2983 add_bind = true;
2987 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2988 default_case, labels);
2989 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2990 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2991 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2992 so that we can easily find the start and end of the switch
2993 statement. */
2994 if (add_bind)
2996 gimple_seq bind_body = NULL;
2997 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2998 gimple_seq_add_seq (&bind_body, switch_body_seq);
2999 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
3000 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
3001 gimplify_seq_add_stmt (pre_p, bind);
3003 else
3005 gimplify_seq_add_stmt (pre_p, switch_stmt);
3006 gimplify_seq_add_seq (pre_p, switch_body_seq);
3008 labels.release ();
3010 else
3011 gcc_unreachable ();
3013 return GS_ALL_DONE;
3016 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
3018 static enum gimplify_status
3019 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
3021 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
3022 == current_function_decl);
3024 tree label = LABEL_EXPR_LABEL (*expr_p);
3025 glabel *label_stmt = gimple_build_label (label);
3026 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3027 gimplify_seq_add_stmt (pre_p, label_stmt);
3029 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3030 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3031 NOT_TAKEN));
3032 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3033 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3034 TAKEN));
3036 return GS_ALL_DONE;
3039 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
3041 static enum gimplify_status
3042 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
3044 struct gimplify_ctx *ctxp;
3045 glabel *label_stmt;
3047 /* Invalid programs can play Duff's Device type games with, for example,
3048 #pragma omp parallel. At least in the C front end, we don't
3049 detect such invalid branches until after gimplification, in the
3050 diagnose_omp_blocks pass. */
3051 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
3052 if (ctxp->case_labels.exists ())
3053 break;
3055 tree label = CASE_LABEL (*expr_p);
3056 label_stmt = gimple_build_label (label);
3057 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3058 ctxp->case_labels.safe_push (*expr_p);
3059 gimplify_seq_add_stmt (pre_p, label_stmt);
3061 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3062 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3063 NOT_TAKEN));
3064 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3065 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3066 TAKEN));
3068 return GS_ALL_DONE;
3071 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
3072 if necessary. */
3074 tree
3075 build_and_jump (tree *label_p)
3077 if (label_p == NULL)
3078 /* If there's nowhere to jump, just fall through. */
3079 return NULL_TREE;
3081 if (*label_p == NULL_TREE)
3083 tree label = create_artificial_label (UNKNOWN_LOCATION);
3084 *label_p = label;
3087 return build1 (GOTO_EXPR, void_type_node, *label_p);
3090 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3091 This also involves building a label to jump to and communicating it to
3092 gimplify_loop_expr through gimplify_ctxp->exit_label. */
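/* E.g. EXIT_EXPR <cond> becomes roughly
     if (cond) goto <exit_label>;
   where <exit_label> is created on demand and later emitted by
   gimplify_loop_expr after the loop body. */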
3094 static enum gimplify_status
3095 gimplify_exit_expr (tree *expr_p)
3097 tree cond = TREE_OPERAND (*expr_p, 0);
3098 tree expr;
3100 expr = build_and_jump (&gimplify_ctxp->exit_label);
3101 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
3102 *expr_p = expr;
3104 return GS_OK;
3107 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3108 different from its canonical type, wrap the whole thing inside a
3109 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3110 type.
3112 The canonical type of a COMPONENT_REF is the type of the field being
3113 referenced--unless the field is a bit-field which can be read directly
3114 in a smaller mode, in which case the canonical type is the
3115 sign-appropriate type corresponding to that mode. */
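/* E.g. for "struct S { int f : 8; } s;" used as an rvalue, "s.f" of
   type int may be canonicalized to the 8-bit type the bit-field can be
   read in and wrapped in a NOP_EXPR back to int (a sketch; the mode
   chosen is target-dependent). */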
3117 static void
3118 canonicalize_component_ref (tree *expr_p)
3120 tree expr = *expr_p;
3121 tree type;
3123 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
3125 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
3126 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
3127 else
3128 type = TREE_TYPE (TREE_OPERAND (expr, 1));
3130 /* One could argue that all the stuff below is not necessary for
3131 the non-bitfield case and declare it a FE error if type
3132 adjustment would be needed. */
3133 if (TREE_TYPE (expr) != type)
3135 #ifdef ENABLE_TYPES_CHECKING
3136 tree old_type = TREE_TYPE (expr);
3137 #endif
3138 int type_quals;
3140 /* We need to preserve qualifiers and propagate them from
3141 operand 0. */
3142 type_quals = TYPE_QUALS (type)
3143 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
3144 if (TYPE_QUALS (type) != type_quals)
3145 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
3147 /* Set the type of the COMPONENT_REF to the underlying type. */
3148 TREE_TYPE (expr) = type;
3150 #ifdef ENABLE_TYPES_CHECKING
3151 /* It is now a FE error, if the conversion from the canonical
3152 type to the original expression type is not useless. */
3153 gcc_assert (useless_type_conversion_p (old_type, type));
3154 #endif
3158 /* If a NOP conversion is changing a pointer to array of foo to a pointer
3159 to foo, embed that change in the ADDR_EXPR by converting
3160 T array[U];
3161 (T *)&array
3163 ==> &array[L]
3164 where L is the lower bound. For simplicity, only do this for constant
3165 lower bound.
3166 The constraint is that the type of &array[L] is trivially convertible
3167 to T *. */
3169 static void
3170 canonicalize_addr_expr (tree *expr_p)
3172 tree expr = *expr_p;
3173 tree addr_expr = TREE_OPERAND (expr, 0);
3174 tree datype, ddatype, pddatype;
3176 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3177 if (!POINTER_TYPE_P (TREE_TYPE (expr))
3178 || TREE_CODE (addr_expr) != ADDR_EXPR)
3179 return;
3181 /* The addr_expr type should be a pointer to an array. */
3182 datype = TREE_TYPE (TREE_TYPE (addr_expr));
3183 if (TREE_CODE (datype) != ARRAY_TYPE)
3184 return;
3186 /* The pointer to element type shall be trivially convertible to
3187 the expression pointer type. */
3188 ddatype = TREE_TYPE (datype);
3189 pddatype = build_pointer_type (ddatype);
3190 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3191 pddatype))
3192 return;
3194 /* The lower bound and element sizes must be constant. */
3195 if (!TYPE_SIZE_UNIT (ddatype)
3196 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3197 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3198 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3199 return;
3201 /* All checks succeeded. Build a new node to merge the cast. */
3202 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3203 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3204 NULL_TREE, NULL_TREE);
3205 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3207 /* We can have stripped a required restrict qualifier above. */
3208 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3209 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
3212 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3213 underneath as appropriate. */
3215 static enum gimplify_status
3216 gimplify_conversion (tree *expr_p)
3218 location_t loc = EXPR_LOCATION (*expr_p);
3219 gcc_assert (CONVERT_EXPR_P (*expr_p));
3221 /* Then strip away all but the outermost conversion. */
3222 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3224 /* And remove the outermost conversion if it's useless. */
3225 if (tree_ssa_useless_type_conversion (*expr_p))
3226 *expr_p = TREE_OPERAND (*expr_p, 0);
3228 /* If we still have a conversion at the toplevel,
3229 then canonicalize some constructs. */
3230 if (CONVERT_EXPR_P (*expr_p))
3232 tree sub = TREE_OPERAND (*expr_p, 0);
3234 /* If a NOP conversion is changing the type of a COMPONENT_REF
3235 expression, then canonicalize its type now in order to expose more
3236 redundant conversions. */
3237 if (TREE_CODE (sub) == COMPONENT_REF)
3238 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3240 /* If a NOP conversion is changing a pointer to array of foo
3241 to a pointer to foo, embed that change in the ADDR_EXPR. */
3242 else if (TREE_CODE (sub) == ADDR_EXPR)
3243 canonicalize_addr_expr (expr_p);
3246 /* If we have a conversion to a non-register type force the
3247 use of a VIEW_CONVERT_EXPR instead. */
3248 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3249 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3250 TREE_OPERAND (*expr_p, 0));
3252 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3253 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3254 TREE_SET_CODE (*expr_p, NOP_EXPR);
3256 return GS_OK;
3259 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3260 DECL_VALUE_EXPR, and it's worth re-examining things. */
3262 static enum gimplify_status
3263 gimplify_var_or_parm_decl (tree *expr_p)
3265 tree decl = *expr_p;
3267 /* ??? If this is a local variable, and it has not been seen in any
3268 outer BIND_EXPR, then it's probably the result of a duplicate
3269 declaration, for which we've already issued an error. It would
3270 be really nice if the front end wouldn't leak these at all.
3271 Currently the only known culprit is C++ destructors, as seen
3272 in g++.old-deja/g++.jason/binding.C.
3273 Other possible culprits are size expressions for variably modified
3274 types which are lost in the FE or not gimplified correctly. */
3275 if (VAR_P (decl)
3276 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3277 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3278 && decl_function_context (decl) == current_function_decl)
3280 gcc_assert (seen_error ());
3281 return GS_ERROR;
3284 /* When within an OMP context, notice uses of variables. */
3285 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3286 return GS_ALL_DONE;
3288 /* If the decl is an alias for another expression, substitute it now. */
3289 if (DECL_HAS_VALUE_EXPR_P (decl))
3291 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3292 return GS_OK;
3295 return GS_ALL_DONE;
3298 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3300 static void
3301 recalculate_side_effects (tree t)
3303 enum tree_code code = TREE_CODE (t);
3304 int len = TREE_OPERAND_LENGTH (t);
3305 int i;
3307 switch (TREE_CODE_CLASS (code))
3309 case tcc_expression:
3310 switch (code)
3312 case INIT_EXPR:
3313 case MODIFY_EXPR:
3314 case VA_ARG_EXPR:
3315 case PREDECREMENT_EXPR:
3316 case PREINCREMENT_EXPR:
3317 case POSTDECREMENT_EXPR:
3318 case POSTINCREMENT_EXPR:
3319 /* All of these have side-effects, no matter what their
3320 operands are. */
3321 return;
3323 default:
3324 break;
3326 /* Fall through. */
3328 case tcc_comparison: /* a comparison expression */
3329 case tcc_unary: /* a unary arithmetic expression */
3330 case tcc_binary: /* a binary arithmetic expression */
3331 case tcc_reference: /* a reference */
3332 case tcc_vl_exp: /* a function call */
3333 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3334 for (i = 0; i < len; ++i)
3336 tree op = TREE_OPERAND (t, i);
3337 if (op && TREE_SIDE_EFFECTS (op))
3338 TREE_SIDE_EFFECTS (t) = 1;
3340 break;
3342 case tcc_constant:
3343 /* No side-effects. */
3344 return;
3346 default:
3347 if (code == SSA_NAME)
3348 /* No side-effects. */
3349 return;
3350 gcc_unreachable ();
3354 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3355 node *EXPR_P.
3357 compound_lval
3358 : min_lval '[' val ']'
3359 | min_lval '.' ID
3360 | compound_lval '[' val ']'
3361 | compound_lval '.' ID
3363 This is not part of the original SIMPLE definition, which separates
3364 array and member references, but it seems reasonable to handle them
3365 together. Also, this way we don't run into problems with union
3366 aliasing; gcc requires that for accesses through a union to alias, the
3367 union reference must be explicit, which was not always the case when we
3368 were splitting up array and member refs.
3370 PRE_P points to the sequence where side effects that must happen before
3371 *EXPR_P should be stored.
3373 POST_P points to the sequence where side effects that must happen after
3374 *EXPR_P should be stored. */
3376 static enum gimplify_status
3377 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3378 fallback_t fallback)
3380 tree *p;
3381 enum gimplify_status ret = GS_ALL_DONE, tret;
3382 int i;
3383 location_t loc = EXPR_LOCATION (*expr_p);
3384 tree expr = *expr_p;
3386 /* Create a stack of the subexpressions so later we can walk them in
3387 order from inner to outer. */
3388 auto_vec<tree, 10> expr_stack;
3390 /* We can handle anything that get_inner_reference can deal with. */
3391 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3393 restart:
3394 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3395 if (TREE_CODE (*p) == INDIRECT_REF)
3396 *p = fold_indirect_ref_loc (loc, *p);
3398 if (handled_component_p (*p))
3400 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3401 additional COMPONENT_REFs. */
3402 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3403 && gimplify_var_or_parm_decl (p) == GS_OK)
3404 goto restart;
3405 else
3406 break;
3408 expr_stack.safe_push (*p);
3411 gcc_assert (expr_stack.length ());
3413 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3414 walked through and P points to the innermost expression.
3416 Java requires that we elaborate nodes in source order. That
3417 means we must gimplify the inner expression followed by each of
3418 the indices, in order. But we can't gimplify the inner
3419 expression until we deal with any variable bounds, sizes, or
3420 positions in order to deal with PLACEHOLDER_EXPRs.
3422 The base expression may contain a statement expression that
3423 has declarations used in size expressions, so has to be
3424 gimplified before gimplifying the size expressions.
3426 So we do this in three steps. First we deal with variable
3427 bounds, sizes, and positions, then we gimplify the base and
3428 ensure it is memory if needed, then we deal with the annotations
3429 for any variables in the components and any indices, from left
3430 to right. */
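/* E.g. for "a[i].f", EXPR_STACK holds the COMPONENT_REF and then the
   ARRAY_REF, and P points at "a": variable bounds, sizes and offsets
   are recorded into the refs first, then the base "a" is gimplified,
   then the recorded annotations and the index "i". */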
3432 bool need_non_reg = false;
3433 for (i = expr_stack.length () - 1; i >= 0; i--)
3435 tree t = expr_stack[i];
3437 if (error_operand_p (TREE_OPERAND (t, 0)))
3438 return GS_ERROR;
3440 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3442 /* Deal with the low bound and element type size and put them into
3443 the ARRAY_REF. If these values are set, they have already been
3444 gimplified. */
3445 if (TREE_OPERAND (t, 2) == NULL_TREE)
3447 tree low = unshare_expr (array_ref_low_bound (t));
3448 if (!is_gimple_min_invariant (low))
3450 TREE_OPERAND (t, 2) = low;
3454 if (TREE_OPERAND (t, 3) == NULL_TREE)
3456 tree elmt_size = array_ref_element_size (t);
3457 if (!is_gimple_min_invariant (elmt_size))
3459 elmt_size = unshare_expr (elmt_size);
3460 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3461 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3463 /* Divide the element size by the alignment of the element
3464 type (above). */
3465 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3466 elmt_size, factor);
3468 TREE_OPERAND (t, 3) = elmt_size;
3471 need_non_reg = true;
3473 else if (TREE_CODE (t) == COMPONENT_REF)
3475 /* Set the field offset into T and gimplify it. */
3476 if (TREE_OPERAND (t, 2) == NULL_TREE)
3478 tree offset = component_ref_field_offset (t);
3479 if (!is_gimple_min_invariant (offset))
3481 offset = unshare_expr (offset);
3482 tree field = TREE_OPERAND (t, 1);
3483 tree factor
3484 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3486 /* Divide the offset by its alignment. */
3487 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3488 offset, factor);
3490 TREE_OPERAND (t, 2) = offset;
3493 need_non_reg = true;
3495 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3496 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3497 is a non-register type then require the base object to be a
3498 non-register as well. */
3499 need_non_reg = true;
3502 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3503 so as to match the min_lval predicate. Failure to do so may result
3504 in the creation of large aggregate temporaries. */
3505 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3506 fallback | fb_lvalue);
3507 ret = MIN (ret, tret);
3508 if (ret == GS_ERROR)
3509 return GS_ERROR;
3511 /* Step 2a: if we have component references we do not support on
3512 registers then make sure the base isn't a register. Of course
3513 we can only do so if an rvalue is OK. */
3514 if (need_non_reg && (fallback & fb_rvalue))
3515 prepare_gimple_addressable (p, pre_p);
3518 /* Step 3: gimplify size expressions and the indices and operands of
3519 ARRAY_REF. During this loop we also remove any useless conversions.
3520 If we operate on a register also make sure to properly gimplify
3521 to individual operations. */
3523 bool reg_operations = is_gimple_reg (*p);
3524 for (; expr_stack.length () > 0; )
3526 tree t = expr_stack.pop ();
3528 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3530 gcc_assert (!reg_operations);
3532 /* Gimplify the low bound and element type size. */
3533 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3534 is_gimple_reg, fb_rvalue);
3535 ret = MIN (ret, tret);
3537 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3538 is_gimple_reg, fb_rvalue);
3539 ret = MIN (ret, tret);
3541 /* Gimplify the dimension. */
3542 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3543 is_gimple_val, fb_rvalue);
3544 ret = MIN (ret, tret);
3546 else if (TREE_CODE (t) == COMPONENT_REF)
3548 gcc_assert (!reg_operations);
3550 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3551 is_gimple_reg, fb_rvalue);
3552 ret = MIN (ret, tret);
3554 else if (reg_operations)
3556 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3557 is_gimple_val, fb_rvalue);
3558 ret = MIN (ret, tret);
3561 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3563 /* The innermost expression P may have originally had
3564 TREE_SIDE_EFFECTS set which would have caused all the outer
3565 expressions in *EXPR_P leading to P to also have had
3566 TREE_SIDE_EFFECTS set. */
3567 recalculate_side_effects (t);
3570 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3571 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3573 canonicalize_component_ref (expr_p);
3576 expr_stack.release ();
3578 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3580 return ret;
3583 /* Gimplify the self modifying expression pointed to by EXPR_P
3584 (++, --, +=, -=).
3586 PRE_P points to the list where side effects that must happen before
3587 *EXPR_P should be stored.
3589 POST_P points to the list where side effects that must happen after
3590 *EXPR_P should be stored.
3592 WANT_VALUE is nonzero iff we want to use the value of this expression
3593 in another expression.
3595 ARITH_TYPE is the type the computation should be performed in. */
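/* E.g. a used "i++" gimplifies to roughly
     i.tmp = i;
     i = i.tmp + 1;
   with *EXPR_P replaced by i.tmp ("i.tmp" is a hypothetical temporary
   name), whereas prefix "++i" stays a single MODIFY_EXPR. */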
3597 enum gimplify_status
3598 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3599 bool want_value, tree arith_type)
3601 enum tree_code code;
3602 tree lhs, lvalue, rhs, t1;
3603 gimple_seq post = NULL, *orig_post_p = post_p;
3604 bool postfix;
3605 enum tree_code arith_code;
3606 enum gimplify_status ret;
3607 location_t loc = EXPR_LOCATION (*expr_p);
3609 code = TREE_CODE (*expr_p);
3611 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3612 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3614 /* Prefix or postfix? */
3615 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3616 /* Faster to treat as prefix if result is not used. */
3617 postfix = want_value;
3618 else
3619 postfix = false;
3621 /* For postfix, make sure the inner expression's post side effects
3622 are executed after side effects from this expression. */
3623 if (postfix)
3624 post_p = &post;
3626 /* Add or subtract? */
3627 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3628 arith_code = PLUS_EXPR;
3629 else
3630 arith_code = MINUS_EXPR;
3632 /* Gimplify the LHS into a GIMPLE lvalue. */
3633 lvalue = TREE_OPERAND (*expr_p, 0);
3634 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3635 if (ret == GS_ERROR)
3636 return ret;
3638 /* Extract the operands to the arithmetic operation. */
3639 lhs = lvalue;
3640 rhs = TREE_OPERAND (*expr_p, 1);
3642 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3643 that as the result value and in the postqueue operation. */
3644 if (postfix)
3646 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3647 if (ret == GS_ERROR)
3648 return ret;
3650 lhs = get_initialized_tmp_var (lhs, pre_p);
3653 /* For pointer increments, use POINTER_PLUS_EXPR. */
3654 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3656 rhs = convert_to_ptrofftype_loc (loc, rhs);
3657 if (arith_code == MINUS_EXPR)
3658 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3659 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3661 else
3662 t1 = fold_convert (TREE_TYPE (*expr_p),
3663 fold_build2 (arith_code, arith_type,
3664 fold_convert (arith_type, lhs),
3665 fold_convert (arith_type, rhs)));
3667 if (postfix)
3669 gimplify_assign (lvalue, t1, pre_p);
3670 gimplify_seq_add_seq (orig_post_p, post);
3671 *expr_p = lhs;
3672 return GS_ALL_DONE;
3674 else
3676 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3677 return GS_OK;
3681 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
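/* E.g. an object of variably sized type "char[n]" becomes roughly
     WITH_SIZE_EXPR <obj, n>
   so later passes know how many bytes the value occupies. */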
3683 static void
3684 maybe_with_size_expr (tree *expr_p)
3686 tree expr = *expr_p;
3687 tree type = TREE_TYPE (expr);
3688 tree size;
3690 /* If we've already wrapped this or the type is error_mark_node, we can't do
3691 anything. */
3692 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3693 || type == error_mark_node)
3694 return;
3696 /* If the size isn't known or is a constant, we have nothing to do. */
3697 size = TYPE_SIZE_UNIT (type);
3698 if (!size || poly_int_tree_p (size))
3699 return;
3701 /* Otherwise, make a WITH_SIZE_EXPR. */
3702 size = unshare_expr (size);
3703 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3704 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3707 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3708 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3709 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3710 gimplified to an SSA name. */
3712 enum gimplify_status
3713 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3714 bool allow_ssa)
3716 bool (*test) (tree);
3717 fallback_t fb;
3719 /* In general, we allow lvalues for function arguments to avoid
3720 extra overhead of copying large aggregates out of even larger
3721 aggregates into temporaries only to copy the temporaries to
3722 the argument list. Make optimizers happy by pulling out to
3723 temporaries those types that fit in registers. */
3724 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3725 test = is_gimple_val, fb = fb_rvalue;
3726 else
3728 test = is_gimple_lvalue, fb = fb_either;
3729 /* Also strip a TARGET_EXPR that would force an extra copy. */
3730 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3732 tree init = TARGET_EXPR_INITIAL (*arg_p);
3733 if (init
3734 && !VOID_TYPE_P (TREE_TYPE (init)))
3735 *arg_p = init;
3739 /* If this is a variable sized type, we must remember the size. */
3740 maybe_with_size_expr (arg_p);
3742 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3743 /* Make sure arguments have the same location as the function call
3744 itself. */
3745 protected_set_expr_location (*arg_p, call_location);
3747 /* There is a sequence point before a function call. Side effects in
3748 the argument list must occur before the actual call. So, when
3749 gimplifying arguments, force gimplify_expr to use an internal
3750 post queue which is then appended to the end of PRE_P. */
3751 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3754 /* Don't fold inside offloading or taskreg regions: it can break code by
3755 adding decl references that weren't in the source. We'll do it during
3756 omplower pass instead. */
3758 static bool
3759 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3761 struct gimplify_omp_ctx *ctx;
3762 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3763 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3764 return false;
3765 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3766 return false;
3767 /* Delay folding of builtins until the IL is in a consistent state
3768 so the diagnostic machinery can do a better job. */
3769 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3770 return false;
3771 return fold_stmt (gsi);
3774 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3775 WANT_VALUE is true if the result of the call is desired. */
3777 static enum gimplify_status
3778 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3780 tree fndecl, parms, p, fnptrtype;
3781 enum gimplify_status ret;
3782 int i, nargs;
3783 gcall *call;
3784 bool builtin_va_start_p = false;
3785 location_t loc = EXPR_LOCATION (*expr_p);
3787 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3789 /* For reliable diagnostics during inlining, it is necessary that
3790 every call_expr be annotated with file and line. */
3791 if (! EXPR_HAS_LOCATION (*expr_p))
3792 SET_EXPR_LOCATION (*expr_p, input_location);
3794 /* Gimplify internal functions created in the FEs. */
3795 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3797 if (want_value)
3798 return GS_ALL_DONE;
3800 nargs = call_expr_nargs (*expr_p);
3801 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3802 auto_vec<tree> vargs (nargs);
3804 if (ifn == IFN_ASSUME)
3806 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3808 /* If the [[assume (cond)]]; condition is simple
3809 enough and can be evaluated unconditionally
3810 without side-effects, expand it as
3811 if (!cond) __builtin_unreachable (); */
3812 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3813 *expr_p = build3 (COND_EXPR, void_type_node,
3814 CALL_EXPR_ARG (*expr_p, 0), void_node,
3815 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3816 fndecl, 0));
3817 return GS_OK;
3819 /* If not optimizing, ignore the assumptions. */
3820 if (!optimize || seen_error ())
3822 *expr_p = NULL_TREE;
3823 return GS_ALL_DONE;
3825 /* Temporarily, until gimple lowering, transform
3826 .ASSUME (cond);
3827 into:
3828 [[assume (guard)]]
3829 {
3830 guard = cond;
3831 }
3832 such that gimple lowering can outline the condition into
3833 a separate function easily. */
3834 tree guard = create_tmp_var (boolean_type_node);
3835 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3836 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3837 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3838 push_gimplify_context ();
3839 gimple_seq body = NULL;
3840 gimple *g = gimplify_and_return_first (*expr_p, &body);
3841 pop_gimplify_context (g);
3842 g = gimple_build_assume (guard, body);
3843 gimple_set_location (g, loc);
3844 gimplify_seq_add_stmt (pre_p, g);
3845 *expr_p = NULL_TREE;
3846 return GS_ALL_DONE;
3849 for (i = 0; i < nargs; i++)
3851 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3852 EXPR_LOCATION (*expr_p));
3853 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3856 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3857 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3858 gimplify_seq_add_stmt (pre_p, call);
3859 return GS_ALL_DONE;
3862 /* This may be a call to a builtin function.
3864 Builtin function calls may be transformed into different
3865 (and more efficient) builtin function calls under certain
3866 circumstances. Unfortunately, gimplification can muck things
3867 up enough that the builtin expanders are not aware that certain
3868 transformations are still valid.
3870 So we attempt transformation/gimplification of the call before
3871 we gimplify the CALL_EXPR. At this time we do not manage to
3872 transform all calls in the same manner as the expanders do, but
3873 we do transform most of them. */
3874 fndecl = get_callee_fndecl (*expr_p);
3875 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3876 switch (DECL_FUNCTION_CODE (fndecl))
3878 CASE_BUILT_IN_ALLOCA:
3879 /* If the call has been built for a variable-sized object, then we
3880 want to restore the stack level when the enclosing BIND_EXPR is
3881 exited to reclaim the allocated space; otherwise, we need to do
3882 precisely the opposite and preserve the latest stack level. */
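/* Concretely: the alloca backing a variable-length array 'int a[n]'
has CALL_ALLOCA_FOR_VAR_P set, so its space is reclaimed when the
enclosing scope is left (save_stack); an explicit user call to
alloca () must instead survive until function exit (keep_stack). */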
3883 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3884 gimplify_ctxp->save_stack = true;
3885 else
3886 gimplify_ctxp->keep_stack = true;
3887 break;
3889 case BUILT_IN_VA_START:
3891 builtin_va_start_p = true;
3892 if (call_expr_nargs (*expr_p) < 2)
3894 error ("too few arguments to function %<va_start%>");
3895 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3896 return GS_OK;
3899 if (fold_builtin_next_arg (*expr_p, true))
3901 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3902 return GS_OK;
3904 break;
3907 case BUILT_IN_EH_RETURN:
3908 cfun->calls_eh_return = true;
3909 break;
3911 case BUILT_IN_CLEAR_PADDING:
3912 if (call_expr_nargs (*expr_p) == 1)
3914 /* Remember the original type of the argument in an internal
3915 dummy second argument, as pointer conversions are useless in
3916 GIMPLE. Note that the call rebuilt below passes only this dummy
3917 second argument; no dummy third argument is appended here. */
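/* Illustrative effect: '__builtin_clear_padding (&s)' is rewritten as
'__builtin_clear_padding (&s, (typeof (&s)) 0)'; the dummy null pointer
merely preserves the original pointer type for later folding. */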
3918 p = CALL_EXPR_ARG (*expr_p, 0);
3919 *expr_p
3920 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3921 build_zero_cst (TREE_TYPE (p)));
3922 return GS_OK;
3924 break;
3926 default:
3929 if (fndecl && fndecl_built_in_p (fndecl))
3931 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3932 if (new_tree && new_tree != *expr_p)
3934 /* There was a transformation of this call which computes the
3935 same value, but in a more efficient way. Return and try
3936 again. */
3937 *expr_p = new_tree;
3938 return GS_OK;
3942 /* Remember the original function pointer type. */
3943 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3945 if (flag_openmp
3946 && fndecl
3947 && cfun
3948 && (cfun->curr_properties & PROP_gimple_any) == 0)
3950 tree variant = omp_resolve_declare_variant (fndecl);
3951 if (variant != fndecl)
3952 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3955 /* There is a sequence point before the call, so any side effects in
3956 the calling expression must occur before the actual call. Force
3957 gimplify_expr to use an internal post queue. */
3958 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3959 is_gimple_call_addr, fb_rvalue);
3961 if (ret == GS_ERROR)
3962 return GS_ERROR;
3964 nargs = call_expr_nargs (*expr_p);
3966 /* Get argument types for verification. */
3967 fndecl = get_callee_fndecl (*expr_p);
3968 parms = NULL_TREE;
3969 if (fndecl)
3970 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3971 else
3972 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3974 if (fndecl && DECL_ARGUMENTS (fndecl))
3975 p = DECL_ARGUMENTS (fndecl);
3976 else if (parms)
3977 p = parms;
3978 else
3979 p = NULL_TREE;
3980 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3983 /* If the last argument is __builtin_va_arg_pack () and it is not
3984 passed as a named argument, decrease the number of CALL_EXPR
3985 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
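/* Example: in an always_inline varargs wrapper,
'foo (x, __builtin_va_arg_pack ())' becomes 'foo (x)' with
CALL_EXPR_VA_ARG_PACK set on the call; the caller's remaining
arguments are substituted back when the wrapper is inlined. */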
3986 if (!p
3987 && i < nargs
3988 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3990 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3991 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3993 if (last_arg_fndecl
3994 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3996 tree call = *expr_p;
3998 --nargs;
3999 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
4000 CALL_EXPR_FN (call),
4001 nargs, CALL_EXPR_ARGP (call));
4003 /* Copy all CALL_EXPR flags, location and block, except the
4004 CALL_EXPR_VA_ARG_PACK flag. */
4005 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
4006 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
4007 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
4008 = CALL_EXPR_RETURN_SLOT_OPT (call);
4009 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
4010 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
4012 /* Set CALL_EXPR_VA_ARG_PACK. */
4013 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
4017 /* If the call returns twice then after building the CFG the call
4018 argument computations will no longer dominate the call because
4019 we add an abnormal incoming edge to the call. So do not use SSA
4020 vars there. */
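/* setjmp is the canonical ECF_RETURNS_TWICE function. */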
4021 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
4023 /* Gimplify the function arguments. */
4024 if (nargs > 0)
4026 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
4027 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
4028 PUSH_ARGS_REVERSED ? i-- : i++)
4030 enum gimplify_status t;
4032 /* Avoid gimplifying the second argument to va_start, which needs to
4033 be the plain PARM_DECL. */
4034 if ((i != 1) || !builtin_va_start_p)
4036 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
4037 EXPR_LOCATION (*expr_p), ! returns_twice);
4039 if (t == GS_ERROR)
4040 ret = GS_ERROR;
4045 /* Gimplify the static chain. */
4046 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
4048 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
4049 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
4050 else
4052 enum gimplify_status t;
4053 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
4054 EXPR_LOCATION (*expr_p), ! returns_twice);
4055 if (t == GS_ERROR)
4056 ret = GS_ERROR;
4060 /* Verify the function result. */
4061 if (want_value && fndecl
4062 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
4064 error_at (loc, "using result of function returning %<void%>");
4065 ret = GS_ERROR;
4068 /* Try this again in case gimplification exposed something. */
4069 if (ret != GS_ERROR)
4071 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
4073 if (new_tree && new_tree != *expr_p)
4075 /* There was a transformation of this call which computes the
4076 same value, but in a more efficient way. Return and try
4077 again. */
4078 *expr_p = new_tree;
4079 return GS_OK;
4082 else
4084 *expr_p = error_mark_node;
4085 return GS_ERROR;
4088 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
4089 decl. This allows us to eliminate redundant or useless
4090 calls to "const" functions. */
4091 if (TREE_CODE (*expr_p) == CALL_EXPR)
4093 int flags = call_expr_flags (*expr_p);
4094 if (flags & (ECF_CONST | ECF_PURE)
4095 /* An infinite loop is considered a side effect. */
4096 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
4097 TREE_SIDE_EFFECTS (*expr_p) = 0;
4100 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
4101 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
4102 form and delegate the creation of a GIMPLE_CALL to
4103 gimplify_modify_expr. This is always possible because when
4104 WANT_VALUE is true, the caller wants the result of this call into
4105 a temporary, which means that we will emit an INIT_EXPR in
4106 internal_get_tmp_var which will then be handled by
4107 gimplify_modify_expr. */
4108 if (!want_value)
4110 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
4111 have to do is replicate it as a GIMPLE_CALL tuple. */
4112 gimple_stmt_iterator gsi;
4113 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
4114 notice_special_calls (call);
4115 gimplify_seq_add_stmt (pre_p, call);
4116 gsi = gsi_last (*pre_p);
4117 maybe_fold_stmt (&gsi);
4118 *expr_p = NULL_TREE;
4120 else
4121 /* Remember the original function type. */
4122 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
4123 CALL_EXPR_FN (*expr_p));
4125 return ret;
4128 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
4129 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
4131 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
4132 condition is true or false, respectively. If null, we should generate
4133 our own to skip over the evaluation of this specific expression.
4135 LOCUS is the source location of the COND_EXPR.
4137 This function is the tree equivalent of do_jump.
4139 shortcut_cond_r should only be called by shortcut_cond_expr. */
4141 static tree
4142 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
4143 location_t locus)
4145 tree local_label = NULL_TREE;
4146 tree t, expr = NULL;
4148 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
4149 retain the shortcut semantics. Just insert the gotos here;
4150 shortcut_cond_expr will append the real blocks later. */
4151 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4153 location_t new_locus;
4155 /* Turn if (a && b) into
4157 if (a); else goto no;
4158 if (b) goto yes; else goto no;
4159 (no:) */
4161 if (false_label_p == NULL)
4162 false_label_p = &local_label;
4164 /* Keep the original source location on the first 'if'. */
4165 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
4166 append_to_statement_list (t, &expr);
4168 /* Set the source location of the && on the second 'if'. */
4169 new_locus = rexpr_location (pred, locus);
4170 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4171 new_locus);
4172 append_to_statement_list (t, &expr);
4174 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4176 location_t new_locus;
4178 /* Turn if (a || b) into
4180 if (a) goto yes;
4181 if (b) goto yes; else goto no;
4182 (yes:) */
4184 if (true_label_p == NULL)
4185 true_label_p = &local_label;
4187 /* Keep the original source location on the first 'if'. */
4188 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
4189 append_to_statement_list (t, &expr);
4191 /* Set the source location of the || on the second 'if'. */
4192 new_locus = rexpr_location (pred, locus);
4193 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4194 new_locus);
4195 append_to_statement_list (t, &expr);
4197 else if (TREE_CODE (pred) == COND_EXPR
4198 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
4199 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
4201 location_t new_locus;
4203 /* As long as we're messing with gotos, turn if (a ? b : c) into
4204 if (a)
4205 if (b) goto yes; else goto no;
4206 else
4207 if (c) goto yes; else goto no;
4209 Don't do this if one of the arms has void type, which can happen
4210 in C++ when the arm is throw. */
4212 /* Keep the original source location on the first 'if'. Set the source
4213 location of the ? on the second 'if'. */
4214 new_locus = rexpr_location (pred, locus);
4215 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
4216 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
4217 false_label_p, locus),
4218 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
4219 false_label_p, new_locus));
4221 else
4223 expr = build3 (COND_EXPR, void_type_node, pred,
4224 build_and_jump (true_label_p),
4225 build_and_jump (false_label_p));
4226 SET_EXPR_LOCATION (expr, locus);
4229 if (local_label)
4231 t = build1 (LABEL_EXPR, void_type_node, local_label);
4232 append_to_statement_list (t, &expr);
4235 return expr;
4238 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4239 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4240 statement, if it is the last one. Otherwise, return NULL. */
4242 static tree
4243 find_goto (tree expr)
4245 if (!expr)
4246 return NULL_TREE;
4248 if (TREE_CODE (expr) == GOTO_EXPR)
4249 return expr;
4251 if (TREE_CODE (expr) != STATEMENT_LIST)
4252 return NULL_TREE;
4254 tree_stmt_iterator i = tsi_start (expr);
4256 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4257 tsi_next (&i);
4259 if (!tsi_one_before_end_p (i))
4260 return NULL_TREE;
4262 return find_goto (tsi_stmt (i));
4265 /* Same as find_goto, except that it returns NULL if the destination
4266 is not a LABEL_DECL. */
4268 static inline tree
4269 find_goto_label (tree expr)
4271 tree dest = find_goto (expr);
4272 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4273 return dest;
4274 return NULL_TREE;
4277 /* Given a conditional expression EXPR with short-circuit boolean
4278 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4279 predicate apart into the equivalent sequence of conditionals. */
4281 static tree
4282 shortcut_cond_expr (tree expr)
4284 tree pred = TREE_OPERAND (expr, 0);
4285 tree then_ = TREE_OPERAND (expr, 1);
4286 tree else_ = TREE_OPERAND (expr, 2);
4287 tree true_label, false_label, end_label, t;
4288 tree *true_label_p;
4289 tree *false_label_p;
4290 bool emit_end, emit_false, jump_over_else;
4291 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4292 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4294 /* First do simple transformations. */
4295 if (!else_se)
4297 /* If there is no 'else', turn
4298 if (a && b) then c
4299 into
4300 if (a) if (b) then c. */
4301 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4303 /* Keep the original source location on the first 'if'. */
4304 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4305 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4306 /* Set the source location of the && on the second 'if'. */
4307 if (rexpr_has_location (pred))
4308 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4309 then_ = shortcut_cond_expr (expr);
4310 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4311 pred = TREE_OPERAND (pred, 0);
4312 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4313 SET_EXPR_LOCATION (expr, locus);
4317 if (!then_se)
4319 /* If there is no 'then', turn
4320 if (a || b); else d
4321 into
4322 if (a); else if (b); else d. */
4323 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4325 /* Keep the original source location on the first 'if'. */
4326 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4327 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4328 /* Set the source location of the || on the second 'if'. */
4329 if (rexpr_has_location (pred))
4330 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4331 else_ = shortcut_cond_expr (expr);
4332 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4333 pred = TREE_OPERAND (pred, 0);
4334 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4335 SET_EXPR_LOCATION (expr, locus);
4339 /* If we're done, great. */
4340 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4341 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4342 return expr;
4344 /* Otherwise we need to mess with gotos. Change
4345 if (a) c; else d;
4346 to
4347 if (a); else goto no;
4348 c; goto end;
4349 no: d; end:
4350 and recursively gimplify the condition. */
4352 true_label = false_label = end_label = NULL_TREE;
4354 /* If our arms just jump somewhere, hijack those labels so we don't
4355 generate jumps to jumps. */
4357 if (tree then_goto = find_goto_label (then_))
4359 true_label = GOTO_DESTINATION (then_goto);
4360 then_ = NULL;
4361 then_se = false;
4364 if (tree else_goto = find_goto_label (else_))
4366 false_label = GOTO_DESTINATION (else_goto);
4367 else_ = NULL;
4368 else_se = false;
4371 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4372 if (true_label)
4373 true_label_p = &true_label;
4374 else
4375 true_label_p = NULL;
4377 /* The 'else' branch also needs a label if it contains interesting code. */
4378 if (false_label || else_se)
4379 false_label_p = &false_label;
4380 else
4381 false_label_p = NULL;
4383 /* If there was nothing else in our arms, just forward the label(s). */
4384 if (!then_se && !else_se)
4385 return shortcut_cond_r (pred, true_label_p, false_label_p,
4386 EXPR_LOC_OR_LOC (expr, input_location));
4388 /* If our last subexpression already has a terminal label, reuse it. */
4389 if (else_se)
4390 t = expr_last (else_);
4391 else if (then_se)
4392 t = expr_last (then_);
4393 else
4394 t = NULL;
4395 if (t && TREE_CODE (t) == LABEL_EXPR)
4396 end_label = LABEL_EXPR_LABEL (t);
4398 /* If we don't care about jumping to the 'else' branch, jump to the end
4399 if the condition is false. */
4400 if (!false_label_p)
4401 false_label_p = &end_label;
4403 /* We only want to emit these labels if we aren't hijacking them. */
4404 emit_end = (end_label == NULL_TREE);
4405 emit_false = (false_label == NULL_TREE);
4407 /* We only emit the jump over the else clause if we have to--if the
4408 then clause may fall through. Otherwise we can wind up with a
4409 useless jump and a useless label at the end of gimplified code,
4410 which will cause us to think that this conditional as a whole
4411 falls through even if it doesn't. If we then inline a function
4412 which ends with such a condition, that can cause us to issue an
4413 inappropriate warning about control reaching the end of a
4414 non-void function. */
4415 jump_over_else = block_may_fallthru (then_);
4417 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4418 EXPR_LOC_OR_LOC (expr, input_location));
4420 expr = NULL;
4421 append_to_statement_list (pred, &expr);
4423 append_to_statement_list (then_, &expr);
4424 if (else_se)
4426 if (jump_over_else)
4428 tree last = expr_last (expr);
4429 t = build_and_jump (&end_label);
4430 if (rexpr_has_location (last))
4431 SET_EXPR_LOCATION (t, rexpr_location (last));
4432 append_to_statement_list (t, &expr);
4434 if (emit_false)
4436 t = build1 (LABEL_EXPR, void_type_node, false_label);
4437 append_to_statement_list (t, &expr);
4439 append_to_statement_list (else_, &expr);
4441 if (emit_end && end_label)
4443 t = build1 (LABEL_EXPR, void_type_node, end_label);
4444 append_to_statement_list (t, &expr);
4447 return expr;
4450 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4452 tree
4453 gimple_boolify (tree expr)
4455 tree type = TREE_TYPE (expr);
4456 location_t loc = EXPR_LOCATION (expr);
4458 if (TREE_CODE (expr) == NE_EXPR
4459 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4460 && integer_zerop (TREE_OPERAND (expr, 1)))
4462 tree call = TREE_OPERAND (expr, 0);
4463 tree fn = get_callee_fndecl (call);
4465 /* For __builtin_expect ((long) (x), y) recurse into x as well
4466 if x is truth_value_p. */
4467 if (fn
4468 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4469 && call_expr_nargs (call) == 2)
4471 tree arg = CALL_EXPR_ARG (call, 0);
4472 if (arg)
4474 if (TREE_CODE (arg) == NOP_EXPR
4475 && TREE_TYPE (arg) == TREE_TYPE (call))
4476 arg = TREE_OPERAND (arg, 0);
4477 if (truth_value_p (TREE_CODE (arg)))
4479 arg = gimple_boolify (arg);
4480 CALL_EXPR_ARG (call, 0)
4481 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4487 switch (TREE_CODE (expr))
4489 case TRUTH_AND_EXPR:
4490 case TRUTH_OR_EXPR:
4491 case TRUTH_XOR_EXPR:
4492 case TRUTH_ANDIF_EXPR:
4493 case TRUTH_ORIF_EXPR:
4494 /* Also boolify the arguments of truth exprs. */
4495 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4496 /* FALLTHRU */
4498 case TRUTH_NOT_EXPR:
4499 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4501 /* These expressions always produce boolean results. */
4502 if (TREE_CODE (type) != BOOLEAN_TYPE)
4503 TREE_TYPE (expr) = boolean_type_node;
4504 return expr;
4506 case ANNOTATE_EXPR:
4507 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4509 case annot_expr_ivdep_kind:
4510 case annot_expr_unroll_kind:
4511 case annot_expr_no_vector_kind:
4512 case annot_expr_vector_kind:
4513 case annot_expr_parallel_kind:
4514 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4515 if (TREE_CODE (type) != BOOLEAN_TYPE)
4516 TREE_TYPE (expr) = boolean_type_node;
4517 return expr;
4518 default:
4519 gcc_unreachable ();
4522 default:
4523 if (COMPARISON_CLASS_P (expr))
4525 /* These expressions always produce boolean results. */
4526 if (TREE_CODE (type) != BOOLEAN_TYPE)
4527 TREE_TYPE (expr) = boolean_type_node;
4528 return expr;
4530 /* Other expressions that get here must have boolean values, but
4531 might need to be converted to the appropriate mode. */
4532 if (TREE_CODE (type) == BOOLEAN_TYPE)
4533 return expr;
4534 return fold_convert_loc (loc, boolean_type_node, expr);
4538 /* Given a conditional expression *EXPR_P without side effects, gimplify
4539 its operands. New statements are inserted into PRE_P. */
4541 static enum gimplify_status
4542 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4544 tree expr = *expr_p, cond;
4545 enum gimplify_status ret, tret;
4546 enum tree_code code;
4548 cond = gimple_boolify (COND_EXPR_COND (expr));
4550 /* We need to handle && and || specially, as their gimplification
4551 creates a pure cond_expr, which would otherwise lead to an infinite cycle. */
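/* That is, 'a && b' is rewritten in place to the non-short-circuiting
TRUTH_AND_EXPR so that both operands are simply reduced to
is_gimple_val values below instead of spawning another COND_EXPR. */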
4552 code = TREE_CODE (cond);
4553 if (code == TRUTH_ANDIF_EXPR)
4554 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4555 else if (code == TRUTH_ORIF_EXPR)
4556 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4557 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4558 COND_EXPR_COND (*expr_p) = cond;
4560 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4561 is_gimple_val, fb_rvalue);
4562 ret = MIN (ret, tret);
4563 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4564 is_gimple_val, fb_rvalue);
4566 return MIN (ret, tret);
4569 /* Return true if evaluating EXPR could trap.
4570 EXPR is GENERIC, while tree_could_trap_p can be called
4571 only on GIMPLE. */
4573 bool
4574 generic_expr_could_trap_p (tree expr)
4576 unsigned i, n;
4578 if (!expr || is_gimple_val (expr))
4579 return false;
4581 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4582 return true;
4584 n = TREE_OPERAND_LENGTH (expr);
4585 for (i = 0; i < n; i++)
4586 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4587 return true;
4589 return false;
4592 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4593 into
4595 if (p)            if (p)
4596 t1 = a;             a;
4597 else       or     else
4598 t1 = b;             b;
4599 t1;
4601 The second form is used when *EXPR_P is of type void.
4603 PRE_P points to the list where side effects that must happen before
4604 *EXPR_P should be stored. */
4606 static enum gimplify_status
4607 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4609 tree expr = *expr_p;
4610 tree type = TREE_TYPE (expr);
4611 location_t loc = EXPR_LOCATION (expr);
4612 tree tmp, arm1, arm2;
4613 enum gimplify_status ret;
4614 tree label_true, label_false, label_cont;
4615 bool have_then_clause_p, have_else_clause_p;
4616 gcond *cond_stmt;
4617 enum tree_code pred_code;
4618 gimple_seq seq = NULL;
4620 /* If this COND_EXPR has a value, copy the values into a temporary within
4621 the arms. */
4622 if (!VOID_TYPE_P (type))
4624 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4625 tree result;
4627 /* If either an rvalue is ok or we do not require an lvalue, create the
4628 temporary. But we cannot do that if the type is addressable. */
4629 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4630 && !TREE_ADDRESSABLE (type))
4632 if (gimplify_ctxp->allow_rhs_cond_expr
4633 /* If either branch has side effects or could trap, it can't be
4634 evaluated unconditionally. */
4635 && !TREE_SIDE_EFFECTS (then_)
4636 && !generic_expr_could_trap_p (then_)
4637 && !TREE_SIDE_EFFECTS (else_)
4638 && !generic_expr_could_trap_p (else_))
4639 return gimplify_pure_cond_expr (expr_p, pre_p);
4641 tmp = create_tmp_var (type, "iftmp");
4642 result = tmp;
4645 /* Otherwise, only create and copy references to the values. */
4646 else
4648 type = build_pointer_type (type);
4650 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4651 then_ = build_fold_addr_expr_loc (loc, then_);
4653 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4654 else_ = build_fold_addr_expr_loc (loc, else_);
4656 expr
4657 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4659 tmp = create_tmp_var (type, "iftmp");
4660 result = build_simple_mem_ref_loc (loc, tmp);
4663 /* Build the new then clause, `tmp = then_;'. But don't build the
4664 assignment if the value is void; in C++ this can happen when the arm is a throw. */
4665 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4666 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4668 /* Similarly, build the new else clause, `tmp = else_;'. */
4669 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4670 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4672 TREE_TYPE (expr) = void_type_node;
4673 recalculate_side_effects (expr);
4675 /* Move the COND_EXPR to the prequeue. */
4676 gimplify_stmt (&expr, pre_p);
4678 *expr_p = result;
4679 return GS_ALL_DONE;
4682 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4683 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4684 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4685 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4687 /* Make sure the condition has BOOLEAN_TYPE. */
4688 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4690 /* Break apart && and || conditions. */
4691 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4692 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4694 expr = shortcut_cond_expr (expr);
4696 if (expr != *expr_p)
4698 *expr_p = expr;
4700 /* We can't rely on gimplify_expr to re-gimplify the expanded
4701 form properly, as cleanups might cause the target labels to be
4702 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4703 set up a conditional context. */
4704 gimple_push_condition ();
4705 gimplify_stmt (expr_p, &seq);
4706 gimple_pop_condition (pre_p);
4707 gimple_seq_add_seq (pre_p, seq);
4709 return GS_ALL_DONE;
4713 /* Now do the normal gimplification. */
4715 /* Gimplify condition. */
4716 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4717 is_gimple_condexpr_for_cond, fb_rvalue);
4718 if (ret == GS_ERROR)
4719 return GS_ERROR;
4720 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4722 gimple_push_condition ();
4724 have_then_clause_p = have_else_clause_p = false;
4725 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4726 if (label_true
4727 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4728 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4729 have different locations, otherwise we end up with incorrect
4730 location information on the branches. */
4731 && (optimize
4732 || !EXPR_HAS_LOCATION (expr)
4733 || !rexpr_has_location (label_true)
4734 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4736 have_then_clause_p = true;
4737 label_true = GOTO_DESTINATION (label_true);
4739 else
4740 label_true = create_artificial_label (UNKNOWN_LOCATION);
4741 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4742 if (label_false
4743 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4744 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4745 have different locations, otherwise we end up with incorrect
4746 location information on the branches. */
4747 && (optimize
4748 || !EXPR_HAS_LOCATION (expr)
4749 || !rexpr_has_location (label_false)
4750 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4752 have_else_clause_p = true;
4753 label_false = GOTO_DESTINATION (label_false);
4755 else
4756 label_false = create_artificial_label (UNKNOWN_LOCATION);
4758 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4759 &arm2);
4760 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4761 label_false);
4762 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4763 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4764 gimplify_seq_add_stmt (&seq, cond_stmt);
4765 gimple_stmt_iterator gsi = gsi_last (seq);
4766 maybe_fold_stmt (&gsi);
4768 label_cont = NULL_TREE;
4769 if (!have_then_clause_p)
4771 /* For if (...) {} else { code; } put label_true after
4772 the else block. */
4773 if (TREE_OPERAND (expr, 1) == NULL_TREE
4774 && !have_else_clause_p
4775 && TREE_OPERAND (expr, 2) != NULL_TREE)
4777 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4778 handling that label_cont == label_true can only be reached
4779 through fallthrough from { code; }. */
4780 if (integer_zerop (COND_EXPR_COND (expr)))
4781 UNUSED_LABEL_P (label_true) = 1;
4782 label_cont = label_true;
4784 else
4786 bool then_side_effects
4787 = (TREE_OPERAND (expr, 1)
4788 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4789 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4790 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4791 /* For if (...) { code; } else {} or
4792 if (...) { code; } else goto label; or
4793 if (...) { code; return; } else { ... }
4794 label_cont isn't needed. */
4795 if (!have_else_clause_p
4796 && TREE_OPERAND (expr, 2) != NULL_TREE
4797 && gimple_seq_may_fallthru (seq))
4799 gimple *g;
4800 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4802 /* For if (0) { non-side-effect-code } else { code }
4803 tell -Wimplicit-fallthrough handling that label_cont can
4804 only be reached through fallthrough from { code }. */
4805 if (integer_zerop (COND_EXPR_COND (expr)))
4807 UNUSED_LABEL_P (label_true) = 1;
4808 if (!then_side_effects)
4809 UNUSED_LABEL_P (label_cont) = 1;
4812 g = gimple_build_goto (label_cont);
4814 /* GIMPLE_COND's are very low level; they have embedded
4815 gotos. This particular embedded goto should not be marked
4816 with the location of the original COND_EXPR, as it would
4817 correspond to the COND_EXPR's condition, not the ELSE or the
4818 THEN arms. To avoid marking it with the wrong location, flag
4819 it as "no location". */
4820 gimple_set_do_not_emit_location (g);
4822 gimplify_seq_add_stmt (&seq, g);
4826 if (!have_else_clause_p)
4828 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4829 tell -Wimplicit-fallthrough handling that label_false can only be
4830 reached through fallthrough from { code }. */
4831 if (integer_nonzerop (COND_EXPR_COND (expr))
4832 && (TREE_OPERAND (expr, 2) == NULL_TREE
4833 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4834 UNUSED_LABEL_P (label_false) = 1;
4835 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4836 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4838 if (label_cont)
4839 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4841 gimple_pop_condition (pre_p);
4842 gimple_seq_add_seq (pre_p, seq);
4844 if (ret == GS_ERROR)
4845 ; /* Do nothing. */
4846 else if (have_then_clause_p || have_else_clause_p)
4847 ret = GS_ALL_DONE;
4848 else
4850 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4851 expr = TREE_OPERAND (expr, 0);
4852 gimplify_stmt (&expr, pre_p);
4855 *expr_p = NULL;
4856 return ret;
4859 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4860 to be marked addressable.
4862 We cannot rely on such an expression being directly markable if a temporary
4863 has been created by the gimplification. In this case, we create another
4864 temporary and initialize it with a copy, which will become a store after we
4865 mark it addressable. This can happen if the front-end passed us something
4866 that it could not mark addressable yet, like a Fortran pass-by-reference
4867 parameter (int) floatvar. */
4869 static void
4870 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4872 while (handled_component_p (*expr_p))
4873 expr_p = &TREE_OPERAND (*expr_p, 0);
4875 /* Do not allow an SSA name as the temporary. */
4876 if (is_gimple_reg (*expr_p))
4877 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4880 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4881 a call to __builtin_memcpy. */
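/* E.g. an aggregate assignment 'a = b' whose size is only known
dynamically (carried by a WITH_SIZE_EXPR, see above) is emitted as
'__builtin_memcpy (&a, &b, size)'. */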
4883 static enum gimplify_status
4884 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4885 gimple_seq *seq_p)
4887 tree t, to, to_ptr, from, from_ptr;
4888 gcall *gs;
4889 location_t loc = EXPR_LOCATION (*expr_p);
4891 to = TREE_OPERAND (*expr_p, 0);
4892 from = TREE_OPERAND (*expr_p, 1);
4893 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to)))
4894 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))));
4896 /* Mark the RHS addressable. Beware that it may not be possible to do so
4897 directly if a temporary has been created by the gimplification. */
4898 prepare_gimple_addressable (&from, seq_p);
4900 mark_addressable (from);
4901 from_ptr = build_fold_addr_expr_loc (loc, from);
4902 gimplify_arg (&from_ptr, seq_p, loc);
4904 mark_addressable (to);
4905 to_ptr = build_fold_addr_expr_loc (loc, to);
4906 gimplify_arg (&to_ptr, seq_p, loc);
4908 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4910 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4911 gimple_call_set_alloca_for_var (gs, true);
4913 if (want_value)
4915 /* tmp = memcpy() */
4916 t = create_tmp_var (TREE_TYPE (to_ptr));
4917 gimple_call_set_lhs (gs, t);
4918 gimplify_seq_add_stmt (seq_p, gs);
4920 *expr_p = build_simple_mem_ref (t);
4921 return GS_ALL_DONE;
4924 gimplify_seq_add_stmt (seq_p, gs);
4925 *expr_p = NULL;
4926 return GS_ALL_DONE;
4929 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4930 a call to __builtin_memset. In this case we know that the RHS is
4931 a CONSTRUCTOR with an empty element list. */
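/* E.g. 'a = (struct S){ };' for a suitable aggregate can be emitted as
'__builtin_memset (&a, 0, size)'. */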
4933 static enum gimplify_status
4934 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4935 gimple_seq *seq_p)
4937 tree t, from, to, to_ptr;
4938 gcall *gs;
4939 location_t loc = EXPR_LOCATION (*expr_p);
4941 /* Assert our assumptions, to abort instead of producing wrong code
4942 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4943 not be immediately exposed. */
4944 from = TREE_OPERAND (*expr_p, 1);
4945 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4946 from = TREE_OPERAND (from, 0);
4948 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4949 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4951 /* Now proceed. */
4952 to = TREE_OPERAND (*expr_p, 0);
4953 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to))));
4955 to_ptr = build_fold_addr_expr_loc (loc, to);
4956 gimplify_arg (&to_ptr, seq_p, loc);
4957 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4959 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4961 if (want_value)
4963 /* tmp = memset() */
4964 t = create_tmp_var (TREE_TYPE (to_ptr));
4965 gimple_call_set_lhs (gs, t);
4966 gimplify_seq_add_stmt (seq_p, gs);
4968 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4969 return GS_ALL_DONE;
4972 gimplify_seq_add_stmt (seq_p, gs);
4973 *expr_p = NULL;
4974 return GS_ALL_DONE;
4977 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4978 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4979 assignment. Return non-null if we detect a potential overlap. */
4981 struct gimplify_init_ctor_preeval_data
4983 /* The base decl of the lhs object. May be NULL, in which case we
4984 have to assume the lhs is indirect. */
4985 tree lhs_base_decl;
4987 /* The alias set of the lhs object. */
4988 alias_set_type lhs_alias_set;
4991 static tree
4992 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4994 struct gimplify_init_ctor_preeval_data *data
4995 = (struct gimplify_init_ctor_preeval_data *) xdata;
4996 tree t = *tp;
4998 /* If we find the base object, obviously we have overlap. */
4999 if (data->lhs_base_decl == t)
5000 return t;
5002 /* If the constructor component is indirect, determine if we have a
5003 potential overlap with the lhs. The only bits of information we
5004 have to go on at this point are addressability and alias sets. */
5005 if ((INDIRECT_REF_P (t)
5006 || TREE_CODE (t) == MEM_REF)
5007 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5008 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
5009 return t;
5011 /* If the constructor component is a call, determine if it can hide a
5012 potential overlap with the lhs through an INDIRECT_REF like above.
5013 ??? Ugh - this is completely broken. In fact this whole analysis
5014 doesn't look conservative. */
5015 if (TREE_CODE (t) == CALL_EXPR)
5017 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
5019 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
5020 if (POINTER_TYPE_P (TREE_VALUE (type))
5021 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5022 && alias_sets_conflict_p (data->lhs_alias_set,
5023 get_alias_set
5024 (TREE_TYPE (TREE_VALUE (type)))))
5025 return t;
5028 if (IS_TYPE_OR_DECL_P (t))
5029 *walk_subtrees = 0;
5030 return NULL;
5033 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5034 force values that overlap with the lhs (as described by *DATA)
5035 into temporaries. */
5037 static void
5038 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5039 struct gimplify_init_ctor_preeval_data *data)
5041 enum gimplify_status one;
5043 /* If the value is constant, then there's nothing to pre-evaluate. */
5044 if (TREE_CONSTANT (*expr_p))
5046 /* Ensure it does not have side effects, it might contain a reference to
5047 the object we're initializing. */
5048 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
5049 return;
5052 /* If the type has non-trivial constructors, we can't pre-evaluate. */
5053 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
5054 return;
5056 /* Recurse for nested constructors. */
5057 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
5059 unsigned HOST_WIDE_INT ix;
5060 constructor_elt *ce;
5061 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
5063 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
5064 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
5066 return;
5069 /* If this is a variable sized type, we must remember the size. */
5070 maybe_with_size_expr (expr_p);
5072 /* Gimplify the constructor element to something appropriate for the rhs
5073 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
5074 the gimplifier will consider this a store to memory. Doing this
5075 gimplification now means that we won't have to deal with complicated
5076 language-specific trees, nor trees like SAVE_EXPR that can induce
5077 exponential search behavior. */
5078 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
5079 if (one == GS_ERROR)
5081 *expr_p = NULL;
5082 return;
5085 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
5086 with the lhs, since "a = { .x=a }" doesn't make sense. This will
5087 always be true for all scalars, since is_gimple_mem_rhs insists on a
5088 temporary variable for them. */
5089 if (DECL_P (*expr_p))
5090 return;
5092 /* If this is of variable size, we have no choice but to assume it doesn't
5093 overlap since we can't make a temporary for it. */
5094 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
5095 return;
5097 /* Otherwise, we must search for overlap ... */
5098 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
5099 return;
5101 /* ... and if found, force the value into a temporary. */
5102 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
5105 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
5106 a RANGE_EXPR in a CONSTRUCTOR for an array.
5108 var = lower;
5109 loop_entry:
5110 object[var] = value;
5111 if (var == upper)
5112 goto loop_exit;
5113 var = var + 1;
5114 goto loop_entry;
5115 loop_exit:
5117 We increment var _after_ the loop exit check because we might otherwise
5118 fail when upper equals TYPE_MAX_VALUE of its type, as the increment would wrap.
5120 Note that we never have to deal with SAVE_EXPRs here, because this has
5121 already been taken care of for us, in gimplify_init_ctor_preeval(). */
5123 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
5124 gimple_seq *, bool);
5126 static void
5127 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
5128 tree value, tree array_elt_type,
5129 gimple_seq *pre_p, bool cleared)
5131 tree loop_entry_label, loop_exit_label, fall_thru_label;
5132 tree var, var_type, cref, tmp;
5134 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
5135 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
5136 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
5138 /* Create and initialize the index variable. */
5139 var_type = TREE_TYPE (upper);
5140 var = create_tmp_var (var_type);
5141 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
5143 /* Add the loop entry label. */
5144 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
5146 /* Build the reference. */
5147 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5148 var, NULL_TREE, NULL_TREE);
5150 /* If we are a constructor, just call gimplify_init_ctor_eval to do
5151 the store. Otherwise just assign value to the reference. */
5153 if (TREE_CODE (value) == CONSTRUCTOR)
5154 /* NB we might have to call ourselves recursively through
5155 gimplify_init_ctor_eval if the value is a constructor. */
5156 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5157 pre_p, cleared);
5158 else
5160 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
5161 != GS_ERROR)
5162 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
5165 /* We exit the loop when the index var is equal to the upper bound. */
5166 gimplify_seq_add_stmt (pre_p,
5167 gimple_build_cond (EQ_EXPR, var, upper,
5168 loop_exit_label, fall_thru_label));
5170 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
5172 /* Otherwise, increment the index var... */
5173 tmp = build2 (PLUS_EXPR, var_type, var,
5174 fold_convert (var_type, integer_one_node));
5175 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
5177 /* ...and jump back to the loop entry. */
5178 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
5180 /* Add the loop exit label. */
5181 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
5184 /* A subroutine of gimplify_init_constructor. Generate individual
5185 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
5186 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
5187 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5188 zeroed first. */
5190 static void
5191 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
5192 gimple_seq *pre_p, bool cleared)
5194 tree array_elt_type = NULL;
5195 unsigned HOST_WIDE_INT ix;
5196 tree purpose, value;
5198 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
5199 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
5201 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
5203 tree cref;
5205 /* NULL values are created above for gimplification errors. */
5206 if (value == NULL)
5207 continue;
5209 if (cleared && initializer_zerop (value))
5210 continue;
5212 /* ??? Here's to hoping the front end fills in all of the indices,
5213 so we don't have to figure out what's missing ourselves. */
5214 gcc_assert (purpose);
5216 /* Skip zero-sized fields, unless value has side-effects. This can
5217 happen with calls to functions returning an empty type, which
5218 we shouldn't discard. As a number of downstream passes don't
5219 expect sets of empty type fields, we rely on the gimplification of
5220 the MODIFY_EXPR we make below to drop the assignment statement. */
5221 if (!TREE_SIDE_EFFECTS (value)
5222 && TREE_CODE (purpose) == FIELD_DECL
5223 && is_empty_type (TREE_TYPE (purpose)))
5224 continue;
5226 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5227 whole range. */
5228 if (TREE_CODE (purpose) == RANGE_EXPR)
5230 tree lower = TREE_OPERAND (purpose, 0);
5231 tree upper = TREE_OPERAND (purpose, 1);
5233 /* If the lower bound is equal to upper, just treat it as if
5234 upper was the index. */
5235 if (simple_cst_equal (lower, upper))
5236 purpose = upper;
5237 else
5239 gimplify_init_ctor_eval_range (object, lower, upper, value,
5240 array_elt_type, pre_p, cleared);
5241 continue;
5245 if (array_elt_type)
5247 /* Do not use bitsizetype for ARRAY_REF indices. */
5248 if (TYPE_DOMAIN (TREE_TYPE (object)))
5249 purpose
5250 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5251 purpose);
5252 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5253 purpose, NULL_TREE, NULL_TREE);
5255 else
5257 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5258 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5259 unshare_expr (object), purpose, NULL_TREE);
5262 if (TREE_CODE (value) == CONSTRUCTOR
5263 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5264 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5265 pre_p, cleared);
5266 else
5268 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5269 gimplify_and_add (init, pre_p);
5270 ggc_free (init);
5275 /* Return the appropriate RHS predicate for this LHS. */
5277 gimple_predicate
5278 rhs_predicate_for (tree lhs)
5280 if (is_gimple_reg (lhs))
5281 return is_gimple_reg_rhs_or_call;
5282 else
5283 return is_gimple_mem_rhs_or_call;
5286 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5287 before the LHS has been gimplified. */
5289 static gimple_predicate
5290 initial_rhs_predicate_for (tree lhs)
5292 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5293 return is_gimple_reg_rhs_or_call;
5294 else
5295 return is_gimple_mem_rhs_or_call;
5298 /* Gimplify a C99 compound literal expression. This just means adding
5299 the DECL_EXPR before the current statement and using its anonymous
5300 decl instead. */
5302 static enum gimplify_status
5303 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5304 bool (*gimple_test_f) (tree),
5305 fallback_t fallback)
5307 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5308 tree decl = DECL_EXPR_DECL (decl_s);
5309 tree init = DECL_INITIAL (decl);
5310 /* Mark the decl as addressable if the compound literal
5311 expression is addressable now; otherwise it would be marked too late,
5312 after we gimplify the initialization expression. */
5313 if (TREE_ADDRESSABLE (*expr_p))
5314 TREE_ADDRESSABLE (decl) = 1;
5315 /* Otherwise, if we don't need an lvalue and have a literal, directly
5316 substitute it. Check that it matches the gimple predicate, as
5317 otherwise we'd generate a new temporary, and we may as well just
5318 use the decl we already have. */
5319 else if (!TREE_ADDRESSABLE (decl)
5320 && !TREE_THIS_VOLATILE (decl)
5321 && init
5322 && (fallback & fb_lvalue) == 0
5323 && gimple_test_f (init))
5325 *expr_p = init;
5326 return GS_OK;
5329 /* If the decl is not addressable, then it is being used in some
5330 expression or on the right hand side of a statement, and it can
5331 be put into a readonly data section. */
5332 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5333 TREE_READONLY (decl) = 1;
5335 /* This decl isn't mentioned in the enclosing block, so add it to the
5336 list of temps. FIXME it seems a bit of a kludge to say that
5337 anonymous artificial vars aren't pushed, but everything else is. */
5338 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5339 gimple_add_tmp_var (decl);
5341 gimplify_and_add (decl_s, pre_p);
5342 *expr_p = decl;
5343 return GS_OK;
5346 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5347 return a new CONSTRUCTOR if something changed. */
5349 static tree
5350 optimize_compound_literals_in_ctor (tree orig_ctor)
5352 tree ctor = orig_ctor;
5353 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5354 unsigned int idx, num = vec_safe_length (elts);
5356 for (idx = 0; idx < num; idx++)
5358 tree value = (*elts)[idx].value;
5359 tree newval = value;
5360 if (TREE_CODE (value) == CONSTRUCTOR)
5361 newval = optimize_compound_literals_in_ctor (value);
5362 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5364 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5365 tree decl = DECL_EXPR_DECL (decl_s);
5366 tree init = DECL_INITIAL (decl);
5368 if (!TREE_ADDRESSABLE (value)
5369 && !TREE_ADDRESSABLE (decl)
5370 && init
5371 && TREE_CODE (init) == CONSTRUCTOR)
5372 newval = optimize_compound_literals_in_ctor (init);
5374 if (newval == value)
5375 continue;
5377 if (ctor == orig_ctor)
5379 ctor = copy_node (orig_ctor);
5380 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5381 elts = CONSTRUCTOR_ELTS (ctor);
5383 (*elts)[idx].value = newval;
5385 return ctor;
5388 /* A subroutine of gimplify_modify_expr. Break out elements of a
5389 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5391 Note that we still need to clear any elements that don't have explicit
5392 initializers, so if not all elements are initialized we keep the
5393 original MODIFY_EXPR, we just remove all of the constructor elements.
5395 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5396 GS_ERROR if we would have to create a temporary when gimplifying
5397 this constructor. Otherwise, return GS_OK.
5399 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5401 static enum gimplify_status
5402 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5403 bool want_value, bool notify_temp_creation)
5405 tree object, ctor, type;
5406 enum gimplify_status ret;
5407 vec<constructor_elt, va_gc> *elts;
5408 bool cleared = false;
5409 bool is_empty_ctor = false;
5410 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5412 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5414 if (!notify_temp_creation)
5416 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5417 is_gimple_lvalue, fb_lvalue);
5418 if (ret == GS_ERROR)
5419 return ret;
5422 object = TREE_OPERAND (*expr_p, 0);
5423 ctor = TREE_OPERAND (*expr_p, 1)
5424 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5425 type = TREE_TYPE (ctor);
5426 elts = CONSTRUCTOR_ELTS (ctor);
5427 ret = GS_ALL_DONE;
5429 switch (TREE_CODE (type))
5431 case RECORD_TYPE:
5432 case UNION_TYPE:
5433 case QUAL_UNION_TYPE:
5434 case ARRAY_TYPE:
5436 /* Use readonly data for initializers of this or smaller size
5437 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5438 ratio. */
5439 const HOST_WIDE_INT min_unique_size = 64;
5440 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5441 is smaller than this, use readonly data. */
5442 const int unique_nonzero_ratio = 8;
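/* Illustration: a 4096-byte array initializer whose nonzero entries are
512 copies of a single value has a 1/512 unique-to-nonzero ratio, well
below the 1/8 threshold, so it is better initialized by a runtime loop
than promoted to a static readonly copy; initializers of 64 bytes or
less go to readonly data regardless of the ratio (given the other
promotion conditions below hold). */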
5443 /* True if a single access of the object must be ensured. This is the
5444 case if the target is volatile, the type is non-addressable and more
5445 than one field needs to be assigned. */
5446 const bool ensure_single_access
5447 = TREE_THIS_VOLATILE (object)
5448 && !TREE_ADDRESSABLE (type)
5449 && vec_safe_length (elts) > 1;
5450 struct gimplify_init_ctor_preeval_data preeval_data;
5451 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5452 HOST_WIDE_INT num_unique_nonzero_elements;
5453 bool complete_p, valid_const_initializer;
5455 /* Aggregate types must lower constructors to initialization of
5456 individual elements. The exception is that a CONSTRUCTOR node
5457 with no elements indicates zero-initialization of the whole. */
5458 if (vec_safe_is_empty (elts))
5460 if (notify_temp_creation)
5461 return GS_OK;
5463 /* The var will be initialized and so appear on lhs of
5464 assignment, it can't be TREE_READONLY anymore. */
5465 if (VAR_P (object))
5466 TREE_READONLY (object) = 0;
5468 is_empty_ctor = true;
5469 break;
5472 /* Fetch information about the constructor to direct later processing.
5473 We might want to make static versions of it in various cases, and
5474 can only do so if it is known to be a valid constant initializer. */
5475 valid_const_initializer
5476 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5477 &num_unique_nonzero_elements,
5478 &num_ctor_elements, &complete_p);
5480 /* If a const aggregate variable is being initialized, then it
5481 should never be a loss to promote the variable to be static. */
5482 if (valid_const_initializer
5483 && num_nonzero_elements > 1
5484 && TREE_READONLY (object)
5485 && VAR_P (object)
5486 && !DECL_REGISTER (object)
5487 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5488 || DECL_MERGEABLE (object))
5489 /* For ctors that have many repeated nonzero elements
5490 represented through RANGE_EXPRs, prefer initializing
5491 those through runtime loops over copies of large amounts
5492 of data from readonly data section. */
5493 && (num_unique_nonzero_elements
5494 > num_nonzero_elements / unique_nonzero_ratio
5495 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5496 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5498 if (notify_temp_creation)
5499 return GS_ERROR;
5501 DECL_INITIAL (object) = ctor;
5502 TREE_STATIC (object) = 1;
5503 if (!DECL_NAME (object))
5504 DECL_NAME (object) = create_tmp_var_name ("C");
5505 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5507 /* ??? C++ doesn't automatically append a .<number> to the
5508 assembler name, and even when it does, it looks at FE private
5509 data structures to figure out what that number should be,
5510 which are not set for this variable. I suppose this is
5511 important for local statics for inline functions, which aren't
5512 "local" in the object file sense. So in order to get a unique
5513 TU-local symbol, we must invoke the lhd version now. */
5514 lhd_set_decl_assembler_name (object);
5516 *expr_p = NULL_TREE;
5517 break;
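/* As an illustrative sketch (the name C.0 is hypothetical), a local

     const int tbl[4] = { 1, 2, 3, 4 };

   that satisfies the conditions above is promoted here: the variable
   itself is made TREE_STATIC with the CONSTRUCTOR as its DECL_INITIAL,
   so it ends up as a TU-local readonly object (approximately a
   file-scope "static const int C.0[4] = { 1, 2, 3, 4 };") and no
   per-element stores are emitted at runtime.  */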
5520 /* The var will be initialized and so appear on the lhs of an
5521 assignment; it can't be TREE_READONLY anymore. */
5522 if (VAR_P (object) && !notify_temp_creation)
5523 TREE_READONLY (object) = 0;
5525 /* If there are "lots" of initialized elements, even discounting
5526 those that are not address constants (and thus *must* be
5527 computed at runtime), then partition the constructor into
5528 constant and non-constant parts. Block copy the constant
5529 parts in, then generate code for the non-constant parts. */
5530 /* TODO. There's code in cp/typeck.cc to do this. */
5532 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5533 /* store_constructor will ignore the clearing of variable-sized
5534 objects. Initializers for such objects must explicitly set
5535 every field that needs to be set. */
5536 cleared = false;
5537 else if (!complete_p)
5538 /* If the constructor isn't complete, clear the whole object
5539 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5541 ??? This ought not to be needed. For any element not present
5542 in the initializer, we should simply set it to zero. Except
5543 we'd need to *find* the elements that are not present, and that
5544 requires trickery to avoid quadratic compile-time behavior in
5545 large cases or excessive memory use in small cases. */
5546 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5547 else if (num_ctor_elements - num_nonzero_elements
5548 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5549 && num_nonzero_elements < num_ctor_elements / 4)
5550 /* If there are "lots" of zeros, it's more efficient to clear
5551 the memory and then set the nonzero elements. */
5552 cleared = true;
5553 else if (ensure_single_access && num_nonzero_elements == 0)
5554 /* If a single access to the target must be ensured and all elements
5555 are zero, then it's optimal to clear whatever their number. */
5556 cleared = true;
5557 else
5558 cleared = false;
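/* Illustrative example of the decision above: for something like

     int a[100] = { 1 };

   the CONSTRUCTOR is incomplete (99 elements are implied zeros), so
   CLEARED is normally set: the whole object is zeroed en masse and,
   approximately, only "a[0] = 1;" is stored explicitly afterwards.  */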
5560 /* If there are "lots" of initialized elements, and all of them
5561 are valid address constants, then the entire initializer can
5562 be dropped to memory, and then memcpy'd out. Don't do this
5563 for sparse arrays, though, as it's more efficient to follow
5564 the standard CONSTRUCTOR behavior of memset followed by
5565 individual element initialization. Also don't do this for small
5566 all-zero initializers (which aren't big enough to merit
5567 clearing), and don't try to make bitwise copies of
5568 TREE_ADDRESSABLE types. */
5569 if (valid_const_initializer
5570 && complete_p
5571 && !(cleared || num_nonzero_elements == 0)
5572 && !TREE_ADDRESSABLE (type))
5574 HOST_WIDE_INT size = int_size_in_bytes (type);
5575 unsigned int align;
5577 /* ??? We can still get unbounded array types, at least
5578 from the C++ front end. This seems wrong, but attempt
5579 to work around it for now. */
5580 if (size < 0)
5582 size = int_size_in_bytes (TREE_TYPE (object));
5583 if (size >= 0)
5584 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5587 /* Find the maximum alignment we can assume for the object. */
5588 /* ??? Make use of DECL_OFFSET_ALIGN. */
5589 if (DECL_P (object))
5590 align = DECL_ALIGN (object);
5591 else
5592 align = TYPE_ALIGN (type);
5594 /* Do a block move either if the size is so small as to make
5595 each individual move a sub-unit move on average, or if it
5596 is so large as to make individual moves inefficient. */
5597 if (size > 0
5598 && num_nonzero_elements > 1
5599 /* For ctors that have many repeated nonzero elements
5600 represented through RANGE_EXPRs, prefer initializing
5601 those through runtime loops over copies of large amounts
5602 of data from readonly data section. */
5603 && (num_unique_nonzero_elements
5604 > num_nonzero_elements / unique_nonzero_ratio
5605 || size <= min_unique_size)
5606 && (size < num_nonzero_elements
5607 || !can_move_by_pieces (size, align)))
5609 if (notify_temp_creation)
5610 return GS_ERROR;
5612 walk_tree (&ctor, force_labels_r, NULL, NULL);
5613 ctor = tree_output_constant_def (ctor);
5614 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5615 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5616 TREE_OPERAND (*expr_p, 1) = ctor;
5618 /* This is no longer an assignment of a CONSTRUCTOR, but
5619 we still may have processing to do on the LHS. So
5620 pretend we didn't do anything here to let that happen. */
5621 return GS_UNHANDLED;
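/* Illustrative sketch (the label .LC0 is hypothetical): a large,
   complete, all-constant initializer such as

     int a[64] = { 3, 1, 4, 1, 5, ... };

   is emitted once as a readonly constant-pool object here, and the
   statement becomes a single aggregate copy from it (later expanded
   as a block move) instead of 64 scalar stores.  */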
5625 /* If a single access to the target must be ensured and there are
5626 nonzero elements or the zero elements are not assigned en masse,
5627 initialize the target from a temporary. */
5628 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5630 if (notify_temp_creation)
5631 return GS_ERROR;
5633 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5634 TREE_OPERAND (*expr_p, 0) = temp;
5635 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5636 *expr_p,
5637 build2 (MODIFY_EXPR, void_type_node,
5638 object, temp));
5639 return GS_OK;
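/* Illustrative sketch (names are hypothetical): for a volatile
   aggregate target,

     volatile struct S s = { 1, 2 };

   is rewritten here approximately as

     struct S tmp = { 1, 2 };
     s = tmp;

   so the volatile object is written with a single store rather than
   field by field.  */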
5642 if (notify_temp_creation)
5643 return GS_OK;
5645 /* If there are nonzero elements and if needed, pre-evaluate to capture
5646 elements overlapping with the lhs into temporaries. We must do this
5647 before clearing to fetch the values before they are zeroed-out. */
5648 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5650 preeval_data.lhs_base_decl = get_base_address (object);
5651 if (!DECL_P (preeval_data.lhs_base_decl))
5652 preeval_data.lhs_base_decl = NULL;
5653 preeval_data.lhs_alias_set = get_alias_set (object);
5655 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5656 pre_p, post_p, &preeval_data);
5659 bool ctor_has_side_effects_p
5660 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5662 if (cleared)
5664 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5665 Note that we still have to gimplify, in order to handle the
5666 case of variable sized types. Avoid shared tree structures. */
5667 CONSTRUCTOR_ELTS (ctor) = NULL;
5668 TREE_SIDE_EFFECTS (ctor) = 0;
5669 object = unshare_expr (object);
5670 gimplify_stmt (expr_p, pre_p);
5673 /* If we have not block cleared the object, or if there are nonzero
5674 elements in the constructor, or if the constructor has side effects,
5675 add assignments to the individual scalar fields of the object. */
5676 if (!cleared
5677 || num_nonzero_elements > 0
5678 || ctor_has_side_effects_p)
5679 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5681 *expr_p = NULL_TREE;
5683 break;
5685 case COMPLEX_TYPE:
5687 tree r, i;
5689 if (notify_temp_creation)
5690 return GS_OK;
5692 /* Extract the real and imaginary parts out of the ctor. */
5693 gcc_assert (elts->length () == 2);
5694 r = (*elts)[0].value;
5695 i = (*elts)[1].value;
5696 if (r == NULL || i == NULL)
5698 tree zero = build_zero_cst (TREE_TYPE (type));
5699 if (r == NULL)
5700 r = zero;
5701 if (i == NULL)
5702 i = zero;
5705 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5706 represent creation of a complex value. */
5707 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5709 ctor = build_complex (type, r, i);
5710 TREE_OPERAND (*expr_p, 1) = ctor;
5712 else
5714 ctor = build2 (COMPLEX_EXPR, type, r, i);
5715 TREE_OPERAND (*expr_p, 1) = ctor;
5716 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5717 pre_p,
5718 post_p,
5719 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5720 fb_rvalue);
5723 break;
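/* Illustrative: with constant parts, a ctor like { 1.0, 2.0 } simply
   becomes the COMPLEX_CST 1.0 + 2.0i; with variable parts r and i,
   the initializer is rewritten approximately as

     c = COMPLEX_EXPR <r, i>;  */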
5725 case VECTOR_TYPE:
5727 unsigned HOST_WIDE_INT ix;
5728 constructor_elt *ce;
5730 if (notify_temp_creation)
5731 return GS_OK;
5733 /* Vector types use CONSTRUCTOR all the way through gimple
5734 compilation as a general initializer. */
5735 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5737 enum gimplify_status tret;
5738 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5739 fb_rvalue);
5740 if (tret == GS_ERROR)
5741 ret = GS_ERROR;
5742 else if (TREE_STATIC (ctor)
5743 && !initializer_constant_valid_p (ce->value,
5744 TREE_TYPE (ce->value)))
5745 TREE_STATIC (ctor) = 0;
5747 recompute_constructor_flags (ctor);
5749 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5750 if (TREE_CONSTANT (ctor))
5752 bool constant_p = true;
5753 tree value;
5755 /* Even when ctor is constant, it might contain non-*_CST
5756 elements, such as addresses or trapping values like
5757 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5758 in VECTOR_CST nodes. */
5759 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5760 if (!CONSTANT_CLASS_P (value))
5762 constant_p = false;
5763 break;
5766 if (constant_p)
5768 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5769 break;
5773 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5774 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5776 break;
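/* Illustrative: a constant { 1, 2, 3, 4 } for a V4SI variable folds
   to a single VECTOR_CST; if an element is non-constant, the
   CONSTRUCTOR survives into GIMPLE and, for a non-register LHS, is
   first evaluated into a formal temporary.  */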
5778 default:
5779 /* So how did we get a CONSTRUCTOR for a scalar type? */
5780 gcc_unreachable ();
5783 if (ret == GS_ERROR)
5784 return GS_ERROR;
5785 /* If we have gimplified both sides of the initializer but have
5786 not emitted an assignment, do so now. */
5787 if (*expr_p
5788 /* If the type is an empty type, we don't need to emit the
5789 assignment. */
5790 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5792 tree lhs = TREE_OPERAND (*expr_p, 0);
5793 tree rhs = TREE_OPERAND (*expr_p, 1);
5794 if (want_value && object == lhs)
5795 lhs = unshare_expr (lhs);
5796 gassign *init = gimple_build_assign (lhs, rhs);
5797 gimplify_seq_add_stmt (pre_p, init);
5799 if (want_value)
5801 *expr_p = object;
5802 ret = GS_OK;
5804 else
5806 *expr_p = NULL;
5807 ret = GS_ALL_DONE;
5810 /* If the user requests initialization of automatic variables, we
5811 should also initialize the padding inside the variable. Add a call to
5812 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5813 initialize the padding of object to zero regardless of
5814 INIT_TYPE. Note that we will not insert this call if the aggregate
5815 variable has already been completely cleared or is initialized
5816 with an empty constructor. Nor can we insert this call if the
5817 variable is a gimple register, since __builtin_clear_padding takes
5818 the address of the variable. As a result, if a long double/_Complex long
5819 double variable is spilled to the stack later, its padding cannot
5820 be cleared with __builtin_clear_padding; we should clear its padding
5821 when it is spilled into memory. */
5822 if (is_init_expr
5823 && !is_gimple_reg (object)
5824 && clear_padding_type_may_have_padding_p (type)
5825 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5826 || !AGGREGATE_TYPE_P (type))
5827 && is_var_need_auto_init (object))
5828 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5830 return ret;
5833 /* Given a pointer value OP0, return a simplified version of an
5834 indirection through OP0, or NULL_TREE if no simplification is
5835 possible. This may only be applied to a rhs of an expression.
5836 Note that the resulting type may be different from the type pointed
5837 to in the sense that it is still compatible from the langhooks
5838 point of view. */
5840 static tree
5841 gimple_fold_indirect_ref_rhs (tree t)
5843 return gimple_fold_indirect_ref (t);
5846 /* Subroutine of gimplify_modify_expr to do simplifications of
5847 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5848 something changes. */
5850 static enum gimplify_status
5851 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5852 gimple_seq *pre_p, gimple_seq *post_p,
5853 bool want_value)
5855 enum gimplify_status ret = GS_UNHANDLED;
5856 bool changed;
5860 changed = false;
5861 switch (TREE_CODE (*from_p))
5863 case VAR_DECL:
5864 /* If we're assigning from a read-only variable initialized with
5865 a constructor and not volatile, do the direct assignment from
5866 the constructor, but only if the target is not volatile either
5867 since this latter assignment might end up being done on a per
5868 field basis. However, if the target is volatile and the type
5869 is aggregate and non-addressable, gimplify_init_constructor
5870 knows that it needs to ensure a single access to the target
5871 and it will return GS_OK only in this case. */
5872 if (TREE_READONLY (*from_p)
5873 && DECL_INITIAL (*from_p)
5874 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5875 && !TREE_THIS_VOLATILE (*from_p)
5876 && (!TREE_THIS_VOLATILE (*to_p)
5877 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5878 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5880 tree old_from = *from_p;
5881 enum gimplify_status subret;
5883 /* Move the constructor into the RHS. */
5884 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5886 /* Let's see if gimplify_init_constructor will need to put
5887 it in memory. */
5888 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5889 false, true);
5890 if (subret == GS_ERROR)
5892 /* If so, revert the change. */
5893 *from_p = old_from;
5895 else
5897 ret = GS_OK;
5898 changed = true;
5901 break;
5902 case INDIRECT_REF:
5903 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5904 /* If we have code like
5906 *(const A*)(A*)&x
5908 where the type of "x" is a (possibly cv-qualified variant
5909 of "A"), treat the entire expression as identical to "x".
5910 This kind of code arises in C++ when an object is bound
5911 to a const reference, and if "x" is a TARGET_EXPR we want
5912 to take advantage of the optimization below. But not if
5913 the type is TREE_ADDRESSABLE; then C++17 says that the
5914 TARGET_EXPR needs to be a temporary. */
5915 if (tree t
5916 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5918 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5919 if (TREE_THIS_VOLATILE (t) != volatile_p)
5921 if (DECL_P (t))
5922 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5923 build_fold_addr_expr (t));
5924 if (REFERENCE_CLASS_P (t))
5925 TREE_THIS_VOLATILE (t) = volatile_p;
5927 *from_p = t;
5928 ret = GS_OK;
5929 changed = true;
5931 break;
5933 case TARGET_EXPR:
5935 /* If we are initializing something from a TARGET_EXPR, strip the
5936 TARGET_EXPR and initialize it directly, if possible. This can't
5937 be done if the initializer is void, since that implies that the
5938 temporary is set in some non-trivial way.
5940 ??? What about code that pulls out the temp and uses it
5941 elsewhere? I think that such code never uses the TARGET_EXPR as
5942 an initializer. If I'm wrong, we'll die because the temp won't
5943 have any RTL. In that case, I guess we'll need to replace
5944 references somehow. */
5945 tree init = TARGET_EXPR_INITIAL (*from_p);
5947 if (init
5948 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5949 || !TARGET_EXPR_NO_ELIDE (*from_p))
5950 && !VOID_TYPE_P (TREE_TYPE (init)))
5952 *from_p = init;
5953 ret = GS_OK;
5954 changed = true;
5957 break;
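/* Illustrative sketch (the slot name D.1234 is hypothetical): for

     x = TARGET_EXPR <D.1234, f ()>

   we drop the temporary slot and gimplify plain "x = f ();" instead,
   eliding the copy when the checks above permit it.  */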
5959 case COMPOUND_EXPR:
5960 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5961 caught. */
5962 gimplify_compound_expr (from_p, pre_p, true);
5963 ret = GS_OK;
5964 changed = true;
5965 break;
5967 case CONSTRUCTOR:
5968 /* If we already made some changes, let the front end have a
5969 crack at this before we break it down. */
5970 if (ret != GS_UNHANDLED)
5971 break;
5973 /* If we're initializing from a CONSTRUCTOR, break this into
5974 individual MODIFY_EXPRs. */
5975 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5976 false);
5977 return ret;
5979 case COND_EXPR:
5980 /* If we're assigning to a non-register type, push the assignment
5981 down into the branches. This is mandatory for ADDRESSABLE types,
5982 since we cannot generate temporaries for such, but it saves a
5983 copy in other cases as well. */
5984 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5986 /* This code should mirror the code in gimplify_cond_expr. */
5987 enum tree_code code = TREE_CODE (*expr_p);
5988 tree cond = *from_p;
5989 tree result = *to_p;
5991 ret = gimplify_expr (&result, pre_p, post_p,
5992 is_gimple_lvalue, fb_lvalue);
5993 if (ret != GS_ERROR)
5994 ret = GS_OK;
5996 /* If we are going to write RESULT more than once, clear
5997 TREE_READONLY flag, otherwise we might incorrectly promote
5998 the variable to static const and initialize it at compile
5999 time in one of the branches. */
6000 if (VAR_P (result)
6001 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
6002 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6003 TREE_READONLY (result) = 0;
6004 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
6005 TREE_OPERAND (cond, 1)
6006 = build2 (code, void_type_node, result,
6007 TREE_OPERAND (cond, 1));
6008 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6009 TREE_OPERAND (cond, 2)
6010 = build2 (code, void_type_node, unshare_expr (result),
6011 TREE_OPERAND (cond, 2));
6013 TREE_TYPE (cond) = void_type_node;
6014 recalculate_side_effects (cond);
6016 if (want_value)
6018 gimplify_and_add (cond, pre_p);
6019 *expr_p = unshare_expr (result);
6021 else
6022 *expr_p = cond;
6023 return ret;
6025 break;
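/* Illustrative: for a non-register (e.g. aggregate) type,

     a = b ? x : y;

   becomes, approximately,

     if (b) a = x; else a = y;

   avoiding a temporary that could not be a gimple register.  */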
6027 case CALL_EXPR:
6028 /* For calls that return in memory, give *to_p as the CALL_EXPR's
6029 return slot so that we don't generate a temporary. */
6030 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
6031 && aggregate_value_p (*from_p, *from_p))
6033 bool use_target;
6035 if (!(rhs_predicate_for (*to_p))(*from_p))
6036 /* If we need a temporary, *to_p isn't accurate. */
6037 use_target = false;
6038 /* It's OK to use the return slot directly unless it's an NRV. */
6039 else if (TREE_CODE (*to_p) == RESULT_DECL
6040 && DECL_NAME (*to_p) == NULL_TREE
6041 && needs_to_live_in_memory (*to_p))
6042 use_target = true;
6043 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
6044 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
6045 /* Don't force regs into memory. */
6046 use_target = false;
6047 else if (TREE_CODE (*expr_p) == INIT_EXPR)
6048 /* It's OK to use the target directly if it's being
6049 initialized. */
6050 use_target = true;
6051 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
6052 != INTEGER_CST)
6053 /* Always use the target and thus RSO for variable-sized types.
6054 GIMPLE cannot deal with a variable-sized assignment
6055 embedded in a call statement. */
6056 use_target = true;
6057 else if (TREE_CODE (*to_p) != SSA_NAME
6058 && (!is_gimple_variable (*to_p)
6059 || needs_to_live_in_memory (*to_p)))
6060 /* Don't use the original target if it's already addressable;
6061 if its address escapes, and the called function uses the
6062 NRV optimization, a conforming program could see *to_p
6063 change before the called function returns; see c++/19317.
6064 When optimizing, the return_slot pass marks more functions
6065 as safe after we have escape info. */
6066 use_target = false;
6067 else
6068 use_target = true;
6070 if (use_target)
6072 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
6073 mark_addressable (*to_p);
6076 break;
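/* Illustrative sketch: when the return slot is usable,

     s = f ();   (f returning struct S in memory)

   is emitted as a single call with "s" as its LHS and the
   return-slot flag set, so at expansion time the callee constructs
   its result directly into "s" instead of into a compiler temporary
   that would then be copied.  */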
6078 case WITH_SIZE_EXPR:
6079 /* Likewise for calls that return an aggregate of non-constant size,
6080 since we would not be able to generate a temporary at all. */
6081 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
6083 *from_p = TREE_OPERAND (*from_p, 0);
6084 /* We don't change ret in this case because the
6085 WITH_SIZE_EXPR might have been added in
6086 gimplify_modify_expr, so returning GS_OK would lead to an
6087 infinite loop. */
6088 changed = true;
6090 break;
6092 /* If we're initializing from a container, push the initialization
6093 inside it. */
6094 case CLEANUP_POINT_EXPR:
6095 case BIND_EXPR:
6096 case STATEMENT_LIST:
6098 tree wrap = *from_p;
6099 tree t;
6101 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
6102 fb_lvalue);
6103 if (ret != GS_ERROR)
6104 ret = GS_OK;
6106 t = voidify_wrapper_expr (wrap, *expr_p);
6107 gcc_assert (t == *expr_p);
6109 if (want_value)
6111 gimplify_and_add (wrap, pre_p);
6112 *expr_p = unshare_expr (*to_p);
6114 else
6115 *expr_p = wrap;
6116 return GS_OK;
6119 case NOP_EXPR:
6120 /* Pull out compound literal expressions from a NOP_EXPR.
6121 Those are created in the C FE to drop qualifiers during
6122 lvalue conversion. */
6123 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
6124 && tree_ssa_useless_type_conversion (*from_p))
6126 *from_p = TREE_OPERAND (*from_p, 0);
6127 ret = GS_OK;
6128 changed = true;
6130 break;
6132 case COMPOUND_LITERAL_EXPR:
6134 tree complit = TREE_OPERAND (*expr_p, 1);
6135 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
6136 tree decl = DECL_EXPR_DECL (decl_s);
6137 tree init = DECL_INITIAL (decl);
6139 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
6140 into struct T x = { 0, 1, 2 } if the address of the
6141 compound literal has never been taken. */
6142 if (!TREE_ADDRESSABLE (complit)
6143 && !TREE_ADDRESSABLE (decl)
6144 && init)
6146 *expr_p = copy_node (*expr_p);
6147 TREE_OPERAND (*expr_p, 1) = init;
6148 return GS_OK;
6152 default:
6153 break;
6156 while (changed);
6158 return ret;
6162 /* Return true if T looks like a valid GIMPLE statement. */
6164 static bool
6165 is_gimple_stmt (tree t)
6167 const enum tree_code code = TREE_CODE (t);
6169 switch (code)
6171 case NOP_EXPR:
6172 /* The only valid NOP_EXPR is the empty statement. */
6173 return IS_EMPTY_STMT (t);
6175 case BIND_EXPR:
6176 case COND_EXPR:
6177 /* These are only valid if they're void. */
6178 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
6180 case SWITCH_EXPR:
6181 case GOTO_EXPR:
6182 case RETURN_EXPR:
6183 case LABEL_EXPR:
6184 case CASE_LABEL_EXPR:
6185 case TRY_CATCH_EXPR:
6186 case TRY_FINALLY_EXPR:
6187 case EH_FILTER_EXPR:
6188 case CATCH_EXPR:
6189 case ASM_EXPR:
6190 case STATEMENT_LIST:
6191 case OACC_PARALLEL:
6192 case OACC_KERNELS:
6193 case OACC_SERIAL:
6194 case OACC_DATA:
6195 case OACC_HOST_DATA:
6196 case OACC_DECLARE:
6197 case OACC_UPDATE:
6198 case OACC_ENTER_DATA:
6199 case OACC_EXIT_DATA:
6200 case OACC_CACHE:
6201 case OMP_PARALLEL:
6202 case OMP_FOR:
6203 case OMP_SIMD:
6204 case OMP_DISTRIBUTE:
6205 case OMP_LOOP:
6206 case OACC_LOOP:
6207 case OMP_SCAN:
6208 case OMP_SCOPE:
6209 case OMP_SECTIONS:
6210 case OMP_SECTION:
6211 case OMP_STRUCTURED_BLOCK:
6212 case OMP_SINGLE:
6213 case OMP_MASTER:
6214 case OMP_MASKED:
6215 case OMP_TASKGROUP:
6216 case OMP_ORDERED:
6217 case OMP_CRITICAL:
6218 case OMP_TASK:
6219 case OMP_TARGET:
6220 case OMP_TARGET_DATA:
6221 case OMP_TARGET_UPDATE:
6222 case OMP_TARGET_ENTER_DATA:
6223 case OMP_TARGET_EXIT_DATA:
6224 case OMP_TASKLOOP:
6225 case OMP_TEAMS:
6226 /* These are always void. */
6227 return true;
6229 case CALL_EXPR:
6230 case MODIFY_EXPR:
6231 case PREDICT_EXPR:
6232 /* These are valid regardless of their type. */
6233 return true;
6235 default:
6236 return false;
6241 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6242 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6244 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6245 other, unmodified part of the complex object just before the total store.
6246 As a consequence, if the object is still uninitialized, an undefined value
6247 will be loaded into a register, which may result in a spurious exception
6248 if the register is floating-point and the value happens to be a signaling
6249 NaN for example. Then the fully-fledged complex operations lowering pass
6250 followed by a DCE pass are necessary in order to fix things up. */
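/* Illustrative: a partial store such as

     __real__ z = r;

   is promoted to the total store, approximately

     z = COMPLEX_EXPR <r, IMAGPART_EXPR <z>>;

   at the cost of first loading the unmodified imaginary part.  */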
6252 static enum gimplify_status
6253 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6254 bool want_value)
6256 enum tree_code code, ocode;
6257 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6259 lhs = TREE_OPERAND (*expr_p, 0);
6260 rhs = TREE_OPERAND (*expr_p, 1);
6261 code = TREE_CODE (lhs);
6262 lhs = TREE_OPERAND (lhs, 0);
6264 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6265 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6266 suppress_warning (other);
6267 other = get_formal_tmp_var (other, pre_p);
6269 realpart = code == REALPART_EXPR ? rhs : other;
6270 imagpart = code == REALPART_EXPR ? other : rhs;
6272 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6273 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6274 else
6275 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6277 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6278 *expr_p = (want_value) ? rhs : NULL_TREE;
6280 return GS_ALL_DONE;
6283 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6285 modify_expr
6286 : varname '=' rhs
6287 | '*' ID '=' rhs
6289 PRE_P points to the list where side effects that must happen before
6290 *EXPR_P should be stored.
6292 POST_P points to the list where side effects that must happen after
6293 *EXPR_P should be stored.
6295 WANT_VALUE is nonzero iff we want to use the value of this expression
6296 in another expression. */
6298 static enum gimplify_status
6299 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6300 bool want_value)
6302 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6303 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6304 enum gimplify_status ret = GS_UNHANDLED;
6305 gimple *assign;
6306 location_t loc = EXPR_LOCATION (*expr_p);
6307 gimple_stmt_iterator gsi;
6309 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6310 return GS_ERROR;
6312 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6313 || TREE_CODE (*expr_p) == INIT_EXPR);
6315 /* Trying to simplify a clobber using normal logic doesn't work,
6316 so handle it here. */
6317 if (TREE_CLOBBER_P (*from_p))
6319 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6320 if (ret == GS_ERROR)
6321 return ret;
6322 gcc_assert (!want_value);
6323 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6325 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6326 pre_p, post_p);
6327 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6329 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6330 *expr_p = NULL;
6331 return GS_ALL_DONE;
6334 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6335 memset. */
6336 if (TREE_TYPE (*from_p) != error_mark_node
6337 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6338 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6339 && TREE_CODE (*from_p) == CONSTRUCTOR
6340 && CONSTRUCTOR_NELTS (*from_p) == 0)
6342 maybe_with_size_expr (from_p);
6343 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6344 return gimplify_modify_expr_to_memset (expr_p,
6345 TREE_OPERAND (*from_p, 1),
6346 want_value, pre_p);
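/* Illustrative sketch: for a variable-sized type, an empty
   CONSTRUCTOR that zero-initializes "a" becomes, approximately,

     __builtin_memset (&a, 0, <size>);

   where <size> is the WITH_SIZE_EXPR operand computed above.  */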
6349 /* Insert pointer conversions required by the middle-end that are not
6350 required by the frontend. This fixes middle-end type checking for
6351 cases such as gcc.dg/redecl-6.c. */
6352 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6354 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6355 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6356 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6359 /* See if any simplifications can be done based on what the RHS is. */
6360 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6361 want_value);
6362 if (ret != GS_UNHANDLED)
6363 return ret;
6365 /* For empty types only gimplify the left hand side and right hand
6366 side as statements and throw away the assignment. Do this after
6367 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6368 types properly. */
6369 if (is_empty_type (TREE_TYPE (*from_p))
6370 && !want_value
6371 /* Don't do this for calls that return addressable types; expand_call
6372 relies on those having a lhs. */
6373 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6374 && TREE_CODE (*from_p) == CALL_EXPR))
6376 gimplify_stmt (from_p, pre_p);
6377 gimplify_stmt (to_p, pre_p);
6378 *expr_p = NULL_TREE;
6379 return GS_ALL_DONE;
6382 /* If the value being copied is of variable width, compute the length
6383 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6384 before gimplifying any of the operands so that we can resolve any
6385 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6386 the size of the expression to be copied, not of the destination, so
6387 that is what we must do here. */
6388 maybe_with_size_expr (from_p);
6390 /* As a special case, we have to temporarily allow for assignments
6391 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6392 a toplevel statement, when gimplifying the GENERIC expression
6393 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6394 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6396 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6397 prevent gimplify_expr from trying to create a new temporary for
6398 foo's LHS, we tell it that it should only gimplify until it
6399 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6400 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6401 and all we need to do here is set 'a' to be its LHS. */
6403 /* Gimplify the RHS first for C++17 and bug 71104. */
6404 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6405 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6406 if (ret == GS_ERROR)
6407 return ret;
6409 /* Then gimplify the LHS. */
6410 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6411 twice we have to make sure to gimplify into non-SSA as otherwise
6412 the abnormal edge added later will make those defs not dominate
6413 their uses.
6414 ??? Technically this applies only to the registers used in the
6415 resulting non-register *TO_P. */
6416 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6417 if (saved_into_ssa
6418 && TREE_CODE (*from_p) == CALL_EXPR
6419 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6420 gimplify_ctxp->into_ssa = false;
6421 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6422 gimplify_ctxp->into_ssa = saved_into_ssa;
6423 if (ret == GS_ERROR)
6424 return ret;
6426 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6427 guess for the predicate was wrong. */
6428 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6429 if (final_pred != initial_pred)
6431 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6432 if (ret == GS_ERROR)
6433 return ret;
6436 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6437 size as argument to the call. */
6438 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6440 tree call = TREE_OPERAND (*from_p, 0);
6441 tree vlasize = TREE_OPERAND (*from_p, 1);
6443 if (TREE_CODE (call) == CALL_EXPR
6444 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6446 int nargs = call_expr_nargs (call);
6447 tree type = TREE_TYPE (call);
6448 tree ap = CALL_EXPR_ARG (call, 0);
6449 tree tag = CALL_EXPR_ARG (call, 1);
6450 tree aptag = CALL_EXPR_ARG (call, 2);
6451 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6452 IFN_VA_ARG, type,
6453 nargs + 1, ap, tag,
6454 aptag, vlasize);
6455 TREE_OPERAND (*from_p, 0) = newcall;
6459 /* Now see if the above changed *from_p to something we handle specially. */
6460 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6461 want_value);
6462 if (ret != GS_UNHANDLED)
6463 return ret;
6465 /* If we've got a variable sized assignment between two lvalues (i.e. does
6466 not involve a call), then we can make things a bit more straightforward
6467 by converting the assignment to memcpy or memset. */
6468 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6470 tree from = TREE_OPERAND (*from_p, 0);
6471 tree size = TREE_OPERAND (*from_p, 1);
6473 if (TREE_CODE (from) == CONSTRUCTOR)
6474 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6475 else if (is_gimple_addressable (from)
6476 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
6477 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
6479 *from_p = from;
6480 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6481 pre_p);
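/* Illustrative: a variable-sized copy between two lvalues,

     a = b;   (both of variably sized type)

   becomes, approximately,

     __builtin_memcpy (&a, &b, <size>);  */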
6485 /* Transform partial stores to non-addressable complex variables into
6486 total stores. This allows us to use real instead of virtual operands
6487 for these variables, which improves optimization. */
6488 if ((TREE_CODE (*to_p) == REALPART_EXPR
6489 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6490 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6491 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6493 /* Try to alleviate the effects of the gimplification creating artificial
6494 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6495 make sure not to create DECL_DEBUG_EXPR links across functions. */
6496 if (!gimplify_ctxp->into_ssa
6497 && VAR_P (*from_p)
6498 && DECL_IGNORED_P (*from_p)
6499 && DECL_P (*to_p)
6500 && !DECL_IGNORED_P (*to_p)
6501 && decl_function_context (*to_p) == current_function_decl
6502 && decl_function_context (*from_p) == current_function_decl)
6504 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6505 DECL_NAME (*from_p)
6506 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6507 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6508 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6511 if (want_value && TREE_THIS_VOLATILE (*to_p))
6512 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6514 if (TREE_CODE (*from_p) == CALL_EXPR)
6516 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6517 instead of a GIMPLE_ASSIGN. */
6518 gcall *call_stmt;
6519 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6521 /* Gimplify internal functions created in the FEs. */
6522 int nargs = call_expr_nargs (*from_p), i;
6523 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6524 auto_vec<tree> vargs (nargs);
6526 for (i = 0; i < nargs; i++)
6528 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6529 EXPR_LOCATION (*from_p));
6530 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6532 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6533 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6534 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6536 else
6538 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6539 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6540 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6541 tree fndecl = get_callee_fndecl (*from_p);
6542 if (fndecl
6543 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6544 && call_expr_nargs (*from_p) == 3)
6545 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6546 CALL_EXPR_ARG (*from_p, 0),
6547 CALL_EXPR_ARG (*from_p, 1),
6548 CALL_EXPR_ARG (*from_p, 2));
6549 else
6551 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6554 notice_special_calls (call_stmt);
6555 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6556 gimple_call_set_lhs (call_stmt, *to_p);
6557 else if (TREE_CODE (*to_p) == SSA_NAME)
6558 /* The above is somewhat premature, avoid ICEing later for an
6559 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6560 ??? This doesn't make it a default-def. */
6561 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6563 assign = call_stmt;
6565 else
6567 assign = gimple_build_assign (*to_p, *from_p);
6568 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6569 if (COMPARISON_CLASS_P (*from_p))
6570 copy_warning (assign, *from_p);
6573 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6575 /* We should have got an SSA name from the start. */
6576 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6577 || ! gimple_in_ssa_p (cfun));
6580 gimplify_seq_add_stmt (pre_p, assign);
6581 gsi = gsi_last (*pre_p);
6582 maybe_fold_stmt (&gsi);
6584 if (want_value)
6586 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6587 return GS_OK;
6589 else
6590 *expr_p = NULL;
6592 return GS_ALL_DONE;
6595 /* Gimplify a comparison between two variable-sized objects. Do this
6596 with a call to BUILT_IN_MEMCMP. */
6598 static enum gimplify_status
6599 gimplify_variable_sized_compare (tree *expr_p)
6601 location_t loc = EXPR_LOCATION (*expr_p);
6602 tree op0 = TREE_OPERAND (*expr_p, 0);
6603 tree op1 = TREE_OPERAND (*expr_p, 1);
6604 tree t, arg, dest, src, expr;
6606 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6607 arg = unshare_expr (arg);
6608 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6609 src = build_fold_addr_expr_loc (loc, op1);
6610 dest = build_fold_addr_expr_loc (loc, op0);
6611 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6612 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6614 expr
6615 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6616 SET_EXPR_LOCATION (expr, loc);
6617 *expr_p = expr;
6619 return GS_OK;
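/* Illustrative: for two variable-sized aggregates,

     a == b

   is rewritten here as, approximately,

     __builtin_memcmp (&a, &b, <size>) == 0;  */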
6622 /* Gimplify a comparison between two aggregate objects of integral scalar
6623 mode as a comparison between the bitwise equivalent scalar values. */
6625 static enum gimplify_status
6626 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6628 location_t loc = EXPR_LOCATION (*expr_p);
6629 tree op0 = TREE_OPERAND (*expr_p, 0);
6630 tree op1 = TREE_OPERAND (*expr_p, 1);
6632 tree type = TREE_TYPE (op0);
6633 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6635 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6636 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6638 *expr_p
6639 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6641 return GS_OK;
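/* Illustrative: comparing two 4-byte structs whose type has a
   32-bit integer mode becomes, approximately,

     VIEW_CONVERT_EXPR<unsigned int>(a) == VIEW_CONVERT_EXPR<unsigned int>(b)

   i.e. a single scalar comparison of the bitwise images.  */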
6644 /* Gimplify an expression sequence. This function gimplifies each
6645 expression and rewrites the original expression with the last
6646 expression of the sequence in GIMPLE form.
6648 PRE_P points to the list where the side effects for all the
6649 expressions in the sequence will be emitted.
6651 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6653 static enum gimplify_status
6654 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6656 tree t = *expr_p;
6660 tree *sub_p = &TREE_OPERAND (t, 0);
6662 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6663 gimplify_compound_expr (sub_p, pre_p, false);
6664 else
6665 gimplify_stmt (sub_p, pre_p);
6667 t = TREE_OPERAND (t, 1);
6669 while (TREE_CODE (t) == COMPOUND_EXPR);
6671 *expr_p = t;
6672 if (want_value)
6673 return GS_OK;
6674 else
6676 gimplify_stmt (expr_p, pre_p);
6677 return GS_ALL_DONE;
6681 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6682 gimplify. After gimplification, EXPR_P will point to a new temporary
6683 that holds the original value of the SAVE_EXPR node.
6685 PRE_P points to the list where side effects that must happen before
6686 *EXPR_P should be stored. */
6688 static enum gimplify_status
6689 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6691 enum gimplify_status ret = GS_ALL_DONE;
6692 tree val;
6694 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6695 val = TREE_OPERAND (*expr_p, 0);
6697 if (val && TREE_TYPE (val) == error_mark_node)
6698 return GS_ERROR;
6700 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6701 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6703 /* The operand may be a void-valued expression. It is
6704 being executed only for its side-effects. */
6705 if (TREE_TYPE (val) == void_type_node)
6707 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6708 is_gimple_stmt, fb_none);
6709 val = NULL;
6711 else
6712 /* The temporary may not be an SSA name as later abnormal and EH
6713 control flow may invalidate use/def domination. When in SSA
6714 form then assume there are no such issues and SAVE_EXPRs only
6715 appear via GENERIC foldings. */
6716 val = get_initialized_tmp_var (val, pre_p, post_p,
6717 gimple_in_ssa_p (cfun));
6719 TREE_OPERAND (*expr_p, 0) = val;
6720 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6723 *expr_p = val;
6725 return ret;
6728 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6730 unary_expr
6731 : ...
6732 | '&' varname
6735 PRE_P points to the list where side effects that must happen before
6736 *EXPR_P should be stored.
6738 POST_P points to the list where side effects that must happen after
6739 *EXPR_P should be stored. */
6741 static enum gimplify_status
6742 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6744 tree expr = *expr_p;
6745 tree op0 = TREE_OPERAND (expr, 0);
6746 enum gimplify_status ret;
6747 location_t loc = EXPR_LOCATION (*expr_p);
6749 switch (TREE_CODE (op0))
6751 case INDIRECT_REF:
6752 do_indirect_ref:
6753 /* Check if we are dealing with an expression of the form '&*ptr'.
6754 While the front end folds away '&*ptr' into 'ptr', these
6755 expressions may be generated internally by the compiler (e.g.,
6756 builtins like __builtin_va_end). */
6757 /* Caution: the silent array decomposition semantics we allow for
6758 ADDR_EXPR mean we can't always discard the pair. */
6759 /* Gimplification of the ADDR_EXPR operand may drop
6760 cv-qualification conversions, so make sure we add them if
6761 needed. */
6763 tree op00 = TREE_OPERAND (op0, 0);
6764 tree t_expr = TREE_TYPE (expr);
6765 tree t_op00 = TREE_TYPE (op00);
6767 if (!useless_type_conversion_p (t_expr, t_op00))
6768 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6769 *expr_p = op00;
6770 ret = GS_OK;
6772 break;
6774 case VIEW_CONVERT_EXPR:
6775 /* Take the address of our operand and then convert it to the type of
6776 this ADDR_EXPR.
6778 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6779 all clear. The impact of this transformation is even less clear. */
6781 /* If the operand is a useless conversion, look through it. Doing so
6782 guarantees that the ADDR_EXPR and its operand will remain of the
6783 same type. */
6784 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6785 op0 = TREE_OPERAND (op0, 0);
6787 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6788 build_fold_addr_expr_loc (loc,
6789 TREE_OPERAND (op0, 0)));
6790 ret = GS_OK;
6791 break;
6793 case MEM_REF:
6794 if (integer_zerop (TREE_OPERAND (op0, 1)))
6795 goto do_indirect_ref;
6797 /* fall through */
6799 default:
6800 /* If we see a call to a declared builtin or see its address
6801 being taken (we can unify those cases here) then we can mark
6802 the builtin for implicit generation by GCC. */
6803 if (TREE_CODE (op0) == FUNCTION_DECL
6804 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6805 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6806 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6808 /* We use fb_either here because the C frontend sometimes takes
6809 the address of a call that returns a struct; see
6810 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6811 the implied temporary explicit. */
6813 /* Make the operand addressable. */
6814 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6815 is_gimple_addressable, fb_either);
6816 if (ret == GS_ERROR)
6817 break;
6819 /* Then mark it. Beware that it may not be possible to do so directly
6820 if a temporary has been created by the gimplification. */
6821 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6823 op0 = TREE_OPERAND (expr, 0);
6825 /* For various reasons, the gimplification of the expression
6826 may have made a new INDIRECT_REF. */
6827 if (INDIRECT_REF_P (op0)
6828 || (TREE_CODE (op0) == MEM_REF
6829 && integer_zerop (TREE_OPERAND (op0, 1))))
6830 goto do_indirect_ref;
6832 mark_addressable (TREE_OPERAND (expr, 0));
6834 /* The FEs may end up building ADDR_EXPRs early on a decl with
6835 an incomplete type. Re-build ADDR_EXPRs in canonical form
6836 here. */
6837 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6838 *expr_p = build_fold_addr_expr (op0);
6840 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6841 recompute_tree_invariant_for_addr_expr (*expr_p);
6843 /* If we re-built the ADDR_EXPR add a conversion to the original type
6844 if required. */
6845 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6846 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6848 break;
6851 return ret;
6854 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6855 value; output operands should be a gimple lvalue. */
6857 static enum gimplify_status
6858 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6860 tree expr;
6861 int noutputs;
6862 const char **oconstraints;
6863 int i;
6864 tree link;
6865 const char *constraint;
6866 bool allows_mem, allows_reg, is_inout;
6867 enum gimplify_status ret, tret;
6868 gasm *stmt;
6869 vec<tree, va_gc> *inputs;
6870 vec<tree, va_gc> *outputs;
6871 vec<tree, va_gc> *clobbers;
6872 vec<tree, va_gc> *labels;
6873 tree link_next;
6875 expr = *expr_p;
6876 noutputs = list_length (ASM_OUTPUTS (expr));
6877 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6879 inputs = NULL;
6880 outputs = NULL;
6881 clobbers = NULL;
6882 labels = NULL;
6884 ret = GS_ALL_DONE;
6885 link_next = NULL_TREE;
6886 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6888 bool ok;
6889 size_t constraint_len;
6891 link_next = TREE_CHAIN (link);
6893 oconstraints[i]
6894 = constraint
6895 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6896 constraint_len = strlen (constraint);
6897 if (constraint_len == 0)
6898 continue;
6900 ok = parse_output_constraint (&constraint, i, 0, 0,
6901 &allows_mem, &allows_reg, &is_inout);
6902 if (!ok)
6904 ret = GS_ERROR;
6905 is_inout = false;
6908 /* If we can't make copies, we can only accept memory.
6909 Similarly for VLAs. */
6910 tree outtype = TREE_TYPE (TREE_VALUE (link));
6911 if (outtype != error_mark_node
6912 && (TREE_ADDRESSABLE (outtype)
6913 || !COMPLETE_TYPE_P (outtype)
6914 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6916 if (allows_mem)
6917 allows_reg = 0;
6918 else
6920 error ("impossible constraint in %<asm%>");
6921 error ("non-memory output %d must stay in memory", i);
6922 return GS_ERROR;
6926 if (!allows_reg && allows_mem)
6927 mark_addressable (TREE_VALUE (link));
6929 tree orig = TREE_VALUE (link);
6930 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6931 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6932 fb_lvalue | fb_mayfail);
6933 if (tret == GS_ERROR)
6935 if (orig != error_mark_node)
6936 error ("invalid lvalue in %<asm%> output %d", i);
6937 ret = tret;
6940 /* If the constraint does not allow memory make sure we gimplify
6941 it to a register if it is not already but its base is. This
6942 happens for complex and vector components. */
6943 if (!allows_mem)
6945 tree op = TREE_VALUE (link);
6946 if (! is_gimple_val (op)
6947 && is_gimple_reg_type (TREE_TYPE (op))
6948 && is_gimple_reg (get_base_address (op)))
6950 tree tem = create_tmp_reg (TREE_TYPE (op));
6951 tree ass;
6952 if (is_inout)
6954 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6955 tem, unshare_expr (op));
6956 gimplify_and_add (ass, pre_p);
6958 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6959 gimplify_and_add (ass, post_p);
6961 TREE_VALUE (link) = tem;
6962 tret = GS_OK;
6966 vec_safe_push (outputs, link);
6967 TREE_CHAIN (link) = NULL_TREE;
6969 if (is_inout)
6971 /* An input/output operand. To give the optimizers more
6972 flexibility, split it into separate input and output
6973 operands. */
6974 tree input;
6975 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6976 char buf[11];
6978 /* Turn the in/out constraint into an output constraint. */
6979 char *p = xstrdup (constraint);
6980 p[0] = '=';
6981 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6983 /* And add a matching input constraint. */
6984 if (allows_reg)
6986 sprintf (buf, "%u", i);
6988 /* If there are multiple alternatives in the constraint,
6989 handle each of them individually. Those that allow a register
6990 will be replaced with the operand number; the others stay
6991 unchanged. */
6992 if (strchr (p, ',') != NULL)
6994 size_t len = 0, buflen = strlen (buf);
6995 char *beg, *end, *str, *dst;
6997 for (beg = p + 1;;)
6999 end = strchr (beg, ',');
7000 if (end == NULL)
7001 end = strchr (beg, '\0');
7002 if ((size_t) (end - beg) < buflen)
7003 len += buflen + 1;
7004 else
7005 len += end - beg + 1;
7006 if (*end)
7007 beg = end + 1;
7008 else
7009 break;
7012 str = (char *) alloca (len);
7013 for (beg = p + 1, dst = str;;)
7015 const char *tem;
7016 bool mem_p, reg_p, inout_p;
7018 end = strchr (beg, ',');
7019 if (end)
7020 *end = '\0';
7021 beg[-1] = '=';
7022 tem = beg - 1;
7023 parse_output_constraint (&tem, i, 0, 0,
7024 &mem_p, &reg_p, &inout_p);
7025 if (dst != str)
7026 *dst++ = ',';
7027 if (reg_p)
7029 memcpy (dst, buf, buflen);
7030 dst += buflen;
7032 else
7034 if (end)
7035 len = end - beg;
7036 else
7037 len = strlen (beg);
7038 memcpy (dst, beg, len);
7039 dst += len;
7041 if (end)
7042 beg = end + 1;
7043 else
7044 break;
7046 *dst = '\0';
7047 input = build_string (dst - str, str);
7049 else
7050 input = build_string (strlen (buf), buf);
7052 else
7053 input = build_string (constraint_len - 1, constraint + 1);
7055 free (p);
7057 input = build_tree_list (build_tree_list (NULL_TREE, input),
7058 unshare_expr (TREE_VALUE (link)));
7059 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
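/* Illustrative: an in/out operand such as "+r" (x) is split here
   into the output "=r" (x) plus a matching input "0" (x) that
   refers back to it by operand number.  */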
7063 link_next = NULL_TREE;
7064 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
7066 link_next = TREE_CHAIN (link);
7067 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7068 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7069 oconstraints, &allows_mem, &allows_reg);
7071 /* If we can't make copies, we can only accept memory. */
7072 tree intype = TREE_TYPE (TREE_VALUE (link));
7073 if (intype != error_mark_node
7074 && (TREE_ADDRESSABLE (intype)
7075 || !COMPLETE_TYPE_P (intype)
7076 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
7078 if (allows_mem)
7079 allows_reg = 0;
7080 else
7082 error ("impossible constraint in %<asm%>");
7083 error ("non-memory input %d must stay in memory", i);
7084 return GS_ERROR;
7088 /* If the operand is a memory input, it should be an lvalue. */
7089 if (!allows_reg && allows_mem)
7091 tree inputv = TREE_VALUE (link);
7092 STRIP_NOPS (inputv);
7093 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
7094 || TREE_CODE (inputv) == PREINCREMENT_EXPR
7095 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
7096 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
7097 || TREE_CODE (inputv) == MODIFY_EXPR)
7098 TREE_VALUE (link) = error_mark_node;
7099 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7100 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7101 if (tret != GS_ERROR)
7103 /* Unlike output operands, memory inputs are not guaranteed
7104 to be lvalues by the FE, and while the expressions are
7105 marked addressable there, if it is e.g. a statement
7106 expression, temporaries in it might not end up being
7107 addressable. They might be already used in the IL and thus
7108 it is too late to make them addressable now though. */
7109 tree x = TREE_VALUE (link);
7110 while (handled_component_p (x))
7111 x = TREE_OPERAND (x, 0);
7112 if (TREE_CODE (x) == MEM_REF
7113 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
7114 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
7115 if ((VAR_P (x)
7116 || TREE_CODE (x) == PARM_DECL
7117 || TREE_CODE (x) == RESULT_DECL)
7118 && !TREE_ADDRESSABLE (x)
7119 && is_gimple_reg (x))
7121 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
7122 input_location), 0,
7123 "memory input %d is not directly addressable",
7125 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
7128 mark_addressable (TREE_VALUE (link));
7129 if (tret == GS_ERROR)
7131 if (inputv != error_mark_node)
7132 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
7133 "memory input %d is not directly addressable", i);
7134 ret = tret;
7137 else
7139 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7140 is_gimple_asm_val, fb_rvalue);
7141 if (tret == GS_ERROR)
7142 ret = tret;
7145 TREE_CHAIN (link) = NULL_TREE;
7146 vec_safe_push (inputs, link);
7149 link_next = NULL_TREE;
7150 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
7152 link_next = TREE_CHAIN (link);
7153 TREE_CHAIN (link) = NULL_TREE;
7154 vec_safe_push (clobbers, link);
7157 link_next = NULL_TREE;
7158 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
7160 link_next = TREE_CHAIN (link);
7161 TREE_CHAIN (link) = NULL_TREE;
7162 vec_safe_push (labels, link);
7165 /* Do not add ASMs with errors to the gimple IL stream. */
7166 if (ret != GS_ERROR)
7168 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
7169 inputs, outputs, clobbers, labels);
7171 /* asm is volatile if it was marked by the user as volatile or
7172 there are no outputs or this is an asm goto. */
7173 gimple_asm_set_volatile (stmt,
7174 ASM_VOLATILE_P (expr)
7175 || noutputs == 0
7176 || labels);
7177 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
7178 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
7180 gimplify_seq_add_stmt (pre_p, stmt);
7183 return ret;
7186 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
7187 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
7188 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7189 return to this function.
7191 FIXME should we complexify the prequeue handling instead? Or use flags
7192 for all the cleanups and let the optimizer tighten them up? The current
7193 code seems pretty fragile; it will break on a cleanup within any
7194 non-conditional nesting. But any such nesting would be broken, anyway;
7195 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7196 and continues out of it. We can do that at the RTL level, though, so
7197 having an optimizer to tighten up try/finally regions would be a Good
7198 Thing. */
7200 static enum gimplify_status
7201 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
7203 gimple_stmt_iterator iter;
7204 gimple_seq body_sequence = NULL;
7206 tree temp = voidify_wrapper_expr (*expr_p, NULL);
7208 /* We only care about the number of conditions between the innermost
7209 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7210 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7211 int old_conds = gimplify_ctxp->conditions;
7212 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
7213 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
7214 gimplify_ctxp->conditions = 0;
7215 gimplify_ctxp->conditional_cleanups = NULL;
7216 gimplify_ctxp->in_cleanup_point_expr = true;
7218 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
7220 gimplify_ctxp->conditions = old_conds;
7221 gimplify_ctxp->conditional_cleanups = old_cleanups;
7222 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
7224 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
7226 gimple *wce = gsi_stmt (iter);
7228 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
7230 if (gsi_one_before_end_p (iter))
7232 /* Note that gsi_insert_seq_before and gsi_remove do not
7233 scan operands, unlike some other sequence mutators. */
7234 if (!gimple_wce_cleanup_eh_only (wce))
7235 gsi_insert_seq_before_without_update (&iter,
7236 gimple_wce_cleanup (wce),
7237 GSI_SAME_STMT);
7238 gsi_remove (&iter, true);
7239 break;
7241 else
7243 gtry *gtry;
7244 gimple_seq seq;
7245 enum gimple_try_flags kind;
7247 if (gimple_wce_cleanup_eh_only (wce))
7248 kind = GIMPLE_TRY_CATCH;
7249 else
7250 kind = GIMPLE_TRY_FINALLY;
7251 seq = gsi_split_seq_after (iter);
7253 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7254 /* Do not use gsi_replace here, as it may scan operands.
7255 We want to do a simple structural modification only. */
7256 gsi_set_stmt (&iter, gtry);
7257 iter = gsi_start (gtry->eval);
7260 else
7261 gsi_next (&iter);
7264 gimplify_seq_add_seq (pre_p, body_sequence);
7265 if (temp)
7267 *expr_p = temp;
7268 return GS_OK;
7270 else
7272 *expr_p = NULL;
7273 return GS_ALL_DONE;
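/* A rough sketch of the rewrite performed above.  For a body that was
   gimplified into

     WCE <cleanup1>; stmt1; WCE <cleanup2>; stmt2;

   the loop produces the nested form

     try { stmt1; try { stmt2; } finally { cleanup2; } }
     finally { cleanup1; }

   while a WCE that ends the sequence simply has its cleanup spliced in
   place (unless it is EH-only).  */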
7277 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7278 is the cleanup action required. EH_ONLY is true if the cleanup should
7279 only be executed if an exception is thrown, not on normal exit.
7280 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7281 only valid for clobbers. */
7283 static void
7284 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7285 bool force_uncond = false)
7287 gimple *wce;
7288 gimple_seq cleanup_stmts = NULL;
7290 /* Errors can result in improperly nested cleanups, which results in
7291 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7292 if (seen_error ())
7293 return;
7295 if (gimple_conditional_context ())
7297 /* If we're in a conditional context, this is more complex. We only
7298 want to run the cleanup if we actually ran the initialization that
7299 necessitates it, but we want to run it after the end of the
7300 conditional context. So we wrap the try/finally around the
7301 condition and use a flag to determine whether or not to actually
7302 run the destructor. Thus
7304 test ? f(A()) : 0
7306 becomes (approximately)
7308 flag = 0;
7309 try {
7310 if (test) { A::A(temp); flag = 1; val = f(temp); }
7311 else { val = 0; }
7312 } finally {
7313 if (flag) A::~A(temp);
7314 }
7317 if (force_uncond)
7319 gimplify_stmt (&cleanup, &cleanup_stmts);
7320 wce = gimple_build_wce (cleanup_stmts);
7321 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7323 else
7325 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7326 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7327 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7329 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7330 gimplify_stmt (&cleanup, &cleanup_stmts);
7331 wce = gimple_build_wce (cleanup_stmts);
7332 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7334 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7335 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7336 gimplify_seq_add_stmt (pre_p, ftrue);
7338 /* Because of this manipulation, and the EH edges that jump
7339 threading cannot redirect, the temporary (VAR) will appear
7340 to be used uninitialized. Don't warn. */
7341 suppress_warning (var, OPT_Wuninitialized);
7344 else
7346 gimplify_stmt (&cleanup, &cleanup_stmts);
7347 wce = gimple_build_wce (cleanup_stmts);
7348 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7349 gimplify_seq_add_stmt (pre_p, wce);
7353 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7355 static enum gimplify_status
7356 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7358 tree targ = *expr_p;
7359 tree temp = TARGET_EXPR_SLOT (targ);
7360 tree init = TARGET_EXPR_INITIAL (targ);
7361 enum gimplify_status ret;
7363 bool unpoison_empty_seq = false;
7364 gimple_stmt_iterator unpoison_it;
7366 if (init)
7368 gimple_seq init_pre_p = NULL;
7370 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp
7371 to the temps list. Also handle variable length TARGET_EXPRs. */
7372 if (!poly_int_tree_p (DECL_SIZE (temp)))
7374 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7375 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7376 /* FIXME: this is correct only when the size of the type does
7377 not depend on expressions evaluated in init. */
7378 gimplify_vla_decl (temp, &init_pre_p);
7380 else
7382 /* Save the location where we need to place unpoisoning. It's possible
7383 that the variable will later become needs_to_live_in_memory. */
7384 unpoison_it = gsi_last (*pre_p);
7385 unpoison_empty_seq = gsi_end_p (unpoison_it);
7387 gimple_add_tmp_var (temp);
7390 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7391 expression is supposed to initialize the slot. */
7392 if (VOID_TYPE_P (TREE_TYPE (init)))
7393 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7394 fb_none);
7395 else
7397 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7398 init = init_expr;
7399 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7400 fb_none);
7401 init = NULL;
7402 ggc_free (init_expr);
7404 if (ret == GS_ERROR)
7406 /* PR c++/28266 Make sure this is expanded only once. */
7407 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7408 return GS_ERROR;
7411 if (init)
7412 gimplify_and_add (init, &init_pre_p);
7414 /* Add a clobber for the temporary going out of scope, like
7415 gimplify_bind_expr. But only if we did not promote the
7416 temporary to static storage. */
7417 if (gimplify_ctxp->in_cleanup_point_expr
7418 && !TREE_STATIC (temp)
7419 && needs_to_live_in_memory (temp))
7421 if (flag_stack_reuse == SR_ALL)
7423 tree clobber = build_clobber (TREE_TYPE (temp),
7424 CLOBBER_STORAGE_END);
7425 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7426 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7428 if (asan_poisoned_variables
7429 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7430 && !TREE_STATIC (temp)
7431 && dbg_cnt (asan_use_after_scope)
7432 && !gimplify_omp_ctxp)
7434 tree asan_cleanup = build_asan_poison_call_expr (temp);
7435 if (asan_cleanup)
7437 if (unpoison_empty_seq)
7438 unpoison_it = gsi_start (*pre_p);
7440 asan_poison_variable (temp, false, &unpoison_it,
7441 unpoison_empty_seq);
7442 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7447 gimple_seq_add_seq (pre_p, init_pre_p);
7449 /* If needed, push the cleanup for the temp. */
7450 if (TARGET_EXPR_CLEANUP (targ))
7451 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7452 CLEANUP_EH_ONLY (targ), pre_p);
7454 /* Only expand this once. */
7455 TREE_OPERAND (targ, 3) = init;
7456 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7458 else
7459 /* We should have expanded this before. */
7460 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7462 *expr_p = temp;
7463 return GS_OK;
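/* As an approximate illustration, for a C++ temporary of class type T

     TARGET_EXPR <D.1234, T::T (&D.1234)>

   (slot name hypothetical) the code above emits

     T::T (&D.1234);
     ... uses of D.1234 ...
     T::~T (&D.1234);        // TARGET_EXPR_CLEANUP, if any
     D.1234 = {CLOBBER};     // end-of-storage clobber, -fstack-reuse=all

   with the destructor and clobber routed through gimple_push_cleanup so
   that they land in the enclosing cleanup point; the clobber is pushed
   first and thus becomes the outermost finally, running last.  */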
7466 /* Gimplification of expression trees. */
7468 /* Gimplify an expression which appears at statement context. The
7469 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7470 NULL, a new sequence is allocated.
7472 Return true if we actually added a statement to the queue. */
7474 bool
7475 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7477 gimple_seq_node last;
7479 last = gimple_seq_last (*seq_p);
7480 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7481 return last != gimple_seq_last (*seq_p);
7484 /* Add FIRSTPRIVATE entries for DECL in CTX and the OpenMP parallels
7485 surrounding it. If entries already exist, force them to be some flavor
7486 of private. If there is no enclosing parallel, do nothing. */
7488 void
7489 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7491 splay_tree_node n;
7493 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7494 return;
7498 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7499 if (n != NULL)
7501 if (n->value & GOVD_SHARED)
7502 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7503 else if (n->value & GOVD_MAP)
7504 n->value |= GOVD_MAP_TO_ONLY;
7505 else
7506 return;
7508 else if ((ctx->region_type & ORT_TARGET) != 0)
7510 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7511 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7512 else
7513 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7515 else if (ctx->region_type != ORT_WORKSHARE
7516 && ctx->region_type != ORT_TASKGROUP
7517 && ctx->region_type != ORT_SIMD
7518 && ctx->region_type != ORT_ACC
7519 && !(ctx->region_type & ORT_TARGET_DATA))
7520 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7522 ctx = ctx->outer_context;
7524 while (ctx);
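/* For instance (a sketch, with a hypothetical gimplified size temporary
   D.2000):

     void f (int n)
     {
       int a[n];
     #pragma omp parallel
       ... sizeof (a) ...
     }

   the size temporary D.2000 referenced inside the parallel is entered
   here as GOVD_FIRSTPRIVATE, and a pre-existing GOVD_SHARED entry for it
   would be rewritten to GOVD_FIRSTPRIVATE by the code above.  */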
7527 /* Similarly for each of the type sizes of TYPE. */
7529 static void
7530 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7532 if (type == NULL || type == error_mark_node)
7533 return;
7534 type = TYPE_MAIN_VARIANT (type);
7536 if (ctx->privatized_types->add (type))
7537 return;
7539 switch (TREE_CODE (type))
7541 case INTEGER_TYPE:
7542 case ENUMERAL_TYPE:
7543 case BOOLEAN_TYPE:
7544 case REAL_TYPE:
7545 case FIXED_POINT_TYPE:
7546 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7547 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7548 break;
7550 case ARRAY_TYPE:
7551 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7552 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7553 break;
7555 case RECORD_TYPE:
7556 case UNION_TYPE:
7557 case QUAL_UNION_TYPE:
7559 tree field;
7560 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7561 if (TREE_CODE (field) == FIELD_DECL)
7563 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7564 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7567 break;
7569 case POINTER_TYPE:
7570 case REFERENCE_TYPE:
7571 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7572 break;
7574 default:
7575 break;
7578 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7579 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7580 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7583 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7585 static void
7586 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7588 splay_tree_node n;
7589 unsigned int nflags;
7590 tree t;
7592 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7593 return;
7595 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7596 there are constructors involved somewhere. The exception is a shared
7597 clause; there is nothing privatized in that case. */
7598 if ((flags & GOVD_SHARED) == 0
7599 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7600 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7601 flags |= GOVD_SEEN;
7603 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7604 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7606 /* We shouldn't be re-adding the decl with the same data
7607 sharing class. */
7608 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7609 nflags = n->value | flags;
7610 /* The only combination of data sharing classes we should see is
7611 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7612 reduction variables to be used in data sharing clauses. */
7613 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7614 || ((nflags & GOVD_DATA_SHARE_CLASS)
7615 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7616 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7617 n->value = nflags;
7618 return;
7621 /* When adding a variable-sized variable, we have to handle all sorts
7622 of additional bits of data: the pointer replacement variable, and
7623 the parameters of the type. */
7624 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7626 /* Add the pointer replacement variable as PRIVATE if the variable
7627 replacement is private, else FIRSTPRIVATE since we'll need the
7628 address of the original variable either for SHARED, or for the
7629 copy into or out of the context. */
7630 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7632 if (flags & GOVD_MAP)
7633 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7634 else if (flags & GOVD_PRIVATE)
7635 nflags = GOVD_PRIVATE;
7636 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7637 && (flags & GOVD_FIRSTPRIVATE))
7638 || (ctx->region_type == ORT_TARGET_DATA
7639 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7640 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7641 else
7642 nflags = GOVD_FIRSTPRIVATE;
7643 nflags |= flags & GOVD_SEEN;
7644 t = DECL_VALUE_EXPR (decl);
7645 gcc_assert (INDIRECT_REF_P (t));
7646 t = TREE_OPERAND (t, 0);
7647 gcc_assert (DECL_P (t));
7648 omp_add_variable (ctx, t, nflags);
7651 /* Add all of the variable and type parameters (which should have
7652 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7653 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7654 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7655 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7657 /* The variable-sized variable itself is never SHARED, only some form
7658 of PRIVATE. The sharing would take place via the pointer variable
7659 which we remapped above. */
7660 if (flags & GOVD_SHARED)
7661 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7662 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7664 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7665 alloca statement we generate for the variable, so make sure it
7666 is available. This isn't automatically needed for the SHARED
7667 case, since we won't be allocating local storage then.
7668 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7669 in that case omp_notice_variable will be called later
7670 on when it is gimplified. */
7671 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7672 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7673 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7675 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7676 && omp_privatize_by_reference (decl))
7678 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7680 /* Similar to the direct variable sized case above, we'll need the
7681 size of references being privatized. */
7682 if ((flags & GOVD_SHARED) == 0)
7684 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7685 if (t && DECL_P (t))
7686 omp_notice_variable (ctx, t, true);
7690 if (n != NULL)
7691 n->value |= flags;
7692 else
7693 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7695 /* For reductions clauses in OpenACC loop directives, by default create a
7696 copy clause on the enclosing parallel construct for carrying back the
7697 results. */
7698 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7700 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7701 while (outer_ctx)
7703 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7704 if (n != NULL)
7706 /* Ignore local variables and explicitly declared clauses. */
7707 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7708 break;
7709 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7711 /* According to the OpenACC spec, such a reduction variable
7712 should already have a copy map on a kernels construct;
7713 verify that here. */
7714 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7715 && (n->value & GOVD_MAP));
7717 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7719 /* Remove firstprivate and make it a copy map. */
7720 n->value &= ~GOVD_FIRSTPRIVATE;
7721 n->value |= GOVD_MAP;
7724 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7726 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7727 GOVD_MAP | GOVD_SEEN);
7728 break;
7730 outer_ctx = outer_ctx->outer_context;
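/* E.g. for a variable length array (a sketch; the replacement decl name
   is internal):

     int vla[n];
     #pragma omp parallel private (vla)

   DECL_VALUE_EXPR (vla) has the form *vla.ptr, so besides recording vla
   itself the code above also adds the pointer replacement vla.ptr (as
   PRIVATE here) and firstprivatizes the gimplified size expressions of
   the type.  */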
7735 /* Notice a threadprivate variable DECL used in OMP context CTX.
7736 This just prints out diagnostics about threadprivate variable uses
7737 in untied tasks, target regions and regions with an
7738 %<order(concurrent)%> clause. If DECL2 is non-NULL, prevent this warning
7738 on that variable. */
7740 static bool
7741 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7742 tree decl2)
7744 splay_tree_node n;
7745 struct gimplify_omp_ctx *octx;
7747 for (octx = ctx; octx; octx = octx->outer_context)
7748 if ((octx->region_type & ORT_TARGET) != 0
7749 || octx->order_concurrent)
7751 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7752 if (n == NULL)
7754 if (octx->order_concurrent)
7756 error ("threadprivate variable %qE used in a region with"
7757 " %<order(concurrent)%> clause", DECL_NAME (decl));
7758 inform (octx->location, "enclosing region");
7760 else
7762 error ("threadprivate variable %qE used in target region",
7763 DECL_NAME (decl));
7764 inform (octx->location, "enclosing target region");
7766 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7768 if (decl2)
7769 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7772 if (ctx->region_type != ORT_UNTIED_TASK)
7773 return false;
7774 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7775 if (n == NULL)
7777 error ("threadprivate variable %qE used in untied task",
7778 DECL_NAME (decl));
7779 inform (ctx->location, "enclosing task");
7780 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7782 if (decl2)
7783 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7784 return false;
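/* Example of the diagnostic above (a sketch):

     int tp;
     #pragma omp threadprivate (tp)

     void f (void)
     {
     #pragma omp task untied
       tp++;   // error: threadprivate variable 'tp' used in untied task
     }  */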
7787 /* Return true if global var DECL is device resident. */
7789 static bool
7790 device_resident_p (tree decl)
7792 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7794 if (!attr)
7795 return false;
7797 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7799 tree c = TREE_VALUE (t);
7800 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7801 return true;
7804 return false;
7807 /* Return true if DECL has an ACC DECLARE attribute. */
7809 static bool
7810 is_oacc_declared (tree decl)
7812 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7813 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7814 return declared != NULL_TREE;
7817 /* Determine outer default flags for DECL mentioned in an OMP region
7818 but not declared in an enclosing clause.
7820 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7821 remapped firstprivate instead of shared. To some extent this is
7822 addressed in omp_firstprivatize_type_sizes, but not
7823 effectively. */
7825 static unsigned
7826 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7827 bool in_code, unsigned flags)
7829 enum omp_clause_default_kind default_kind = ctx->default_kind;
7830 enum omp_clause_default_kind kind;
7832 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7833 if (ctx->region_type & ORT_TASK)
7835 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7837 /* The event-handle specified by a detach clause should always be firstprivate,
7838 regardless of the current default. */
7839 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7840 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7842 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7843 default_kind = kind;
7844 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7845 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7846 /* For C/C++ default({,first}private), variables with static storage duration
7847 declared in a namespace or global scope and referenced in the construct
7848 must be explicitly specified, i.e. this acts as default(none). */
7849 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7850 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7851 && VAR_P (decl)
7852 && is_global_var (decl)
7853 && (DECL_FILE_SCOPE_P (decl)
7854 || (DECL_CONTEXT (decl)
7855 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7856 && !lang_GNU_Fortran ())
7857 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7859 switch (default_kind)
7861 case OMP_CLAUSE_DEFAULT_NONE:
7863 const char *rtype;
7865 if (ctx->region_type & ORT_PARALLEL)
7866 rtype = "parallel";
7867 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7868 rtype = "taskloop";
7869 else if (ctx->region_type & ORT_TASK)
7870 rtype = "task";
7871 else if (ctx->region_type & ORT_TEAMS)
7872 rtype = "teams";
7873 else
7874 gcc_unreachable ();
7876 error ("%qE not specified in enclosing %qs",
7877 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7878 inform (ctx->location, "enclosing %qs", rtype);
7880 /* FALLTHRU */
7881 case OMP_CLAUSE_DEFAULT_SHARED:
7882 flags |= GOVD_SHARED;
7883 break;
7884 case OMP_CLAUSE_DEFAULT_PRIVATE:
7885 flags |= GOVD_PRIVATE;
7886 break;
7887 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7888 flags |= GOVD_FIRSTPRIVATE;
7889 break;
7890 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7891 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7892 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7893 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7895 omp_notice_variable (octx, decl, in_code);
7896 for (; octx; octx = octx->outer_context)
7898 splay_tree_node n2;
7900 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7901 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7902 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7903 continue;
7904 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7906 flags |= GOVD_FIRSTPRIVATE;
7907 goto found_outer;
7909 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7911 flags |= GOVD_SHARED;
7912 goto found_outer;
7917 if (TREE_CODE (decl) == PARM_DECL
7918 || (!is_global_var (decl)
7919 && DECL_CONTEXT (decl) == current_function_decl))
7920 flags |= GOVD_FIRSTPRIVATE;
7921 else
7922 flags |= GOVD_SHARED;
7923 found_outer:
7924 break;
7926 default:
7927 gcc_unreachable ();
7930 return flags;
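/* E.g. (a sketch):

     int x = 0;
     #pragma omp parallel default(none)
       x++;   // error: 'x' not specified in enclosing 'parallel'

   whereas under default(shared) the same reference would simply receive
   GOVD_SHARED above.  */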
7933 /* Return the string name of an OpenACC construct from its ORT_* value. */
7935 static const char *
7936 oacc_region_type_name (enum omp_region_type region_type)
7938 switch (region_type)
7940 case ORT_ACC_DATA:
7941 return "data";
7942 case ORT_ACC_PARALLEL:
7943 return "parallel";
7944 case ORT_ACC_KERNELS:
7945 return "kernels";
7946 case ORT_ACC_SERIAL:
7947 return "serial";
7948 default:
7949 gcc_unreachable ();
7953 /* Determine outer default flags for DECL mentioned in an OACC region
7954 but not declared in an enclosing clause. */
7956 static unsigned
7957 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7959 struct gimplify_omp_ctx *ctx_default = ctx;
7960 /* If no 'default' clause appears on this compute construct... */
7961 if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
7963 /* ..., see if one appears on a lexically containing 'data'
7964 construct. */
7965 while ((ctx_default = ctx_default->outer_context))
7967 if (ctx_default->region_type == ORT_ACC_DATA
7968 && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
7969 break;
7971 /* If not, reset. */
7972 if (!ctx_default)
7973 ctx_default = ctx;
7976 bool on_device = false;
7977 bool is_private = false;
7978 bool declared = is_oacc_declared (decl);
7979 tree type = TREE_TYPE (decl);
7981 if (omp_privatize_by_reference (decl))
7982 type = TREE_TYPE (type);
7984 /* For Fortran COMMON blocks, only used variables in those blocks are
7985 transferred and remapped. The block itself will have a private clause to
7986 avoid transferring the data twice.
7987 The hook evaluates to false by default. For a variable in Fortran's COMMON
7988 or EQUIVALENCE block, it returns 'true' (as we have shared=false), as only
7989 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7990 the whole block. For C++ and Fortran, it can also be true under certain
7991 other conditions, if DECL_HAS_VALUE_EXPR. */
7992 if (RECORD_OR_UNION_TYPE_P (type))
7993 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7995 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7996 && is_global_var (decl)
7997 && device_resident_p (decl)
7998 && !is_private)
8000 on_device = true;
8001 flags |= GOVD_MAP_TO_ONLY;
8004 switch (ctx->region_type)
8006 case ORT_ACC_KERNELS:
8007 if (is_private)
8008 flags |= GOVD_FIRSTPRIVATE;
8009 else if (AGGREGATE_TYPE_P (type))
8011 /* Aggregates default to 'present_or_copy', or 'present'. */
8012 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8013 flags |= GOVD_MAP;
8014 else
8015 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8017 else
8018 /* Scalars default to 'copy'. */
8019 flags |= GOVD_MAP | GOVD_MAP_FORCE;
8021 break;
8023 case ORT_ACC_PARALLEL:
8024 case ORT_ACC_SERIAL:
8025 if (is_private)
8026 flags |= GOVD_FIRSTPRIVATE;
8027 else if (on_device || declared)
8028 flags |= GOVD_MAP;
8029 else if (AGGREGATE_TYPE_P (type))
8031 /* Aggregates default to 'present_or_copy', or 'present'. */
8032 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8033 flags |= GOVD_MAP;
8034 else
8035 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8037 else
8038 /* Scalars default to 'firstprivate'. */
8039 flags |= GOVD_FIRSTPRIVATE;
8041 break;
8043 default:
8044 gcc_unreachable ();
8047 if (DECL_ARTIFICIAL (decl))
8048 ; /* We can get compiler-generated decls, and should not complain
8049 about them. */
8050 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
8052 error ("%qE not specified in enclosing OpenACC %qs construct",
8053 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
8054 oacc_region_type_name (ctx->region_type));
8055 if (ctx_default != ctx)
8056 inform (ctx->location, "enclosing OpenACC %qs construct and",
8057 oacc_region_type_name (ctx->region_type));
8058 inform (ctx_default->location,
8059 "enclosing OpenACC %qs construct with %qs clause",
8060 oacc_region_type_name (ctx_default->region_type),
8061 "default(none)");
8063 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
8064 ; /* Handled above. */
8065 else
8066 gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
8068 return flags;
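/* Approximate illustration of the defaults chosen above:

     int s; int a[100];
     #pragma acc parallel      // no default(...) clause
       { ... s ... a ... }     // s: firstprivate; a: map (present_or_copy)

   while on a kernels construct the scalar s would instead get a forced
   'copy' mapping (GOVD_MAP | GOVD_MAP_FORCE).  */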
8071 /* Record the fact that DECL was used within the OMP context CTX.
8072 IN_CODE is true when real code uses DECL, and false when we should
8073 merely emit default(none) errors. Return true if DECL is going to
8074 be remapped and thus DECL shouldn't be gimplified into its
8075 DECL_VALUE_EXPR (if any). */
8077 static bool
8078 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
8080 splay_tree_node n;
8081 unsigned flags = in_code ? GOVD_SEEN : 0;
8082 bool ret = false, shared;
8084 if (error_operand_p (decl))
8085 return false;
8087 if (DECL_ARTIFICIAL (decl))
8089 tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
8090 if (attr)
8091 decl = TREE_VALUE (TREE_VALUE (attr));
8094 if (ctx->region_type == ORT_NONE)
8095 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
8097 if (is_global_var (decl))
8099 /* Threadprivate variables are predetermined. */
8100 if (DECL_THREAD_LOCAL_P (decl))
8101 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
8103 if (DECL_HAS_VALUE_EXPR_P (decl))
8105 if (ctx->region_type & ORT_ACC)
8106 /* For OpenACC, defer expansion of the value to avoid transferring
8107 privatized common block data instead of the im-/explicitly transferred
8108 variables which are in common blocks. */
8110 else
8112 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8114 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
8115 return omp_notice_threadprivate_variable (ctx, decl, value);
8119 if (gimplify_omp_ctxp->outer_context == NULL
8120 && VAR_P (decl)
8121 && oacc_get_fn_attrib (current_function_decl))
8123 location_t loc = DECL_SOURCE_LOCATION (decl);
8125 if (lookup_attribute ("omp declare target link",
8126 DECL_ATTRIBUTES (decl)))
8128 error_at (loc,
8129 "%qE with %<link%> clause used in %<routine%> function",
8130 DECL_NAME (decl));
8131 return false;
8133 else if (!lookup_attribute ("omp declare target",
8134 DECL_ATTRIBUTES (decl)))
8136 error_at (loc,
8137 "%qE requires a %<declare%> directive for use "
8138 "in a %<routine%> function", DECL_NAME (decl));
8139 return false;
8144 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8145 if ((ctx->region_type & ORT_TARGET) != 0)
8147 if (n == NULL)
8149 unsigned nflags = flags;
8150 if ((ctx->region_type & ORT_ACC) == 0)
8152 bool is_declare_target = false;
8153 if (is_global_var (decl)
8154 && varpool_node::get_create (decl)->offloadable)
8156 struct gimplify_omp_ctx *octx;
8157 for (octx = ctx->outer_context;
8158 octx; octx = octx->outer_context)
8160 n = splay_tree_lookup (octx->variables,
8161 (splay_tree_key)decl);
8162 if (n
8163 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
8164 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8165 break;
8167 is_declare_target = octx == NULL;
8169 if (!is_declare_target)
8171 int gdmk;
8172 enum omp_clause_defaultmap_kind kind;
8173 if (lang_hooks.decls.omp_allocatable_p (decl))
8174 gdmk = GDMK_ALLOCATABLE;
8175 else if (lang_hooks.decls.omp_scalar_target_p (decl))
8176 gdmk = GDMK_SCALAR_TARGET;
8177 else if (lang_hooks.decls.omp_scalar_p (decl, false))
8178 gdmk = GDMK_SCALAR;
8179 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
8180 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8181 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
8182 == POINTER_TYPE)))
8183 gdmk = GDMK_POINTER;
8184 else
8185 gdmk = GDMK_AGGREGATE;
8186 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
8187 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
8189 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
8190 nflags |= GOVD_FIRSTPRIVATE;
8191 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
8192 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
8193 else
8194 gcc_unreachable ();
8196 else if (ctx->defaultmap[gdmk] == 0)
8198 tree d = lang_hooks.decls.omp_report_decl (decl);
8199 error ("%qE not specified in enclosing %<target%>",
8200 DECL_NAME (d));
8201 inform (ctx->location, "enclosing %<target%>");
8203 else if (ctx->defaultmap[gdmk]
8204 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
8205 nflags |= ctx->defaultmap[gdmk];
8206 else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
8208 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8209 nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
8211 else
8213 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8214 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
8219 struct gimplify_omp_ctx *octx = ctx->outer_context;
8220 if ((ctx->region_type & ORT_ACC) && octx)
8222 /* Look in outer OpenACC contexts, to see if there's a
8223 data attribute for this variable. */
8224 omp_notice_variable (octx, decl, in_code);
8226 for (; octx; octx = octx->outer_context)
8228 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
8229 break;
8230 splay_tree_node n2
8231 = splay_tree_lookup (octx->variables,
8232 (splay_tree_key) decl);
8233 if (n2)
8235 if (octx->region_type == ORT_ACC_HOST_DATA)
8236 error ("variable %qE declared in enclosing "
8237 "%<host_data%> region", DECL_NAME (decl));
8238 nflags |= GOVD_MAP;
8239 if (octx->region_type == ORT_ACC_DATA
8240 && (n2->value & GOVD_MAP_0LEN_ARRAY))
8241 nflags |= GOVD_MAP_0LEN_ARRAY;
8242 goto found_outer;
8247 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
8248 | GOVD_MAP_ALLOC_ONLY)) == flags)
8250 tree type = TREE_TYPE (decl);
8252 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8253 && omp_privatize_by_reference (decl))
8254 type = TREE_TYPE (type);
8255 if (!omp_mappable_type (type))
8257 error ("%qD referenced in target region does not have "
8258 "a mappable type", decl);
8259 nflags |= GOVD_MAP | GOVD_EXPLICIT;
8261 else
8263 if ((ctx->region_type & ORT_ACC) != 0)
8264 nflags = oacc_default_clause (ctx, decl, flags);
8265 else
8266 nflags |= GOVD_MAP;
8269 found_outer:
8270 omp_add_variable (ctx, decl, nflags);
8271 if (ctx->region_type & ORT_ACC)
8272 /* For OpenACC, as remarked above, defer expansion. */
8273 shared = false;
8274 else
8275 shared = (nflags & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
8276 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8278 else
8280 if (ctx->region_type & ORT_ACC)
8281 /* For OpenACC, as remarked above, defer expansion. */
8282 shared = false;
8283 else
8284 shared = ((n->value | flags)
8285 & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
8286 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8287 /* If nothing changed, there's nothing left to do. */
8288 if ((n->value & flags) == flags)
8289 return ret;
8290 flags |= n->value;
8291 n->value = flags;
8293 goto do_outer;
8296 if (n == NULL)
8298 if (ctx->region_type == ORT_WORKSHARE
8299 || ctx->region_type == ORT_TASKGROUP
8300 || ctx->region_type == ORT_SIMD
8301 || ctx->region_type == ORT_ACC
8302 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8303 goto do_outer;
8305 flags = omp_default_clause (ctx, decl, in_code, flags);
8307 if ((flags & GOVD_PRIVATE)
8308 && lang_hooks.decls.omp_private_outer_ref (decl))
8309 flags |= GOVD_PRIVATE_OUTER_REF;
8311 omp_add_variable (ctx, decl, flags);
8313 shared = (flags & GOVD_SHARED) != 0;
8314 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8315 goto do_outer;
8318 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8319 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
8320 if (ctx->region_type == ORT_SIMD
8321 && ctx->in_for_exprs
8322 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8323 == GOVD_PRIVATE))
8324 flags &= ~GOVD_SEEN;
8326 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8327 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8328 && DECL_SIZE (decl))
8330 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8332 splay_tree_node n2;
8333 tree t = DECL_VALUE_EXPR (decl);
8334 gcc_assert (INDIRECT_REF_P (t));
8335 t = TREE_OPERAND (t, 0);
8336 gcc_assert (DECL_P (t));
8337 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8338 n2->value |= GOVD_SEEN;
8340 else if (omp_privatize_by_reference (decl)
8341 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8342 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8343 != INTEGER_CST))
8345 splay_tree_node n2;
8346 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8347 gcc_assert (DECL_P (t));
8348 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8349 if (n2)
8350 omp_notice_variable (ctx, t, true);
8354 if (ctx->region_type & ORT_ACC)
8355 /* For OpenACC, as remarked above, defer expansion. */
8356 shared = false;
8357 else
8358 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8359 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8361 /* If nothing changed, there's nothing left to do. */
8362 if ((n->value & flags) == flags)
8363 return ret;
8364 flags |= n->value;
8365 n->value = flags;
8367 do_outer:
8368 /* If the variable is private in the current context, then we don't
8369 need to propagate anything to an outer context. */
8370 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8371 return ret;
8372 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8373 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8374 return ret;
8375 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8376 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8377 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8378 return ret;
8379 if (ctx->outer_context
8380 && omp_notice_variable (ctx->outer_context, decl, in_code))
8381 return true;
8382 return ret;
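/* For the OpenACC 'routine' checks above, e.g. (a sketch):

     int g;
     #pragma acc routine
     void f (void)
     {
       g++;   // error: 'g' requires a 'declare' directive for use
              // in a 'routine' function
     }  */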
8385 /* Verify that DECL is private within CTX. If there's specific information
8386 to the contrary in the innermost scope, generate an error. */
8388 static bool
8389 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8391 splay_tree_node n;
8393 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8394 if (n != NULL)
8396 if (n->value & GOVD_SHARED)
8398 if (ctx == gimplify_omp_ctxp)
8400 if (simd)
8401 error ("iteration variable %qE is predetermined linear",
8402 DECL_NAME (decl));
8403 else
8404 error ("iteration variable %qE should be private",
8405 DECL_NAME (decl));
8406 n->value = GOVD_PRIVATE;
8407 return true;
8409 else
8410 return false;
8412 else if ((n->value & GOVD_EXPLICIT) != 0
8413 && (ctx == gimplify_omp_ctxp
8414 || (ctx->region_type == ORT_COMBINED_PARALLEL
8415 && gimplify_omp_ctxp->outer_context == ctx)))
8417 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8418 error ("iteration variable %qE should not be firstprivate",
8419 DECL_NAME (decl));
8420 else if ((n->value & GOVD_REDUCTION) != 0)
8421 error ("iteration variable %qE should not be reduction",
8422 DECL_NAME (decl));
8423 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8424 error ("iteration variable %qE should not be linear",
8425 DECL_NAME (decl));
8427 return (ctx == gimplify_omp_ctxp
8428 || (ctx->region_type == ORT_COMBINED_PARALLEL
8429 && gimplify_omp_ctxp->outer_context == ctx));
8432 if (ctx->region_type != ORT_WORKSHARE
8433 && ctx->region_type != ORT_TASKGROUP
8434 && ctx->region_type != ORT_SIMD
8435 && ctx->region_type != ORT_ACC)
8436 return false;
8437 else if (ctx->outer_context)
8438 return omp_is_private (ctx->outer_context, decl, simd);
8439 return false;
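/* E.g. (a sketch):

     int i;
     #pragma omp parallel for shared (i)
     for (i = 0; i < 64; i++)
       ;   // error: iteration variable 'i' should be private
  */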
8442 /* Return true if DECL is private within a parallel region
8443 that binds to the current construct's context, or appears in that
8444 parallel region's REDUCTION clause. */
8446 static bool
8447 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8449 splay_tree_node n;
8453 ctx = ctx->outer_context;
8454 if (ctx == NULL)
8456 if (is_global_var (decl))
8457 return false;
8459 /* References might be private, but they might be shared too;
8460 when checking for copyprivate, assume they might be
8461 private, otherwise assume they might be shared. */
8462 if (copyprivate)
8463 return true;
8465 if (omp_privatize_by_reference (decl))
8466 return false;
8468 /* Treat C++ privatized non-static data members outside
8469 of the privatization the same. */
8470 if (omp_member_access_dummy_var (decl))
8471 return false;
8473 return true;
8476 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8478 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8479 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8481 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8482 || n == NULL
8483 || (n->value & GOVD_MAP) == 0)
8484 continue;
8485 return false;
8488 if (n != NULL)
8490 if ((n->value & GOVD_LOCAL) != 0
8491 && omp_member_access_dummy_var (decl))
8492 return false;
8493 return (n->value & GOVD_SHARED) == 0;
8496 if (ctx->region_type == ORT_WORKSHARE
8497 || ctx->region_type == ORT_TASKGROUP
8498 || ctx->region_type == ORT_SIMD
8499 || ctx->region_type == ORT_ACC)
8500 continue;
8502 break;
8504 while (1);
8505 return false;
8508 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8510 static tree
8511 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8513 tree t = *tp;
8515 /* If this is a DECL_EXPR for the DECL we are looking for, return it. */
8516 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8517 return t;
8519 if (IS_TYPE_OR_DECL_P (t))
8520 *walk_subtrees = 0;
8521 return NULL_TREE;
8525 /* Gimplify the affinity clause but effectively ignore it.
8526 Generate:
8527 var = begin;
8528 if ((step > 0) ? var <= end : var > end)
8529 locator_var_expr; */
8531 static void
8532 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8534 tree last_iter = NULL_TREE;
8535 tree last_bind = NULL_TREE;
8536 tree label = NULL_TREE;
8537 tree *last_body = NULL;
8538 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8539 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8541 tree t = OMP_CLAUSE_DECL (c);
8542 if (TREE_CODE (t) == TREE_LIST
8543 && TREE_PURPOSE (t)
8544 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8546 if (TREE_VALUE (t) == null_pointer_node)
8547 continue;
8548 if (TREE_PURPOSE (t) != last_iter)
8550 if (last_bind)
8552 append_to_statement_list (label, last_body);
8553 gimplify_and_add (last_bind, pre_p);
8554 last_bind = NULL_TREE;
8556 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8558 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8559 is_gimple_val, fb_rvalue) == GS_ERROR
8560 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8561 is_gimple_val, fb_rvalue) == GS_ERROR
8562 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8563 is_gimple_val, fb_rvalue) == GS_ERROR
8564 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8565 is_gimple_val, fb_rvalue)
8566 == GS_ERROR))
8567 return;
8569 last_iter = TREE_PURPOSE (t);
8570 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8571 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8572 NULL, block);
8573 last_body = &BIND_EXPR_BODY (last_bind);
8574 tree cond = NULL_TREE;
8575 location_t loc = OMP_CLAUSE_LOCATION (c);
8576 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8578 tree var = TREE_VEC_ELT (it, 0);
8579 tree begin = TREE_VEC_ELT (it, 1);
8580 tree end = TREE_VEC_ELT (it, 2);
8581 tree step = TREE_VEC_ELT (it, 3);
8582 loc = DECL_SOURCE_LOCATION (var);
8583 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8584 var, begin);
8585 append_to_statement_list_force (tem, last_body);
8587 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8588 step, build_zero_cst (TREE_TYPE (step)));
8589 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8590 var, end);
8591 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8592 var, end);
8593 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8594 cond1, cond2, cond3);
8595 if (cond)
8596 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8597 boolean_type_node, cond, cond1);
8598 else
8599 cond = cond1;
8601 tree cont_label = create_artificial_label (loc);
8602 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8603 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8604 void_node,
8605 build_and_jump (&cont_label));
8606 append_to_statement_list_force (tem, last_body);
8608 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8610 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8611 last_body);
8612 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8614 if (error_operand_p (TREE_VALUE (t)))
8615 return;
8616 append_to_statement_list_force (TREE_VALUE (t), last_body);
8617 TREE_VALUE (t) = null_pointer_node;
8619 else
8621 if (last_bind)
8623 append_to_statement_list (label, last_body);
8624 gimplify_and_add (last_bind, pre_p);
8625 last_bind = NULL_TREE;
8627 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8629 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8630 NULL, is_gimple_val, fb_rvalue);
8631 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8633 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8634 return;
8635 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8636 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8637 return;
8638 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8641 if (last_bind)
8643 append_to_statement_list (label, last_body);
8644 gimplify_and_add (last_bind, pre_p);
8646 return;
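/* E.g. (a sketch) a clause such as

     #pragma omp task affinity (iterator (i = 0 : n : 1) : a[i])

   is expanded by the code above into approximately

     i = begin;
     if ((step > 0) ? i <= end : i > end)
       a[i];   // the locator is evaluated, its value discarded

   (begin/end/step taken from the front end's iterator lowering), wrapped
   in the BIND_EXPR/label scaffolding, and the clause decl is then
   replaced by a null pointer so later processing ignores it.  */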
8649 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8650 lower all the depend clauses by populating the corresponding depend
8651 array. Returns 0 if there are no such depend clauses, 2 if all
8652 depend clauses should be removed, and 1 otherwise. */
8654 static int
8655 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8657 tree c;
8658 gimple *g;
8659 size_t n[5] = { 0, 0, 0, 0, 0 };
8660 bool unused[5];
8661 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8662 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8663 size_t i, j;
8664 location_t first_loc = UNKNOWN_LOCATION;
8666 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8667 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8669 switch (OMP_CLAUSE_DEPEND_KIND (c))
8671 case OMP_CLAUSE_DEPEND_IN:
8672 i = 2;
8673 break;
8674 case OMP_CLAUSE_DEPEND_OUT:
8675 case OMP_CLAUSE_DEPEND_INOUT:
8676 i = 0;
8677 break;
8678 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8679 i = 1;
8680 break;
8681 case OMP_CLAUSE_DEPEND_DEPOBJ:
8682 i = 3;
8683 break;
8684 case OMP_CLAUSE_DEPEND_INOUTSET:
8685 i = 4;
8686 break;
8687 default:
8688 gcc_unreachable ();
8690 tree t = OMP_CLAUSE_DECL (c);
8691 if (first_loc == UNKNOWN_LOCATION)
8692 first_loc = OMP_CLAUSE_LOCATION (c);
8693 if (TREE_CODE (t) == TREE_LIST
8694 && TREE_PURPOSE (t)
8695 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8697 if (TREE_PURPOSE (t) != last_iter)
8699 tree tcnt = size_one_node;
8700 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8702 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8703 is_gimple_val, fb_rvalue) == GS_ERROR
8704 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8705 is_gimple_val, fb_rvalue) == GS_ERROR
8706 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8707 is_gimple_val, fb_rvalue) == GS_ERROR
8708 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8709 is_gimple_val, fb_rvalue)
8710 == GS_ERROR))
8711 return 2;
8712 tree var = TREE_VEC_ELT (it, 0);
8713 tree begin = TREE_VEC_ELT (it, 1);
8714 tree end = TREE_VEC_ELT (it, 2);
8715 tree step = TREE_VEC_ELT (it, 3);
8716 tree orig_step = TREE_VEC_ELT (it, 4);
8717 tree type = TREE_TYPE (var);
8718 tree stype = TREE_TYPE (step);
8719 location_t loc = DECL_SOURCE_LOCATION (var);
8720 tree endmbegin;
8721 /* Compute count for this iterator as
8722 orig_step > 0
8723 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8724 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8725 and compute product of those for the entire depend
8726 clause. */
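/* E.g. begin=0, end=10, step=3 with orig_step > 0 gives
   (10 - 0 + (3 - 1)) / 3 = 4 iterations: 0, 3, 6, 9.  */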
8727 if (POINTER_TYPE_P (type))
8728 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8729 stype, end, begin);
8730 else
8731 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8732 end, begin);
8733 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8734 step,
8735 build_int_cst (stype, 1));
8736 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8737 build_int_cst (stype, 1));
8738 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8739 unshare_expr (endmbegin),
8740 stepm1);
8741 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8742 pos, step);
8743 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8744 endmbegin, stepp1);
8745 if (TYPE_UNSIGNED (stype))
8747 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8748 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8750 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8751 neg, step);
8752 step = NULL_TREE;
8753 tree cond = fold_build2_loc (loc, LT_EXPR,
8754 boolean_type_node,
8755 begin, end);
8756 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8757 build_int_cst (stype, 0));
8758 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8759 end, begin);
8760 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8761 build_int_cst (stype, 0));
8762 tree osteptype = TREE_TYPE (orig_step);
8763 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8764 orig_step,
8765 build_int_cst (osteptype, 0));
8766 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8767 cond, pos, neg);
8768 cnt = fold_convert_loc (loc, sizetype, cnt);
8769 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8770 fb_rvalue) == GS_ERROR)
8771 return 2;
8772 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8774 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8775 fb_rvalue) == GS_ERROR)
8776 return 2;
8777 last_iter = TREE_PURPOSE (t);
8778 last_count = tcnt;
8780 if (counts[i] == NULL_TREE)
8781 counts[i] = last_count;
8782 else
8783 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8784 PLUS_EXPR, counts[i], last_count);
8786 else
8787 n[i]++;
8789 for (i = 0; i < 5; i++)
8790 if (counts[i])
8791 break;
8792 if (i == 5)
8793 return 0;
8795 tree total = size_zero_node;
8796 for (i = 0; i < 5; i++)
8798 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8799 if (counts[i] == NULL_TREE)
8800 counts[i] = size_zero_node;
8801 if (n[i])
8802 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8803 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8804 fb_rvalue) == GS_ERROR)
8805 return 2;
8806 total = size_binop (PLUS_EXPR, total, counts[i]);
8809 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8810 == GS_ERROR)
8811 return 2;
8812 bool is_old = unused[1] && unused[3] && unused[4];
8813 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8814 size_int (is_old ? 1 : 4));
8815 if (!unused[4])
8816 totalpx = size_binop (PLUS_EXPR, totalpx,
8817 size_binop (MULT_EXPR, counts[4], size_int (2)));
8818 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8819 tree array = create_tmp_var_raw (type);
8820 TREE_ADDRESSABLE (array) = 1;
8821 if (!poly_int_tree_p (totalpx))
8823 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8824 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8825 if (gimplify_omp_ctxp)
8827 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8828 while (ctx
8829 && (ctx->region_type == ORT_WORKSHARE
8830 || ctx->region_type == ORT_TASKGROUP
8831 || ctx->region_type == ORT_SIMD
8832 || ctx->region_type == ORT_ACC))
8833 ctx = ctx->outer_context;
8834 if (ctx)
8835 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8837 gimplify_vla_decl (array, pre_p);
8839 else
8840 gimple_add_tmp_var (array);
8841 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8842 NULL_TREE);
8843 tree tem;
8844 if (!is_old)
8846 tem = build2 (MODIFY_EXPR, void_type_node, r,
8847 build_int_cst (ptr_type_node, 0));
8848 gimplify_and_add (tem, pre_p);
8849 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8850 NULL_TREE);
8852 tem = build2 (MODIFY_EXPR, void_type_node, r,
8853 fold_convert (ptr_type_node, total));
8854 gimplify_and_add (tem, pre_p);
8855 for (i = 1; i < (is_old ? 2 : 4); i++)
8857 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8858 NULL_TREE, NULL_TREE);
8859 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8860 gimplify_and_add (tem, pre_p);
8863 tree cnts[6];
8864 for (j = 5; j; j--)
8865 if (!unused[j - 1])
8866 break;
8867 for (i = 0; i < 5; i++)
8869 if (i && (i >= j || unused[i - 1]))
8871 cnts[i] = cnts[i - 1];
8872 continue;
8874 cnts[i] = create_tmp_var (sizetype);
8875 if (i == 0)
8876 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8877 else
8879 tree t;
8880 if (is_old)
8881 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8882 else
8883 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8884 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8885 == GS_ERROR)
8886 return 2;
8887 g = gimple_build_assign (cnts[i], t);
8889 gimple_seq_add_stmt (pre_p, g);
8891 if (unused[4])
8892 cnts[5] = NULL_TREE;
8893 else
8895 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8896 cnts[5] = create_tmp_var (sizetype);
8897 g = gimple_build_assign (cnts[5], t);
8898 gimple_seq_add_stmt (pre_p, g);
8901 last_iter = NULL_TREE;
8902 tree last_bind = NULL_TREE;
8903 tree *last_body = NULL;
8904 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8905 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8907 switch (OMP_CLAUSE_DEPEND_KIND (c))
8909 case OMP_CLAUSE_DEPEND_IN:
8910 i = 2;
8911 break;
8912 case OMP_CLAUSE_DEPEND_OUT:
8913 case OMP_CLAUSE_DEPEND_INOUT:
8914 i = 0;
8915 break;
8916 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8917 i = 1;
8918 break;
8919 case OMP_CLAUSE_DEPEND_DEPOBJ:
8920 i = 3;
8921 break;
8922 case OMP_CLAUSE_DEPEND_INOUTSET:
8923 i = 4;
8924 break;
8925 default:
8926 gcc_unreachable ();
8928 tree t = OMP_CLAUSE_DECL (c);
8929 if (TREE_CODE (t) == TREE_LIST
8930 && TREE_PURPOSE (t)
8931 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8933 if (TREE_PURPOSE (t) != last_iter)
8935 if (last_bind)
8936 gimplify_and_add (last_bind, pre_p);
8937 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8938 last_bind = build3 (BIND_EXPR, void_type_node,
8939 BLOCK_VARS (block), NULL, block);
8940 TREE_SIDE_EFFECTS (last_bind) = 1;
8941 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8942 tree *p = &BIND_EXPR_BODY (last_bind);
8943 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8945 tree var = TREE_VEC_ELT (it, 0);
8946 tree begin = TREE_VEC_ELT (it, 1);
8947 tree end = TREE_VEC_ELT (it, 2);
8948 tree step = TREE_VEC_ELT (it, 3);
8949 tree orig_step = TREE_VEC_ELT (it, 4);
8950 tree type = TREE_TYPE (var);
8951 location_t loc = DECL_SOURCE_LOCATION (var);
8952 /* Emit:
8953 var = begin;
8954 goto cond_label;
8955 beg_label:
8956 ...
8957 var = var + step;
8958 cond_label:
8959 if (orig_step > 0) {
8960 if (var < end) goto beg_label;
8961 } else {
8962 if (var > end) goto beg_label;
8963 }
8964 for each iterator, with inner iterators added to
8965 the ... above. */
8966 tree beg_label = create_artificial_label (loc);
8967 tree cond_label = NULL_TREE;
8968 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8969 var, begin);
8970 append_to_statement_list_force (tem, p);
8971 tem = build_and_jump (&cond_label);
8972 append_to_statement_list_force (tem, p);
8973 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8974 append_to_statement_list (tem, p);
8975 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8976 NULL_TREE, NULL_TREE);
8977 TREE_SIDE_EFFECTS (bind) = 1;
8978 SET_EXPR_LOCATION (bind, loc);
8979 append_to_statement_list_force (bind, p);
8980 if (POINTER_TYPE_P (type))
8981 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8982 var, fold_convert_loc (loc, sizetype,
8983 step));
8984 else
8985 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8986 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8987 var, tem);
8988 append_to_statement_list_force (tem, p);
8989 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8990 append_to_statement_list (tem, p);
8991 tree cond = fold_build2_loc (loc, LT_EXPR,
8992 boolean_type_node,
8993 var, end);
8994 tree pos
8995 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8996 cond, build_and_jump (&beg_label),
8997 void_node);
8998 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8999 var, end);
9000 tree neg
9001 = fold_build3_loc (loc, COND_EXPR, void_type_node,
9002 cond, build_and_jump (&beg_label),
9003 void_node);
9004 tree osteptype = TREE_TYPE (orig_step);
9005 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
9006 orig_step,
9007 build_int_cst (osteptype, 0));
9008 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
9009 cond, pos, neg);
9010 append_to_statement_list_force (tem, p);
9011 p = &BIND_EXPR_BODY (bind);
9013 last_body = p;
9015 last_iter = TREE_PURPOSE (t);
9016 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
9018 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
9019 0), last_body);
9020 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
9022 if (error_operand_p (TREE_VALUE (t)))
9023 return 2;
9024 if (TREE_VALUE (t) != null_pointer_node)
9025 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
9026 if (i == 4)
9028 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9029 NULL_TREE, NULL_TREE);
9030 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9031 NULL_TREE, NULL_TREE);
9032 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9033 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9034 void_type_node, r, r2);
9035 append_to_statement_list_force (tem, last_body);
9036 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9037 void_type_node, cnts[i],
9038 size_binop (PLUS_EXPR, cnts[i],
9039 size_int (1)));
9040 append_to_statement_list_force (tem, last_body);
9041 i = 5;
9043 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9044 NULL_TREE, NULL_TREE);
9045 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9046 void_type_node, r, TREE_VALUE (t));
9047 append_to_statement_list_force (tem, last_body);
9048 if (i == 5)
9050 r = build4 (ARRAY_REF, ptr_type_node, array,
9051 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9052 NULL_TREE, NULL_TREE);
9053 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9054 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9055 void_type_node, r, tem);
9056 append_to_statement_list_force (tem, last_body);
9058 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9059 void_type_node, cnts[i],
9060 size_binop (PLUS_EXPR, cnts[i],
9061 size_int (1 + (i == 5))));
9062 append_to_statement_list_force (tem, last_body);
9063 TREE_VALUE (t) = null_pointer_node;
9065 else
9067 if (last_bind)
9069 gimplify_and_add (last_bind, pre_p);
9070 last_bind = NULL_TREE;
9072 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9074 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9075 NULL, is_gimple_val, fb_rvalue);
9076 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9078 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9079 return 2;
9080 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
9081 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9082 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9083 is_gimple_val, fb_rvalue) == GS_ERROR)
9084 return 2;
9085 if (i == 4)
9087 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9088 NULL_TREE, NULL_TREE);
9089 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9090 NULL_TREE, NULL_TREE);
9091 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9092 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
9093 gimplify_and_add (tem, pre_p);
9094 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
9095 cnts[i],
9096 size_int (1)));
9097 gimple_seq_add_stmt (pre_p, g);
9098 i = 5;
9100 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9101 NULL_TREE, NULL_TREE);
9102 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
9103 gimplify_and_add (tem, pre_p);
9104 if (i == 5)
9106 r = build4 (ARRAY_REF, ptr_type_node, array,
9107 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9108 NULL_TREE, NULL_TREE);
9109 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9110 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
9111 gimplify_and_add (tem, pre_p);
9114 g = gimple_build_assign (cnts[i],
9115 size_binop (PLUS_EXPR, cnts[i],
9116 size_int (1 + (i == 5))));
9117 gimple_seq_add_stmt (pre_p, g);
9120 if (last_bind)
9121 gimplify_and_add (last_bind, pre_p);
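/* Sanity check: compare the number of addresses actually stored against
   the totals computed earlier, and trap at run time on a mismatch (see
   the __builtin_trap call built below).  */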
9122 tree cond = boolean_false_node;
9123 if (is_old)
9125 if (!unused[0])
9126 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
9127 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
9128 size_int (2)));
9129 if (!unused[2])
9130 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9131 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9132 cnts[2],
9133 size_binop_loc (first_loc, PLUS_EXPR,
9134 totalpx,
9135 size_int (1))));
9137 else
9139 tree prev = size_int (5);
9140 for (i = 0; i < 5; i++)
9142 if (unused[i])
9143 continue;
9144 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
9145 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9146 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9147 cnts[i], unshare_expr (prev)));
9150 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
9151 build_call_expr_loc (first_loc,
9152 builtin_decl_explicit (BUILT_IN_TRAP),
9153 0), void_node);
9154 gimplify_and_add (tem, pre_p);
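/* Hang the address of the synthesized dependency array off an artificial
   OMP_CLAUSE_DEPEND_LAST clause at the head of the clause list, so that
   later processing of the directive can find the array.  */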
9155 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9156 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
9157 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9158 OMP_CLAUSE_CHAIN (c) = *list_p;
9159 *list_p = c;
9160 return 1;
9163 /* True if mapping node C maps, or unmaps, a (Fortran) array descriptor. */
9165 static bool
9166 omp_map_clause_descriptor_p (tree c)
9168 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
9169 return false;
9171 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
9172 return true;
9174 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
9175 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE)
9176 && OMP_CLAUSE_RELEASE_DESCRIPTOR (c))
9177 return true;
9179 return false;
9182 /* For a set of mappings describing an array section pointed to by a struct
9183 (or derived type, etc.) component, create an "alloc" or "release" node to
9184 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9185 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9186 be created that is inserted into the list of mapping nodes attached to the
9187 directive being processed -- not part of the sorted list of nodes after
9188 GOMP_MAP_STRUCT.
9190 CODE is the code of the directive being processed. GRP_START and GRP_END
9191 are the first and last of two or three nodes representing this array section
9192 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9193 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9194 filled with the additional node described above, if needed.
9196 This function does not add the new nodes to any lists itself. It is the
9197 responsibility of the caller to do that. */
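/* For instance (illustrative only): given a C mapping such as

     #pragma omp target enter data map(to: s.ptr[0:n])

   whose group is "GOMP_MAP_TO (s.ptr[0:n])" followed by
   "GOMP_MAP_ATTACH_DETACH (s.ptr)", this function returns a
   "GOMP_MAP_ALLOC (s.ptr)" node for insertion in the sorted list after
   the GOMP_MAP_STRUCT node for "s".  */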
9199 static tree
9200 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
9201 tree *extra_node)
9203 enum gomp_map_kind mkind
9204 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
9205 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
9207 gcc_assert (grp_start != grp_end);
9209 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9210 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9211 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
9212 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
9213 tree grp_mid = NULL_TREE;
9214 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
9215 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
9217 if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
9218 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
9219 else
9220 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
9222 if (grp_mid
9223 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9224 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER)
9226 tree c3
9227 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9228 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
9229 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
9230 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
9231 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
9233 *extra_node = c3;
9235 else
9236 *extra_node = NULL_TREE;
9238 return c2;
9241 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
9242 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of
9243 the access.  *VARIABLE_OFFSET is set to TRUE if the access involves an
9244 offset that is not a compile-time constant; such a variable component is
9245 not included in *POFFSETP. */
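/* For example (purely illustrative): for an access "s.a[3].b" with a
   constant index, the returned base is "s", *POFFSETP is the byte offset
   of "a[3].b" within it, and *VARIABLE_OFFSET is false.  For "s.a[i].b"
   the variable part of the offset is omitted from *POFFSETP and
   *VARIABLE_OFFSET is set to true instead.  */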
9248 static tree
9249 extract_base_bit_offset (tree base, poly_int64 *bitposp,
9250 poly_offset_int *poffsetp,
9251 bool *variable_offset)
9253 tree offset;
9254 poly_int64 bitsize, bitpos;
9255 machine_mode mode;
9256 int unsignedp, reversep, volatilep = 0;
9257 poly_offset_int poffset;
9259 STRIP_NOPS (base);
9261 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
9262 &unsignedp, &reversep, &volatilep);
9264 STRIP_NOPS (base);
9266 if (offset && poly_int_tree_p (offset))
9268 poffset = wi::to_poly_offset (offset);
9269 *variable_offset = false;
9271 else
9273 poffset = 0;
9274 *variable_offset = (offset != NULL_TREE);
9277 if (maybe_ne (bitpos, 0))
9278 poffset += bits_to_bytes_round_down (bitpos);
9280 *bitposp = bitpos;
9281 *poffsetp = poffset;
9283 return base;
9286 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
9287 started processing the group yet. The TEMPORARY mark is used when we first
9288 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9289 when we have processed all the group's children (i.e. all the base pointers
9290 referred to by the group's mapping nodes, recursively). */
9292 enum omp_tsort_mark {
9293 UNVISITED,
9294 TEMPORARY,
9295 PERMANENT
9298 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
9299 but ignores side effects in the equality comparisons. */
9301 struct tree_operand_hash_no_se : tree_operand_hash
9303 static inline bool equal (const value_type &,
9304 const compare_type &);
9307 inline bool
9308 tree_operand_hash_no_se::equal (const value_type &t1,
9309 const compare_type &t2)
9311 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9314 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9315 clause. */
9317 struct omp_mapping_group {
9318 tree *grp_start;
9319 tree grp_end;
9320 omp_tsort_mark mark;
9321 /* If we've removed the group but need to reindex, mark the group as
9322 deleted. */
9323 bool deleted;
9324 /* The group points to an already-created "GOMP_MAP_STRUCT
9325 GOMP_MAP_ATTACH_DETACH" pair. */
9326 bool reprocess_struct;
9327 /* The group should use "zero-length" allocations for pointers that are not
9328 mapped "to" on the same directive. */
9329 bool fragile;
9330 struct omp_mapping_group *sibling;
9331 struct omp_mapping_group *next;
9334 DEBUG_FUNCTION void
9335 debug_mapping_group (omp_mapping_group *grp)
9337 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9338 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9339 debug_generic_expr (*grp->grp_start);
9340 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9343 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9344 isn't one. */
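/* E.g. (illustrative): for "p->arr[3].f" the base pointer is "p", whereas
   for a plain "s.f" not accessed through a pointer there is no base
   pointer and NULL_TREE is returned.  */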
9346 static tree
9347 omp_get_base_pointer (tree expr)
9349 while (TREE_CODE (expr) == ARRAY_REF
9350 || TREE_CODE (expr) == COMPONENT_REF)
9351 expr = TREE_OPERAND (expr, 0);
9353 if (INDIRECT_REF_P (expr)
9354 || (TREE_CODE (expr) == MEM_REF
9355 && integer_zerop (TREE_OPERAND (expr, 1))))
9357 expr = TREE_OPERAND (expr, 0);
9358 while (TREE_CODE (expr) == COMPOUND_EXPR)
9359 expr = TREE_OPERAND (expr, 1);
9360 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9361 expr = TREE_OPERAND (expr, 0);
9362 if (TREE_CODE (expr) == SAVE_EXPR)
9363 expr = TREE_OPERAND (expr, 0);
9364 STRIP_NOPS (expr);
9365 return expr;
9368 return NULL_TREE;
9371 /* An attach or detach operation depends directly on the address being
9372 attached/detached. Return that address, or none if there are no
9373 attachments/detachments. */
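/* For instance (illustrative): for a group "GOMP_MAP_TO (p[0:10])"
   followed by "GOMP_MAP_ATTACH_DETACH (p)", the attachment address is
   "p", so a group mapping "p" itself must be ordered before this one.  */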
9375 static tree
9376 omp_get_attachment (omp_mapping_group *grp)
9378 tree node = *grp->grp_start;
9380 switch (OMP_CLAUSE_MAP_KIND (node))
9382 case GOMP_MAP_TO:
9383 case GOMP_MAP_FROM:
9384 case GOMP_MAP_TOFROM:
9385 case GOMP_MAP_ALWAYS_FROM:
9386 case GOMP_MAP_ALWAYS_TO:
9387 case GOMP_MAP_ALWAYS_TOFROM:
9388 case GOMP_MAP_FORCE_FROM:
9389 case GOMP_MAP_FORCE_TO:
9390 case GOMP_MAP_FORCE_TOFROM:
9391 case GOMP_MAP_FORCE_PRESENT:
9392 case GOMP_MAP_PRESENT_ALLOC:
9393 case GOMP_MAP_PRESENT_FROM:
9394 case GOMP_MAP_PRESENT_TO:
9395 case GOMP_MAP_PRESENT_TOFROM:
9396 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9397 case GOMP_MAP_ALWAYS_PRESENT_TO:
9398 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9399 case GOMP_MAP_ALLOC:
9400 case GOMP_MAP_RELEASE:
9401 case GOMP_MAP_DELETE:
9402 case GOMP_MAP_FORCE_ALLOC:
9403 if (node == grp->grp_end)
9404 return NULL_TREE;
9406 node = OMP_CLAUSE_CHAIN (node);
9407 if (node && omp_map_clause_descriptor_p (node))
9409 gcc_assert (node != grp->grp_end);
9410 node = OMP_CLAUSE_CHAIN (node);
9412 if (node)
9413 switch (OMP_CLAUSE_MAP_KIND (node))
9415 case GOMP_MAP_POINTER:
9416 case GOMP_MAP_ALWAYS_POINTER:
9417 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9418 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9419 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9420 return NULL_TREE;
9422 case GOMP_MAP_ATTACH_DETACH:
9423 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9424 case GOMP_MAP_DETACH:
9425 return OMP_CLAUSE_DECL (node);
9427 default:
9428 internal_error ("unexpected mapping node");
9430 return error_mark_node;
9432 case GOMP_MAP_TO_PSET:
9433 gcc_assert (node != grp->grp_end);
9434 node = OMP_CLAUSE_CHAIN (node);
9435 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9436 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9437 return OMP_CLAUSE_DECL (node);
9438 else
9439 internal_error ("unexpected mapping node");
9440 return error_mark_node;
9442 case GOMP_MAP_ATTACH:
9443 case GOMP_MAP_DETACH:
9444 node = OMP_CLAUSE_CHAIN (node);
9445 if (!node || *grp->grp_start == grp->grp_end)
9446 return OMP_CLAUSE_DECL (*grp->grp_start);
9447 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9448 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9449 return OMP_CLAUSE_DECL (*grp->grp_start);
9450 else
9451 internal_error ("unexpected mapping node");
9452 return error_mark_node;
9454 case GOMP_MAP_STRUCT:
9455 case GOMP_MAP_STRUCT_UNORD:
9456 case GOMP_MAP_FORCE_DEVICEPTR:
9457 case GOMP_MAP_DEVICE_RESIDENT:
9458 case GOMP_MAP_LINK:
9459 case GOMP_MAP_IF_PRESENT:
9460 case GOMP_MAP_FIRSTPRIVATE:
9461 case GOMP_MAP_FIRSTPRIVATE_INT:
9462 case GOMP_MAP_USE_DEVICE_PTR:
9463 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9464 return NULL_TREE;
9466 default:
9467 internal_error ("unexpected mapping node");
9470 return error_mark_node;
9473 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9474 mappings, return the chain pointer to the end of that group in the list. */
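/* E.g. (illustrative): in the clause list

     GOMP_MAP_TO (s.ptr[0:n]) -> GOMP_MAP_ATTACH_DETACH (s.ptr)
       -> GOMP_MAP_FROM (x) -> ...

   the first two nodes form one group, and the returned pointer points at
   the group's last node, i.e. through the chain of the GOMP_MAP_TO
   node.  */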
9476 static tree *
9477 omp_group_last (tree *start_p)
9479 tree c = *start_p, nc, *grp_last_p = start_p;
9481 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9483 nc = OMP_CLAUSE_CHAIN (c);
9485 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9486 return grp_last_p;
9488 switch (OMP_CLAUSE_MAP_KIND (c))
9490 default:
9491 while (nc
9492 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9493 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9494 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9495 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9496 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9497 || (OMP_CLAUSE_MAP_KIND (nc)
9498 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9499 || (OMP_CLAUSE_MAP_KIND (nc)
9500 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9501 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH
9502 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9503 || omp_map_clause_descriptor_p (nc)))
9505 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9506 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH)
9508 /* In the specific case we're doing "exit data" on an array
9509 slice of a reference-to-pointer struct component, we will see
9510 DETACH followed by ATTACH_DETACH here. We want to treat that
9511 as a single group. In other cases DETACH might represent a
9512 stand-alone "detach" clause, so we don't want to consider
9513 that part of the group. */
9514 if (nc2
9515 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9516 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH_DETACH)
9517 goto consume_two_nodes;
9518 else
9519 break;
9521 if (nc2
9522 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9523 && (OMP_CLAUSE_MAP_KIND (nc)
9524 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9525 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9527 consume_two_nodes:
9528 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9529 c = nc2;
9530 nc = OMP_CLAUSE_CHAIN (nc2);
9532 else
9534 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9535 c = nc;
9536 nc = nc2;
9539 break;
9541 case GOMP_MAP_ATTACH:
9542 case GOMP_MAP_DETACH:
9543 /* This is a weird artifact of how directives are parsed: bare attach or
9544 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9545 FIRSTPRIVATE_REFERENCE node. FIXME. */
9546 if (nc
9547 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9548 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9549 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9550 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9551 break;
9553 case GOMP_MAP_TO_PSET:
9554 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9555 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9556 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9557 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9558 break;
9560 case GOMP_MAP_STRUCT:
9561 case GOMP_MAP_STRUCT_UNORD:
9563 unsigned HOST_WIDE_INT num_mappings
9564 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9565 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9566 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9567 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9568 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9569 for (unsigned i = 0; i < num_mappings; i++)
9570 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9572 break;
9575 return grp_last_p;
9578 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9579 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9580 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9581 if we have at least one such group, else return NULL. */
9583 static void
9584 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9585 tree gather_sentinel)
9587 for (tree *cp = list_p;
9588 *cp && *cp != gather_sentinel;
9589 cp = &OMP_CLAUSE_CHAIN (*cp))
9591 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9592 continue;
9594 tree *grp_last_p = omp_group_last (cp);
9595 omp_mapping_group grp;
9597 grp.grp_start = cp;
9598 grp.grp_end = *grp_last_p;
9599 grp.mark = UNVISITED;
9600 grp.sibling = NULL;
9601 grp.deleted = false;
9602 grp.reprocess_struct = false;
9603 grp.fragile = false;
9604 grp.next = NULL;
9605 groups->safe_push (grp);
9607 cp = grp_last_p;
9611 static vec<omp_mapping_group> *
9612 omp_gather_mapping_groups (tree *list_p)
9614 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9616 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9618 if (groups->length () > 0)
9619 return groups;
9620 else
9622 delete groups;
9623 return NULL;
9627 /* A pointer mapping group GRP may define a block of memory starting at some
9628 base address, and maybe also define a firstprivate pointer or firstprivate
9629 reference that points to that block. The return value is a node containing
9630 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9631 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9632 return the number of consecutive chained nodes in CHAINED. */
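/* E.g. (illustrative): for "GOMP_MAP_TO (*p)" followed by
   "GOMP_MAP_FIRSTPRIVATE_POINTER (p)", the GOMP_MAP_TO node is returned
   and *FIRSTPRIVATE is set to "p".  For a GOMP_MAP_STRUCT group, *CHAINED
   is set to the number of component mappings following the struct
   node.  */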
9634 static tree
9635 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9636 tree *firstprivate)
9638 tree node = *grp->grp_start;
9640 *firstprivate = NULL_TREE;
9641 *chained = 1;
9643 switch (OMP_CLAUSE_MAP_KIND (node))
9645 case GOMP_MAP_TO:
9646 case GOMP_MAP_FROM:
9647 case GOMP_MAP_TOFROM:
9648 case GOMP_MAP_ALWAYS_FROM:
9649 case GOMP_MAP_ALWAYS_TO:
9650 case GOMP_MAP_ALWAYS_TOFROM:
9651 case GOMP_MAP_FORCE_FROM:
9652 case GOMP_MAP_FORCE_TO:
9653 case GOMP_MAP_FORCE_TOFROM:
9654 case GOMP_MAP_FORCE_PRESENT:
9655 case GOMP_MAP_PRESENT_ALLOC:
9656 case GOMP_MAP_PRESENT_FROM:
9657 case GOMP_MAP_PRESENT_TO:
9658 case GOMP_MAP_PRESENT_TOFROM:
9659 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9660 case GOMP_MAP_ALWAYS_PRESENT_TO:
9661 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9662 case GOMP_MAP_ALLOC:
9663 case GOMP_MAP_RELEASE:
9664 case GOMP_MAP_DELETE:
9665 case GOMP_MAP_FORCE_ALLOC:
9666 case GOMP_MAP_IF_PRESENT:
9667 if (node == grp->grp_end)
9668 return node;
9670 node = OMP_CLAUSE_CHAIN (node);
9671 if (!node)
9672 internal_error ("unexpected mapping node");
9673 if (omp_map_clause_descriptor_p (node))
9675 if (node == grp->grp_end)
9676 return *grp->grp_start;
9677 node = OMP_CLAUSE_CHAIN (node);
9679 switch (OMP_CLAUSE_MAP_KIND (node))
9681 case GOMP_MAP_POINTER:
9682 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9683 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9684 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9685 *firstprivate = OMP_CLAUSE_DECL (node);
9686 return *grp->grp_start;
9688 case GOMP_MAP_ALWAYS_POINTER:
9689 case GOMP_MAP_ATTACH_DETACH:
9690 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9691 case GOMP_MAP_DETACH:
9692 return *grp->grp_start;
9694 default:
9695 internal_error ("unexpected mapping node");
9697 return error_mark_node;
9699 case GOMP_MAP_TO_PSET:
9700 gcc_assert (node != grp->grp_end);
9701 node = OMP_CLAUSE_CHAIN (node);
9702 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9703 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9704 return NULL_TREE;
9705 else
9706 internal_error ("unexpected mapping node");
9707 return error_mark_node;
9709 case GOMP_MAP_ATTACH:
9710 case GOMP_MAP_DETACH:
9711 node = OMP_CLAUSE_CHAIN (node);
9712 if (!node || *grp->grp_start == grp->grp_end)
9713 return NULL_TREE;
9714 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9715 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9717 /* We're mapping the base pointer itself in a bare attach or detach
9718 node. This is a side effect of how parsing works, and the mapping
9719 will be removed anyway (at least for enter/exit data directives).
9720 We should ignore the mapping here. FIXME. */
9721 return NULL_TREE;
9723 else
9724 internal_error ("unexpected mapping node");
9725 return error_mark_node;
9727 case GOMP_MAP_STRUCT:
9728 case GOMP_MAP_STRUCT_UNORD:
9730 unsigned HOST_WIDE_INT num_mappings
9731 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9732 node = OMP_CLAUSE_CHAIN (node);
9733 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9734 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9736 *firstprivate = OMP_CLAUSE_DECL (node);
9737 node = OMP_CLAUSE_CHAIN (node);
9739 else if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH_DETACH)
9740 node = OMP_CLAUSE_CHAIN (node);
9741 *chained = num_mappings;
9742 return node;
9745 case GOMP_MAP_FORCE_DEVICEPTR:
9746 case GOMP_MAP_DEVICE_RESIDENT:
9747 case GOMP_MAP_LINK:
9748 case GOMP_MAP_FIRSTPRIVATE:
9749 case GOMP_MAP_FIRSTPRIVATE_INT:
9750 case GOMP_MAP_USE_DEVICE_PTR:
9751 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9752 return NULL_TREE;
9754 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9755 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9756 case GOMP_MAP_POINTER:
9757 case GOMP_MAP_ALWAYS_POINTER:
9758 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9759 /* These shouldn't appear by themselves. */
9760 if (!seen_error ())
9761 internal_error ("unexpected pointer mapping node");
9762 return error_mark_node;
9764 default:
9765 gcc_unreachable ();
9768 return error_mark_node;
9771 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9772 nodes by tree_operand_hash_no_se. */
9774 static void
9775 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9776 omp_mapping_group *> *grpmap,
9777 vec<omp_mapping_group> *groups,
9778 tree reindex_sentinel)
9780 omp_mapping_group *grp;
9781 unsigned int i;
9782 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9784 FOR_EACH_VEC_ELT (*groups, i, grp)
9786 if (reindexing && *grp->grp_start == reindex_sentinel)
9787 above_hwm = true;
9789 if (reindexing && !above_hwm)
9790 continue;
9792 if (grp->reprocess_struct)
9793 continue;
9795 tree fpp;
9796 unsigned int chained;
9797 tree node = omp_group_base (grp, &chained, &fpp);
9799 if (node == error_mark_node || (!node && !fpp))
9800 continue;
9802 for (unsigned j = 0;
9803 node && j < chained;
9804 node = OMP_CLAUSE_CHAIN (node), j++)
9806 tree decl = OMP_CLAUSE_DECL (node);
9807 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9808 meaning node-hash lookups don't work. This is a workaround for
9809 that, but ideally we should just create the INDIRECT_REF at
9810 source instead. FIXME. */
9811 if (TREE_CODE (decl) == MEM_REF
9812 && integer_zerop (TREE_OPERAND (decl, 1)))
9813 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9815 omp_mapping_group **prev = grpmap->get (decl);
9817 if (prev && *prev == grp)
9818 /* Empty. */;
9819 else if (prev)
9821 /* Mapping the same thing twice is normally diagnosed as an error,
9822 but can happen under some circumstances, e.g. in pr99928-16.c,
9823 the directive:
9825 #pragma omp target simd reduction(+:a[:3]) \
9826 map(always, tofrom: a[:6])
9829 will result in two "a[0]" mappings (of different sizes). */
9831 grp->sibling = (*prev)->sibling;
9832 (*prev)->sibling = grp;
9834 else
9835 grpmap->put (decl, grp);
9838 if (!fpp)
9839 continue;
9841 omp_mapping_group **prev = grpmap->get (fpp);
9842 if (prev && *prev != grp)
9844 grp->sibling = (*prev)->sibling;
9845 (*prev)->sibling = grp;
9847 else
9848 grpmap->put (fpp, grp);
9852 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9853 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9855 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9856 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9858 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9860 return grpmap;
9863 /* Rebuild group map from partially-processed clause list (during
9864 omp_build_struct_sibling_lists). We have already processed nodes up until
9865 a high-water mark (HWM). This is a bit tricky because the list is being
9866 reordered as it is scanned, but we know:
9868 1. The list after HWM has not been touched yet, so we can reindex it safely.
9870 2. The list before and including HWM has been altered, but remains
9871 well-formed throughout the sibling-list building operation.
9873 so, we can do the reindex operation in two parts, on the processed and
9874 then the unprocessed halves of the list. */
9876 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9877 omp_reindex_mapping_groups (tree *list_p,
9878 vec<omp_mapping_group> *groups,
9879 vec<omp_mapping_group> *processed_groups,
9880 tree sentinel)
9882 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9883 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9885 processed_groups->truncate (0);
9887 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9888 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9889 if (sentinel)
9890 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9892 return grpmap;
9895 /* Find the immediately-containing struct for a component ref (etc.)
9896 expression EXPR. */
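/* E.g. (illustrative): for "x.y.z" this returns "x.y"; an expression that
   is not an array or component access is returned unchanged.  */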
9898 static tree
9899 omp_containing_struct (tree expr)
9901 tree expr0 = expr;
9903 STRIP_NOPS (expr);
9905 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9906 component ref. */
9907 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9908 return expr0;
9910 while (TREE_CODE (expr) == ARRAY_REF)
9911 expr = TREE_OPERAND (expr, 0);
9913 if (TREE_CODE (expr) == COMPONENT_REF)
9914 expr = TREE_OPERAND (expr, 0);
9916 return expr;
9919 /* Return TRUE if DECL describes a component that is part of a whole structure
9920 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9921 that maps that structure, if present. */
9923 static bool
9924 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9925 omp_mapping_group *> *grpmap,
9926 tree decl,
9927 omp_mapping_group **mapped_by_group)
9929 tree wsdecl = NULL_TREE;
9931 *mapped_by_group = NULL;
9933 while (true)
9935 wsdecl = omp_containing_struct (decl);
9936 if (wsdecl == decl)
9937 break;
9938 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9939 if (!wholestruct
9940 && TREE_CODE (wsdecl) == MEM_REF
9941 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9943 tree deref = TREE_OPERAND (wsdecl, 0);
9944 deref = build_fold_indirect_ref (deref);
9945 wholestruct = grpmap->get (deref);
9947 if (wholestruct)
9949 *mapped_by_group = *wholestruct;
9950 return true;
9952 decl = wsdecl;
9955 return false;
9958 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9959 FALSE on error. */
9961 static bool
9962 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9963 vec<omp_mapping_group> *groups,
9964 hash_map<tree_operand_hash_no_se,
9965 omp_mapping_group *> *grpmap,
9966 omp_mapping_group *grp)
9968 if (grp->mark == PERMANENT)
9969 return true;
9970 if (grp->mark == TEMPORARY)
9972 fprintf (stderr, "when processing group:\n");
9973 debug_mapping_group (grp);
9974 internal_error ("base pointer cycle detected");
9975 return false;
9977 grp->mark = TEMPORARY;
9979 tree attaches_to = omp_get_attachment (grp);
9981 if (attaches_to)
9983 omp_mapping_group **basep = grpmap->get (attaches_to);
9985 if (basep && *basep != grp)
9987 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9988 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9989 return false;
9993 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9995 while (decl)
9997 tree base = omp_get_base_pointer (decl);
9999 if (!base)
10000 break;
10002 omp_mapping_group **innerp = grpmap->get (base);
10003 omp_mapping_group *wholestruct;
10005 /* We should treat whole-structure mappings as if all (pointer, in this
10006 case) members are mapped as individual list items. Check if we have
10007 such a whole-structure mapping, if we don't have an explicit reference
10008 to the pointer member itself. */
10009 if (!innerp
10010 && TREE_CODE (base) == COMPONENT_REF
10011 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
10012 innerp = &wholestruct;
10014 if (innerp && *innerp != grp)
10016 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
10017 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
10018 return false;
10019 break;
10022 decl = base;
10025 grp->mark = PERMANENT;
10027 /* Emit grp to output list. */
10029 **outlist = grp;
10030 *outlist = &grp->next;
10032 return true;
10035 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
10036 before mappings that use those pointers. This is an implementation of the
10037 depth-first search algorithm, described e.g. at:
10039 https://en.wikipedia.org/wiki/Topological_sorting
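/* For instance (illustrative): given "map(to: p, p[0:10])", the group
   mapping the pointer "p" itself is emitted before the group mapping the
   section "p[0:10]" that attaches to it.  */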
10042 static omp_mapping_group *
10043 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
10044 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
10045 *grpmap,
10046 bool enter_exit_data)
10048 omp_mapping_group *grp, *outlist = NULL, **cursor;
10049 unsigned int i;
10050 bool saw_runtime_implicit = false;
10052 cursor = &outlist;
10054 FOR_EACH_VEC_ELT (*groups, i, grp)
10056 if (grp->mark != PERMANENT)
10058 if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
10060 saw_runtime_implicit = true;
10061 continue;
10063 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
10064 return NULL;
10068 if (!saw_runtime_implicit)
10069 return outlist;
10071 FOR_EACH_VEC_ELT (*groups, i, grp)
10073 if (grp->mark != PERMANENT
10074 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
10076 /* Clear the flag for enter/exit data because it is currently
10077 meaningless for those operations in libgomp. */
10078 if (enter_exit_data)
10079 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start) = 0;
10081 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
10082 return NULL;
10086 return outlist;
10089 /* Split INLIST into three parts:
10091 - "present" alloc/to/from groups
10092 - other to/from groups
10093 - other alloc/release/delete groups
10095 These sub-lists are then concatenated together to form the final list.
10096 Each sub-list retains the order of the original list.
10097 Note that ATTACH nodes are later moved to the end of the list in
10098 gimplify_adjust_omp_clauses, for target regions. */
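/* E.g. (illustrative): an input ordering

     {tofrom: a} {present, to: b} {release: c} {to: d}

   is returned as

     {present, to: b} {tofrom: a} {to: d} {release: c}

   preserving the relative order within each class.  */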
10100 static omp_mapping_group *
10101 omp_segregate_mapping_groups (omp_mapping_group *inlist)
10103 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
10104 omp_mapping_group *p_groups = NULL;
10105 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
10106 omp_mapping_group **p_tail = &p_groups;
10108 for (omp_mapping_group *w = inlist; w;)
10110 tree c = *w->grp_start;
10111 omp_mapping_group *next = w->next;
10113 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
10115 switch (OMP_CLAUSE_MAP_KIND (c))
10117 case GOMP_MAP_ALLOC:
10118 case GOMP_MAP_RELEASE:
10119 case GOMP_MAP_DELETE:
10120 *ard_tail = w;
10121 w->next = NULL;
10122 ard_tail = &w->next;
10123 break;
10125 /* These map types are all semantically identical, so are moved into a
10126 single group. They will each be changed into GOMP_MAP_FORCE_PRESENT
10127 in gimplify_adjust_omp_clauses. */
10128 case GOMP_MAP_PRESENT_ALLOC:
10129 case GOMP_MAP_PRESENT_FROM:
10130 case GOMP_MAP_PRESENT_TO:
10131 case GOMP_MAP_PRESENT_TOFROM:
10132 *p_tail = w;
10133 w->next = NULL;
10134 p_tail = &w->next;
10135 break;
10137 default:
10138 *tf_tail = w;
10139 w->next = NULL;
10140 tf_tail = &w->next;
10143 w = next;
10146 /* Now splice the lists together... */
10147 *tf_tail = ard_groups;
10148 *p_tail = tf_groups;
10150 return p_groups;
10153 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10154 those groups based on the output list of omp_tsort_mapping_groups --
10155 singly-linked, threaded through each element's NEXT pointer starting at
10156 HEAD. Each list element appears exactly once in that linked list.
10158 Each element of GROUPS may correspond to one or several mapping nodes.
10159 Node groups are kept together, and in the reordered list, the positions of
10160 the original groups are reused for the positions of the reordered list.
10161 Hence if we have e.g.
10163 {to ptr ptr} firstprivate {tofrom ptr} ...
10164 ^ ^ ^
10165 first group non-"map" second group
10167 and say the second group contains a base pointer for the first so must be
10168 moved before it, the resulting list will contain:
10170 {tofrom ptr} firstprivate {to ptr ptr} ...
10171 ^ prev. second group ^ prev. first group
10174 static tree *
10175 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
10176 omp_mapping_group *head,
10177 tree *list_p)
10179 omp_mapping_group *grp;
10180 unsigned int i;
10181 unsigned numgroups = groups->length ();
10182 auto_vec<tree> old_heads (numgroups);
10183 auto_vec<tree *> old_headps (numgroups);
10184 auto_vec<tree> new_heads (numgroups);
10185 auto_vec<tree> old_succs (numgroups);
10186 bool map_at_start = (list_p == (*groups)[0].grp_start);
10188 tree *new_grp_tail = NULL;
10190 /* Stash the start & end nodes of each mapping group before we start
10191 modifying the list. */
10192 FOR_EACH_VEC_ELT (*groups, i, grp)
10194 old_headps.quick_push (grp->grp_start);
10195 old_heads.quick_push (*grp->grp_start);
10196 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
10199 /* And similarly, the heads of the groups in the order we want to rearrange
10200 the list to. */
10201 for (omp_mapping_group *w = head; w; w = w->next)
10202 new_heads.quick_push (*w->grp_start);
10204 FOR_EACH_VEC_ELT (*groups, i, grp)
10206 gcc_assert (head);
10208 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
10210 /* a {b c d} {e f g} h i j (original)
10212 a {k l m} {e f g} h i j (inserted new group on last iter)
10214 a {k l m} {n o p} h i j (this time, chain last group to new one)
10215 ^new_grp_tail
10217 *new_grp_tail = new_heads[i];
10219 else if (new_grp_tail)
10221 /* a {b c d} e {f g h} i j k (original)
10223 a {l m n} e {f g h} i j k (gap after last iter's group)
10225 a {l m n} e {o p q} h i j (chain last group to old successor)
10226 ^new_grp_tail
10228 *new_grp_tail = old_succs[i - 1];
10229 *old_headps[i] = new_heads[i];
10231 else
10233 /* The first inserted group -- point to new group, and leave end
10234 open.
10235 a {b c d} e f
10237 a {g h i...
10239 *grp->grp_start = new_heads[i];
10242 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
10244 head = head->next;
10247 if (new_grp_tail)
10248 *new_grp_tail = old_succs[numgroups - 1];
10250 gcc_assert (!head);
10252 return map_at_start ? (*groups)[0].grp_start : list_p;
10255 /* DECL is supposed to have lastprivate semantics in the outer contexts
10256 of combined/composite constructs, starting with OCTX.
10257 Add needed lastprivate, shared or map clause if no data sharing or
10258 mapping clause are present. IMPLICIT_P is true if it is an implicit
10259 clause (IV on simd), in which case the lastprivate will not be
10260 copied to some constructs. */
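/* E.g. (illustrative): for "#pragma omp target teams distribute parallel
   for lastprivate(x)", "x" is registered as shared on the parallel and
   teams constructs and as mapped on the target construct, unless clauses
   for it are already present there.  */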
10262 static void
10263 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
10264 tree decl, bool implicit_p)
10266 struct gimplify_omp_ctx *orig_octx = octx;
10267 for (; octx; octx = octx->outer_context)
10269 if ((octx->region_type == ORT_COMBINED_PARALLEL
10270 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
10271 && splay_tree_lookup (octx->variables,
10272 (splay_tree_key) decl) == NULL)
10274 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
10275 continue;
10277 if ((octx->region_type & ORT_TASK) != 0
10278 && octx->combined_loop
10279 && splay_tree_lookup (octx->variables,
10280 (splay_tree_key) decl) == NULL)
10282 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10283 continue;
10285 if (implicit_p
10286 && octx->region_type == ORT_WORKSHARE
10287 && octx->combined_loop
10288 && splay_tree_lookup (octx->variables,
10289 (splay_tree_key) decl) == NULL
10290 && octx->outer_context
10291 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
10292 && splay_tree_lookup (octx->outer_context->variables,
10293 (splay_tree_key) decl) == NULL)
10295 octx = octx->outer_context;
10296 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10297 continue;
10299 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
10300 && octx->combined_loop
10301 && splay_tree_lookup (octx->variables,
10302 (splay_tree_key) decl) == NULL
10303 && !omp_check_private (octx, decl, false))
10305 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10306 continue;
10308 if (octx->region_type == ORT_COMBINED_TARGET)
10310 splay_tree_node n = splay_tree_lookup (octx->variables,
10311 (splay_tree_key) decl);
10312 if (n == NULL)
10314 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10315 octx = octx->outer_context;
10317 else if (!implicit_p
10318 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
10320 n->value &= ~(GOVD_FIRSTPRIVATE
10321 | GOVD_FIRSTPRIVATE_IMPLICIT
10322 | GOVD_EXPLICIT);
10323 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10324 octx = octx->outer_context;
10327 break;
10329 if (octx && (implicit_p || octx != orig_octx))
10330 omp_notice_variable (octx, decl, true);
10333 /* We might have indexed several groups for DECL, e.g. a "TO" mapping and also
10334 a "FIRSTPRIVATE" mapping. Return the one that isn't firstprivate, etc. */
10336 static omp_mapping_group *
10337 omp_get_nonfirstprivate_group (hash_map<tree_operand_hash_no_se,
10338 omp_mapping_group *> *grpmap,
10339 tree decl, bool allow_deleted = false)
10341 omp_mapping_group **to_group_p = grpmap->get (decl);
10343 if (!to_group_p)
10344 return NULL;
10346 omp_mapping_group *to_group = *to_group_p;
10348 for (; to_group; to_group = to_group->sibling)
10350 tree grp_end = to_group->grp_end;
10351 switch (OMP_CLAUSE_MAP_KIND (grp_end))
10353 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10354 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10355 break;
10357 default:
10358 if (allow_deleted || !to_group->deleted)
10359 return to_group;
10363 return NULL;
10366 /* Return TRUE if the directive (whose clauses are described by the hash table
10367 of mapping groups, GRPMAP) maps DECL explicitly. If TO_SPECIFICALLY is
10368 true, only count TO mappings. If ALLOW_DELETED is true, ignore the
10369 "deleted" flag for groups. If CONTAINED_IN_STRUCT is true, also return
10370 TRUE if DECL is mapped as a member of a whole-struct mapping. */
10372 static bool
10373 omp_directive_maps_explicitly (hash_map<tree_operand_hash_no_se,
10374 omp_mapping_group *> *grpmap,
10375 tree decl, omp_mapping_group **base_group,
10376 bool to_specifically, bool allow_deleted,
10377 bool contained_in_struct)
10379 omp_mapping_group *decl_group
10380 = omp_get_nonfirstprivate_group (grpmap, decl, allow_deleted);
10382 *base_group = NULL;
10384 if (decl_group)
10386 tree grp_first = *decl_group->grp_start;
10387 /* We might be called during omp_build_struct_sibling_lists, when
10388 GOMP_MAP_STRUCT might have been inserted at the start of the group.
10389 Skip over that, and also possibly the node after it. */
10390 if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
10391 || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
10393 grp_first = OMP_CLAUSE_CHAIN (grp_first);
10394 if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
10395 || (OMP_CLAUSE_MAP_KIND (grp_first)
10396 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10397 || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_ATTACH_DETACH)
10398 grp_first = OMP_CLAUSE_CHAIN (grp_first);
10400 enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
10401 if (!to_specifically
10402 || GOMP_MAP_COPY_TO_P (first_kind)
10403 || first_kind == GOMP_MAP_ALLOC)
10405 *base_group = decl_group;
10406 return true;
10410 if (contained_in_struct
10411 && omp_mapped_by_containing_struct (grpmap, decl, base_group))
10412 return true;
10414 return false;
10417 /* If we have mappings INNER and OUTER, where INNER is a component access and
10418 OUTER is a mapping of the whole containing struct, check that the mappings
10419 are compatible. We'll be deleting the inner mapping, so we need to make
10420 sure the outer mapping does (at least) the same transfers to/from the device
10421 as the inner mapping. */
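/* E.g. (illustrative): an inner "map(to: s.a)" is compatible with an
   outer "map(tofrom: s)", but an outer "map(from: s)" does not perform
   the "to" transfer the inner mapping requests, so an error is
   reported.  */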
10423 bool
10424 omp_check_mapping_compatibility (location_t loc,
10425 omp_mapping_group *outer,
10426 omp_mapping_group *inner)
10428 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
10430 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
10431 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
10433 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
10434 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
10436 if (outer_kind == inner_kind)
10437 return true;
10439 switch (outer_kind)
10441 case GOMP_MAP_ALWAYS_TO:
10442 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10443 || inner_kind == GOMP_MAP_ALLOC
10444 || inner_kind == GOMP_MAP_TO)
10445 return true;
10446 break;
10448 case GOMP_MAP_ALWAYS_FROM:
10449 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10450 || inner_kind == GOMP_MAP_RELEASE
10451 || inner_kind == GOMP_MAP_FROM)
10452 return true;
10453 break;
10455 case GOMP_MAP_TO:
10456 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10457 || inner_kind == GOMP_MAP_ALLOC)
10458 return true;
10459 break;
10461 case GOMP_MAP_FROM:
10462 if (inner_kind == GOMP_MAP_RELEASE
10463 || inner_kind == GOMP_MAP_FORCE_PRESENT)
10464 return true;
10465 break;
10467 case GOMP_MAP_ALWAYS_TOFROM:
10468 case GOMP_MAP_TOFROM:
10469 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10470 || inner_kind == GOMP_MAP_ALLOC
10471 || inner_kind == GOMP_MAP_TO
10472 || inner_kind == GOMP_MAP_FROM
10473 || inner_kind == GOMP_MAP_TOFROM)
10474 return true;
10475 break;
10477 default:
10481 error_at (loc, "data movement for component %qE is not compatible with "
10482 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10483 OMP_CLAUSE_DECL (first_outer));
10485 return false;
10488 /* This function handles several cases where clauses on a mapping directive
10489 can interact with each other.
10491 If we have a FIRSTPRIVATE_POINTER node and we're also mapping the pointer
10492 on the same directive, change the mapping of the first node to
10493 ATTACH_DETACH. We should have detected that this will happen already in
10494 c-omp.cc:c_omp_adjust_map_clauses and marked the appropriate decl
10495 as addressable. (If we didn't, bail out.)
10497 If we have a FIRSTPRIVATE_REFERENCE (for a reference to pointer) and we're
10498 mapping the base pointer also, we may need to change the mapping type to
10499 ATTACH_DETACH and synthesize an alloc node for the reference itself.
10501 If we have an ATTACH_DETACH node, this is an array section with a pointer
10502 base. If we're mapping the base on the same directive too, we can drop its
10503 mapping. However, if we have a reference to pointer, make other appropriate
10504 adjustments to the mapping nodes instead.
10506 If we have an ATTACH_DETACH node with a Fortran pointer-set (array
10507 descriptor) mapping for a derived-type component, and we're also mapping the
10508 whole of the derived-type variable on another clause, the pointer-set
10509 mapping is removed.
10511 If we have a component access but we're also mapping the whole of the
10512 containing struct, drop the former access.
10514 If the expression is a component access, and we're also mapping a base
10515 pointer used in that component access in the same expression, change the
10516 mapping type of the latter to ALLOC (ready for processing by
10517 omp_build_struct_sibling_lists). */
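/* For instance (illustrative): with "map(to: s.ptr, s.ptr[0:n])", the
   array section group for "s.ptr[0:n]" ends in a GOMP_MAP_ATTACH_DETACH
   node whose base pointer "s.ptr" is mapped on the same directive, so the
   separate mapping of "s.ptr" can be dropped.  */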
10519 void
10520 omp_resolve_clause_dependencies (enum tree_code code,
10521 vec<omp_mapping_group> *groups,
10522 hash_map<tree_operand_hash_no_se,
10523 omp_mapping_group *> *grpmap)
10525 int i;
10526 omp_mapping_group *grp;
10527 bool repair_chain = false;
10529 FOR_EACH_VEC_ELT (*groups, i, grp)
10531 tree grp_end = grp->grp_end;
10532 tree decl = OMP_CLAUSE_DECL (grp_end);
10534 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10536 switch (OMP_CLAUSE_MAP_KIND (grp_end))
10538 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10540 omp_mapping_group *to_group
10541 = omp_get_nonfirstprivate_group (grpmap, decl);
10543 if (!to_group || to_group == grp)
10544 continue;
10546 tree grp_first = *to_group->grp_start;
10547 enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
10549 if ((GOMP_MAP_COPY_TO_P (first_kind)
10550 || first_kind == GOMP_MAP_ALLOC)
10551 && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
10552 != GOMP_MAP_FIRSTPRIVATE_POINTER))
10554 gcc_assert (TREE_ADDRESSABLE (OMP_CLAUSE_DECL (grp_end)));
10555 OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
10558 break;
10560 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10562 tree ptr = build_fold_indirect_ref (decl);
10564 omp_mapping_group *to_group
10565 = omp_get_nonfirstprivate_group (grpmap, ptr);
10567 if (!to_group || to_group == grp)
10568 continue;
10570 tree grp_first = *to_group->grp_start;
10571 enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
10573 if (GOMP_MAP_COPY_TO_P (first_kind)
10574 || first_kind == GOMP_MAP_ALLOC)
10576 OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
10577 OMP_CLAUSE_DECL (grp_end) = ptr;
10578 if ((OMP_CLAUSE_CHAIN (*to_group->grp_start)
10579 == to_group->grp_end)
10580 && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
10581 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10583 gcc_assert (TREE_ADDRESSABLE
10584 (OMP_CLAUSE_DECL (to_group->grp_end)));
10585 OMP_CLAUSE_SET_MAP_KIND (to_group->grp_end,
10586 GOMP_MAP_ATTACH_DETACH);
10588 location_t loc = OMP_CLAUSE_LOCATION (to_group->grp_end);
10589 tree alloc
10590 = build_omp_clause (loc, OMP_CLAUSE_MAP);
10591 OMP_CLAUSE_SET_MAP_KIND (alloc, GOMP_MAP_ALLOC);
10592 tree tmp = build_fold_addr_expr (OMP_CLAUSE_DECL
10593 (to_group->grp_end));
10594 tree char_ptr_type = build_pointer_type (char_type_node);
10595 OMP_CLAUSE_DECL (alloc)
10596 = build2 (MEM_REF, char_type_node,
10597 tmp,
10598 build_int_cst (char_ptr_type, 0));
10599 OMP_CLAUSE_SIZE (alloc) = TYPE_SIZE_UNIT (TREE_TYPE (tmp));
10601 OMP_CLAUSE_CHAIN (alloc)
10602 = OMP_CLAUSE_CHAIN (*to_group->grp_start);
10603 OMP_CLAUSE_CHAIN (*to_group->grp_start) = alloc;
10607 break;
10609 case GOMP_MAP_ATTACH_DETACH:
10610 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
10612 tree base_ptr, referenced_ptr_node = NULL_TREE;
10614 while (TREE_CODE (decl) == ARRAY_REF)
10615 decl = TREE_OPERAND (decl, 0);
10617 if (TREE_CODE (decl) == INDIRECT_REF)
10618 decl = TREE_OPERAND (decl, 0);
10620 /* Only component accesses. */
10621 if (DECL_P (decl))
10622 continue;
10624 /* We want the pointer itself when checking if the base pointer is
10625 mapped elsewhere in the same directive -- if we have a
10626 reference to the pointer, don't use that. */
10628 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10629 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10631 referenced_ptr_node = OMP_CLAUSE_CHAIN (*grp->grp_start);
10632 base_ptr = OMP_CLAUSE_DECL (referenced_ptr_node);
10634 else
10635 base_ptr = decl;
10637 gomp_map_kind zlas_kind
10638 = (code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
10639 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;
10641 if (TREE_CODE (TREE_TYPE (base_ptr)) == POINTER_TYPE)
10643 /* If we map the base TO, and we're doing an attachment, we can
10644 skip the TO mapping altogether and create an ALLOC mapping
10645 instead, since the attachment will overwrite the device
10646 pointer in that location immediately anyway. Otherwise,
10647 change our mapping to
10648 GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION in case the
10649 attachment target has not been copied to the device already
10650 by some earlier directive. */
10652 bool base_mapped_to = false;
10654 omp_mapping_group *base_group;
10656 if (omp_directive_maps_explicitly (grpmap, base_ptr,
10657 &base_group, false, true,
10658 false))
10660 if (referenced_ptr_node)
10662 base_mapped_to = true;
10663 if ((OMP_CLAUSE_MAP_KIND (base_group->grp_end)
10664 == GOMP_MAP_ATTACH_DETACH)
10665 && (OMP_CLAUSE_CHAIN (*base_group->grp_start)
10666 == base_group->grp_end))
10668 OMP_CLAUSE_CHAIN (*base_group->grp_start)
10669 = OMP_CLAUSE_CHAIN (base_group->grp_end);
10670 base_group->grp_end = *base_group->grp_start;
10671 repair_chain = true;
10674 else
10676 base_group->deleted = true;
10677 OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end) = 1;
10681 /* We're dealing with a reference to a pointer, and we are
10682 attaching both the reference and the pointer. We know the
10683 reference itself is on the target, because we are going to
10684 create an ALLOC node for it in accumulate_sibling_list. The
10685 pointer might be on the target already or it might not, but
10686 if it isn't then it's not an error, so use
10687 GOMP_MAP_ATTACH_ZLAS for it. */
10688 if (!base_mapped_to && referenced_ptr_node)
10689 OMP_CLAUSE_SET_MAP_KIND (referenced_ptr_node, zlas_kind);
10691 omp_mapping_group *struct_group;
10692 tree desc;
10693 if ((desc = OMP_CLAUSE_CHAIN (*grp->grp_start))
10694 && omp_map_clause_descriptor_p (desc)
10695 && omp_mapped_by_containing_struct (grpmap, decl,
10696 &struct_group))
10697 /* If we have a pointer set but we're mapping (or unmapping)
10698 the whole of the containing struct, we can remove the
10699 pointer set mapping. */
10700 OMP_CLAUSE_CHAIN (*grp->grp_start) = OMP_CLAUSE_CHAIN (desc);
10702 else if (TREE_CODE (TREE_TYPE (base_ptr)) == REFERENCE_TYPE
10703 && (TREE_CODE (TREE_TYPE (TREE_TYPE (base_ptr)))
10704 == ARRAY_TYPE)
10705 && OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION
10706 (*grp->grp_start))
10707 OMP_CLAUSE_SET_MAP_KIND (grp->grp_end, zlas_kind);
10709 break;
10711 case GOMP_MAP_ATTACH:
10712 /* Ignore standalone attach here. */
10713 break;
10715 default:
10717 omp_mapping_group *struct_group;
10718 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10719 && *grp->grp_start == grp_end)
10721 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10722 struct_group, grp);
10723 /* Remove the whole of this mapping -- redundant. */
10724 grp->deleted = true;
10727 tree base = decl;
10728 while ((base = omp_get_base_pointer (base)))
10730 omp_mapping_group *base_group;
10732 if (omp_directive_maps_explicitly (grpmap, base, &base_group,
10733 true, true, false))
10735 tree grp_first = *base_group->grp_start;
10736 OMP_CLAUSE_SET_MAP_KIND (grp_first, GOMP_MAP_ALLOC);
10743 if (repair_chain)
10745 /* Group start pointers may have become detached from the
10746 OMP_CLAUSE_CHAIN of previous groups if elements were removed from the
10747 end of those groups. Fix that now. */
10748 tree *new_next = NULL;
10749 FOR_EACH_VEC_ELT (*groups, i, grp)
10751 if (new_next)
10752 grp->grp_start = new_next;
10754 new_next = &OMP_CLAUSE_CHAIN (grp->grp_end);
10759 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10760 clause dependencies we handle for now are struct element mappings and
10761 whole-struct mappings on the same directive, and duplicate clause
10762 detection. */
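/* E.g. (illustrative): "#pragma acc enter data copyin(s.a, s.a)" maps the
   component "s.a" twice and is diagnosed here.  */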
10764 void
10765 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10766 hash_map<tree_operand_hash_no_se,
10767 omp_mapping_group *> *grpmap)
10769 int i;
10770 omp_mapping_group *grp;
10771 hash_set<tree_operand_hash> *seen_components = NULL;
10772 hash_set<tree_operand_hash> *shown_error = NULL;
10774 FOR_EACH_VEC_ELT (*groups, i, grp)
10776 tree grp_end = grp->grp_end;
10777 tree decl = OMP_CLAUSE_DECL (grp_end);
10779 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10781 if (DECL_P (decl))
10782 continue;
10784 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10785 while (TREE_CODE (c) == ARRAY_REF)
10786 c = TREE_OPERAND (c, 0);
10787 if (TREE_CODE (c) != COMPONENT_REF)
10788 continue;
10789 if (!seen_components)
10790 seen_components = new hash_set<tree_operand_hash> ();
10791 if (!shown_error)
10792 shown_error = new hash_set<tree_operand_hash> ();
10793 if (seen_components->contains (c)
10794 && !shown_error->contains (c))
10796 error_at (OMP_CLAUSE_LOCATION (grp_end),
10797 "%qE appears more than once in map clauses",
10798 OMP_CLAUSE_DECL (grp_end));
10799 shown_error->add (c);
10801 else
10802 seen_components->add (c);
10804 omp_mapping_group *struct_group;
10805 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10806 && *grp->grp_start == grp_end)
10808 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10809 struct_group, grp);
10810 /* Remove the whole of this mapping -- redundant. */
10811 grp->deleted = true;
10815 if (seen_components)
10816 delete seen_components;
10817 if (shown_error)
10818 delete shown_error;
10821 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10822 is linked to the previous node pointed to by INSERT_AT. */
10824 static tree *
10825 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10827 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10828 *insert_at = newnode;
10829 return &OMP_CLAUSE_CHAIN (newnode);
10832 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10833 pointed to by chain MOVE_AFTER instead. */
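/* E.g. (illustrative): with the chain "A -> B -> C -> D", OLD_POS =
   &chain(A) (pointing at B) and MOVE_AFTER = &chain(C), the result is
   "A -> C -> B -> D".  */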
10835 static void
10836 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10838 gcc_assert (node == *old_pos);
10839 *old_pos = OMP_CLAUSE_CHAIN (node);
10840 OMP_CLAUSE_CHAIN (node) = *move_after;
10841 *move_after = node;
10844 /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
10845 LAST_NODE to after MOVE_AFTER chain. Similar to below function, but no
10846 new nodes are prepended to the list before splicing into the new position.
10847 Return the position we should continue scanning the list at, or NULL to
10848 stay where we were. */
10850 static tree *
10851 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10852 tree *move_after)
10854 if (first_ptr == move_after)
10855 return NULL;
10857 tree tmp = *first_ptr;
10858 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10859 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10860 *move_after = tmp;
10862 return first_ptr;
10865 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10866 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10867 pointer MOVE_AFTER.
10869 The latter list was previously part of the OMP clause list, and the former
10870 (prepended) part is comprised of new nodes.
10872 We start with a list of nodes starting with a struct mapping node. We
10873 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10874 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10875 the group of mapping nodes we are currently processing (from the chain
10876 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10877 we should continue processing from, or NULL to stay where we were.
10879 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10880 different) is worked through below. Here we are processing LAST_NODE, and
10881 FIRST_PTR points at the preceding mapping clause:
10883 #. mapping node chain
10884 ---------------------------------------------------
10885 A. struct_node [->B]
10886 B. comp_1 [->C]
10887 C. comp_2 [->D (move_after)]
10888 D. map_to_3 [->E]
10889 E. attach_3 [->F (first_ptr)]
10890 F. map_to_4 [->G (continue_at)]
10891 G. attach_4 (last_node) [->H]
10892 H. ...
10894 *last_new_tail = *first_ptr;
10896 I. new_node (first_new) [->F (last_new_tail)]
10898 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10900 #. mapping node chain
10901 ----------------------------------------------------
10902 A. struct_node [->B]
10903 B. comp_1 [->C]
10904 C. comp_2 [->D (move_after)]
10905 D. map_to_3 [->E]
10906 E. attach_3 [->H (first_ptr)]
10907 F. map_to_4 [->G (continue_at)]
10908 G. attach_4 (last_node) [->H]
10909 H. ...
10911 I. new_node (first_new) [->F (last_new_tail)]
10913 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10915 #. mapping node chain
10916 ---------------------------------------------------
10917 A. struct_node [->B]
10918 B. comp_1 [->C]
10919 C. comp_2 [->D (move_after)]
10920 D. map_to_3 [->E]
10921 E. attach_3 [->H (continue_at)]
10922 F. map_to_4 [->G]
10923 G. attach_4 (last_node) [->D]
10924 H. ...
10926 I. new_node (first_new) [->F (last_new_tail)]
10928 *move_after = first_new;
10930 #. mapping node chain
10931 ---------------------------------------------------
10932 A. struct_node [->B]
10933 B. comp_1 [->C]
10934 C. comp_2 [->I (move_after)]
10935 D. map_to_3 [->E]
10936 E. attach_3 [->H (continue_at)]
10937 F. map_to_4 [->G]
10938 G. attach_4 (last_node) [->D]
10939 H. ...
10940 I. new_node (first_new) [->F (last_new_tail)]
10942 or, in order:
10944 #. mapping node chain
10945 ---------------------------------------------------
10946 A. struct_node [->B]
10947 B. comp_1 [->C]
10948 C. comp_2 [->I (move_after)]
10949 I. new_node (first_new) [->F (last_new_tail)]
10950 F. map_to_4 [->G]
10951 G. attach_4 (last_node) [->D]
10952 D. map_to_3 [->E]
10953 E. attach_3 [->H (continue_at)]
10954 H. ...
10957 static tree *
10958 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10959 tree *first_ptr, tree last_node,
10960 tree *move_after)
10962 tree *continue_at = NULL;
10963 *last_new_tail = *first_ptr;
10964 if (first_ptr == move_after)
10965 *move_after = first_new;
10966 else
10968 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10969 continue_at = first_ptr;
10970 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10971 *move_after = first_new;
10973 return continue_at;
10976 static omp_addr_token *
10977 omp_first_chained_access_token (vec<omp_addr_token *> &addr_tokens)
10979 using namespace omp_addr_tokenizer;
10980 int idx = addr_tokens.length () - 1;
10981 gcc_assert (idx >= 0);
10982 if (addr_tokens[idx]->type != ACCESS_METHOD)
10983 return addr_tokens[idx];
10984 while (idx > 0 && addr_tokens[idx - 1]->type == ACCESS_METHOD)
10985 idx--;
10986 return addr_tokens[idx];
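/* Illustration (not part of the original source): for an address like
   "mystruct->a" the token vector might end with a run of ACCESS_METHOD
   tokens, e.g.

     [STRUCTURE_BASE, ACCESS_METHOD, ACCESS_METHOD]
                      ^-- returned

   i.e. we return the first token of the trailing ACCESS_METHOD run, or
   simply the last token if it is not an ACCESS_METHOD at all.  */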
10989 /* Mapping struct members causes an additional set of nodes to be created,
10990 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10991 number of members being mapped, in order of ascending position (by address
10992 or, for bitfields, bit position).
10994 We scan through the list of mapping clauses, calling this function for each
10995 struct member mapping we find, and build up the list of mappings after the
10996 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10997 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10998 moved into place in the sorted list.
11000 struct {
11001 int *a;
11002 int *b;
11003 int c;
11004 int *d;
11007 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
11008 struct.d[0:n])
11010 GOMP_MAP_STRUCT (4)
11011 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
11012 GOMP_MAP_ALLOC (struct.a)
11013 GOMP_MAP_ALLOC (struct.b)
11014 GOMP_MAP_TO (struct.c)
11015 GOMP_MAP_ALLOC (struct.d)
11018 In the case where we are mapping references to pointers, or in Fortran if
11019 we are mapping an array with a descriptor, additional nodes may also be
11020 created after the struct node list.
11022 The return value is either a pointer to the next node to process (if the
11023 list has been rearranged) or NULL to continue with the next node in the
11024 original list. */
11026 static tree *
11027 omp_accumulate_sibling_list (enum omp_region_type region_type,
11028 enum tree_code code,
11029 hash_map<tree_operand_hash, tree>
11030 *&struct_map_to_clause,
11031 hash_map<tree_operand_hash_no_se,
11032 omp_mapping_group *> *group_map,
11033 tree *grp_start_p, tree grp_end,
11034 vec<omp_addr_token *> &addr_tokens, tree **inner,
11035 bool *fragile_p, bool reprocessing_struct,
11036 tree **added_tail)
11038 using namespace omp_addr_tokenizer;
11039 poly_offset_int coffset;
11040 poly_int64 cbitpos;
11041 tree ocd = OMP_CLAUSE_DECL (grp_end);
11042 bool openmp = !(region_type & ORT_ACC);
11043 bool target = (region_type & ORT_TARGET) != 0;
11044 tree *continue_at = NULL;
11046 while (TREE_CODE (ocd) == ARRAY_REF)
11047 ocd = TREE_OPERAND (ocd, 0);
11049 if (*fragile_p)
11051 omp_mapping_group *to_group
11052 = omp_get_nonfirstprivate_group (group_map, ocd, true);
11054 if (to_group)
11055 return NULL;
11058 omp_addr_token *last_token = omp_first_chained_access_token (addr_tokens);
11059 if (last_token->type == ACCESS_METHOD)
11061 switch (last_token->u.access_kind)
11063 case ACCESS_REF:
11064 case ACCESS_REF_TO_POINTER:
11065 case ACCESS_REF_TO_POINTER_OFFSET:
11066 case ACCESS_INDEXED_REF_TO_ARRAY:
11067 /* We may see either a bare reference or a dereferenced
11068 "convert_from_reference"-like one here. Handle either way. */
11069 if (TREE_CODE (ocd) == INDIRECT_REF)
11070 ocd = TREE_OPERAND (ocd, 0);
11071 gcc_assert (TREE_CODE (TREE_TYPE (ocd)) == REFERENCE_TYPE);
11072 break;
11074 default:
11079 bool variable_offset;
11080 tree base
11081 = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);
11083 int base_token;
11084 for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
11086 if (addr_tokens[base_token]->type == ARRAY_BASE
11087 || addr_tokens[base_token]->type == STRUCTURE_BASE)
11088 break;
11091 /* The two expressions in the assertion below aren't quite the same: if we
11092 have 'struct_base_decl access_indexed_array' for something like
11093 "myvar[2].x" then base will be "myvar" and addr_tokens[base_token]->expr
11094 will be "myvar[2]" -- the actual base of the structure.
11095 The former interpretation leads to a strange situation where we get
11096 struct(myvar) alloc(myvar[2].ptr1)
11097 That is, the array of structures is effectively treated as one big structure
11098 for the purposes of gathering sibling lists, etc. */
11099 /* gcc_assert (base == addr_tokens[base_token]->expr); */
11101 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
11102 == GOMP_MAP_ATTACH_DETACH)
11103 || (OMP_CLAUSE_MAP_KIND (grp_end)
11104 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
11105 bool has_descriptor = false;
11106 if (OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
11108 tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
11109 if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
11110 has_descriptor = true;
11113 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
11115 enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;
11117 if (struct_map_to_clause == NULL)
11118 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
11120 if (variable_offset)
11121 str_kind = GOMP_MAP_STRUCT_UNORD;
11123 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
11125 OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
11126 OMP_CLAUSE_DECL (l) = unshare_expr (base);
11127 OMP_CLAUSE_SIZE (l) = size_int (1);
11129 struct_map_to_clause->put (base, l);
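/* Note (added for clarity): the struct node's OMP_CLAUSE_SIZE is not a
   byte size here but the number of entries in its sibling list; it starts
   at 1 for the member that triggered creation of the node and is
   incremented (via the size_binop PLUS_EXPR below) each time another
   member of the same base joins the list.  */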
11131 /* On first iterating through the clause list, we insert the struct node
11132 just before the component access node that triggers the initial
11133 omp_accumulate_sibling_list call for a particular sibling list (and
11134 it then forms the first entry in that list). When reprocessing
11135 struct bases that are themselves component accesses, we insert the
11136 struct node on an off-side list to avoid inserting the new
11137 GOMP_MAP_STRUCT into the middle of the old one. */
11138 tree *insert_node_pos = reprocessing_struct ? *added_tail : grp_start_p;
11140 if (has_descriptor)
11142 tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
11143 if (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
11144 OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
11145 tree sc = *insert_node_pos;
11146 OMP_CLAUSE_CHAIN (l) = desc;
11147 OMP_CLAUSE_CHAIN (*grp_start_p) = OMP_CLAUSE_CHAIN (desc);
11148 OMP_CLAUSE_CHAIN (desc) = sc;
11149 *insert_node_pos = l;
11151 else if (attach_detach)
11153 tree extra_node;
11154 tree alloc_node
11155 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
11156 &extra_node);
11157 tree *tail;
11158 OMP_CLAUSE_CHAIN (l) = alloc_node;
11160 if (extra_node)
11162 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
11163 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
11164 tail = &OMP_CLAUSE_CHAIN (extra_node);
11166 else
11168 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
11169 tail = &OMP_CLAUSE_CHAIN (alloc_node);
11172 /* For OpenMP semantics, we don't want to implicitly allocate
11173 space for the pointer here for non-compute regions (e.g. "enter
11174 data"). A FRAGILE_P node is only being created so that
11175 omp-low.cc is able to rewrite the struct properly.
11176 For references (to pointers), we want to actually allocate the
11177 space for the reference itself in the sorted list following the
11178 struct node.
11179 For pointers, we want to allocate space if we had an explicit
11180 mapping of the attachment point, but not otherwise. */
11181 if (*fragile_p
11182 || (openmp
11183 && !target
11184 && attach_detach
11185 && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
11186 && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
11188 if (!lang_GNU_Fortran ())
11189 /* In Fortran, pointers are dereferenced automatically, but may
11190 be unassociated. So we still want to allocate space for the
11191 pointer (as the base for an attach operation that should be
11192 present in the same directive's clause list also). */
11193 OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
11194 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
11197 *insert_node_pos = l;
11199 if (reprocessing_struct)
11201 /* When reprocessing a struct node group used as the base of a
11202 subcomponent access, if we have a reference-to-pointer base,
11203 we will see:
11204 struct(**ptr) attach(*ptr)
11205 whereas for a non-reprocess-struct group, we see, e.g.:
11206 tofrom(**ptr) attach(*ptr) attach(ptr)
11207 and we create the "alloc" for the second "attach", i.e.
11208 for the reference itself. When reprocessing a struct group we
11209 thus change the pointer attachment into a reference attachment
11210 by stripping the indirection. (The attachment of the
11211 referenced pointer must happen elsewhere: either on the same
11212 directive, or by some other means.) */
11213 tree adecl = OMP_CLAUSE_DECL (alloc_node);
11215 if ((TREE_CODE (adecl) == INDIRECT_REF
11216 || (TREE_CODE (adecl) == MEM_REF
11217 && integer_zerop (TREE_OPERAND (adecl, 1))))
11218 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (adecl, 0)))
11219 == REFERENCE_TYPE)
11220 && (TREE_CODE (TREE_TYPE (TREE_TYPE
11221 (TREE_OPERAND (adecl, 0)))) == POINTER_TYPE))
11222 OMP_CLAUSE_DECL (alloc_node) = TREE_OPERAND (adecl, 0);
11224 *added_tail = tail;
11227 else
11229 gcc_assert (*grp_start_p == grp_end);
11230 if (reprocessing_struct)
11232 /* If we don't have an attach/detach node, this is a
11233 "target data" directive or similar, not an offload region.
11234 Synthesize an "alloc" node using just the initiating
11235 GOMP_MAP_STRUCT decl. */
11236 gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
11237 || code == OACC_EXIT_DATA)
11238 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
11239 tree alloc_node
11240 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
11241 OMP_CLAUSE_MAP);
11242 OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
11243 OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
11244 OMP_CLAUSE_SIZE (alloc_node)
11245 = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
11247 OMP_CLAUSE_CHAIN (alloc_node) = OMP_CLAUSE_CHAIN (l);
11248 OMP_CLAUSE_CHAIN (l) = alloc_node;
11249 *insert_node_pos = l;
11250 *added_tail = &OMP_CLAUSE_CHAIN (alloc_node);
11252 else
11253 grp_start_p = omp_siblist_insert_node_after (l, insert_node_pos);
11256 unsigned last_access = base_token + 1;
11258 while (last_access + 1 < addr_tokens.length ()
11259 && addr_tokens[last_access + 1]->type == ACCESS_METHOD)
11260 last_access++;
11262 if ((region_type & ORT_TARGET)
11263 && addr_tokens[base_token + 1]->type == ACCESS_METHOD)
11265 bool base_ref = false;
11266 access_method_kinds access_kind
11267 = addr_tokens[last_access]->u.access_kind;
11269 switch (access_kind)
11271 case ACCESS_DIRECT:
11272 case ACCESS_INDEXED_ARRAY:
11273 return NULL;
11275 case ACCESS_REF:
11276 case ACCESS_REF_TO_POINTER:
11277 case ACCESS_REF_TO_POINTER_OFFSET:
11278 case ACCESS_INDEXED_REF_TO_ARRAY:
11279 base_ref = true;
11280 break;
11282 default:
11285 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
11286 OMP_CLAUSE_MAP);
11287 enum gomp_map_kind mkind;
11288 omp_mapping_group *decl_group;
11289 tree use_base;
11290 switch (access_kind)
11292 case ACCESS_POINTER:
11293 case ACCESS_POINTER_OFFSET:
11294 use_base = addr_tokens[last_access]->expr;
11295 break;
11296 case ACCESS_REF_TO_POINTER:
11297 case ACCESS_REF_TO_POINTER_OFFSET:
11298 use_base
11299 = build_fold_indirect_ref (addr_tokens[last_access]->expr);
11300 break;
11301 default:
11302 use_base = addr_tokens[base_token]->expr;
11304 bool mapped_to_p
11305 = omp_directive_maps_explicitly (group_map, use_base, &decl_group,
11306 true, false, true);
11307 if (addr_tokens[base_token]->type == STRUCTURE_BASE
11308 && DECL_P (addr_tokens[last_access]->expr)
11309 && !mapped_to_p)
11310 mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
11311 : GOMP_MAP_FIRSTPRIVATE_POINTER;
11312 else
11313 mkind = GOMP_MAP_ATTACH_DETACH;
11315 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
11316 /* If we have a reference to pointer base, we want to attach the
11317 pointer here, not the reference. The reference attachment happens
11318 elsewhere. */
11319 bool ref_to_ptr
11320 = (access_kind == ACCESS_REF_TO_POINTER
11321 || access_kind == ACCESS_REF_TO_POINTER_OFFSET);
11322 tree sdecl = addr_tokens[last_access]->expr;
11323 tree sdecl_ptr = ref_to_ptr ? build_fold_indirect_ref (sdecl)
11324 : sdecl;
11325 /* For the FIRSTPRIVATE_REFERENCE after the struct node, we
11326 want to use the reference itself for the decl, but we
11327 still want to use the pointer to calculate the bias. */
11328 OMP_CLAUSE_DECL (c2) = (mkind == GOMP_MAP_ATTACH_DETACH)
11329 ? sdecl_ptr : sdecl;
11330 sdecl = sdecl_ptr;
11331 tree baddr = build_fold_addr_expr (base);
11332 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
11333 ptrdiff_type_node, baddr);
11334 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
11335 ptrdiff_type_node, sdecl);
11336 OMP_CLAUSE_SIZE (c2)
11337 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
11338 ptrdiff_type_node, baddr, decladdr);
11339 /* Insert after struct node. */
11340 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
11341 OMP_CLAUSE_CHAIN (l) = c2;
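/* Note (added for clarity): the size field of c2 computed above holds the
   bias

     (ptrdiff_t) &base - (ptrdiff_t) sdecl

   i.e. the signed distance from SDECL to the structure base; once
   sibling-list building completes, it is recomputed against the first
   sorted element instead (see omp_build_struct_sibling_lists).  */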
11343 if (addr_tokens[base_token]->type == STRUCTURE_BASE
11344 && (addr_tokens[base_token]->u.structure_base_kind
11345 == BASE_COMPONENT_EXPR)
11346 && mkind == GOMP_MAP_ATTACH_DETACH
11347 && addr_tokens[last_access]->u.access_kind != ACCESS_REF)
11349 *inner = insert_node_pos;
11350 if (openmp)
11351 *fragile_p = true;
11352 return NULL;
11356 if (addr_tokens[base_token]->type == STRUCTURE_BASE
11357 && (addr_tokens[base_token]->u.structure_base_kind
11358 == BASE_COMPONENT_EXPR)
11359 && addr_tokens[last_access]->u.access_kind == ACCESS_REF)
11360 *inner = insert_node_pos;
11362 return NULL;
11364 else if (struct_map_to_clause)
11366 tree *osc = struct_map_to_clause->get (base);
11367 tree *sc = NULL, *scp = NULL;
11368 bool unordered = false;
11370 if (osc && OMP_CLAUSE_MAP_KIND (*osc) == GOMP_MAP_STRUCT_UNORD)
11371 unordered = true;
11373 unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
11374 sc = &OMP_CLAUSE_CHAIN (*osc);
11375 /* The struct mapping might be immediately followed by a
11376 FIRSTPRIVATE_POINTER, FIRSTPRIVATE_REFERENCE or an ATTACH_DETACH --
11377 if it's an indirect access or a reference, or if the structure base
11378 is not a decl. The FIRSTPRIVATE_* nodes are removed in omp-low.cc
11379 after they have been processed there, and ATTACH_DETACH nodes are
11380 recomputed and moved out of the GOMP_MAP_STRUCT construct once
11381 sibling list building is complete. */
11382 if (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
11383 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
11384 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_ATTACH_DETACH)
11385 sc = &OMP_CLAUSE_CHAIN (*sc);
11386 for (i = 0; i < elems; i++, sc = &OMP_CLAUSE_CHAIN (*sc))
11387 if (attach_detach && sc == grp_start_p)
11388 break;
11389 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
11390 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
11391 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
11392 break;
11393 else
11395 tree sc_decl = OMP_CLAUSE_DECL (*sc);
11396 poly_offset_int offset;
11397 poly_int64 bitpos;
11399 if (TREE_CODE (sc_decl) == ARRAY_REF)
11401 while (TREE_CODE (sc_decl) == ARRAY_REF)
11402 sc_decl = TREE_OPERAND (sc_decl, 0);
11403 if (TREE_CODE (sc_decl) != COMPONENT_REF
11404 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
11405 break;
11407 else if (INDIRECT_REF_P (sc_decl)
11408 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
11409 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
11410 == REFERENCE_TYPE))
11411 sc_decl = TREE_OPERAND (sc_decl, 0);
11413 bool variable_offset2;
11414 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
11415 &variable_offset2);
11416 if (!base2 || !operand_equal_p (base2, base, 0))
11417 break;
11418 if (scp)
11419 continue;
11420 if (variable_offset2)
11422 OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
11423 unordered = true;
11424 break;
11426 else if ((region_type & ORT_ACC) != 0)
11428 /* For OpenACC, allow (ignore) duplicate struct accesses in
11429 the middle of a mapping clause, e.g. "mystruct->foo" in:
11430 copy(mystruct->foo->bar) copy(mystruct->foo->qux). */
11431 if (reprocessing_struct
11432 && known_eq (coffset, offset)
11433 && known_eq (cbitpos, bitpos))
11434 return NULL;
11436 else if (known_eq (coffset, offset)
11437 && known_eq (cbitpos, bitpos))
11439 /* Having two struct members at the same offset doesn't work,
11440 so make sure we don't add a duplicate here. (We're allowed to
11441 ignore this. Should we report the error?) */
11442 /*error_at (OMP_CLAUSE_LOCATION (grp_end),
11443 "duplicate struct member %qE in map clauses",
11444 OMP_CLAUSE_DECL (grp_end));*/
11445 return NULL;
11447 if (maybe_lt (coffset, offset)
11448 || (known_eq (coffset, offset)
11449 && maybe_lt (cbitpos, bitpos)))
11451 if (attach_detach)
11452 scp = sc;
11453 else
11454 break;
11458 /* If this is an unordered struct, just insert the new element at the
11459 end of the list. */
11460 if (unordered)
11462 for (; i < elems; i++)
11463 sc = &OMP_CLAUSE_CHAIN (*sc);
11464 scp = NULL;
11467 OMP_CLAUSE_SIZE (*osc)
11468 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
11470 if (reprocessing_struct)
11472 /* If we're reprocessing a struct node, we don't want to do most of
11473 the list manipulation below. We only need to handle the (pointer
11474 or reference) attach/detach case. */
11475 tree extra_node, alloc_node;
11476 if (has_descriptor)
11477 gcc_unreachable ();
11478 else if (attach_detach)
11479 alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
11480 grp_end, &extra_node);
11481 else
11483 /* If we don't have an attach/detach node, this is a
11484 "target data" directive or similar, not an offload region.
11485 Synthesize an "alloc" node using just the initiating
11486 GOMP_MAP_STRUCT decl. */
11487 gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
11488 || code == OACC_EXIT_DATA)
11489 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
11490 alloc_node
11491 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
11492 OMP_CLAUSE_MAP);
11493 OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
11494 OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
11495 OMP_CLAUSE_SIZE (alloc_node)
11496 = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
11499 if (scp)
11500 omp_siblist_insert_node_after (alloc_node, scp);
11501 else
11503 tree *new_end = omp_siblist_insert_node_after (alloc_node, sc);
11504 if (sc == *added_tail)
11505 *added_tail = new_end;
11508 return NULL;
11511 if (has_descriptor)
11513 tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
11514 if (code == OMP_TARGET_EXIT_DATA
11515 || code == OACC_EXIT_DATA)
11516 OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
11517 omp_siblist_move_node_after (desc,
11518 &OMP_CLAUSE_CHAIN (*grp_start_p),
11519 scp ? scp : sc);
11521 else if (attach_detach)
11523 tree cl = NULL_TREE, extra_node;
11524 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
11525 grp_end, &extra_node);
11526 tree *tail_chain = NULL;
11528 if (*fragile_p
11529 || (openmp
11530 && !target
11531 && attach_detach
11532 && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
11533 && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
11535 if (!lang_GNU_Fortran ())
11536 OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
11537 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
11540 /* Here, we have:
11542 grp_end : the last (or only) node in this group.
11543 grp_start_p : pointer to the first node in a pointer mapping group
11544 up to and including GRP_END.
11545 sc : pointer to the chain for the end of the struct component
11546 list.
11547 scp : pointer to the chain for the sorted position at which we
11548 should insert in the middle of the struct component list
11549 (else NULL to insert at end).
11550 alloc_node : the "alloc" node for the structure (pointer-type)
11551 component. We insert at SCP (if present), else SC
11552 (the end of the struct component list).
11553 extra_node : a newly-synthesized node for an additional indirect
11554 pointer mapping or a Fortran pointer set, if needed.
11555 cl : first node to prepend before grp_start_p.
11556 tail_chain : pointer to chain of last prepended node.
11558 The general idea is we move the nodes for this struct mapping
11559 together: the alloc node goes into the sorted list directly after
11560 the struct mapping, and any extra nodes (together with the nodes
11561 mapping arrays pointed to by struct components) get moved after
11562 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
11563 the end of the struct component mapping list. It's important that
11564 the alloc_node comes first in that case because it's part of the
11565 sorted component mapping list (but subsequent nodes are not!). */
11567 if (scp)
11568 omp_siblist_insert_node_after (alloc_node, scp);
11570 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
11571 already inserted it) and the extra_node (if it is present). The
11572 list can be empty if we added alloc_node above and there is no
11573 extra node. */
11574 if (scp && extra_node)
11576 cl = extra_node;
11577 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
11579 else if (extra_node)
11581 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
11582 cl = alloc_node;
11583 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
11585 else if (!scp)
11587 cl = alloc_node;
11588 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
11591 continue_at
11592 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
11593 grp_start_p, grp_end,
11595 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
11597 else if (*sc != grp_end)
11599 gcc_assert (*grp_start_p == grp_end);
11601 /* We are moving the current node back to a previous struct node:
11602 the node that used to point to the current node will now point to
11603 the next node. */
11604 continue_at = grp_start_p;
11605 /* In the non-pointer case, the mapping clause itself is moved into
11606 the correct position in the struct component list, which in this
11607 case is just SC. */
11608 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
11611 return continue_at;
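/* Note (added for clarity): a non-NULL return value from the function
   above is the chain pointer from which the caller should resume, since
   the nodes originally at GRP_START_P may have been moved elsewhere in
   the clause list; omp_build_struct_sibling_lists records it (as
   NEW_NEXT) and uses it as the next group's start.  */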
11614 /* Scan through GROUPS, and create sorted structure sibling lists without
11615 gimplifying. */
11617 static bool
11618 omp_build_struct_sibling_lists (enum tree_code code,
11619 enum omp_region_type region_type,
11620 vec<omp_mapping_group> *groups,
11621 hash_map<tree_operand_hash_no_se,
11622 omp_mapping_group *> **grpmap,
11623 tree *list_p)
11625 using namespace omp_addr_tokenizer;
11626 unsigned i;
11627 omp_mapping_group *grp;
11628 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
11629 bool success = true;
11630 tree *new_next = NULL;
11631 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
11632 tree added_nodes = NULL_TREE;
11633 tree *added_tail = &added_nodes;
11634 auto_vec<omp_mapping_group> pre_hwm_groups;
11636 FOR_EACH_VEC_ELT (*groups, i, grp)
11638 tree c = grp->grp_end;
11639 tree decl = OMP_CLAUSE_DECL (c);
11640 tree grp_end = grp->grp_end;
11641 auto_vec<omp_addr_token *> addr_tokens;
11642 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
11644 if (new_next && !grp->reprocess_struct)
11645 grp->grp_start = new_next;
11647 new_next = NULL;
11649 tree *grp_start_p = grp->grp_start;
11651 if (DECL_P (decl))
11652 continue;
11654 /* Skip groups we marked for deletion in
11655 {omp,oacc}_resolve_clause_dependencies. */
11656 if (grp->deleted)
11657 continue;
11659 if (OMP_CLAUSE_CHAIN (*grp_start_p)
11660 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
11662 /* Don't process an array descriptor that isn't inside a derived type
11663 as a struct (the GOMP_MAP_POINTER following will have the form
11664 "var.data", but such mappings are handled specially). */
11665 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
11666 if (omp_map_clause_descriptor_p (grpmid)
11667 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
11668 continue;
11671 tree expr = decl;
11673 while (TREE_CODE (expr) == ARRAY_REF)
11674 expr = TREE_OPERAND (expr, 0);
11676 if (!omp_parse_expr (addr_tokens, expr))
11677 continue;
11679 omp_addr_token *last_token
11680 = omp_first_chained_access_token (addr_tokens);
11682 /* A mapping of a reference to a pointer member that doesn't specify an
11683 array section, etc., like this:
11684 *mystruct.ref_to_ptr
11685 should not be processed by the struct sibling-list handling code --
11686 it just transfers the referenced pointer.
11688 In contrast, the quite similar-looking construct:
11689 *mystruct.ptr
11690 which is equivalent to e.g.
11691 mystruct.ptr[0]
11692 *does* trigger sibling-list processing.
11694 An exception for the former case is for "fragile" groups where the
11695 reference itself is not handled otherwise; this is subject to special
11696 handling in omp_accumulate_sibling_list also. */
11698 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11699 && last_token->type == ACCESS_METHOD
11700 && last_token->u.access_kind == ACCESS_REF
11701 && !grp->fragile)
11702 continue;
11704 tree d = decl;
11705 if (TREE_CODE (d) == ARRAY_REF)
11707 while (TREE_CODE (d) == ARRAY_REF)
11708 d = TREE_OPERAND (d, 0);
11709 if (TREE_CODE (d) == COMPONENT_REF
11710 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11711 decl = d;
11713 if (d == decl
11714 && INDIRECT_REF_P (decl)
11715 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11716 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11717 == REFERENCE_TYPE)
11718 && (OMP_CLAUSE_MAP_KIND (c)
11719 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11720 decl = TREE_OPERAND (decl, 0);
11722 STRIP_NOPS (decl);
11724 if (TREE_CODE (decl) != COMPONENT_REF)
11725 continue;
11727 /* If we're mapping the whole struct in another node, skip adding this
11728 node to a sibling list. */
11729 omp_mapping_group *wholestruct;
11730 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
11731 &wholestruct))
11732 continue;
11734 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
11735 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
11736 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
11737 && code != OACC_UPDATE
11738 && code != OMP_TARGET_UPDATE)
11740 if (error_operand_p (decl))
11742 success = false;
11743 goto error_out;
11746 tree stype = TREE_TYPE (decl);
11747 if (TREE_CODE (stype) == REFERENCE_TYPE)
11748 stype = TREE_TYPE (stype);
11749 if (TYPE_SIZE_UNIT (stype) == NULL
11750 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
11752 error_at (OMP_CLAUSE_LOCATION (c),
11753 "mapping field %qE of variable length "
11754 "structure", OMP_CLAUSE_DECL (c));
11755 success = false;
11756 goto error_out;
11759 tree *inner = NULL;
11760 bool fragile_p = grp->fragile;
11762 new_next
11763 = omp_accumulate_sibling_list (region_type, code,
11764 struct_map_to_clause, *grpmap,
11765 grp_start_p, grp_end, addr_tokens,
11766 &inner, &fragile_p,
11767 grp->reprocess_struct, &added_tail);
11769 if (inner)
11771 omp_mapping_group newgrp;
11772 newgrp.grp_start = inner;
11773 if (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (*inner))
11774 == GOMP_MAP_ATTACH_DETACH)
11775 newgrp.grp_end = OMP_CLAUSE_CHAIN (*inner);
11776 else
11777 newgrp.grp_end = *inner;
11778 newgrp.mark = UNVISITED;
11779 newgrp.sibling = NULL;
11780 newgrp.deleted = false;
11781 newgrp.reprocess_struct = true;
11782 newgrp.fragile = fragile_p;
11783 newgrp.next = NULL;
11784 groups->safe_push (newgrp);
11786 /* !!! Growing GROUPS might invalidate the pointers in the group
11787 map. Rebuild it here. This is a bit inefficient, but
11788 shouldn't happen very often. */
11789 delete (*grpmap);
11790 *grpmap
11791 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
11792 sentinel);
11797 /* Delete groups marked for deletion above. At this point the order of the
11798 groups may no longer correspond to the order of the underlying list,
11799 which complicates this a little. First clear out OMP_CLAUSE_DECL for
11800 deleted nodes... */
11802 FOR_EACH_VEC_ELT (*groups, i, grp)
11803 if (grp->deleted)
11804 for (tree d = *grp->grp_start;
11805 d != OMP_CLAUSE_CHAIN (grp->grp_end);
11806 d = OMP_CLAUSE_CHAIN (d))
11807 OMP_CLAUSE_DECL (d) = NULL_TREE;
11809 /* ...then sweep through the list removing the now-empty nodes. */
11811 tail = list_p;
11812 while (*tail)
11814 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
11815 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
11816 *tail = OMP_CLAUSE_CHAIN (*tail);
11817 else
11818 tail = &OMP_CLAUSE_CHAIN (*tail);
11821 /* Tack on the struct nodes added during nested struct reprocessing. */
11822 if (added_nodes)
11824 *tail = added_nodes;
11825 tail = added_tail;
11828 /* Now that we have finished building the struct sibling lists, reprocess
11829 newly-added "attach" nodes: we need the address of the first
11830 mapped element of each struct sibling list for the bias of the attach
11831 operation -- not necessarily the base address of the whole struct. */
11832 if (struct_map_to_clause)
11833 for (hash_map<tree_operand_hash, tree>::iterator iter
11834 = struct_map_to_clause->begin ();
11835 iter != struct_map_to_clause->end ();
11836 ++iter)
11838 tree struct_node = (*iter).second;
11839 gcc_assert (OMP_CLAUSE_CODE (struct_node) == OMP_CLAUSE_MAP);
11840 tree attach = OMP_CLAUSE_CHAIN (struct_node);
11842 if (OMP_CLAUSE_CODE (attach) != OMP_CLAUSE_MAP
11843 || OMP_CLAUSE_MAP_KIND (attach) != GOMP_MAP_ATTACH_DETACH)
11844 continue;
11846 OMP_CLAUSE_SET_MAP_KIND (attach, GOMP_MAP_ATTACH);
11848 /* Sanity check: the standalone attach node will not work if we have
11849 an "enter data" operation (because for those, variables need to be
11850 mapped separately and attach nodes must be grouped together with the
11851 base they attach to). We should only have created the
11852 ATTACH_DETACH node after GOMP_MAP_STRUCT for a target region, so
11853 this should never be true. */
11854 gcc_assert ((region_type & ORT_TARGET) != 0);
11856 /* This is the first sorted node in the struct sibling list. Use it
11857 to recalculate the correct bias:
11858 (&first_node - attach_decl).
11859 For GOMP_MAP_STRUCT_UNORD, we need e.g. the
11860 min(min(min(first,second),third),fourth) element, because the
11861 elements aren't in any particular order. */
11862 tree lowest_addr;
11863 if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
11865 tree first_node = OMP_CLAUSE_CHAIN (attach);
11866 unsigned HOST_WIDE_INT num_mappings
11867 = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
11868 lowest_addr = OMP_CLAUSE_DECL (first_node);
11869 lowest_addr = build_fold_addr_expr (lowest_addr);
11870 lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
11871 tree next_node = OMP_CLAUSE_CHAIN (first_node);
11872 while (num_mappings > 1)
11874 tree tmp = OMP_CLAUSE_DECL (next_node);
11875 tmp = build_fold_addr_expr (tmp);
11876 tmp = fold_convert (pointer_sized_int_node, tmp);
11877 lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
11878 lowest_addr, tmp);
11879 next_node = OMP_CLAUSE_CHAIN (next_node);
11880 num_mappings--;
11882 lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
11884 else
11886 tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
11887 first_node = build_fold_addr_expr (first_node);
11888 lowest_addr = fold_convert (ptrdiff_type_node, first_node);
11890 tree attach_decl = OMP_CLAUSE_DECL (attach);
11891 attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
11892 OMP_CLAUSE_SIZE (attach)
11893 = fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
11894 attach_decl);
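/* E.g. (illustration only, not from the original source) with three
   members m1, m2, m3 in a GOMP_MAP_STRUCT_UNORD, the loop above computes

     MIN (MIN (&m1, &m2), &m3) - attach_decl

   whereas the ordered case uses &first_member - attach_decl directly.  */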
11896 /* Remove GOMP_MAP_ATTACH node from after struct node. */
11897 OMP_CLAUSE_CHAIN (struct_node) = OMP_CLAUSE_CHAIN (attach);
11898 /* ...and re-insert it at the end of our clause list. */
11899 *tail = attach;
11900 OMP_CLAUSE_CHAIN (attach) = NULL_TREE;
11901 tail = &OMP_CLAUSE_CHAIN (attach);
11904 error_out:
11905 if (struct_map_to_clause)
11906 delete struct_map_to_clause;
11908 return success;
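/* Note (added for clarity): in summary, the function above (1) accumulates
   sorted sibling lists via omp_accumulate_sibling_list, (2) clears and
   sweeps away clause nodes belonging to deleted groups, (3) appends the
   off-side list built while reprocessing nested struct bases, and
   (4) turns each ATTACH_DETACH node following a GOMP_MAP_STRUCT into a
   standalone GOMP_MAP_ATTACH at the list tail with a recomputed bias.  */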
11911 /* Scan the OMP clauses in *LIST_P, installing mappings into a new omp
11912 context and, where required, into the enclosing (previous) contexts. */
11914 static void
11915 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
11916 enum omp_region_type region_type,
11917 enum tree_code code)
11919 using namespace omp_addr_tokenizer;
11920 struct gimplify_omp_ctx *ctx, *outer_ctx;
11921 tree c;
11922 tree *orig_list_p = list_p;
11923 int handled_depend_iterators = -1;
11924 int nowait = -1;
11926 ctx = new_omp_context (region_type);
11927 ctx->code = code;
11928 outer_ctx = ctx->outer_context;
11929 if (code == OMP_TARGET)
11931 if (!lang_GNU_Fortran ())
11932 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
11933 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
11934 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
11935 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11937 if (!lang_GNU_Fortran ())
11938 switch (code)
11940 case OMP_TARGET:
11941 case OMP_TARGET_DATA:
11942 case OMP_TARGET_ENTER_DATA:
11943 case OMP_TARGET_EXIT_DATA:
11944 case OACC_DECLARE:
11945 case OACC_HOST_DATA:
11946 case OACC_PARALLEL:
11947 case OACC_KERNELS:
11948 ctx->target_firstprivatize_array_bases = true;
11949 default:
11950 break;
11953 vec<omp_mapping_group> *groups = NULL;
11954 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
11955 unsigned grpnum = 0;
11956 tree *grp_start_p = NULL, grp_end = NULL_TREE;
11958 if (code == OMP_TARGET
11959 || code == OMP_TARGET_DATA
11960 || code == OMP_TARGET_ENTER_DATA
11961 || code == OMP_TARGET_EXIT_DATA
11962 || code == OACC_DATA
11963 || code == OACC_KERNELS
11964 || code == OACC_PARALLEL
11965 || code == OACC_SERIAL
11966 || code == OACC_ENTER_DATA
11967 || code == OACC_EXIT_DATA
11968 || code == OACC_UPDATE
11969 || code == OACC_DECLARE)
11971 groups = omp_gather_mapping_groups (list_p);
11973 if (groups)
11974 grpmap = omp_index_mapping_groups (groups);
11977 while ((c = *list_p) != NULL)
11979 bool remove = false;
11980 bool notice_outer = true;
11981 bool map_descriptor;
11982 const char *check_non_private = NULL;
11983 unsigned int flags;
11984 tree decl;
11985 auto_vec<omp_addr_token *, 10> addr_tokens;
11987 if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
11989 grp_start_p = NULL;
11990 grp_end = NULL_TREE;
11993 switch (OMP_CLAUSE_CODE (c))
11995 case OMP_CLAUSE_PRIVATE:
11996 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
11997 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
11999 flags |= GOVD_PRIVATE_OUTER_REF;
12000 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
12002 else
12003 notice_outer = false;
12004 goto do_add;
12005 case OMP_CLAUSE_SHARED:
12006 flags = GOVD_SHARED | GOVD_EXPLICIT;
12007 goto do_add;
12008 case OMP_CLAUSE_FIRSTPRIVATE:
12009 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
12010 check_non_private = "firstprivate";
12011 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12013 gcc_assert (code == OMP_TARGET);
12014 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
12016 goto do_add;
12017 case OMP_CLAUSE_LASTPRIVATE:
12018 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12019 switch (code)
12021 case OMP_DISTRIBUTE:
12022 error_at (OMP_CLAUSE_LOCATION (c),
12023 "conditional %<lastprivate%> clause on "
12024 "%qs construct", "distribute");
12025 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12026 break;
12027 case OMP_TASKLOOP:
12028 error_at (OMP_CLAUSE_LOCATION (c),
12029 "conditional %<lastprivate%> clause on "
12030 "%qs construct", "taskloop");
12031 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12032 break;
12033 default:
12034 break;
12036 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
12037 if (code != OMP_LOOP)
12038 check_non_private = "lastprivate";
12039 decl = OMP_CLAUSE_DECL (c);
12040 if (error_operand_p (decl))
12041 goto do_add;
12042 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
12043 && !lang_hooks.decls.omp_scalar_p (decl, true))
12045 error_at (OMP_CLAUSE_LOCATION (c),
12046 "non-scalar variable %qD in conditional "
12047 "%<lastprivate%> clause", decl);
12048 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12050 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12051 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
12052 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
12053 false);
12054 goto do_add;
12055 case OMP_CLAUSE_REDUCTION:
12056 if (OMP_CLAUSE_REDUCTION_TASK (c))
12058 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
12060 if (nowait == -1)
12061 nowait = omp_find_clause (*list_p,
12062 OMP_CLAUSE_NOWAIT) != NULL_TREE;
12063 if (nowait
12064 && (outer_ctx == NULL
12065 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
12067 error_at (OMP_CLAUSE_LOCATION (c),
12068 "%<task%> reduction modifier on a construct "
12069 "with a %<nowait%> clause");
12070 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
12073 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
12075 error_at (OMP_CLAUSE_LOCATION (c),
12076 "invalid %<task%> reduction modifier on construct "
12077 "other than %<parallel%>, %qs, %<sections%> or "
12078 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
12079 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
12082 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12083 switch (code)
12085 case OMP_SECTIONS:
12086 error_at (OMP_CLAUSE_LOCATION (c),
12087 "%<inscan%> %<reduction%> clause on "
12088 "%qs construct", "sections");
12089 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12090 break;
12091 case OMP_PARALLEL:
12092 error_at (OMP_CLAUSE_LOCATION (c),
12093 "%<inscan%> %<reduction%> clause on "
12094 "%qs construct", "parallel");
12095 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12096 break;
12097 case OMP_TEAMS:
12098 error_at (OMP_CLAUSE_LOCATION (c),
12099 "%<inscan%> %<reduction%> clause on "
12100 "%qs construct", "teams");
12101 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12102 break;
12103 case OMP_TASKLOOP:
12104 error_at (OMP_CLAUSE_LOCATION (c),
12105 "%<inscan%> %<reduction%> clause on "
12106 "%qs construct", "taskloop");
12107 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12108 break;
12109 case OMP_SCOPE:
12110 error_at (OMP_CLAUSE_LOCATION (c),
12111 "%<inscan%> %<reduction%> clause on "
12112 "%qs construct", "scope");
12113 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12114 break;
12115 default:
12116 break;
12118 /* FALLTHRU */
12119 case OMP_CLAUSE_IN_REDUCTION:
12120 case OMP_CLAUSE_TASK_REDUCTION:
12121 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
12122 /* OpenACC permits reductions on private variables. */
12123 if (!(region_type & ORT_ACC)
12124 /* taskgroup is actually not a worksharing region. */
12125 && code != OMP_TASKGROUP)
12126 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
12127 decl = OMP_CLAUSE_DECL (c);
12128 if (TREE_CODE (decl) == MEM_REF)
12130 tree type = TREE_TYPE (decl);
12131 bool saved_into_ssa = gimplify_ctxp->into_ssa;
12132 gimplify_ctxp->into_ssa = false;
12133 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
12134 NULL, is_gimple_val, fb_rvalue, false)
12135 == GS_ERROR)
12137 gimplify_ctxp->into_ssa = saved_into_ssa;
12138 remove = true;
12139 break;
12141 gimplify_ctxp->into_ssa = saved_into_ssa;
12142 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12143 if (DECL_P (v))
12145 omp_firstprivatize_variable (ctx, v);
12146 omp_notice_variable (ctx, v, true);
12148 decl = TREE_OPERAND (decl, 0);
12149 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12151 gimplify_ctxp->into_ssa = false;
12152 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
12153 NULL, is_gimple_val, fb_rvalue, false)
12154 == GS_ERROR)
12156 gimplify_ctxp->into_ssa = saved_into_ssa;
12157 remove = true;
12158 break;
12160 gimplify_ctxp->into_ssa = saved_into_ssa;
12161 v = TREE_OPERAND (decl, 1);
12162 if (DECL_P (v))
12164 omp_firstprivatize_variable (ctx, v);
12165 omp_notice_variable (ctx, v, true);
12167 decl = TREE_OPERAND (decl, 0);
12169 if (TREE_CODE (decl) == ADDR_EXPR
12170 || TREE_CODE (decl) == INDIRECT_REF)
12171 decl = TREE_OPERAND (decl, 0);
12173 goto do_add_decl;
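/* Illustrative note (not part of the original source): for an array-section
   reduction such as "reduction(+:arr[x:n])", OMP_CLAUSE_DECL is a MEM_REF
   whose array type's domain carries the section length; the code above
   gimplifies that length and any POINTER_PLUS_EXPR offset, and
   firstprivatizes the resulting temporaries so the region can evaluate
   them.  */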
12174 case OMP_CLAUSE_LINEAR:
12175 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
12176 is_gimple_val, fb_rvalue) == GS_ERROR)
12178 remove = true;
12179 break;
12181 else
12183 if (code == OMP_SIMD
12184 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
12186 struct gimplify_omp_ctx *octx = outer_ctx;
12187 if (octx
12188 && octx->region_type == ORT_WORKSHARE
12189 && octx->combined_loop
12190 && !octx->distribute)
12192 if (octx->outer_context
12193 && (octx->outer_context->region_type
12194 == ORT_COMBINED_PARALLEL))
12195 octx = octx->outer_context->outer_context;
12196 else
12197 octx = octx->outer_context;
12199 if (octx
12200 && octx->region_type == ORT_WORKSHARE
12201 && octx->combined_loop
12202 && octx->distribute)
12204 error_at (OMP_CLAUSE_LOCATION (c),
12205 "%<linear%> clause for variable other than "
12206 "loop iterator specified on construct "
12207 "combined with %<distribute%>");
12208 remove = true;
12209 break;
12212 /* For combined #pragma omp parallel for simd, need to put
12213 lastprivate and perhaps firstprivate too on the
12214 parallel. Similarly for #pragma omp for simd. */
12215 struct gimplify_omp_ctx *octx = outer_ctx;
12216 bool taskloop_seen = false;
12217 decl = NULL_TREE;
12220 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12221 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12222 break;
12223 decl = OMP_CLAUSE_DECL (c);
12224 if (error_operand_p (decl))
12226 decl = NULL_TREE;
12227 break;
12229 flags = GOVD_SEEN;
12230 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
12231 flags |= GOVD_FIRSTPRIVATE;
12232 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12233 flags |= GOVD_LASTPRIVATE;
12234 if (octx
12235 && octx->region_type == ORT_WORKSHARE
12236 && octx->combined_loop)
12238 if (octx->outer_context
12239 && (octx->outer_context->region_type
12240 == ORT_COMBINED_PARALLEL))
12241 octx = octx->outer_context;
12242 else if (omp_check_private (octx, decl, false))
12243 break;
12245 else if (octx
12246 && (octx->region_type & ORT_TASK) != 0
12247 && octx->combined_loop)
12248 taskloop_seen = true;
12249 else if (octx
12250 && octx->region_type == ORT_COMBINED_PARALLEL
12251 && ((ctx->region_type == ORT_WORKSHARE
12252 && octx == outer_ctx)
12253 || taskloop_seen))
12254 flags = GOVD_SEEN | GOVD_SHARED;
12255 else if (octx
12256 && ((octx->region_type & ORT_COMBINED_TEAMS)
12257 == ORT_COMBINED_TEAMS))
12258 flags = GOVD_SEEN | GOVD_SHARED;
12259 else if (octx
12260 && octx->region_type == ORT_COMBINED_TARGET)
12262 if (flags & GOVD_LASTPRIVATE)
12263 flags = GOVD_SEEN | GOVD_MAP;
12265 else
12266 break;
12267 splay_tree_node on
12268 = splay_tree_lookup (octx->variables,
12269 (splay_tree_key) decl);
12270 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
12272 octx = NULL;
12273 break;
12275 omp_add_variable (octx, decl, flags);
12276 if (octx->outer_context == NULL)
12277 break;
12278 octx = octx->outer_context;
12280 while (1);
12281 if (octx
12282 && decl
12283 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12284 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12285 omp_notice_variable (octx, decl, true);
12287 flags = GOVD_LINEAR | GOVD_EXPLICIT;
12288 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12289 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12291 notice_outer = false;
12292 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12294 goto do_add;
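/* Illustrative note (not part of the original source): for a combined
   construct such as

     #pragma omp parallel for simd linear(x)

   the loop above walks outward through the enclosing contexts, adding "x"
   as firstprivate and/or lastprivate on the workshare context and as
   shared on the combined parallel, so that each constituent construct
   sees a consistent data-sharing class for the linear variable.  */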
12296 case OMP_CLAUSE_MAP:
12297 if (!grp_start_p)
12299 grp_start_p = list_p;
12300 grp_end = (*groups)[grpnum].grp_end;
12301 grpnum++;
12303 decl = OMP_CLAUSE_DECL (c);
12305 if (error_operand_p (decl))
12307 remove = true;
12308 break;
12311 if (!omp_parse_expr (addr_tokens, decl))
12313 remove = true;
12314 break;
12317 if (remove)
12318 break;
12319 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
12321 struct gimplify_omp_ctx *octx;
12322 for (octx = outer_ctx; octx; octx = octx->outer_context)
12324 if (octx->region_type != ORT_ACC_HOST_DATA)
12325 break;
12326 splay_tree_node n2
12327 = splay_tree_lookup (octx->variables,
12328 (splay_tree_key) decl);
12329 if (n2)
12330 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
12331 "declared in enclosing %<host_data%> region",
12332 DECL_NAME (decl));
12336 map_descriptor = false;
12338 /* This condition checks if we're mapping an array descriptor that
12339 isn't inside a derived type -- these have special handling, and
12340 are not handled as structs in omp_build_struct_sibling_lists.
12341 See that function for further details. */
12342 if (*grp_start_p != grp_end
12343 && OMP_CLAUSE_CHAIN (*grp_start_p)
12344 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
12346 tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
12347 if (omp_map_clause_descriptor_p (grp_mid)
12348 && DECL_P (OMP_CLAUSE_DECL (grp_mid)))
12349 map_descriptor = true;
12351 else if (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP
12352 && (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_RELEASE
12353 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DELETE)
12354 && OMP_CLAUSE_RELEASE_DESCRIPTOR (grp_end))
12355 map_descriptor = true;
12357 /* Adding the decl for a struct access: we haven't created
12358 GOMP_MAP_STRUCT nodes yet, so this statement needs to predict
12359 whether they will be created in gimplify_adjust_omp_clauses.
12360 NOTE: Technically we should probably look through DECL_VALUE_EXPR
12361 here because something that looks like a DECL_P may actually be a
12362 struct access, e.g. variables in a lambda closure
12363 (__closure->__foo) or class members (this->foo). Currently in both
12364 those cases we map the whole of the containing object (directly in
12365 the C++ FE) though, so struct nodes are not created. */
12366 if (c == grp_end
12367 && addr_tokens[0]->type == STRUCTURE_BASE
12368 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12369 && !map_descriptor)
12371 gcc_assert (addr_tokens[1]->type == ACCESS_METHOD);
12372 /* If we got to this struct via a chain of pointers, maybe we
12373 want to map it implicitly instead. */
12374 if (omp_access_chain_p (addr_tokens, 1))
12375 break;
12376 omp_mapping_group *wholestruct;
12377 if (!(region_type & ORT_ACC)
12378 && omp_mapped_by_containing_struct (grpmap,
12379 OMP_CLAUSE_DECL (c),
12380 &wholestruct))
12381 break;
12382 decl = addr_tokens[1]->expr;
12383 if (splay_tree_lookup (ctx->variables, (splay_tree_key) decl))
12384 break;
12385 /* Standalone attach or detach clauses for a struct element
12386 should not inhibit implicit mapping of the whole struct. */
12387 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12388 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12389 break;
12390 flags = GOVD_MAP | GOVD_EXPLICIT;
12392 gcc_assert (addr_tokens[1]->u.access_kind != ACCESS_DIRECT
12393 || TREE_ADDRESSABLE (decl));
12394 goto do_add_decl;
12397 if (!DECL_P (decl))
12399 tree d = decl, *pd;
12400 if (TREE_CODE (d) == ARRAY_REF)
12402 while (TREE_CODE (d) == ARRAY_REF)
12403 d = TREE_OPERAND (d, 0);
12404 if (TREE_CODE (d) == COMPONENT_REF
12405 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
12406 decl = d;
12408 pd = &OMP_CLAUSE_DECL (c);
12409 if (d == decl
12410 && TREE_CODE (decl) == INDIRECT_REF
12411 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12412 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12413 == REFERENCE_TYPE)
12414 && (OMP_CLAUSE_MAP_KIND (c)
12415 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
12417 pd = &TREE_OPERAND (decl, 0);
12418 decl = TREE_OPERAND (decl, 0);
12421 if (addr_tokens[0]->type == STRUCTURE_BASE
12422 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12423 && addr_tokens[1]->type == ACCESS_METHOD
12424 && (addr_tokens[1]->u.access_kind == ACCESS_POINTER
12425 || (addr_tokens[1]->u.access_kind
12426 == ACCESS_POINTER_OFFSET))
12427 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)))
12429 tree base = addr_tokens[1]->expr;
12430 splay_tree_node n
12431 = splay_tree_lookup (ctx->variables,
12432 (splay_tree_key) base);
12433 n->value |= GOVD_SEEN;
12436 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12438 /* Don't gimplify *pd fully at this point, as the base
12439 will need to be adjusted during omp lowering. */
12440 auto_vec<tree, 10> expr_stack;
12441 tree *p = pd;
12442 while (handled_component_p (*p)
12443 || TREE_CODE (*p) == INDIRECT_REF
12444 || TREE_CODE (*p) == ADDR_EXPR
12445 || TREE_CODE (*p) == MEM_REF
12446 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12448 expr_stack.safe_push (*p);
12449 p = &TREE_OPERAND (*p, 0);
12451 for (int i = expr_stack.length () - 1; i >= 0; i--)
12453 tree t = expr_stack[i];
12454 if (TREE_CODE (t) == ARRAY_REF
12455 || TREE_CODE (t) == ARRAY_RANGE_REF)
12457 if (TREE_OPERAND (t, 2) == NULL_TREE)
12459 tree low = unshare_expr (array_ref_low_bound (t));
12460 if (!is_gimple_min_invariant (low))
12462 TREE_OPERAND (t, 2) = low;
12463 if (gimplify_expr (&TREE_OPERAND (t, 2),
12464 pre_p, NULL,
12465 is_gimple_reg,
12466 fb_rvalue) == GS_ERROR)
12467 remove = true;
12470 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
12471 NULL, is_gimple_reg,
12472 fb_rvalue) == GS_ERROR)
12473 remove = true;
12474 if (TREE_OPERAND (t, 3) == NULL_TREE)
12476 tree elmt_size = array_ref_element_size (t);
12477 if (!is_gimple_min_invariant (elmt_size))
12479 elmt_size = unshare_expr (elmt_size);
12480 tree elmt_type
12481 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
12482 0)));
12483 tree factor
12484 = size_int (TYPE_ALIGN_UNIT (elmt_type));
12485 elmt_size
12486 = size_binop (EXACT_DIV_EXPR, elmt_size,
12487 factor);
12488 TREE_OPERAND (t, 3) = elmt_size;
12489 if (gimplify_expr (&TREE_OPERAND (t, 3),
12490 pre_p, NULL,
12491 is_gimple_reg,
12492 fb_rvalue) == GS_ERROR)
12493 remove = true;
12496 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
12497 NULL, is_gimple_reg,
12498 fb_rvalue) == GS_ERROR)
12499 remove = true;
12501 else if (TREE_CODE (t) == COMPONENT_REF)
12503 if (TREE_OPERAND (t, 2) == NULL_TREE)
12505 tree offset = component_ref_field_offset (t);
12506 if (!is_gimple_min_invariant (offset))
12508 offset = unshare_expr (offset);
12509 tree field = TREE_OPERAND (t, 1);
12510 tree factor
12511 = size_int (DECL_OFFSET_ALIGN (field)
12512 / BITS_PER_UNIT);
12513 offset = size_binop (EXACT_DIV_EXPR, offset,
12514 factor);
12515 TREE_OPERAND (t, 2) = offset;
12516 if (gimplify_expr (&TREE_OPERAND (t, 2),
12517 pre_p, NULL,
12518 is_gimple_reg,
12519 fb_rvalue) == GS_ERROR)
12520 remove = true;
12523 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
12524 NULL, is_gimple_reg,
12525 fb_rvalue) == GS_ERROR)
12526 remove = true;
12529 for (; expr_stack.length () > 0; )
12531 tree t = expr_stack.pop ();
12533 if (TREE_CODE (t) == ARRAY_REF
12534 || TREE_CODE (t) == ARRAY_RANGE_REF)
12536 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
12537 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
12538 NULL, is_gimple_val,
12539 fb_rvalue) == GS_ERROR)
12540 remove = true;
12544 break;
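/* Note (added for clarity): operands 2 and 3 of an ARRAY_REF cache the
   lower bound and the element size (scaled down by the element type's
   alignment), and operand 2 of a COMPONENT_REF caches the field offset
   (scaled down by DECL_OFFSET_ALIGN); the loops above materialize and
   gimplify these operands here precisely because *pd itself is left
   ungimplified for later adjustment during omp lowering.  */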
12547 if ((code == OMP_TARGET
12548 || code == OMP_TARGET_DATA
12549 || code == OMP_TARGET_ENTER_DATA
12550 || code == OMP_TARGET_EXIT_DATA)
12551 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
12553 /* If we have attach/detach but the decl we have is a pointer to
12554 pointer, we're probably mapping the "base level" array
12555 implicitly. Make sure we don't add the decl as if we mapped
12556 it explicitly. That is,
12558 int **arr;
12559 [...]
12560 #pragma omp target map(arr[a][b:c])
12562 should *not* map "arr" explicitly. That way we get a
12563 zero-length "alloc" mapping for it, and assuming it's been
12564 mapped by some previous directive, etc., things work as they
12565 should. */
12567 tree basetype = TREE_TYPE (addr_tokens[0]->expr);
12569 if (TREE_CODE (basetype) == REFERENCE_TYPE)
12570 basetype = TREE_TYPE (basetype);
12572 if (code == OMP_TARGET
12573 && addr_tokens[0]->type == ARRAY_BASE
12574 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12575 && TREE_CODE (basetype) == POINTER_TYPE
12576 && TREE_CODE (TREE_TYPE (basetype)) == POINTER_TYPE)
12577 break;
12580 flags = GOVD_MAP | GOVD_EXPLICIT;
12581 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
12582 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM
12583 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TO
12584 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TOFROM)
12585 flags |= GOVD_MAP_ALWAYS_TO;
12587 goto do_add;
12589 case OMP_CLAUSE_AFFINITY:
12590 gimplify_omp_affinity (list_p, pre_p);
12591 remove = true;
12592 break;
12593 case OMP_CLAUSE_DOACROSS:
12594 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
12596 tree deps = OMP_CLAUSE_DECL (c);
12597 while (deps && TREE_CODE (deps) == TREE_LIST)
12599 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
12600 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
12601 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
12602 pre_p, NULL, is_gimple_val, fb_rvalue);
12603 deps = TREE_CHAIN (deps);
12606 else
12607 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
12608 == OMP_CLAUSE_DOACROSS_SOURCE);
12609 break;
12610 case OMP_CLAUSE_DEPEND:
12611 if (handled_depend_iterators == -1)
12612 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
12613 if (handled_depend_iterators)
12615 if (handled_depend_iterators == 2)
12616 remove = true;
12617 break;
12619 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
12621 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
12622 NULL, is_gimple_val, fb_rvalue);
12623 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
12625 if (error_operand_p (OMP_CLAUSE_DECL (c)))
12627 remove = true;
12628 break;
12630 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
12632 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
12633 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
12634 is_gimple_val, fb_rvalue) == GS_ERROR)
12636 remove = true;
12637 break;
12640 if (code == OMP_TASK)
12641 ctx->has_depend = true;
12642 break;
12644 case OMP_CLAUSE_TO:
12645 case OMP_CLAUSE_FROM:
12646 case OMP_CLAUSE__CACHE_:
12647 decl = OMP_CLAUSE_DECL (c);
12648 if (error_operand_p (decl))
12650 remove = true;
12651 break;
12653 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12654 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
12655 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
12656 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
12657 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
12659 remove = true;
12660 break;
12662 if (!DECL_P (decl))
12664 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
12665 NULL, is_gimple_lvalue, fb_lvalue)
12666 == GS_ERROR)
12668 remove = true;
12669 break;
12671 break;
12673 goto do_notice;
12675 case OMP_CLAUSE_USE_DEVICE_PTR:
12676 case OMP_CLAUSE_USE_DEVICE_ADDR:
12677 flags = GOVD_EXPLICIT;
12678 goto do_add;
12680 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12681 decl = OMP_CLAUSE_DECL (c);
12682 while (TREE_CODE (decl) == INDIRECT_REF
12683 || TREE_CODE (decl) == ARRAY_REF)
12684 decl = TREE_OPERAND (decl, 0);
12685 flags = GOVD_EXPLICIT;
12686 goto do_add_decl;
12688 case OMP_CLAUSE_IS_DEVICE_PTR:
12689 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
12690 goto do_add;
12692 do_add:
12693 decl = OMP_CLAUSE_DECL (c);
12694 do_add_decl:
12695 if (error_operand_p (decl))
12697 remove = true;
12698 break;
12700 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
12702 tree t = omp_member_access_dummy_var (decl);
12703 if (t)
12705 tree v = DECL_VALUE_EXPR (decl);
12706 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
12707 if (outer_ctx)
12708 omp_notice_variable (outer_ctx, t, true);
12711 if (code == OACC_DATA
12712 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12713 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12714 flags |= GOVD_MAP_0LEN_ARRAY;
12715 omp_add_variable (ctx, decl, flags);
12716 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12717 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
12718 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
12719 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
12721 struct gimplify_omp_ctx *pctx
12722 = code == OMP_TARGET ? outer_ctx : ctx;
12723 if (pctx)
12724 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
12725 GOVD_LOCAL | GOVD_SEEN);
12726 if (pctx
12727 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
12728 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
12729 find_decl_expr,
12730 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
12731 NULL) == NULL_TREE)
12732 omp_add_variable (pctx,
12733 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
12734 GOVD_LOCAL | GOVD_SEEN);
12735 gimplify_omp_ctxp = pctx;
12736 push_gimplify_context ();
12738 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
12739 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
12741 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
12742 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
12743 pop_gimplify_context
12744 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
12745 push_gimplify_context ();
12746 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
12747 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
12748 pop_gimplify_context
12749 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
12750 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
12751 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
12753 gimplify_omp_ctxp = outer_ctx;
12755 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12756 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
12758 gimplify_omp_ctxp = ctx;
12759 push_gimplify_context ();
12760 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
12762 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
12763 NULL, NULL);
12764 TREE_SIDE_EFFECTS (bind) = 1;
12765 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
12766 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
12768 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
12769 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
12770 pop_gimplify_context
12771 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
12772 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
12774 gimplify_omp_ctxp = outer_ctx;
12776 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12777 && OMP_CLAUSE_LINEAR_STMT (c))
12779 gimplify_omp_ctxp = ctx;
12780 push_gimplify_context ();
12781 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
12783 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
12784 NULL, NULL);
12785 TREE_SIDE_EFFECTS (bind) = 1;
12786 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
12787 OMP_CLAUSE_LINEAR_STMT (c) = bind;
12789 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
12790 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
12791 pop_gimplify_context
12792 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
12793 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
12795 gimplify_omp_ctxp = outer_ctx;
12797 if (notice_outer)
12798 goto do_notice;
12799 break;
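/* Illustrative sketch (hypothetical source, not from the testsuite):
   for a user-defined reduction such as

     #pragma omp declare reduction (merge : T : omp_out.join (omp_in)) \
       initializer (omp_priv = T ())

   the OMP_CLAUSE_REDUCTION_INIT and OMP_CLAUSE_REDUCTION_MERGE trees
   built from the initializer and combiner are gimplified above into
   the GIMPLE_INIT / GIMPLE_MERGE sequences stored on the clause.  */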
12801 case OMP_CLAUSE_COPYIN:
12802 case OMP_CLAUSE_COPYPRIVATE:
12803 decl = OMP_CLAUSE_DECL (c);
12804 if (error_operand_p (decl))
12806 remove = true;
12807 break;
12809 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
12810 && !remove
12811 && !omp_check_private (ctx, decl, true))
12813 remove = true;
12814 if (is_global_var (decl))
12816 if (DECL_THREAD_LOCAL_P (decl))
12817 remove = false;
12818 else if (DECL_HAS_VALUE_EXPR_P (decl))
12820 tree value = get_base_address (DECL_VALUE_EXPR (decl));
12822 if (value
12823 && DECL_P (value)
12824 && DECL_THREAD_LOCAL_P (value))
12825 remove = false;
12828 if (remove)
12829 error_at (OMP_CLAUSE_LOCATION (c),
12830 "copyprivate variable %qE is not threadprivate"
12831 " or private in outer context", DECL_NAME (decl));
12833 do_notice:
12834 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12835 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
12836 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12837 && outer_ctx
12838 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
12839 || (region_type == ORT_WORKSHARE
12840 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12841 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
12842 || code == OMP_LOOP)))
12843 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
12844 || (code == OMP_LOOP
12845 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12846 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
12847 == ORT_COMBINED_TEAMS))))
12849 splay_tree_node on
12850 = splay_tree_lookup (outer_ctx->variables,
12851 (splay_tree_key)decl);
12852 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
12854 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12855 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12856 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
12857 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12858 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
12859 == POINTER_TYPE))))
12860 omp_firstprivatize_variable (outer_ctx, decl);
12861 else
12863 omp_add_variable (outer_ctx, decl,
12864 GOVD_SEEN | GOVD_SHARED);
12865 if (outer_ctx->outer_context)
12866 omp_notice_variable (outer_ctx->outer_context, decl,
12867 true);
12871 if (outer_ctx)
12872 omp_notice_variable (outer_ctx, decl, true);
12873 if (check_non_private
12874 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
12875 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
12876 || decl == OMP_CLAUSE_DECL (c)
12877 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12878 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12879 == ADDR_EXPR
12880 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12881 == POINTER_PLUS_EXPR
12882 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
12883 (OMP_CLAUSE_DECL (c), 0), 0))
12884 == ADDR_EXPR)))))
12885 && omp_check_private (ctx, decl, false))
12887 error ("%s variable %qE is private in outer context",
12888 check_non_private, DECL_NAME (decl));
12889 remove = true;
12891 break;
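/* Illustrative sketch (hypothetical source): copyprivate requires the
   variable to be threadprivate or private in the outer context, e.g.

     static int t;
     #pragma omp threadprivate (t)
     ...
     #pragma omp single copyprivate (t)
     t = compute ();

   is accepted, whereas copyprivate of a plain shared variable is
   rejected by the omp_check_private test above.  */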
12893 case OMP_CLAUSE_DETACH:
12894 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
12895 goto do_add;
12897 case OMP_CLAUSE_IF:
12898 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
12899 && OMP_CLAUSE_IF_MODIFIER (c) != code)
12901 const char *p[2];
12902 for (int i = 0; i < 2; i++)
12903 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
12905 case VOID_CST: p[i] = "cancel"; break;
12906 case OMP_PARALLEL: p[i] = "parallel"; break;
12907 case OMP_SIMD: p[i] = "simd"; break;
12908 case OMP_TASK: p[i] = "task"; break;
12909 case OMP_TASKLOOP: p[i] = "taskloop"; break;
12910 case OMP_TARGET_DATA: p[i] = "target data"; break;
12911 case OMP_TARGET: p[i] = "target"; break;
12912 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
12913 case OMP_TARGET_ENTER_DATA:
12914 p[i] = "target enter data"; break;
12915 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
12916 default: gcc_unreachable ();
12918 error_at (OMP_CLAUSE_LOCATION (c),
12919 "expected %qs %<if%> clause modifier rather than %qs",
12920 p[0], p[1]);
12921 remove = true;
12923 /* Fall through. */
12925 case OMP_CLAUSE_SELF:
12926 case OMP_CLAUSE_FINAL:
12927 OMP_CLAUSE_OPERAND (c, 0)
12928 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
12929 /* Fall through. */
12931 case OMP_CLAUSE_NUM_TEAMS:
12932 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
12933 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
12934 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
12936 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
12938 remove = true;
12939 break;
12941 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
12942 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
12943 pre_p, NULL, true);
12945 /* Fall through. */
12947 case OMP_CLAUSE_SCHEDULE:
12948 case OMP_CLAUSE_NUM_THREADS:
12949 case OMP_CLAUSE_THREAD_LIMIT:
12950 case OMP_CLAUSE_DIST_SCHEDULE:
12951 case OMP_CLAUSE_DEVICE:
12952 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
12953 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
12955 if (code != OMP_TARGET)
12957 error_at (OMP_CLAUSE_LOCATION (c),
12958 "%<device%> clause with %<ancestor%> is only "
12959 "allowed on %<target%> construct");
12960 remove = true;
12961 break;
12964 tree clauses = *orig_list_p;
12965 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
12966 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
12967 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
12968 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
12969 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
12970 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
12973 error_at (OMP_CLAUSE_LOCATION (c),
12974 "with %<ancestor%>, only the %<device%>, "
12975 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
12976 "and %<map%> clauses may appear on the "
12977 "construct");
12978 remove = true;
12979 break;
12982 /* Fall through. */
12984 case OMP_CLAUSE_PRIORITY:
12985 case OMP_CLAUSE_GRAINSIZE:
12986 case OMP_CLAUSE_NUM_TASKS:
12987 case OMP_CLAUSE_FILTER:
12988 case OMP_CLAUSE_HINT:
12989 case OMP_CLAUSE_ASYNC:
12990 case OMP_CLAUSE_WAIT:
12991 case OMP_CLAUSE_NUM_GANGS:
12992 case OMP_CLAUSE_NUM_WORKERS:
12993 case OMP_CLAUSE_VECTOR_LENGTH:
12994 case OMP_CLAUSE_WORKER:
12995 case OMP_CLAUSE_VECTOR:
12996 if (OMP_CLAUSE_OPERAND (c, 0)
12997 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
12999 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
13001 remove = true;
13002 break;
13004 /* All these clauses care about the value, not a particular decl,
13005 so try to force it into an SSA_NAME or a fresh temporary. */
13006 OMP_CLAUSE_OPERAND (c, 0)
13007 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
13008 pre_p, NULL, true);
13010 break;
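/* A minimal sketch (hypothetical input): for

     #pragma omp parallel num_threads (n * 2)

   the operand is not a minimal invariant, so it is evaluated once into
   a fresh temporary in *PRE_P, roughly

     D.tmp = n * 2;
     #pragma omp parallel num_threads (D.tmp)

   where D.tmp stands for the temporary created by
   get_initialized_tmp_var.  */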
13012 case OMP_CLAUSE_GANG:
13013 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
13014 is_gimple_val, fb_rvalue) == GS_ERROR)
13015 remove = true;
13016 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
13017 is_gimple_val, fb_rvalue) == GS_ERROR)
13018 remove = true;
13019 break;
13021 case OMP_CLAUSE_NOWAIT:
13022 nowait = 1;
13023 break;
13025 case OMP_CLAUSE_ORDERED:
13026 case OMP_CLAUSE_UNTIED:
13027 case OMP_CLAUSE_COLLAPSE:
13028 case OMP_CLAUSE_TILE:
13029 case OMP_CLAUSE_AUTO:
13030 case OMP_CLAUSE_SEQ:
13031 case OMP_CLAUSE_INDEPENDENT:
13032 case OMP_CLAUSE_MERGEABLE:
13033 case OMP_CLAUSE_PROC_BIND:
13034 case OMP_CLAUSE_SAFELEN:
13035 case OMP_CLAUSE_SIMDLEN:
13036 case OMP_CLAUSE_NOGROUP:
13037 case OMP_CLAUSE_THREADS:
13038 case OMP_CLAUSE_SIMD:
13039 case OMP_CLAUSE_BIND:
13040 case OMP_CLAUSE_IF_PRESENT:
13041 case OMP_CLAUSE_FINALIZE:
13042 break;
13044 case OMP_CLAUSE_ORDER:
13045 ctx->order_concurrent = true;
13046 break;
13048 case OMP_CLAUSE_DEFAULTMAP:
13049 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
13050 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
13052 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
13053 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
13054 gdmkmin = GDMK_SCALAR;
13055 gdmkmax = GDMK_POINTER;
13056 break;
13057 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
13058 gdmkmin = GDMK_SCALAR;
13059 gdmkmax = GDMK_SCALAR_TARGET;
13060 break;
13061 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
13062 gdmkmin = gdmkmax = GDMK_AGGREGATE;
13063 break;
13064 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
13065 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
13066 break;
13067 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
13068 gdmkmin = gdmkmax = GDMK_POINTER;
13069 break;
13070 default:
13071 gcc_unreachable ();
13073 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
13074 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
13076 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
13077 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
13078 break;
13079 case OMP_CLAUSE_DEFAULTMAP_TO:
13080 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
13081 break;
13082 case OMP_CLAUSE_DEFAULTMAP_FROM:
13083 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
13084 break;
13085 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
13086 ctx->defaultmap[gdmk] = GOVD_MAP;
13087 break;
13088 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
13089 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
13090 break;
13091 case OMP_CLAUSE_DEFAULTMAP_NONE:
13092 ctx->defaultmap[gdmk] = 0;
13093 break;
13094 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
13095 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
13096 break;
13097 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
13098 switch (gdmk)
13100 case GDMK_SCALAR:
13101 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
13102 break;
13103 case GDMK_SCALAR_TARGET:
13104 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
13105 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
13106 break;
13107 case GDMK_AGGREGATE:
13108 case GDMK_ALLOCATABLE:
13109 ctx->defaultmap[gdmk] = GOVD_MAP;
13110 break;
13111 case GDMK_POINTER:
13112 ctx->defaultmap[gdmk] = GOVD_MAP;
13113 if (!lang_GNU_Fortran ())
13114 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
13115 break;
13116 default:
13117 gcc_unreachable ();
13119 break;
13120 default:
13121 gcc_unreachable ();
13123 break;
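/* For example (illustrative): defaultmap (firstprivate: scalar) sets
   ctx->defaultmap[GDMK_SCALAR] to GOVD_FIRSTPRIVATE, while a bare
   defaultmap (none) clears every category so that each variable
   referenced in the region needs an explicit data-sharing or map
   clause.  */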
13125 case OMP_CLAUSE_ALIGNED:
13126 decl = OMP_CLAUSE_DECL (c);
13127 if (error_operand_p (decl))
13129 remove = true;
13130 break;
13132 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
13133 is_gimple_val, fb_rvalue) == GS_ERROR)
13135 remove = true;
13136 break;
13138 if (!is_global_var (decl)
13139 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
13140 omp_add_variable (ctx, decl, GOVD_ALIGNED);
13141 break;
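/* E.g. (sketch): for "#pragma omp simd aligned (p : 32)" a local
   pointer P is recorded with GOVD_ALIGNED so that later lowering can
   assert the alignment, typically via __builtin_assume_aligned.  */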
13143 case OMP_CLAUSE_NONTEMPORAL:
13144 decl = OMP_CLAUSE_DECL (c);
13145 if (error_operand_p (decl))
13147 remove = true;
13148 break;
13150 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
13151 break;
13153 case OMP_CLAUSE_ALLOCATE:
13154 decl = OMP_CLAUSE_DECL (c);
13155 if (error_operand_p (decl))
13157 remove = true;
13158 break;
13160 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
13161 is_gimple_val, fb_rvalue) == GS_ERROR)
13163 remove = true;
13164 break;
13166 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
13167 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
13168 == INTEGER_CST))
13170 else if (code == OMP_TASKLOOP
13171 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
13172 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13173 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13174 pre_p, NULL, false);
13175 break;
13177 case OMP_CLAUSE_DEFAULT:
13178 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
13179 break;
13181 case OMP_CLAUSE_INCLUSIVE:
13182 case OMP_CLAUSE_EXCLUSIVE:
13183 decl = OMP_CLAUSE_DECL (c);
13185 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
13186 (splay_tree_key) decl);
13187 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
13189 error_at (OMP_CLAUSE_LOCATION (c),
13190 "%qD specified in %qs clause but not in %<inscan%> "
13191 "%<reduction%> clause on the containing construct",
13192 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
13193 remove = true;
13195 else
13197 n->value |= GOVD_REDUCTION_INSCAN;
13198 if (outer_ctx->region_type == ORT_SIMD
13199 && outer_ctx->outer_context
13200 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
13202 n = splay_tree_lookup (outer_ctx->outer_context->variables,
13203 (splay_tree_key) decl);
13204 if (n && (n->value & GOVD_REDUCTION) != 0)
13205 n->value |= GOVD_REDUCTION_INSCAN;
13209 break;
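/* Illustrative use (hypothetical source): the canonical inclusive scan
   looks like

     #pragma omp for reduction (inscan, +: r)
     for (int i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   Here R must appear both in the inscan reduction clause of the
   containing construct and in the scan clause, which is what the
   splay-tree lookup above enforces.  */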
13211 case OMP_CLAUSE_NOHOST:
13212 default:
13213 gcc_unreachable ();
13216 if (code == OACC_DATA
13217 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13218 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13219 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
13220 remove = true;
13221 if (remove)
13222 *list_p = OMP_CLAUSE_CHAIN (c);
13223 else
13224 list_p = &OMP_CLAUSE_CHAIN (c);
13227 if (groups)
13229 delete grpmap;
13230 delete groups;
13233 ctx->clauses = *orig_list_p;
13234 gimplify_omp_ctxp = ctx;
13237 /* Return true if DECL is a candidate for the shared-to-firstprivate
13238 optimization. We only consider non-addressable scalars that are
13239 not too big and are not references. */
13241 static bool
13242 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
13244 if (TREE_ADDRESSABLE (decl))
13245 return false;
13246 tree type = TREE_TYPE (decl);
13247 if (!is_gimple_reg_type (type)
13248 || TREE_CODE (type) == REFERENCE_TYPE
13249 || TREE_ADDRESSABLE (type))
13250 return false;
13251 /* Don't optimize too large decls, as each thread/task will have
13252 its own. */
13253 HOST_WIDE_INT len = int_size_in_bytes (type);
13254 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
13255 return false;
13256 if (omp_privatize_by_reference (decl))
13257 return false;
13258 return true;
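/* For instance (illustrative): a plain "int x;" whose address is never
   taken qualifies, while "int a[64];" (not a small register type) and
   a C++ reference "int &r" do not.  */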
13261 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
13262 For a decl satisfying omp_shared_to_firstprivate_optimizable_decl_p,
13263 mark it as GOVD_WRITTEN in outer contexts. */
13265 static void
13266 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
13268 for (; ctx; ctx = ctx->outer_context)
13270 splay_tree_node n = splay_tree_lookup (ctx->variables,
13271 (splay_tree_key) decl);
13272 if (n == NULL)
13273 continue;
13274 else if (n->value & GOVD_SHARED)
13276 n->value |= GOVD_WRITTEN;
13277 return;
13279 else if (n->value & GOVD_DATA_SHARE_CLASS)
13280 return;
13284 /* Helper callback for walk_gimple_seq to discover possible stores
13285 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
13286 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
13287 for those. */
13289 static tree
13290 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
13292 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13294 *walk_subtrees = 0;
13295 if (!wi->is_lhs)
13296 return NULL_TREE;
13298 tree op = *tp;
13301 if (handled_component_p (op))
13302 op = TREE_OPERAND (op, 0);
13303 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
13304 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
13305 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
13306 else
13307 break;
13309 while (1);
13310 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
13311 return NULL_TREE;
13313 omp_mark_stores (gimplify_omp_ctxp, op);
13314 return NULL_TREE;
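/* E.g. (sketch): for a store such as "s.a[i] = 0" the loop above peels
   the COMPONENT_REF and ARRAY_REF down to the base decl S before
   calling omp_mark_stores on it.  */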
13317 /* Helper callback for walk_gimple_seq to discover possible stores
13318 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
13319 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
13320 for those. */
13322 static tree
13323 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
13324 bool *handled_ops_p,
13325 struct walk_stmt_info *wi)
13327 gimple *stmt = gsi_stmt (*gsi_p);
13328 switch (gimple_code (stmt))
13330 /* Don't recurse on OpenMP constructs for which
13331 gimplify_adjust_omp_clauses already handled the bodies,
13332 except handle gimple_omp_for_pre_body. */
13333 case GIMPLE_OMP_FOR:
13334 *handled_ops_p = true;
13335 if (gimple_omp_for_pre_body (stmt))
13336 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13337 omp_find_stores_stmt, omp_find_stores_op, wi);
13338 break;
13339 case GIMPLE_OMP_PARALLEL:
13340 case GIMPLE_OMP_TASK:
13341 case GIMPLE_OMP_SECTIONS:
13342 case GIMPLE_OMP_SINGLE:
13343 case GIMPLE_OMP_SCOPE:
13344 case GIMPLE_OMP_TARGET:
13345 case GIMPLE_OMP_TEAMS:
13346 case GIMPLE_OMP_CRITICAL:
13347 *handled_ops_p = true;
13348 break;
13349 default:
13350 break;
13352 return NULL_TREE;
13355 struct gimplify_adjust_omp_clauses_data
13357 tree *list_p;
13358 gimple_seq *pre_p;
13361 /* Splay-tree callback: materialize an implicit clause for each variable
13362 recorded in the context, skipping those not actually used there. */
13364 static int
13365 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
13367 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
13368 gimple_seq *pre_p
13369 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
13370 tree decl = (tree) n->key;
13371 unsigned flags = n->value;
13372 enum omp_clause_code code;
13373 tree clause;
13374 bool private_debug;
13376 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
13377 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
13378 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
13379 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
13380 return 0;
13381 if ((flags & GOVD_SEEN) == 0)
13382 return 0;
13383 if (flags & GOVD_DEBUG_PRIVATE)
13385 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
13386 private_debug = true;
13388 else if (flags & GOVD_MAP)
13389 private_debug = false;
13390 else
13391 private_debug
13392 = lang_hooks.decls.omp_private_debug_clause (decl,
13393 !!(flags & GOVD_SHARED));
13394 if (private_debug)
13395 code = OMP_CLAUSE_PRIVATE;
13396 else if (flags & GOVD_MAP)
13398 code = OMP_CLAUSE_MAP;
13399 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
13400 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
13402 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
13403 return 0;
13405 if (VAR_P (decl)
13406 && DECL_IN_CONSTANT_POOL (decl)
13407 && !lookup_attribute ("omp declare target",
13408 DECL_ATTRIBUTES (decl)))
13410 tree id = get_identifier ("omp declare target");
13411 DECL_ATTRIBUTES (decl)
13412 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13413 varpool_node *node = varpool_node::get (decl);
13414 if (node)
13416 node->offloadable = 1;
13417 if (ENABLE_OFFLOADING)
13418 g->have_offload = true;
13422 else if (flags & GOVD_SHARED)
13424 if (is_global_var (decl))
13426 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
13427 while (ctx != NULL)
13429 splay_tree_node on
13430 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13431 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
13432 | GOVD_PRIVATE | GOVD_REDUCTION
13433 | GOVD_LINEAR | GOVD_MAP)) != 0)
13434 break;
13435 ctx = ctx->outer_context;
13437 if (ctx == NULL)
13438 return 0;
13440 code = OMP_CLAUSE_SHARED;
13441 /* Don't optimize shared into firstprivate for read-only vars
13442 on tasks with a depend clause; we shouldn't try to copy them
13443 until the dependencies are satisfied. */
13444 if (gimplify_omp_ctxp->has_depend)
13445 flags |= GOVD_WRITTEN;
13447 else if (flags & GOVD_PRIVATE)
13448 code = OMP_CLAUSE_PRIVATE;
13449 else if (flags & GOVD_FIRSTPRIVATE)
13451 code = OMP_CLAUSE_FIRSTPRIVATE;
13452 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
13453 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
13454 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
13456 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
13457 "%<target%> construct", decl);
13458 return 0;
13461 else if (flags & GOVD_LASTPRIVATE)
13462 code = OMP_CLAUSE_LASTPRIVATE;
13463 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
13464 return 0;
13465 else if (flags & GOVD_CONDTEMP)
13467 code = OMP_CLAUSE__CONDTEMP_;
13468 gimple_add_tmp_var (decl);
13470 else
13471 gcc_unreachable ();
13473 if (((flags & GOVD_LASTPRIVATE)
13474 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
13475 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13476 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13478 tree chain = *list_p;
13479 clause = build_omp_clause (input_location, code);
13480 OMP_CLAUSE_DECL (clause) = decl;
13481 OMP_CLAUSE_CHAIN (clause) = chain;
13482 if (private_debug)
13483 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
13484 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
13485 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
13486 else if (code == OMP_CLAUSE_SHARED
13487 && (flags & GOVD_WRITTEN) == 0
13488 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13489 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
13490 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
13491 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
13492 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
13494 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
13495 OMP_CLAUSE_DECL (nc) = decl;
13496 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
13497 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
13498 OMP_CLAUSE_DECL (clause)
13499 = build_fold_indirect_ref_loc (input_location, decl);
13500 OMP_CLAUSE_DECL (clause)
13501 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
13502 build_int_cst (build_pointer_type (char_type_node), 0));
13503 OMP_CLAUSE_SIZE (clause) = size_zero_node;
13504 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13505 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
13506 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
13507 tree dtype = TREE_TYPE (decl);
13508 if (TREE_CODE (dtype) == REFERENCE_TYPE)
13509 dtype = TREE_TYPE (dtype);
13510 /* FIRSTPRIVATE_POINTER doesn't work well if we have a
13511 multiply-indirected pointer. If we have a reference to a pointer to
13512 a pointer, it's possible that this should really be
13513 GOMP_MAP_FIRSTPRIVATE_REFERENCE -- but that also doesn't work at the
13514 moment, so stick with this. (See PR113279 and testcases
13515 baseptrs-{4,6}.C:ref2ptrptr_offset_decl_member_slice). */
13516 if (TREE_CODE (dtype) == POINTER_TYPE
13517 && TREE_CODE (TREE_TYPE (dtype)) == POINTER_TYPE)
13518 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13519 else
13520 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
13521 OMP_CLAUSE_CHAIN (nc) = chain;
13522 OMP_CLAUSE_CHAIN (clause) = nc;
13523 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13524 gimplify_omp_ctxp = ctx->outer_context;
13525 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
13526 pre_p, NULL, is_gimple_val, fb_rvalue);
13527 gimplify_omp_ctxp = ctx;
13529 else if (code == OMP_CLAUSE_MAP)
13531 int kind;
13532 /* Not all combinations of these GOVD_MAP flags are actually valid. */
13533 switch (flags & (GOVD_MAP_TO_ONLY
13534 | GOVD_MAP_FORCE
13535 | GOVD_MAP_FORCE_PRESENT
13536 | GOVD_MAP_ALLOC_ONLY
13537 | GOVD_MAP_FROM_ONLY))
13539 case 0:
13540 kind = GOMP_MAP_TOFROM;
13541 break;
13542 case GOVD_MAP_FORCE:
13543 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
13544 break;
13545 case GOVD_MAP_TO_ONLY:
13546 kind = GOMP_MAP_TO;
13547 break;
13548 case GOVD_MAP_FROM_ONLY:
13549 kind = GOMP_MAP_FROM;
13550 break;
13551 case GOVD_MAP_ALLOC_ONLY:
13552 kind = GOMP_MAP_ALLOC;
13553 break;
13554 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
13555 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
13556 break;
13557 case GOVD_MAP_FORCE_PRESENT:
13558 kind = GOMP_MAP_FORCE_PRESENT;
13559 break;
13560 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
13561 kind = GOMP_MAP_FORCE_PRESENT;
13562 break;
13563 default:
13564 gcc_unreachable ();
13566 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
13567 /* Setting of the implicit flag for the runtime is currently disabled for
13568 OpenACC. */
13569 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13570 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
13571 if (DECL_SIZE (decl)
13572 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
13574 tree decl2 = DECL_VALUE_EXPR (decl);
13575 gcc_assert (INDIRECT_REF_P (decl2));
13576 decl2 = TREE_OPERAND (decl2, 0);
13577 gcc_assert (DECL_P (decl2));
13578 tree mem = build_simple_mem_ref (decl2);
13579 OMP_CLAUSE_DECL (clause) = mem;
13580 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13581 if (gimplify_omp_ctxp->outer_context)
13583 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
13584 omp_notice_variable (ctx, decl2, true);
13585 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
13587 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
13588 OMP_CLAUSE_MAP);
13589 OMP_CLAUSE_DECL (nc) = decl;
13590 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13591 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
13592 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
13593 else
13594 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13595 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
13596 OMP_CLAUSE_CHAIN (clause) = nc;
13598 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
13599 && omp_privatize_by_reference (decl))
13601 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
13602 OMP_CLAUSE_SIZE (clause)
13603 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
13604 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13605 gimplify_omp_ctxp = ctx->outer_context;
13606 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
13607 pre_p, NULL, is_gimple_val, fb_rvalue);
13608 gimplify_omp_ctxp = ctx;
13609 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
13610 OMP_CLAUSE_MAP);
13611 OMP_CLAUSE_DECL (nc) = decl;
13612 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13613 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
13614 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
13615 OMP_CLAUSE_CHAIN (clause) = nc;
13617 else
13618 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
13620 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
13622 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
13623 OMP_CLAUSE_DECL (nc) = decl;
13624 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
13625 OMP_CLAUSE_CHAIN (nc) = chain;
13626 OMP_CLAUSE_CHAIN (clause) = nc;
13627 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13628 gimplify_omp_ctxp = ctx->outer_context;
13629 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13630 (ctx->region_type & ORT_ACC) != 0);
13631 gimplify_omp_ctxp = ctx;
13633 *list_p = clause;
13634 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13635 gimplify_omp_ctxp = ctx->outer_context;
13636 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
13637 in simd. Those are only added for the local vars inside the simd
13638 body, and they don't need to be e.g. default constructible. */
13639 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
13640 lang_hooks.decls.omp_finish_clause (clause, pre_p,
13641 (ctx->region_type & ORT_ACC) != 0);
13642 if (gimplify_omp_ctxp)
13643 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
13644 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
13645 && DECL_P (OMP_CLAUSE_SIZE (clause)))
13646 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
13647 true);
13648 gimplify_omp_ctxp = ctx;
13649 return 0;
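/* A minimal sketch (hypothetical input): given

     int x = 1;
     #pragma omp parallel
     use (x);

   no explicit clause mentions X, so the context records it as
   GOVD_SHARED | GOVD_SEEN and the callback above materializes an
   implicit shared(x) clause (or firstprivate(x)/map(...) variants,
   depending on the flags and region type).  */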
13652 static void
13653 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
13654 enum tree_code code)
13656 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13657 tree *orig_list_p = list_p;
13658 tree c, decl;
13659 bool has_inscan_reductions = false;
13661 if (body)
13663 struct gimplify_omp_ctx *octx;
13664 for (octx = ctx; octx; octx = octx->outer_context)
13665 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
13666 break;
13667 if (octx)
13669 struct walk_stmt_info wi;
13670 memset (&wi, 0, sizeof (wi));
13671 walk_gimple_seq (body, omp_find_stores_stmt,
13672 omp_find_stores_op, &wi);
13676 if (ctx->add_safelen1)
13678 /* If there are VLAs in the body of the simd loop, prevent
13679 vectorization. */
13680 gcc_assert (ctx->region_type == ORT_SIMD);
13681 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
13682 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
13683 OMP_CLAUSE_CHAIN (c) = *list_p;
13684 *list_p = c;
13685 list_p = &OMP_CLAUSE_CHAIN (c);
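/* E.g. (illustrative): a VLA such as "int v[n];" declared inside the
   body of "#pragma omp simd" sets ctx->add_safelen1, and the implicit
   safelen(1) built here effectively disables vectorization of that
   loop.  */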
13688 if (ctx->region_type == ORT_WORKSHARE
13689 && ctx->outer_context
13690 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
13692 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
13693 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13694 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13696 decl = OMP_CLAUSE_DECL (c);
13697 splay_tree_node n
13698 = splay_tree_lookup (ctx->outer_context->variables,
13699 (splay_tree_key) decl);
13700 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
13701 (splay_tree_key) decl));
13702 omp_add_variable (ctx, decl, n->value);
13703 tree c2 = copy_node (c);
13704 OMP_CLAUSE_CHAIN (c2) = *list_p;
13705 *list_p = c2;
13706 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
13707 continue;
13708 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13709 OMP_CLAUSE_FIRSTPRIVATE);
13710 OMP_CLAUSE_DECL (c2) = decl;
13711 OMP_CLAUSE_CHAIN (c2) = *list_p;
13712 *list_p = c2;
13716 if (code == OMP_TARGET
13717 || code == OMP_TARGET_DATA
13718 || code == OMP_TARGET_ENTER_DATA
13719 || code == OMP_TARGET_EXIT_DATA)
13721 vec<omp_mapping_group> *groups;
13722 groups = omp_gather_mapping_groups (list_p);
13723 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
13725 if (groups)
13727 grpmap = omp_index_mapping_groups (groups);
13729 omp_resolve_clause_dependencies (code, groups, grpmap);
13730 omp_build_struct_sibling_lists (code, ctx->region_type, groups,
13731 &grpmap, list_p);
13733 omp_mapping_group *outlist = NULL;
13735 delete grpmap;
13736 delete groups;
13738 /* Rebuild now that we have struct sibling lists. */
13739 groups = omp_gather_mapping_groups (list_p);
13740 grpmap = omp_index_mapping_groups (groups);
13742 bool enter_exit = (code == OMP_TARGET_ENTER_DATA
13743 || code == OMP_TARGET_EXIT_DATA);
13745 outlist = omp_tsort_mapping_groups (groups, grpmap, enter_exit);
13746 outlist = omp_segregate_mapping_groups (outlist);
13747 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
13749 delete grpmap;
13750 delete groups;
13753 else if (ctx->region_type & ORT_ACC)
13755 vec<omp_mapping_group> *groups;
13756 groups = omp_gather_mapping_groups (list_p);
13757 if (groups)
13759 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
13760 grpmap = omp_index_mapping_groups (groups);
13762 oacc_resolve_clause_dependencies (groups, grpmap);
13763 omp_build_struct_sibling_lists (code, ctx->region_type, groups,
13764 &grpmap, list_p);
13766 delete groups;
13767 delete grpmap;
13771 tree attach_list = NULL_TREE;
13772 tree *attach_tail = &attach_list;
13774 tree *grp_start_p = NULL, grp_end = NULL_TREE;
13776 while ((c = *list_p) != NULL)
13778 splay_tree_node n;
13779 bool remove = false;
13780 bool move_attach = false;
13782 if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
13783 grp_end = NULL_TREE;
13785 switch (OMP_CLAUSE_CODE (c))
13787 case OMP_CLAUSE_FIRSTPRIVATE:
13788 if ((ctx->region_type & ORT_TARGET)
13789 && (ctx->region_type & ORT_ACC) == 0
13790 && TYPE_ATOMIC (strip_array_types
13791 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
13793 error_at (OMP_CLAUSE_LOCATION (c),
13794 "%<_Atomic%> %qD in %<firstprivate%> clause on "
13795 "%<target%> construct", OMP_CLAUSE_DECL (c));
13796 remove = true;
13797 break;
13799 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13801 decl = OMP_CLAUSE_DECL (c);
13802 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13803 if ((n->value & GOVD_MAP) != 0)
13805 remove = true;
13806 break;
13808 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
13809 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
13811 /* FALLTHRU */
13812 case OMP_CLAUSE_PRIVATE:
13813 case OMP_CLAUSE_SHARED:
13814 case OMP_CLAUSE_LINEAR:
13815 decl = OMP_CLAUSE_DECL (c);
13816 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13817 remove = !(n->value & GOVD_SEEN);
13818 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
13819 && code == OMP_PARALLEL
13820 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13821 remove = true;
13822 if (! remove)
13824 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
13825 if ((n->value & GOVD_DEBUG_PRIVATE)
13826 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
13828 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
13829 || ((n->value & GOVD_DATA_SHARE_CLASS)
13830 == GOVD_SHARED));
13831 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
13832 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
13834 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
13835 && ctx->has_depend
13836 && DECL_P (decl))
13837 n->value |= GOVD_WRITTEN;
13838 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
13839 && (n->value & GOVD_WRITTEN) == 0
13840 && DECL_P (decl)
13841 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13842 OMP_CLAUSE_SHARED_READONLY (c) = 1;
13843 else if (DECL_P (decl)
13844 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
13845 && (n->value & GOVD_WRITTEN) != 0)
13846 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13847 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
13848 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13849 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13851 else
13852 n->value &= ~GOVD_EXPLICIT;
13853 break;
13855 case OMP_CLAUSE_LASTPRIVATE:
13856 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
13857 accurately reflect the presence of a FIRSTPRIVATE clause. */
13858 decl = OMP_CLAUSE_DECL (c);
13859 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13860 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
13861 = (n->value & GOVD_FIRSTPRIVATE) != 0;
13862 if (code == OMP_DISTRIBUTE
13863 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
13865 remove = true;
13866 error_at (OMP_CLAUSE_LOCATION (c),
13867 "same variable used in %<firstprivate%> and "
13868 "%<lastprivate%> clauses on %<distribute%> "
13869 "construct");
13871 if (!remove
13872 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13873 && DECL_P (decl)
13874 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13875 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13876 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
13877 remove = true;
13878 break;
13880 case OMP_CLAUSE_ALIGNED:
13881 decl = OMP_CLAUSE_DECL (c);
13882 if (!is_global_var (decl))
13884 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13885 remove = n == NULL || !(n->value & GOVD_SEEN);
13886 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
13888 struct gimplify_omp_ctx *octx;
13889 if (n != NULL
13890 && (n->value & (GOVD_DATA_SHARE_CLASS
13891 & ~GOVD_FIRSTPRIVATE)))
13892 remove = true;
13893 else
13894 for (octx = ctx->outer_context; octx;
13895 octx = octx->outer_context)
13897 n = splay_tree_lookup (octx->variables,
13898 (splay_tree_key) decl);
13899 if (n == NULL)
13900 continue;
13901 if (n->value & GOVD_LOCAL)
13902 break;
13903 /* We have to avoid assigning a shared variable
13904 to itself when trying to add
13905 __builtin_assume_aligned. */
13906 if (n->value & GOVD_SHARED)
13908 remove = true;
13909 break;
13914 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
13916 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13917 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
13918 remove = true;
13920 break;
13922 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13923 decl = OMP_CLAUSE_DECL (c);
13924 while (INDIRECT_REF_P (decl)
13925 || TREE_CODE (decl) == ARRAY_REF)
13926 decl = TREE_OPERAND (decl, 0);
13927 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13928 remove = n == NULL || !(n->value & GOVD_SEEN);
13929 break;
13931 case OMP_CLAUSE_IS_DEVICE_PTR:
13932 case OMP_CLAUSE_NONTEMPORAL:
13933 decl = OMP_CLAUSE_DECL (c);
13934 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13935 remove = n == NULL || !(n->value & GOVD_SEEN);
13936 break;
13938 case OMP_CLAUSE_MAP:
13939 decl = OMP_CLAUSE_DECL (c);
13940 if (!grp_end)
13942 grp_start_p = list_p;
13943 grp_end = *omp_group_last (grp_start_p);
13945 switch (OMP_CLAUSE_MAP_KIND (c))
13947 case GOMP_MAP_PRESENT_ALLOC:
13948 case GOMP_MAP_PRESENT_TO:
13949 case GOMP_MAP_PRESENT_FROM:
13950 case GOMP_MAP_PRESENT_TOFROM:
13951 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
13952 break;
13953 default:
13954 break;
13956 switch (code)
13958 case OACC_DATA:
13959 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
13960 break;
13961 /* Fallthrough. */
13962 case OACC_HOST_DATA:
13963 case OACC_ENTER_DATA:
13964 case OACC_EXIT_DATA:
13965 case OMP_TARGET_DATA:
13966 case OMP_TARGET_ENTER_DATA:
13967 case OMP_TARGET_EXIT_DATA:
13968 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13969 || (OMP_CLAUSE_MAP_KIND (c)
13970 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
13971 /* For target {,enter ,exit }data only the array slice is
13972 mapped, but not the pointer to it. */
13973 remove = true;
13974 if (code == OMP_TARGET_EXIT_DATA
13975 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
13976 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER))
13977 remove = true;
13978 break;
13979 case OMP_TARGET:
13980 break;
13981 default:
13982 break;
13984 if (remove)
13985 break;
13986 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
13988 /* Sanity check: attach/detach map kinds use the size as a bias,
13989 and it's never right to use the decl size for such
13990 mappings. */
13991 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
13992 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
13993 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH
13994 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
13995 && (OMP_CLAUSE_MAP_KIND (c)
13996 != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
13997 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
13998 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
14000 gimplify_omp_ctxp = ctx->outer_context;
14001 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, NULL,
14002 is_gimple_val, fb_rvalue) == GS_ERROR)
14004 gimplify_omp_ctxp = ctx;
14005 remove = true;
14006 break;
14008 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
14009 || (OMP_CLAUSE_MAP_KIND (c)
14010 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14011 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14012 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
14014 OMP_CLAUSE_SIZE (c)
14015 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
14016 false);
14017 if ((ctx->region_type & ORT_TARGET) != 0)
14018 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
14019 GOVD_FIRSTPRIVATE | GOVD_SEEN);
14021 gimplify_omp_ctxp = ctx;
14022 /* Data clauses associated with reductions must be
14023 compatible with present_or_copy. Warn and adjust the clause
14024 if that is not the case. */
14025 if (ctx->region_type == ORT_ACC_PARALLEL
14026 || ctx->region_type == ORT_ACC_SERIAL)
14028 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
14029 n = NULL;
14031 if (DECL_P (t))
14032 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
14034 if (n && (n->value & GOVD_REDUCTION))
14036 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
14038 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
14039 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
14040 && kind != GOMP_MAP_FORCE_PRESENT
14041 && kind != GOMP_MAP_POINTER)
14043 warning_at (OMP_CLAUSE_LOCATION (c), 0,
14044 "incompatible data clause with reduction "
14045 "on %qE; promoting to %<present_or_copy%>",
14046 DECL_NAME (t));
14047 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
14051 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
14052 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
14053 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
14055 remove = true;
14056 break;
14058 /* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
14059 a variable captured in a lambda closure), look through that now
14060 before the DECL_P check below. (A code other than COMPONENT_REF,
14061 i.e. INDIRECT_REF, will be a VLA/variable-length array
14062 section. A global var may be a variable in a common block. We
14063 don't want to do this here for either of those.) */
14064 if ((ctx->region_type & ORT_ACC) == 0
14065 && DECL_P (decl)
14066 && !is_global_var (decl)
14067 && DECL_HAS_VALUE_EXPR_P (decl)
14068 && TREE_CODE (DECL_VALUE_EXPR (decl)) == COMPONENT_REF)
14069 decl = OMP_CLAUSE_DECL (c) = DECL_VALUE_EXPR (decl);
14070 if (TREE_CODE (decl) == TARGET_EXPR)
14072 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
14073 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
14074 remove = true;
14076 else if (!DECL_P (decl))
14078 if ((ctx->region_type & ORT_TARGET) != 0
14079 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
14081 if (INDIRECT_REF_P (decl)
14082 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14083 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14084 == REFERENCE_TYPE))
14085 decl = TREE_OPERAND (decl, 0);
14086 if (TREE_CODE (decl) == COMPONENT_REF)
14088 while (TREE_CODE (decl) == COMPONENT_REF)
14089 decl = TREE_OPERAND (decl, 0);
14090 if (DECL_P (decl))
14092 n = splay_tree_lookup (ctx->variables,
14093 (splay_tree_key) decl);
14094 if (!(n->value & GOVD_SEEN))
14095 remove = true;
14100 tree d = decl, *pd;
14101 if (TREE_CODE (d) == ARRAY_REF)
14103 while (TREE_CODE (d) == ARRAY_REF)
14104 d = TREE_OPERAND (d, 0);
14105 if (TREE_CODE (d) == COMPONENT_REF
14106 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
14107 decl = d;
14109 pd = &OMP_CLAUSE_DECL (c);
14110 if (d == decl
14111 && TREE_CODE (decl) == INDIRECT_REF
14112 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14113 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14114 == REFERENCE_TYPE)
14115 && (OMP_CLAUSE_MAP_KIND (c)
14116 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
14118 pd = &TREE_OPERAND (decl, 0);
14119 decl = TREE_OPERAND (decl, 0);
14122 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14123 switch (code)
14125 case OACC_ENTER_DATA:
14126 case OACC_EXIT_DATA:
14127 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
14128 == ARRAY_TYPE)
14129 remove = true;
14130 else if (code == OACC_ENTER_DATA)
14131 goto change_to_attach;
14132 /* Fallthrough. */
14133 case OMP_TARGET_EXIT_DATA:
14134 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DETACH);
14135 break;
14136 case OACC_UPDATE:
14137 /* An "attach/detach" operation on an update directive
14138 should behave as a GOMP_MAP_ALWAYS_POINTER. Note that
14139 both GOMP_MAP_ATTACH_DETACH and GOMP_MAP_ALWAYS_POINTER
14140 kinds depend on the previous mapping (for non-TARGET
14141 regions). */
14142 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
14143 break;
14144 default:
14145 change_to_attach:
14146 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ATTACH);
14147 if ((ctx->region_type & ORT_TARGET) != 0)
14148 move_attach = true;
14150 else if ((ctx->region_type & ORT_TARGET) != 0
14151 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14152 || (OMP_CLAUSE_MAP_KIND (c)
14153 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
14154 move_attach = true;
14156 /* If we have e.g. map(struct: *var), don't gimplify the
14157 argument since omp-low.cc wants to see the decl itself. */
14158 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
14159 break;
14161 /* We've already partly gimplified this in
14162 gimplify_scan_omp_clauses. Don't do any more. */
14163 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
14164 break;
14166 gimplify_omp_ctxp = ctx->outer_context;
14167 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
14168 fb_lvalue) == GS_ERROR)
14169 remove = true;
14170 gimplify_omp_ctxp = ctx;
14171 break;
14174 if ((code == OMP_TARGET
14175 || code == OMP_TARGET_DATA
14176 || code == OMP_TARGET_ENTER_DATA
14177 || code == OMP_TARGET_EXIT_DATA)
14178 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14180 bool firstprivatize = false;
14182 for (struct gimplify_omp_ctx *octx = ctx->outer_context; octx;
14183 octx = octx->outer_context)
14185 splay_tree_node n
14186 = splay_tree_lookup (octx->variables,
14187 (splay_tree_key) OMP_CLAUSE_DECL (c));
14188 /* If this is contained in an outer OpenMP region as a
14189 firstprivate value, remove the attach/detach. */
14190 if (n && (n->value & GOVD_FIRSTPRIVATE))
14192 firstprivatize = true;
14193 break;
14197 enum gomp_map_kind map_kind;
14198 if (firstprivatize)
14199 map_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
14200 else if (code == OMP_TARGET_EXIT_DATA)
14201 map_kind = GOMP_MAP_DETACH;
14202 else
14203 map_kind = GOMP_MAP_ATTACH;
14204 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
14206 else if ((ctx->region_type & ORT_ACC) != 0
14207 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14209 enum gomp_map_kind map_kind = (code == OACC_EXIT_DATA
14210 ? GOMP_MAP_DETACH
14211 : GOMP_MAP_ATTACH);
14212 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
14215 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14216 if ((ctx->region_type & ORT_TARGET) != 0
14217 && !(n->value & GOVD_SEEN)
14218 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
14219 && (!is_global_var (decl)
14220 || !lookup_attribute ("omp declare target link",
14221 DECL_ATTRIBUTES (decl))))
14223 remove = true;
14224 /* For struct element mappings, if the struct is never referenced
14225 in the target block and none of the mappings has an always
14226 modifier, remove all the struct element mappings, which
14227 immediately follow the GOMP_MAP_STRUCT map clause. */
14228 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
14229 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
14231 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
14232 while (cnt--)
14233 OMP_CLAUSE_CHAIN (c)
14234 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
14237 else if (DECL_SIZE (decl)
14238 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
14239 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
14240 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
14241 && (OMP_CLAUSE_MAP_KIND (c)
14242 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
14244 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
14245 for these, TREE_CODE (DECL_SIZE (decl)) will always be
14246 INTEGER_CST. */
14247 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
14249 tree decl2 = DECL_VALUE_EXPR (decl);
14250 gcc_assert (INDIRECT_REF_P (decl2));
14251 decl2 = TREE_OPERAND (decl2, 0);
14252 gcc_assert (DECL_P (decl2));
14253 tree mem = build_simple_mem_ref (decl2);
14254 OMP_CLAUSE_DECL (c) = mem;
14255 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
14256 if (ctx->outer_context)
14258 omp_notice_variable (ctx->outer_context, decl2, true);
14259 omp_notice_variable (ctx->outer_context,
14260 OMP_CLAUSE_SIZE (c), true);
14262 if (((ctx->region_type & ORT_TARGET) != 0
14263 || !ctx->target_firstprivatize_array_bases)
14264 && ((n->value & GOVD_SEEN) == 0
14265 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
14267 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14268 OMP_CLAUSE_MAP);
14269 OMP_CLAUSE_DECL (nc) = decl;
14270 OMP_CLAUSE_SIZE (nc) = size_zero_node;
14271 if (ctx->target_firstprivatize_array_bases)
14272 OMP_CLAUSE_SET_MAP_KIND (nc,
14273 GOMP_MAP_FIRSTPRIVATE_POINTER);
14274 else
14275 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
14276 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
14277 OMP_CLAUSE_CHAIN (c) = nc;
14278 c = nc;
14281 else
14283 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14284 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
14285 gcc_assert ((n->value & GOVD_SEEN) == 0
14286 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
14287 == 0));
14290 /* If we have a target region, we can push all the attaches to the
14291 end of the list (we may have standalone "attach" operations
14292 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
14293 the attachment point AND the pointed-to block have been mapped).
14294 If we have something else, e.g. "enter data", we need to keep
14295 "attach" nodes together with the previous node they attach to so
14296 that separate "exit data" operations work properly (see
14297 libgomp/target.c). */
14298 if ((ctx->region_type & ORT_TARGET) != 0
14299 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14300 || (OMP_CLAUSE_MAP_KIND (c)
14301 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
14302 move_attach = true;
14304 break;
14306 case OMP_CLAUSE_TO:
14307 case OMP_CLAUSE_FROM:
14308 case OMP_CLAUSE__CACHE_:
14309 decl = OMP_CLAUSE_DECL (c);
14310 if (!DECL_P (decl))
14311 break;
14312 if (DECL_SIZE (decl)
14313 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
14315 tree decl2 = DECL_VALUE_EXPR (decl);
14316 gcc_assert (INDIRECT_REF_P (decl2));
14317 decl2 = TREE_OPERAND (decl2, 0);
14318 gcc_assert (DECL_P (decl2));
14319 tree mem = build_simple_mem_ref (decl2);
14320 OMP_CLAUSE_DECL (c) = mem;
14321 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
14322 if (ctx->outer_context)
14324 omp_notice_variable (ctx->outer_context, decl2, true);
14325 omp_notice_variable (ctx->outer_context,
14326 OMP_CLAUSE_SIZE (c), true);
14329 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14330 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
14331 break;
14333 case OMP_CLAUSE_REDUCTION:
14334 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
14336 decl = OMP_CLAUSE_DECL (c);
14337 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14338 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
14340 remove = true;
14341 error_at (OMP_CLAUSE_LOCATION (c),
14342 "%qD specified in %<inscan%> %<reduction%> clause "
14343 "but not in %<scan%> directive clause", decl);
14344 break;
14346 has_inscan_reductions = true;
14348 /* FALLTHRU */
14349 case OMP_CLAUSE_IN_REDUCTION:
14350 case OMP_CLAUSE_TASK_REDUCTION:
14351 decl = OMP_CLAUSE_DECL (c);
14352 /* OpenACC reductions need a present_or_copy data clause.
14353 Add one if necessary. Emit error when the reduction is private. */
14354 if (ctx->region_type == ORT_ACC_PARALLEL
14355 || ctx->region_type == ORT_ACC_SERIAL)
14357 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14358 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
14360 remove = true;
14361 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
14362 "reduction on %qE", DECL_NAME (decl));
14364 else if ((n->value & GOVD_MAP) == 0)
14366 tree next = OMP_CLAUSE_CHAIN (c);
14367 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
14368 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
14369 OMP_CLAUSE_DECL (nc) = decl;
14370 OMP_CLAUSE_CHAIN (c) = nc;
14371 lang_hooks.decls.omp_finish_clause (nc, pre_p,
14372 (ctx->region_type
14373 & ORT_ACC) != 0);
14374 while (1)
14376 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
14377 if (OMP_CLAUSE_CHAIN (nc) == NULL)
14378 break;
14379 nc = OMP_CLAUSE_CHAIN (nc);
14381 OMP_CLAUSE_CHAIN (nc) = next;
14382 n->value |= GOVD_MAP;
14385 if (DECL_P (decl)
14386 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
14387 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
14388 break;
14390 case OMP_CLAUSE_ALLOCATE:
14391 decl = OMP_CLAUSE_DECL (c);
14392 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14393 if (n != NULL && !(n->value & GOVD_SEEN))
14395 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
14396 != 0
14397 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
14398 remove = true;
14400 if (!remove
14401 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14402 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
14403 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
14404 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
14405 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
14407 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14408 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
14409 if (n == NULL)
14411 enum omp_clause_default_kind default_kind
14412 = ctx->default_kind;
14413 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
14414 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
14415 true);
14416 ctx->default_kind = default_kind;
14418 else
14419 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
14420 true);
14422 break;
14424 case OMP_CLAUSE_COPYIN:
14425 case OMP_CLAUSE_COPYPRIVATE:
14426 case OMP_CLAUSE_IF:
14427 case OMP_CLAUSE_SELF:
14428 case OMP_CLAUSE_NUM_THREADS:
14429 case OMP_CLAUSE_NUM_TEAMS:
14430 case OMP_CLAUSE_THREAD_LIMIT:
14431 case OMP_CLAUSE_DIST_SCHEDULE:
14432 case OMP_CLAUSE_DEVICE:
14433 case OMP_CLAUSE_SCHEDULE:
14434 case OMP_CLAUSE_NOWAIT:
14435 case OMP_CLAUSE_ORDERED:
14436 case OMP_CLAUSE_DEFAULT:
14437 case OMP_CLAUSE_UNTIED:
14438 case OMP_CLAUSE_COLLAPSE:
14439 case OMP_CLAUSE_FINAL:
14440 case OMP_CLAUSE_MERGEABLE:
14441 case OMP_CLAUSE_PROC_BIND:
14442 case OMP_CLAUSE_SAFELEN:
14443 case OMP_CLAUSE_SIMDLEN:
14444 case OMP_CLAUSE_DEPEND:
14445 case OMP_CLAUSE_DOACROSS:
14446 case OMP_CLAUSE_PRIORITY:
14447 case OMP_CLAUSE_GRAINSIZE:
14448 case OMP_CLAUSE_NUM_TASKS:
14449 case OMP_CLAUSE_NOGROUP:
14450 case OMP_CLAUSE_THREADS:
14451 case OMP_CLAUSE_SIMD:
14452 case OMP_CLAUSE_FILTER:
14453 case OMP_CLAUSE_HINT:
14454 case OMP_CLAUSE_DEFAULTMAP:
14455 case OMP_CLAUSE_ORDER:
14456 case OMP_CLAUSE_BIND:
14457 case OMP_CLAUSE_DETACH:
14458 case OMP_CLAUSE_USE_DEVICE_PTR:
14459 case OMP_CLAUSE_USE_DEVICE_ADDR:
14460 case OMP_CLAUSE_ASYNC:
14461 case OMP_CLAUSE_WAIT:
14462 case OMP_CLAUSE_INDEPENDENT:
14463 case OMP_CLAUSE_NUM_GANGS:
14464 case OMP_CLAUSE_NUM_WORKERS:
14465 case OMP_CLAUSE_VECTOR_LENGTH:
14466 case OMP_CLAUSE_GANG:
14467 case OMP_CLAUSE_WORKER:
14468 case OMP_CLAUSE_VECTOR:
14469 case OMP_CLAUSE_AUTO:
14470 case OMP_CLAUSE_SEQ:
14471 case OMP_CLAUSE_TILE:
14472 case OMP_CLAUSE_IF_PRESENT:
14473 case OMP_CLAUSE_FINALIZE:
14474 case OMP_CLAUSE_INCLUSIVE:
14475 case OMP_CLAUSE_EXCLUSIVE:
14476 break;
14478 case OMP_CLAUSE_NOHOST:
14479 default:
14480 gcc_unreachable ();
14483 if (remove)
14484 *list_p = OMP_CLAUSE_CHAIN (c);
14485 else if (move_attach)
14487 /* Remove the attach node from here and separate it out into its own list. */
14488 *attach_tail = c;
14489 *list_p = OMP_CLAUSE_CHAIN (c);
14490 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
14491 attach_tail = &OMP_CLAUSE_CHAIN (c);
14493 else
14494 list_p = &OMP_CLAUSE_CHAIN (c);
14497 /* Splice attach nodes at the end of the list. */
14498 if (attach_list)
14500 *list_p = attach_list;
14501 list_p = attach_tail;
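/* E.g. (sketch, hypothetical source): for
   "#pragma omp target map(to: p->arr[0:n])" the synthesized
   GOMP_MAP_ATTACH node for the base pointer ends up here at the tail,
   after both the pointer and the pointed-to block have been mapped.  */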
14504 /* Add in any implicit data sharing. */
14505 struct gimplify_adjust_omp_clauses_data data;
14506 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
14508 /* OpenMP. Implicit clauses are added near the start of the clause list,
14509 immediately before the first map clause. */
14510 tree *implicit_add_list_p = orig_list_p;
14511 while (*implicit_add_list_p
14512 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
14513 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
14514 data.list_p = implicit_add_list_p;
14516 else
14517 /* OpenACC. */
14518 data.list_p = list_p;
14519 data.pre_p = pre_p;
14520 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
14522 if (has_inscan_reductions)
14523 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
14524 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14525 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
14527 error_at (OMP_CLAUSE_LOCATION (c),
14528 "%<inscan%> %<reduction%> clause used together with "
14529 "%<linear%> clause for a variable other than loop "
14530 "iterator");
14531 break;
14534 gimplify_omp_ctxp = ctx->outer_context;
14535 delete_omp_context (ctx);
14538 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
14539 1 if they do, and -1 if it is not known yet (simd is involved and the
14540 answer won't be known until vectorization). If SCORES is non-NULL, it
14541 should point to an array of at least 2*NCONSTRUCTS+2 ints, which will be
14542 filled with the positions of the CONSTRUCTS (position -1 if a construct
14543 will never match) followed by the number of constructs in the OpenMP
14544 context construct trait. If the score depends on whether the code will
14545 be in a declare simd clone or not, the function returns 2 and there are
14546 two sets of scores, the first one for the case that it is not in a
14547 declare simd clone, the other for the case that it is. */
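/* A hedged example: when called within a '#pragma omp parallel' region
   with CONSTRUCTS = { OMP_PARALLEL }, the selector matches and 1 is
   returned; if OMP_SIMD were among the matched constructs, -1 would be
   returned instead, since the answer is only known once vectorization
   has decided whether the simd loop survives.  */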
14549 int
14550 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
14551 int *scores)
14553 int matched = 0, cnt = 0;
14554 bool simd_seen = false;
14555 bool target_seen = false;
14556 int declare_simd_cnt = -1;
14557 auto_vec<enum tree_code, 16> codes;
14558 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
14560 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
14561 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
14562 == ORT_TARGET && ctx->code == OMP_TARGET)
14563 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
14564 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
14565 || (ctx->region_type == ORT_SIMD
14566 && ctx->code == OMP_SIMD
14567 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
14569 ++cnt;
14570 if (scores)
14571 codes.safe_push (ctx->code);
14572 else if (matched < nconstructs && ctx->code == constructs[matched])
14574 if (ctx->code == OMP_SIMD)
14576 if (matched)
14577 return 0;
14578 simd_seen = true;
14580 ++matched;
14582 if (ctx->code == OMP_TARGET)
14584 if (scores == NULL)
14585 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
14586 target_seen = true;
14587 break;
14590 else if (ctx->region_type == ORT_WORKSHARE
14591 && ctx->code == OMP_LOOP
14592 && ctx->outer_context
14593 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
14594 && ctx->outer_context->outer_context
14595 && ctx->outer_context->outer_context->code == OMP_LOOP
14596 && ctx->outer_context->outer_context->distribute)
14597 ctx = ctx->outer_context->outer_context;
14598 ctx = ctx->outer_context;
14600 if (!target_seen
14601 && lookup_attribute ("omp declare simd",
14602 DECL_ATTRIBUTES (current_function_decl)))
14604 /* Declare simd is a "maybe" case: it is supposed to apply only to the
14605 clones added by omp-simd-clone.cc, not to the base function. */
14606 declare_simd_cnt = cnt++;
14607 if (scores)
14608 codes.safe_push (OMP_SIMD);
14609 else if (cnt == 0
14610 && constructs[0] == OMP_SIMD)
14612 gcc_assert (matched == 0);
14613 simd_seen = true;
14614 if (++matched == nconstructs)
14615 return -1;
14618 if (tree attr = lookup_attribute ("omp declare variant variant",
14619 DECL_ATTRIBUTES (current_function_decl)))
14621 tree selectors = TREE_VALUE (attr);
14622 int variant_nconstructs = list_length (selectors);
14623 enum tree_code *variant_constructs = NULL;
14624 if (!target_seen && variant_nconstructs)
14626 variant_constructs
14627 = (enum tree_code *) alloca (variant_nconstructs
14628 * sizeof (enum tree_code));
14629 omp_construct_traits_to_codes (selectors, variant_nconstructs,
14630 variant_constructs);
14632 for (int i = 0; i < variant_nconstructs; i++)
14634 ++cnt;
14635 if (scores)
14636 codes.safe_push (variant_constructs[i]);
14637 else if (matched < nconstructs
14638 && variant_constructs[i] == constructs[matched])
14640 if (variant_constructs[i] == OMP_SIMD)
14642 if (matched)
14643 return 0;
14644 simd_seen = true;
14646 ++matched;
14650 if (!target_seen
14651 && lookup_attribute ("omp declare target block",
14652 DECL_ATTRIBUTES (current_function_decl)))
14654 if (scores)
14655 codes.safe_push (OMP_TARGET);
14656 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
14657 ++matched;
14659 if (scores)
14661 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
14663 int j = codes.length () - 1;
14664 for (int i = nconstructs - 1; i >= 0; i--)
14666 while (j >= 0
14667 && (pass != 0 || declare_simd_cnt != j)
14668 && constructs[i] != codes[j])
14669 --j;
14670 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
14671 *scores++ = j - 1;
14672 else
14673 *scores++ = j;
14675 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
14676 ? codes.length () - 1 : codes.length ());
14678 return declare_simd_cnt == -1 ? 1 : 2;
14680 if (matched == nconstructs)
14681 return simd_seen ? -1 : 1;
14682 return 0;
14685 /* Gimplify OACC_CACHE. */
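/* For illustration (user-level OpenACC, hypothetical source):
     #pragma acc cache (a[0:n])
   The clause list is scanned and adjusted like other OpenACC clause
   lists, and the construct itself is then dropped; see the TODO
   below.  */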
14687 static void
14688 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
14690 tree expr = *expr_p;
14692 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
14693 OACC_CACHE);
14694 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
14695 OACC_CACHE);
14697 /* TODO: Do something sensible with this information. */
14699 *expr_p = NULL_TREE;
14702 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
14703 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
14704 kind. The entry kind replaces the one in CLAUSE, while the exit
14705 kind is used in a new omp_clause that is returned to the caller. */
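/* For instance, per the switch below, a GOMP_MAP_TOFROM clause is
   rewritten to GOMP_MAP_TO for the entry, and a new GOMP_MAP_FROM
   clause is built and returned for the exit.  */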
14707 static tree
14708 gimplify_oacc_declare_1 (tree clause)
14710 HOST_WIDE_INT kind, new_op;
14711 bool ret = false;
14712 tree c = NULL;
14714 kind = OMP_CLAUSE_MAP_KIND (clause);
14716 switch (kind)
14718 case GOMP_MAP_ALLOC:
14719 new_op = GOMP_MAP_RELEASE;
14720 ret = true;
14721 break;
14723 case GOMP_MAP_FROM:
14724 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
14725 new_op = GOMP_MAP_FROM;
14726 ret = true;
14727 break;
14729 case GOMP_MAP_TOFROM:
14730 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
14731 new_op = GOMP_MAP_FROM;
14732 ret = true;
14733 break;
14735 case GOMP_MAP_DEVICE_RESIDENT:
14736 case GOMP_MAP_FORCE_DEVICEPTR:
14737 case GOMP_MAP_FORCE_PRESENT:
14738 case GOMP_MAP_LINK:
14739 case GOMP_MAP_POINTER:
14740 case GOMP_MAP_TO:
14741 break;
14743 default:
14744 gcc_unreachable ();
14745 break;
14748 if (ret)
14750 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
14751 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
14752 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
14755 return c;
14758 /* Gimplify OACC_DECLARE. */
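/* A hypothetical example:
     int x;
     #pragma acc declare copy (x)
   For such a function-local variable, gimplify_oacc_declare_1 yields an
   exit-time clause (here GOMP_MAP_FROM), which is recorded in
   oacc_declare_returns below.  */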
14760 static void
14761 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
14763 tree expr = *expr_p;
14764 gomp_target *stmt;
14765 tree clauses, t, decl;
14767 clauses = OACC_DECLARE_CLAUSES (expr);
14769 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
14770 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
14772 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
14774 decl = OMP_CLAUSE_DECL (t);
14776 if (TREE_CODE (decl) == MEM_REF)
14777 decl = TREE_OPERAND (decl, 0);
14779 if (VAR_P (decl) && !is_oacc_declared (decl))
14781 tree attr = get_identifier ("oacc declare target");
14782 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
14783 DECL_ATTRIBUTES (decl));
14786 if (VAR_P (decl)
14787 && !is_global_var (decl)
14788 && DECL_CONTEXT (decl) == current_function_decl)
14790 tree c = gimplify_oacc_declare_1 (t);
14791 if (c)
14793 if (oacc_declare_returns == NULL)
14794 oacc_declare_returns = new hash_map<tree, tree>;
14796 oacc_declare_returns->put (decl, c);
14800 if (gimplify_omp_ctxp)
14801 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
14804 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
14805 clauses);
14807 gimplify_seq_add_stmt (pre_p, stmt);
14809 *expr_p = NULL_TREE;
14812 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
14813 gimplification of the body, as well as scanning the body for used
14814 variables. We need to do this scan now, because variable-sized
14815 decls will be decomposed during gimplification. */
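/* An informal sketch of the result:
     #pragma omp parallel shared (a)
       body
   becomes a GIMPLE_OMP_PARALLEL statement wrapping the gimplified body,
   with the GF_OMP_PARALLEL_COMBINED subcode set for combined constructs
   such as '#pragma omp parallel for'.  */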
14817 static void
14818 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
14820 tree expr = *expr_p;
14821 gimple *g;
14822 gimple_seq body = NULL;
14824 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
14825 OMP_PARALLEL_COMBINED (expr)
14826 ? ORT_COMBINED_PARALLEL
14827 : ORT_PARALLEL, OMP_PARALLEL);
14829 push_gimplify_context ();
14831 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
14832 if (gimple_code (g) == GIMPLE_BIND)
14833 pop_gimplify_context (g);
14834 else
14835 pop_gimplify_context (NULL);
14837 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
14838 OMP_PARALLEL);
14840 g = gimple_build_omp_parallel (body,
14841 OMP_PARALLEL_CLAUSES (expr),
14842 NULL_TREE, NULL_TREE);
14843 if (OMP_PARALLEL_COMBINED (expr))
14844 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
14845 gimplify_seq_add_stmt (pre_p, g);
14846 *expr_p = NULL_TREE;
14849 /* Gimplify the contents of an OMP_TASK statement. This involves
14850 gimplification of the body, as well as scanning the body for used
14851 variables. We need to do this scan now, because variable-sized
14852 decls will be decomposed during gimplification. */
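/* Note that an OMP_TASK with a NULL body represents a construct such as
   (illustrative)
     #pragma omp taskwait depend (in: x)
   which is why the checks below reject a 'mutexinoutset' dependence and
   a 'nowait' clause without any 'depend' clause on such a construct.  */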
14854 static void
14855 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
14857 tree expr = *expr_p;
14858 gimple *g;
14859 gimple_seq body = NULL;
14860 bool nowait = false;
14861 bool has_depend = false;
14863 if (OMP_TASK_BODY (expr) == NULL_TREE)
14865 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14866 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
14868 has_depend = true;
14869 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
14871 error_at (OMP_CLAUSE_LOCATION (c),
14872 "%<mutexinoutset%> kind in %<depend%> clause on a "
14873 "%<taskwait%> construct");
14874 break;
14877 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
14878 nowait = true;
14879 if (nowait && !has_depend)
14881 error_at (EXPR_LOCATION (expr),
14882 "%<taskwait%> construct with %<nowait%> clause but no "
14883 "%<depend%> clauses");
14884 *expr_p = NULL_TREE;
14885 return;
14889 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
14890 omp_find_clause (OMP_TASK_CLAUSES (expr),
14891 OMP_CLAUSE_UNTIED)
14892 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
14894 if (OMP_TASK_BODY (expr))
14896 push_gimplify_context ();
14898 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
14899 if (gimple_code (g) == GIMPLE_BIND)
14900 pop_gimplify_context (g);
14901 else
14902 pop_gimplify_context (NULL);
14905 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
14906 OMP_TASK);
14908 g = gimple_build_omp_task (body,
14909 OMP_TASK_CLAUSES (expr),
14910 NULL_TREE, NULL_TREE,
14911 NULL_TREE, NULL_TREE, NULL_TREE);
14912 if (OMP_TASK_BODY (expr) == NULL_TREE)
14913 gimple_omp_task_set_taskwait_p (g, true);
14914 gimplify_seq_add_stmt (pre_p, g);
14915 *expr_p = NULL_TREE;
14918 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
14919 force it into a temporary initialized in PRE_P and add a firstprivate
14920 clause to ORIG_FOR_STMT. */
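/* A hypothetical use: for
     #pragma omp taskloop
     for (i = 0; i < f (); i++)
   the call f () is evaluated into a temporary before the taskloop and
   that temporary is then made firstprivate on ORIG_FOR_STMT.  */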
14922 static void
14923 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
14924 tree orig_for_stmt)
14926 if (*tp == NULL || is_gimple_constant (*tp))
14927 return;
14929 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
14930 /* A reference-to-pointer conversion is considered useless,
14931 but it is significant for the firstprivate clause. Force
14932 it here. */
14933 if (type
14934 && TREE_CODE (type) == POINTER_TYPE
14935 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
14937 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
14938 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
14939 gimplify_and_add (m, pre_p);
14940 *tp = v;
14943 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
14944 OMP_CLAUSE_DECL (c) = *tp;
14945 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
14946 OMP_FOR_CLAUSES (orig_for_stmt) = c;
14949 /* Helper function of gimplify_omp_for: find an OMP_ORDERED with a
14950 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
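/* Such a bodyless OMP_ORDERED corresponds to a stand-alone doacross
   directive, e.g. (illustrative)
     #pragma omp ordered depend (sink: i - 1)
   as opposed to an 'ordered' construct with an attached block.  */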
14952 static tree
14953 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
14955 switch (TREE_CODE (*tp))
14957 case OMP_ORDERED:
14958 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
14959 return *tp;
14960 break;
14961 case OMP_SIMD:
14962 case OMP_PARALLEL:
14963 case OMP_TARGET:
14964 *walk_subtrees = 0;
14965 break;
14966 default:
14967 break;
14969 return NULL_TREE;
14972 /* Gimplify the gross structure of an OMP_FOR statement. */
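/* Besides OMP_FOR itself, this also handles OMP_SIMD, OMP_DISTRIBUTE,
   OMP_TASKLOOP and OACC_LOOP; see the switches on TREE_CODE (for_stmt)
   below.  */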
14974 static enum gimplify_status
14975 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
14977 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
14978 enum gimplify_status ret = GS_ALL_DONE;
14979 enum gimplify_status tret;
14980 gomp_for *gfor;
14981 gimple_seq for_body, for_pre_body;
14982 int i;
14983 bitmap has_decl_expr = NULL;
14984 enum omp_region_type ort = ORT_WORKSHARE;
14985 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
14987 orig_for_stmt = for_stmt = *expr_p;
14989 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
14990 != NULL_TREE);
14991 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
14993 tree *data[4] = { NULL, NULL, NULL, NULL };
14994 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
14995 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
14996 find_combined_omp_for, data, NULL);
14997 if (inner_for_stmt == NULL_TREE)
14999 gcc_assert (seen_error ());
15000 *expr_p = NULL_TREE;
15001 return GS_ERROR;
15003 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
15005 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
15006 &OMP_FOR_PRE_BODY (for_stmt));
15007 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
15009 if (OMP_FOR_PRE_BODY (inner_for_stmt))
15011 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
15012 &OMP_FOR_PRE_BODY (for_stmt));
15013 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
15016 if (data[0])
15018 /* We have some statements or variable declarations in between
15019 the composite construct directives. Move them around the
15020 inner_for_stmt. */
15021 data[0] = expr_p;
15022 for (i = 0; i < 3; i++)
15023 if (data[i])
15025 tree t = *data[i];
15026 if (i < 2 && data[i + 1] == &OMP_BODY (t))
15027 data[i + 1] = data[i];
15028 *data[i] = OMP_BODY (t);
15029 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
15030 NULL_TREE, make_node (BLOCK));
15031 OMP_BODY (t) = body;
15032 append_to_statement_list_force (inner_for_stmt,
15033 &BIND_EXPR_BODY (body));
15034 *data[3] = t;
15035 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
15036 gcc_assert (*data[3] == inner_for_stmt);
15038 return GS_OK;
15041 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
15042 if (!loop_p
15043 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
15044 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15045 i)) == TREE_LIST
15046 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15047 i)))
15049 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
15050 /* Class iterators aren't allowed on OMP_SIMD, so the only
15051 case we need to solve is distribute parallel for. They are
15052 allowed on the loop construct, but that is already handled
15053 in gimplify_omp_loop. */
15054 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
15055 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
15056 && data[1]);
15057 tree orig_decl = TREE_PURPOSE (orig);
15058 tree last = TREE_VALUE (orig);
15059 tree *pc;
15060 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
15061 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
15062 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
15063 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
15064 && OMP_CLAUSE_DECL (*pc) == orig_decl)
15065 break;
15066 if (*pc == NULL_TREE)
15068 tree *spc;
15069 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
15070 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
15071 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
15072 && OMP_CLAUSE_DECL (*spc) == orig_decl)
15073 break;
15074 if (*spc)
15076 tree c = *spc;
15077 *spc = OMP_CLAUSE_CHAIN (c);
15078 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
15079 *pc = c;
15082 if (*pc == NULL_TREE)
15084 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
15086 /* A private clause will appear only on inner_for_stmt.
15087 Change it into firstprivate, and add a private clause
15088 on for_stmt. */
15089 tree c = copy_node (*pc);
15090 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
15091 OMP_FOR_CLAUSES (for_stmt) = c;
15092 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
15093 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
15095 else
15097 /* A lastprivate clause will appear on both inner_for_stmt
15098 and for_stmt. Add a firstprivate clause to
15099 inner_for_stmt. */
15100 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
15101 OMP_CLAUSE_FIRSTPRIVATE);
15102 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
15103 OMP_CLAUSE_CHAIN (c) = *pc;
15104 *pc = c;
15105 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
15107 tree c = build_omp_clause (UNKNOWN_LOCATION,
15108 OMP_CLAUSE_FIRSTPRIVATE);
15109 OMP_CLAUSE_DECL (c) = last;
15110 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15111 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15112 c = build_omp_clause (UNKNOWN_LOCATION,
15113 *pc ? OMP_CLAUSE_SHARED
15114 : OMP_CLAUSE_FIRSTPRIVATE);
15115 OMP_CLAUSE_DECL (c) = orig_decl;
15116 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15117 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15119 /* Similarly, take care of C++ range-for temporaries; those should
15120 be firstprivate on the OMP_PARALLEL, if any. */
15121 if (data[1])
15122 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
15123 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
15124 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15125 i)) == TREE_LIST
15126 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15127 i)))
15129 tree orig
15130 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
15131 tree v = TREE_CHAIN (orig);
15132 tree c = build_omp_clause (UNKNOWN_LOCATION,
15133 OMP_CLAUSE_FIRSTPRIVATE);
15134 /* First add firstprivate clause for the __for_end artificial
15135 decl. */
15136 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
15137 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
15138 == REFERENCE_TYPE)
15139 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
15140 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15141 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15142 if (TREE_VEC_ELT (v, 0))
15144 /* And now the same for __for_range artificial decl if it
15145 exists. */
15146 c = build_omp_clause (UNKNOWN_LOCATION,
15147 OMP_CLAUSE_FIRSTPRIVATE);
15148 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
15149 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
15150 == REFERENCE_TYPE)
15151 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
15152 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15153 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15158 switch (TREE_CODE (for_stmt))
15160 case OMP_FOR:
15161 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
15163 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15164 OMP_CLAUSE_SCHEDULE))
15165 error_at (EXPR_LOCATION (for_stmt),
15166 "%qs clause may not appear on non-rectangular %qs",
15167 "schedule", lang_GNU_Fortran () ? "do" : "for");
15168 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
15169 error_at (EXPR_LOCATION (for_stmt),
15170 "%qs clause may not appear on non-rectangular %qs",
15171 "ordered", lang_GNU_Fortran () ? "do" : "for");
15173 break;
15174 case OMP_DISTRIBUTE:
15175 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
15176 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15177 OMP_CLAUSE_DIST_SCHEDULE))
15178 error_at (EXPR_LOCATION (for_stmt),
15179 "%qs clause may not appear on non-rectangular %qs",
15180 "dist_schedule", "distribute");
15181 break;
15182 case OACC_LOOP:
15183 ort = ORT_ACC;
15184 break;
15185 case OMP_TASKLOOP:
15186 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
15188 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15189 OMP_CLAUSE_GRAINSIZE))
15190 error_at (EXPR_LOCATION (for_stmt),
15191 "%qs clause may not appear on non-rectangular %qs",
15192 "grainsize", "taskloop");
15193 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15194 OMP_CLAUSE_NUM_TASKS))
15195 error_at (EXPR_LOCATION (for_stmt),
15196 "%qs clause may not appear on non-rectangular %qs",
15197 "num_tasks", "taskloop");
15199 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
15200 ort = ORT_UNTIED_TASKLOOP;
15201 else
15202 ort = ORT_TASKLOOP;
15203 break;
15204 case OMP_SIMD:
15205 ort = ORT_SIMD;
15206 break;
15207 default:
15208 gcc_unreachable ();
15211 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
15212 clause for the IV. */
15213 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
15215 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
15216 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15217 decl = TREE_OPERAND (t, 0);
15218 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15219 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
15220 && OMP_CLAUSE_DECL (c) == decl)
15222 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
15223 break;
15227 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
15228 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
15229 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
15230 ? OMP_LOOP : TREE_CODE (for_stmt));
15232 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
15233 gimplify_omp_ctxp->distribute = true;
15235 /* Handle OMP_FOR_INIT. */
15236 for_pre_body = NULL;
15237 if ((ort == ORT_SIMD
15238 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
15239 && OMP_FOR_PRE_BODY (for_stmt))
15241 has_decl_expr = BITMAP_ALLOC (NULL);
15242 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
15243 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
15245 t = OMP_FOR_PRE_BODY (for_stmt);
15246 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
15248 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
15250 tree_stmt_iterator si;
15251 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
15252 tsi_next (&si))
15254 t = tsi_stmt (si);
15255 if (TREE_CODE (t) == DECL_EXPR
15256 && VAR_P (DECL_EXPR_DECL (t)))
15257 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
15261 if (OMP_FOR_PRE_BODY (for_stmt))
15263 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
15264 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
15265 else
15267 struct gimplify_omp_ctx ctx;
15268 memset (&ctx, 0, sizeof (ctx));
15269 ctx.region_type = ORT_NONE;
15270 gimplify_omp_ctxp = &ctx;
15271 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
15272 gimplify_omp_ctxp = NULL;
15275 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
15277 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
15278 for_stmt = inner_for_stmt;
15280 /* For taskloop, the start, end and step need to be gimplified before the
15281 taskloop, outside of the taskloop omp context. */
15282 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
15284 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15286 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15287 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
15288 ? pre_p : &for_pre_body);
15289 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
15290 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15292 tree v = TREE_OPERAND (t, 1);
15293 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
15294 for_pre_p, orig_for_stmt);
15295 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
15296 for_pre_p, orig_for_stmt);
15298 else
15299 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
15300 orig_for_stmt);
15302 /* Handle OMP_FOR_COND. */
15303 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15304 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15306 tree v = TREE_OPERAND (t, 1);
15307 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
15308 for_pre_p, orig_for_stmt);
15309 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
15310 for_pre_p, orig_for_stmt);
15312 else
15313 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
15314 orig_for_stmt);
15316 /* Handle OMP_FOR_INCR. */
15317 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15318 if (TREE_CODE (t) == MODIFY_EXPR)
15320 decl = TREE_OPERAND (t, 0);
15321 t = TREE_OPERAND (t, 1);
15322 tree *tp = &TREE_OPERAND (t, 1);
15323 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
15324 tp = &TREE_OPERAND (t, 0);
15326 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
15327 orig_for_stmt);
15331 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
15332 OMP_TASKLOOP);
15335 if (orig_for_stmt != for_stmt)
15336 gimplify_omp_ctxp->combined_loop = true;
15338 for_body = NULL;
15339 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15340 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
15341 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15342 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
15344 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
15345 bool is_doacross = false;
15346 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
15347 find_standalone_omp_ordered, NULL))
15349 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
15350 is_doacross = true;
15351 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
15352 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
15353 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
15354 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
15356 error_at (OMP_CLAUSE_LOCATION (*pc),
15357 "%<linear%> clause may not be specified together "
15358 "with %<ordered%> clause if stand-alone %<ordered%> "
15359 "construct is nested in it");
15360 *pc = OMP_CLAUSE_CHAIN (*pc);
15362 else
15363 pc = &OMP_CLAUSE_CHAIN (*pc);
15365 int collapse = 1, tile = 0;
15366 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
15367 if (c)
15368 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
15369 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
15370 if (c)
15371 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
15372 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
15373 hash_set<tree> *allocate_uids = NULL;
15374 if (c)
15376 allocate_uids = new hash_set<tree>;
15377 for (; c; c = OMP_CLAUSE_CHAIN (c))
15378 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
15379 allocate_uids->add (OMP_CLAUSE_DECL (c));
15381 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15383 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15384 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15385 decl = TREE_OPERAND (t, 0);
15386 gcc_assert (DECL_P (decl));
15387 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
15388 || POINTER_TYPE_P (TREE_TYPE (decl)));
15389 if (is_doacross)
15391 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
15393 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15394 if (TREE_CODE (orig_decl) == TREE_LIST)
15396 orig_decl = TREE_PURPOSE (orig_decl);
15397 if (!orig_decl)
15398 orig_decl = decl;
15400 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
15402 else
15403 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
15404 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
15407 if (for_stmt == orig_for_stmt)
15409 tree orig_decl = decl;
15410 if (OMP_FOR_ORIG_DECLS (for_stmt))
15412 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15413 if (TREE_CODE (orig_decl) == TREE_LIST)
15415 orig_decl = TREE_PURPOSE (orig_decl);
15416 if (!orig_decl)
15417 orig_decl = decl;
15420 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
15421 error_at (EXPR_LOCATION (for_stmt),
15422 "threadprivate iteration variable %qD", orig_decl);
15425 /* Make sure the iteration variable is private. */
15426 tree c = NULL_TREE;
15427 tree c2 = NULL_TREE;
15428 if (orig_for_stmt != for_stmt)
15430 /* Preserve this information until we gimplify the inner simd. */
15431 if (has_decl_expr
15432 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
15433 TREE_PRIVATE (t) = 1;
15435 else if (ort == ORT_SIMD)
15437 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15438 (splay_tree_key) decl);
15439 omp_is_private (gimplify_omp_ctxp, decl,
15440 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15441 != 1));
15442 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
15444 omp_notice_variable (gimplify_omp_ctxp, decl, true);
15445 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
15446 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15447 OMP_CLAUSE_LASTPRIVATE);
15448 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
15449 OMP_CLAUSE_LASTPRIVATE))
15450 if (OMP_CLAUSE_DECL (c3) == decl)
15452 warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
15453 "conditional %<lastprivate%> on loop "
15454 "iterator %qD ignored", decl);
15455 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
15456 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
15459 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
15461 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
15462 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
15463 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
15464 if ((has_decl_expr
15465 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
15466 || TREE_PRIVATE (t))
15468 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
15469 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
15471 struct gimplify_omp_ctx *outer
15472 = gimplify_omp_ctxp->outer_context;
15473 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
15475 if (outer->region_type == ORT_WORKSHARE
15476 && outer->combined_loop)
15478 n = splay_tree_lookup (outer->variables,
15479 (splay_tree_key)decl);
15480 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
15482 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
15483 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
15485 else
15487 struct gimplify_omp_ctx *octx = outer->outer_context;
15488 if (octx
15489 && octx->region_type == ORT_COMBINED_PARALLEL
15490 && octx->outer_context
15491 && (octx->outer_context->region_type
15492 == ORT_WORKSHARE)
15493 && octx->outer_context->combined_loop)
15495 octx = octx->outer_context;
15496 n = splay_tree_lookup (octx->variables,
15497 (splay_tree_key)decl);
15498 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
15500 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
15501 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
15508 OMP_CLAUSE_DECL (c) = decl;
15509 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
15510 OMP_FOR_CLAUSES (for_stmt) = c;
15511 omp_add_variable (gimplify_omp_ctxp, decl, flags);
15512 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
15513 omp_lastprivate_for_combined_outer_constructs (outer, decl,
15514 true);
15516 else
15518 bool lastprivate
15519 = (!has_decl_expr
15520 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
15521 if (TREE_PRIVATE (t))
15522 lastprivate = false;
15523 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
15525 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15526 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
15527 lastprivate = false;
15530 struct gimplify_omp_ctx *outer
15531 = gimplify_omp_ctxp->outer_context;
15532 if (outer && lastprivate)
15533 omp_lastprivate_for_combined_outer_constructs (outer, decl,
15534 true);
15536 c = build_omp_clause (input_location,
15537 lastprivate ? OMP_CLAUSE_LASTPRIVATE
15538 : OMP_CLAUSE_PRIVATE);
15539 OMP_CLAUSE_DECL (c) = decl;
15540 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
15541 OMP_FOR_CLAUSES (for_stmt) = c;
15542 omp_add_variable (gimplify_omp_ctxp, decl,
15543 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
15544 | GOVD_EXPLICIT | GOVD_SEEN);
15545 c = NULL_TREE;
15548 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
15550 omp_notice_variable (gimplify_omp_ctxp, decl, true);
15551 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15552 (splay_tree_key) decl);
15553 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
15554 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15555 OMP_CLAUSE_LASTPRIVATE);
15556 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
15557 OMP_CLAUSE_LASTPRIVATE))
15558 if (OMP_CLAUSE_DECL (c3) == decl)
15560 warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
15561 "conditional %<lastprivate%> on loop "
15562 "iterator %qD ignored", decl);
15563 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
15564 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
15567 else
15568 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
15570 /* If DECL is not a gimple register, create a temporary variable to act
15571 as an iteration counter. This is valid, since DECL cannot be
15572 modified in the body of the loop. Similarly for any iteration vars
15573 in simd with collapse > 1 where the iterator vars must be
15574 lastprivate. And similarly for vars mentioned in allocate clauses. */
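/* E.g. (hypothetically) for '#pragma omp simd collapse(2)' both
   iteration variables get such temporaries even when they are gimple
   registers, matching the ORT_SIMD test below.  */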
15575 if (orig_for_stmt != for_stmt)
15576 var = decl;
15577 else if (!is_gimple_reg (decl)
15578 || (ort == ORT_SIMD
15579 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
15580 || (allocate_uids && allocate_uids->contains (decl)))
15582 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15583 /* Make sure omp_add_variable is not called on it prematurely.
15584 We call it ourselves a few lines later. */
15585 gimplify_omp_ctxp = NULL;
15586 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
15587 gimplify_omp_ctxp = ctx;
15588 TREE_OPERAND (t, 0) = var;
15590 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
15592 if (ort == ORT_SIMD
15593 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
15595 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
15596 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
15597 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
15598 OMP_CLAUSE_DECL (c2) = var;
15599 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
15600 OMP_FOR_CLAUSES (for_stmt) = c2;
15601 omp_add_variable (gimplify_omp_ctxp, var,
15602 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
15603 if (c == NULL_TREE)
15605 c = c2;
15606 c2 = NULL_TREE;
15609 else
15610 omp_add_variable (gimplify_omp_ctxp, var,
15611 GOVD_PRIVATE | GOVD_SEEN);
15613 else
15614 var = decl;
15616 gimplify_omp_ctxp->in_for_exprs = true;
15617 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15619 tree lb = TREE_OPERAND (t, 1);
15620 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
15621 is_gimple_val, fb_rvalue, false);
15622 ret = MIN (ret, tret);
15623 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
15624 is_gimple_val, fb_rvalue, false);
15626 else
15627 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
15628 is_gimple_val, fb_rvalue, false);
15629 gimplify_omp_ctxp->in_for_exprs = false;
15630 ret = MIN (ret, tret);
15631 if (ret == GS_ERROR)
15632 return ret;
15634 /* Handle OMP_FOR_COND. */
15635 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15636 gcc_assert (COMPARISON_CLASS_P (t));
15637 gcc_assert (TREE_OPERAND (t, 0) == decl);
15639 gimplify_omp_ctxp->in_for_exprs = true;
15640 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15642 tree ub = TREE_OPERAND (t, 1);
15643 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
15644 is_gimple_val, fb_rvalue, false);
15645 ret = MIN (ret, tret);
15646 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
15647 is_gimple_val, fb_rvalue, false);
15649 else
15650 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
15651 is_gimple_val, fb_rvalue, false);
15652 gimplify_omp_ctxp->in_for_exprs = false;
15653 ret = MIN (ret, tret);
15655 /* Handle OMP_FOR_INCR. */
15656 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15657 switch (TREE_CODE (t))
15659 case PREINCREMENT_EXPR:
15660 case POSTINCREMENT_EXPR:
15662 tree decl = TREE_OPERAND (t, 0);
15663 /* c_omp_for_incr_canonicalize_ptr() should have been
15664 called to massage things appropriately. */
15665 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
15667 if (orig_for_stmt != for_stmt)
15668 break;
15669 t = build_int_cst (TREE_TYPE (decl), 1);
15670 if (c)
15671 OMP_CLAUSE_LINEAR_STEP (c) = t;
15672 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
15673 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
15674 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
15675 break;
15678 case PREDECREMENT_EXPR:
15679 case POSTDECREMENT_EXPR:
15680 /* c_omp_for_incr_canonicalize_ptr() should have been
15681 called to massage things appropriately. */
15682 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
15683 if (orig_for_stmt != for_stmt)
15684 break;
15685 t = build_int_cst (TREE_TYPE (decl), -1);
15686 if (c)
15687 OMP_CLAUSE_LINEAR_STEP (c) = t;
15688 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
15689 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
15690 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
15691 break;
15693 case MODIFY_EXPR:
15694 gcc_assert (TREE_OPERAND (t, 0) == decl);
15695 TREE_OPERAND (t, 0) = var;
15697 t = TREE_OPERAND (t, 1);
15698 switch (TREE_CODE (t))
15700 case PLUS_EXPR:
15701 if (TREE_OPERAND (t, 1) == decl)
15702 {
15703 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
15704 TREE_OPERAND (t, 0) = var;
15705 break;
15706 }
15708 /* Fallthru. */
15709 case MINUS_EXPR:
15710 case POINTER_PLUS_EXPR:
15711 gcc_assert (TREE_OPERAND (t, 0) == decl);
15712 TREE_OPERAND (t, 0) = var;
15713 break;
15714 default:
15715 gcc_unreachable ();
15718 gimplify_omp_ctxp->in_for_exprs = true;
15719 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
15720 is_gimple_val, fb_rvalue, false);
15721 ret = MIN (ret, tret);
15722 if (c)
15724 tree step = TREE_OPERAND (t, 1);
15725 tree stept = TREE_TYPE (decl);
15726 if (POINTER_TYPE_P (stept))
15727 stept = sizetype;
15728 step = fold_convert (stept, step);
15729 if (TREE_CODE (t) == MINUS_EXPR)
15730 step = fold_build1 (NEGATE_EXPR, stept, step);
15731 OMP_CLAUSE_LINEAR_STEP (c) = step;
15732 if (step != TREE_OPERAND (t, 1))
15734 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
15735 &for_pre_body, NULL,
15736 is_gimple_val, fb_rvalue, false);
15737 ret = MIN (ret, tret);
15740 gimplify_omp_ctxp->in_for_exprs = false;
15741 break;
15743 default:
15744 gcc_unreachable ();
15747 if (c2)
15749 gcc_assert (c);
15750 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
15753 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
15755 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
15756 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15757 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
15758 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
15759 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
15760 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
15761 && OMP_CLAUSE_DECL (c) == decl)
15763 if (is_doacross && (collapse == 1 || i >= collapse))
15764 t = var;
15765 else
15767 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15768 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15769 gcc_assert (TREE_OPERAND (t, 0) == var);
15770 t = TREE_OPERAND (t, 1);
15771 gcc_assert (TREE_CODE (t) == PLUS_EXPR
15772 || TREE_CODE (t) == MINUS_EXPR
15773 || TREE_CODE (t) == POINTER_PLUS_EXPR);
15774 gcc_assert (TREE_OPERAND (t, 0) == var);
15775 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
15776 is_doacross ? var : decl,
15777 TREE_OPERAND (t, 1));
15779 gimple_seq *seq;
15780 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
15781 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
15782 else
15783 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
15784 push_gimplify_context ();
15785 gimplify_assign (decl, t, seq);
15786 gimple *bind = NULL;
15787 if (gimplify_ctxp->temps)
15789 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
15790 *seq = NULL;
15791 gimplify_seq_add_stmt (seq, bind);
15793 pop_gimplify_context (bind);
15796 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
15797 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
15799 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
15800 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15801 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
15802 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
15803 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
15804 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
15805 gcc_assert (COMPARISON_CLASS_P (t));
15806 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
15807 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
15808 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
15812 BITMAP_FREE (has_decl_expr);
15813 delete allocate_uids;
15815 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
15816 || (loop_p && orig_for_stmt == for_stmt))
15818 push_gimplify_context ();
15819 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
15821 OMP_FOR_BODY (orig_for_stmt)
15822 = build3 (BIND_EXPR, void_type_node, NULL,
15823 OMP_FOR_BODY (orig_for_stmt), NULL);
15824 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
15828 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
15829 &for_body);
15831 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
15832 || (loop_p && orig_for_stmt == for_stmt))
15834 if (gimple_code (g) == GIMPLE_BIND)
15835 pop_gimplify_context (g);
15836 else
15837 pop_gimplify_context (NULL);
15840 if (orig_for_stmt != for_stmt)
15841 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15843 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15844 decl = TREE_OPERAND (t, 0);
15845 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15846 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
15847 gimplify_omp_ctxp = ctx->outer_context;
15848 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
15849 gimplify_omp_ctxp = ctx;
15850 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
15851 TREE_OPERAND (t, 0) = var;
15852 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15853 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
15854 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
15855 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
15856 for (int j = i + 1;
15857 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
15859 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
15860 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15861 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
15862 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
15864 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
15865 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
15867 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
15868 gcc_assert (COMPARISON_CLASS_P (t));
15869 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
15870 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
15872 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
15873 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
15878 gimplify_adjust_omp_clauses (pre_p, for_body,
15879 &OMP_FOR_CLAUSES (orig_for_stmt),
15880 TREE_CODE (orig_for_stmt));
15882 int kind;
15883 switch (TREE_CODE (orig_for_stmt))
15885 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
15886 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
15887 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
15888 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
15889 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
15890 default:
15891 gcc_unreachable ();
15893 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
15895 gimplify_seq_add_seq (pre_p, for_pre_body);
15896 for_pre_body = NULL;
15898 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
15899 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
15900 for_pre_body);
15901 if (orig_for_stmt != for_stmt)
15902 gimple_omp_for_set_combined_p (gfor, true);
15903 if (gimplify_omp_ctxp
15904 && (gimplify_omp_ctxp->combined_loop
15905 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
15906 && gimplify_omp_ctxp->outer_context
15907 && gimplify_omp_ctxp->outer_context->combined_loop)))
15909 gimple_omp_for_set_combined_into_p (gfor, true);
15910 if (gimplify_omp_ctxp->combined_loop)
15911 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
15912 else
15913 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
15916 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15918 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15919 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
15920 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
15921 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15922 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
15923 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
15924 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15925 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
15928 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
15929 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
15930 The outer taskloop stands for computing the number of iterations
15931 (the counts for collapsed loops) and for holding taskloop-specific
15932 clauses. The task construct stands for the effect of data sharing
15933 on the explicit task it creates, and the inner taskloop stands for
15934 the expansion of the static loop inside of the explicit task. */
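/* An informal sketch (not literal output):
     GIMPLE_OMP_FOR (taskloop)      <- iteration counts, taskloop clauses
       GIMPLE_OMP_TASK              <- data sharing
         GIMPLE_OMP_FOR (taskloop)  <- the actual loop
           body  */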
15935 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
15937 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
15938 tree task_clauses = NULL_TREE;
15939 tree c = *gfor_clauses_ptr;
15940 tree *gtask_clauses_ptr = &task_clauses;
15941 tree outer_for_clauses = NULL_TREE;
15942 tree *gforo_clauses_ptr = &outer_for_clauses;
15943 bitmap lastprivate_uids = NULL;
15944 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
15946 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
15947 if (c)
15949 lastprivate_uids = BITMAP_ALLOC (NULL);
15950 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
15951 OMP_CLAUSE_LASTPRIVATE))
15952 bitmap_set_bit (lastprivate_uids,
15953 DECL_UID (OMP_CLAUSE_DECL (c)));
15955 c = *gfor_clauses_ptr;
15957 for (; c; c = OMP_CLAUSE_CHAIN (c))
15958 switch (OMP_CLAUSE_CODE (c))
15960 /* These clauses are allowed on task; move them there. */
15961 case OMP_CLAUSE_SHARED:
15962 case OMP_CLAUSE_FIRSTPRIVATE:
15963 case OMP_CLAUSE_DEFAULT:
15964 case OMP_CLAUSE_IF:
15965 case OMP_CLAUSE_UNTIED:
15966 case OMP_CLAUSE_FINAL:
15967 case OMP_CLAUSE_MERGEABLE:
15968 case OMP_CLAUSE_PRIORITY:
15969 case OMP_CLAUSE_REDUCTION:
15970 case OMP_CLAUSE_IN_REDUCTION:
15971 *gtask_clauses_ptr = c;
15972 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
15973 break;
15974 case OMP_CLAUSE_PRIVATE:
15975 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
15977 /* We want private on outer for and firstprivate
15978 on task. */
15979 *gtask_clauses_ptr
15980 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15981 OMP_CLAUSE_FIRSTPRIVATE);
15982 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
15983 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
15984 openacc);
15985 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
15986 *gforo_clauses_ptr = c;
15987 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
15989 else
15991 *gtask_clauses_ptr = c;
15992 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
15994 break;
15995 /* These clauses go into outer taskloop clauses. */
15996 case OMP_CLAUSE_GRAINSIZE:
15997 case OMP_CLAUSE_NUM_TASKS:
15998 case OMP_CLAUSE_NOGROUP:
15999 *gforo_clauses_ptr = c;
16000 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16001 break;
16002 /* The collapse clause is duplicated on both taskloops. */
16003 case OMP_CLAUSE_COLLAPSE:
16004 *gfor_clauses_ptr = c;
16005 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16006 *gforo_clauses_ptr = copy_node (c);
16007 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
16008 break;
16009 /* For lastprivate, keep the clause on the inner taskloop, and add
16010 a shared clause on the task. If the same decl is also firstprivate,
16011 also add a firstprivate clause on the inner taskloop. */
16012 case OMP_CLAUSE_LASTPRIVATE:
16013 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
16015 /* For taskloop C++ lastprivate IVs, we want:
16016 1) private on outer taskloop
16017 2) firstprivate and shared on task
16018 3) lastprivate on inner taskloop */
16019 *gtask_clauses_ptr
16020 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16021 OMP_CLAUSE_FIRSTPRIVATE);
16022 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
16023 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
16024 openacc);
16025 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16026 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
16027 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16028 OMP_CLAUSE_PRIVATE);
16029 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
16030 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
16031 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
16032 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
16034 *gfor_clauses_ptr = c;
16035 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16036 *gtask_clauses_ptr
16037 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
16038 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
16039 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
16040 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
16041 gtask_clauses_ptr
16042 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16043 break;
16044 /* The allocate clause is duplicated on the task and the inner taskloop
16045 if the decl is lastprivate; otherwise it is just put on the task. */
16046 case OMP_CLAUSE_ALLOCATE:
16047 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
16048 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
16050 /* Additionally, put firstprivate clause on task
16051 for the allocator if it is not constant. */
16052 *gtask_clauses_ptr
16053 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16054 OMP_CLAUSE_FIRSTPRIVATE);
16055 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
16056 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
16057 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16059 if (lastprivate_uids
16060 && bitmap_bit_p (lastprivate_uids,
16061 DECL_UID (OMP_CLAUSE_DECL (c))))
16063 *gfor_clauses_ptr = c;
16064 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16065 *gtask_clauses_ptr = copy_node (c);
16066 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16068 else
16070 *gtask_clauses_ptr = c;
16071 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16073 break;
16074 default:
16075 gcc_unreachable ();
16077 *gfor_clauses_ptr = NULL_TREE;
16078 *gtask_clauses_ptr = NULL_TREE;
16079 *gforo_clauses_ptr = NULL_TREE;
16080 BITMAP_FREE (lastprivate_uids);
16081 gimple_set_location (gfor, input_location);
16082 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
16083 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
16084 NULL_TREE, NULL_TREE, NULL_TREE);
16085 gimple_set_location (g, input_location);
16086 gimple_omp_task_set_taskloop_p (g, true);
16087 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
16088 gomp_for *gforo
16089 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
16090 gimple_omp_for_collapse (gfor),
16091 gimple_omp_for_pre_body (gfor));
16092 gimple_omp_for_set_pre_body (gfor, NULL);
16093 gimple_omp_for_set_combined_p (gforo, true);
16094 gimple_omp_for_set_combined_into_p (gfor, true);
16095 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
16097 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
16098 tree v = create_tmp_var (type);
16099 gimple_omp_for_set_index (gforo, i, v);
16100 t = unshare_expr (gimple_omp_for_initial (gfor, i));
16101 gimple_omp_for_set_initial (gforo, i, t);
16102 gimple_omp_for_set_cond (gforo, i,
16103 gimple_omp_for_cond (gfor, i));
16104 t = unshare_expr (gimple_omp_for_final (gfor, i));
16105 gimple_omp_for_set_final (gforo, i, t);
16106 t = unshare_expr (gimple_omp_for_incr (gfor, i));
16107 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
16108 TREE_OPERAND (t, 0) = v;
16109 gimple_omp_for_set_incr (gforo, i, t);
16110 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
16111 OMP_CLAUSE_DECL (t) = v;
16112 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
16113 gimple_omp_for_set_clauses (gforo, t);
16114 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
16116 tree *p1 = NULL, *p2 = NULL;
16117 t = gimple_omp_for_initial (gforo, i);
16118 if (TREE_CODE (t) == TREE_VEC)
16119 p1 = &TREE_VEC_ELT (t, 0);
16120 t = gimple_omp_for_final (gforo, i);
16121 if (TREE_CODE (t) == TREE_VEC)
16123 if (p1)
16124 p2 = &TREE_VEC_ELT (t, 0);
16125 else
16126 p1 = &TREE_VEC_ELT (t, 0);
16128 if (p1)
16130 int j;
16131 for (j = 0; j < i; j++)
16132 if (*p1 == gimple_omp_for_index (gfor, j))
16134 *p1 = gimple_omp_for_index (gforo, j);
16135 if (p2)
16136 *p2 = *p1;
16137 break;
16139 gcc_assert (j < i);
16143 gimplify_seq_add_stmt (pre_p, gforo);
16145 else
16146 gimplify_seq_add_stmt (pre_p, gfor);
16148 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
16150 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
16151 unsigned lastprivate_conditional = 0;
16152 while (ctx
16153 && (ctx->region_type == ORT_TARGET_DATA
16154 || ctx->region_type == ORT_TASKGROUP))
16155 ctx = ctx->outer_context;
16156 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
16157 for (tree c = gimple_omp_for_clauses (gfor);
16158 c; c = OMP_CLAUSE_CHAIN (c))
16159 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
16160 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
16161 ++lastprivate_conditional;
16162 if (lastprivate_conditional)
16164 struct omp_for_data fd;
16165 omp_extract_for_data (gfor, &fd, NULL);
16166 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
16167 lastprivate_conditional);
16168 tree var = create_tmp_var_raw (type);
16169 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
16170 OMP_CLAUSE_DECL (c) = var;
16171 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
16172 gimple_omp_for_set_clauses (gfor, c);
16173 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
16176 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
16178 unsigned lastprivate_conditional = 0;
16179 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
16180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
16181 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
16182 ++lastprivate_conditional;
16183 if (lastprivate_conditional)
16185 struct omp_for_data fd;
16186 omp_extract_for_data (gfor, &fd, NULL);
16187 tree type = unsigned_type_for (fd.iter_type);
16188 while (lastprivate_conditional--)
16190 tree c = build_omp_clause (UNKNOWN_LOCATION,
16191 OMP_CLAUSE__CONDTEMP_);
16192 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
16193 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
16194 gimple_omp_for_set_clauses (gfor, c);
16199 if (ret != GS_ALL_DONE)
16200 return GS_ERROR;
16201 *expr_p = NULL_TREE;
16202 return GS_ALL_DONE;
16205 /* Helper for gimplify_omp_loop, called through walk_tree. */
16207 static tree
16208 note_no_context_vars (tree *tp, int *, void *data)
16210 if (VAR_P (*tp)
16211 && DECL_CONTEXT (*tp) == NULL_TREE
16212 && !is_global_var (*tp))
16214 vec<tree> *d = (vec<tree> *) data;
16215 d->safe_push (*tp);
16216 DECL_CONTEXT (*tp) = current_function_decl;
16218 return NULL_TREE;
16221 /* Gimplify the gross structure of an OMP_LOOP statement. */
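/* In outline (an informal sketch): depending on the effective 'bind'
   clause, '#pragma omp loop' is turned into an OMP_SIMD loop and, when
   bound to a parallel or teams region, additionally wrapped in the
   corresponding combined construct; see the pass loop below.  */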
16223 static enum gimplify_status
16224 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
16226 tree for_stmt = *expr_p;
16227 tree clauses = OMP_FOR_CLAUSES (for_stmt);
16228 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
16229 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
16230 int i;
16232 /* If order is not present, the behavior is as if order(concurrent)
16233 appeared. */
16234 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
16235 if (order == NULL_TREE)
16237 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
16238 OMP_CLAUSE_CHAIN (order) = clauses;
16239 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
16242 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
16243 if (bind == NULL_TREE)
16245 if (!flag_openmp) /* flag_openmp_simd */
16247 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
16248 kind = OMP_CLAUSE_BIND_TEAMS;
16249 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
16250 kind = OMP_CLAUSE_BIND_PARALLEL;
16251 else
16253 for (; octx; octx = octx->outer_context)
16255 if ((octx->region_type & ORT_ACC) != 0
16256 || octx->region_type == ORT_NONE
16257 || octx->region_type == ORT_IMPLICIT_TARGET)
16258 continue;
16259 break;
16261 if (octx == NULL && !in_omp_construct)
16262 error_at (EXPR_LOCATION (for_stmt),
16263 "%<bind%> clause not specified on a %<loop%> "
16264 "construct not nested inside another OpenMP construct");
16266 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
16267 OMP_CLAUSE_CHAIN (bind) = clauses;
16268 OMP_CLAUSE_BIND_KIND (bind) = kind;
16269 OMP_FOR_CLAUSES (for_stmt) = bind;
16271 else
16272 switch (OMP_CLAUSE_BIND_KIND (bind))
16274 case OMP_CLAUSE_BIND_THREAD:
16275 break;
16276 case OMP_CLAUSE_BIND_PARALLEL:
16277 if (!flag_openmp) /* flag_openmp_simd */
16279 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16280 break;
16282 for (; octx; octx = octx->outer_context)
16283 if (octx->region_type == ORT_SIMD
16284 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
16286 error_at (EXPR_LOCATION (for_stmt),
16287 "%<bind(parallel)%> on a %<loop%> construct nested "
16288 "inside %<simd%> construct");
16289 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16290 break;
16292 kind = OMP_CLAUSE_BIND_PARALLEL;
16293 break;
16294 case OMP_CLAUSE_BIND_TEAMS:
16295 if (!flag_openmp) /* flag_openmp_simd */
16297 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16298 break;
16300 if ((octx
16301 && octx->region_type != ORT_IMPLICIT_TARGET
16302 && octx->region_type != ORT_NONE
16303 && (octx->region_type & ORT_TEAMS) == 0)
16304 || in_omp_construct)
16306 error_at (EXPR_LOCATION (for_stmt),
16307 "%<bind(teams)%> on a %<loop%> region not strictly "
16308 "nested inside of a %<teams%> region");
16309 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16310 break;
16312 kind = OMP_CLAUSE_BIND_TEAMS;
16313 break;
16314 default:
16315 gcc_unreachable ();
16318 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
16319 switch (OMP_CLAUSE_CODE (*pc))
16321 case OMP_CLAUSE_REDUCTION:
16322 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
16324 error_at (OMP_CLAUSE_LOCATION (*pc),
16325 "%<inscan%> %<reduction%> clause on "
16326 "%qs construct", "loop");
16327 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
16329 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
16331 error_at (OMP_CLAUSE_LOCATION (*pc),
16332 "invalid %<task%> reduction modifier on construct "
16333 "other than %<parallel%>, %qs or %<sections%>",
16334 lang_GNU_Fortran () ? "do" : "for");
16335 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
16337 pc = &OMP_CLAUSE_CHAIN (*pc);
16338 break;
16339 case OMP_CLAUSE_LASTPRIVATE:
16340 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
16342 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
16343 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
16344 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
16345 break;
16346 if (OMP_FOR_ORIG_DECLS (for_stmt)
16347 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
16348 i)) == TREE_LIST
16349 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
16350 i)))
16352 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
16353 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
16354 break;
16357 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
16359 error_at (OMP_CLAUSE_LOCATION (*pc),
16360 "%<lastprivate%> clause on a %<loop%> construct refers "
16361 "to a variable %qD which is not the loop iterator",
16362 OMP_CLAUSE_DECL (*pc));
16363 *pc = OMP_CLAUSE_CHAIN (*pc);
16364 break;
16366 pc = &OMP_CLAUSE_CHAIN (*pc);
16367 break;
16368 default:
16369 pc = &OMP_CLAUSE_CHAIN (*pc);
16370 break;
16373 TREE_SET_CODE (for_stmt, OMP_SIMD);
16375 int last;
16376 switch (kind)
16378 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
16379 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
16380 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
16382 for (int pass = 1; pass <= last; pass++)
16384 if (pass == 2)
16386 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
16387 make_node (BLOCK));
16388 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
16389 *expr_p = make_node (OMP_PARALLEL);
16390 TREE_TYPE (*expr_p) = void_type_node;
16391 OMP_PARALLEL_BODY (*expr_p) = bind;
16392 OMP_PARALLEL_COMBINED (*expr_p) = 1;
16393 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
16394 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
16395 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
16396 if (OMP_FOR_ORIG_DECLS (for_stmt)
16397 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
16398 == TREE_LIST))
16400 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
16401 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
16403 *pc = build_omp_clause (UNKNOWN_LOCATION,
16404 OMP_CLAUSE_FIRSTPRIVATE);
16405 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
16406 pc = &OMP_CLAUSE_CHAIN (*pc);
16410 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
16411 tree *pc = &OMP_FOR_CLAUSES (t);
16412 TREE_TYPE (t) = void_type_node;
16413 OMP_FOR_BODY (t) = *expr_p;
16414 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
16415 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
16416 switch (OMP_CLAUSE_CODE (c))
16418 case OMP_CLAUSE_BIND:
16419 case OMP_CLAUSE_ORDER:
16420 case OMP_CLAUSE_COLLAPSE:
16421 *pc = copy_node (c);
16422 pc = &OMP_CLAUSE_CHAIN (*pc);
16423 break;
16424 case OMP_CLAUSE_PRIVATE:
16425 case OMP_CLAUSE_FIRSTPRIVATE:
16426 /* Only needed on innermost. */
16427 break;
16428 case OMP_CLAUSE_LASTPRIVATE:
16429 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
16431 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16432 OMP_CLAUSE_FIRSTPRIVATE);
16433 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
16434 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
16435 pc = &OMP_CLAUSE_CHAIN (*pc);
16437 *pc = copy_node (c);
16438 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
16439 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
16440 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
16442 if (pass != last)
16443 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
16444 else
16445 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
16446 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
16448 pc = &OMP_CLAUSE_CHAIN (*pc);
16449 break;
16450 case OMP_CLAUSE_REDUCTION:
16451 *pc = copy_node (c);
16452 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
16453 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
16454 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
16456 auto_vec<tree> no_context_vars;
16457 int walk_subtrees = 0;
16458 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
16459 &walk_subtrees, &no_context_vars);
16460 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
16461 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
16462 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
16463 note_no_context_vars,
16464 &no_context_vars);
16465 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
16466 note_no_context_vars,
16467 &no_context_vars);
16469 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
16470 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
16471 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
16472 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
16473 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
16475 hash_map<tree, tree> decl_map;
16476 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
16477 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
16478 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
16479 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
16480 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
16481 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
16483 copy_body_data id;
16484 memset (&id, 0, sizeof (id));
16485 id.src_fn = current_function_decl;
16486 id.dst_fn = current_function_decl;
16487 id.src_cfun = cfun;
16488 id.decl_map = &decl_map;
16489 id.copy_decl = copy_decl_no_change;
16490 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
16491 id.transform_new_cfg = true;
16492 id.transform_return_to_modify = false;
16493 id.eh_lp_nr = 0;
16494 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
16495 &id, NULL);
16496 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
16497 &id, NULL);
16499 for (tree d : no_context_vars)
16501 DECL_CONTEXT (d) = NULL_TREE;
16502 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
16505 else
16507 OMP_CLAUSE_REDUCTION_INIT (*pc)
16508 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
16509 OMP_CLAUSE_REDUCTION_MERGE (*pc)
16510 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
16512 pc = &OMP_CLAUSE_CHAIN (*pc);
16513 break;
16514 default:
16515 gcc_unreachable ();
16517 *pc = NULL_TREE;
16518 *expr_p = t;
16520 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
16524 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
16525 of OMP_TARGET's body. */
16527 static tree
16528 find_omp_teams (tree *tp, int *walk_subtrees, void *)
16530 *walk_subtrees = 0;
16531 switch (TREE_CODE (*tp))
16533 case OMP_TEAMS:
16534 return *tp;
16535 case BIND_EXPR:
16536 case STATEMENT_LIST:
16537 *walk_subtrees = 1;
16538 break;
16539 default:
16540 break;
16542 return NULL_TREE;
16545 /* Helper function of optimize_target_teams, determine if the expression
16546 can be computed safely before the target construct on the host. */
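/* For instance (an illustrative sketch, with "n" a hypothetical int that
   is firstprivate on the target region), an expression such as
   "n * 4 + 1" can be computed on the host, because it involves only
   integral decls and the simple arithmetic accepted below.  By contrast,
   "foo ()" or "*p" makes the walk return the offending subtree, as does
   any decl that is neither firstprivate nor mapped
   always,to / always,tofrom.  */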
16548 static tree
16549 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
16551 splay_tree_node n;
16553 if (TYPE_P (*tp))
16555 *walk_subtrees = 0;
16556 return NULL_TREE;
16558 switch (TREE_CODE (*tp))
16560 case VAR_DECL:
16561 case PARM_DECL:
16562 case RESULT_DECL:
16563 *walk_subtrees = 0;
16564 if (error_operand_p (*tp)
16565 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
16566 || DECL_HAS_VALUE_EXPR_P (*tp)
16567 || DECL_THREAD_LOCAL_P (*tp)
16568 || TREE_SIDE_EFFECTS (*tp)
16569 || TREE_THIS_VOLATILE (*tp))
16570 return *tp;
16571 if (is_global_var (*tp)
16572 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
16573 || lookup_attribute ("omp declare target link",
16574 DECL_ATTRIBUTES (*tp))))
16575 return *tp;
16576 if (VAR_P (*tp)
16577 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
16578 && !is_global_var (*tp)
16579 && decl_function_context (*tp) == current_function_decl)
16580 return *tp;
16581 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
16582 (splay_tree_key) *tp);
16583 if (n == NULL)
16585 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
16586 return NULL_TREE;
16587 return *tp;
16589 else if (n->value & GOVD_LOCAL)
16590 return *tp;
16591 else if (n->value & GOVD_FIRSTPRIVATE)
16592 return NULL_TREE;
16593 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
16594 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
16595 return NULL_TREE;
16596 return *tp;
16597 case INTEGER_CST:
16598 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
16599 return *tp;
16600 return NULL_TREE;
16601 case TARGET_EXPR:
16602 if (TARGET_EXPR_INITIAL (*tp)
16603 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
16604 return *tp;
16605 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
16606 walk_subtrees, NULL);
16607 /* Allow some reasonable subset of integral arithmetic. */
16608 case PLUS_EXPR:
16609 case MINUS_EXPR:
16610 case MULT_EXPR:
16611 case TRUNC_DIV_EXPR:
16612 case CEIL_DIV_EXPR:
16613 case FLOOR_DIV_EXPR:
16614 case ROUND_DIV_EXPR:
16615 case TRUNC_MOD_EXPR:
16616 case CEIL_MOD_EXPR:
16617 case FLOOR_MOD_EXPR:
16618 case ROUND_MOD_EXPR:
16619 case RDIV_EXPR:
16620 case EXACT_DIV_EXPR:
16621 case MIN_EXPR:
16622 case MAX_EXPR:
16623 case LSHIFT_EXPR:
16624 case RSHIFT_EXPR:
16625 case BIT_IOR_EXPR:
16626 case BIT_XOR_EXPR:
16627 case BIT_AND_EXPR:
16628 case NEGATE_EXPR:
16629 case ABS_EXPR:
16630 case BIT_NOT_EXPR:
16631 case NON_LVALUE_EXPR:
16632 CASE_CONVERT:
16633 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
16634 return *tp;
16635 return NULL_TREE;
16636 /* And disallow anything else, except for comparisons. */
16637 default:
16638 if (COMPARISON_CLASS_P (*tp))
16639 return NULL_TREE;
16640 return *tp;
16644 /* Try to determine if the num_teams and/or thread_limit expressions
16645 can have their values determined already before entering the
16646 target construct.
16647 INTEGER_CSTs trivially can; so can integral decls that are
16648 firstprivate (explicitly or implicitly), or that are explicitly
16649 map(always, to:) or map(always, tofrom:) on the target region,
16650 as can expressions involving simple arithmetic on those.
16651 Function calls are not OK, nor is dereferencing something, etc.
16652 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
16653 TARGET based on what we find:
16654 0 stands for clause not specified at all, use implementation default
16655 -1 stands for a value that can't be determined easily before entering
16656 the target construct.
16657 -2 means that no explicit teams construct was specified.
16658 If the teams construct is not present at all, use 1 for num_teams
16659 and 0 for thread_limit (only one team is involved, and the thread
16660 limit is implementation defined). */
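/* A sketch of the effect (the source lines are illustrative):

     #pragma omp target
     #pragma omp teams num_teams(32) thread_limit(foo ())
     ...

   results in num_teams(32) on the OMP_TARGET, since the INTEGER_CST can
   be evaluated on the host, but thread_limit(-1), since the call cannot;
   an OMP_TARGET whose body contains no teams construct at all gets
   num_teams(-2), later lowered as a single team.  */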
16662 static void
16663 optimize_target_teams (tree target, gimple_seq *pre_p)
16665 tree body = OMP_BODY (target);
16666 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
16667 tree num_teams_lower = NULL_TREE;
16668 tree num_teams_upper = integer_zero_node;
16669 tree thread_limit = integer_zero_node;
16670 location_t num_teams_loc = EXPR_LOCATION (target);
16671 location_t thread_limit_loc = EXPR_LOCATION (target);
16672 tree c, *p, expr;
16673 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
16675 if (teams == NULL_TREE)
16676 num_teams_upper = build_int_cst (integer_type_node, -2);
16677 else
16678 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
16680 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
16682 p = &num_teams_upper;
16683 num_teams_loc = OMP_CLAUSE_LOCATION (c);
16684 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
16686 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
16687 if (TREE_CODE (expr) == INTEGER_CST)
16688 num_teams_lower = expr;
16689 else if (walk_tree (&expr, computable_teams_clause,
16690 NULL, NULL))
16691 num_teams_lower = integer_minus_one_node;
16692 else
16694 num_teams_lower = expr;
16695 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
16696 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
16697 is_gimple_val, fb_rvalue, false)
16698 == GS_ERROR)
16700 gimplify_omp_ctxp = target_ctx;
16701 num_teams_lower = integer_minus_one_node;
16703 else
16705 gimplify_omp_ctxp = target_ctx;
16706 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
16707 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
16708 = num_teams_lower;
16713 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
16715 p = &thread_limit;
16716 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
16718 else
16719 continue;
16720 expr = OMP_CLAUSE_OPERAND (c, 0);
16721 if (TREE_CODE (expr) == INTEGER_CST)
16723 *p = expr;
16724 continue;
16726 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
16728 *p = integer_minus_one_node;
16729 continue;
16731 *p = expr;
16732 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
16733 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
16734 == GS_ERROR)
16736 gimplify_omp_ctxp = target_ctx;
16737 *p = integer_minus_one_node;
16738 continue;
16740 gimplify_omp_ctxp = target_ctx;
16741 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
16742 OMP_CLAUSE_OPERAND (c, 0) = *p;
16744 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
16746 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
16747 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
16748 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
16749 OMP_TARGET_CLAUSES (target) = c;
16751 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
16752 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
16753 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
16754 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
16755 OMP_TARGET_CLAUSES (target) = c;
16758 /* Gimplify the gross structure of several OMP constructs. */
16760 static void
16761 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
16763 tree expr = *expr_p;
16764 gimple *stmt;
16765 gimple_seq body = NULL;
16766 enum omp_region_type ort;
16768 switch (TREE_CODE (expr))
16770 case OMP_SECTIONS:
16771 case OMP_SINGLE:
16772 ort = ORT_WORKSHARE;
16773 break;
16774 case OMP_SCOPE:
16775 ort = ORT_TASKGROUP;
16776 break;
16777 case OMP_TARGET:
16778 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
16779 break;
16780 case OACC_KERNELS:
16781 ort = ORT_ACC_KERNELS;
16782 break;
16783 case OACC_PARALLEL:
16784 ort = ORT_ACC_PARALLEL;
16785 break;
16786 case OACC_SERIAL:
16787 ort = ORT_ACC_SERIAL;
16788 break;
16789 case OACC_DATA:
16790 ort = ORT_ACC_DATA;
16791 break;
16792 case OMP_TARGET_DATA:
16793 ort = ORT_TARGET_DATA;
16794 break;
16795 case OMP_TEAMS:
16796 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
16797 if (gimplify_omp_ctxp == NULL
16798 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
16799 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
16800 break;
16801 case OACC_HOST_DATA:
16802 ort = ORT_ACC_HOST_DATA;
16803 break;
16804 default:
16805 gcc_unreachable ();
16808 bool save_in_omp_construct = in_omp_construct;
16809 if ((ort & ORT_ACC) == 0)
16810 in_omp_construct = false;
16811 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
16812 TREE_CODE (expr));
16813 if (TREE_CODE (expr) == OMP_TARGET)
16814 optimize_target_teams (expr, pre_p);
16815 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
16816 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
16818 push_gimplify_context ();
16819 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
16820 if (gimple_code (g) == GIMPLE_BIND)
16821 pop_gimplify_context (g);
16822 else
16823 pop_gimplify_context (NULL);
16824 if ((ort & ORT_TARGET_DATA) != 0)
16826 enum built_in_function end_ix;
16827 switch (TREE_CODE (expr))
16829 case OACC_DATA:
16830 case OACC_HOST_DATA:
16831 end_ix = BUILT_IN_GOACC_DATA_END;
16832 break;
16833 case OMP_TARGET_DATA:
16834 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
16835 break;
16836 default:
16837 gcc_unreachable ();
16839 tree fn = builtin_decl_explicit (end_ix);
16840 g = gimple_build_call (fn, 0);
16841 gimple_seq cleanup = NULL;
16842 gimple_seq_add_stmt (&cleanup, g);
16843 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
16844 body = NULL;
16845 gimple_seq_add_stmt (&body, g);
16848 else
16849 gimplify_and_add (OMP_BODY (expr), &body);
16850 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
16851 TREE_CODE (expr));
16852 in_omp_construct = save_in_omp_construct;
16854 switch (TREE_CODE (expr))
16856 case OACC_DATA:
16857 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
16858 OMP_CLAUSES (expr));
16859 break;
16860 case OACC_HOST_DATA:
16861 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
16863 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
16865 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
16868 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
16869 OMP_CLAUSES (expr));
16870 break;
16871 case OACC_KERNELS:
16872 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
16873 OMP_CLAUSES (expr));
16874 break;
16875 case OACC_PARALLEL:
16876 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
16877 OMP_CLAUSES (expr));
16878 break;
16879 case OACC_SERIAL:
16880 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
16881 OMP_CLAUSES (expr));
16882 break;
16883 case OMP_SECTIONS:
16884 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
16885 break;
16886 case OMP_SINGLE:
16887 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
16888 break;
16889 case OMP_SCOPE:
16890 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
16891 break;
16892 case OMP_TARGET:
16893 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
16894 OMP_CLAUSES (expr));
16895 break;
16896 case OMP_TARGET_DATA:
16897 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
16898 to be evaluated before the use_device_{ptr,addr} clauses if they
16899 refer to the same variables. */
16901 tree use_device_clauses;
16902 tree *pc, *uc = &use_device_clauses;
16903 for (pc = &OMP_CLAUSES (expr); *pc; )
16904 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
16905 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
16907 *uc = *pc;
16908 *pc = OMP_CLAUSE_CHAIN (*pc);
16909 uc = &OMP_CLAUSE_CHAIN (*uc);
16911 else
16912 pc = &OMP_CLAUSE_CHAIN (*pc);
16913 *uc = NULL_TREE;
16914 *pc = use_device_clauses;
16915 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
16916 OMP_CLAUSES (expr));
16918 break;
16919 case OMP_TEAMS:
16920 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
16921 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
16922 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
16923 break;
16924 default:
16925 gcc_unreachable ();
16928 gimplify_seq_add_stmt (pre_p, stmt);
16929 *expr_p = NULL_TREE;
16932 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
16933 target update constructs. */
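/* For example (a sketch):

     #pragma acc update device(x) if_present

   is scanned and adjusted like the other standalone directives, after
   which the GOMP_MAP_FORCE_TO mapping of "x" is relaxed to GOMP_MAP_TO
   below, so that the runtime performs the update only if "x" is already
   present on the device.  */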
16935 static void
16936 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
16938 tree expr = *expr_p;
16939 int kind;
16940 gomp_target *stmt;
16941 enum omp_region_type ort = ORT_WORKSHARE;
16943 switch (TREE_CODE (expr))
16945 case OACC_ENTER_DATA:
16946 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
16947 ort = ORT_ACC;
16948 break;
16949 case OACC_EXIT_DATA:
16950 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
16951 ort = ORT_ACC;
16952 break;
16953 case OACC_UPDATE:
16954 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
16955 ort = ORT_ACC;
16956 break;
16957 case OMP_TARGET_UPDATE:
16958 kind = GF_OMP_TARGET_KIND_UPDATE;
16959 break;
16960 case OMP_TARGET_ENTER_DATA:
16961 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
16962 break;
16963 case OMP_TARGET_EXIT_DATA:
16964 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
16965 break;
16966 default:
16967 gcc_unreachable ();
16969 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
16970 ort, TREE_CODE (expr));
16971 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
16972 TREE_CODE (expr));
16973 if (TREE_CODE (expr) == OACC_UPDATE
16974 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
16975 OMP_CLAUSE_IF_PRESENT))
16977 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
16978 clause. */
16979 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16980 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
16981 switch (OMP_CLAUSE_MAP_KIND (c))
16983 case GOMP_MAP_FORCE_TO:
16984 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
16985 break;
16986 case GOMP_MAP_FORCE_FROM:
16987 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
16988 break;
16989 default:
16990 break;
16993 else if (TREE_CODE (expr) == OACC_EXIT_DATA
16994 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
16995 OMP_CLAUSE_FINALIZE))
16997 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
16998 semantics. */
16999 bool have_clause = false;
17000 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
17001 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
17002 switch (OMP_CLAUSE_MAP_KIND (c))
17004 case GOMP_MAP_FROM:
17005 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
17006 have_clause = true;
17007 break;
17008 case GOMP_MAP_RELEASE:
17009 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
17010 have_clause = true;
17011 break;
17012 case GOMP_MAP_TO_PSET:
17013 /* Fortran arrays with descriptors must map that descriptor when
17014 doing standalone "attach" operations (in OpenACC). In that
17015 case GOMP_MAP_TO_PSET appears by itself with no preceding
17016 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
17017 break;
17018 case GOMP_MAP_POINTER:
17019 /* TODO PR92929: we may see these here, but they'll always follow
17020 one of the clauses above, and will be handled by libgomp as
17021 one group, so no handling required here. */
17022 gcc_assert (have_clause);
17023 break;
17024 case GOMP_MAP_DETACH:
17025 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
17026 have_clause = false;
17027 break;
17028 case GOMP_MAP_STRUCT:
17029 case GOMP_MAP_STRUCT_UNORD:
17030 have_clause = false;
17031 break;
17032 default:
17033 gcc_unreachable ();
17036 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
17038 gimplify_seq_add_stmt (pre_p, stmt);
17039 *expr_p = NULL_TREE;
17042 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
17043 stabilized the lhs of the atomic operation as *ADDR. Return true if
17044 EXPR is this stabilized form. */
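/* E.g. (sketch): for "#pragma omp atomic" applied to "x", the front end
   passes ADDR as &x, and this predicate recognizes "*&x" as the
   stabilized lhs, stripping useless conversions such as casts to a
   volatile-qualified variant of x's type along the way.  */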
17046 static bool
17047 goa_lhs_expr_p (tree expr, tree addr)
17049 /* Also include casts to other type variants. The C front end is fond
17050 of adding these for e.g. volatile variables. This is like
17051 STRIP_TYPE_NOPS but includes the main variant lookup. */
17052 STRIP_USELESS_TYPE_CONVERSION (expr);
17054 if (INDIRECT_REF_P (expr))
17056 expr = TREE_OPERAND (expr, 0);
17057 while (expr != addr
17058 && (CONVERT_EXPR_P (expr)
17059 || TREE_CODE (expr) == NON_LVALUE_EXPR)
17060 && TREE_CODE (expr) == TREE_CODE (addr)
17061 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
17063 expr = TREE_OPERAND (expr, 0);
17064 addr = TREE_OPERAND (addr, 0);
17066 if (expr == addr)
17067 return true;
17068 return (TREE_CODE (addr) == ADDR_EXPR
17069 && TREE_CODE (expr) == ADDR_EXPR
17070 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
17072 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
17073 return true;
17074 return false;
17077 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
17078 expression does not involve the lhs, evaluate it into a temporary.
17079 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
17080 or -1 if an error was encountered. */
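/* As a sketch (temporary names are illustrative): with LHS_ADDR = &x
   and LHS_VAR = t.1, an atomic rhs such as

     x + foo (y)

   is rewritten to "t.1 + t.2", where "t.2 = foo (y);" is emitted into
   *PRE_P; the lhs occurrence is replaced by the temporary holding the
   atomically loaded value, and the lhs-free subexpression is
   pre-evaluated.  */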
17082 static int
17083 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
17084 tree lhs_var, tree &target_expr, bool rhs, int depth)
17086 tree expr = *expr_p;
17087 int saw_lhs = 0;
17089 if (goa_lhs_expr_p (expr, lhs_addr))
17091 if (pre_p)
17092 *expr_p = lhs_var;
17093 return 1;
17095 if (is_gimple_val (expr))
17096 return 0;
17098 /* Maximum depth of lhs in expression is for the
17099 __builtin_clear_padding (...), __builtin_clear_padding (...),
17100 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
17101 if (++depth > 7)
17102 goto finish;
17104 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
17106 case tcc_binary:
17107 case tcc_comparison:
17108 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
17109 lhs_var, target_expr, true, depth);
17110 /* FALLTHRU */
17111 case tcc_unary:
17112 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
17113 lhs_var, target_expr, true, depth);
17114 break;
17115 case tcc_expression:
17116 switch (TREE_CODE (expr))
17118 case TRUTH_ANDIF_EXPR:
17119 case TRUTH_ORIF_EXPR:
17120 case TRUTH_AND_EXPR:
17121 case TRUTH_OR_EXPR:
17122 case TRUTH_XOR_EXPR:
17123 case BIT_INSERT_EXPR:
17124 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
17125 lhs_addr, lhs_var, target_expr, true,
17126 depth);
17127 /* FALLTHRU */
17128 case TRUTH_NOT_EXPR:
17129 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17130 lhs_addr, lhs_var, target_expr, true,
17131 depth);
17132 break;
17133 case MODIFY_EXPR:
17134 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
17135 target_expr, true, depth))
17136 break;
17137 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
17138 lhs_addr, lhs_var, target_expr, true,
17139 depth);
17140 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17141 lhs_addr, lhs_var, target_expr, false,
17142 depth);
17143 break;
17144 /* FALLTHRU */
17145 case ADDR_EXPR:
17146 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
17147 target_expr, true, depth))
17148 break;
17149 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17150 lhs_addr, lhs_var, target_expr, false,
17151 depth);
17152 break;
17153 case COMPOUND_EXPR:
17154 /* Break out any preevaluations from cp_build_modify_expr. */
17155 for (; TREE_CODE (expr) == COMPOUND_EXPR;
17156 expr = TREE_OPERAND (expr, 1))
17158 /* Special-case __builtin_clear_padding call before
17159 __builtin_memcmp. */
17160 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
17162 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
17163 if (fndecl
17164 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
17165 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
17166 && (!pre_p
17167 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
17168 lhs_addr, lhs_var,
17169 target_expr, true, depth)))
17171 if (pre_p)
17172 *expr_p = expr;
17173 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
17174 pre_p, lhs_addr, lhs_var,
17175 target_expr, true, depth);
17176 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
17177 pre_p, lhs_addr, lhs_var,
17178 target_expr, rhs, depth);
17179 return saw_lhs;
17183 if (pre_p)
17184 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
17186 if (!pre_p)
17187 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
17188 target_expr, rhs, depth);
17189 *expr_p = expr;
17190 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
17191 target_expr, rhs, depth);
17192 case COND_EXPR:
17193 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
17194 lhs_var, target_expr, true, depth))
17195 break;
17196 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17197 lhs_addr, lhs_var, target_expr, true,
17198 depth);
17199 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
17200 lhs_addr, lhs_var, target_expr, true,
17201 depth);
17202 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
17203 lhs_addr, lhs_var, target_expr, true,
17204 depth);
17205 break;
17206 case TARGET_EXPR:
17207 if (TARGET_EXPR_INITIAL (expr))
17209 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
17210 lhs_var, target_expr, true,
17211 depth))
17212 break;
17213 if (expr == target_expr)
17214 saw_lhs = 1;
17215 else
17217 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
17218 pre_p, lhs_addr, lhs_var,
17219 target_expr, true, depth);
17220 if (saw_lhs && target_expr == NULL_TREE && pre_p)
17221 target_expr = expr;
17224 break;
17225 default:
17226 break;
17228 break;
17229 case tcc_reference:
17230 if (TREE_CODE (expr) == BIT_FIELD_REF
17231 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
17232 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17233 lhs_addr, lhs_var, target_expr, true,
17234 depth);
17235 break;
17236 case tcc_vl_exp:
17237 if (TREE_CODE (expr) == CALL_EXPR)
17239 if (tree fndecl = get_callee_fndecl (expr))
17240 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
17241 BUILT_IN_MEMCMP))
17243 int nargs = call_expr_nargs (expr);
17244 for (int i = 0; i < nargs; i++)
17245 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
17246 pre_p, lhs_addr, lhs_var,
17247 target_expr, true, depth);
17250 break;
17251 default:
17252 break;
17255 finish:
17256 if (saw_lhs == 0 && pre_p)
17258 enum gimplify_status gs;
17259 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
17261 gimplify_stmt (&expr, pre_p);
17262 return saw_lhs;
17264 else if (rhs)
17265 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
17266 else
17267 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
17268 if (gs != GS_ALL_DONE)
17269 saw_lhs = -1;
17272 return saw_lhs;
17275 /* Gimplify an OMP_ATOMIC statement. */
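/* A sketch of the lowering (temporary names are illustrative): for

     #pragma omp atomic
     x = x + 1;

   we emit roughly

     t.1 = GIMPLE_OMP_ATOMIC_LOAD <&x>
     t.2 = t.1 + 1
     GIMPLE_OMP_ATOMIC_STORE <t.2>

   and for the capture variants *EXPR_P becomes t.1 (capture-old) or
   the stored value (capture-new).  */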
17277 static enum gimplify_status
17278 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
17280 tree addr = TREE_OPERAND (*expr_p, 0);
17281 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
17282 ? NULL : TREE_OPERAND (*expr_p, 1);
17283 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
17284 tree tmp_load;
17285 gomp_atomic_load *loadstmt;
17286 gomp_atomic_store *storestmt;
17287 tree target_expr = NULL_TREE;
17289 tmp_load = create_tmp_reg (type);
17290 if (rhs
17291 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
17292 true, 0) < 0)
17293 return GS_ERROR;
17295 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
17296 != GS_ALL_DONE)
17297 return GS_ERROR;
17299 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
17300 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
17301 gimplify_seq_add_stmt (pre_p, loadstmt);
17302 if (rhs)
17304 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
17305 representatives. Use BIT_FIELD_REF on the lhs instead. */
17306 tree rhsarg = rhs;
17307 if (TREE_CODE (rhs) == COND_EXPR)
17308 rhsarg = TREE_OPERAND (rhs, 1);
17309 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
17310 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
17312 tree bitpos = TREE_OPERAND (rhsarg, 2);
17313 tree op1 = TREE_OPERAND (rhsarg, 1);
17314 tree bitsize;
17315 tree tmp_store = tmp_load;
17316 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
17317 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
17318 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
17319 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
17320 else
17321 bitsize = TYPE_SIZE (TREE_TYPE (op1));
17322 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
17323 tree t = build2_loc (EXPR_LOCATION (rhsarg),
17324 MODIFY_EXPR, void_type_node,
17325 build3_loc (EXPR_LOCATION (rhsarg),
17326 BIT_FIELD_REF, TREE_TYPE (op1),
17327 tmp_store, bitsize, bitpos), op1);
17328 if (TREE_CODE (rhs) == COND_EXPR)
17329 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
17330 TREE_OPERAND (rhs, 0), t, void_node);
17331 gimplify_and_add (t, pre_p);
17332 rhs = tmp_store;
17334 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
17335 if (TREE_CODE (rhs) == COND_EXPR)
17336 gimplify_ctxp->allow_rhs_cond_expr = true;
17337 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
17338 is_gimple_val, fb_rvalue);
17339 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
17340 if (gs != GS_ALL_DONE)
17341 return GS_ERROR;
17344 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
17345 rhs = tmp_load;
17346 storestmt
17347 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
17348 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
17350 gimple_omp_atomic_set_weak (loadstmt);
17351 gimple_omp_atomic_set_weak (storestmt);
17353 gimplify_seq_add_stmt (pre_p, storestmt);
17354 switch (TREE_CODE (*expr_p))
17356 case OMP_ATOMIC_READ:
17357 case OMP_ATOMIC_CAPTURE_OLD:
17358 *expr_p = tmp_load;
17359 gimple_omp_atomic_set_need_value (loadstmt);
17360 break;
17361 case OMP_ATOMIC_CAPTURE_NEW:
17362 *expr_p = rhs;
17363 gimple_omp_atomic_set_need_value (storestmt);
17364 break;
17365 default:
17366 *expr_p = NULL;
17367 break;
17370 return GS_ALL_DONE;
17373 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
17374 body, and adding some EH bits. */
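/* E.g. (sketch): "__transaction_atomic { body }" becomes a
   GIMPLE_TRANSACTION tuple wrapping the gimplified body, with
   GTMA_IS_OUTER or GTMA_IS_RELAXED set in the subcode for the
   __transaction_atomic [[outer]] and __transaction_relaxed forms
   respectively.  */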
17376 static enum gimplify_status
17377 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
17379 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
17380 gimple *body_stmt;
17381 gtransaction *trans_stmt;
17382 gimple_seq body = NULL;
17383 int subcode = 0;
17385 /* Wrap the transaction body in a BIND_EXPR so we have a context
17386 in which to put decls for OMP. */
17387 if (TREE_CODE (tbody) != BIND_EXPR)
17389 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
17390 TREE_SIDE_EFFECTS (bind) = 1;
17391 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
17392 TRANSACTION_EXPR_BODY (expr) = bind;
17395 push_gimplify_context ();
17396 temp = voidify_wrapper_expr (*expr_p, NULL);
17398 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
17399 pop_gimplify_context (body_stmt);
17401 trans_stmt = gimple_build_transaction (body);
17402 if (TRANSACTION_EXPR_OUTER (expr))
17403 subcode = GTMA_IS_OUTER;
17404 else if (TRANSACTION_EXPR_RELAXED (expr))
17405 subcode = GTMA_IS_RELAXED;
17406 gimple_transaction_set_subcode (trans_stmt, subcode);
17408 gimplify_seq_add_stmt (pre_p, trans_stmt);
17410 if (temp)
17412 *expr_p = temp;
17413 return GS_OK;
17416 *expr_p = NULL_TREE;
17417 return GS_ALL_DONE;
17420 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
17421 is the OMP_BODY of the original EXPR (the body has already been
17422 gimplified, so it is no longer present in EXPR).
17424 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
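/* For example (a sketch): inside a loop nest with an ordered(2) clause
   and iteration variables i and j,

     #pragma omp ordered doacross(sink: i - 1, j)

   must list the iteration variables of the outermost loops in order;
   the checks below diagnose mismatches and rewrite each TREE_VALUE to
   the corresponding internal iteration variable.  */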
17426 static gimple *
17427 gimplify_omp_ordered (tree expr, gimple_seq body)
17429 tree c, decls;
17430 int failures = 0;
17431 unsigned int i;
17432 tree source_c = NULL_TREE;
17433 tree sink_c = NULL_TREE;
17435 if (gimplify_omp_ctxp)
17437 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
17438 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
17439 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
17441 error_at (OMP_CLAUSE_LOCATION (c),
17442 "%<ordered%> construct with %qs clause must be "
17443 "closely nested inside a loop with %<ordered%> clause",
17444 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
17445 failures++;
17447 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
17448 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
17450 bool fail = false;
17451 sink_c = c;
17452 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
17453 continue; /* omp_cur_iteration - 1 */
17454 for (decls = OMP_CLAUSE_DECL (c), i = 0;
17455 decls && TREE_CODE (decls) == TREE_LIST;
17456 decls = TREE_CHAIN (decls), ++i)
17457 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
17458 continue;
17459 else if (TREE_VALUE (decls)
17460 != gimplify_omp_ctxp->loop_iter_var[2 * i])
17462 error_at (OMP_CLAUSE_LOCATION (c),
17463 "variable %qE is not an iteration "
17464 "of outermost loop %d, expected %qE",
17465 TREE_VALUE (decls), i + 1,
17466 gimplify_omp_ctxp->loop_iter_var[2 * i]);
17467 fail = true;
17468 failures++;
17470 else
17471 TREE_VALUE (decls)
17472 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
17473 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
17475 error_at (OMP_CLAUSE_LOCATION (c),
17476 "number of variables in %qs clause with "
17477 "%<sink%> modifier does not match number of "
17478 "iteration variables",
17479 OMP_CLAUSE_DOACROSS_DEPEND (c)
17480 ? "depend" : "doacross");
17481 failures++;
17484 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
17485 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
17487 if (source_c)
17489 error_at (OMP_CLAUSE_LOCATION (c),
17490 "more than one %qs clause with %<source%> "
17491 "modifier on an %<ordered%> construct",
17492 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
17493 ? "depend" : "doacross");
17494 failures++;
17496 else
17497 source_c = c;
17500 if (source_c && sink_c)
17502 error_at (OMP_CLAUSE_LOCATION (source_c),
17503 "%qs clause with %<source%> modifier specified "
17504 "together with %qs clauses with %<sink%> modifier "
17505 "on the same construct",
17506 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
17507 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
17508 failures++;
17511 if (failures)
17512 return gimple_build_nop ();
17513 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
17516 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
17517 expression produces a value to be used as an operand inside a GIMPLE
17518 statement, the value will be stored back in *EXPR_P. This value will
17519 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
17520 an SSA_NAME. The corresponding sequence of GIMPLE statements is
17521 emitted in PRE_P and POST_P.
17523 Additionally, this process may overwrite parts of the input
17524 expression during gimplification. Ideally, it should be
17525 possible to do non-destructive gimplification.
17527 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
17528 the expression needs to evaluate to a value to be used as
17529 an operand in a GIMPLE statement, this value will be stored in
17530 *EXPR_P on exit. This happens when the caller specifies one
17531 of fb_lvalue or fb_rvalue fallback flags.
17533 PRE_P will contain the sequence of GIMPLE statements corresponding
17534 to the evaluation of EXPR and all the side-effects that must
17535 be executed before the main expression. On exit, the last
17536 statement of PRE_P is the core statement being gimplified. For
17537 instance, when gimplifying 'if (++a)' the last statement in
17538 PRE_P will be 'if (t.1)' where t.1 is the result of
17539 pre-incrementing 'a'.
17541 POST_P will contain the sequence of GIMPLE statements corresponding
17542 to the evaluation of all the side-effects that must be executed
17543 after the main expression. If this is NULL, the post
17544 side-effects are stored at the end of PRE_P.
17546 The reason why the output is split in two is to handle post
17547 side-effects explicitly. In some cases, an expression may have
17548 inner and outer post side-effects which need to be emitted in
17549 an order different from the one given by the recursive
17550 traversal. For instance, for the expression (*p--)++ the post
17551 side-effects of '--' must actually occur *after* the post
17552 side-effects of '++'. However, gimplification will first visit
17553 the inner expression, so if a separate POST sequence was not
17554 used, the resulting sequence would be:
17556 1 t.1 = *p
17557 2 p = p - 1
17558 3 t.2 = t.1 + 1
17559 4 *p = t.2
17561 However, the post-decrement operation in line #2 must not be
17562 evaluated until after the store to *p at line #4, so the
17563 correct sequence should be:
17565 1 t.1 = *p
17566 2 t.2 = t.1 + 1
17567 3 *p = t.2
17568 4 p = p - 1
17570 So, by specifying a separate post queue, it is possible
17571 to emit the post side-effects in the correct order.
17572 If POST_P is NULL, an internal queue will be used. Before
17573 returning to the caller, the sequence POST_P is appended to
17574 the main output sequence PRE_P.
17576 GIMPLE_TEST_F points to a function that takes a tree T and
17577 returns nonzero if T is in the GIMPLE form requested by the
17578 caller. The GIMPLE predicates are in gimple.cc.
17580 FALLBACK tells the function what sort of a temporary we want if
17581 gimplification cannot produce an expression that complies with
17582 GIMPLE_TEST_F.
17584 fb_none means that no temporary should be generated
17585 fb_rvalue means that an rvalue is OK to generate
17586 fb_lvalue means that an lvalue is OK to generate
17587 fb_either means that either is OK, but an lvalue is preferable.
17588 fb_mayfail means that gimplification may fail (in which case
17589 GS_ERROR will be returned)
17591 The return value is either GS_ERROR or GS_ALL_DONE, since this
17592 function iterates until EXPR is completely gimplified or an error
17593 occurs. */
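/* A typical use from one of the gimplification helpers above (sketch):

     ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
                          is_gimple_val, fb_rvalue);

   i.e. force operand 0 into a GIMPLE value, appending any statements
   that requires to *PRE_P.  */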
17595 enum gimplify_status
17596 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17597 bool (*gimple_test_f) (tree), fallback_t fallback)
17599 tree tmp;
17600 gimple_seq internal_pre = NULL;
17601 gimple_seq internal_post = NULL;
17602 tree save_expr;
17603 bool is_statement;
17604 location_t saved_location;
17605 enum gimplify_status ret;
17606 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
17607 tree label;
17609 save_expr = *expr_p;
17610 if (save_expr == NULL_TREE)
17611 return GS_ALL_DONE;
17613 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
17614 is_statement = gimple_test_f == is_gimple_stmt;
17615 if (is_statement)
17616 gcc_assert (pre_p);
17618 /* Consistency checks. */
17619 if (gimple_test_f == is_gimple_reg)
17620 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
17621 else if (gimple_test_f == is_gimple_val
17622 || gimple_test_f == is_gimple_call_addr
17623 || gimple_test_f == is_gimple_condexpr_for_cond
17624 || gimple_test_f == is_gimple_mem_rhs
17625 || gimple_test_f == is_gimple_mem_rhs_or_call
17626 || gimple_test_f == is_gimple_reg_rhs
17627 || gimple_test_f == is_gimple_reg_rhs_or_call
17628 || gimple_test_f == is_gimple_asm_val
17629 || gimple_test_f == is_gimple_mem_ref_addr)
17630 gcc_assert (fallback & fb_rvalue);
17631 else if (gimple_test_f == is_gimple_min_lval
17632 || gimple_test_f == is_gimple_lvalue)
17633 gcc_assert (fallback & fb_lvalue);
17634 else if (gimple_test_f == is_gimple_addressable)
17635 gcc_assert (fallback & fb_either);
17636 else if (gimple_test_f == is_gimple_stmt)
17637 gcc_assert (fallback == fb_none);
17638 else
17640 /* We should have recognized the GIMPLE_TEST_F predicate to
17641 know what kind of fallback to use in case a temporary is
17642 needed to hold the value or address of *EXPR_P. */
17643 gcc_unreachable ();
17646 /* We used to check the predicate here and return immediately if it
17647 succeeds. This is wrong; the design is for gimplification to be
17648 idempotent, and for the predicates to only test for valid forms, not
17649 whether they are fully simplified. */
17650 if (pre_p == NULL)
17651 pre_p = &internal_pre;
17653 if (post_p == NULL)
17654 post_p = &internal_post;
17656 /* Remember the last statements added to PRE_P and POST_P. Every
17657 new statement added by the gimplification helpers needs to be
17658 annotated with location information. To centralize the
17659 responsibility, we remember the last statement that had been
17660 added to both queues before gimplifying *EXPR_P. If
17661 gimplification produces new statements in PRE_P and POST_P, those
17662 statements will be annotated with the same location information
17663 as *EXPR_P. */
17664 pre_last_gsi = gsi_last (*pre_p);
17665 post_last_gsi = gsi_last (*post_p);
17667 saved_location = input_location;
17668 if (save_expr != error_mark_node
17669 && EXPR_HAS_LOCATION (*expr_p))
17670 input_location = EXPR_LOCATION (*expr_p);
17672 /* Loop over the specific gimplifiers until the toplevel node
17673 remains the same. */
17676 /* Strip away as many useless type conversions as possible
17677 at the toplevel. */
17678 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
17680 /* Remember the expr. */
17681 save_expr = *expr_p;
17683 /* Die, die, die, my darling. */
17684 if (error_operand_p (save_expr))
17686 ret = GS_ERROR;
17687 break;
17690 /* Do any language-specific gimplification. */
17691 ret = ((enum gimplify_status)
17692 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
17693 if (ret == GS_OK)
17695 if (*expr_p == NULL_TREE)
17696 break;
17697 if (*expr_p != save_expr)
17698 continue;
17700 else if (ret != GS_UNHANDLED)
17701 break;
17703 /* Make sure that all the cases set 'ret' appropriately. */
17704 ret = GS_UNHANDLED;
17705 switch (TREE_CODE (*expr_p))
17707 /* First deal with the special cases. */
17709 case POSTINCREMENT_EXPR:
17710 case POSTDECREMENT_EXPR:
17711 case PREINCREMENT_EXPR:
17712 case PREDECREMENT_EXPR:
17713 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
17714 fallback != fb_none,
17715 TREE_TYPE (*expr_p));
17716 break;
17718 case VIEW_CONVERT_EXPR:
17719 if ((fallback & fb_rvalue)
17720 && is_gimple_reg_type (TREE_TYPE (*expr_p))
17721 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
17723 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17724 post_p, is_gimple_val, fb_rvalue);
17725 recalculate_side_effects (*expr_p);
17726 break;
17728 /* Fallthru. */
17730 case ARRAY_REF:
17731 case ARRAY_RANGE_REF:
17732 case REALPART_EXPR:
17733 case IMAGPART_EXPR:
17734 case COMPONENT_REF:
17735 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
17736 fallback ? fallback : fb_rvalue);
17737 break;
17739 case COND_EXPR:
17740 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
17742 /* C99 code may assign to an array in a structure value of a
17743 conditional expression, and this has undefined behavior
17744 only on execution, so create a temporary if an lvalue is
17745 required. */
17746 if (fallback == fb_lvalue)
17748 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
17749 mark_addressable (*expr_p);
17750 ret = GS_OK;
17752 break;
17754 case CALL_EXPR:
17755 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
17757 /* C99 code may assign to an array in a structure returned
17758 from a function, and this has undefined behavior only on
17759 execution, so create a temporary if an lvalue is
17760 required. */
17761 if (fallback == fb_lvalue)
17763 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
17764 mark_addressable (*expr_p);
17765 ret = GS_OK;
17767 break;
17769 case TREE_LIST:
17770 gcc_unreachable ();
17772 case OMP_ARRAY_SECTION:
17773 gcc_unreachable ();
17775 case COMPOUND_EXPR:
17776 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
17777 break;
17779 case COMPOUND_LITERAL_EXPR:
17780 ret = gimplify_compound_literal_expr (expr_p, pre_p,
17781 gimple_test_f, fallback);
17782 break;
17784 case MODIFY_EXPR:
17785 case INIT_EXPR:
17786 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
17787 fallback != fb_none);
17788 break;
17790 case TRUTH_ANDIF_EXPR:
17791 case TRUTH_ORIF_EXPR:
17793 /* Preserve the original type of the expression and the
17794 source location of the outer expression. */
17795 tree org_type = TREE_TYPE (*expr_p);
17796 *expr_p = gimple_boolify (*expr_p);
17797 *expr_p = build3_loc (input_location, COND_EXPR,
17798 org_type, *expr_p,
17799 fold_convert_loc
17800 (input_location,
17801 org_type, boolean_true_node),
17802 fold_convert_loc
17803 (input_location,
17804 org_type, boolean_false_node));
17805 ret = GS_OK;
17806 break;
17809 case TRUTH_NOT_EXPR:
17811 tree type = TREE_TYPE (*expr_p);
17812 /* The parsers are careful to generate TRUTH_NOT_EXPR
17813 only with operands that are always zero or one.
17814 We do not fold here but handle the only interesting case
17815 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
17816 *expr_p = gimple_boolify (*expr_p);
17817 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
17818 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
17819 TREE_TYPE (*expr_p),
17820 TREE_OPERAND (*expr_p, 0));
17821 else
17822 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
17823 TREE_TYPE (*expr_p),
17824 TREE_OPERAND (*expr_p, 0),
17825 build_int_cst (TREE_TYPE (*expr_p), 1));
17826 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
17827 *expr_p = fold_convert_loc (input_location, type, *expr_p);
17828 ret = GS_OK;
17829 break;
17832 case ADDR_EXPR:
17833 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
17834 break;
17836 case ANNOTATE_EXPR:
17838 tree cond = TREE_OPERAND (*expr_p, 0);
17839 tree kind = TREE_OPERAND (*expr_p, 1);
17840 tree data = TREE_OPERAND (*expr_p, 2);
17841 tree type = TREE_TYPE (cond);
17842 if (!INTEGRAL_TYPE_P (type))
17844 *expr_p = cond;
17845 ret = GS_OK;
17846 break;
17848 tree tmp = create_tmp_var (type);
17849 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
17850 gcall *call
17851 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
17852 gimple_call_set_lhs (call, tmp);
17853 gimplify_seq_add_stmt (pre_p, call);
17854 *expr_p = tmp;
17855 ret = GS_ALL_DONE;
17856 break;
17859 case VA_ARG_EXPR:
17860 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
17861 break;
17863 CASE_CONVERT:
17864 if (IS_EMPTY_STMT (*expr_p))
17866 ret = GS_ALL_DONE;
17867 break;
17870 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
17871 || fallback == fb_none)
17873 /* Just strip a conversion to void (or in void context) and
17874 try again. */
17875 *expr_p = TREE_OPERAND (*expr_p, 0);
17876 ret = GS_OK;
17877 break;
17880 ret = gimplify_conversion (expr_p);
17881 if (ret == GS_ERROR)
17882 break;
17883 if (*expr_p != save_expr)
17884 break;
17885 /* FALLTHRU */
17887 case FIX_TRUNC_EXPR:
17888 /* unary_expr: ... | '(' cast ')' val | ... */
17889 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17890 is_gimple_val, fb_rvalue);
17891 recalculate_side_effects (*expr_p);
17892 break;
17894 case INDIRECT_REF:
17896 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
17897 bool notrap = TREE_THIS_NOTRAP (*expr_p);
17898 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
17900 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
17901 if (*expr_p != save_expr)
17903 ret = GS_OK;
17904 break;
17907 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17908 is_gimple_reg, fb_rvalue);
17909 if (ret == GS_ERROR)
17910 break;
17912 recalculate_side_effects (*expr_p);
17913 *expr_p = fold_build2_loc (input_location, MEM_REF,
17914 TREE_TYPE (*expr_p),
17915 TREE_OPERAND (*expr_p, 0),
17916 build_int_cst (saved_ptr_type, 0));
17917 TREE_THIS_VOLATILE (*expr_p) = volatilep;
17918 TREE_THIS_NOTRAP (*expr_p) = notrap;
17919 ret = GS_OK;
17920 break;
17923 /* We arrive here through the various re-gimplification paths. */
17924 case MEM_REF:
17925 /* First try re-folding the whole thing. */
17926 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
17927 TREE_OPERAND (*expr_p, 0),
17928 TREE_OPERAND (*expr_p, 1));
17929 if (tmp)
17931 REF_REVERSE_STORAGE_ORDER (tmp)
17932 = REF_REVERSE_STORAGE_ORDER (*expr_p);
17933 *expr_p = tmp;
17934 recalculate_side_effects (*expr_p);
17935 ret = GS_OK;
17936 break;
17938 /* Avoid re-gimplifying the address operand if it is already
17939 in suitable form. Re-gimplifying would mark the address
17940 operand addressable. Always gimplify when not in SSA form
17941 as we still may have to gimplify decls with value-exprs. */
17942 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
17943 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
17945 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17946 is_gimple_mem_ref_addr, fb_rvalue);
17947 if (ret == GS_ERROR)
17948 break;
17950 recalculate_side_effects (*expr_p);
17951 ret = GS_ALL_DONE;
17952 break;
17954 /* Constants need not be gimplified. */
17955 case INTEGER_CST:
17956 case REAL_CST:
17957 case FIXED_CST:
17958 case STRING_CST:
17959 case COMPLEX_CST:
17960 case VECTOR_CST:
17961 /* Drop the overflow flag on constants; we do not want
17962 that in the GIMPLE IL. */
17963 if (TREE_OVERFLOW_P (*expr_p))
17964 *expr_p = drop_tree_overflow (*expr_p);
17965 ret = GS_ALL_DONE;
17966 break;
17968 case CONST_DECL:
17969 /* If we require an lvalue, such as for ADDR_EXPR, retain the
17970 CONST_DECL node. Otherwise the decl is replaceable by its
17971 value. */
17972 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
17973 if (fallback & fb_lvalue)
17974 ret = GS_ALL_DONE;
17975 else
17977 *expr_p = DECL_INITIAL (*expr_p);
17978 ret = GS_OK;
17980 break;
17982 case DECL_EXPR:
17983 ret = gimplify_decl_expr (expr_p, pre_p);
17984 break;
17986 case BIND_EXPR:
17987 ret = gimplify_bind_expr (expr_p, pre_p);
17988 break;
17990 case LOOP_EXPR:
17991 ret = gimplify_loop_expr (expr_p, pre_p);
17992 break;
17994 case SWITCH_EXPR:
17995 ret = gimplify_switch_expr (expr_p, pre_p);
17996 break;
17998 case EXIT_EXPR:
17999 ret = gimplify_exit_expr (expr_p);
18000 break;
18002 case GOTO_EXPR:
18003 /* If the target is not a LABEL_DECL, then it is a computed jump
18004 and the target needs to be gimplified. */
18005 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
18007 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
18008 NULL, is_gimple_val, fb_rvalue);
18009 if (ret == GS_ERROR)
18010 break;
18012 gimplify_seq_add_stmt (pre_p,
18013 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
18014 ret = GS_ALL_DONE;
18015 break;
18017 case PREDICT_EXPR:
18018 gimplify_seq_add_stmt (pre_p,
18019 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
18020 PREDICT_EXPR_OUTCOME (*expr_p)));
18021 ret = GS_ALL_DONE;
18022 break;
18024 case LABEL_EXPR:
18025 ret = gimplify_label_expr (expr_p, pre_p);
18026 label = LABEL_EXPR_LABEL (*expr_p);
18027 gcc_assert (decl_function_context (label) == current_function_decl);
18029 /* If the label is used in a goto statement, or the address of the
18030 label is taken, we need to unpoison all variables that were seen
18031 so far.  Doing so prevents us from reporting false positives. */
18032 if (asan_poisoned_variables
18033 && asan_used_labels != NULL
18034 && asan_used_labels->contains (label)
18035 && !gimplify_omp_ctxp)
18036 asan_poison_variables (asan_poisoned_variables, false, pre_p);
18037 break;
18039 case CASE_LABEL_EXPR:
18040 ret = gimplify_case_label_expr (expr_p, pre_p);
18042 if (gimplify_ctxp->live_switch_vars)
18043 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
18044 pre_p);
18045 break;
18047 case RETURN_EXPR:
18048 ret = gimplify_return_expr (*expr_p, pre_p);
18049 break;
18051 case CONSTRUCTOR:
18052 /* Don't reduce this in place; let gimplify_init_constructor work its
18053 magic.  But if we're just elaborating this for side effects, just
18054 gimplify any element that has side-effects. */
18055 if (fallback == fb_none)
18057 unsigned HOST_WIDE_INT ix;
18058 tree val;
18059 tree temp = NULL_TREE;
18060 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
18061 if (TREE_SIDE_EFFECTS (val))
18062 append_to_statement_list (val, &temp);
18064 *expr_p = temp;
18065 ret = temp ? GS_OK : GS_ALL_DONE;
18067 /* C99 code may assign to an array in a constructed
18068 structure or union, and this has undefined behavior only
18069 on execution, so create a temporary if an lvalue is
18070 required. */
18071 else if (fallback == fb_lvalue)
18073 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
18074 mark_addressable (*expr_p);
18075 ret = GS_OK;
18077 else
18078 ret = GS_ALL_DONE;
18079 break;
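/* A sketch of the two non-default paths above, in C99 terms
   (hypothetical examples; the exact trees depend on the front end):

     (void) (struct s) { f () };     // fb_none: keep only f ()'s
                                     // side effects
     ((struct s) { 0 }).a[i] = 1;    // fb_lvalue: materialize the
                                     // constructor in a temporary
*/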
18081 /* The following are special cases that are not handled by the
18082 original GIMPLE grammar. */
18084 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
18085 eliminated. */
18086 case SAVE_EXPR:
18087 ret = gimplify_save_expr (expr_p, pre_p, post_p);
18088 break;
18090 case BIT_FIELD_REF:
18091 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18092 post_p, is_gimple_lvalue, fb_either);
18093 recalculate_side_effects (*expr_p);
18094 break;
18096 case TARGET_MEM_REF:
18098 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
18100 if (TMR_BASE (*expr_p))
18101 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
18102 post_p, is_gimple_mem_ref_addr, fb_either);
18103 if (TMR_INDEX (*expr_p))
18104 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
18105 post_p, is_gimple_val, fb_rvalue);
18106 if (TMR_INDEX2 (*expr_p))
18107 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
18108 post_p, is_gimple_val, fb_rvalue);
18109 /* TMR_STEP and TMR_OFFSET are always integer constants. */
18110 ret = MIN (r0, r1);
18112 break;
18114 case NON_LVALUE_EXPR:
18115 /* This should have been stripped above. */
18116 gcc_unreachable ();
18118 case ASM_EXPR:
18119 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
18120 break;
18122 case TRY_FINALLY_EXPR:
18123 case TRY_CATCH_EXPR:
18125 gimple_seq eval, cleanup;
18126 gtry *try_;
18128 /* Calls to destructors are generated automatically in the FINALLY/CATCH
18129 block. They should have UNKNOWN_LOCATION as their location. However,
18130 gimplify_call_expr will reset such call stmts to input_location
18131 if it finds that a stmt's location is unknown. To prevent that
18132 resetting for destructors, we set input_location to unknown here.
18133 Note that this only affects the destructor calls in the FINALLY/CATCH
18134 block; input_location is automatically restored to its original value
18135 by the end of gimplify_expr. */
18136 input_location = UNKNOWN_LOCATION;
18137 eval = cleanup = NULL;
18138 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
18139 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
18140 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
18142 gimple_seq n = NULL, e = NULL;
18143 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
18144 0), &n);
18145 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
18146 1), &e);
18147 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
18149 geh_else *stmt = gimple_build_eh_else (n, e);
18150 gimple_seq_add_stmt (&cleanup, stmt);
18153 else
18154 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
18155 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
18156 if (gimple_seq_empty_p (cleanup))
18158 gimple_seq_add_seq (pre_p, eval);
18159 ret = GS_ALL_DONE;
18160 break;
18162 try_ = gimple_build_try (eval, cleanup,
18163 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
18164 ? GIMPLE_TRY_FINALLY
18165 : GIMPLE_TRY_CATCH);
18166 if (EXPR_HAS_LOCATION (save_expr))
18167 gimple_set_location (try_, EXPR_LOCATION (save_expr));
18168 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
18169 gimple_set_location (try_, saved_location);
18170 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
18171 gimple_try_set_catch_is_cleanup (try_,
18172 TRY_CATCH_IS_CLEANUP (*expr_p));
18173 gimplify_seq_add_stmt (pre_p, try_);
18174 ret = GS_ALL_DONE;
18175 break;
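/* For instance, C++ code such as

     { X x; f (); }

   arrives here as a TRY_FINALLY_EXPR whose cleanup holds the implicit
   destructor call, and is lowered to roughly (a sketch, not verbatim
   dump output):

     try
       {
         f ();
       }
     finally
       {
         X::~X (&x);
       }
*/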
18178 case CLEANUP_POINT_EXPR:
18179 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
18180 break;
18182 case TARGET_EXPR:
18183 ret = gimplify_target_expr (expr_p, pre_p, post_p);
18184 break;
18186 case CATCH_EXPR:
18188 gimple *c;
18189 gimple_seq handler = NULL;
18190 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
18191 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
18192 gimplify_seq_add_stmt (pre_p, c);
18193 ret = GS_ALL_DONE;
18194 break;
18197 case EH_FILTER_EXPR:
18199 gimple *ehf;
18200 gimple_seq failure = NULL;
18202 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
18203 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
18204 copy_warning (ehf, *expr_p);
18205 gimplify_seq_add_stmt (pre_p, ehf);
18206 ret = GS_ALL_DONE;
18207 break;
18210 case OBJ_TYPE_REF:
18212 enum gimplify_status r0, r1;
18213 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
18214 post_p, is_gimple_val, fb_rvalue);
18215 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
18216 post_p, is_gimple_val, fb_rvalue);
18217 TREE_SIDE_EFFECTS (*expr_p) = 0;
18218 ret = MIN (r0, r1);
18220 break;
18222 case LABEL_DECL:
18223 /* We get here when taking the address of a label. We mark
18224 the label as "forced", meaning it can never be removed and
18225 it is a potential target for any computed goto. */
18226 FORCED_LABEL (*expr_p) = 1;
18227 ret = GS_ALL_DONE;
18228 break;
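/* E.g. the GNU C address-of-label extension

     static void *tgt;
     void f (void) { tgt = &&lab; lab:; }

   reaches this case for "&&lab"; forcing the label keeps it alive even
   if it otherwise looks unused (an illustrative example).  */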
18230 case STATEMENT_LIST:
18231 ret = gimplify_statement_list (expr_p, pre_p);
18232 break;
18234 case WITH_SIZE_EXPR:
18236 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18237 post_p == &internal_post ? NULL : post_p,
18238 gimple_test_f, fallback);
18239 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
18240 is_gimple_val, fb_rvalue);
18241 ret = GS_ALL_DONE;
18243 break;
18245 case VAR_DECL:
18246 case PARM_DECL:
18247 ret = gimplify_var_or_parm_decl (expr_p);
18248 break;
18250 case RESULT_DECL:
18251 /* When within an OMP context, notice uses of variables. */
18252 if (gimplify_omp_ctxp)
18253 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
18254 ret = GS_ALL_DONE;
18255 break;
18257 case DEBUG_EXPR_DECL:
18258 gcc_unreachable ();
18260 case DEBUG_BEGIN_STMT:
18261 gimplify_seq_add_stmt (pre_p,
18262 gimple_build_debug_begin_stmt
18263 (TREE_BLOCK (*expr_p),
18264 EXPR_LOCATION (*expr_p)));
18265 ret = GS_ALL_DONE;
18266 *expr_p = NULL;
18267 break;
18269 case SSA_NAME:
18270 /* Allow callbacks into the gimplifier during optimization. */
18271 ret = GS_ALL_DONE;
18272 break;
18274 case OMP_PARALLEL:
18275 gimplify_omp_parallel (expr_p, pre_p);
18276 ret = GS_ALL_DONE;
18277 break;
18279 case OMP_TASK:
18280 gimplify_omp_task (expr_p, pre_p);
18281 ret = GS_ALL_DONE;
18282 break;
18284 case OMP_SIMD:
18286 /* Temporarily disable into_ssa, as scan_omp_simd,
18287 which calls copy_gimple_seq_and_replace_locals, can't properly
18288 deal with SSA_NAMEs defined outside of the body. */
18289 bool saved_into_ssa = gimplify_ctxp->into_ssa;
18290 gimplify_ctxp->into_ssa = false;
18291 ret = gimplify_omp_for (expr_p, pre_p);
18292 gimplify_ctxp->into_ssa = saved_into_ssa;
18293 break;
18296 case OMP_FOR:
18297 case OMP_DISTRIBUTE:
18298 case OMP_TASKLOOP:
18299 case OACC_LOOP:
18300 ret = gimplify_omp_for (expr_p, pre_p);
18301 break;
18303 case OMP_LOOP:
18304 ret = gimplify_omp_loop (expr_p, pre_p);
18305 break;
18307 case OACC_CACHE:
18308 gimplify_oacc_cache (expr_p, pre_p);
18309 ret = GS_ALL_DONE;
18310 break;
18312 case OACC_DECLARE:
18313 gimplify_oacc_declare (expr_p, pre_p);
18314 ret = GS_ALL_DONE;
18315 break;
18317 case OACC_HOST_DATA:
18318 case OACC_DATA:
18319 case OACC_KERNELS:
18320 case OACC_PARALLEL:
18321 case OACC_SERIAL:
18322 case OMP_SCOPE:
18323 case OMP_SECTIONS:
18324 case OMP_SINGLE:
18325 case OMP_TARGET:
18326 case OMP_TARGET_DATA:
18327 case OMP_TEAMS:
18328 gimplify_omp_workshare (expr_p, pre_p);
18329 ret = GS_ALL_DONE;
18330 break;
18332 case OACC_ENTER_DATA:
18333 case OACC_EXIT_DATA:
18334 case OACC_UPDATE:
18335 case OMP_TARGET_UPDATE:
18336 case OMP_TARGET_ENTER_DATA:
18337 case OMP_TARGET_EXIT_DATA:
18338 gimplify_omp_target_update (expr_p, pre_p);
18339 ret = GS_ALL_DONE;
18340 break;
18342 case OMP_SECTION:
18343 case OMP_STRUCTURED_BLOCK:
18344 case OMP_MASTER:
18345 case OMP_MASKED:
18346 case OMP_ORDERED:
18347 case OMP_CRITICAL:
18348 case OMP_SCAN:
18350 gimple_seq body = NULL;
18351 gimple *g;
18352 bool saved_in_omp_construct = in_omp_construct;
18354 in_omp_construct = true;
18355 gimplify_and_add (OMP_BODY (*expr_p), &body);
18356 in_omp_construct = saved_in_omp_construct;
18357 switch (TREE_CODE (*expr_p))
18359 case OMP_SECTION:
18360 g = gimple_build_omp_section (body);
18361 break;
18362 case OMP_STRUCTURED_BLOCK:
18363 g = gimple_build_omp_structured_block (body);
18364 break;
18365 case OMP_MASTER:
18366 g = gimple_build_omp_master (body);
18367 break;
18368 case OMP_ORDERED:
18369 g = gimplify_omp_ordered (*expr_p, body);
18370 if (OMP_BODY (*expr_p) == NULL_TREE
18371 && gimple_code (g) == GIMPLE_OMP_ORDERED)
18372 gimple_omp_ordered_standalone (g);
18373 break;
18374 case OMP_MASKED:
18375 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
18376 pre_p, ORT_WORKSHARE, OMP_MASKED);
18377 gimplify_adjust_omp_clauses (pre_p, body,
18378 &OMP_MASKED_CLAUSES (*expr_p),
18379 OMP_MASKED);
18380 g = gimple_build_omp_masked (body,
18381 OMP_MASKED_CLAUSES (*expr_p));
18382 break;
18383 case OMP_CRITICAL:
18384 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
18385 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
18386 gimplify_adjust_omp_clauses (pre_p, body,
18387 &OMP_CRITICAL_CLAUSES (*expr_p),
18388 OMP_CRITICAL);
18389 g = gimple_build_omp_critical (body,
18390 OMP_CRITICAL_NAME (*expr_p),
18391 OMP_CRITICAL_CLAUSES (*expr_p));
18392 break;
18393 case OMP_SCAN:
18394 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
18395 pre_p, ORT_WORKSHARE, OMP_SCAN);
18396 gimplify_adjust_omp_clauses (pre_p, body,
18397 &OMP_SCAN_CLAUSES (*expr_p),
18398 OMP_SCAN);
18399 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
18400 break;
18401 default:
18402 gcc_unreachable ();
18404 gimplify_seq_add_stmt (pre_p, g);
18405 ret = GS_ALL_DONE;
18406 break;
18409 case OMP_TASKGROUP:
18411 gimple_seq body = NULL;
18413 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
18414 bool saved_in_omp_construct = in_omp_construct;
18415 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
18416 OMP_TASKGROUP);
18417 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
18419 in_omp_construct = true;
18420 gimplify_and_add (OMP_BODY (*expr_p), &body);
18421 in_omp_construct = saved_in_omp_construct;
18422 gimple_seq cleanup = NULL;
18423 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
18424 gimple *g = gimple_build_call (fn, 0);
18425 gimple_seq_add_stmt (&cleanup, g);
18426 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
18427 body = NULL;
18428 gimple_seq_add_stmt (&body, g);
18429 g = gimple_build_omp_taskgroup (body, *pclauses);
18430 gimplify_seq_add_stmt (pre_p, g);
18431 ret = GS_ALL_DONE;
18432 break;
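/* The result for "#pragma omp taskgroup" is, schematically (a sketch of
   the shape built above, not verbatim dump output):

     #pragma omp taskgroup
       try
         {
           <body>
         }
       finally
         {
           GOMP_taskgroup_end ();
         }
*/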
18435 case OMP_ATOMIC:
18436 case OMP_ATOMIC_READ:
18437 case OMP_ATOMIC_CAPTURE_OLD:
18438 case OMP_ATOMIC_CAPTURE_NEW:
18439 ret = gimplify_omp_atomic (expr_p, pre_p);
18440 break;
18442 case TRANSACTION_EXPR:
18443 ret = gimplify_transaction (expr_p, pre_p);
18444 break;
18446 case TRUTH_AND_EXPR:
18447 case TRUTH_OR_EXPR:
18448 case TRUTH_XOR_EXPR:
18450 tree orig_type = TREE_TYPE (*expr_p);
18451 tree new_type, xop0, xop1;
18452 *expr_p = gimple_boolify (*expr_p);
18453 new_type = TREE_TYPE (*expr_p);
18454 if (!useless_type_conversion_p (orig_type, new_type))
18456 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
18457 ret = GS_OK;
18458 break;
18461 /* Boolified binary truth expressions are semantically equivalent
18462 to bitwise binary expressions. Canonicalize them to the
18463 bitwise variant. */
18464 switch (TREE_CODE (*expr_p))
18466 case TRUTH_AND_EXPR:
18467 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
18468 break;
18469 case TRUTH_OR_EXPR:
18470 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
18471 break;
18472 case TRUTH_XOR_EXPR:
18473 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
18474 break;
18475 default:
18476 break;
18478 /* Now make sure that operands have compatible type to
18479 expression's new_type. */
18480 xop0 = TREE_OPERAND (*expr_p, 0);
18481 xop1 = TREE_OPERAND (*expr_p, 1);
18482 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
18483 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
18484 new_type,
18485 xop0);
18486 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
18487 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
18488 new_type,
18489 xop1);
18490 /* Continue classified as tcc_binary. */
18491 goto expr_2;
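/* Schematically: a TRUTH_AND_EXPR over two boolean operands, such as a
   non-short-circuit logical AND whose operands were proved side-effect
   free elsewhere in this file, is emitted as the single bitwise
   statement

     t = b1 & b2;

   with conversions inserted when the boolified type differs from the
   original type (a sketch; exact types depend on the language).  */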
18494 case VEC_COND_EXPR:
18495 goto expr_3;
18497 case VEC_PERM_EXPR:
18498 /* Classified as tcc_expression. */
18499 goto expr_3;
18501 case BIT_INSERT_EXPR:
18502 /* Argument 3 is a constant. */
18503 goto expr_2;
18505 case POINTER_PLUS_EXPR:
18507 enum gimplify_status r0, r1;
18508 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18509 post_p, is_gimple_val, fb_rvalue);
18510 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
18511 post_p, is_gimple_val, fb_rvalue);
18512 recalculate_side_effects (*expr_p);
18513 ret = MIN (r0, r1);
18514 break;
18517 default:
18518 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
18520 case tcc_comparison:
18521 /* Handle comparison of objects of non-scalar-mode aggregates
18522 with a call to memcmp. It would be nice to only have to do
18523 this for variable-sized objects, but then we'd have to allow
18524 the same nest of reference nodes we allow for MODIFY_EXPR and
18525 that's too complex.
18527 Compare scalar-mode aggregates as scalar-mode values. Using
18528 memcmp for them would be very inefficient at best, and is
18529 plain wrong if bitfields are involved. */
18530 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
18531 ret = GS_ERROR;
18532 else
18534 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
18536 /* Vector comparisons need no boolification. */
18537 if (TREE_CODE (type) == VECTOR_TYPE)
18538 goto expr_2;
18539 else if (!AGGREGATE_TYPE_P (type))
18541 tree org_type = TREE_TYPE (*expr_p);
18542 *expr_p = gimple_boolify (*expr_p);
18543 if (!useless_type_conversion_p (org_type,
18544 TREE_TYPE (*expr_p)))
18546 *expr_p = fold_convert_loc (input_location,
18547 org_type, *expr_p);
18548 ret = GS_OK;
18550 else
18551 goto expr_2;
18553 else if (TYPE_MODE (type) != BLKmode)
18554 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
18555 else
18556 ret = gimplify_variable_sized_compare (expr_p);
18558 break;
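/* Illustration (hedged; C itself has no aggregate "=="): front ends
   that compare whole objects, e.g. record equality in Ada, reach this
   code. A BLKmode aggregate comparison is lowered to a memcmp-style
   call via gimplify_variable_sized_compare, while a small struct with
   a scalar TYPE_MODE (say, one that fits in SImode) is compared as a
   scalar value instead.  */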
18560 /* If *EXPR_P does not need to be special-cased, handle it
18561 according to its class. */
18562 case tcc_unary:
18563 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18564 post_p, is_gimple_val, fb_rvalue);
18565 break;
18567 case tcc_binary:
18568 expr_2:
18570 enum gimplify_status r0, r1;
18572 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18573 post_p, is_gimple_val, fb_rvalue);
18574 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
18575 post_p, is_gimple_val, fb_rvalue);
18577 ret = MIN (r0, r1);
18578 break;
18581 expr_3:
18583 enum gimplify_status r0, r1, r2;
18585 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18586 post_p, is_gimple_val, fb_rvalue);
18587 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
18588 post_p, is_gimple_val, fb_rvalue);
18589 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
18590 post_p, is_gimple_val, fb_rvalue);
18592 ret = MIN (MIN (r0, r1), r2);
18593 break;
18596 case tcc_declaration:
18597 case tcc_constant:
18598 ret = GS_ALL_DONE;
18599 goto dont_recalculate;
18601 default:
18602 gcc_unreachable ();
18605 recalculate_side_effects (*expr_p);
18607 dont_recalculate:
18608 break;
18611 gcc_assert (*expr_p || ret != GS_OK);
18613 while (ret == GS_OK);
18615 /* If we encountered an error_mark somewhere nested inside, either
18616 stub out the statement or propagate the error back out. */
18617 if (ret == GS_ERROR)
18619 if (is_statement)
18620 *expr_p = NULL;
18621 goto out;
18624 /* This was only valid as a return value from the langhook, which
18625 we handled. Make sure it doesn't escape from any other context. */
18626 gcc_assert (ret != GS_UNHANDLED);
18628 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
18630 /* We aren't looking for a value, and we don't have a valid
18631 statement. If it doesn't have side-effects, throw it away.
18632 We can also get here with code such as "*&&L;", where L is
18633 a LABEL_DECL that is marked as FORCED_LABEL. */
18634 if (TREE_CODE (*expr_p) == LABEL_DECL
18635 || !TREE_SIDE_EFFECTS (*expr_p))
18636 *expr_p = NULL;
18637 else if (!TREE_THIS_VOLATILE (*expr_p))
18639 /* This is probably a _REF that contains something nested that
18640 has side effects. Recurse through the operands to find it. */
18641 enum tree_code code = TREE_CODE (*expr_p);
18643 switch (code)
18645 case COMPONENT_REF:
18646 case REALPART_EXPR:
18647 case IMAGPART_EXPR:
18648 case VIEW_CONVERT_EXPR:
18649 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
18650 gimple_test_f, fallback);
18651 break;
18653 case ARRAY_REF:
18654 case ARRAY_RANGE_REF:
18655 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
18656 gimple_test_f, fallback);
18657 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
18658 gimple_test_f, fallback);
18659 break;
18661 default:
18662 /* Anything else with side-effects must be converted to
18663 a valid statement before we get here. */
18664 gcc_unreachable ();
18667 *expr_p = NULL;
18669 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
18670 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
18671 && !is_empty_type (TREE_TYPE (*expr_p)))
18673 /* Historically, the compiler has treated a bare reference
18674 to a non-BLKmode volatile lvalue as forcing a load. */
18675 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
18677 /* Normally, we do not want to create a temporary for a
18678 TREE_ADDRESSABLE type because such a type should not be
18679 copied by bitwise-assignment. However, we make an
18680 exception here, as all we are doing here is ensuring that
18681 we read the bytes that make up the type. We use
18682 create_tmp_var_raw because create_tmp_var will abort when
18683 given a TREE_ADDRESSABLE type. */
18684 tree tmp = create_tmp_var_raw (type, "vol");
18685 gimple_add_tmp_var (tmp);
18686 gimplify_assign (tmp, *expr_p, pre_p);
18687 *expr_p = NULL;
18689 else
18690 /* We can't do anything useful with a volatile reference to
18691 an incomplete type, so just throw it away. Likewise for
18692 a BLKmode type, since any implicit inner load should
18693 already have been turned into an explicit one by the
18694 gimplification process. */
18695 *expr_p = NULL;
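/* The classic case is a C expression statement that reads a volatile:

     volatile int v;
     void f (void) { v; }

   which is gimplified to "vol.N = v;" so the load is preserved, whereas
   a volatile reference of incomplete or BLKmode type is simply dropped
   (an illustrative example; the temporary name is compiler-generated).  */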
18698 /* If we are gimplifying at the statement level, we're done. Tack
18699 everything together and return. */
18700 if (fallback == fb_none || is_statement)
18702 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
18703 it out for GC to reclaim it. */
18704 *expr_p = NULL_TREE;
18706 if (!gimple_seq_empty_p (internal_pre)
18707 || !gimple_seq_empty_p (internal_post))
18709 gimplify_seq_add_seq (&internal_pre, internal_post);
18710 gimplify_seq_add_seq (pre_p, internal_pre);
18713 /* The result of gimplifying *EXPR_P is going to be the last few
18714 statements in *PRE_P and *POST_P. Add location information
18715 to all the statements that were added by the gimplification
18716 helpers. */
18717 if (!gimple_seq_empty_p (*pre_p))
18718 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
18720 if (!gimple_seq_empty_p (*post_p))
18721 annotate_all_with_location_after (*post_p, post_last_gsi,
18722 input_location);
18724 goto out;
18727 #ifdef ENABLE_GIMPLE_CHECKING
18728 if (*expr_p)
18730 enum tree_code code = TREE_CODE (*expr_p);
18731 /* These expressions should already be in gimple IR form. */
18732 gcc_assert (code != MODIFY_EXPR
18733 && code != ASM_EXPR
18734 && code != BIND_EXPR
18735 && code != CATCH_EXPR
18736 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
18737 && code != EH_FILTER_EXPR
18738 && code != GOTO_EXPR
18739 && code != LABEL_EXPR
18740 && code != LOOP_EXPR
18741 && code != SWITCH_EXPR
18742 && code != TRY_FINALLY_EXPR
18743 && code != EH_ELSE_EXPR
18744 && code != OACC_PARALLEL
18745 && code != OACC_KERNELS
18746 && code != OACC_SERIAL
18747 && code != OACC_DATA
18748 && code != OACC_HOST_DATA
18749 && code != OACC_DECLARE
18750 && code != OACC_UPDATE
18751 && code != OACC_ENTER_DATA
18752 && code != OACC_EXIT_DATA
18753 && code != OACC_CACHE
18754 && code != OMP_CRITICAL
18755 && code != OMP_FOR
18756 && code != OACC_LOOP
18757 && code != OMP_MASTER
18758 && code != OMP_MASKED
18759 && code != OMP_TASKGROUP
18760 && code != OMP_ORDERED
18761 && code != OMP_PARALLEL
18762 && code != OMP_SCAN
18763 && code != OMP_SECTIONS
18764 && code != OMP_SECTION
18765 && code != OMP_STRUCTURED_BLOCK
18766 && code != OMP_SINGLE
18767 && code != OMP_SCOPE);
18769 #endif
18771 /* Otherwise we're gimplifying a subexpression, so the resulting
18772 value is interesting. If it's a valid operand that matches
18773 GIMPLE_TEST_F, we're done. Unless we are handling some
18774 post-effects internally; if that's the case, we need to copy into
18775 a temporary before adding the post-effects to POST_P. */
18776 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
18777 goto out;
18779 /* Otherwise, we need to create a new temporary for the gimplified
18780 expression. */
18782 /* We can't return an lvalue if we have an internal postqueue. The
18783 object the lvalue refers to would (probably) be modified by the
18784 postqueue; we need to copy the value out first, which means an
18785 rvalue. */
18786 if ((fallback & fb_lvalue)
18787 && gimple_seq_empty_p (internal_post)
18788 && is_gimple_addressable (*expr_p))
18790 /* An lvalue will do. Take the address of the expression, store it
18791 in a temporary, and replace the expression with a MEM_REF of
18792 that temporary. */
18793 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
18794 unsigned int ref_align = get_object_alignment (*expr_p);
18795 tree ref_type = TREE_TYPE (*expr_p);
18796 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
18797 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
18798 if (TYPE_ALIGN (ref_type) != ref_align)
18799 ref_type = build_aligned_type (ref_type, ref_align);
18800 *expr_p = build2 (MEM_REF, ref_type,
18801 tmp, build_zero_cst (ref_alias_type));
18803 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
18805 /* An rvalue will do. Assign the gimplified expression into a
18806 new temporary TMP and replace the original expression with
18807 TMP. First, make sure that the expression has a type so that
18808 it can be assigned into a temporary. */
18809 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
18810 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
18812 else
18814 #ifdef ENABLE_GIMPLE_CHECKING
18815 if (!(fallback & fb_mayfail))
18817 fprintf (stderr, "gimplification failed:\n");
18818 print_generic_expr (stderr, *expr_p);
18819 debug_tree (*expr_p);
18820 internal_error ("gimplification failed");
18822 #endif
18823 gcc_assert (fallback & fb_mayfail);
18825 /* If this is an asm statement, and the user asked for the
18826 impossible, don't die. Fail and let gimplify_asm_expr
18827 issue an error. */
18828 ret = GS_ERROR;
18829 goto out;
18832 /* Make sure the temporary matches our predicate. */
18833 gcc_assert ((*gimple_test_f) (*expr_p));
18835 if (!gimple_seq_empty_p (internal_post))
18837 annotate_all_with_location (internal_post, input_location);
18838 gimplify_seq_add_seq (pre_p, internal_post);
18841 out:
18842 input_location = saved_location;
18843 return ret;
18846 /* Like gimplify_expr but make sure the gimplified result is not itself
18847 an SSA name (but a decl instead, if it would be). Temporaries required
18848 by evaluating *EXPR_P may still be SSA names. */
18850 static enum gimplify_status
18851 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
18852 bool (*gimple_test_f) (tree), fallback_t fallback,
18853 bool allow_ssa)
18855 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
18856 gimple_test_f, fallback);
18857 if (! allow_ssa
18858 && TREE_CODE (*expr_p) == SSA_NAME)
18859 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
18860 return ret;
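/* Usage note (an observation, not a new API): callers that store the
   result into decl or type fields, e.g. gimplify_one_sizepos below,
   pass ALLOW_SSA == false so that an SSA name result is first copied
   into a temporary variable.  */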
18863 /* Look through TYPE for variable-sized objects and gimplify each such
18864 size that we find. Add to LIST_P any statements generated. */
18866 void
18867 gimplify_type_sizes (tree type, gimple_seq *list_p)
18869 if (type == NULL || type == error_mark_node)
18870 return;
18872 const bool ignored_p
18873 = TYPE_NAME (type)
18874 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
18875 && DECL_IGNORED_P (TYPE_NAME (type));
18876 tree t;
18878 /* We first do the main variant, then copy into any other variants. */
18879 type = TYPE_MAIN_VARIANT (type);
18881 /* Avoid infinite recursion. */
18882 if (TYPE_SIZES_GIMPLIFIED (type))
18883 return;
18885 TYPE_SIZES_GIMPLIFIED (type) = 1;
18887 switch (TREE_CODE (type))
18889 case INTEGER_TYPE:
18890 case ENUMERAL_TYPE:
18891 case BOOLEAN_TYPE:
18892 case REAL_TYPE:
18893 case FIXED_POINT_TYPE:
18894 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
18895 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
18897 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
18899 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
18900 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
18902 break;
18904 case ARRAY_TYPE:
18905 /* These types may not have declarations, so handle them here. */
18906 gimplify_type_sizes (TREE_TYPE (type), list_p);
18907 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
18908 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
18909 with assigned stack slots, for -O1+ -g they should be tracked
18910 by VTA. */
18911 if (!ignored_p
18912 && TYPE_DOMAIN (type)
18913 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
18915 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
18916 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
18917 DECL_IGNORED_P (t) = 0;
18918 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
18919 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
18920 DECL_IGNORED_P (t) = 0;
18922 break;
18924 case RECORD_TYPE:
18925 case UNION_TYPE:
18926 case QUAL_UNION_TYPE:
18927 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
18928 if (TREE_CODE (field) == FIELD_DECL)
18930 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
18931 /* Likewise, ensure variable offsets aren't removed. */
18932 if (!ignored_p
18933 && (t = DECL_FIELD_OFFSET (field))
18934 && VAR_P (t)
18935 && DECL_ARTIFICIAL (t))
18936 DECL_IGNORED_P (t) = 0;
18937 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
18938 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
18939 gimplify_type_sizes (TREE_TYPE (field), list_p);
18941 break;
18943 case POINTER_TYPE:
18944 case REFERENCE_TYPE:
18945 /* We used to recurse on the pointed-to type here, which turned out to
18946 be incorrect because its definition might refer to variables not
18947 yet initialized at this point if a forward declaration is involved.
18949 It was actually useful for anonymous pointed-to types to ensure
18950 that the sizes evaluation dominates every possible later use of the
18951 values. Restricting to such types here would be safe since there
18952 is no possible forward declaration around, but would introduce an
18953 undesirable middle-end semantic to anonymity. We then defer to
18954 front-ends the responsibility of ensuring that the sizes are
18955 evaluated both early and late enough, e.g. by attaching artificial
18956 type declarations to the tree. */
18957 break;
18959 default:
18960 break;
18963 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
18964 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
18966 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
18968 TYPE_SIZE (t) = TYPE_SIZE (type);
18969 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
18970 TYPE_SIZES_GIMPLIFIED (t) = 1;
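/* For a C99 VLA such as

     void f (int n)
     {
       char a[n * 2];
     }

   this evaluates the size expressions of a's type into statements, so
   that TYPE_SIZE and the domain bounds end up referring to GIMPLE
   values (a sketch; the exact temporaries are compiler-generated).  */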
18974 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
18975 a size or position, has had all of its SAVE_EXPRs evaluated.
18976 We add any required statements to *STMT_P. */
18978 void
18979 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
18981 tree expr = *expr_p;
18983 /* We don't do anything if the value isn't there, is constant, or contains
18984 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
18985 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
18986 will want to replace it with a new variable, but that will cause problems
18987 if this type is from outside the function. It's OK to have that here. */
18988 if (expr == NULL_TREE
18989 || is_gimple_constant (expr)
18990 || VAR_P (expr)
18991 || CONTAINS_PLACEHOLDER_P (expr))
18992 return;
18994 *expr_p = unshare_expr (expr);
18996 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
18997 if the def vanishes. */
18998 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
19000 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
19001 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
19002 via gimplify_vla_decl even when all of their sizes are INTEGER_CSTs. */
19003 if (is_gimple_constant (*expr_p))
19004 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
19007 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
19008 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
19009 is true, also gimplify the parameters. */
19011 gbind *
19012 gimplify_body (tree fndecl, bool do_parms)
19014 location_t saved_location = input_location;
19015 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
19016 gimple *outer_stmt;
19017 gbind *outer_bind;
19019 timevar_push (TV_TREE_GIMPLIFY);
19021 init_tree_ssa (cfun);
19023 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
19024 gimplification. */
19025 default_rtl_profile ();
19027 gcc_assert (gimplify_ctxp == NULL);
19028 push_gimplify_context (true);
19030 if (flag_openacc || flag_openmp)
19032 gcc_assert (gimplify_omp_ctxp == NULL);
19033 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
19034 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
19037 /* Unshare most shared trees in the body and in that of any nested functions.
19038 It would seem we don't have to do this for nested functions because
19039 they are supposed to be output and then the outer function gimplified
19040 first, but the g++ front end doesn't always do it that way. */
19041 unshare_body (fndecl);
19042 unvisit_body (fndecl);
19044 /* Make sure input_location isn't set to something weird. */
19045 input_location = DECL_SOURCE_LOCATION (fndecl);
19047 /* Resolve callee-copies. This has to be done before processing
19048 the body so that DECL_VALUE_EXPR gets processed correctly. */
19049 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
19051 /* Gimplify the function's body. */
19052 seq = NULL;
19053 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
19054 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
19055 if (!outer_stmt)
19057 outer_stmt = gimple_build_nop ();
19058 gimplify_seq_add_stmt (&seq, outer_stmt);
19061 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
19062 not the case, wrap everything in a GIMPLE_BIND to make it so. */
19063 if (gimple_code (outer_stmt) == GIMPLE_BIND
19064 && (gimple_seq_first_nondebug_stmt (seq)
19065 == gimple_seq_last_nondebug_stmt (seq)))
19067 outer_bind = as_a <gbind *> (outer_stmt);
19068 if (gimple_seq_first_stmt (seq) != outer_stmt
19069 || gimple_seq_last_stmt (seq) != outer_stmt)
19071 /* If there are debug stmts before or after outer_stmt, move them
19072 inside of outer_bind body. */
19073 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
19074 gimple_seq second_seq = NULL;
19075 if (gimple_seq_first_stmt (seq) != outer_stmt
19076 && gimple_seq_last_stmt (seq) != outer_stmt)
19078 second_seq = gsi_split_seq_after (gsi);
19079 gsi_remove (&gsi, false);
19081 else if (gimple_seq_first_stmt (seq) != outer_stmt)
19082 gsi_remove (&gsi, false);
19083 else
19085 gsi_remove (&gsi, false);
19086 second_seq = seq;
19087 seq = NULL;
19089 gimple_seq_add_seq_without_update (&seq,
19090 gimple_bind_body (outer_bind));
19091 gimple_seq_add_seq_without_update (&seq, second_seq);
19092 gimple_bind_set_body (outer_bind, seq);
19095 else
19096 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
19098 DECL_SAVED_TREE (fndecl) = NULL_TREE;
19100 /* If we had callee-copies statements, insert them at the beginning
19101 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
19102 if (!gimple_seq_empty_p (parm_stmts))
19104 tree parm;
19106 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
19107 if (parm_cleanup)
19109 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
19110 GIMPLE_TRY_FINALLY);
19111 parm_stmts = NULL;
19112 gimple_seq_add_stmt (&parm_stmts, g);
19114 gimple_bind_set_body (outer_bind, parm_stmts);
19116 for (parm = DECL_ARGUMENTS (current_function_decl);
19117 parm; parm = DECL_CHAIN (parm))
19118 if (DECL_HAS_VALUE_EXPR_P (parm))
19120 DECL_HAS_VALUE_EXPR_P (parm) = 0;
19121 DECL_IGNORED_P (parm) = 0;
19125 if ((flag_openacc || flag_openmp || flag_openmp_simd)
19126 && gimplify_omp_ctxp)
19128 delete_omp_context (gimplify_omp_ctxp);
19129 gimplify_omp_ctxp = NULL;
19132 pop_gimplify_context (outer_bind);
19133 gcc_assert (gimplify_ctxp == NULL);
19135 if (flag_checking && !seen_error ())
19136 verify_gimple_in_seq (gimple_bind_body (outer_bind));
19138 timevar_pop (TV_TREE_GIMPLIFY);
19139 input_location = saved_location;
19141 return outer_bind;
19144 typedef char *char_p; /* For DEF_VEC_P. */
19146 /* Return whether we should exclude FNDECL from instrumentation. */
19148 static bool
19149 flag_instrument_functions_exclude_p (tree fndecl)
19151 vec<char_p> *v;
19153 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
19154 if (v && v->length () > 0)
19156 const char *name;
19157 int i;
19158 char *s;
19160 name = lang_hooks.decl_printable_name (fndecl, 1);
19161 FOR_EACH_VEC_ELT (*v, i, s)
19162 if (strstr (name, s) != NULL)
19163 return true;
19166 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
19167 if (v && v->length () > 0)
19169 const char *name;
19170 int i;
19171 char *s;
19173 name = DECL_SOURCE_FILE (fndecl);
19174 FOR_EACH_VEC_ELT (*v, i, s)
19175 if (strstr (name, s) != NULL)
19176 return true;
19179 return false;
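/* For example, compiling with

     -finstrument-functions-exclude-function-list=alloc,dealloc

   excludes every function whose printable name contains "alloc" or
   "dealloc" as a substring; the strstr calls above make this a
   substring match, not an exact-name match.  */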
19182 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
19183 If COND_VAR is not NULL, it is a boolean variable guarding the call to
19184 the instrumentation function. If STMT is not NULL, it is a statement
19185 to be executed just before the call to the instrumentation function. */
19187 static void
19188 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
19189 tree cond_var, gimple *stmt)
19191 /* The instrumentation hooks aren't going to call the instrumented
19192 function and the address they receive is expected to be matchable
19193 against symbol addresses. Make sure we don't create a trampoline,
19194 in case the current function is nested. */
19195 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
19196 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
19198 tree label_true, label_false;
19199 if (cond_var)
19201 label_true = create_artificial_label (UNKNOWN_LOCATION);
19202 label_false = create_artificial_label (UNKNOWN_LOCATION);
19203 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
19204 label_true, label_false);
19205 gimplify_seq_add_stmt (seq, cond);
19206 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
19207 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
19208 NOT_TAKEN));
19211 if (stmt)
19212 gimplify_seq_add_stmt (seq, stmt);
19214 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
19215 gcall *call = gimple_build_call (x, 1, integer_zero_node);
19216 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
19217 gimple_call_set_lhs (call, tmp_var);
19218 gimplify_seq_add_stmt (seq, call);
19219 x = builtin_decl_implicit (fncode);
19220 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
19221 gimplify_seq_add_stmt (seq, call);
19223 if (cond_var)
19224 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
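/* The sequence appended for the entry hook is roughly (a sketch, with
   hypothetical label names):

     if (cond_var == 0) goto run; else goto skip;
   run:
     [STMT, if any]
     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn_addr, return_addr);
   skip:

   where the conditional and labels are emitted only when COND_VAR is
   given.  */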
19227 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
19228 node for the function we want to gimplify.
19230 The resulting sequence of GIMPLE statements corresponding to the body
19231 of FNDECL is stored in FNDECL via gimple_set_body; nothing is returned. */
19233 void
19234 gimplify_function_tree (tree fndecl)
19236 gimple_seq seq;
19237 gbind *bind;
19239 gcc_assert (!gimple_body (fndecl));
19241 if (DECL_STRUCT_FUNCTION (fndecl))
19242 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
19243 else
19244 push_struct_function (fndecl);
19246 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
19247 if necessary. */
19248 cfun->curr_properties |= PROP_gimple_lva;
19250 if (asan_sanitize_use_after_scope ())
19251 asan_poisoned_variables = new hash_set<tree> ();
19252 bind = gimplify_body (fndecl, true);
19253 if (asan_poisoned_variables)
19255 delete asan_poisoned_variables;
19256 asan_poisoned_variables = NULL;
19259 /* The tree body of the function is no longer needed, replace it
19260 with the new GIMPLE body. */
19261 seq = NULL;
19262 gimple_seq_add_stmt (&seq, bind);
19263 gimple_set_body (fndecl, seq);
19265 /* If we're instrumenting function entry/exit, then prepend the call to
19266 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
19267 catch the exit hook. */
19268 /* ??? Add some way to ignore exceptions for this TFE. */
19269 if (flag_instrument_function_entry_exit
19270 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
19271 /* Do not instrument extern inline functions. */
19272 && !(DECL_DECLARED_INLINE_P (fndecl)
19273 && DECL_EXTERNAL (fndecl)
19274 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
19275 && !flag_instrument_functions_exclude_p (fndecl))
19277 gimple_seq body = NULL, cleanup = NULL;
19278 gassign *assign;
19279 tree cond_var;
19281 /* If -finstrument-functions-once is specified, generate:
19283 static volatile bool C.0 = false;
19284 bool tmp_called;
19286 tmp_called = C.0;
19287 if (!tmp_called)
19288 {
19289 C.0 = true;
19290 [call profiling enter function]
19291 }
19293 without specific protection for data races. */
19294 if (flag_instrument_function_entry_exit > 1)
19296 tree first_var
19297 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
19298 VAR_DECL,
19299 create_tmp_var_name ("C"),
19300 boolean_type_node);
19301 DECL_ARTIFICIAL (first_var) = 1;
19302 DECL_IGNORED_P (first_var) = 1;
19303 TREE_STATIC (first_var) = 1;
19304 TREE_THIS_VOLATILE (first_var) = 1;
19305 TREE_USED (first_var) = 1;
19306 DECL_INITIAL (first_var) = boolean_false_node;
19307 varpool_node::add (first_var);
19309 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
19310 assign = gimple_build_assign (cond_var, first_var);
19311 gimplify_seq_add_stmt (&body, assign);
19313 assign = gimple_build_assign (first_var, boolean_true_node);
19316 else
19318 cond_var = NULL_TREE;
19319 assign = NULL;
19322 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
19323 cond_var, assign);
19325 /* If -finstrument-functions-once is specified, generate:
19327 if (!tmp_called)
19328 [call profiling exit function]
19330 without specific protection for data races. */
19331 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
19332 cond_var, NULL);
19334 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
19335 gimplify_seq_add_stmt (&body, tf);
19336 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
19338 /* Replace the current function body with the body
19339 wrapped in the try/finally TF. */
19340 seq = NULL;
19341 gimple_seq_add_stmt (&seq, new_bind);
19342 gimple_set_body (fndecl, seq);
19343 bind = new_bind;
19346 if (sanitize_flags_p (SANITIZE_THREAD)
19347 && param_tsan_instrument_func_entry_exit)
19349 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
19350 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
19351 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
19352 /* Replace the current function body with the body
19353 wrapped in the try/finally TF. */
19354 seq = NULL;
19355 gimple_seq_add_stmt (&seq, new_bind);
19356 gimple_set_body (fndecl, seq);
19359 DECL_SAVED_TREE (fndecl) = NULL_TREE;
19360 cfun->curr_properties |= PROP_gimple_any;
19362 pop_cfun ();
19364 dump_function (TDI_gimple, fndecl);
19367 /* Return a dummy expression of type TYPE in order to keep going after an
19368 error. */
19370 static tree
19371 dummy_object (tree type)
19373 tree t = build_int_cst (build_pointer_type (type), 0);
19374 return build2 (MEM_REF, type, t, t);
19377 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
19378 builtin function, but a very special sort of operator. */
19380 enum gimplify_status
19381 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
19382 gimple_seq *post_p ATTRIBUTE_UNUSED)
19384 tree promoted_type, have_va_type;
19385 tree valist = TREE_OPERAND (*expr_p, 0);
19386 tree type = TREE_TYPE (*expr_p);
19387 tree t, tag, aptag;
19388 location_t loc = EXPR_LOCATION (*expr_p);
19390 /* Verify that valist is of the proper type. */
19391 have_va_type = TREE_TYPE (valist);
19392 if (have_va_type == error_mark_node)
19393 return GS_ERROR;
19394 have_va_type = targetm.canonical_va_list_type (have_va_type);
19395 if (have_va_type == NULL_TREE
19396 && POINTER_TYPE_P (TREE_TYPE (valist)))
19397 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
19398 have_va_type
19399 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
19400 gcc_assert (have_va_type != NULL_TREE);
19402 /* Generate a diagnostic for requesting data of a type that cannot
19403 be passed through `...' due to type promotion at the call site. */
19404 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
19405 != type)
19407 static bool gave_help;
19408 bool warned;
19409 /* Use the expansion point to handle cases such as passing bool (defined
19410 in a system header) through `...'. */
19411 location_t xloc
19412 = expansion_point_location_if_in_system_header (loc);
19414 /* Unfortunately, this is merely undefined, rather than a constraint
19415 violation, so we cannot make this an error. If this call is never
19416 executed, the program is still strictly conforming. */
19417 auto_diagnostic_group d;
19418 warned = warning_at (xloc, 0,
19419 "%qT is promoted to %qT when passed through %<...%>",
19420 type, promoted_type);
19421 if (!gave_help && warned)
19423 gave_help = true;
19424 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
19425 promoted_type, type);
19428 /* We can, however, treat "undefined" any way we please.
19429 Emit a trap (see the __builtin_trap call below) to encourage the
user to fix the program. */
19430 if (warned)
19431 inform (xloc, "if this code is reached, the program will abort");
19432 /* Before the abort, allow the evaluation of the va_list
19433 expression to exit or longjmp. */
19434 gimplify_and_add (valist, pre_p);
19435 t = build_call_expr_loc (loc,
19436 builtin_decl_implicit (BUILT_IN_TRAP), 0);
19437 gimplify_and_add (t, pre_p);
19439 /* This is dead code, but go ahead and finish so that the
19440 mode of the result comes out right. */
19441 *expr_p = dummy_object (type);
19442 return GS_ALL_DONE;
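/* For example,

     #include <stdarg.h>
     char g (va_list ap) { return va_arg (ap, char); }

   is diagnosed here, since char promotes to int when passed through
   "...", and the expression is replaced by a trap followed by a dummy
   object of the requested type.  */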
19445 tag = build_int_cst (build_pointer_type (type), 0);
19446 aptag = build_int_cst (TREE_TYPE (valist), 0);
19448 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
19449 valist, tag, aptag);
19451 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
19452 needs to be expanded. */
19453 cfun->curr_properties &= ~PROP_gimple_lva;
19455 return GS_OK;
19458 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
19460 DST/SRC are the destination and source respectively. You can pass
19461 ungimplified trees in DST or SRC, in which case they will be
19462 converted to a gimple operand if necessary.
19464 This function returns the newly created GIMPLE_ASSIGN tuple. */
19466 gimple *
19467 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
19469 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
19470 gimplify_and_add (t, seq_p);
19471 ggc_free (t);
19472 return gimple_seq_last_stmt (*seq_p);
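/* Example use (a sketch, assuming a non-TREE_ADDRESSABLE type so that
   create_tmp_var is valid): copy VAL into a fresh temporary and append
   the statements to SEQ:

     tree tmp = create_tmp_var (TREE_TYPE (val), "tmp");
     gimple *stmt = gimplify_assign (tmp, val, &seq);

   This mirrors the call made for forced volatile reads earlier in this
   file.  */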
19475 inline hashval_t
19476 gimplify_hasher::hash (const elt_t *p)
19478 tree t = p->val;
19479 return iterative_hash_expr (t, 0);
19482 inline bool
19483 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
19485 tree t1 = p1->val;
19486 tree t2 = p2->val;
19487 enum tree_code code = TREE_CODE (t1);
19489 if (TREE_CODE (t2) != code
19490 || TREE_TYPE (t1) != TREE_TYPE (t2))
19491 return false;
19493 if (!operand_equal_p (t1, t2, 0))
19494 return false;
19496 /* Only allow them to compare equal if they also hash equal; otherwise
19497 results are nondeterministic, and we fail bootstrap comparison. */
19498 gcc_checking_assert (hash (p1) == hash (p2));
19500 return true;