/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"  /* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "diagnostic.h"  /* For errorcount.  */
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"  /* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x00080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
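/* These flags are OR'ed together in the per-variable splay-tree entry,
   so one entry can carry a data-sharing class plus modifier bits; e.g. a
   variable named in an explicit firstprivate clause would typically be
   recorded as GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN.  (Hedged
   illustration only; the exact combination depends on the clause.)  */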
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx *c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}
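/* Accepting a bare CALL_EXPR in these predicates lets internal_get_tmp_var
   below gimplify "tmp = foo (x)" into a single GIMPLE_CALL whose LHS is
   the temporary, rather than routing the call result through yet another
   temporary first (illustrative example only).  */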
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
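/* For example, gimplifying "a + b" through get_formal_tmp_var when
   optimizing emits something like "D.1234 = a + b;" into *PRE_P and
   returns D.1234; a later request for an identical "a + b" tree can
   hand back the same temporary via the temp_htab hash table above
   (illustrative sketch; the temporary name is invented).  */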
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions,
   it is probably easier for a front-end to unshare the expressions manually.
   On the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
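/* As a concrete illustration (not taken from any front-end): if a
   front-end reused one size tree for both "int a[n];" and "int b[n];",
   gimplifying that size in place for A would also rewrite it inside B's
   declaration; copy_if_shared_r below duplicates the node on its second
   visit so each context gets a private copy.  */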
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
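/* For instance, with the GNU statement expression "({ foo (); 42; })"
   used as a value, the innermost value 42 is rewritten into an INIT_EXPR
   "retval = 42" (or into the assignment being pushed down when TEMP is
   given), the wrapper's type becomes void, and the returned temporary
   carries the value onward (illustrative sketch only).  */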
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
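/* The resulting pair looks roughly like

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   with the restore later placed in a GIMPLE_TRY_FINALLY cleanup by
   gimplify_bind_expr (illustrative dump; temporary name invented).  */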
/* Generate IFN_ASAN_MARK call that poisons shadow memory of a DECL
   variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate IFN_ASAN_MARK call that either poisons or unpoisons, depending
   on the POISON flag, the shadow memory of a DECL variable.  The call is
   inserted at the location identified by iterator IT; the BEFORE flag
   determines whether it is inserted before or after that point.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate IFN_ASAN_MARK internal call that, depending on the POISON flag,
   either poisons or unpoisons a DECL.  The created statement is appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES,
   depending on the POISON flag.  The created statements are appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must
		 specify an allocator clause unless a requires directive with
		 the dynamic_allocators clause is present in the same
		 compilation unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and type(C_funptr);
		     note that normal proc pointers are rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
		     hence, for some decls, a size variable is saved in the
		     attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr)) is set
		     and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
		     is set; it is used in a condition much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not properly nest
			 declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the GOMP_alloc
			 at the top, unless an allocator or size expression
			 requires placing it afterward; note that the size is
			 always later in generated code; for strings, no
			 size expr but still an expr might be available.
			 As LTO does not handle a statement list, 'sl' has
			 to be removed; done so by removing the attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);

		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
			 here; for C/C++ it will be added in the 'cleanup'
			 section after gimplification.  But Fortran already has
			 a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_EOL);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      ++e;
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside a target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 means the GOMP_free call has already been added above,
		 and "omp allocate" is removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_EOL);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
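/* The net effect for a block that needs cleanups is a GIMPLE_BIND whose
   body has the shape

     saved_stack.1 = __builtin_stack_save ();
     try
       {
	 ... gimplified body ...
       }
     finally
       {
	 x = {CLOBBER(eol)};
	 __builtin_stack_restore (saved_stack.1);
       }

   (illustrative GIMPLE dump; the stack save/restore pair appears only
   when the block allocates VLAs without calling alloca itself).  */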
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early return as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
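/* So a source-level "return a + b;" for a register-returned type is
   typically rewritten as

     D.2345 = a + b;
     return D.2345;

   with the temporary cached in gimplify_ctxp->return_temp and shared by
   every return in the function (illustrative dump; name invented).  */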
1893 /* Gimplify a variable-length array DECL. */
1895 static void
1896 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1898 /* This is a variable-sized decl. Simplify its size and mark it
1899 for deferred expansion. */
1900 tree t, addr, ptr_type;
1902 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1903 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1905 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1906 if (DECL_HAS_VALUE_EXPR_P (decl))
1907 return;
1909 /* All occurrences of this decl in final gimplified code will be
1910 replaced by indirection. Setting DECL_VALUE_EXPR does two
1911 things: First, it lets the rest of the gimplifier know what
1912 replacement to use. Second, it lets the debug info know
1913 where to find the value. */
1914 ptr_type = build_pointer_type (TREE_TYPE (decl));
1915 addr = create_tmp_var (ptr_type, get_name (decl));
1916 DECL_IGNORED_P (addr) = 0;
1917 t = build_fold_indirect_ref (addr);
1918 TREE_THIS_NOTRAP (t) = 1;
1919 SET_DECL_VALUE_EXPR (decl, t);
1920 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1922 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1923 max_int_size_in_bytes (TREE_TYPE (decl)));
1924 /* The call has been built for a variable-sized object. */
1925 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1926 t = fold_convert (ptr_type, t);
1927 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1929 gimplify_and_add (t, seq_p);
1931 /* Record the dynamic allocation associated with DECL if requested. */
1932 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1933 record_dynamic_alloc (decl);
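/* For illustration, a rough sketch (a.0 and sz are made-up names): for
   'int a[n];' the code above emits approximately

     a.0 = __builtin_alloca_with_align (sz, align);

   where sz is the gimplified DECL_SIZE_UNIT, and the DECL_VALUE_EXPR set
   above makes every later use of 'a' read as '*a.0'.  */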
1936 /* A helper function to be called via walk_tree. Mark all labels under *TP
1937 as being forced. To be called for DECL_INITIAL of static variables. */
1939 static tree
1940 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1942 if (TYPE_P (*tp))
1943 *walk_subtrees = 0;
1944 if (TREE_CODE (*tp) == LABEL_DECL)
1946 FORCED_LABEL (*tp) = 1;
1947 cfun->has_forced_label_in_static = 1;
1950 return NULL_TREE;
1953 /* Generate an initialization for the automatic variable DECL based on INIT_TYPE.

1954 Build a call to internal const function DEFERRED_INIT:
1955 1st argument: SIZE of the DECL;
1956 2nd argument: INIT_TYPE;
1957 3rd argument: NAME of the DECL;
1959 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1961 static void
1962 gimple_add_init_for_auto_var (tree decl,
1963 enum auto_init_type init_type,
1964 gimple_seq *seq_p)
1966 gcc_assert (auto_var_p (decl));
1967 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1968 location_t loc = EXPR_LOCATION (decl);
1969 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1971 tree init_type_node
1972 = build_int_cst (integer_type_node, (int) init_type);
1974 tree decl_name = NULL_TREE;
1975 if (DECL_NAME (decl))
1977 decl_name = build_string_literal (DECL_NAME (decl));
1979 else
1981 char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
1982 sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
1983 decl_name = build_string_literal (decl_name_anonymous);
1986 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
1987 TREE_TYPE (decl), 3,
1988 decl_size, init_type_node,
1989 decl_name);
1991 gimplify_assign (decl, call, seq_p);
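/* For illustration, a rough sketch: with -ftrivial-auto-var-init=zero,
   an uninitialized 'int i;' receives approximately

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   i.e. size 4, INIT_TYPE AUTO_INIT_ZERO (2) and the decl name; the call
   is turned into a real store (or removed) during later expansion.  */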
1994 /* Generate padding initialization for the automatic variable DECL.
1995 C guarantees that brace-init with fewer initializers than members
1996 of the aggregate will initialize the rest of the aggregate as if by
1997 static initialization. In turn, static initialization guarantees
1998 that padding is initialized to zero. So, we always initialize the
1999 padding to zeroes regardless of INIT_TYPE.
2000 To do the padding initialization, we insert a call to
2001 __builtin_clear_padding (&decl, for_auto_init = true).
2002 Note that we add the additional dummy argument 'for_auto_init' to
2003 __builtin_clear_padding to distinguish whether the call is for
2004 automatic variable initialization or not.  */
2006 static void
2007 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
2008 gimple_seq *seq_p)
2010 tree addr_of_decl = NULL_TREE;
2011 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
2013 if (is_vla)
2015 /* The temporary address variable for this VLA should have been
2016 created in gimplify_vla_decl. */
2017 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
2018 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
2019 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
2021 else
2023 mark_addressable (decl);
2024 addr_of_decl = build_fold_addr_expr (decl);
2027 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
2028 build_one_cst (TREE_TYPE (addr_of_decl)));
2029 gimplify_seq_add_stmt (seq_p, call);
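/* For illustration, a rough sketch: for 'long double d;' under
   -ftrivial-auto-var-init=pattern this appends approximately

     __builtin_clear_padding (&d, 1);

   where the non-zero dummy second argument marks the call as coming
   from automatic variable initialization.  */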
2032 /* Return true if the DECL needs to be automatically initialized by the
2033 compiler. */
2034 static bool
2035 is_var_need_auto_init (tree decl)
2037 if (auto_var_p (decl)
2038 && (TREE_CODE (decl) != VAR_DECL
2039 || !DECL_HARD_REGISTER (decl))
2040 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2041 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
2042 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
2043 && !is_empty_type (TREE_TYPE (decl)))
2044 return true;
2045 return false;
2048 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2049 and initialization explicit. */
2051 static enum gimplify_status
2052 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
2054 tree stmt = *stmt_p;
2055 tree decl = DECL_EXPR_DECL (stmt);
2057 *stmt_p = NULL_TREE;
2059 if (TREE_TYPE (decl) == error_mark_node)
2060 return GS_ERROR;
2062 if ((TREE_CODE (decl) == TYPE_DECL
2063 || VAR_P (decl))
2064 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
2066 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
2067 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2068 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
2071 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2072 in case its size expressions contain problematic nodes like CALL_EXPR. */
2073 if (TREE_CODE (decl) == TYPE_DECL
2074 && DECL_ORIGINAL_TYPE (decl)
2075 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
2077 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
2078 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
2079 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
2082 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
2084 tree init = DECL_INITIAL (decl);
2085 bool is_vla = false;
2086 /* Check whether the decl has an FE-created VALUE_EXPR here, BEFORE
2087 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
2088 If the decl has a VALUE_EXPR that was created by the FE (usually
2089 the C++ FE), it's a proxy variable whose VALUE_EXPR the FE has
2090 already initialized, so we should not initialize it again. */
2091 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
2093 poly_uint64 size;
2094 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
2095 || (!TREE_STATIC (decl)
2096 && flag_stack_check == GENERIC_STACK_CHECK
2097 && maybe_gt (size,
2098 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
2100 gimplify_vla_decl (decl, seq_p);
2101 is_vla = true;
2104 if (asan_poisoned_variables
2105 && !is_vla
2106 && TREE_ADDRESSABLE (decl)
2107 && !TREE_STATIC (decl)
2108 && !DECL_HAS_VALUE_EXPR_P (decl)
2109 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
2110 && dbg_cnt (asan_use_after_scope)
2111 && !gimplify_omp_ctxp
2112 /* GNAT introduces temporaries to hold return values of calls in
2113 initializers of variables defined in other units, so the
2114 declaration of the variable is discarded completely. We do not
2115 want to issue poison calls for such dropped variables. */
2116 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
2117 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
2119 asan_poisoned_variables->add (decl);
2120 asan_poison_variable (decl, false, seq_p);
2121 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
2122 gimplify_ctxp->live_switch_vars->add (decl);
2125 /* Some front ends do not explicitly declare all anonymous
2126 artificial variables. We compensate here by declaring the
2127 variables, though it would be better if the front ends would
2128 explicitly declare them. */
2129 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
2130 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
2131 gimple_add_tmp_var (decl);
2133 if (init && init != error_mark_node)
2135 if (!TREE_STATIC (decl))
2137 DECL_INITIAL (decl) = NULL_TREE;
2138 init = build2 (INIT_EXPR, void_type_node, decl, init);
2139 gimplify_and_add (init, seq_p);
2140 ggc_free (init);
2141 /* Clear TREE_READONLY if we really have an initialization. */
2142 if (!DECL_INITIAL (decl)
2143 && !omp_privatize_by_reference (decl))
2144 TREE_READONLY (decl) = 0;
2146 else
2147 /* We must still examine initializers for static variables
2148 as they may contain a label address. */
2149 walk_tree (&init, force_labels_r, NULL, NULL);
2151 /* When there is no explicit initializer and the user requested it,
2152 insert an artificial initializer for this automatic
2153 variable. */
2154 else if (is_var_need_auto_init (decl)
2155 && !decl_had_value_expr_p)
2157 gimple_add_init_for_auto_var (decl,
2158 flag_auto_var_init,
2159 seq_p);
2160 /* Expanding the above call to .DEFERRED_INIT applies
2161 block initialization to the whole space covered by this variable.
2162 As a result, all the padding is initialized to zeroes
2163 for zero initialization and to the 0xFE byte-repeatable pattern for
2164 pattern initialization.
2165 In order to make the padding zeroes for pattern init, we
2166 add a call to __builtin_clear_padding to clear the
2167 padding to zero, compatible with Clang's behavior.
2168 We cannot insert this call if the variable is a gimple register,
2169 since __builtin_clear_padding takes the address of the
2170 variable. As a result, if a long double/_Complex long double
2171 variable is later spilled to the stack, its padding is 0xFE. */
2172 if (flag_auto_var_init == AUTO_INIT_PATTERN
2173 && !is_gimple_reg (decl)
2174 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
2175 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
2179 return GS_ALL_DONE;
2182 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2183 and replacing the LOOP_EXPR with goto, but if the loop contains an
2184 EXIT_EXPR, we need to append a label for it to jump to. */
2186 static enum gimplify_status
2187 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2189 tree saved_label = gimplify_ctxp->exit_label;
2190 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2192 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
2194 gimplify_ctxp->exit_label = NULL_TREE;
2196 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
2198 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
2200 if (gimplify_ctxp->exit_label)
2201 gimplify_seq_add_stmt (pre_p,
2202 gimple_build_label (gimplify_ctxp->exit_label));
2204 gimplify_ctxp->exit_label = saved_label;
2206 *expr_p = NULL;
2207 return GS_ALL_DONE;
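/* For illustration, a rough sketch (label names are made up):
   'LOOP_EXPR <body>' becomes

     <start>:
     body;
     goto <start>;
     <exit>:

   with the <exit> label emitted only if the body contained an
   EXIT_EXPR.  */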
2210 /* Gimplify a statement list onto a sequence. These may be created either
2211 by an enlightened front-end, or by shortcut_cond_expr. */
2213 static enum gimplify_status
2214 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2216 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2218 tree_stmt_iterator i = tsi_start (*expr_p);
2220 while (!tsi_end_p (i))
2222 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2223 tsi_delink (&i);
2226 if (temp)
2228 *expr_p = temp;
2229 return GS_OK;
2232 return GS_ALL_DONE;
2236 /* Emit a warning for the unreachable statement STMT if needed.
2237 Return the statement itself when the warning is emitted, otherwise
2238 return NULL. */
2239 static gimple *
2240 emit_warn_switch_unreachable (gimple *stmt)
2242 if (gimple_code (stmt) == GIMPLE_GOTO
2243 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2244 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2245 /* Don't warn for compiler-generated gotos. These occur
2246 in Duff's devices, for example. */
2247 return NULL;
2248 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2249 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2250 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2251 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2252 || (is_gimple_assign (stmt)
2253 && gimple_assign_single_p (stmt)
2254 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2255 && gimple_call_internal_p (
2256 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2257 IFN_DEFERRED_INIT))))
2258 /* Don't warn for compiler-generated initializations for
2259 -ftrivial-auto-var-init.
2260 There are 3 cases:
2261 case 1: a call to .DEFERRED_INIT;
2262 case 2: a call to __builtin_clear_padding whose 2nd argument is
2263 present and non-zero;
2264 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2265 that has the LHS of the .DEFERRED_INIT as its RHS, as follows:
2266 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2267 i1 = _1. */
2268 return NULL;
2269 else
2270 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2271 "statement will never be executed");
2272 return stmt;
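/* For illustration, a rough sketch of code this diagnoses:

     switch (i)
       {
         i = 0;
       case 1:
         break;
       }

   The assignment 'i = 0;' sits between the controlling expression and
   the first case label, so it can never be executed.  */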
2275 /* Callback for walk_gimple_seq. */
2277 static tree
2278 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2279 bool *handled_ops_p,
2280 struct walk_stmt_info *wi)
2282 gimple *stmt = gsi_stmt (*gsi_p);
2283 bool unreachable_issued = wi->info != NULL;
2285 *handled_ops_p = true;
2286 switch (gimple_code (stmt))
2288 case GIMPLE_TRY:
2289 /* A compiler-generated cleanup or a user-written try block.
2290 If it's empty, don't dive into it--that would result in
2291 worse location info. */
2292 if (gimple_try_eval (stmt) == NULL)
2294 if (warn_switch_unreachable && !unreachable_issued)
2295 wi->info = emit_warn_switch_unreachable (stmt);
2297 /* Stop when the auto-var-init warning is not on. */
2298 if (!warn_trivial_auto_var_init)
2299 return integer_zero_node;
2301 /* Fall through. */
2302 case GIMPLE_BIND:
2303 case GIMPLE_CATCH:
2304 case GIMPLE_EH_FILTER:
2305 case GIMPLE_TRANSACTION:
2306 /* Walk the sub-statements. */
2307 *handled_ops_p = false;
2308 break;
2310 case GIMPLE_DEBUG:
2311 /* Ignore these. We may generate them before declarations that
2312 are never executed. If there's something to warn about,
2313 there will be non-debug stmts too, and we'll catch those. */
2314 break;
2316 case GIMPLE_LABEL:
2317 /* Stop at the first label. */
2318 return integer_zero_node;
2319 case GIMPLE_CALL:
2320 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2322 *handled_ops_p = false;
2323 break;
2325 if (warn_trivial_auto_var_init
2326 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2327 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2330 /* Get the variable name from the 3rd argument of the call. */
2330 tree var_name = gimple_call_arg (stmt, 2);
2331 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2332 const char *var_name_str = TREE_STRING_POINTER (var_name);
2334 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2335 "%qs cannot be initialized with"
2336 "%<-ftrivial-auto-var_init%>",
2337 var_name_str);
2338 break;
2341 /* Fall through. */
2342 default:
2343 /* Check the first "real" statement (not a decl/lexical scope/...) and
2344 issue a warning if needed. */
2345 if (warn_switch_unreachable && !unreachable_issued)
2346 wi->info = emit_warn_switch_unreachable (stmt);
2347 /* Stop when the auto-var-init warning is not on. */
2348 if (!warn_trivial_auto_var_init)
2349 return integer_zero_node;
2350 break;
2352 return NULL_TREE;
2356 /* Possibly warn about unreachable statements between a switch's controlling
2357 expression and the first case. Also warn when -ftrivial-auto-var-init
2358 cannot initialize an auto variable in such a situation.
2359 SEQ is the body of a switch expression. */
2361 static void
2362 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2364 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2365 /* This warning doesn't play well with Fortran when optimizations
2366 are on. */
2367 || lang_GNU_Fortran ()
2368 || seq == NULL)
2369 return;
2371 struct walk_stmt_info wi;
2373 memset (&wi, 0, sizeof (wi));
2374 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2378 /* A label entry that pairs label and a location. */
2379 struct label_entry
2381 tree label;
2382 location_t loc;
2385 /* Find LABEL in vector of label entries VEC. */
2387 static struct label_entry *
2388 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2390 unsigned int i;
2391 struct label_entry *l;
2393 FOR_EACH_VEC_ELT (*vec, i, l)
2394 if (l->label == label)
2395 return l;
2396 return NULL;
2399 /* Return true if LABEL, a LABEL_DECL, represents a case label
2400 in a vector of labels CASES. */
2402 static bool
2403 case_label_p (const vec<tree> *cases, tree label)
2405 unsigned int i;
2406 tree l;
2408 FOR_EACH_VEC_ELT (*cases, i, l)
2409 if (CASE_LABEL (l) == label)
2410 return true;
2411 return false;
2414 /* Find the last nondebug statement in a scope STMT. */
2416 static gimple *
2417 last_stmt_in_scope (gimple *stmt)
2419 if (!stmt)
2420 return NULL;
2422 switch (gimple_code (stmt))
2424 case GIMPLE_BIND:
2426 gbind *bind = as_a <gbind *> (stmt);
2427 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2428 return last_stmt_in_scope (stmt);
2431 case GIMPLE_TRY:
2433 gtry *try_stmt = as_a <gtry *> (stmt);
2434 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2435 gimple *last_eval = last_stmt_in_scope (stmt);
2436 if (gimple_stmt_may_fallthru (last_eval)
2437 && (last_eval == NULL
2438 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2439 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2441 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2442 return last_stmt_in_scope (stmt);
2444 else
2445 return last_eval;
2448 case GIMPLE_DEBUG:
2449 gcc_unreachable ();
2451 default:
2452 return stmt;
2456 /* Collect labels that may fall through into LABELS and return the statement
2457 preceding another case label, or a user-defined label. Store a location
2458 useful to give warnings at *PREVLOC (usually the location of the returned
2459 statement or of its surrounding scope). */
2461 static gimple *
2462 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2463 auto_vec <struct label_entry> *labels,
2464 location_t *prevloc)
2466 gimple *prev = NULL;
2468 *prevloc = UNKNOWN_LOCATION;
2471 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2473 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2474 which starts on a GIMPLE_SWITCH and ends with a break label.
2475 Handle that as a single statement that can fall through. */
2476 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2477 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2478 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2479 if (last
2480 && gimple_code (first) == GIMPLE_SWITCH
2481 && gimple_code (last) == GIMPLE_LABEL)
2483 tree label = gimple_label_label (as_a <glabel *> (last));
2484 if (SWITCH_BREAK_LABEL_P (label))
2486 prev = bind;
2487 gsi_next (gsi_p);
2488 continue;
2492 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2493 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2495 /* Nested scope. Only look at the last statement of
2496 the innermost scope. */
2497 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2498 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2499 if (last)
2501 prev = last;
2502 /* It might be a label without a location. Use the
2503 location of the scope then. */
2504 if (!gimple_has_location (prev))
2505 *prevloc = bind_loc;
2507 gsi_next (gsi_p);
2508 continue;
2511 /* Ifs are tricky. */
2512 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2514 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2515 tree false_lab = gimple_cond_false_label (cond_stmt);
2516 location_t if_loc = gimple_location (cond_stmt);
2518 /* If we have e.g.
2519 if (i > 1) goto <D.2259>; else goto D;
2520 we can't do much with the else-branch. */
2521 if (!DECL_ARTIFICIAL (false_lab))
2522 break;
2524 /* Go on until the false label, then one step back. */
2525 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2527 gimple *stmt = gsi_stmt (*gsi_p);
2528 if (gimple_code (stmt) == GIMPLE_LABEL
2529 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2530 break;
2533 /* Not found? Oops. */
2534 if (gsi_end_p (*gsi_p))
2535 break;
2537 /* A dead label can't fall through. */
2538 if (!UNUSED_LABEL_P (false_lab))
2540 struct label_entry l = { false_lab, if_loc };
2541 labels->safe_push (l);
2544 /* Go to the last statement of the then branch. */
2545 gsi_prev (gsi_p);
2547 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2548 <D.1759>:
2549 <stmt>;
2550 goto <D.1761>;
2551 <D.1760>:
2553 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2554 && !gimple_has_location (gsi_stmt (*gsi_p)))
2556 /* Look at the preceding statement; it might be
2557 an attribute fallthrough, in which case don't warn. */
2558 gsi_prev (gsi_p);
2559 bool fallthru_before_dest
2560 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2561 gsi_next (gsi_p);
2562 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2563 if (!fallthru_before_dest)
2565 struct label_entry l = { goto_dest, if_loc };
2566 labels->safe_push (l);
2569 /* This case is about
2570 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2571 <D.2022>:
2572 n = n + 1; // #1
2573 <D.2023>: // #2
2574 <D.1988>: // #3
2575 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2576 through to #3. So set PREV to #1. */
2577 else if (UNUSED_LABEL_P (false_lab))
2578 prev = gsi_stmt (*gsi_p);
2580 /* And move back. */
2581 gsi_next (gsi_p);
2584 /* Remember the last statement. Skip labels that are of no interest
2585 to us. */
2586 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2588 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2589 if (find_label_entry (labels, label))
2590 prev = gsi_stmt (*gsi_p);
2592 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2594 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2596 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2597 prev = gsi_stmt (*gsi_p);
2598 gsi_next (gsi_p);
2600 while (!gsi_end_p (*gsi_p)
2601 /* Stop if we find a case or a user-defined label. */
2602 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2603 || !gimple_has_location (gsi_stmt (*gsi_p))));
2605 if (prev && gimple_has_location (prev))
2606 *prevloc = gimple_location (prev);
2607 return prev;
2610 /* Return true if the switch fallthrough warning should occur. LABEL is
2611 the label statement that we're falling through to. */
2613 static bool
2614 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2616 gimple_stmt_iterator gsi = *gsi_p;
2618 /* Don't warn if the label is marked with a "falls through" comment. */
2619 if (FALLTHROUGH_LABEL_P (label))
2620 return false;
2622 /* Don't warn for non-case labels followed by a statement:
2623 case 0:
2624 foo ();
2625 label:
2626 bar ();
2627 as these are likely intentional. */
2628 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2630 tree l;
2631 while (!gsi_end_p (gsi)
2632 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2633 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2634 && !case_label_p (&gimplify_ctxp->case_labels, l))
2635 gsi_next_nondebug (&gsi);
2636 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2637 return false;
2640 /* Don't warn for terminated branches, i.e. when the subsequent case label
2641 immediately breaks. */
2642 gsi = *gsi_p;
2644 /* Skip all immediately following labels. */
2645 while (!gsi_end_p (gsi)
2646 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2647 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2648 gsi_next_nondebug (&gsi);
2650 /* { ... something; default:; } */
2651 if (gsi_end_p (gsi)
2652 /* { ... something; default: break; } or
2653 { ... something; default: goto L; } */
2654 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2655 /* { ... something; default: return; } */
2656 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2657 return false;
2659 return true;
2662 /* Callback for walk_gimple_seq. */
2664 static tree
2665 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2666 struct walk_stmt_info *)
2668 gimple *stmt = gsi_stmt (*gsi_p);
2670 *handled_ops_p = true;
2671 switch (gimple_code (stmt))
2673 case GIMPLE_TRY:
2674 case GIMPLE_BIND:
2675 case GIMPLE_CATCH:
2676 case GIMPLE_EH_FILTER:
2677 case GIMPLE_TRANSACTION:
2678 /* Walk the sub-statements. */
2679 *handled_ops_p = false;
2680 break;
2682 /* Find a sequence of the form:
2684 GIMPLE_LABEL
2685 [...]
2686 <may fallthru stmt>
2687 GIMPLE_LABEL
2689 and possibly warn. */
2690 case GIMPLE_LABEL:
2692 /* Found a label. Skip all immediately following labels. */
2693 while (!gsi_end_p (*gsi_p)
2694 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2695 gsi_next_nondebug (gsi_p);
2697 /* There might be no more statements. */
2698 if (gsi_end_p (*gsi_p))
2699 return integer_zero_node;
2701 /* Vector of labels that fall through. */
2702 auto_vec <struct label_entry> labels;
2703 location_t prevloc;
2704 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2706 /* There might be no more statements. */
2707 if (gsi_end_p (*gsi_p))
2708 return integer_zero_node;
2710 gimple *next = gsi_stmt (*gsi_p);
2711 tree label;
2712 /* If what follows is a label, then we may have a fallthrough. */
2713 if (gimple_code (next) == GIMPLE_LABEL
2714 && gimple_has_location (next)
2715 && (label = gimple_label_label (as_a <glabel *> (next)))
2716 && prev != NULL)
2718 struct label_entry *l;
2719 bool warned_p = false;
2720 auto_diagnostic_group d;
2721 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2722 /* Quiet. */;
2723 else if (gimple_code (prev) == GIMPLE_LABEL
2724 && (label = gimple_label_label (as_a <glabel *> (prev)))
2725 && (l = find_label_entry (&labels, label)))
2726 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2727 "this statement may fall through");
2728 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2729 /* Try to be clever and don't warn when the statement
2730 can't actually fall through. */
2731 && gimple_stmt_may_fallthru (prev)
2732 && prevloc != UNKNOWN_LOCATION)
2733 warned_p = warning_at (prevloc,
2734 OPT_Wimplicit_fallthrough_,
2735 "this statement may fall through");
2736 if (warned_p)
2737 inform (gimple_location (next), "here");
2739 /* Mark this label as processed so as to prevent multiple
2740 warnings in nested switches. */
2741 FALLTHROUGH_LABEL_P (label) = true;
2743 /* So that next warn_implicit_fallthrough_r will start looking for
2744 a new sequence starting with this label. */
2745 gsi_prev (gsi_p);
2748 break;
2749 default:
2750 break;
2752 return NULL_TREE;
2755 /* Warn when a switch case falls through. */
2757 static void
2758 maybe_warn_implicit_fallthrough (gimple_seq seq)
2760 if (!warn_implicit_fallthrough)
2761 return;
2763 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2764 if (!(lang_GNU_C ()
2765 || lang_GNU_CXX ()
2766 || lang_GNU_OBJC ()))
2767 return;
2769 struct walk_stmt_info wi;
2770 memset (&wi, 0, sizeof (wi));
2771 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
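/* For illustration, a rough sketch of what -Wimplicit-fallthrough
   diagnoses:

     switch (i)
       {
       case 1:
         foo ();
       case 2:
         bar ();
       }

   The warning points at 'foo ();' ("this statement may fall through")
   with a note at 'case 2:' ("here").  */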
2774 /* Callback for walk_gimple_seq. */
2776 static tree
2777 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2778 struct walk_stmt_info *wi)
2780 gimple *stmt = gsi_stmt (*gsi_p);
2782 *handled_ops_p = true;
2783 switch (gimple_code (stmt))
2785 case GIMPLE_TRY:
2786 case GIMPLE_BIND:
2787 case GIMPLE_CATCH:
2788 case GIMPLE_EH_FILTER:
2789 case GIMPLE_TRANSACTION:
2790 /* Walk the sub-statements. */
2791 *handled_ops_p = false;
2792 break;
2793 case GIMPLE_CALL:
2794 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2796 gsi_remove (gsi_p, true);
2797 if (gsi_end_p (*gsi_p))
2799 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2800 return integer_zero_node;
2803 bool found = false;
2804 location_t loc = gimple_location (stmt);
2806 gimple_stmt_iterator gsi2 = *gsi_p;
2807 stmt = gsi_stmt (gsi2);
2808 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2810 /* Go on until the artificial label. */
2811 tree goto_dest = gimple_goto_dest (stmt);
2812 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2814 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2815 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2816 == goto_dest)
2817 break;
2820 /* Not found? Stop. */
2821 if (gsi_end_p (gsi2))
2822 break;
2824 /* Look one past it. */
2825 gsi_next (&gsi2);
2828 /* We're looking for a case label or default label here. */
2829 while (!gsi_end_p (gsi2))
2831 stmt = gsi_stmt (gsi2);
2832 if (gimple_code (stmt) == GIMPLE_LABEL)
2834 tree label = gimple_label_label (as_a <glabel *> (stmt));
2835 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2837 found = true;
2838 break;
2841 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2843 else if (!is_gimple_debug (stmt))
2844 /* Anything else is not expected. */
2845 break;
2846 gsi_next (&gsi2);
2848 if (!found)
2849 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2850 "a case label or default label");
2852 break;
2853 default:
2854 break;
2856 return NULL_TREE;
2859 /* Expand all FALLTHROUGH () calls in SEQ. */
2861 static void
2862 expand_FALLTHROUGH (gimple_seq *seq_p)
2864 struct walk_stmt_info wi;
2865 location_t loc;
2866 memset (&wi, 0, sizeof (wi));
2867 wi.info = (void *) &loc;
2868 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2869 if (wi.callback_result == integer_zero_node)
2870 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2871 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2872 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2873 "a case label or default label");
2877 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2878 branch to. */
2880 static enum gimplify_status
2881 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2883 tree switch_expr = *expr_p;
2884 gimple_seq switch_body_seq = NULL;
2885 enum gimplify_status ret;
2886 tree index_type = TREE_TYPE (switch_expr);
2887 if (index_type == NULL_TREE)
2888 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2890 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2891 fb_rvalue);
2892 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2893 return ret;
2895 if (SWITCH_BODY (switch_expr))
2897 vec<tree> labels;
2898 vec<tree> saved_labels;
2899 hash_set<tree> *saved_live_switch_vars = NULL;
2900 tree default_case = NULL_TREE;
2901 gswitch *switch_stmt;
2903 /* Save the old labels, get new ones from the body, then restore the
2904 old labels. Save everything from the switch body to append afterwards. */
2905 saved_labels = gimplify_ctxp->case_labels;
2906 gimplify_ctxp->case_labels.create (8);
2908 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2909 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2910 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2911 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2912 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2913 else
2914 gimplify_ctxp->live_switch_vars = NULL;
2916 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2917 gimplify_ctxp->in_switch_expr = true;
2919 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2921 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2922 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2923 maybe_warn_implicit_fallthrough (switch_body_seq);
2924 /* Only do this for the outermost GIMPLE_SWITCH. */
2925 if (!gimplify_ctxp->in_switch_expr)
2926 expand_FALLTHROUGH (&switch_body_seq);
2928 labels = gimplify_ctxp->case_labels;
2929 gimplify_ctxp->case_labels = saved_labels;
2931 if (gimplify_ctxp->live_switch_vars)
2933 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2934 delete gimplify_ctxp->live_switch_vars;
2936 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2938 preprocess_case_label_vec_for_gimple (labels, index_type,
2939 &default_case);
2941 bool add_bind = false;
2942 if (!default_case)
2944 glabel *new_default;
2946 default_case
2947 = build_case_label (NULL_TREE, NULL_TREE,
2948 create_artificial_label (UNKNOWN_LOCATION));
2949 if (old_in_switch_expr)
2951 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2952 add_bind = true;
2954 new_default = gimple_build_label (CASE_LABEL (default_case));
2955 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2957 else if (old_in_switch_expr)
2959 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2960 if (last && gimple_code (last) == GIMPLE_LABEL)
2962 tree label = gimple_label_label (as_a <glabel *> (last));
2963 if (SWITCH_BREAK_LABEL_P (label))
2964 add_bind = true;
2968 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2969 default_case, labels);
2970 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2971 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2972 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2973 so that we can easily find the start and end of the switch
2974 statement. */
2975 if (add_bind)
2977 gimple_seq bind_body = NULL;
2978 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2979 gimple_seq_add_seq (&bind_body, switch_body_seq);
2980 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2981 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2982 gimplify_seq_add_stmt (pre_p, bind);
2984 else
2986 gimplify_seq_add_stmt (pre_p, switch_stmt);
2987 gimplify_seq_add_seq (pre_p, switch_body_seq);
2989 labels.release ();
2991 else
2992 gcc_unreachable ();
2994 return GS_ALL_DONE;
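/* For illustration, a rough sketch (label names are made up): a C switch
   body is lowered to approximately

     switch (D.1) <default: <D.2>, case 1: <D.3>, case 4 ... 6: <D.4>>

   followed by the gimplified body containing those labels.  */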
2997 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2999 static enum gimplify_status
3000 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
3002 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
3003 == current_function_decl);
3005 tree label = LABEL_EXPR_LABEL (*expr_p);
3006 glabel *label_stmt = gimple_build_label (label);
3007 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3008 gimplify_seq_add_stmt (pre_p, label_stmt);
3010 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3011 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3012 NOT_TAKEN));
3013 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3014 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3015 TAKEN));
3017 return GS_ALL_DONE;
3020 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
3022 static enum gimplify_status
3023 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
3025 struct gimplify_ctx *ctxp;
3026 glabel *label_stmt;
3028 /* Invalid programs can play Duff's Device type games with, for example,
3029 #pragma omp parallel. At least in the C front end, we don't
3030 detect such invalid branches until after gimplification, in the
3031 diagnose_omp_blocks pass. */
3032 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
3033 if (ctxp->case_labels.exists ())
3034 break;
3036 tree label = CASE_LABEL (*expr_p);
3037 label_stmt = gimple_build_label (label);
3038 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3039 ctxp->case_labels.safe_push (*expr_p);
3040 gimplify_seq_add_stmt (pre_p, label_stmt);
3042 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3043 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3044 NOT_TAKEN));
3045 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3046 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3047 TAKEN));
3049 return GS_ALL_DONE;
3052 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
3053 if necessary. */
3055 tree
3056 build_and_jump (tree *label_p)
3058 if (label_p == NULL)
3059 /* If there's nowhere to jump, just fall through. */
3060 return NULL_TREE;
3062 if (*label_p == NULL_TREE)
3064 tree label = create_artificial_label (UNKNOWN_LOCATION);
3065 *label_p = label;
3068 return build1 (GOTO_EXPR, void_type_node, *label_p);
3071 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3072 This also involves building a label to jump to and communicating it to
3073 gimplify_loop_expr through gimplify_ctxp->exit_label. */
3075 static enum gimplify_status
3076 gimplify_exit_expr (tree *expr_p)
3078 tree cond = TREE_OPERAND (*expr_p, 0);
3079 tree expr;
3081 expr = build_and_jump (&gimplify_ctxp->exit_label);
3082 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
3083 *expr_p = expr;
3085 return GS_OK;
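/* For illustration, a rough sketch: 'EXIT_EXPR <cond>' becomes

     if (cond) goto <exit_label>;

   where <exit_label> is shared through gimplify_ctxp->exit_label and is
   emitted after the loop body by gimplify_loop_expr above.  */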
3088 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3089 different from its canonical type, wrap the whole thing inside a
3090 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3091 type.
3093 The canonical type of a COMPONENT_REF is the type of the field being
3094 referenced--unless the field is a bit-field which can be read directly
3095 in a smaller mode, in which case the canonical type is the
3096 sign-appropriate type corresponding to that mode. */
3098 static void
3099 canonicalize_component_ref (tree *expr_p)
3101 tree expr = *expr_p;
3102 tree type;
3104 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
3106 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
3107 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
3108 else
3109 type = TREE_TYPE (TREE_OPERAND (expr, 1));
3111 /* One could argue that all the stuff below is not necessary for
3112 the non-bitfield case and declare it a FE error if type
3113 adjustment would be needed. */
3114 if (TREE_TYPE (expr) != type)
3116 #ifdef ENABLE_TYPES_CHECKING
3117 tree old_type = TREE_TYPE (expr);
3118 #endif
3119 int type_quals;
3121 /* We need to preserve qualifiers and propagate them from
3122 operand 0. */
3123 type_quals = TYPE_QUALS (type)
3124 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
3125 if (TYPE_QUALS (type) != type_quals)
3126 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
3128 /* Set the type of the COMPONENT_REF to the underlying type. */
3129 TREE_TYPE (expr) = type;
3131 #ifdef ENABLE_TYPES_CHECKING
3132 /* It is now a FE error, if the conversion from the canonical
3133 type to the original expression type is not useless. */
3134 gcc_assert (useless_type_conversion_p (old_type, type));
3135 #endif
3139 /* If a NOP conversion is changing a pointer to array of foo to a pointer
3140 to foo, embed that change in the ADDR_EXPR by converting
3141 T array[U];
3142 (T *)&array
3144 &array[L]
3145 where L is the lower bound. For simplicity, only do this for constant
3146 lower bound.
3147 The constraint is that the type of &array[L] is trivially convertible
3148 to T *. */
3150 static void
3151 canonicalize_addr_expr (tree *expr_p)
3153 tree expr = *expr_p;
3154 tree addr_expr = TREE_OPERAND (expr, 0);
3155 tree datype, ddatype, pddatype;
3157 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3158 if (!POINTER_TYPE_P (TREE_TYPE (expr))
3159 || TREE_CODE (addr_expr) != ADDR_EXPR)
3160 return;
3162 /* The addr_expr type should be a pointer to an array. */
3163 datype = TREE_TYPE (TREE_TYPE (addr_expr));
3164 if (TREE_CODE (datype) != ARRAY_TYPE)
3165 return;
3167 /* The pointer to element type shall be trivially convertible to
3168 the expression pointer type. */
3169 ddatype = TREE_TYPE (datype);
3170 pddatype = build_pointer_type (ddatype);
3171 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3172 pddatype))
3173 return;
3175 /* The lower bound and element sizes must be constant. */
3176 if (!TYPE_SIZE_UNIT (ddatype)
3177 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3178 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3179 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3180 return;
3182 /* All checks succeeded. Build a new node to merge the cast. */
3183 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3184 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3185 NULL_TREE, NULL_TREE);
3186 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3188 /* We can have stripped a required restrict qualifier above. */
3189 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3190 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
3193 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3194 underneath as appropriate. */
3196 static enum gimplify_status
3197 gimplify_conversion (tree *expr_p)
3199 location_t loc = EXPR_LOCATION (*expr_p);
3200 gcc_assert (CONVERT_EXPR_P (*expr_p));
3202 /* Then strip away all but the outermost conversion. */
3203 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3205 /* And remove the outermost conversion if it's useless. */
3206 if (tree_ssa_useless_type_conversion (*expr_p))
3207 *expr_p = TREE_OPERAND (*expr_p, 0);
3209 /* If we still have a conversion at the toplevel,
3210 then canonicalize some constructs. */
3211 if (CONVERT_EXPR_P (*expr_p))
3213 tree sub = TREE_OPERAND (*expr_p, 0);
3215 /* If a NOP conversion is changing the type of a COMPONENT_REF
3216 expression, then canonicalize its type now in order to expose more
3217 redundant conversions. */
3218 if (TREE_CODE (sub) == COMPONENT_REF)
3219 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3221 /* If a NOP conversion is changing a pointer to array of foo
3222 to a pointer to foo, embed that change in the ADDR_EXPR. */
3223 else if (TREE_CODE (sub) == ADDR_EXPR)
3224 canonicalize_addr_expr (expr_p);
3227 /* If we have a conversion to a non-register type force the
3228 use of a VIEW_CONVERT_EXPR instead. */
3229 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3230 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3231 TREE_OPERAND (*expr_p, 0));
3233 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3234 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3235 TREE_SET_CODE (*expr_p, NOP_EXPR);
3237 return GS_OK;
3240 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3241 DECL_VALUE_EXPR, and it's worth re-examining things. */
3243 static enum gimplify_status
3244 gimplify_var_or_parm_decl (tree *expr_p)
3246 tree decl = *expr_p;
3248 /* ??? If this is a local variable, and it has not been seen in any
3249 outer BIND_EXPR, then it's probably the result of a duplicate
3250 declaration, for which we've already issued an error. It would
3251 be really nice if the front end wouldn't leak these at all.
3252 Currently the only known culprit is C++ destructors, as seen
3253 in g++.old-deja/g++.jason/binding.C.
3254 Another possible culprit is size expressions for variably modified
3255 types which are lost in the FE or not gimplified correctly. */
3256 if (VAR_P (decl)
3257 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3258 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3259 && decl_function_context (decl) == current_function_decl)
3261 gcc_assert (seen_error ());
3262 return GS_ERROR;
3265 /* When within an OMP context, notice uses of variables. */
3266 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3267 return GS_ALL_DONE;
3269 /* If the decl is an alias for another expression, substitute it now. */
3270 if (DECL_HAS_VALUE_EXPR_P (decl))
3272 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3273 return GS_OK;
3276 return GS_ALL_DONE;
3279 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3281 static void
3282 recalculate_side_effects (tree t)
3284 enum tree_code code = TREE_CODE (t);
3285 int len = TREE_OPERAND_LENGTH (t);
3286 int i;
3288 switch (TREE_CODE_CLASS (code))
3290 case tcc_expression:
3291 switch (code)
3293 case INIT_EXPR:
3294 case MODIFY_EXPR:
3295 case VA_ARG_EXPR:
3296 case PREDECREMENT_EXPR:
3297 case PREINCREMENT_EXPR:
3298 case POSTDECREMENT_EXPR:
3299 case POSTINCREMENT_EXPR:
3300 /* All of these have side-effects, no matter what their
3301 operands are. */
3302 return;
3304 default:
3305 break;
3307 /* Fall through. */
3309 case tcc_comparison: /* a comparison expression */
3310 case tcc_unary: /* a unary arithmetic expression */
3311 case tcc_binary: /* a binary arithmetic expression */
3312 case tcc_reference: /* a reference */
3313 case tcc_vl_exp: /* a function call */
3314 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3315 for (i = 0; i < len; ++i)
3317 tree op = TREE_OPERAND (t, i);
3318 if (op && TREE_SIDE_EFFECTS (op))
3319 TREE_SIDE_EFFECTS (t) = 1;
3321 break;
3323 case tcc_constant:
3324 /* No side-effects. */
3325 return;
3327 default:
3328 gcc_unreachable ();
3332 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3333 node *EXPR_P.
3335 compound_lval
3336 : min_lval '[' val ']'
3337 | min_lval '.' ID
3338 | compound_lval '[' val ']'
3339 | compound_lval '.' ID
3341 This is not part of the original SIMPLE definition, which separates
3342 array and member references, but it seems reasonable to handle them
3343 together. Also, this way we don't run into problems with union
3344 aliasing; gcc requires that for accesses through a union to alias, the
3345 union reference must be explicit, which was not always the case when we
3346 were splitting up array and member refs.
3348 PRE_P points to the sequence where side effects that must happen before
3349 *EXPR_P should be stored.
3351 POST_P points to the sequence where side effects that must happen after
3352 *EXPR_P should be stored. */
3354 static enum gimplify_status
3355 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3356 fallback_t fallback)
3358 tree *p;
3359 enum gimplify_status ret = GS_ALL_DONE, tret;
3360 int i;
3361 location_t loc = EXPR_LOCATION (*expr_p);
3362 tree expr = *expr_p;
3364 /* Create a stack of the subexpressions so later we can walk them in
3365 order from inner to outer. */
3366 auto_vec<tree, 10> expr_stack;
3368 /* We can handle anything that get_inner_reference can deal with. */
3369 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3371 restart:
3372 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3373 if (TREE_CODE (*p) == INDIRECT_REF)
3374 *p = fold_indirect_ref_loc (loc, *p);
3376 if (handled_component_p (*p))
3378 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3379 additional COMPONENT_REFs. */
3380 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3381 && gimplify_var_or_parm_decl (p) == GS_OK)
3382 goto restart;
3383 else
3384 break;
3386 expr_stack.safe_push (*p);
3389 gcc_assert (expr_stack.length ());
3391 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3392 walked through and P points to the innermost expression.
3394 Java requires that we elaborate nodes in source order. That
3395 means we must gimplify the inner expression followed by each of
3396 the indices, in order. But we can't gimplify the inner
3397 expression until we deal with any variable bounds, sizes, or
3398 positions in order to deal with PLACEHOLDER_EXPRs.
3400 The base expression may contain a statement expression that
3401 has declarations used in size expressions, so it has to be
3402 gimplified before gimplifying the size expressions.
3404 So we do this in three steps. First we deal with variable
3405 bounds, sizes, and positions, then we gimplify the base and
3406 ensure it is memory if needed, then we deal with the annotations
3407 for any variables in the components and any indices, from left
3408 to right. */
3410 bool need_non_reg = false;
3411 for (i = expr_stack.length () - 1; i >= 0; i--)
3413 tree t = expr_stack[i];
3415 if (error_operand_p (TREE_OPERAND (t, 0)))
3416 return GS_ERROR;
3418 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3420 /* Deal with the low bound and element type size and put them into
3421 the ARRAY_REF. If these values are set, they have already been
3422 gimplified. */
3423 if (TREE_OPERAND (t, 2) == NULL_TREE)
3425 tree low = unshare_expr (array_ref_low_bound (t));
3426 if (!is_gimple_min_invariant (low))
3428 TREE_OPERAND (t, 2) = low;
3432 if (TREE_OPERAND (t, 3) == NULL_TREE)
3434 tree elmt_size = array_ref_element_size (t);
3435 if (!is_gimple_min_invariant (elmt_size))
3437 elmt_size = unshare_expr (elmt_size);
3438 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3439 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3441 /* Divide the element size by the alignment of the element
3442 type (above). */
3443 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3444 elmt_size, factor);
3446 TREE_OPERAND (t, 3) = elmt_size;
3449 need_non_reg = true;
3451 else if (TREE_CODE (t) == COMPONENT_REF)
3453 /* Set the field offset into T and gimplify it. */
3454 if (TREE_OPERAND (t, 2) == NULL_TREE)
3456 tree offset = component_ref_field_offset (t);
3457 if (!is_gimple_min_invariant (offset))
3459 offset = unshare_expr (offset);
3460 tree field = TREE_OPERAND (t, 1);
3461 tree factor
3462 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3464 /* Divide the offset by its alignment. */
3465 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3466 offset, factor);
3468 TREE_OPERAND (t, 2) = offset;
3471 need_non_reg = true;
3473 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3474 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3475 is a non-register type then require the base object to be a
3476 non-register as well. */
3477 need_non_reg = true;
3480 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3481 so as to match the min_lval predicate. Failure to do so may result
3482 in the creation of large aggregate temporaries. */
3483 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3484 fallback | fb_lvalue);
3485 ret = MIN (ret, tret);
3486 if (ret == GS_ERROR)
3487 return GS_ERROR;
3489 /* Step 2a: if we have component references we do not support on
3490 registers then make sure the base isn't a register. Of course
3491 we can only do so if an rvalue is OK. */
3492 if (need_non_reg && (fallback & fb_rvalue))
3493 prepare_gimple_addressable (p, pre_p);
3496 /* Step 3: gimplify size expressions and the indices and operands of
3497 ARRAY_REF. During this loop we also remove any useless conversions.
3498 If we operate on a register also make sure to properly gimplify
3499 to individual operations. */
3501 bool reg_operations = is_gimple_reg (*p);
3502 for (; expr_stack.length () > 0; )
3504 tree t = expr_stack.pop ();
3506 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3508 gcc_assert (!reg_operations);
3510 /* Gimplify the low bound and element type size. */
3511 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3512 is_gimple_reg, fb_rvalue);
3513 ret = MIN (ret, tret);
3515 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3516 is_gimple_reg, fb_rvalue);
3517 ret = MIN (ret, tret);
3519 /* Gimplify the dimension. */
3520 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3521 is_gimple_val, fb_rvalue);
3522 ret = MIN (ret, tret);
3524 else if (TREE_CODE (t) == COMPONENT_REF)
3526 gcc_assert (!reg_operations);
3528 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3529 is_gimple_reg, fb_rvalue);
3530 ret = MIN (ret, tret);
3532 else if (reg_operations)
3534 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3535 is_gimple_val, fb_rvalue);
3536 ret = MIN (ret, tret);
3539 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3541 /* The innermost expression P may have originally had
3542 TREE_SIDE_EFFECTS set which would have caused all the outer
3543 expressions in *EXPR_P leading to P to also have had
3544 TREE_SIDE_EFFECTS set. */
3545 recalculate_side_effects (t);
3548 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3549 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3551 canonicalize_component_ref (expr_p);
3554 expr_stack.release ();
3556 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3558 return ret;
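/* For illustration, a rough sketch (D.1 is a made-up name): for
   'a[i + 1].f' used as an rvalue, the index is gimplified to a register,
   giving approximately

     D.1 = i + 1;
     ... = a[D.1].f;

   while any variable bounds, sizes and offsets have been gimplified
   first into operands 2 and 3 of the ARRAY_REF/COMPONENT_REF.  */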
3561 /* Gimplify the self modifying expression pointed to by EXPR_P
3562 (++, --, +=, -=).
3564 PRE_P points to the list where side effects that must happen before
3565 *EXPR_P should be stored.
3567 POST_P points to the list where side effects that must happen after
3568 *EXPR_P should be stored.
3570 WANT_VALUE is nonzero iff we want to use the value of this expression
3571 in another expression.
3573 ARITH_TYPE is the type the computation should be performed in. */
3575 enum gimplify_status
3576 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3577 bool want_value, tree arith_type)
3579 enum tree_code code;
3580 tree lhs, lvalue, rhs, t1;
3581 gimple_seq post = NULL, *orig_post_p = post_p;
3582 bool postfix;
3583 enum tree_code arith_code;
3584 enum gimplify_status ret;
3585 location_t loc = EXPR_LOCATION (*expr_p);
3587 code = TREE_CODE (*expr_p);
3589 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3590 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3592 /* Prefix or postfix? */
3593 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3594 /* Faster to treat as prefix if result is not used. */
3595 postfix = want_value;
3596 else
3597 postfix = false;
3599 /* For postfix, make sure the inner expression's post side effects
3600 are executed after side effects from this expression. */
3601 if (postfix)
3602 post_p = &post;
3604 /* Add or subtract? */
3605 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3606 arith_code = PLUS_EXPR;
3607 else
3608 arith_code = MINUS_EXPR;
3610 /* Gimplify the LHS into a GIMPLE lvalue. */
3611 lvalue = TREE_OPERAND (*expr_p, 0);
3612 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3613 if (ret == GS_ERROR)
3614 return ret;
3616 /* Extract the operands to the arithmetic operation. */
3617 lhs = lvalue;
3618 rhs = TREE_OPERAND (*expr_p, 1);
3620 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3621 that as the result value and in the postqueue operation. */
3622 if (postfix)
3624 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3625 if (ret == GS_ERROR)
3626 return ret;
3628 lhs = get_initialized_tmp_var (lhs, pre_p);
3631 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3632 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3634 rhs = convert_to_ptrofftype_loc (loc, rhs);
3635 if (arith_code == MINUS_EXPR)
3636 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3637 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3639 else
3640 t1 = fold_convert (TREE_TYPE (*expr_p),
3641 fold_build2 (arith_code, arith_type,
3642 fold_convert (arith_type, lhs),
3643 fold_convert (arith_type, rhs)));
3645 if (postfix)
3647 gimplify_assign (lvalue, t1, pre_p);
3648 gimplify_seq_add_seq (orig_post_p, post);
3649 *expr_p = lhs;
3650 return GS_ALL_DONE;
3652 else
3654 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3655 return GS_OK;
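/* For illustration, a rough sketch (t is a made-up name): when the value
   is used, postfix 'b = a++;' becomes

     t = a;
     a = t + 1;
     b = t;

   whereas prefix '++a', or 'a++' with the result unused, is simply
   'a = a + 1'.  */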
3659 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3661 static void
3662 maybe_with_size_expr (tree *expr_p)
3664 tree expr = *expr_p;
3665 tree type = TREE_TYPE (expr);
3666 tree size;
3668 /* If we've already wrapped this or the type is error_mark_node, we can't do
3669 anything. */
3670 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3671 || type == error_mark_node)
3672 return;
3674 /* If the size isn't known or is a constant, we have nothing to do. */
3675 size = TYPE_SIZE_UNIT (type);
3676 if (!size || poly_int_tree_p (size))
3677 return;
3679 /* Otherwise, make a WITH_SIZE_EXPR. */
3680 size = unshare_expr (size);
3681 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3682 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
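/* For illustration, a rough sketch (sz is a made-up name): an object of
   variably sized type, e.g. a function argument, becomes

     WITH_SIZE_EXPR <arg, sz>

   where sz is the substituted TYPE_SIZE_UNIT, so later passes still know
   how many bytes the object occupies.  */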
3685 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3686 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3687 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3688 gimplified to an SSA name. */
3690 enum gimplify_status
3691 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3692 bool allow_ssa)
3694 bool (*test) (tree);
3695 fallback_t fb;
3697 /* In general, we allow lvalues for function arguments to avoid
3698 extra overhead of copying large aggregates out of even larger
3699 aggregates into temporaries only to copy the temporaries to
3700 the argument list. Make optimizers happy by pulling out to
3701 temporaries those types that fit in registers. */
3702 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3703 test = is_gimple_val, fb = fb_rvalue;
3704 else
3706 test = is_gimple_lvalue, fb = fb_either;
3707 /* Also strip a TARGET_EXPR that would force an extra copy. */
3708 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3710 tree init = TARGET_EXPR_INITIAL (*arg_p);
3711 if (init
3712 && !VOID_TYPE_P (TREE_TYPE (init)))
3713 *arg_p = init;
3717 /* If this is a variable sized type, we must remember the size. */
3718 maybe_with_size_expr (arg_p);
3720 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3721 /* Make sure arguments have the same location as the function call
3722 itself. */
3723 protected_set_expr_location (*arg_p, call_location);
3725 /* There is a sequence point before a function call. Side effects in
3726 the argument list must occur before the actual call. So, when
3727 gimplifying arguments, force gimplify_expr to use an internal
3728 post queue which is then appended to the end of PRE_P. */
3729 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3732 /* Don't fold inside offloading or taskreg regions: it can break code by
3733 adding decl references that weren't in the source. We'll do it during
3734 omplower pass instead. */
3736 static bool
3737 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3739 struct gimplify_omp_ctx *ctx;
3740 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3741 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3742 return false;
3743 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3744 return false;
3745 /* Delay folding of builtins until the IL is in consistent state
3746 so the diagnostic machinery can do a better job. */
3747 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3748 return false;
3749 return fold_stmt (gsi);
3752 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3753 WANT_VALUE is true if the result of the call is desired. */
3755 static enum gimplify_status
3756 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3758 tree fndecl, parms, p, fnptrtype;
3759 enum gimplify_status ret;
3760 int i, nargs;
3761 gcall *call;
3762 bool builtin_va_start_p = false;
3763 location_t loc = EXPR_LOCATION (*expr_p);
3765 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3767 /* For reliable diagnostics during inlining, it is necessary that
3768 every call_expr be annotated with file and line. */
3769 if (! EXPR_HAS_LOCATION (*expr_p))
3770 SET_EXPR_LOCATION (*expr_p, input_location);
3772 /* Gimplify internal functions created in the FEs. */
3773 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3775 if (want_value)
3776 return GS_ALL_DONE;
3778 nargs = call_expr_nargs (*expr_p);
3779 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3780 auto_vec<tree> vargs (nargs);
3782 if (ifn == IFN_ASSUME)
3784 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3786 /* If the [[assume (cond)]]; condition is simple
3787 enough and can be evaluated unconditionally
3788 without side-effects, expand it as
3789 if (!cond) __builtin_unreachable (); */
3790 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3791 *expr_p = build3 (COND_EXPR, void_type_node,
3792 CALL_EXPR_ARG (*expr_p, 0), void_node,
3793 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3794 fndecl, 0));
3795 return GS_OK;
3797 /* If not optimizing, ignore the assumptions. */
3798 if (!optimize || seen_error ())
3800 *expr_p = NULL_TREE;
3801 return GS_ALL_DONE;
3803 /* Temporarily, until gimple lowering, transform
3804 .ASSUME (cond);
3805 into:
3806 [[assume (guard)]]
3807 {
3808 guard = cond;
3809 }
3810 such that gimple lowering can outline the condition into
3811 a separate function easily. */
3812 tree guard = create_tmp_var (boolean_type_node);
3813 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3814 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3815 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3816 push_gimplify_context ();
3817 gimple_seq body = NULL;
3818 gimple *g = gimplify_and_return_first (*expr_p, &body);
3819 pop_gimplify_context (g);
3820 g = gimple_build_assume (guard, body);
3821 gimple_set_location (g, loc);
3822 gimplify_seq_add_stmt (pre_p, g);
3823 *expr_p = NULL_TREE;
3824 return GS_ALL_DONE;
3827 for (i = 0; i < nargs; i++)
3829 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3830 EXPR_LOCATION (*expr_p));
3831 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3834 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3835 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3836 gimplify_seq_add_stmt (pre_p, call);
3837 return GS_ALL_DONE;
3840 /* This may be a call to a builtin function.
3842 Builtin function calls may be transformed into different
3843 (and more efficient) builtin function calls under certain
3844 circumstances. Unfortunately, gimplification can muck things
3845 up enough that the builtin expanders are not aware that certain
3846 transformations are still valid.
3848 So we attempt transformation/gimplification of the call before
3849 we gimplify the CALL_EXPR. At this time we do not manage to
3850 transform all calls in the same manner as the expanders do, but
3851 we do transform most of them. */
3852 fndecl = get_callee_fndecl (*expr_p);
3853 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3854 switch (DECL_FUNCTION_CODE (fndecl))
3856 CASE_BUILT_IN_ALLOCA:
3857 /* If the call has been built for a variable-sized object, then we
3858 want to restore the stack level when the enclosing BIND_EXPR is
3859 exited to reclaim the allocated space; otherwise, we precisely
3860 need to do the opposite and preserve the latest stack level. */
3861 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3862 gimplify_ctxp->save_stack = true;
3863 else
3864 gimplify_ctxp->keep_stack = true;
3865 break;
3867 case BUILT_IN_VA_START:
3869 builtin_va_start_p = true;
3870 if (call_expr_nargs (*expr_p) < 2)
3872 error ("too few arguments to function %<va_start%>");
3873 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3874 return GS_OK;
3877 if (fold_builtin_next_arg (*expr_p, true))
3879 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3880 return GS_OK;
3882 break;
3885 case BUILT_IN_EH_RETURN:
3886 cfun->calls_eh_return = true;
3887 break;
3889 case BUILT_IN_CLEAR_PADDING:
3890 if (call_expr_nargs (*expr_p) == 1)
3892 /* Remember the original type of the argument in an internal
3893 dummy second argument, as in GIMPLE pointer conversions are
3894 useless. Also mark this call as not for automatic
3895 initialization in the internal dummy third argument. */
3896 p = CALL_EXPR_ARG (*expr_p, 0);
3897 *expr_p
3898 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3899 build_zero_cst (TREE_TYPE (p)));
3900 return GS_OK;
3902 break;
3904 default:
3907 if (fndecl && fndecl_built_in_p (fndecl))
3909 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3910 if (new_tree && new_tree != *expr_p)
3912 /* There was a transformation of this call which computes the
3913 same value, but in a more efficient way. Return and try
3914 again. */
3915 *expr_p = new_tree;
3916 return GS_OK;
3920 /* Remember the original function pointer type. */
3921 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3923 if (flag_openmp
3924 && fndecl
3925 && cfun
3926 && (cfun->curr_properties & PROP_gimple_any) == 0)
3928 tree variant = omp_resolve_declare_variant (fndecl);
3929 if (variant != fndecl)
3930 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3933 /* There is a sequence point before the call, so any side effects in
3934 the calling expression must occur before the actual call. Force
3935 gimplify_expr to use an internal post queue. */
3936 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3937 is_gimple_call_addr, fb_rvalue);
3939 if (ret == GS_ERROR)
3940 return GS_ERROR;
3942 nargs = call_expr_nargs (*expr_p);
3944 /* Get argument types for verification. */
3945 fndecl = get_callee_fndecl (*expr_p);
3946 parms = NULL_TREE;
3947 if (fndecl)
3948 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3949 else
3950 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3952 if (fndecl && DECL_ARGUMENTS (fndecl))
3953 p = DECL_ARGUMENTS (fndecl);
3954 else if (parms)
3955 p = parms;
3956 else
3957 p = NULL_TREE;
3958 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3961 /* If the last argument is __builtin_va_arg_pack () and it is not
3962 passed as a named argument, decrease the number of CALL_EXPR
3963 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3964 if (!p
3965 && i < nargs
3966 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3968 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3969 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3971 if (last_arg_fndecl
3972 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3974 tree call = *expr_p;
3976 --nargs;
3977 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3978 CALL_EXPR_FN (call),
3979 nargs, CALL_EXPR_ARGP (call));
3981 /* Copy all CALL_EXPR flags, location and block, except
3982 CALL_EXPR_VA_ARG_PACK flag. */
3983 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3984 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3985 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3986 = CALL_EXPR_RETURN_SLOT_OPT (call);
3987 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3988 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3990 /* Set CALL_EXPR_VA_ARG_PACK. */
3991 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3995 /* If the call returns twice then after building the CFG the call
3996 argument computations will no longer dominate the call because
3997 we add an abnormal incoming edge to the call. So do not use SSA
3998 vars there. */
3999 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
4001 /* Gimplify the function arguments. */
4002 if (nargs > 0)
4004 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
4005 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
4006 PUSH_ARGS_REVERSED ? i-- : i++)
4008 enum gimplify_status t;
4010 /* Avoid gimplifying the second argument to va_start, which needs to
4011 be the plain PARM_DECL. */
4012 if ((i != 1) || !builtin_va_start_p)
4014 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
4015 EXPR_LOCATION (*expr_p), ! returns_twice);
4017 if (t == GS_ERROR)
4018 ret = GS_ERROR;
4023 /* Gimplify the static chain. */
4024 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
4026 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
4027 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
4028 else
4030 enum gimplify_status t;
4031 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
4032 EXPR_LOCATION (*expr_p), ! returns_twice);
4033 if (t == GS_ERROR)
4034 ret = GS_ERROR;
4038 /* Verify the function result. */
4039 if (want_value && fndecl
4040 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
4042 error_at (loc, "using result of function returning %<void%>");
4043 ret = GS_ERROR;
4046 /* Try this again in case gimplification exposed something. */
4047 if (ret != GS_ERROR)
4049 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
4051 if (new_tree && new_tree != *expr_p)
4053 /* There was a transformation of this call which computes the
4054 same value, but in a more efficient way. Return and try
4055 again. */
4056 *expr_p = new_tree;
4057 return GS_OK;
4060 else
4062 *expr_p = error_mark_node;
4063 return GS_ERROR;
4066 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
4067 CALL_EXPR itself. This allows us to eliminate redundant or useless
4068 calls to "const" functions. */
4069 if (TREE_CODE (*expr_p) == CALL_EXPR)
4071 int flags = call_expr_flags (*expr_p);
4072 if (flags & (ECF_CONST | ECF_PURE)
4073 /* An infinite loop is considered a side effect. */
4074 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
4075 TREE_SIDE_EFFECTS (*expr_p) = 0;
4078 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
4079 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
4080 form and delegate the creation of a GIMPLE_CALL to
4081 gimplify_modify_expr. This is always possible because when
4082 WANT_VALUE is true, the caller wants the result of this call into
4083 a temporary, which means that we will emit an INIT_EXPR in
4084 internal_get_tmp_var which will then be handled by
4085 gimplify_modify_expr. */
4086 if (!want_value)
4088 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
4089 have to do is replicate it as a GIMPLE_CALL tuple. */
4090 gimple_stmt_iterator gsi;
4091 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
4092 notice_special_calls (call);
4093 gimplify_seq_add_stmt (pre_p, call);
4094 gsi = gsi_last (*pre_p);
4095 maybe_fold_stmt (&gsi);
4096 *expr_p = NULL_TREE;
4098 else
4099 /* Remember the original function type. */
4100 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
4101 CALL_EXPR_FN (*expr_p));
4103 return ret;
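/* End-to-end illustration (a sketch; temporary names are invented and
   argument evaluation order may differ): the GENERIC statement

     x = foo (a + 1, bar ());

   gimplifies into something like

     t1 = a + 1;
     t2 = bar ();
     x = foo (t1, t2);

   with the argument computations forced into the pre-queue so their
   side effects happen before the sequence point of the call.  */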
4106 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
4107 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
4109 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
4110 condition is true or false, respectively. If null, we should generate
4111 our own to skip over the evaluation of this specific expression.
4113 LOCUS is the source location of the COND_EXPR.
4115 This function is the tree equivalent of do_jump.
4117 shortcut_cond_r should only be called by shortcut_cond_expr. */
4119 static tree
4120 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
4121 location_t locus)
4123 tree local_label = NULL_TREE;
4124 tree t, expr = NULL;
4126 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
4127 retain the shortcut semantics. Just insert the gotos here;
4128 shortcut_cond_expr will append the real blocks later. */
4129 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4131 location_t new_locus;
4133 /* Turn if (a && b) into
4135 if (a); else goto no;
4136 if (b) goto yes; else goto no;
4137 (no:) */
4139 if (false_label_p == NULL)
4140 false_label_p = &local_label;
4142 /* Keep the original source location on the first 'if'. */
4143 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
4144 append_to_statement_list (t, &expr);
4146 /* Set the source location of the && on the second 'if'. */
4147 new_locus = rexpr_location (pred, locus);
4148 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4149 new_locus);
4150 append_to_statement_list (t, &expr);
4152 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4154 location_t new_locus;
4156 /* Turn if (a || b) into
4158 if (a) goto yes;
4159 if (b) goto yes; else goto no;
4160 (yes:) */
4162 if (true_label_p == NULL)
4163 true_label_p = &local_label;
4165 /* Keep the original source location on the first 'if'. */
4166 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
4167 append_to_statement_list (t, &expr);
4169 /* Set the source location of the || on the second 'if'. */
4170 new_locus = rexpr_location (pred, locus);
4171 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4172 new_locus);
4173 append_to_statement_list (t, &expr);
4175 else if (TREE_CODE (pred) == COND_EXPR
4176 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
4177 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
4179 location_t new_locus;
4181 /* As long as we're messing with gotos, turn if (a ? b : c) into
4182 if (a)
4183 if (b) goto yes; else goto no;
4184 else
4185 if (c) goto yes; else goto no;
4187 Don't do this if one of the arms has void type, which can happen
4188 in C++ when the arm is throw. */
4190 /* Keep the original source location on the first 'if'. Set the source
4191 location of the ? on the second 'if'. */
4192 new_locus = rexpr_location (pred, locus);
4193 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
4194 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
4195 false_label_p, locus),
4196 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
4197 false_label_p, new_locus));
4199 else
4201 expr = build3 (COND_EXPR, void_type_node, pred,
4202 build_and_jump (true_label_p),
4203 build_and_jump (false_label_p));
4204 SET_EXPR_LOCATION (expr, locus);
4207 if (local_label)
4209 t = build1 (LABEL_EXPR, void_type_node, local_label);
4210 append_to_statement_list (t, &expr);
4213 return expr;
4216 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4217 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4218 statement, if it is the last one. Otherwise, return NULL. */
4220 static tree
4221 find_goto (tree expr)
4223 if (!expr)
4224 return NULL_TREE;
4226 if (TREE_CODE (expr) == GOTO_EXPR)
4227 return expr;
4229 if (TREE_CODE (expr) != STATEMENT_LIST)
4230 return NULL_TREE;
4232 tree_stmt_iterator i = tsi_start (expr);
4234 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4235 tsi_next (&i);
4237 if (!tsi_one_before_end_p (i))
4238 return NULL_TREE;
4240 return find_goto (tsi_stmt (i));
4243 /* Same as find_goto, except that it returns NULL if the destination
4244 is not a LABEL_DECL. */
4246 static inline tree
4247 find_goto_label (tree expr)
4249 tree dest = find_goto (expr);
4250 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4251 return dest;
4252 return NULL_TREE;
4255 /* Given a conditional expression EXPR with short-circuit boolean
4256 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4257 predicate apart into the equivalent sequence of conditionals. */
4259 static tree
4260 shortcut_cond_expr (tree expr)
4262 tree pred = TREE_OPERAND (expr, 0);
4263 tree then_ = TREE_OPERAND (expr, 1);
4264 tree else_ = TREE_OPERAND (expr, 2);
4265 tree true_label, false_label, end_label, t;
4266 tree *true_label_p;
4267 tree *false_label_p;
4268 bool emit_end, emit_false, jump_over_else;
4269 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4270 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4272 /* First do simple transformations. */
4273 if (!else_se)
4275 /* If there is no 'else', turn
4276 if (a && b) then c
4277 into
4278 if (a) if (b) then c. */
4279 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4281 /* Keep the original source location on the first 'if'. */
4282 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4283 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4284 /* Set the source location of the && on the second 'if'. */
4285 if (rexpr_has_location (pred))
4286 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4287 then_ = shortcut_cond_expr (expr);
4288 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4289 pred = TREE_OPERAND (pred, 0);
4290 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4291 SET_EXPR_LOCATION (expr, locus);
4295 if (!then_se)
4297 /* If there is no 'then', turn
4298 if (a || b); else d
4299 into
4300 if (a); else if (b); else d. */
4301 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4303 /* Keep the original source location on the first 'if'. */
4304 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4305 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4306 /* Set the source location of the || on the second 'if'. */
4307 if (rexpr_has_location (pred))
4308 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4309 else_ = shortcut_cond_expr (expr);
4310 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4311 pred = TREE_OPERAND (pred, 0);
4312 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4313 SET_EXPR_LOCATION (expr, locus);
4317 /* If we're done, great. */
4318 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4319 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4320 return expr;
4322 /* Otherwise we need to mess with gotos. Change
4323 if (a) c; else d;
4325 if (a); else goto no;
4326 c; goto end;
4327 no: d; end:
4328 and recursively gimplify the condition. */
4330 true_label = false_label = end_label = NULL_TREE;
4332 /* If our arms just jump somewhere, hijack those labels so we don't
4333 generate jumps to jumps. */
4335 if (tree then_goto = find_goto_label (then_))
4337 true_label = GOTO_DESTINATION (then_goto);
4338 then_ = NULL;
4339 then_se = false;
4342 if (tree else_goto = find_goto_label (else_))
4344 false_label = GOTO_DESTINATION (else_goto);
4345 else_ = NULL;
4346 else_se = false;
4349 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4350 if (true_label)
4351 true_label_p = &true_label;
4352 else
4353 true_label_p = NULL;
4355 /* The 'else' branch also needs a label if it contains interesting code. */
4356 if (false_label || else_se)
4357 false_label_p = &false_label;
4358 else
4359 false_label_p = NULL;
4361 /* If there was nothing else in our arms, just forward the label(s). */
4362 if (!then_se && !else_se)
4363 return shortcut_cond_r (pred, true_label_p, false_label_p,
4364 EXPR_LOC_OR_LOC (expr, input_location));
4366 /* If our last subexpression already has a terminal label, reuse it. */
4367 if (else_se)
4368 t = expr_last (else_);
4369 else if (then_se)
4370 t = expr_last (then_);
4371 else
4372 t = NULL;
4373 if (t && TREE_CODE (t) == LABEL_EXPR)
4374 end_label = LABEL_EXPR_LABEL (t);
4376 /* If we don't care about jumping to the 'else' branch, jump to the end
4377 if the condition is false. */
4378 if (!false_label_p)
4379 false_label_p = &end_label;
4381 /* We only want to emit these labels if we aren't hijacking them. */
4382 emit_end = (end_label == NULL_TREE);
4383 emit_false = (false_label == NULL_TREE);
4385 /* We only emit the jump over the else clause if we have to--if the
4386 then clause may fall through. Otherwise we can wind up with a
4387 useless jump and a useless label at the end of gimplified code,
4388 which will cause us to think that this conditional as a whole
4389 falls through even if it doesn't. If we then inline a function
4390 which ends with such a condition, that can cause us to issue an
4391 inappropriate warning about control reaching the end of a
4392 non-void function. */
4393 jump_over_else = block_may_fallthru (then_);
4395 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4396 EXPR_LOC_OR_LOC (expr, input_location));
4398 expr = NULL;
4399 append_to_statement_list (pred, &expr);
4401 append_to_statement_list (then_, &expr);
4402 if (else_se)
4404 if (jump_over_else)
4406 tree last = expr_last (expr);
4407 t = build_and_jump (&end_label);
4408 if (rexpr_has_location (last))
4409 SET_EXPR_LOCATION (t, rexpr_location (last));
4410 append_to_statement_list (t, &expr);
4412 if (emit_false)
4414 t = build1 (LABEL_EXPR, void_type_node, false_label);
4415 append_to_statement_list (t, &expr);
4417 append_to_statement_list (else_, &expr);
4419 if (emit_end && end_label)
4421 t = build1 (LABEL_EXPR, void_type_node, end_label);
4422 append_to_statement_list (t, &expr);
4425 return expr;
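/* Worked illustration (a sketch; the label names are invented):

     if (a && b) c (); else d ();

   is rewritten by shortcut_cond_expr into

     if (a) ; else goto no;
     if (b) ; else goto no;
     c ();
     goto end;
   no:
     d ();
   end:

   which contains no TRUTH_ANDIF_EXPR and gimplifies directly into
   GIMPLE_CONDs and labels.  */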
4428 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4430 tree
4431 gimple_boolify (tree expr)
4433 tree type = TREE_TYPE (expr);
4434 location_t loc = EXPR_LOCATION (expr);
4436 if (TREE_CODE (expr) == NE_EXPR
4437 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4438 && integer_zerop (TREE_OPERAND (expr, 1)))
4440 tree call = TREE_OPERAND (expr, 0);
4441 tree fn = get_callee_fndecl (call);
4443 /* For __builtin_expect ((long) (x), y) recurse into x as well
4444 if x is truth_value_p. */
4445 if (fn
4446 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4447 && call_expr_nargs (call) == 2)
4449 tree arg = CALL_EXPR_ARG (call, 0);
4450 if (arg)
4452 if (TREE_CODE (arg) == NOP_EXPR
4453 && TREE_TYPE (arg) == TREE_TYPE (call))
4454 arg = TREE_OPERAND (arg, 0);
4455 if (truth_value_p (TREE_CODE (arg)))
4457 arg = gimple_boolify (arg);
4458 CALL_EXPR_ARG (call, 0)
4459 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4465 switch (TREE_CODE (expr))
4467 case TRUTH_AND_EXPR:
4468 case TRUTH_OR_EXPR:
4469 case TRUTH_XOR_EXPR:
4470 case TRUTH_ANDIF_EXPR:
4471 case TRUTH_ORIF_EXPR:
4472 /* Also boolify the arguments of truth exprs. */
4473 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4474 /* FALLTHRU */
4476 case TRUTH_NOT_EXPR:
4477 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4479 /* These expressions always produce boolean results. */
4480 if (TREE_CODE (type) != BOOLEAN_TYPE)
4481 TREE_TYPE (expr) = boolean_type_node;
4482 return expr;
4484 case ANNOTATE_EXPR:
4485 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4487 case annot_expr_ivdep_kind:
4488 case annot_expr_unroll_kind:
4489 case annot_expr_no_vector_kind:
4490 case annot_expr_vector_kind:
4491 case annot_expr_parallel_kind:
4492 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4493 if (TREE_CODE (type) != BOOLEAN_TYPE)
4494 TREE_TYPE (expr) = boolean_type_node;
4495 return expr;
4496 default:
4497 gcc_unreachable ();
4500 default:
4501 if (COMPARISON_CLASS_P (expr))
4503 /* These expressions always produce boolean results. */
4504 if (TREE_CODE (type) != BOOLEAN_TYPE)
4505 TREE_TYPE (expr) = boolean_type_node;
4506 return expr;
4508 /* Other expressions that get here must have boolean values, but
4509 might need to be converted to the appropriate mode. */
4510 if (TREE_CODE (type) == BOOLEAN_TYPE)
4511 return expr;
4512 return fold_convert_loc (loc, boolean_type_node, expr);
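/* For illustration (a sketch): a comparison such as 'x < y' keeps its
   LT_EXPR but has TREE_TYPE retargeted to boolean_type_node, whereas
   an int-valued condition such as plain 'x' falls through to the
   final fold_convert_loc call and gets an explicit conversion to
   boolean.  */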
4516 /* Given a conditional expression *EXPR_P without side effects, gimplify
4517 its operands. New statements are inserted to PRE_P. */
4519 static enum gimplify_status
4520 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4522 tree expr = *expr_p, cond;
4523 enum gimplify_status ret, tret;
4524 enum tree_code code;
4526 cond = gimple_boolify (COND_EXPR_COND (expr));
4528 /* We need to handle && and || specially, as their gimplification
4529 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4530 code = TREE_CODE (cond);
4531 if (code == TRUTH_ANDIF_EXPR)
4532 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4533 else if (code == TRUTH_ORIF_EXPR)
4534 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4535 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4536 COND_EXPR_COND (*expr_p) = cond;
4538 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4539 is_gimple_val, fb_rvalue);
4540 ret = MIN (ret, tret);
4541 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4542 is_gimple_val, fb_rvalue);
4544 return MIN (ret, tret);
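/* For illustration (a sketch): when neither arm has side effects or
   can trap,

     t = p ? a : b;

   may keep the COND_EXPR on the rhs, with 'p', 'a' and 'b' each
   gimplified to is_gimple_val operands.  Demoting TRUTH_ANDIF_EXPR to
   TRUTH_AND_EXPR above matters because gimplifying '&&' normally
   produces another pure COND_EXPR, which would recurse forever.  */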
4547 /* Return true if evaluating EXPR could trap.
4548 EXPR is GENERIC, while tree_could_trap_p can be called
4549 only on GIMPLE. */
4551 bool
4552 generic_expr_could_trap_p (tree expr)
4554 unsigned i, n;
4556 if (!expr || is_gimple_val (expr))
4557 return false;
4559 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4560 return true;
4562 n = TREE_OPERAND_LENGTH (expr);
4563 for (i = 0; i < n; i++)
4564 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4565 return true;
4567 return false;
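/* For illustration (a sketch): '*p + 1' could trap because the
   dereference may fault, so it must not be hoisted to unconditional
   evaluation, while 'a + b' on unsigned operands cannot trap and is
   safe to evaluate speculatively.  */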
4570 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4571 into
4573 if (p) if (p)
4574 t1 = a; a;
4575 else or else
4576 t1 = b; b;
4579 The second form is used when *EXPR_P is of type void.
4581 PRE_P points to the list where side effects that must happen before
4582 *EXPR_P should be stored. */
4584 static enum gimplify_status
4585 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4587 tree expr = *expr_p;
4588 tree type = TREE_TYPE (expr);
4589 location_t loc = EXPR_LOCATION (expr);
4590 tree tmp, arm1, arm2;
4591 enum gimplify_status ret;
4592 tree label_true, label_false, label_cont;
4593 bool have_then_clause_p, have_else_clause_p;
4594 gcond *cond_stmt;
4595 enum tree_code pred_code;
4596 gimple_seq seq = NULL;
4598 /* If this COND_EXPR has a value, copy the values into a temporary within
4599 the arms. */
4600 if (!VOID_TYPE_P (type))
4602 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4603 tree result;
4605 /* If either an rvalue is ok or we do not require an lvalue, create the
4606 temporary. But we cannot do that if the type is addressable. */
4607 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4608 && !TREE_ADDRESSABLE (type))
4610 if (gimplify_ctxp->allow_rhs_cond_expr
4611 /* If either branch has side effects or could trap, it can't be
4612 evaluated unconditionally. */
4613 && !TREE_SIDE_EFFECTS (then_)
4614 && !generic_expr_could_trap_p (then_)
4615 && !TREE_SIDE_EFFECTS (else_)
4616 && !generic_expr_could_trap_p (else_))
4617 return gimplify_pure_cond_expr (expr_p, pre_p);
4619 tmp = create_tmp_var (type, "iftmp");
4620 result = tmp;
4623 /* Otherwise, only create and copy references to the values. */
4624 else
4626 type = build_pointer_type (type);
4628 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4629 then_ = build_fold_addr_expr_loc (loc, then_);
4631 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4632 else_ = build_fold_addr_expr_loc (loc, else_);
4634 expr
4635 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4637 tmp = create_tmp_var (type, "iftmp");
4638 result = build_simple_mem_ref_loc (loc, tmp);
4641 /* Build the new then clause, `tmp = then_;'. But don't build the
4642 assignment if the value is void; in C++ it can be if it's a throw. */
4643 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4644 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4646 /* Similarly, build the new else clause, `tmp = else_;'. */
4647 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4648 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4650 TREE_TYPE (expr) = void_type_node;
4651 recalculate_side_effects (expr);
4653 /* Move the COND_EXPR to the prequeue. */
4654 gimplify_stmt (&expr, pre_p);
4656 *expr_p = result;
4657 return GS_ALL_DONE;
4660 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4661 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4662 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4663 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4665 /* Make sure the condition has BOOLEAN_TYPE. */
4666 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4668 /* Break apart && and || conditions. */
4669 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4670 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4672 expr = shortcut_cond_expr (expr);
4674 if (expr != *expr_p)
4676 *expr_p = expr;
4678 /* We can't rely on gimplify_expr to re-gimplify the expanded
4679 form properly, as cleanups might cause the target labels to be
4680 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4681 set up a conditional context. */
4682 gimple_push_condition ();
4683 gimplify_stmt (expr_p, &seq);
4684 gimple_pop_condition (pre_p);
4685 gimple_seq_add_seq (pre_p, seq);
4687 return GS_ALL_DONE;
4691 /* Now do the normal gimplification. */
4693 /* Gimplify condition. */
4694 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4695 is_gimple_condexpr_for_cond, fb_rvalue);
4696 if (ret == GS_ERROR)
4697 return GS_ERROR;
4698 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4700 gimple_push_condition ();
4702 have_then_clause_p = have_else_clause_p = false;
4703 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4704 if (label_true
4705 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4706 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4707 have different locations, otherwise we end up with incorrect
4708 location information on the branches. */
4709 && (optimize
4710 || !EXPR_HAS_LOCATION (expr)
4711 || !rexpr_has_location (label_true)
4712 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4714 have_then_clause_p = true;
4715 label_true = GOTO_DESTINATION (label_true);
4717 else
4718 label_true = create_artificial_label (UNKNOWN_LOCATION);
4719 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4720 if (label_false
4721 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4722 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4723 have different locations, otherwise we end up with incorrect
4724 location information on the branches. */
4725 && (optimize
4726 || !EXPR_HAS_LOCATION (expr)
4727 || !rexpr_has_location (label_false)
4728 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4730 have_else_clause_p = true;
4731 label_false = GOTO_DESTINATION (label_false);
4733 else
4734 label_false = create_artificial_label (UNKNOWN_LOCATION);
4736 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4737 &arm2);
4738 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4739 label_false);
4740 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4741 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4742 gimplify_seq_add_stmt (&seq, cond_stmt);
4743 gimple_stmt_iterator gsi = gsi_last (seq);
4744 maybe_fold_stmt (&gsi);
4746 label_cont = NULL_TREE;
4747 if (!have_then_clause_p)
4749 /* For if (...) {} else { code; } put label_true after
4750 the else block. */
4751 if (TREE_OPERAND (expr, 1) == NULL_TREE
4752 && !have_else_clause_p
4753 && TREE_OPERAND (expr, 2) != NULL_TREE)
4755 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4756 handling that label_cont == label_true can be only reached
4757 through fallthrough from { code; }. */
4758 if (integer_zerop (COND_EXPR_COND (expr)))
4759 UNUSED_LABEL_P (label_true) = 1;
4760 label_cont = label_true;
4762 else
4764 bool then_side_effects
4765 = (TREE_OPERAND (expr, 1)
4766 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4767 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4768 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4769 /* For if (...) { code; } else {} or
4770 if (...) { code; } else goto label; or
4771 if (...) { code; return; } else { ... }
4772 label_cont isn't needed. */
4773 if (!have_else_clause_p
4774 && TREE_OPERAND (expr, 2) != NULL_TREE
4775 && gimple_seq_may_fallthru (seq))
4777 gimple *g;
4778 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4780 /* For if (0) { non-side-effect-code } else { code }
4781 tell -Wimplicit-fallthrough handling that label_cont can
4782 be only reached through fallthrough from { code }. */
4783 if (integer_zerop (COND_EXPR_COND (expr)))
4785 UNUSED_LABEL_P (label_true) = 1;
4786 if (!then_side_effects)
4787 UNUSED_LABEL_P (label_cont) = 1;
4790 g = gimple_build_goto (label_cont);
4792 /* GIMPLE_COND's are very low level; they have embedded
4793 gotos. This particular embedded goto should not be marked
4794 with the location of the original COND_EXPR, as it would
4795 correspond to the COND_EXPR's condition, not the ELSE or the
4796 THEN arms. To avoid marking it with the wrong location, flag
4797 it as "no location". */
4798 gimple_set_do_not_emit_location (g);
4800 gimplify_seq_add_stmt (&seq, g);
4804 if (!have_else_clause_p)
4806 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4807 tell -Wimplicit-fallthrough handling that label_false can be only
4808 reached through fallthrough from { code }. */
4809 if (integer_nonzerop (COND_EXPR_COND (expr))
4810 && (TREE_OPERAND (expr, 2) == NULL_TREE
4811 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4812 UNUSED_LABEL_P (label_false) = 1;
4813 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4814 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4816 if (label_cont)
4817 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4819 gimple_pop_condition (pre_p);
4820 gimple_seq_add_seq (pre_p, seq);
4822 if (ret == GS_ERROR)
4823 ; /* Do nothing. */
4824 else if (have_then_clause_p || have_else_clause_p)
4825 ret = GS_ALL_DONE;
4826 else
4828 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4829 expr = TREE_OPERAND (expr, 0);
4830 gimplify_stmt (&expr, pre_p);
4833 *expr_p = NULL;
4834 return ret;
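/* Worked illustration (a sketch; 'iftmp' and the labels stand for the
   artificial names created above): for a valued conditional

     x = p ? a : b;

   the arms are rewritten to initialize a temporary and the COND_EXPR
   is moved to the pre-queue, yielding roughly

     if (p) goto then_; else goto else_;
   then_:
     iftmp = a;
     goto cont_;
   else_:
     iftmp = b;
   cont_:
     x = iftmp;  */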
4837 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4838 to be marked addressable.
4840 We cannot rely on such an expression being directly markable if a temporary
4841 has been created by the gimplification. In this case, we create another
4842 temporary and initialize it with a copy, which will become a store after we
4843 mark it addressable. This can happen if the front-end passed us something
4844 that it could not mark addressable yet, like a Fortran pass-by-reference
4845 parameter (int) floatvar. */
4847 static void
4848 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4850 while (handled_component_p (*expr_p))
4851 expr_p = &TREE_OPERAND (*expr_p, 0);
4853 /* Do not allow an SSA name as the temporary. */
4854 if (is_gimple_reg (*expr_p))
4855 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4858 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4859 a call to __builtin_memcpy. */
4861 static enum gimplify_status
4862 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4863 gimple_seq *seq_p)
4865 tree t, to, to_ptr, from, from_ptr;
4866 gcall *gs;
4867 location_t loc = EXPR_LOCATION (*expr_p);
4869 to = TREE_OPERAND (*expr_p, 0);
4870 from = TREE_OPERAND (*expr_p, 1);
4872 /* Mark the RHS addressable. Beware that it may not be possible to do so
4873 directly if a temporary has been created by the gimplification. */
4874 prepare_gimple_addressable (&from, seq_p);
4876 mark_addressable (from);
4877 from_ptr = build_fold_addr_expr_loc (loc, from);
4878 gimplify_arg (&from_ptr, seq_p, loc);
4880 mark_addressable (to);
4881 to_ptr = build_fold_addr_expr_loc (loc, to);
4882 gimplify_arg (&to_ptr, seq_p, loc);
4884 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4886 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4887 gimple_call_set_alloca_for_var (gs, true);
4889 if (want_value)
4891 /* tmp = memcpy() */
4892 t = create_tmp_var (TREE_TYPE (to_ptr));
4893 gimple_call_set_lhs (gs, t);
4894 gimplify_seq_add_stmt (seq_p, gs);
4896 *expr_p = build_simple_mem_ref (t);
4897 return GS_ALL_DONE;
4900 gimplify_seq_add_stmt (seq_p, gs);
4901 *expr_p = NULL;
4902 return GS_ALL_DONE;
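/* For illustration (a sketch): a variable-sized aggregate copy whose
   RHS was wrapped by maybe_with_size_expr,

     x = WITH_SIZE_EXPR <y, n>;

   is emitted here as

     __builtin_memcpy (&x, &y, n);

   with the call's lhs kept only when the caller wants the value.  */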
4905 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4906 a call to __builtin_memset. In this case we know that the RHS is
4907 a CONSTRUCTOR with an empty element list. */
4909 static enum gimplify_status
4910 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4911 gimple_seq *seq_p)
4913 tree t, from, to, to_ptr;
4914 gcall *gs;
4915 location_t loc = EXPR_LOCATION (*expr_p);
4917 /* Assert our assumptions, to abort instead of producing wrong code
4918 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4919 not be immediately exposed. */
4920 from = TREE_OPERAND (*expr_p, 1);
4921 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4922 from = TREE_OPERAND (from, 0);
4924 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4925 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4927 /* Now proceed. */
4928 to = TREE_OPERAND (*expr_p, 0);
4930 to_ptr = build_fold_addr_expr_loc (loc, to);
4931 gimplify_arg (&to_ptr, seq_p, loc);
4932 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4934 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4936 if (want_value)
4938 /* tmp = memset() */
4939 t = create_tmp_var (TREE_TYPE (to_ptr));
4940 gimple_call_set_lhs (gs, t);
4941 gimplify_seq_add_stmt (seq_p, gs);
4943 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4944 return GS_ALL_DONE;
4947 gimplify_seq_add_stmt (seq_p, gs);
4948 *expr_p = NULL;
4949 return GS_ALL_DONE;
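/* For illustration (a sketch): zero-initialization from an empty
   CONSTRUCTOR,

     struct big b = {};

   can be emitted as

     __builtin_memset (&b, 0, sizeof (struct big));

   when a block clear is preferable to element-wise stores.  */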
4952 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4953 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4954 assignment. Return non-null if we detect a potential overlap. */
4956 struct gimplify_init_ctor_preeval_data
4958 /* The base decl of the lhs object. May be NULL, in which case we
4959 have to assume the lhs is indirect. */
4960 tree lhs_base_decl;
4962 /* The alias set of the lhs object. */
4963 alias_set_type lhs_alias_set;
4966 static tree
4967 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4969 struct gimplify_init_ctor_preeval_data *data
4970 = (struct gimplify_init_ctor_preeval_data *) xdata;
4971 tree t = *tp;
4973 /* If we find the base object, obviously we have overlap. */
4974 if (data->lhs_base_decl == t)
4975 return t;
4977 /* If the constructor component is indirect, determine if we have a
4978 potential overlap with the lhs. The only bits of information we
4979 have to go on at this point are addressability and alias sets. */
4980 if ((INDIRECT_REF_P (t)
4981 || TREE_CODE (t) == MEM_REF)
4982 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4983 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4984 return t;
4986 /* If the constructor component is a call, determine if it can hide a
4987 potential overlap with the lhs through an INDIRECT_REF like above.
4988 ??? Ugh - this is completely broken. In fact this whole analysis
4989 doesn't look conservative. */
4990 if (TREE_CODE (t) == CALL_EXPR)
4992 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4994 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4995 if (POINTER_TYPE_P (TREE_VALUE (type))
4996 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4997 && alias_sets_conflict_p (data->lhs_alias_set,
4998 get_alias_set
4999 (TREE_TYPE (TREE_VALUE (type)))))
5000 return t;
5003 if (IS_TYPE_OR_DECL_P (t))
5004 *walk_subtrees = 0;
5005 return NULL;
5008 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5009 force values that overlap with the lhs (as described by *DATA)
5010 into temporaries. */
5012 static void
5013 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5014 struct gimplify_init_ctor_preeval_data *data)
5016 enum gimplify_status one;
5018 /* If the value is constant, then there's nothing to pre-evaluate. */
5019 if (TREE_CONSTANT (*expr_p))
5021 /* Ensure it does not have side effects, it might contain a reference to
5022 the object we're initializing. */
5023 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
5024 return;
5027 /* If the type has non-trivial constructors, we can't pre-evaluate. */
5028 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
5029 return;
5031 /* Recurse for nested constructors. */
5032 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
5034 unsigned HOST_WIDE_INT ix;
5035 constructor_elt *ce;
5036 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
5038 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
5039 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
5041 return;
5044 /* If this is a variable sized type, we must remember the size. */
5045 maybe_with_size_expr (expr_p);
5047 /* Gimplify the constructor element to something appropriate for the rhs
5048 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
5049 the gimplifier will consider this a store to memory. Doing this
5050 gimplification now means that we won't have to deal with complicated
5051 language-specific trees, nor trees like SAVE_EXPR that can induce
5052 exponential search behavior. */
5053 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
5054 if (one == GS_ERROR)
5056 *expr_p = NULL;
5057 return;
5060 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
5061 with the lhs, since "a = { .x=a }" doesn't make sense. This will
5062 always be true for all scalars, since is_gimple_mem_rhs insists on a
5063 temporary variable for them. */
5064 if (DECL_P (*expr_p))
5065 return;
5067 /* If this is of variable size, we have no choice but to assume it doesn't
5068 overlap since we can't make a temporary for it. */
5069 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
5070 return;
5072 /* Otherwise, we must search for overlap ... */
5073 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
5074 return;
5076 /* ... and if found, force the value into a temporary. */
5077 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
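/* For illustration (a sketch; 'D.1' is an invented temporary): in

     a = (struct S) { .left = a.right };

   where 'left' and 'right' are themselves aggregates, 'a.right'
   overlaps the object being stored to, so its value is forced into a
   temporary before the element-wise stores:

     D.1 = a.right;
     a.left = D.1;  */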
5080 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
5081 a RANGE_EXPR in a CONSTRUCTOR for an array.
5083 var = lower;
5084 loop_entry:
5085 object[var] = value;
5086 if (var == upper)
5087 goto loop_exit;
5088 var = var + 1;
5089 goto loop_entry;
5090 loop_exit:
5092 We increment var _after_ the loop exit check because we might otherwise
5093 fail if upper == TYPE_MAX_VALUE (type for upper).
5095 Note that we never have to deal with SAVE_EXPRs here, because this has
5096 already been taken care of for us, in gimplify_init_ctor_preeval(). */
5098 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
5099 gimple_seq *, bool);
5101 static void
5102 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
5103 tree value, tree array_elt_type,
5104 gimple_seq *pre_p, bool cleared)
5106 tree loop_entry_label, loop_exit_label, fall_thru_label;
5107 tree var, var_type, cref, tmp;
5109 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
5110 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
5111 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
5113 /* Create and initialize the index variable. */
5114 var_type = TREE_TYPE (upper);
5115 var = create_tmp_var (var_type);
5116 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
5118 /* Add the loop entry label. */
5119 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
5121 /* Build the reference. */
5122 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5123 var, NULL_TREE, NULL_TREE);
5125 /* If we are a constructor, just call gimplify_init_ctor_eval to do
5126 the store. Otherwise just assign value to the reference. */
5128 if (TREE_CODE (value) == CONSTRUCTOR)
5129 /* NB we might have to call ourself recursively through
5130 gimplify_init_ctor_eval if the value is a constructor. */
5131 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5132 pre_p, cleared);
5133 else
5135 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
5136 != GS_ERROR)
5137 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
5140 /* We exit the loop when the index var is equal to the upper bound. */
5141 gimplify_seq_add_stmt (pre_p,
5142 gimple_build_cond (EQ_EXPR, var, upper,
5143 loop_exit_label, fall_thru_label));
5145 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
5147 /* Otherwise, increment the index var... */
5148 tmp = build2 (PLUS_EXPR, var_type, var,
5149 fold_convert (var_type, integer_one_node));
5150 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
5152 /* ...and jump back to the loop entry. */
5153 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
5155 /* Add the loop exit label. */
5156 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
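/* For illustration (a sketch): the GNU range initializer

     int a[100] = { [10 ... 19] = v };

   reaches this function with lower == 10 and upper == 19 and expands
   into the loop shown above, storing 'v' into a[var] on each
   iteration.  */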
5159 /* A subroutine of gimplify_init_constructor. Generate individual
5160 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
5161 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
5162 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5163 zeroed first. */
5165 static void
5166 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
5167 gimple_seq *pre_p, bool cleared)
5169 tree array_elt_type = NULL;
5170 unsigned HOST_WIDE_INT ix;
5171 tree purpose, value;
5173 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
5174 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
5176 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
5178 tree cref;
5180 /* NULL values are created above for gimplification errors. */
5181 if (value == NULL)
5182 continue;
5184 if (cleared && initializer_zerop (value))
5185 continue;
5187 /* ??? Here's to hoping the front end fills in all of the indices,
5188 so we don't have to figure out what's missing ourselves. */
5189 gcc_assert (purpose);
5191 /* Skip zero-sized fields, unless value has side-effects. This can
5192 happen with calls to functions returning an empty type, which
5193 we shouldn't discard. As a number of downstream passes don't
5194 expect sets of empty type fields, we rely on the gimplification of
5195 the MODIFY_EXPR we make below to drop the assignment statement. */
5196 if (!TREE_SIDE_EFFECTS (value)
5197 && TREE_CODE (purpose) == FIELD_DECL
5198 && is_empty_type (TREE_TYPE (purpose)))
5199 continue;
5201 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5202 whole range. */
5203 if (TREE_CODE (purpose) == RANGE_EXPR)
5205 tree lower = TREE_OPERAND (purpose, 0);
5206 tree upper = TREE_OPERAND (purpose, 1);
5208 /* If the lower bound is equal to upper, just treat it as if
5209 upper was the index. */
5210 if (simple_cst_equal (lower, upper))
5211 purpose = upper;
5212 else
5214 gimplify_init_ctor_eval_range (object, lower, upper, value,
5215 array_elt_type, pre_p, cleared);
5216 continue;
5220 if (array_elt_type)
5222 /* Do not use bitsizetype for ARRAY_REF indices. */
5223 if (TYPE_DOMAIN (TREE_TYPE (object)))
5224 purpose
5225 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5226 purpose);
5227 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5228 purpose, NULL_TREE, NULL_TREE);
5230 else
5232 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5233 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5234 unshare_expr (object), purpose, NULL_TREE);
5237 if (TREE_CODE (value) == CONSTRUCTOR
5238 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5239 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5240 pre_p, cleared);
5241 else
5243 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5244 gimplify_and_add (init, pre_p);
5245 ggc_free (init);
5250 /* Return the appropriate RHS predicate for this LHS. */
5252 gimple_predicate
5253 rhs_predicate_for (tree lhs)
5255 if (is_gimple_reg (lhs))
5256 return is_gimple_reg_rhs_or_call;
5257 else
5258 return is_gimple_mem_rhs_or_call;
5261 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5262 before the LHS has been gimplified. */
5264 static gimple_predicate
5265 initial_rhs_predicate_for (tree lhs)
5267 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5268 return is_gimple_reg_rhs_or_call;
5269 else
5270 return is_gimple_mem_rhs_or_call;
5273 /* Gimplify a C99 compound literal expression. This just means adding
5274 the DECL_EXPR before the current statement and using its anonymous
5275 decl instead. */
5277 static enum gimplify_status
5278 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5279 bool (*gimple_test_f) (tree),
5280 fallback_t fallback)
5282 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5283 tree decl = DECL_EXPR_DECL (decl_s);
5284 tree init = DECL_INITIAL (decl);
5285 /* Mark the decl as addressable if the compound literal
5286 expression is addressable now, otherwise it is marked too late
5287 after we gimplify the initialization expression. */
5288 if (TREE_ADDRESSABLE (*expr_p))
5289 TREE_ADDRESSABLE (decl) = 1;
5290 /* Otherwise, if we don't need an lvalue and have a literal, directly
5291 substitute it. Check if it matches the gimple predicate, as
5292 otherwise we'd generate a new temporary, and we can as well just
5293 use the decl we already have. */
5294 else if (!TREE_ADDRESSABLE (decl)
5295 && !TREE_THIS_VOLATILE (decl)
5296 && init
5297 && (fallback & fb_lvalue) == 0
5298 && gimple_test_f (init))
5300 *expr_p = init;
5301 return GS_OK;
5304 /* If the decl is not addressable, then it is being used in some
5305 expression or on the right hand side of a statement, and it can
5306 be put into a readonly data section. */
5307 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5308 TREE_READONLY (decl) = 1;
5310 /* This decl isn't mentioned in the enclosing block, so add it to the
5311 list of temps. FIXME it seems a bit of a kludge to say that
5312 anonymous artificial vars aren't pushed, but everything else is. */
5313 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5314 gimple_add_tmp_var (decl);
5316 gimplify_and_add (decl_s, pre_p);
5317 *expr_p = decl;
5318 return GS_OK;
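/* For illustration (a sketch): gimplifying

     int *p = (int []) { 1, 2, 3 };

   adds the DECL_EXPR for the anonymous array before the current
   statement, initializes it, and substitutes the anonymous decl for
   the COMPOUND_LITERAL_EXPR, so 'p' simply points at that decl.  */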
5321 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5322 return a new CONSTRUCTOR if something changed. */
5324 static tree
5325 optimize_compound_literals_in_ctor (tree orig_ctor)
5327 tree ctor = orig_ctor;
5328 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5329 unsigned int idx, num = vec_safe_length (elts);
5331 for (idx = 0; idx < num; idx++)
5333 tree value = (*elts)[idx].value;
5334 tree newval = value;
5335 if (TREE_CODE (value) == CONSTRUCTOR)
5336 newval = optimize_compound_literals_in_ctor (value);
5337 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5339 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5340 tree decl = DECL_EXPR_DECL (decl_s);
5341 tree init = DECL_INITIAL (decl);
5343 if (!TREE_ADDRESSABLE (value)
5344 && !TREE_ADDRESSABLE (decl)
5345 && init
5346 && TREE_CODE (init) == CONSTRUCTOR)
5347 newval = optimize_compound_literals_in_ctor (init);
5349 if (newval == value)
5350 continue;
5352 if (ctor == orig_ctor)
5354 ctor = copy_node (orig_ctor);
5355 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5356 elts = CONSTRUCTOR_ELTS (ctor);
5358 (*elts)[idx].value = newval;
5360 return ctor;
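/* For illustration (a sketch): given

     struct T t = { .s = (struct S) { 1, 2 } };

   the embedded compound literal's CONSTRUCTOR is substituted for the
   COMPOUND_LITERAL_EXPR, so the outer initializer is treated as
   { .s = { 1, 2 } } without the intermediate anonymous decl.  */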
5363 /* A subroutine of gimplify_modify_expr. Break out elements of a
5364 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5366 Note that we still need to clear any elements that don't have explicit
5367 initializers, so if not all elements are initialized we keep the
5368 original MODIFY_EXPR; we just remove all of the constructor elements.
5370 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5371 GS_ERROR if we would have to create a temporary when gimplifying
5372 this constructor. Otherwise, return GS_OK.
5374 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5376 static enum gimplify_status
5377 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5378 bool want_value, bool notify_temp_creation)
5380 tree object, ctor, type;
5381 enum gimplify_status ret;
5382 vec<constructor_elt, va_gc> *elts;
5383 bool cleared = false;
5384 bool is_empty_ctor = false;
5385 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5387 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5389 if (!notify_temp_creation)
5391 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5392 is_gimple_lvalue, fb_lvalue);
5393 if (ret == GS_ERROR)
5394 return ret;
5397 object = TREE_OPERAND (*expr_p, 0);
5398 ctor = TREE_OPERAND (*expr_p, 1)
5399 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5400 type = TREE_TYPE (ctor);
5401 elts = CONSTRUCTOR_ELTS (ctor);
5402 ret = GS_ALL_DONE;
5404 switch (TREE_CODE (type))
5406 case RECORD_TYPE:
5407 case UNION_TYPE:
5408 case QUAL_UNION_TYPE:
5409 case ARRAY_TYPE:
5411 /* Use readonly data for initializers of this or smaller size
5412 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5413 ratio. */
5414 const HOST_WIDE_INT min_unique_size = 64;
5415 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5416 is smaller than this, use readonly data. */
5417 const int unique_nonzero_ratio = 8;
5418 /* True if a single access of the object must be ensured. This is the
5419 case if the target is volatile, the type is non-addressable and more
5420 than one field needs to be assigned. */
5421 const bool ensure_single_access
5422 = TREE_THIS_VOLATILE (object)
5423 && !TREE_ADDRESSABLE (type)
5424 && vec_safe_length (elts) > 1;
5425 struct gimplify_init_ctor_preeval_data preeval_data;
5426 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5427 HOST_WIDE_INT num_unique_nonzero_elements;
5428 bool complete_p, valid_const_initializer;
5430 /* Aggregate types must lower constructors to initialization of
5431 individual elements. The exception is that a CONSTRUCTOR node
5432 with no elements indicates zero-initialization of the whole. */
5433 if (vec_safe_is_empty (elts))
5435 if (notify_temp_creation)
5436 return GS_OK;
5438 /* The var will be initialized and so appear on lhs of
5439 assignment; it can't be TREE_READONLY anymore. */
5440 if (VAR_P (object))
5441 TREE_READONLY (object) = 0;
5443 is_empty_ctor = true;
5444 break;
5447 /* Fetch information about the constructor to direct later processing.
5448 We might want to make static versions of it in various cases, and
5449 can only do so if it is known to be a valid constant initializer. */
5450 valid_const_initializer
5451 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5452 &num_unique_nonzero_elements,
5453 &num_ctor_elements, &complete_p);
5455 /* If a const aggregate variable is being initialized, then it
5456 should never be a loss to promote the variable to be static. */
5457 if (valid_const_initializer
5458 && num_nonzero_elements > 1
5459 && TREE_READONLY (object)
5460 && VAR_P (object)
5461 && !DECL_REGISTER (object)
5462 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5463 || DECL_MERGEABLE (object))
5464 /* For ctors that have many repeated nonzero elements
5465 represented through RANGE_EXPRs, prefer initializing
5466 those through runtime loops over copies of large amounts
5467 of data from readonly data section. */
5468 && (num_unique_nonzero_elements
5469 > num_nonzero_elements / unique_nonzero_ratio
5470 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5471 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5473 if (notify_temp_creation)
5474 return GS_ERROR;
5476 DECL_INITIAL (object) = ctor;
5477 TREE_STATIC (object) = 1;
5478 if (!DECL_NAME (object))
5479 DECL_NAME (object) = create_tmp_var_name ("C");
5480 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5482 /* ??? C++ doesn't automatically append a .<number> to the
5483 assembler name, and even when it does, it looks at FE private
5484 data structures to figure out what that number should be,
5485 which are not set for this variable. I suppose this is
5486 important for local statics for inline functions, which aren't
5487 "local" in the object file sense. So in order to get a unique
5488 TU-local symbol, we must invoke the lhd version now. */
5489 lhd_set_decl_assembler_name (object);
5491 *expr_p = NULL_TREE;
5492 break;
5495 /* The var will be initialized and so appear on lhs of
5496 assignment; it can't be TREE_READONLY anymore. */
5497 if (VAR_P (object) && !notify_temp_creation)
5498 TREE_READONLY (object) = 0;
5500 /* If there are "lots" of initialized elements, even discounting
5501 those that are not address constants (and thus *must* be
5502 computed at runtime), then partition the constructor into
5503 constant and non-constant parts. Block copy the constant
5504 parts in, then generate code for the non-constant parts. */
5505 /* TODO. There's code in cp/typeck.cc to do this. */
5507 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5508 /* store_constructor will ignore the clearing of variable-sized
5509 objects. Initializers for such objects must explicitly set
5510 every field that needs to be set. */
5511 cleared = false;
5512 else if (!complete_p)
5513 /* If the constructor isn't complete, clear the whole object
5514 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5516 ??? This ought not to be needed. For any element not present
5517 in the initializer, we should simply set it to zero. Except
5518 we'd need to *find* the elements that are not present, and that
5519 requires trickery to avoid quadratic compile-time behavior in
5520 large cases or excessive memory use in small cases. */
5521 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5522 else if (num_ctor_elements - num_nonzero_elements
5523 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5524 && num_nonzero_elements < num_ctor_elements / 4)
5525 /* If there are "lots" of zeros, it's more efficient to clear
5526 the memory and then set the nonzero elements. */
5527 cleared = true;
5528 else if (ensure_single_access && num_nonzero_elements == 0)
5529 /* If a single access to the target must be ensured and all elements
5530 are zero, then clearing is optimal, whatever their number. */
5531 cleared = true;
5532 else
5533 cleared = false;
5535 /* If there are "lots" of initialized elements, and all of them
5536 are valid address constants, then the entire initializer can
5537 be dropped to memory, and then memcpy'd out. Don't do this
5538 for sparse arrays, though, as it's more efficient to follow
5539 the standard CONSTRUCTOR behavior of memset followed by
5540 individual element initialization. Also don't do this for small
5541 all-zero initializers (which aren't big enough to merit
5542 clearing), and don't try to make bitwise copies of
5543 TREE_ADDRESSABLE types. */
5544 if (valid_const_initializer
5545 && complete_p
5546 && !(cleared || num_nonzero_elements == 0)
5547 && !TREE_ADDRESSABLE (type))
5549 HOST_WIDE_INT size = int_size_in_bytes (type);
5550 unsigned int align;
5552 /* ??? We can still get unbounded array types, at least
5553 from the C++ front end. This seems wrong, but attempt
5554 to work around it for now. */
5555 if (size < 0)
5557 size = int_size_in_bytes (TREE_TYPE (object));
5558 if (size >= 0)
5559 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5562 /* Find the maximum alignment we can assume for the object. */
5563 /* ??? Make use of DECL_OFFSET_ALIGN. */
5564 if (DECL_P (object))
5565 align = DECL_ALIGN (object);
5566 else
5567 align = TYPE_ALIGN (type);
5569 /* Do a block move either if the size is so small as to make
5570 each individual move a sub-unit move on average, or if it
5571 is so large as to make individual moves inefficient. */
5572 if (size > 0
5573 && num_nonzero_elements > 1
5574 /* For ctors that have many repeated nonzero elements
5575 represented through RANGE_EXPRs, prefer initializing
5576 those through runtime loops over copies of large amounts
5577 of data from readonly data section. */
5578 && (num_unique_nonzero_elements
5579 > num_nonzero_elements / unique_nonzero_ratio
5580 || size <= min_unique_size)
5581 && (size < num_nonzero_elements
5582 || !can_move_by_pieces (size, align)))
5584 if (notify_temp_creation)
5585 return GS_ERROR;
5587 walk_tree (&ctor, force_labels_r, NULL, NULL);
5588 ctor = tree_output_constant_def (ctor);
5589 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5590 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5591 TREE_OPERAND (*expr_p, 1) = ctor;
5593 /* This is no longer an assignment of a CONSTRUCTOR, but
5594 we still may have processing to do on the LHS. So
5595 pretend we didn't do anything here to let that happen. */
5596 return GS_UNHANDLED;
5600 /* If a single access to the target must be ensured and there are
5601 nonzero elements or the zero elements are not assigned en masse,
5602 initialize the target from a temporary. */
5603 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5605 if (notify_temp_creation)
5606 return GS_ERROR;
5608 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5609 TREE_OPERAND (*expr_p, 0) = temp;
5610 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5611 *expr_p,
5612 build2 (MODIFY_EXPR, void_type_node,
5613 object, temp));
5614 return GS_OK;
5617 if (notify_temp_creation)
5618 return GS_OK;
5620 /* If there are nonzero elements and if needed, pre-evaluate to capture
5621 elements overlapping with the lhs into temporaries. We must do this
5622 before clearing to fetch the values before they are zeroed-out. */
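/* Illustrative sketch (editorial, not from the original source): for

     x = (struct S) { x.b, x.a };

   the loads of x.b and x.a are pre-evaluated into temporaries here,
   so the stores into x cannot clobber the values being read.  */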
5623 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5625 preeval_data.lhs_base_decl = get_base_address (object);
5626 if (!DECL_P (preeval_data.lhs_base_decl))
5627 preeval_data.lhs_base_decl = NULL;
5628 preeval_data.lhs_alias_set = get_alias_set (object);
5630 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5631 pre_p, post_p, &preeval_data);
5634 bool ctor_has_side_effects_p
5635 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5637 if (cleared)
5639 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5640 Note that we still have to gimplify, in order to handle the
5641 case of variable sized types. Avoid shared tree structures. */
5642 CONSTRUCTOR_ELTS (ctor) = NULL;
5643 TREE_SIDE_EFFECTS (ctor) = 0;
5644 object = unshare_expr (object);
5645 gimplify_stmt (expr_p, pre_p);
5648 /* If we have not block cleared the object, or if there are nonzero
5649 elements in the constructor, or if the constructor has side effects,
5650 add assignments to the individual scalar fields of the object. */
5651 if (!cleared
5652 || num_nonzero_elements > 0
5653 || ctor_has_side_effects_p)
5654 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5656 *expr_p = NULL_TREE;
5658 break;
5660 case COMPLEX_TYPE:
5662 tree r, i;
5664 if (notify_temp_creation)
5665 return GS_OK;
5667 /* Extract the real and imaginary parts out of the ctor. */
5668 gcc_assert (elts->length () == 2);
5669 r = (*elts)[0].value;
5670 i = (*elts)[1].value;
5671 if (r == NULL || i == NULL)
5673 tree zero = build_zero_cst (TREE_TYPE (type));
5674 if (r == NULL)
5675 r = zero;
5676 if (i == NULL)
5677 i = zero;
5680 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5681 represent creation of a complex value. */
5682 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5684 ctor = build_complex (type, r, i);
5685 TREE_OPERAND (*expr_p, 1) = ctor;
5687 else
5689 ctor = build2 (COMPLEX_EXPR, type, r, i);
5690 TREE_OPERAND (*expr_p, 1) = ctor;
5691 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5692 pre_p,
5693 post_p,
5694 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5695 fb_rvalue);
5698 break;
5700 case VECTOR_TYPE:
5702 unsigned HOST_WIDE_INT ix;
5703 constructor_elt *ce;
5705 if (notify_temp_creation)
5706 return GS_OK;
5708 /* Vector types use CONSTRUCTOR all the way through gimple
5709 compilation as a general initializer. */
5710 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5712 enum gimplify_status tret;
5713 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5714 fb_rvalue);
5715 if (tret == GS_ERROR)
5716 ret = GS_ERROR;
5717 else if (TREE_STATIC (ctor)
5718 && !initializer_constant_valid_p (ce->value,
5719 TREE_TYPE (ce->value)))
5720 TREE_STATIC (ctor) = 0;
5722 recompute_constructor_flags (ctor);
5724 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5725 if (TREE_CONSTANT (ctor))
5727 bool constant_p = true;
5728 tree value;
5730 /* Even when ctor is constant, it might contain non-*_CST
5731 elements, such as addresses or trapping values like
5732 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5733 in VECTOR_CST nodes. */
5734 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5735 if (!CONSTANT_CLASS_P (value))
5737 constant_p = false;
5738 break;
5741 if (constant_p)
5743 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5744 break;
5748 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5749 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5751 break;
5753 default:
5754 /* So how did we get a CONSTRUCTOR for a scalar type? */
5755 gcc_unreachable ();
5758 if (ret == GS_ERROR)
5759 return GS_ERROR;
5760 /* If we have gimplified both sides of the initializer but have
5761 not emitted an assignment, do so now. */
5762 if (*expr_p
5763 /* If the type is an empty type, we don't need to emit the
5764 assignment. */
5765 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5767 tree lhs = TREE_OPERAND (*expr_p, 0);
5768 tree rhs = TREE_OPERAND (*expr_p, 1);
5769 if (want_value && object == lhs)
5770 lhs = unshare_expr (lhs);
5771 gassign *init = gimple_build_assign (lhs, rhs);
5772 gimplify_seq_add_stmt (pre_p, init);
5774 if (want_value)
5776 *expr_p = object;
5777 ret = GS_OK;
5779 else
5781 *expr_p = NULL;
5782 ret = GS_ALL_DONE;
5785 /* If the user requests to initialize automatic variables, we
5786 should initialize the padding inside the variable. Add a call to
5787 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5788 initialize the padding of the object always to zero regardless of
5789 INIT_TYPE. Note that we will not insert this call if the aggregate
5790 variable has been completely cleared already or it's initialized
5791 with an empty constructor. We cannot insert this call if the
5792 variable is a gimple register since __builtin_clear_padding will take
5793 the address of the variable. As a result, if a long double/_Complex long
5794 double variable will be spilled into stack later, its padding cannot
5795 be cleared with __builtin_clear_padding. We should clear its padding
5796 when it is spilled into memory. */
5797 if (is_init_expr
5798 && !is_gimple_reg (object)
5799 && clear_padding_type_may_have_padding_p (type)
5800 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5801 || !AGGREGATE_TYPE_P (type))
5802 && is_var_need_auto_init (object))
5803 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5805 return ret;
5808 /* Given a pointer value OP0, return a simplified version of an
5809 indirection through OP0, or NULL_TREE if no simplification is
5810 possible. This may only be applied to a rhs of an expression.
5811 Note that the resulting type may differ from the pointed-to type,
5812 but only in ways that keep it compatible from the langhooks
5813 point of view. */
5815 static tree
5816 gimple_fold_indirect_ref_rhs (tree t)
5818 return gimple_fold_indirect_ref (t);
5821 /* Subroutine of gimplify_modify_expr to do simplifications of
5822 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5823 something changes. */
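/* Illustrative sketch (editorial, not from the original source): for the
   VAR_DECL case below, given

     static const struct S s = { 1, 2 };
     struct S t = s;

   the RHS may be replaced by the unshared initializer { 1, 2 } so that
   gimplify_init_constructor can expand the assignment field by field;
   the substitution is reverted if that would force the constructor
   into memory anyway.  */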
5825 static enum gimplify_status
5826 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5827 gimple_seq *pre_p, gimple_seq *post_p,
5828 bool want_value)
5830 enum gimplify_status ret = GS_UNHANDLED;
5831 bool changed;
5835 changed = false;
5836 switch (TREE_CODE (*from_p))
5838 case VAR_DECL:
5839 /* If we're assigning from a read-only variable initialized with
5840 a constructor and not volatile, do the direct assignment from
5841 the constructor, but only if the target is not volatile either
5842 since this latter assignment might end up being done on a per
5843 field basis. However, if the target is volatile and the type
5844 is aggregate and non-addressable, gimplify_init_constructor
5845 knows that it needs to ensure a single access to the target
5846 and it will return GS_OK only in this case. */
5847 if (TREE_READONLY (*from_p)
5848 && DECL_INITIAL (*from_p)
5849 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5850 && !TREE_THIS_VOLATILE (*from_p)
5851 && (!TREE_THIS_VOLATILE (*to_p)
5852 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5853 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5855 tree old_from = *from_p;
5856 enum gimplify_status subret;
5858 /* Move the constructor into the RHS. */
5859 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5861 /* Let's see if gimplify_init_constructor will need to put
5862 it in memory. */
5863 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5864 false, true);
5865 if (subret == GS_ERROR)
5867 /* If so, revert the change. */
5868 *from_p = old_from;
5870 else
5872 ret = GS_OK;
5873 changed = true;
5876 break;
5877 case INDIRECT_REF:
5878 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5879 /* If we have code like
5881 *(const A*)(A*)&x
5883 where the type of "x" is a (possibly cv-qualified variant
5884 of "A"), treat the entire expression as identical to "x".
5885 This kind of code arises in C++ when an object is bound
5886 to a const reference, and if "x" is a TARGET_EXPR we want
5887 to take advantage of the optimization below. But not if
5888 the type is TREE_ADDRESSABLE; then C++17 says that the
5889 TARGET_EXPR needs to be a temporary. */
5890 if (tree t
5891 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5893 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5894 if (TREE_THIS_VOLATILE (t) != volatile_p)
5896 if (DECL_P (t))
5897 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5898 build_fold_addr_expr (t));
5899 if (REFERENCE_CLASS_P (t))
5900 TREE_THIS_VOLATILE (t) = volatile_p;
5902 *from_p = t;
5903 ret = GS_OK;
5904 changed = true;
5906 break;
5908 case TARGET_EXPR:
5910 /* If we are initializing something from a TARGET_EXPR, strip the
5911 TARGET_EXPR and initialize it directly, if possible. This can't
5912 be done if the initializer is void, since that implies that the
5913 temporary is set in some non-trivial way.
5915 ??? What about code that pulls out the temp and uses it
5916 elsewhere? I think that such code never uses the TARGET_EXPR as
5917 an initializer. If I'm wrong, we'll die because the temp won't
5918 have any RTL. In that case, I guess we'll need to replace
5919 references somehow. */
5920 tree init = TARGET_EXPR_INITIAL (*from_p);
5922 if (init
5923 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5924 || !TARGET_EXPR_NO_ELIDE (*from_p))
5925 && !VOID_TYPE_P (TREE_TYPE (init)))
5927 *from_p = init;
5928 ret = GS_OK;
5929 changed = true;
5932 break;
5934 case COMPOUND_EXPR:
5935 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5936 caught. */
5937 gimplify_compound_expr (from_p, pre_p, true);
5938 ret = GS_OK;
5939 changed = true;
5940 break;
5942 case CONSTRUCTOR:
5943 /* If we already made some changes, let the front end have a
5944 crack at this before we break it down. */
5945 if (ret != GS_UNHANDLED)
5946 break;
5948 /* If we're initializing from a CONSTRUCTOR, break this into
5949 individual MODIFY_EXPRs. */
5950 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5951 false);
5952 return ret;
5954 case COND_EXPR:
5955 /* If we're assigning to a non-register type, push the assignment
5956 down into the branches. This is mandatory for ADDRESSABLE types,
5957 since we cannot generate temporaries for such, but it saves a
5958 copy in other cases as well. */
5959 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5961 /* This code should mirror the code in gimplify_cond_expr. */
5962 enum tree_code code = TREE_CODE (*expr_p);
5963 tree cond = *from_p;
5964 tree result = *to_p;
5966 ret = gimplify_expr (&result, pre_p, post_p,
5967 is_gimple_lvalue, fb_lvalue);
5968 if (ret != GS_ERROR)
5969 ret = GS_OK;
5971 /* If we are going to write RESULT more than once, clear
5972 TREE_READONLY flag, otherwise we might incorrectly promote
5973 the variable to static const and initialize it at compile
5974 time in one of the branches. */
5975 if (VAR_P (result)
5976 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5977 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5978 TREE_READONLY (result) = 0;
5979 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5980 TREE_OPERAND (cond, 1)
5981 = build2 (code, void_type_node, result,
5982 TREE_OPERAND (cond, 1));
5983 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5984 TREE_OPERAND (cond, 2)
5985 = build2 (code, void_type_node, unshare_expr (result),
5986 TREE_OPERAND (cond, 2));
5988 TREE_TYPE (cond) = void_type_node;
5989 recalculate_side_effects (cond);
5991 if (want_value)
5993 gimplify_and_add (cond, pre_p);
5994 *expr_p = unshare_expr (result);
5996 else
5997 *expr_p = cond;
5998 return ret;
6000 break;
6002 case CALL_EXPR:
6003 /* For calls that return in memory, give *to_p as the CALL_EXPR's
6004 return slot so that we don't generate a temporary. */
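/* Illustrative sketch (editorial, not from the original source): for

     struct big r = f ();

   setting CALL_EXPR_RETURN_SLOT_OPT lets f construct its result
   directly into r instead of emitting "tmp = f (); r = tmp;".  */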
6005 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
6006 && aggregate_value_p (*from_p, *from_p))
6008 bool use_target;
6010 if (!(rhs_predicate_for (*to_p))(*from_p))
6011 /* If we need a temporary, *to_p isn't accurate. */
6012 use_target = false;
6013 /* It's OK to use the return slot directly unless it's an NRV. */
6014 else if (TREE_CODE (*to_p) == RESULT_DECL
6015 && DECL_NAME (*to_p) == NULL_TREE
6016 && needs_to_live_in_memory (*to_p))
6017 use_target = true;
6018 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
6019 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
6020 /* Don't force regs into memory. */
6021 use_target = false;
6022 else if (TREE_CODE (*expr_p) == INIT_EXPR)
6023 /* It's OK to use the target directly if it's being
6024 initialized. */
6025 use_target = true;
6026 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
6027 != INTEGER_CST)
6028 /* Always use the target and thus RSO for variable-sized types.
6029 GIMPLE cannot deal with a variable-sized assignment
6030 embedded in a call statement. */
6031 use_target = true;
6032 else if (TREE_CODE (*to_p) != SSA_NAME
6033 && (!is_gimple_variable (*to_p)
6034 || needs_to_live_in_memory (*to_p)))
6035 /* Don't use the original target if it's already addressable;
6036 if its address escapes, and the called function uses the
6037 NRV optimization, a conforming program could see *to_p
6038 change before the called function returns; see c++/19317.
6039 When optimizing, the return_slot pass marks more functions
6040 as safe after we have escape info. */
6041 use_target = false;
6042 else
6043 use_target = true;
6045 if (use_target)
6047 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
6048 mark_addressable (*to_p);
6051 break;
6053 case WITH_SIZE_EXPR:
6054 /* Likewise for calls that return an aggregate of non-constant size,
6055 since we would not be able to generate a temporary at all. */
6056 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
6058 *from_p = TREE_OPERAND (*from_p, 0);
6059 /* We don't change ret in this case because the
6060 WITH_SIZE_EXPR might have been added in
6061 gimplify_modify_expr, so returning GS_OK would lead to an
6062 infinite loop. */
6063 changed = true;
6065 break;
6067 /* If we're initializing from a container, push the initialization
6068 inside it. */
6069 case CLEANUP_POINT_EXPR:
6070 case BIND_EXPR:
6071 case STATEMENT_LIST:
6073 tree wrap = *from_p;
6074 tree t;
6076 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
6077 fb_lvalue);
6078 if (ret != GS_ERROR)
6079 ret = GS_OK;
6081 t = voidify_wrapper_expr (wrap, *expr_p);
6082 gcc_assert (t == *expr_p);
6084 if (want_value)
6086 gimplify_and_add (wrap, pre_p);
6087 *expr_p = unshare_expr (*to_p);
6089 else
6090 *expr_p = wrap;
6091 return GS_OK;
6094 case NOP_EXPR:
6095 /* Pull out compound literal expressions from a NOP_EXPR.
6096 Those are created in the C FE to drop qualifiers during
6097 lvalue conversion. */
6098 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
6099 && tree_ssa_useless_type_conversion (*from_p))
6101 *from_p = TREE_OPERAND (*from_p, 0);
6102 ret = GS_OK;
6103 changed = true;
6105 break;
6107 case COMPOUND_LITERAL_EXPR:
6109 tree complit = TREE_OPERAND (*expr_p, 1);
6110 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
6111 tree decl = DECL_EXPR_DECL (decl_s);
6112 tree init = DECL_INITIAL (decl);
6114 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
6115 into struct T x = { 0, 1, 2 } if the address of the
6116 compound literal has never been taken. */
6117 if (!TREE_ADDRESSABLE (complit)
6118 && !TREE_ADDRESSABLE (decl)
6119 && init)
6121 *expr_p = copy_node (*expr_p);
6122 TREE_OPERAND (*expr_p, 1) = init;
6123 return GS_OK;
6127 default:
6128 break;
6131 while (changed);
6133 return ret;
6137 /* Return true if T looks like a valid GIMPLE statement. */
6139 static bool
6140 is_gimple_stmt (tree t)
6142 const enum tree_code code = TREE_CODE (t);
6144 switch (code)
6146 case NOP_EXPR:
6147 /* The only valid NOP_EXPR is the empty statement. */
6148 return IS_EMPTY_STMT (t);
6150 case BIND_EXPR:
6151 case COND_EXPR:
6152 /* These are only valid if they're void. */
6153 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
6155 case SWITCH_EXPR:
6156 case GOTO_EXPR:
6157 case RETURN_EXPR:
6158 case LABEL_EXPR:
6159 case CASE_LABEL_EXPR:
6160 case TRY_CATCH_EXPR:
6161 case TRY_FINALLY_EXPR:
6162 case EH_FILTER_EXPR:
6163 case CATCH_EXPR:
6164 case ASM_EXPR:
6165 case STATEMENT_LIST:
6166 case OACC_PARALLEL:
6167 case OACC_KERNELS:
6168 case OACC_SERIAL:
6169 case OACC_DATA:
6170 case OACC_HOST_DATA:
6171 case OACC_DECLARE:
6172 case OACC_UPDATE:
6173 case OACC_ENTER_DATA:
6174 case OACC_EXIT_DATA:
6175 case OACC_CACHE:
6176 case OMP_PARALLEL:
6177 case OMP_FOR:
6178 case OMP_SIMD:
6179 case OMP_DISTRIBUTE:
6180 case OMP_LOOP:
6181 case OACC_LOOP:
6182 case OMP_SCAN:
6183 case OMP_SCOPE:
6184 case OMP_SECTIONS:
6185 case OMP_SECTION:
6186 case OMP_STRUCTURED_BLOCK:
6187 case OMP_SINGLE:
6188 case OMP_MASTER:
6189 case OMP_MASKED:
6190 case OMP_TASKGROUP:
6191 case OMP_ORDERED:
6192 case OMP_CRITICAL:
6193 case OMP_TASK:
6194 case OMP_TARGET:
6195 case OMP_TARGET_DATA:
6196 case OMP_TARGET_UPDATE:
6197 case OMP_TARGET_ENTER_DATA:
6198 case OMP_TARGET_EXIT_DATA:
6199 case OMP_TASKLOOP:
6200 case OMP_TEAMS:
6201 /* These are always void. */
6202 return true;
6204 case CALL_EXPR:
6205 case MODIFY_EXPR:
6206 case PREDICT_EXPR:
6207 /* These are valid regardless of their type. */
6208 return true;
6210 default:
6211 return false;
6216 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6217 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6219 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6220 other, unmodified part of the complex object just before the total store.
6221 As a consequence, if the object is still uninitialized, an undefined value
6222 will be loaded into a register, which may result in a spurious exception
6223 if the register is floating-point and the value happens to be a signaling
6224 NaN for example. Then the fully-fledged complex operations lowering pass
6225 followed by a DCE pass are necessary in order to fix things up. */
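/* Illustrative sketch (editorial, not from the original source): the
   partial store

     __real__ z = 1.0;

   is promoted to a total store by loading the untouched part first:

     D.1 = __imag__ z;
     z = COMPLEX_EXPR <1.0, D.1>;  */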
6227 static enum gimplify_status
6228 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6229 bool want_value)
6231 enum tree_code code, ocode;
6232 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6234 lhs = TREE_OPERAND (*expr_p, 0);
6235 rhs = TREE_OPERAND (*expr_p, 1);
6236 code = TREE_CODE (lhs);
6237 lhs = TREE_OPERAND (lhs, 0);
6239 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6240 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6241 suppress_warning (other);
6242 other = get_formal_tmp_var (other, pre_p);
6244 realpart = code == REALPART_EXPR ? rhs : other;
6245 imagpart = code == REALPART_EXPR ? other : rhs;
6247 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6248 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6249 else
6250 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6252 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6253 *expr_p = (want_value) ? rhs : NULL_TREE;
6255 return GS_ALL_DONE;
6258 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6260 modify_expr
6261 : varname '=' rhs
6262 | '*' ID '=' rhs
6264 PRE_P points to the list where side effects that must happen before
6265 *EXPR_P should be stored.
6267 POST_P points to the list where side effects that must happen after
6268 *EXPR_P should be stored.
6270 WANT_VALUE is nonzero iff we want to use the value of this expression
6271 in another expression. */
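/* Illustrative sketch (editorial, not from the original source): a GENERIC
   assignment such as

     a = b + c * d;

   is flattened here into GIMPLE three-address form,

     t1 = c * d;
     a = b + t1;

   with the temporary's defining statement emitted onto PRE_P.  */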
6273 static enum gimplify_status
6274 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6275 bool want_value)
6277 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6278 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6279 enum gimplify_status ret = GS_UNHANDLED;
6280 gimple *assign;
6281 location_t loc = EXPR_LOCATION (*expr_p);
6282 gimple_stmt_iterator gsi;
6284 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6285 return GS_ERROR;
6287 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6288 || TREE_CODE (*expr_p) == INIT_EXPR);
6290 /* Trying to simplify a clobber using normal logic doesn't work,
6291 so handle it here. */
6292 if (TREE_CLOBBER_P (*from_p))
6294 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6295 if (ret == GS_ERROR)
6296 return ret;
6297 gcc_assert (!want_value);
6298 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6300 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6301 pre_p, post_p);
6302 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6304 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6305 *expr_p = NULL;
6306 return GS_ALL_DONE;
6309 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6310 memset. */
6311 if (TREE_TYPE (*from_p) != error_mark_node
6312 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6313 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6314 && TREE_CODE (*from_p) == CONSTRUCTOR
6315 && CONSTRUCTOR_NELTS (*from_p) == 0)
6317 maybe_with_size_expr (from_p);
6318 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6319 return gimplify_modify_expr_to_memset (expr_p,
6320 TREE_OPERAND (*from_p, 1),
6321 want_value, pre_p);
6324 /* Insert pointer conversions required by the middle-end that are not
6325 required by the frontend. This fixes middle-end type checking in,
6326 for example, gcc.dg/redecl-6.c. */
6327 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6329 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6330 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6331 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6334 /* See if any simplifications can be done based on what the RHS is. */
6335 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6336 want_value);
6337 if (ret != GS_UNHANDLED)
6338 return ret;
6340 /* For empty types only gimplify the left hand side and right hand
6341 side as statements and throw away the assignment. Do this after
6342 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6343 types properly. */
6344 if (is_empty_type (TREE_TYPE (*from_p))
6345 && !want_value
6346 /* Don't do this for calls that return addressable types; expand_call
6347 relies on those having a lhs. */
6348 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6349 && TREE_CODE (*from_p) == CALL_EXPR))
6351 gimplify_stmt (from_p, pre_p);
6352 gimplify_stmt (to_p, pre_p);
6353 *expr_p = NULL_TREE;
6354 return GS_ALL_DONE;
6357 /* If the value being copied is of variable width, compute the length
6358 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6359 before gimplifying any of the operands so that we can resolve any
6360 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6361 the size of the expression to be copied, not of the destination, so
6362 that is what we must do here. */
6363 maybe_with_size_expr (from_p);
6365 /* As a special case, we have to temporarily allow for assignments
6366 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6367 a toplevel statement, when gimplifying the GENERIC expression
6368 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6369 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6371 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6372 prevent gimplify_expr from trying to create a new temporary for
6373 foo's LHS, we tell it that it should only gimplify until it
6374 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6375 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6376 and all we need to do here is set 'a' to be its LHS. */
6378 /* Gimplify the RHS first for C++17 and bug 71104. */
6379 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6380 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6381 if (ret == GS_ERROR)
6382 return ret;
6384 /* Then gimplify the LHS. */
6385 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6386 twice we have to make sure to gimplify into non-SSA as otherwise
6387 the abnormal edge added later will make those defs not dominate
6388 their uses.
6389 ??? Technically this applies only to the registers used in the
6390 resulting non-register *TO_P. */
6391 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6392 if (saved_into_ssa
6393 && TREE_CODE (*from_p) == CALL_EXPR
6394 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6395 gimplify_ctxp->into_ssa = false;
6396 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6397 gimplify_ctxp->into_ssa = saved_into_ssa;
6398 if (ret == GS_ERROR)
6399 return ret;
6401 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6402 guess for the predicate was wrong. */
6403 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6404 if (final_pred != initial_pred)
6406 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6407 if (ret == GS_ERROR)
6408 return ret;
6411 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6412 size as an argument to the call. */
6413 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6415 tree call = TREE_OPERAND (*from_p, 0);
6416 tree vlasize = TREE_OPERAND (*from_p, 1);
6418 if (TREE_CODE (call) == CALL_EXPR
6419 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6421 int nargs = call_expr_nargs (call);
6422 tree type = TREE_TYPE (call);
6423 tree ap = CALL_EXPR_ARG (call, 0);
6424 tree tag = CALL_EXPR_ARG (call, 1);
6425 tree aptag = CALL_EXPR_ARG (call, 2);
6426 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6427 IFN_VA_ARG, type,
6428 nargs + 1, ap, tag,
6429 aptag, vlasize);
6430 TREE_OPERAND (*from_p, 0) = newcall;
6434 /* Now see if the above changed *from_p to something we handle specially. */
6435 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6436 want_value);
6437 if (ret != GS_UNHANDLED)
6438 return ret;
6440 /* If we've got a variable-sized assignment between two lvalues (i.e. one
6441 that does not involve a call), then we can make things a bit more
6442 straightforward by converting the assignment to memcpy or memset. */
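/* Illustrative sketch (editorial, not from the original source): for a
   variable-sized type T,

     *dst = *src;

   becomes __builtin_memcpy (dst, src, size), and an assignment from an
   empty CONSTRUCTOR becomes __builtin_memset (dst, 0, size), where size
   comes from the WITH_SIZE_EXPR.  */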
6443 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6445 tree from = TREE_OPERAND (*from_p, 0);
6446 tree size = TREE_OPERAND (*from_p, 1);
6448 if (TREE_CODE (from) == CONSTRUCTOR)
6449 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6451 if (is_gimple_addressable (from))
6453 *from_p = from;
6454 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6455 pre_p);
6459 /* Transform partial stores to non-addressable complex variables into
6460 total stores. This allows us to use real instead of virtual operands
6461 for these variables, which improves optimization. */
6462 if ((TREE_CODE (*to_p) == REALPART_EXPR
6463 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6464 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6465 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6467 /* Try to alleviate the effects of the gimplification creating artificial
6468 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6469 make sure not to create DECL_DEBUG_EXPR links across functions. */
6470 if (!gimplify_ctxp->into_ssa
6471 && VAR_P (*from_p)
6472 && DECL_IGNORED_P (*from_p)
6473 && DECL_P (*to_p)
6474 && !DECL_IGNORED_P (*to_p)
6475 && decl_function_context (*to_p) == current_function_decl
6476 && decl_function_context (*from_p) == current_function_decl)
6478 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6479 DECL_NAME (*from_p)
6480 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6481 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6482 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6485 if (want_value && TREE_THIS_VOLATILE (*to_p))
6486 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6488 if (TREE_CODE (*from_p) == CALL_EXPR)
6490 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6491 instead of a GIMPLE_ASSIGN. */
6492 gcall *call_stmt;
6493 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6495 /* Gimplify internal functions created in the FEs. */
6496 int nargs = call_expr_nargs (*from_p), i;
6497 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6498 auto_vec<tree> vargs (nargs);
6500 for (i = 0; i < nargs; i++)
6502 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6503 EXPR_LOCATION (*from_p));
6504 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6506 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6507 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6508 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6510 else
6512 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6513 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6514 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6515 tree fndecl = get_callee_fndecl (*from_p);
6516 if (fndecl
6517 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6518 && call_expr_nargs (*from_p) == 3)
6519 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6520 CALL_EXPR_ARG (*from_p, 0),
6521 CALL_EXPR_ARG (*from_p, 1),
6522 CALL_EXPR_ARG (*from_p, 2));
6523 else
6525 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6528 notice_special_calls (call_stmt);
6529 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6530 gimple_call_set_lhs (call_stmt, *to_p);
6531 else if (TREE_CODE (*to_p) == SSA_NAME)
6532 /* The above is somewhat premature; avoid ICEing later for an
6533 SSA name without a definition. We may have uses in the GIMPLE IL.
6534 ??? This doesn't make it a default-def. */
6535 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6537 assign = call_stmt;
6539 else
6541 assign = gimple_build_assign (*to_p, *from_p);
6542 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6543 if (COMPARISON_CLASS_P (*from_p))
6544 copy_warning (assign, *from_p);
6547 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6549 /* We should have got an SSA name from the start. */
6550 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6551 || ! gimple_in_ssa_p (cfun));
6554 gimplify_seq_add_stmt (pre_p, assign);
6555 gsi = gsi_last (*pre_p);
6556 maybe_fold_stmt (&gsi);
6558 if (want_value)
6560 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6561 return GS_OK;
6563 else
6564 *expr_p = NULL;
6566 return GS_ALL_DONE;
6569 /* Gimplify a comparison between two variable-sized objects. Do this
6570 with a call to BUILT_IN_MEMCMP. */
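/* Illustrative sketch (editorial, not from the original source): a
   comparison OP0 == OP1 between two variable-sized objects is rewritten as

     __builtin_memcmp (&op0, &op1, size) == 0

   where size is OP0's size in bytes with any placeholders substituted.  */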
6572 static enum gimplify_status
6573 gimplify_variable_sized_compare (tree *expr_p)
6575 location_t loc = EXPR_LOCATION (*expr_p);
6576 tree op0 = TREE_OPERAND (*expr_p, 0);
6577 tree op1 = TREE_OPERAND (*expr_p, 1);
6578 tree t, arg, dest, src, expr;
6580 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6581 arg = unshare_expr (arg);
6582 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6583 src = build_fold_addr_expr_loc (loc, op1);
6584 dest = build_fold_addr_expr_loc (loc, op0);
6585 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6586 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6588 expr
6589 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6590 SET_EXPR_LOCATION (expr, loc);
6591 *expr_p = expr;
6593 return GS_OK;
6596 /* Gimplify a comparison between two aggregate objects of integral scalar
6597 mode as a comparison between the bitwise equivalent scalar values. */
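/* Illustrative sketch (editorial, not from the original source): on a
   target where

     struct S { short a, b; };

   has a 32-bit integer mode, s1 == s2 is rewritten as

     VIEW_CONVERT_EXPR<unsigned int>(s1) == VIEW_CONVERT_EXPR<unsigned int>(s2)  */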
6599 static enum gimplify_status
6600 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6602 location_t loc = EXPR_LOCATION (*expr_p);
6603 tree op0 = TREE_OPERAND (*expr_p, 0);
6604 tree op1 = TREE_OPERAND (*expr_p, 1);
6606 tree type = TREE_TYPE (op0);
6607 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6609 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6610 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6612 *expr_p
6613 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6615 return GS_OK;
6618 /* Gimplify an expression sequence. This function gimplifies each
6619 expression and rewrites the original expression with the last
6620 expression of the sequence in GIMPLE form.
6622 PRE_P points to the list where the side effects for all the
6623 expressions in the sequence will be emitted.
6625 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
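/* Illustrative sketch (editorial, not from the original source): gimplifying

     x = (f (), g (), y);

   emits f (); and g (); onto PRE_P and rewrites the RHS to y.  */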
6627 static enum gimplify_status
6628 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6630 tree t = *expr_p;
6634 tree *sub_p = &TREE_OPERAND (t, 0);
6636 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6637 gimplify_compound_expr (sub_p, pre_p, false);
6638 else
6639 gimplify_stmt (sub_p, pre_p);
6641 t = TREE_OPERAND (t, 1);
6643 while (TREE_CODE (t) == COMPOUND_EXPR);
6645 *expr_p = t;
6646 if (want_value)
6647 return GS_OK;
6648 else
6650 gimplify_stmt (expr_p, pre_p);
6651 return GS_ALL_DONE;
6655 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6656 gimplify. After gimplification, EXPR_P will point to a new temporary
6657 that holds the original value of the SAVE_EXPR node.
6659 PRE_P points to the list where side effects that must happen before
6660 *EXPR_P should be stored. */
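/* Illustrative sketch (editorial, not from the original source): the first
   visit of SAVE_EXPR <n * 4> emits

     D.1 = n * 4;

   onto PRE_P, stores D.1 back into the node and marks it
   SAVE_EXPR_RESOLVED_P, so every later visit just yields D.1.  */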
6662 static enum gimplify_status
6663 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6665 enum gimplify_status ret = GS_ALL_DONE;
6666 tree val;
6668 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6669 val = TREE_OPERAND (*expr_p, 0);
6671 if (val && TREE_TYPE (val) == error_mark_node)
6672 return GS_ERROR;
6674 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6675 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6677 /* The operand may be a void-valued expression. It is
6678 being executed only for its side-effects. */
6679 if (TREE_TYPE (val) == void_type_node)
6681 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6682 is_gimple_stmt, fb_none);
6683 val = NULL;
6685 else
6686 /* The temporary may not be an SSA name as later abnormal and EH
6687 control flow may invalidate use/def domination. When in SSA
6688 form then assume there are no such issues and SAVE_EXPRs only
6689 appear via GENERIC foldings. */
6690 val = get_initialized_tmp_var (val, pre_p, post_p,
6691 gimple_in_ssa_p (cfun));
6693 TREE_OPERAND (*expr_p, 0) = val;
6694 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6697 *expr_p = val;
6699 return ret;
6702 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6704 unary_expr
6705 : ...
6706 | '&' varname
6709 PRE_P points to the list where side effects that must happen before
6710 *EXPR_P should be stored.
6712 POST_P points to the list where side effects that must happen after
6713 *EXPR_P should be stored. */
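/* Illustrative sketch (editorial, not from the original source): '&*p'
   folds back to 'p' (with a conversion if qualifiers differ), and
   &VIEW_CONVERT_EXPR<T>(x) is rebuilt as &x converted to the type of
   the original ADDR_EXPR.  */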
6715 static enum gimplify_status
6716 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6718 tree expr = *expr_p;
6719 tree op0 = TREE_OPERAND (expr, 0);
6720 enum gimplify_status ret;
6721 location_t loc = EXPR_LOCATION (*expr_p);
6723 switch (TREE_CODE (op0))
6725 case INDIRECT_REF:
6726 do_indirect_ref:
6727 /* Check if we are dealing with an expression of the form '&*ptr'.
6728 While the front end folds away '&*ptr' into 'ptr', these
6729 expressions may be generated internally by the compiler (e.g.,
6730 builtins like __builtin_va_end). */
6731 /* Caution: the silent array decomposition semantics we allow for
6732 ADDR_EXPR mean we can't always discard the pair. */
6733 /* Gimplification of the ADDR_EXPR operand may drop
6734 cv-qualification conversions, so make sure we add them if
6735 needed. */
6737 tree op00 = TREE_OPERAND (op0, 0);
6738 tree t_expr = TREE_TYPE (expr);
6739 tree t_op00 = TREE_TYPE (op00);
6741 if (!useless_type_conversion_p (t_expr, t_op00))
6742 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6743 *expr_p = op00;
6744 ret = GS_OK;
6746 break;
6748 case VIEW_CONVERT_EXPR:
6749 /* Take the address of our operand and then convert it to the type of
6750 this ADDR_EXPR.
6752 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6753 all clear. The impact of this transformation is even less clear. */
6755 /* If the operand is a useless conversion, look through it. Doing so
6756 guarantees that the ADDR_EXPR and its operand will remain of the
6757 same type. */
6758 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6759 op0 = TREE_OPERAND (op0, 0);
6761 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6762 build_fold_addr_expr_loc (loc,
6763 TREE_OPERAND (op0, 0)));
6764 ret = GS_OK;
6765 break;
6767 case MEM_REF:
6768 if (integer_zerop (TREE_OPERAND (op0, 1)))
6769 goto do_indirect_ref;
6771 /* fall through */
6773 default:
6774 /* If we see a call to a declared builtin or see its address
6775 being taken (we can unify those cases here) then we can mark
6776 the builtin for implicit generation by GCC. */
6777 if (TREE_CODE (op0) == FUNCTION_DECL
6778 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6779 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6780 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6782 /* We use fb_either here because the C frontend sometimes takes
6783 the address of a call that returns a struct; see
6784 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6785 the implied temporary explicit. */
6787 /* Make the operand addressable. */
6788 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6789 is_gimple_addressable, fb_either);
6790 if (ret == GS_ERROR)
6791 break;
6793 /* Then mark it. Beware that it may not be possible to do so directly
6794 if a temporary has been created by the gimplification. */
6795 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6797 op0 = TREE_OPERAND (expr, 0);
6799 /* For various reasons, the gimplification of the expression
6800 may have made a new INDIRECT_REF. */
6801 if (INDIRECT_REF_P (op0)
6802 || (TREE_CODE (op0) == MEM_REF
6803 && integer_zerop (TREE_OPERAND (op0, 1))))
6804 goto do_indirect_ref;
6806 mark_addressable (TREE_OPERAND (expr, 0));
6808 /* The FEs may end up building ADDR_EXPRs early on a decl with
6809 an incomplete type. Re-build ADDR_EXPRs in canonical form
6810 here. */
6811 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6812 *expr_p = build_fold_addr_expr (op0);
6814 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6815 recompute_tree_invariant_for_addr_expr (*expr_p);
6817 /* If we re-built the ADDR_EXPR add a conversion to the original type
6818 if required. */
6819 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6820 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6822 break;
6825 return ret;
6828 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6829 value; output operands should be a gimple lvalue. */
6831 static enum gimplify_status
6832 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6834 tree expr;
6835 int noutputs;
6836 const char **oconstraints;
6837 int i;
6838 tree link;
6839 const char *constraint;
6840 bool allows_mem, allows_reg, is_inout;
6841 enum gimplify_status ret, tret;
6842 gasm *stmt;
6843 vec<tree, va_gc> *inputs;
6844 vec<tree, va_gc> *outputs;
6845 vec<tree, va_gc> *clobbers;
6846 vec<tree, va_gc> *labels;
6847 tree link_next;
6849 expr = *expr_p;
6850 noutputs = list_length (ASM_OUTPUTS (expr));
6851 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6853 inputs = NULL;
6854 outputs = NULL;
6855 clobbers = NULL;
6856 labels = NULL;
6858 ret = GS_ALL_DONE;
6859 link_next = NULL_TREE;
6860 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6862 bool ok;
6863 size_t constraint_len;
6865 link_next = TREE_CHAIN (link);
6867 oconstraints[i]
6868 = constraint
6869 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6870 constraint_len = strlen (constraint);
6871 if (constraint_len == 0)
6872 continue;
6874 ok = parse_output_constraint (&constraint, i, 0, 0,
6875 &allows_mem, &allows_reg, &is_inout);
6876 if (!ok)
6878 ret = GS_ERROR;
6879 is_inout = false;
6882 /* If we can't make copies, we can only accept memory.
6883 Similarly for VLAs. */
6884 tree outtype = TREE_TYPE (TREE_VALUE (link));
6885 if (outtype != error_mark_node
6886 && (TREE_ADDRESSABLE (outtype)
6887 || !COMPLETE_TYPE_P (outtype)
6888 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6890 if (allows_mem)
6891 allows_reg = 0;
6892 else
6894 error ("impossible constraint in %<asm%>");
6895 error ("non-memory output %d must stay in memory", i);
6896 return GS_ERROR;
6900 if (!allows_reg && allows_mem)
6901 mark_addressable (TREE_VALUE (link));
6903 tree orig = TREE_VALUE (link);
6904 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6905 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6906 fb_lvalue | fb_mayfail);
6907 if (tret == GS_ERROR)
6909 if (orig != error_mark_node)
6910 error ("invalid lvalue in %<asm%> output %d", i);
6911 ret = tret;
6914 /* If the constraint does not allow memory, make sure we gimplify
6915 it to a register if it is not one already but its base is. This
6916 happens for complex and vector components. */
6917 if (!allows_mem)
6919 tree op = TREE_VALUE (link);
6920 if (! is_gimple_val (op)
6921 && is_gimple_reg_type (TREE_TYPE (op))
6922 && is_gimple_reg (get_base_address (op)))
6924 tree tem = create_tmp_reg (TREE_TYPE (op));
6925 tree ass;
6926 if (is_inout)
6928 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6929 tem, unshare_expr (op));
6930 gimplify_and_add (ass, pre_p);
6932 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6933 gimplify_and_add (ass, post_p);
6935 TREE_VALUE (link) = tem;
6936 tret = GS_OK;
6940 vec_safe_push (outputs, link);
6941 TREE_CHAIN (link) = NULL_TREE;
6943 if (is_inout)
6945 /* An input/output operand. To give the optimizers more
6946 flexibility, split it into separate input and output
6947 operands. */
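/* Illustrative sketch (editorial, not from the original source): the
   in/out operand in

     asm ("..." : "+r" (x));

   is split below into an output "=r" (x) plus a matching input
   "0" (x), where 0 is this operand's number.  */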
6948 tree input;
6949 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6950 char buf[11];
6952 /* Turn the in/out constraint into an output constraint. */
6953 char *p = xstrdup (constraint);
6954 p[0] = '=';
6955 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6957 /* And add a matching input constraint. */
6958 if (allows_reg)
6960 sprintf (buf, "%u", i);
6962 /* If there are multiple alternatives in the constraint,
6963 handle each of them individually. Those that allow a register
6964 will be replaced with the operand number; the others will stay
6965 unchanged. */
6966 if (strchr (p, ',') != NULL)
6968 size_t len = 0, buflen = strlen (buf);
6969 char *beg, *end, *str, *dst;
6971 for (beg = p + 1;;)
6973 end = strchr (beg, ',');
6974 if (end == NULL)
6975 end = strchr (beg, '\0');
6976 if ((size_t) (end - beg) < buflen)
6977 len += buflen + 1;
6978 else
6979 len += end - beg + 1;
6980 if (*end)
6981 beg = end + 1;
6982 else
6983 break;
6986 str = (char *) alloca (len);
6987 for (beg = p + 1, dst = str;;)
6989 const char *tem;
6990 bool mem_p, reg_p, inout_p;
6992 end = strchr (beg, ',');
6993 if (end)
6994 *end = '\0';
6995 beg[-1] = '=';
6996 tem = beg - 1;
6997 parse_output_constraint (&tem, i, 0, 0,
6998 &mem_p, &reg_p, &inout_p);
6999 if (dst != str)
7000 *dst++ = ',';
7001 if (reg_p)
7003 memcpy (dst, buf, buflen);
7004 dst += buflen;
7006 else
7008 if (end)
7009 len = end - beg;
7010 else
7011 len = strlen (beg);
7012 memcpy (dst, beg, len);
7013 dst += len;
7015 if (end)
7016 beg = end + 1;
7017 else
7018 break;
7020 *dst = '\0';
7021 input = build_string (dst - str, str);
7023 else
7024 input = build_string (strlen (buf), buf);
7026 else
7027 input = build_string (constraint_len - 1, constraint + 1);
7029 free (p);
7031 input = build_tree_list (build_tree_list (NULL_TREE, input),
7032 unshare_expr (TREE_VALUE (link)));
7033 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
7037 link_next = NULL_TREE;
7038 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
7040 link_next = TREE_CHAIN (link);
7041 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7042 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7043 oconstraints, &allows_mem, &allows_reg);
7045 /* If we can't make copies, we can only accept memory. */
7046 tree intype = TREE_TYPE (TREE_VALUE (link));
7047 if (intype != error_mark_node
7048 && (TREE_ADDRESSABLE (intype)
7049 || !COMPLETE_TYPE_P (intype)
7050 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
7052 if (allows_mem)
7053 allows_reg = 0;
7054 else
7056 error ("impossible constraint in %<asm%>");
7057 error ("non-memory input %d must stay in memory", i);
7058 return GS_ERROR;
7062 /* If the operand is a memory input, it should be an lvalue. */
7063 if (!allows_reg && allows_mem)
7065 tree inputv = TREE_VALUE (link);
7066 STRIP_NOPS (inputv);
7067 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
7068 || TREE_CODE (inputv) == PREINCREMENT_EXPR
7069 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
7070 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
7071 || TREE_CODE (inputv) == MODIFY_EXPR)
7072 TREE_VALUE (link) = error_mark_node;
7073 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7074 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7075 if (tret != GS_ERROR)
7077 /* Unlike output operands, memory inputs are not guaranteed
7078 to be lvalues by the FE, and while the expressions are
7079 marked addressable there, if the input is e.g. a statement
7080 expression, temporaries in it might not end up being
7081 addressable. They might already be used in the IL and thus
7082 it is too late to make them addressable now though. */
7083 tree x = TREE_VALUE (link);
7084 while (handled_component_p (x))
7085 x = TREE_OPERAND (x, 0);
7086 if (TREE_CODE (x) == MEM_REF
7087 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
7088 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
7089 if ((VAR_P (x)
7090 || TREE_CODE (x) == PARM_DECL
7091 || TREE_CODE (x) == RESULT_DECL)
7092 && !TREE_ADDRESSABLE (x)
7093 && is_gimple_reg (x))
7095 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
7096 input_location), 0,
7097 "memory input %d is not directly addressable",
7099 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
7102 mark_addressable (TREE_VALUE (link));
7103 if (tret == GS_ERROR)
7105 if (inputv != error_mark_node)
7106 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
7107 "memory input %d is not directly addressable", i);
7108 ret = tret;
7111 else
7113 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7114 is_gimple_asm_val, fb_rvalue);
7115 if (tret == GS_ERROR)
7116 ret = tret;
7119 TREE_CHAIN (link) = NULL_TREE;
7120 vec_safe_push (inputs, link);
7123 link_next = NULL_TREE;
7124 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
7126 link_next = TREE_CHAIN (link);
7127 TREE_CHAIN (link) = NULL_TREE;
7128 vec_safe_push (clobbers, link);
7131 link_next = NULL_TREE;
7132 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
7134 link_next = TREE_CHAIN (link);
7135 TREE_CHAIN (link) = NULL_TREE;
7136 vec_safe_push (labels, link);
7139 /* Do not add ASMs with errors to the gimple IL stream. */
7140 if (ret != GS_ERROR)
7142 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
7143 inputs, outputs, clobbers, labels);
7145 /* asm is volatile if it was marked by the user as volatile or
7146 there are no outputs or this is an asm goto. */
7147 gimple_asm_set_volatile (stmt,
7148 ASM_VOLATILE_P (expr)
7149 || noutputs == 0
7150 || labels);
7151 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
7152 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
7154 gimplify_seq_add_stmt (pre_p, stmt);
7157 return ret;
7160 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
7161 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
7162 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7163 return to this function.
7165 FIXME should we complexify the prequeue handling instead? Or use flags
7166 for all the cleanups and let the optimizer tighten them up? The current
7167 code seems pretty fragile; it will break on a cleanup within any
7168 non-conditional nesting. But any such nesting would be broken, anyway;
7169 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7170 and continues out of it. We can do that at the RTL level, though, so
7171 having an optimizer to tighten up try/finally regions would be a Good
7172 Thing. */
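/* Illustrative sketch (editorial, not from the original source): a
   cleanup marker in the body sequence,

     A::A (&t);
     WITH_CLEANUP_EXPR <A::~A (&t)>
     f (t);

   is converted into

     A::A (&t);
     try { f (t); } finally { A::~A (&t); }  */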
7174 static enum gimplify_status
7175 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
7177 gimple_stmt_iterator iter;
7178 gimple_seq body_sequence = NULL;
7180 tree temp = voidify_wrapper_expr (*expr_p, NULL);
7182 /* We only care about the number of conditions between the innermost
7183 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7184 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7185 int old_conds = gimplify_ctxp->conditions;
7186 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
7187 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
7188 gimplify_ctxp->conditions = 0;
7189 gimplify_ctxp->conditional_cleanups = NULL;
7190 gimplify_ctxp->in_cleanup_point_expr = true;
7192 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
7194 gimplify_ctxp->conditions = old_conds;
7195 gimplify_ctxp->conditional_cleanups = old_cleanups;
7196 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
7198 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
7200 gimple *wce = gsi_stmt (iter);
7202 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
7204 if (gsi_one_before_end_p (iter))
7206 /* Note that gsi_insert_seq_before and gsi_remove do not
7207 scan operands, unlike some other sequence mutators. */
7208 if (!gimple_wce_cleanup_eh_only (wce))
7209 gsi_insert_seq_before_without_update (&iter,
7210 gimple_wce_cleanup (wce),
7211 GSI_SAME_STMT);
7212 gsi_remove (&iter, true);
7213 break;
7215 else
7217 gtry *gtry;
7218 gimple_seq seq;
7219 enum gimple_try_flags kind;
7221 if (gimple_wce_cleanup_eh_only (wce))
7222 kind = GIMPLE_TRY_CATCH;
7223 else
7224 kind = GIMPLE_TRY_FINALLY;
7225 seq = gsi_split_seq_after (iter);
7227 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7228 /* Do not use gsi_replace here, as it may scan operands.
7229 We want to do a simple structural modification only. */
7230 gsi_set_stmt (&iter, gtry);
7231 iter = gsi_start (gtry->eval);
7234 else
7235 gsi_next (&iter);
7238 gimplify_seq_add_seq (pre_p, body_sequence);
7239 if (temp)
7241 *expr_p = temp;
7242 return GS_OK;
7244 else
7246 *expr_p = NULL;
7247 return GS_ALL_DONE;
7251 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7252 is the cleanup action required. EH_ONLY is true if the cleanup should
7253 only be executed if an exception is thrown, not on normal exit.
7254 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7255 only valid for clobbers. */
7257 static void
7258 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7259 bool force_uncond = false)
7261 gimple *wce;
7262 gimple_seq cleanup_stmts = NULL;
7264 /* Errors can result in improperly nested cleanups, which results in
7265 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7266 if (seen_error ())
7267 return;
7269 if (gimple_conditional_context ())
7271 /* If we're in a conditional context, this is more complex. We only
7272 want to run the cleanup if we actually ran the initialization that
7273 necessitates it, but we want to run it after the end of the
7274 conditional context. So we wrap the try/finally around the
7275 condition and use a flag to determine whether or not to actually
7276 run the destructor. Thus
7278 test ? f(A()) : 0
7280 becomes (approximately)
7282 flag = 0;
7283 try {
7284 if (test) { A::A(temp); flag = 1; val = f(temp); }
7285 else { val = 0; }
7286 } finally {
7287 if (flag) A::~A(temp);
7291 if (force_uncond)
7293 gimplify_stmt (&cleanup, &cleanup_stmts);
7294 wce = gimple_build_wce (cleanup_stmts);
7295 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7297 else
7299 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7300 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7301 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7303 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7304 gimplify_stmt (&cleanup, &cleanup_stmts);
7305 wce = gimple_build_wce (cleanup_stmts);
7306 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7308 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7309 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7310 gimplify_seq_add_stmt (pre_p, ftrue);
7312 /* Because of this manipulation, and the EH edges that jump
7313 threading cannot redirect, the temporary (VAR) will appear
7314 to be used uninitialized. Don't warn. */
7315 suppress_warning (var, OPT_Wuninitialized);
7318 else
7320 gimplify_stmt (&cleanup, &cleanup_stmts);
7321 wce = gimple_build_wce (cleanup_stmts);
7322 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7323 gimplify_seq_add_stmt (pre_p, wce);
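
/* For example (illustrative, with a hypothetical temporary "t"): a
   destructor cleanup A::~A (&t) pushed outside any conditional context
   simply becomes

     WCE <<A::~A (&t)>>;

   in *PRE_P; gimplify_cleanup_point_expr above later rewrites that
   marker into a try/finally region (or try/catch, when EH_ONLY).  */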
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      gimple_seq init_pre_p = NULL;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
	  /* FIXME: this is correct only when the size of the type does
	     not depend on expressions evaluated in init.  */
	  gimplify_vla_decl (temp, &init_pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			     fb_none);
      else
	{
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			       fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}

      if (init)
	gimplify_and_add (init, &init_pre_p);

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  But only if we did not promote the
	 temporary to static storage.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && !TREE_STATIC (temp)
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}

      gimple_seq_add_seq (pre_p, init_pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
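
/* Illustrative example (not from the sources; "D.1234" is a made-up
   slot name): a C++ temporary in

     TARGET_EXPR <D.1234, A::A (&D.1234)>

   gimplifies to the call A::A (&D.1234) with D.1234 added to the local
   temps; inside a cleanup point with -fstack-reuse=all, a clobber

     D.1234 = {CLOBBER(eol)};

   is pushed as an unconditional cleanup so later passes may reuse the
   slot's stack space.  */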
/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}
/* Add FIRSTPRIVATE entries for DECL in CTX and the OpenMP parallels
   surrounding it.  If entries already exist, force them to be some flavor
   of private.  If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_TASKGROUP
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
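
/* For example (illustrative): given

     void f (int n)
     {
       int a[n];
     #pragma omp parallel shared (a)
       ...
     }

   the hidden size temporary for "a" (and the pointer replacement
   variable) must become firstprivate on the parallel so the VLA's
   bounds are known inside the region; that is what this walk does.  */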
/* Similarly for each of the type sizes of TYPE.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  The exception is a shared
     clause: nothing is privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		    && (flags & GOVD_FIRSTPRIVATE))
		   || (ctx->region_type == ORT_TARGET_DATA
		       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (INDIRECT_REF_P (t));
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (t && DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reduction clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
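
/* For example (illustrative): in

     #pragma acc parallel
     #pragma acc loop reduction (+:sum)

   the reduction on the loop causes "sum" to receive an implicit
   GOVD_MAP ("copy") entry on the enclosing parallel, so the reduced
   value is carried back out of the compute region.  */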
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
	|| octx->order_concurrent)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    if (octx->order_concurrent)
	      {
		error ("threadprivate variable %qE used in a region with"
		       " %<order(concurrent)%> clause", DECL_NAME (decl));
		inform (octx->location, "enclosing region");
	      }
	    else
	      {
		error ("threadprivate variable %qE used in target region",
		       DECL_NAME (decl));
		inform (octx->location, "enclosing target region");
	      }
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      inform (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
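
/* For example (illustrative):

     int tp;
     #pragma omp threadprivate (tp)
     ...
     #pragma omp task untied
       tp++;

   is diagnosed above ("threadprivate variable tp used in untied task"),
   since an untied task may migrate between threads and so between
   instances of tp.  */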
/* Return true if global var DECL is device resident.  */

static bool
device_resident_p (tree decl)
{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));

  if (!attr)
    return false;

  for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
    {
      tree c = TREE_VALUE (t);
      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
	return true;
    }

  return false;
}
/* Return true if DECL has an ACC DECLARE attribute.  */

static bool
is_oacc_declared (tree decl)
{
  tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
  return declared != NULL_TREE;
}
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (ctx->region_type & ORT_TASK)
    {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);

      /* The event handle specified by a detach clause should always be
	 firstprivate, regardless of the current default.  */
      if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
	kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
    }
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage
     duration declared in a namespace or global scope and referenced in the
     construct must be explicitly specified, i.e. this acts as
     default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
	    || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	   && VAR_P (decl)
	   && is_global_var (decl)
	   && (DECL_FILE_SCOPE_P (decl)
	       || (DECL_CONTEXT (decl)
		   && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
	   && !lang_GNU_Fortran ())
    default_kind = OMP_CLAUSE_DEFAULT_NONE;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
	  rtype = "taskloop";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	inform (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
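
/* For example (illustrative):

     int x = 1;
     #pragma omp parallel default(none)
       x++;

   triggers the OMP_CLAUSE_DEFAULT_NONE diagnostic above ("x not
   specified in enclosing parallel"); with default(shared) the same
   reference would simply receive GOVD_SHARED.  */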
/* Return string name for types of OpenACC constructs from ORT_* values.  */

static const char *
oacc_region_type_name (enum omp_region_type region_type)
{
  switch (region_type)
    {
    case ORT_ACC_DATA:
      return "data";
    case ORT_ACC_PARALLEL:
      return "parallel";
    case ORT_ACC_KERNELS:
      return "kernels";
    case ORT_ACC_SERIAL:
      return "serial";
    default:
      gcc_unreachable ();
    }
}
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  struct gimplify_omp_ctx *ctx_default = ctx;
  /* If no 'default' clause appears on this compute construct...  */
  if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
    {
      /* ..., see if one appears on a lexically containing 'data'
	 construct.  */
      while ((ctx_default = ctx_default->outer_context))
	{
	  if (ctx_default->region_type == ORT_ACC_DATA
	      && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
	    break;
	}
      /* If not, reset.  */
      if (!ctx_default)
	ctx_default = ctx;
    }

  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  if (omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only the variables in those blocks that are
     actually used are transferred and remapped.  The block itself will have
     a private clause to avoid transferring the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's
     COMMON or EQUIVALENCE block, it returns 'true' (as we have shared=false)
     - as only the variables in such a COMMON/EQUIVALENCE block shall be
     privatized, not the whole block.  For C++ and Fortran, it can also be
     true under certain other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
	     oacc_region_type_name (ctx->region_type));
      if (ctx_default != ctx)
	inform (ctx->location, "enclosing OpenACC %qs construct and",
		oacc_region_type_name (ctx->region_type));
      inform (ctx_default->location,
	      "enclosing OpenACC %qs construct with %qs clause",
	      oacc_region_type_name (ctx_default->region_type),
	      "default(none)");
    }
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
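
/* Illustrative summary (not normative) of the defaults chosen above:

     #pragma acc parallel
     {
       int s;         // scalar: firstprivate
       struct S agg;  // aggregate: map, i.e. present_or_copy
     }
     #pragma acc kernels
     {
       int s;         // scalar: forced map, i.e. copy
       struct S agg;  // aggregate: map, i.e. present_or_copy
     }

   With 'default(present)' on the construct (or on a lexically
   containing data construct), aggregates get force_present instead.  */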
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (DECL_ARTIFICIAL (decl))
    {
      tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
      if (attr)
	decl = TREE_VALUE (TREE_VALUE (attr));
    }

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  if (ctx->region_type & ORT_ACC)
	    /* For OpenACC, defer expansion of value to avoid transferring
	       privatized common block data instead of implicitly or
	       explicitly transferred variables which are in common blocks.  */
	    ;
	  else
	    {
	      tree value = get_base_address (DECL_VALUE_EXPR (decl));

	      if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
		return omp_notice_threadprivate_variable (ctx, decl, value);
	    }
	}

      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      if (ctx->region_type & ORT_ACC)
	/* For OpenACC, as remarked above, defer expansion.  */
	shared = false;
      else
	shared = true;

      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if ((ctx->region_type & ORT_ACC) == 0)
	    {
	      bool is_declare_target = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target)
		{
		  int gdmk;
		  enum omp_clause_defaultmap_kind kind;
		  if (lang_hooks.decls.omp_allocatable_p (decl))
		    gdmk = GDMK_ALLOCATABLE;
		  else if (lang_hooks.decls.omp_scalar_target_p (decl))
		    gdmk = GDMK_SCALAR_TARGET;
		  else if (lang_hooks.decls.omp_scalar_p (decl, false))
		    gdmk = GDMK_SCALAR;
		  else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				   == POINTER_TYPE)))
		    gdmk = GDMK_POINTER;
		  else
		    gdmk = GDMK_AGGREGATE;
		  kind = lang_hooks.decls.omp_predetermined_mapping (decl);
		  if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
		    {
		      if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
			nflags |= GOVD_FIRSTPRIVATE;
		      else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
			nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
		      else
			gcc_unreachable ();
		    }
		  else if (ctx->defaultmap[gdmk] == 0)
		    {
		      tree d = lang_hooks.decls.omp_report_decl (decl);
		      error ("%qE not specified in enclosing %<target%>",
			     DECL_NAME (d));
		      inform (ctx->location, "enclosing %<target%>");
		    }
		  else if (ctx->defaultmap[gdmk]
			   & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
		    nflags |= ctx->defaultmap[gdmk];
		  else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
		    }
		  else
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
		    }
		}
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
			  | GOVD_MAP_ALLOC_ONLY)) == flags)
	    {
	      tree type = TREE_TYPE (decl);

	      if (gimplify_omp_ctxp->target_firstprivatize_array_bases
		  && omp_privatize_by_reference (decl))
		type = TREE_TYPE (type);
	      if (!omp_mappable_type (type))
		{
		  error ("%qD referenced in target region does not have "
			 "a mappable type", decl);
		  nflags |= GOVD_MAP | GOVD_EXPLICIT;
		}
	      else
		{
		  if ((ctx->region_type & ORT_ACC) != 0)
		    nflags = oacc_default_clause (ctx, decl, flags);
		  else
		    nflags |= GOVD_MAP;
		}
	    }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions, those shouldn't be turned into simd arrays.  */
  if (ctx->region_type == ORT_SIMD
      && ctx->in_for_exprs
      && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
	  == GOVD_PRIVATE))
    flags &= ~GOVD_SEEN;

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (INDIRECT_REF_P (t));
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (ctx->region_type & ORT_ACC)
    /* For OpenACC, as remarked above, defer expansion.  */
    shared = false;
  else
    shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
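
/* For example (illustrative): in

     int s = 42;
     #pragma omp target
       use (s);

   "s" appears on no clause, so the lookup above finds nothing and the
   defaultmap entry for GDMK_SCALAR applies; with the default
   defaultmap this makes "s" firstprivate on the target construct.  */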
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
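
/* For example (illustrative):

     int i;
     #pragma omp parallel for shared (i)
     for (i = 0; i < n; i++)
       ...

   is diagnosed above ("iteration variable i should be private"), and
   the entry is downgraded to GOVD_PRIVATE so compilation can
   continue.  */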
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too;
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	{
	  if ((ctx->region_type & ORT_TARGET_DATA) != 0
	      || n == NULL
	      || (n->value & GOVD_MAP) == 0)
	    continue;
	  return false;
	}

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}

      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC)
	continue;

      break;
    }
  while (1);

  return false;
}
/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* Stop the walk and return the DECL_EXPR if it declares DATA.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Gimplify the affinity clause but effectively ignore it.
   Generate:
     var = begin;
     if ((step > 0) ? var <= end : var > end)
       locator_var_expr;  */

static void
gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
{
  tree last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree label = NULL_TREE;
  tree *last_body = NULL;
  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
      {
	tree t = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_VALUE (t) == null_pointer_node)
	      continue;
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		if (last_bind)
		  {
		    append_to_statement_list (label, last_body);
		    gimplify_and_add (last_bind, pre_p);
		    last_bind = NULL_TREE;
		  }
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
				       is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
					   is_gimple_val, fb_rvalue)
			    == GS_ERROR))
		      return;
		  }
		last_iter = TREE_PURPOSE (t);
		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
		last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
				    NULL, block);
		last_body = &BIND_EXPR_BODY (last_bind);
		tree cond = NULL_TREE;
		location_t loc = OMP_CLAUSE_LOCATION (c);
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    loc = DECL_SOURCE_LOCATION (var);
		    tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
					   var, begin);
		    append_to_statement_list_force (tem, last_body);

		    tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
						  step, build_zero_cst (TREE_TYPE (step)));
		    tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
						  var, end);
		    tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
						  var, end);
		    cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
					     cond1, cond2, cond3);
		    if (cond)
		      cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
					      boolean_type_node, cond, cond1);
		    else
		      cond = cond1;
		  }
		tree cont_label = create_artificial_label (loc);
		label = build1 (LABEL_EXPR, void_type_node, cont_label);
		tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
					    void_node,
					    build_and_jump (&cont_label));
		append_to_statement_list_force (tem, last_body);
	      }
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
					  last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return;
	    append_to_statement_list_force (TREE_VALUE (t), last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    if (last_bind)
	      {
		append_to_statement_list (label, last_body);
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return;
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_lvalue, fb_lvalue) == GS_ERROR)
	      return;
	    gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
	  }
      }
  if (last_bind)
    {
      append_to_statement_list (label, last_body);
      gimplify_and_add (last_bind, pre_p);
    }
  return;
}
8616 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8617 lower all the depend clauses by populating corresponding depend
8618 array. Returns 0 if there are no such depend clauses, or
8619 2 if all depend clauses should be removed, 1 otherwise. */
8621 static int
8622 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8624 tree c;
8625 gimple *g;
8626 size_t n[5] = { 0, 0, 0, 0, 0 };
8627 bool unused[5];
8628 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8629 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8630 size_t i, j;
8631 location_t first_loc = UNKNOWN_LOCATION;
8633 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8634 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8636 switch (OMP_CLAUSE_DEPEND_KIND (c))
8638 case OMP_CLAUSE_DEPEND_IN:
8639 i = 2;
8640 break;
8641 case OMP_CLAUSE_DEPEND_OUT:
8642 case OMP_CLAUSE_DEPEND_INOUT:
8643 i = 0;
8644 break;
8645 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8646 i = 1;
8647 break;
8648 case OMP_CLAUSE_DEPEND_DEPOBJ:
8649 i = 3;
8650 break;
8651 case OMP_CLAUSE_DEPEND_INOUTSET:
8652 i = 4;
8653 break;
8654 default:
8655 gcc_unreachable ();
8657 tree t = OMP_CLAUSE_DECL (c);
8658 if (first_loc == UNKNOWN_LOCATION)
8659 first_loc = OMP_CLAUSE_LOCATION (c);
8660 if (TREE_CODE (t) == TREE_LIST
8661 && TREE_PURPOSE (t)
8662 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8664 if (TREE_PURPOSE (t) != last_iter)
8666 tree tcnt = size_one_node;
8667 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8669 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8670 is_gimple_val, fb_rvalue) == GS_ERROR
8671 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8672 is_gimple_val, fb_rvalue) == GS_ERROR
8673 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8674 is_gimple_val, fb_rvalue) == GS_ERROR
8675 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8676 is_gimple_val, fb_rvalue)
8677 == GS_ERROR))
8678 return 2;
8679 tree var = TREE_VEC_ELT (it, 0);
8680 tree begin = TREE_VEC_ELT (it, 1);
8681 tree end = TREE_VEC_ELT (it, 2);
8682 tree step = TREE_VEC_ELT (it, 3);
8683 tree orig_step = TREE_VEC_ELT (it, 4);
8684 tree type = TREE_TYPE (var);
8685 tree stype = TREE_TYPE (step);
8686 location_t loc = DECL_SOURCE_LOCATION (var);
8687 tree endmbegin;
8688 /* Compute count for this iterator as
8689 orig_step > 0
8690 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8691 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8692 and compute product of those for the entire depend
8693 clause. */
8694 if (POINTER_TYPE_P (type))
8695 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8696 stype, end, begin);
8697 else
8698 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8699 end, begin);
8700 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8701 step,
8702 build_int_cst (stype, 1));
8703 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8704 build_int_cst (stype, 1));
8705 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8706 unshare_expr (endmbegin),
8707 stepm1);
8708 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8709 pos, step);
8710 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8711 endmbegin, stepp1);
8712 if (TYPE_UNSIGNED (stype))
8714 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8715 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8717 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8718 neg, step);
8719 step = NULL_TREE;
8720 tree cond = fold_build2_loc (loc, LT_EXPR,
8721 boolean_type_node,
8722 begin, end);
8723 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8724 build_int_cst (stype, 0));
8725 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8726 end, begin);
8727 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8728 build_int_cst (stype, 0));
8729 tree osteptype = TREE_TYPE (orig_step);
8730 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8731 orig_step,
8732 build_int_cst (osteptype, 0));
8733 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8734 cond, pos, neg);
8735 cnt = fold_convert_loc (loc, sizetype, cnt);
8736 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8737 fb_rvalue) == GS_ERROR)
8738 return 2;
8739 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8741 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8742 fb_rvalue) == GS_ERROR)
8743 return 2;
8744 last_iter = TREE_PURPOSE (t);
8745 last_count = tcnt;
8747 if (counts[i] == NULL_TREE)
8748 counts[i] = last_count;
8749 else
8750 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8751 PLUS_EXPR, counts[i], last_count);
8753 else
8754 n[i]++;
8756 for (i = 0; i < 5; i++)
8757 if (counts[i])
8758 break;
8759 if (i == 5)
8760 return 0;
8762 tree total = size_zero_node;
8763 for (i = 0; i < 5; i++)
8765 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8766 if (counts[i] == NULL_TREE)
8767 counts[i] = size_zero_node;
8768 if (n[i])
8769 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8770 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8771 fb_rvalue) == GS_ERROR)
8772 return 2;
8773 total = size_binop (PLUS_EXPR, total, counts[i]);
8776 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8777 == GS_ERROR)
8778 return 2;
8779 bool is_old = unused[1] && unused[3] && unused[4];
8780 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8781 size_int (is_old ? 1 : 4));
8782 if (!unused[4])
8783 totalpx = size_binop (PLUS_EXPR, totalpx,
8784 size_binop (MULT_EXPR, counts[4], size_int (2)));
8785 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8786 tree array = create_tmp_var_raw (type);
8787 TREE_ADDRESSABLE (array) = 1;
8788 if (!poly_int_tree_p (totalpx))
8790 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8791 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8792 if (gimplify_omp_ctxp)
8794 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8795 while (ctx
8796 && (ctx->region_type == ORT_WORKSHARE
8797 || ctx->region_type == ORT_TASKGROUP
8798 || ctx->region_type == ORT_SIMD
8799 || ctx->region_type == ORT_ACC))
8800 ctx = ctx->outer_context;
8801 if (ctx)
8802 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8804 gimplify_vla_decl (array, pre_p);
8806 else
8807 gimple_add_tmp_var (array);
8808 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8809 NULL_TREE);
8810 tree tem;
8811 if (!is_old)
8813 tem = build2 (MODIFY_EXPR, void_type_node, r,
8814 build_int_cst (ptr_type_node, 0));
8815 gimplify_and_add (tem, pre_p);
8816 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8817 NULL_TREE);
8819 tem = build2 (MODIFY_EXPR, void_type_node, r,
8820 fold_convert (ptr_type_node, total));
8821 gimplify_and_add (tem, pre_p);
8822 for (i = 1; i < (is_old ? 2 : 4); i++)
8824 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8825 NULL_TREE, NULL_TREE);
8826 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8827 gimplify_and_add (tem, pre_p);
8830 tree cnts[6];
8831 for (j = 5; j; j--)
8832 if (!unused[j - 1])
8833 break;
8834 for (i = 0; i < 5; i++)
8836 if (i && (i >= j || unused[i - 1]))
8838 cnts[i] = cnts[i - 1];
8839 continue;
8841 cnts[i] = create_tmp_var (sizetype);
8842 if (i == 0)
8843 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8844 else
8846 tree t;
8847 if (is_old)
8848 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8849 else
8850 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8851 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8852 == GS_ERROR)
8853 return 2;
8854 g = gimple_build_assign (cnts[i], t);
8856 gimple_seq_add_stmt (pre_p, g);
8858 if (unused[4])
8859 cnts[5] = NULL_TREE;
8860 else
8862 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8863 cnts[5] = create_tmp_var (sizetype);
8864 g = gimple_build_assign (cnts[i], t);
8865 gimple_seq_add_stmt (pre_p, g);
8868 last_iter = NULL_TREE;
8869 tree last_bind = NULL_TREE;
8870 tree *last_body = NULL;
8871 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8872 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8874 switch (OMP_CLAUSE_DEPEND_KIND (c))
8876 case OMP_CLAUSE_DEPEND_IN:
8877 i = 2;
8878 break;
8879 case OMP_CLAUSE_DEPEND_OUT:
8880 case OMP_CLAUSE_DEPEND_INOUT:
8881 i = 0;
8882 break;
8883 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8884 i = 1;
8885 break;
8886 case OMP_CLAUSE_DEPEND_DEPOBJ:
8887 i = 3;
8888 break;
8889 case OMP_CLAUSE_DEPEND_INOUTSET:
8890 i = 4;
8891 break;
8892 default:
8893 gcc_unreachable ();
8895 tree t = OMP_CLAUSE_DECL (c);
8896 if (TREE_CODE (t) == TREE_LIST
8897 && TREE_PURPOSE (t)
8898 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8900 if (TREE_PURPOSE (t) != last_iter)
8902 if (last_bind)
8903 gimplify_and_add (last_bind, pre_p);
8904 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8905 last_bind = build3 (BIND_EXPR, void_type_node,
8906 BLOCK_VARS (block), NULL, block);
8907 TREE_SIDE_EFFECTS (last_bind) = 1;
8908 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8909 tree *p = &BIND_EXPR_BODY (last_bind);
8910 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8912 tree var = TREE_VEC_ELT (it, 0);
8913 tree begin = TREE_VEC_ELT (it, 1);
8914 tree end = TREE_VEC_ELT (it, 2);
8915 tree step = TREE_VEC_ELT (it, 3);
8916 tree orig_step = TREE_VEC_ELT (it, 4);
8917 tree type = TREE_TYPE (var);
8918 location_t loc = DECL_SOURCE_LOCATION (var);
8919 /* Emit:
8920 var = begin;
8921 goto cond_label;
8922 beg_label:
8924 var = var + step;
8925 cond_label:
8926 if (orig_step > 0) {
8927 if (var < end) goto beg_label;
8928 } else {
8929 if (var > end) goto beg_label;
8931 for each iterator, with inner iterators added to
8932 the ... above. */
8933 tree beg_label = create_artificial_label (loc);
8934 tree cond_label = NULL_TREE;
8935 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8936 var, begin);
8937 append_to_statement_list_force (tem, p);
8938 tem = build_and_jump (&cond_label);
8939 append_to_statement_list_force (tem, p);
8940 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8941 append_to_statement_list (tem, p);
8942 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8943 NULL_TREE, NULL_TREE);
8944 TREE_SIDE_EFFECTS (bind) = 1;
8945 SET_EXPR_LOCATION (bind, loc);
8946 append_to_statement_list_force (bind, p);
8947 if (POINTER_TYPE_P (type))
8948 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8949 var, fold_convert_loc (loc, sizetype,
8950 step));
8951 else
8952 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8953 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8954 var, tem);
8955 append_to_statement_list_force (tem, p);
8956 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8957 append_to_statement_list (tem, p);
8958 tree cond = fold_build2_loc (loc, LT_EXPR,
8959 boolean_type_node,
8960 var, end);
8961 tree pos
8962 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8963 cond, build_and_jump (&beg_label),
8964 void_node);
8965 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8966 var, end);
8967 tree neg
8968 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8969 cond, build_and_jump (&beg_label),
8970 void_node);
8971 tree osteptype = TREE_TYPE (orig_step);
8972 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8973 orig_step,
8974 build_int_cst (osteptype, 0));
8975 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8976 cond, pos, neg);
8977 append_to_statement_list_force (tem, p);
8978 p = &BIND_EXPR_BODY (bind);
8980 last_body = p;
8982 last_iter = TREE_PURPOSE (t);
8983 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8985 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8986 0), last_body);
8987 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8989 if (error_operand_p (TREE_VALUE (t)))
8990 return 2;
8991 if (TREE_VALUE (t) != null_pointer_node)
8992 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8993 if (i == 4)
8995 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8996 NULL_TREE, NULL_TREE);
8997 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8998 NULL_TREE, NULL_TREE);
8999 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9000 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9001 void_type_node, r, r2);
9002 append_to_statement_list_force (tem, last_body);
9003 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9004 void_type_node, cnts[i],
9005 size_binop (PLUS_EXPR, cnts[i],
9006 size_int (1)));
9007 append_to_statement_list_force (tem, last_body);
9008 i = 5;
9010 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9011 NULL_TREE, NULL_TREE);
9012 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9013 void_type_node, r, TREE_VALUE (t));
9014 append_to_statement_list_force (tem, last_body);
9015 if (i == 5)
9017 r = build4 (ARRAY_REF, ptr_type_node, array,
9018 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9019 NULL_TREE, NULL_TREE);
9020 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9021 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9022 void_type_node, r, tem);
9023 append_to_statement_list_force (tem, last_body);
9025 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9026 void_type_node, cnts[i],
9027 size_binop (PLUS_EXPR, cnts[i],
9028 size_int (1 + (i == 5))));
9029 append_to_statement_list_force (tem, last_body);
9030 TREE_VALUE (t) = null_pointer_node;
9032 else
9034 if (last_bind)
9036 gimplify_and_add (last_bind, pre_p);
9037 last_bind = NULL_TREE;
9039 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9041 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9042 NULL, is_gimple_val, fb_rvalue);
9043 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9045 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9046 return 2;
9047 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
9048 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9049 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9050 is_gimple_val, fb_rvalue) == GS_ERROR)
9051 return 2;
9052 if (i == 4)
9054 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9055 NULL_TREE, NULL_TREE);
9056 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9057 NULL_TREE, NULL_TREE);
9058 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9059 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
9060 gimplify_and_add (tem, pre_p);
9061 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
9062 cnts[i],
9063 size_int (1)));
9064 gimple_seq_add_stmt (pre_p, g);
9065 i = 5;
9067 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9068 NULL_TREE, NULL_TREE);
9069 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
9070 gimplify_and_add (tem, pre_p);
9071 if (i == 5)
9073 r = build4 (ARRAY_REF, ptr_type_node, array,
9074 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9075 NULL_TREE, NULL_TREE);
9076 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9077 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
9078 append_to_statement_list_force (tem, last_body);
9079 gimplify_and_add (tem, pre_p);
9081 g = gimple_build_assign (cnts[i],
9082 size_binop (PLUS_EXPR, cnts[i],
9083 size_int (1 + (i == 5))));
9084 gimple_seq_add_stmt (pre_p, g);
9087 if (last_bind)
9088 gimplify_and_add (last_bind, pre_p);
9089 tree cond = boolean_false_node;
9090 if (is_old)
9092 if (!unused[0])
9093 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
9094 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
9095 size_int (2)));
9096 if (!unused[2])
9097 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9098 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9099 cnts[2],
9100 size_binop_loc (first_loc, PLUS_EXPR,
9101 totalpx,
9102 size_int (1))));
9104 else
9106 tree prev = size_int (5);
9107 for (i = 0; i < 5; i++)
9109 if (unused[i])
9110 continue;
9111 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
9112 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9113 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9114 cnts[i], unshare_expr (prev)));
9117 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
9118 build_call_expr_loc (first_loc,
9119 builtin_decl_explicit (BUILT_IN_TRAP),
9120 0), void_node);
9121 gimplify_and_add (tem, pre_p);
9122 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9123 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
9124 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9125 OMP_CLAUSE_CHAIN (c) = *list_p;
9126 *list_p = c;
9127 return 1;
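/* For illustration (hedged -- the full array layout is defined by the
   libgomp interface, not restated here): for a clause such as

     #pragma omp task depend(inoutset: a, b)

   the code above stores each object's address at index cnts[i] and, for
   the inoutset bucket (i == 5), a GOMP_DEPEND_INOUTSET marker at index
   cnts[i] + 1, which is why the counter is advanced by
   size_int (1 + (i == 5)) -- two slots per inoutset entry, one otherwise.  */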
9130 /* For a set of mappings describing an array section pointed to by a struct
9131 (or derived type, etc.) component, create an "alloc" or "release" node to
9132 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9133 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9134 be created that is inserted into the list of mapping nodes attached to the
9135 directive being processed -- not part of the sorted list of nodes after
9136 GOMP_MAP_STRUCT.
9138 CODE is the code of the directive being processed. GRP_START and GRP_END
9139 are the first and last of two or three nodes representing this array section
9140 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9141 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9142 filled with the additional node described above, if needed.
9144 This function does not add the new nodes to any lists itself. It is the
9145 responsibility of the caller to do that. */
9147 static tree
9148 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
9149 tree *extra_node)
9151 enum gomp_map_kind mkind
9152 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
9153 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
9155 gcc_assert (grp_start != grp_end);
9157 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9158 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9159 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
9160 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
9161 tree grp_mid = NULL_TREE;
9162 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
9163 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
9165 if (grp_mid
9166 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9167 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
9168 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
9169 else
9170 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
9172 if (grp_mid
9173 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9174 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
9175 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
9177 tree c3
9178 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9179 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
9180 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
9181 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
9182 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
9184 *extra_node = c3;
9186 else
9187 *extra_node = NULL_TREE;
9189 return c2;
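/* A sketch (assumed node shapes, hedged): for a two-node group

     GOMP_MAP_TO (s.ptr[0:n]) -> GOMP_MAP_ATTACH_DETACH (s.ptr)

   C2 becomes a GOMP_MAP_ALLOC (or GOMP_MAP_RELEASE on exit-data
   directives) of the group's end decl with pointer size, and *EXTRA_NODE
   stays NULL; the extra node is only synthesized when a middle
   GOMP_MAP_ALWAYS_POINTER or GOMP_MAP_ATTACH_DETACH node is present.  */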
9192 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
9193 and set *BITPOSP to the bit position of the access within that object and
9194 *POFFSETP to its byte offset. Return the containing object. */
9199 static tree
9200 extract_base_bit_offset (tree base, poly_int64 *bitposp,
9201 poly_offset_int *poffsetp)
9203 tree offset;
9204 poly_int64 bitsize, bitpos;
9205 machine_mode mode;
9206 int unsignedp, reversep, volatilep = 0;
9207 poly_offset_int poffset;
9209 STRIP_NOPS (base);
9211 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
9212 &unsignedp, &reversep, &volatilep);
9214 STRIP_NOPS (base);
9216 if (offset && poly_int_tree_p (offset))
9218 poffset = wi::to_poly_offset (offset);
9219 offset = NULL_TREE;
9221 else
9222 poffset = 0;
9224 if (maybe_ne (bitpos, 0))
9225 poffset += bits_to_bytes_round_down (bitpos);
9227 *bitposp = bitpos;
9228 *poffsetp = poffset;
9230 return base;
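/* For instance (illustrative only): for BASE == s.a[3].b with constant
   offsets, get_inner_reference locates the containing object "s";
   *BITPOSP receives the bit position of the access within "s" and
   *POFFSETP the corresponding byte offset, and "s" is returned.  */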
9233 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
9234 started processing the group yet. The TEMPORARY mark is used when we first
9235 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9236 when we have processed all the group's children (i.e. all the base pointers
9237 referred to by the group's mapping nodes, recursively). */
9239 enum omp_tsort_mark {
9240 UNVISITED,
9241 TEMPORARY,
9242 PERMANENT
9245 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
9246 but ignores side effects in the equality comparisons. */
9248 struct tree_operand_hash_no_se : tree_operand_hash
9250 static inline bool equal (const value_type &,
9251 const compare_type &);
9254 inline bool
9255 tree_operand_hash_no_se::equal (const value_type &t1,
9256 const compare_type &t2)
9258 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9261 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9262 clause. */
9264 struct omp_mapping_group {
9265 tree *grp_start;
9266 tree grp_end;
9267 omp_tsort_mark mark;
9268 /* If we've removed the group but need to reindex, mark the group as
9269 deleted. */
9270 bool deleted;
9271 struct omp_mapping_group *sibling;
9272 struct omp_mapping_group *next;
9275 DEBUG_FUNCTION void
9276 debug_mapping_group (omp_mapping_group *grp)
9278 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9279 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9280 debug_generic_expr (*grp->grp_start);
9281 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9284 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9285 isn't one. */
9287 static tree
9288 omp_get_base_pointer (tree expr)
9290 while (TREE_CODE (expr) == ARRAY_REF
9291 || TREE_CODE (expr) == COMPONENT_REF)
9292 expr = TREE_OPERAND (expr, 0);
9294 if (INDIRECT_REF_P (expr)
9295 || (TREE_CODE (expr) == MEM_REF
9296 && integer_zerop (TREE_OPERAND (expr, 1))))
9298 expr = TREE_OPERAND (expr, 0);
9299 while (TREE_CODE (expr) == COMPOUND_EXPR)
9300 expr = TREE_OPERAND (expr, 1);
9301 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9302 expr = TREE_OPERAND (expr, 0);
9303 if (TREE_CODE (expr) == SAVE_EXPR)
9304 expr = TREE_OPERAND (expr, 0);
9305 STRIP_NOPS (expr);
9306 return expr;
9309 return NULL_TREE;
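/* E.g. (illustration): for EXPR == p->arr[5].x the ARRAY_REF and
   COMPONENT_REFs are stripped down to *p and the pointer "p" is
   returned; for EXPR == s.x there is no indirection involved, so the
   function returns NULL_TREE -- no base pointer.  */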
9312 /* Remove COMPONENT_REFS and indirections from EXPR. */
9314 static tree
9315 omp_strip_components_and_deref (tree expr)
9317 while (TREE_CODE (expr) == COMPONENT_REF
9318 || INDIRECT_REF_P (expr)
9319 || (TREE_CODE (expr) == MEM_REF
9320 && integer_zerop (TREE_OPERAND (expr, 1)))
9321 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9322 || TREE_CODE (expr) == COMPOUND_EXPR)
9323 if (TREE_CODE (expr) == COMPOUND_EXPR)
9324 expr = TREE_OPERAND (expr, 1);
9325 else
9326 expr = TREE_OPERAND (expr, 0);
9328 STRIP_NOPS (expr);
9330 return expr;
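/* E.g. (illustration): (*p).a.b strips to "p" here, whereas
   omp_strip_indirections below only peels indirections off the outside
   of the expression, so it would leave (*p).a.b untouched.  */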
9333 static tree
9334 omp_strip_indirections (tree expr)
9336 while (INDIRECT_REF_P (expr)
9337 || (TREE_CODE (expr) == MEM_REF
9338 && integer_zerop (TREE_OPERAND (expr, 1))))
9339 expr = TREE_OPERAND (expr, 0);
9341 return expr;
9344 /* An attach or detach operation depends directly on the address being
9345 attached/detached. Return that address, or none if there are no
9346 attachments/detachments. */
9348 static tree
9349 omp_get_attachment (omp_mapping_group *grp)
9351 tree node = *grp->grp_start;
9353 switch (OMP_CLAUSE_MAP_KIND (node))
9355 case GOMP_MAP_TO:
9356 case GOMP_MAP_FROM:
9357 case GOMP_MAP_TOFROM:
9358 case GOMP_MAP_ALWAYS_FROM:
9359 case GOMP_MAP_ALWAYS_TO:
9360 case GOMP_MAP_ALWAYS_TOFROM:
9361 case GOMP_MAP_FORCE_FROM:
9362 case GOMP_MAP_FORCE_TO:
9363 case GOMP_MAP_FORCE_TOFROM:
9364 case GOMP_MAP_FORCE_PRESENT:
9365 case GOMP_MAP_PRESENT_ALLOC:
9366 case GOMP_MAP_PRESENT_FROM:
9367 case GOMP_MAP_PRESENT_TO:
9368 case GOMP_MAP_PRESENT_TOFROM:
9369 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9370 case GOMP_MAP_ALWAYS_PRESENT_TO:
9371 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9372 case GOMP_MAP_ALLOC:
9373 case GOMP_MAP_RELEASE:
9374 case GOMP_MAP_DELETE:
9375 case GOMP_MAP_FORCE_ALLOC:
9376 if (node == grp->grp_end)
9377 return NULL_TREE;
9379 node = OMP_CLAUSE_CHAIN (node);
9380 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9382 gcc_assert (node != grp->grp_end);
9383 node = OMP_CLAUSE_CHAIN (node);
9385 if (node)
9386 switch (OMP_CLAUSE_MAP_KIND (node))
9388 case GOMP_MAP_POINTER:
9389 case GOMP_MAP_ALWAYS_POINTER:
9390 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9391 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9392 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9393 return NULL_TREE;
9395 case GOMP_MAP_ATTACH_DETACH:
9396 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9397 return OMP_CLAUSE_DECL (node);
9399 default:
9400 internal_error ("unexpected mapping node");
9402 return error_mark_node;
9404 case GOMP_MAP_TO_PSET:
9405 gcc_assert (node != grp->grp_end);
9406 node = OMP_CLAUSE_CHAIN (node);
9407 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9408 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9409 return OMP_CLAUSE_DECL (node);
9410 else
9411 internal_error ("unexpected mapping node");
9412 return error_mark_node;
9414 case GOMP_MAP_ATTACH:
9415 case GOMP_MAP_DETACH:
9416 node = OMP_CLAUSE_CHAIN (node);
9417 if (!node || *grp->grp_start == grp->grp_end)
9418 return OMP_CLAUSE_DECL (*grp->grp_start);
9419 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9420 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9421 return OMP_CLAUSE_DECL (*grp->grp_start);
9422 else
9423 internal_error ("unexpected mapping node");
9424 return error_mark_node;
9426 case GOMP_MAP_STRUCT:
9427 case GOMP_MAP_FORCE_DEVICEPTR:
9428 case GOMP_MAP_DEVICE_RESIDENT:
9429 case GOMP_MAP_LINK:
9430 case GOMP_MAP_IF_PRESENT:
9431 case GOMP_MAP_FIRSTPRIVATE:
9432 case GOMP_MAP_FIRSTPRIVATE_INT:
9433 case GOMP_MAP_USE_DEVICE_PTR:
9434 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9435 return NULL_TREE;
9437 default:
9438 internal_error ("unexpected mapping node");
9441 return error_mark_node;
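/* Illustration (assumed node shapes): for a group

     GOMP_MAP_TOFROM (s.ptr[0:n]) -> GOMP_MAP_ATTACH_DETACH (s.ptr)

   the attachment is "s.ptr", the decl of the ATTACH_DETACH node; a group
   consisting only of GOMP_MAP_TO (x) has no attachment, so NULL_TREE is
   returned.  */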
9444 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9445 mappings, return the chain pointer to the end of that group in the list. */
9447 static tree *
9448 omp_group_last (tree *start_p)
9450 tree c = *start_p, nc, *grp_last_p = start_p;
9452 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9454 nc = OMP_CLAUSE_CHAIN (c);
9456 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9457 return grp_last_p;
9459 switch (OMP_CLAUSE_MAP_KIND (c))
9461 default:
9462 while (nc
9463 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9464 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9465 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9466 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9467 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9468 || (OMP_CLAUSE_MAP_KIND (nc)
9469 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9470 || (OMP_CLAUSE_MAP_KIND (nc)
9471 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9472 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9473 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9475 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9476 c = nc;
9477 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9478 if (nc2
9479 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9480 && (OMP_CLAUSE_MAP_KIND (nc)
9481 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9482 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9484 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9485 c = nc2;
9486 nc2 = OMP_CLAUSE_CHAIN (nc2);
9488 nc = nc2;
9490 break;
9492 case GOMP_MAP_ATTACH:
9493 case GOMP_MAP_DETACH:
9494 /* This is a weird artifact of how directives are parsed: bare attach or
9495 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9496 FIRSTPRIVATE_REFERENCE node. FIXME. */
9497 if (nc
9498 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9499 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9500 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9501 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9502 break;
9504 case GOMP_MAP_TO_PSET:
9505 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9506 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9507 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9508 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9509 break;
9511 case GOMP_MAP_STRUCT:
9513 unsigned HOST_WIDE_INT num_mappings
9514 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9515 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9516 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9517 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9518 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9519 for (unsigned i = 0; i < num_mappings; i++)
9520 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9522 break;
9525 return grp_last_p;
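/* For example (hedged): a C mapping "map(to: s.ptr[0:n])" typically
   expands to the pair

     GOMP_MAP_TO (s.ptr[0:n]) -> GOMP_MAP_ATTACH_DETACH (s.ptr)

   and, given a pointer to the first node, this function returns the
   chain slot holding the last node, i.e. *result is the ATTACH_DETACH
   node.  */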
9528 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9529 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9530 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9531 if we have at least one such group, else return NULL. */
9533 static void
9534 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9535 tree gather_sentinel)
9537 for (tree *cp = list_p;
9538 *cp && *cp != gather_sentinel;
9539 cp = &OMP_CLAUSE_CHAIN (*cp))
9541 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9542 continue;
9544 tree *grp_last_p = omp_group_last (cp);
9545 omp_mapping_group grp;
9547 grp.grp_start = cp;
9548 grp.grp_end = *grp_last_p;
9549 grp.mark = UNVISITED;
9550 grp.sibling = NULL;
9551 grp.deleted = false;
9552 grp.next = NULL;
9553 groups->safe_push (grp);
9555 cp = grp_last_p;
9559 static vec<omp_mapping_group> *
9560 omp_gather_mapping_groups (tree *list_p)
9562 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9564 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9566 if (groups->length () > 0)
9567 return groups;
9568 else
9570 delete groups;
9571 return NULL;
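/* Typical usage, mirroring gimplify_scan_omp_clauses below (a sketch):

     vec<omp_mapping_group> *groups = omp_gather_mapping_groups (list_p);
     if (groups)
       {
         hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
           = omp_index_mapping_groups (groups);
         ...sort and reorder the groups...
         delete grpmap;
         delete groups;
       }
*/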
9575 /* A pointer mapping group GRP may define a block of memory starting at some
9576 base address, and maybe also define a firstprivate pointer or firstprivate
9577 reference that points to that block. The return value is a node containing
9578 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9579 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9580 return the number of consecutive chained nodes in CHAINED. */
9582 static tree
9583 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9584 tree *firstprivate)
9586 tree node = *grp->grp_start;
9588 *firstprivate = NULL_TREE;
9589 *chained = 1;
9591 switch (OMP_CLAUSE_MAP_KIND (node))
9593 case GOMP_MAP_TO:
9594 case GOMP_MAP_FROM:
9595 case GOMP_MAP_TOFROM:
9596 case GOMP_MAP_ALWAYS_FROM:
9597 case GOMP_MAP_ALWAYS_TO:
9598 case GOMP_MAP_ALWAYS_TOFROM:
9599 case GOMP_MAP_FORCE_FROM:
9600 case GOMP_MAP_FORCE_TO:
9601 case GOMP_MAP_FORCE_TOFROM:
9602 case GOMP_MAP_FORCE_PRESENT:
9603 case GOMP_MAP_PRESENT_ALLOC:
9604 case GOMP_MAP_PRESENT_FROM:
9605 case GOMP_MAP_PRESENT_TO:
9606 case GOMP_MAP_PRESENT_TOFROM:
9607 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9608 case GOMP_MAP_ALWAYS_PRESENT_TO:
9609 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9610 case GOMP_MAP_ALLOC:
9611 case GOMP_MAP_RELEASE:
9612 case GOMP_MAP_DELETE:
9613 case GOMP_MAP_FORCE_ALLOC:
9614 case GOMP_MAP_IF_PRESENT:
9615 if (node == grp->grp_end)
9616 return node;
9618 node = OMP_CLAUSE_CHAIN (node);
9619 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9621 if (node == grp->grp_end)
9622 return *grp->grp_start;
9623 node = OMP_CLAUSE_CHAIN (node);
9625 if (node)
9626 switch (OMP_CLAUSE_MAP_KIND (node))
9628 case GOMP_MAP_POINTER:
9629 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9630 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9631 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9632 *firstprivate = OMP_CLAUSE_DECL (node);
9633 return *grp->grp_start;
9635 case GOMP_MAP_ALWAYS_POINTER:
9636 case GOMP_MAP_ATTACH_DETACH:
9637 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9638 return *grp->grp_start;
9640 default:
9641 internal_error ("unexpected mapping node");
9643 else
9644 internal_error ("unexpected mapping node");
9645 return error_mark_node;
9647 case GOMP_MAP_TO_PSET:
9648 gcc_assert (node != grp->grp_end);
9649 node = OMP_CLAUSE_CHAIN (node);
9650 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9651 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9652 return NULL_TREE;
9653 else
9654 internal_error ("unexpected mapping node");
9655 return error_mark_node;
9657 case GOMP_MAP_ATTACH:
9658 case GOMP_MAP_DETACH:
9659 node = OMP_CLAUSE_CHAIN (node);
9660 if (!node || *grp->grp_start == grp->grp_end)
9661 return NULL_TREE;
9662 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9663 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9665 /* We're mapping the base pointer itself in a bare attach or detach
9666 node. This is a side effect of how parsing works, and the mapping
9667 will be removed anyway (at least for enter/exit data directives).
9668 We should ignore the mapping here. FIXME. */
9669 return NULL_TREE;
9671 else
9672 internal_error ("unexpected mapping node");
9673 return error_mark_node;
9675 case GOMP_MAP_STRUCT:
9677 unsigned HOST_WIDE_INT num_mappings
9678 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9679 node = OMP_CLAUSE_CHAIN (node);
9680 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9681 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9683 *firstprivate = OMP_CLAUSE_DECL (node);
9684 node = OMP_CLAUSE_CHAIN (node);
9686 *chained = num_mappings;
9687 return node;
9690 case GOMP_MAP_FORCE_DEVICEPTR:
9691 case GOMP_MAP_DEVICE_RESIDENT:
9692 case GOMP_MAP_LINK:
9693 case GOMP_MAP_FIRSTPRIVATE:
9694 case GOMP_MAP_FIRSTPRIVATE_INT:
9695 case GOMP_MAP_USE_DEVICE_PTR:
9696 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9697 return NULL_TREE;
9699 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9700 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9701 case GOMP_MAP_POINTER:
9702 case GOMP_MAP_ALWAYS_POINTER:
9703 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9704 /* These shouldn't appear by themselves. */
9705 if (!seen_error ())
9706 internal_error ("unexpected pointer mapping node");
9707 return error_mark_node;
9709 default:
9710 gcc_unreachable ();
9713 return error_mark_node;
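/* E.g. (illustration): for the group

     GOMP_MAP_TO (*p) -> GOMP_MAP_FIRSTPRIVATE_POINTER (p)

   the GOMP_MAP_TO node is returned and *FIRSTPRIVATE is set to "p".
   For a GOMP_MAP_STRUCT group the first member mapping is returned
   instead, with *CHAINED reporting the number of member mappings.  */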
9716 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9717 nodes by tree_operand_hash_no_se. */
9719 static void
9720 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9721 omp_mapping_group *> *grpmap,
9722 vec<omp_mapping_group> *groups,
9723 tree reindex_sentinel)
9725 omp_mapping_group *grp;
9726 unsigned int i;
9727 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9729 FOR_EACH_VEC_ELT (*groups, i, grp)
9731 if (reindexing && *grp->grp_start == reindex_sentinel)
9732 above_hwm = true;
9734 if (reindexing && !above_hwm)
9735 continue;
9737 tree fpp;
9738 unsigned int chained;
9739 tree node = omp_group_base (grp, &chained, &fpp);
9741 if (node == error_mark_node || (!node && !fpp))
9742 continue;
9744 for (unsigned j = 0;
9745 node && j < chained;
9746 node = OMP_CLAUSE_CHAIN (node), j++)
9748 tree decl = OMP_CLAUSE_DECL (node);
9749 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9750 meaning node-hash lookups don't work. This is a workaround for
9751 that, but ideally we should just create the INDIRECT_REF at
9752 source instead. FIXME. */
9753 if (TREE_CODE (decl) == MEM_REF
9754 && integer_zerop (TREE_OPERAND (decl, 1)))
9755 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9757 omp_mapping_group **prev = grpmap->get (decl);
9759 if (prev && *prev == grp)
9760 /* Empty. */;
9761 else if (prev)
9763 /* Mapping the same thing twice is normally diagnosed as an error,
9764 but can happen under some circumstances, e.g. in pr99928-16.c,
9765 the directive:
9767 #pragma omp target simd reduction(+:a[:3]) \
9768 map(always, tofrom: a[:6])
9771 will result in two "a[0]" mappings (of different sizes). */
9773 grp->sibling = (*prev)->sibling;
9774 (*prev)->sibling = grp;
9776 else
9777 grpmap->put (decl, grp);
9780 if (!fpp)
9781 continue;
9783 omp_mapping_group **prev = grpmap->get (fpp);
9784 if (prev && *prev != grp)
9786 grp->sibling = (*prev)->sibling;
9787 (*prev)->sibling = grp;
9789 else
9790 grpmap->put (fpp, grp);
9794 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9795 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9797 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9798 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9800 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9802 return grpmap;
9805 /* Rebuild group map from partially-processed clause list (during
9806 omp_build_struct_sibling_lists). We have already processed nodes up until
9807 a high-water mark (HWM). This is a bit tricky because the list is being
9808 reordered as it is scanned, but we know:
9810 1. The list after HWM has not been touched yet, so we can reindex it safely.
9812 2. The list before and including HWM has been altered, but remains
9813 well-formed throughout the sibling-list building operation.
9815 So we can do the reindex operation in two parts, on the processed and
9816 then the unprocessed halves of the list. */
9818 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9819 omp_reindex_mapping_groups (tree *list_p,
9820 vec<omp_mapping_group> *groups,
9821 vec<omp_mapping_group> *processed_groups,
9822 tree sentinel)
9824 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9825 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9827 processed_groups->truncate (0);
9829 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9830 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9831 if (sentinel)
9832 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9834 return grpmap;
9837 /* Find the immediately-containing struct for a component ref (etc.)
9838 expression EXPR. */
9840 static tree
9841 omp_containing_struct (tree expr)
9843 tree expr0 = expr;
9845 STRIP_NOPS (expr);
9847 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9848 component ref. */
9849 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9850 return expr0;
9852 while (TREE_CODE (expr) == ARRAY_REF)
9853 expr = TREE_OPERAND (expr, 0);
9855 if (TREE_CODE (expr) == COMPONENT_REF)
9856 expr = TREE_OPERAND (expr, 0);
9858 return expr;
9861 /* Return TRUE if DECL describes a component that is part of a whole structure
9862 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9863 that maps that structure, if present. */
9865 static bool
9866 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9867 omp_mapping_group *> *grpmap,
9868 tree decl,
9869 omp_mapping_group **mapped_by_group)
9871 tree wsdecl = NULL_TREE;
9873 *mapped_by_group = NULL;
9875 while (true)
9877 wsdecl = omp_containing_struct (decl);
9878 if (wsdecl == decl)
9879 break;
9880 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9881 if (!wholestruct
9882 && TREE_CODE (wsdecl) == MEM_REF
9883 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9885 tree deref = TREE_OPERAND (wsdecl, 0);
9886 deref = build_fold_indirect_ref (deref);
9887 wholestruct = grpmap->get (deref);
9889 if (wholestruct)
9891 *mapped_by_group = *wholestruct;
9892 return true;
9894 decl = wsdecl;
9897 return false;
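/* Illustration: with "map(s) map(s.a[0:n])" on one directive, a lookup
   for the component "s.a" walks outwards via omp_containing_struct,
   finds the group that maps the whole of "s" in GRPMAP, sets
   *MAPPED_BY_GROUP to it and returns TRUE.  */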
9900 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9901 FALSE on error. */
9903 static bool
9904 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9905 vec<omp_mapping_group> *groups,
9906 hash_map<tree_operand_hash_no_se,
9907 omp_mapping_group *> *grpmap,
9908 omp_mapping_group *grp)
9910 if (grp->mark == PERMANENT)
9911 return true;
9912 if (grp->mark == TEMPORARY)
9914 fprintf (stderr, "when processing group:\n");
9915 debug_mapping_group (grp);
9916 internal_error ("base pointer cycle detected");
9917 return false;
9919 grp->mark = TEMPORARY;
9921 tree attaches_to = omp_get_attachment (grp);
9923 if (attaches_to)
9925 omp_mapping_group **basep = grpmap->get (attaches_to);
9927 if (basep && *basep != grp)
9929 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9930 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9931 return false;
9935 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9937 while (decl)
9939 tree base = omp_get_base_pointer (decl);
9941 if (!base)
9942 break;
9944 omp_mapping_group **innerp = grpmap->get (base);
9945 omp_mapping_group *wholestruct;
9947 /* We should treat whole-structure mappings as if all (pointer, in this
9948 case) members are mapped as individual list items. Check if we have
9949 such a whole-structure mapping, if we don't have an explicit reference
9950 to the pointer member itself. */
9951 if (!innerp
9952 && TREE_CODE (base) == COMPONENT_REF
9953 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9954 innerp = &wholestruct;
9956 if (innerp && *innerp != grp)
9958 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9959 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9960 return false;
9961 break;
9964 decl = base;
9967 grp->mark = PERMANENT;
9969 /* Emit grp to output list. */
9971 **outlist = grp;
9972 *outlist = &grp->next;
9974 return true;
9977 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9978 before mappings that use those pointers. This is an implementation of the
9979 depth-first search algorithm, described e.g. at:
9981 https://en.wikipedia.org/wiki/Topological_sorting
9982 */
9984 static omp_mapping_group *
9985 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9986 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
9987 *grpmap)
9989 omp_mapping_group *grp, *outlist = NULL, **cursor;
9990 unsigned int i;
9992 cursor = &outlist;
9994 FOR_EACH_VEC_ELT (*groups, i, grp)
9996 if (grp->mark != PERMANENT)
9997 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9998 return NULL;
10001 return outlist;
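/* E.g. (hedged): for "map(p[0:n]) map(to: p)" gathered as two groups,
   the group mapping the base pointer "p" is emitted ahead of the group
   that attaches to it, regardless of the order in which the clauses
   were written.  */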
10004 /* Split INLIST into two parts, moving groups corresponding to
10005 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
10006 The former list is then appended to the latter. Each sub-list retains the
10007 order of the original list.
10008 Note that ATTACH nodes are later moved to the end of the list in
10009 gimplify_adjust_omp_clauses, for target regions. */
10011 static omp_mapping_group *
10012 omp_segregate_mapping_groups (omp_mapping_group *inlist)
10014 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
10015 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
10017 for (omp_mapping_group *w = inlist; w;)
10019 tree c = *w->grp_start;
10020 omp_mapping_group *next = w->next;
10022 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
10024 switch (OMP_CLAUSE_MAP_KIND (c))
10026 case GOMP_MAP_ALLOC:
10027 case GOMP_MAP_RELEASE:
10028 case GOMP_MAP_DELETE:
10029 *ard_tail = w;
10030 w->next = NULL;
10031 ard_tail = &w->next;
10032 break;
10034 default:
10035 *tf_tail = w;
10036 w->next = NULL;
10037 tf_tail = &w->next;
10040 w = next;
10043 /* Now splice the lists together... */
10044 *tf_tail = ard_groups;
10046 return tf_groups;
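/* Illustration: an input ordering

     {alloc: a} {to: b} {release: c} {from: d}

   comes back as {to: b} {from: d} {alloc: a} {release: c} -- the
   ALLOC/RELEASE/DELETE groups keep their relative order but are moved
   after the data-movement groups.  */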
10049 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10050 those groups based on the output list of omp_tsort_mapping_groups --
10051 singly-linked, threaded through each element's NEXT pointer starting at
10052 HEAD. Each list element appears exactly once in that linked list.
10054 Each element of GROUPS may correspond to one or several mapping nodes.
10055 Node groups are kept together, and the reordered list reuses the list
10056 positions that the original groups occupied.
10057 Hence if we have e.g.
10059 {to ptr ptr} firstprivate {tofrom ptr} ...
10060 ^ ^ ^
10061 first group non-"map" second group
10063 and say the second group contains a base pointer for the first so must be
10064 moved before it, the resulting list will contain:
10066 {tofrom ptr} firstprivate {to ptr ptr} ...
10067 ^ prev. second group ^ prev. first group
10068 */
10070 static tree *
10071 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
10072 omp_mapping_group *head,
10073 tree *list_p)
10075 omp_mapping_group *grp;
10076 unsigned int i;
10077 unsigned numgroups = groups->length ();
10078 auto_vec<tree> old_heads (numgroups);
10079 auto_vec<tree *> old_headps (numgroups);
10080 auto_vec<tree> new_heads (numgroups);
10081 auto_vec<tree> old_succs (numgroups);
10082 bool map_at_start = (list_p == (*groups)[0].grp_start);
10084 tree *new_grp_tail = NULL;
10086 /* Stash the start & end nodes of each mapping group before we start
10087 modifying the list. */
10088 FOR_EACH_VEC_ELT (*groups, i, grp)
10090 old_headps.quick_push (grp->grp_start);
10091 old_heads.quick_push (*grp->grp_start);
10092 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
10095 /* And similarly, the heads of the groups in the order we want to rearrange
10096 the list to. */
10097 for (omp_mapping_group *w = head; w; w = w->next)
10098 new_heads.quick_push (*w->grp_start);
10100 FOR_EACH_VEC_ELT (*groups, i, grp)
10102 gcc_assert (head);
10104 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
10106 /* a {b c d} {e f g} h i j (original)
10108 a {k l m} {e f g} h i j (inserted new group on last iter)
10110 a {k l m} {n o p} h i j (this time, chain last group to new one)
10111 ^new_grp_tail
10112 */
10113 *new_grp_tail = new_heads[i];
10115 else if (new_grp_tail)
10117 /* a {b c d} e {f g h} i j k (original)
10119 a {l m n} e {f g h} i j k (gap after last iter's group)
10121 a {l m n} e {o p q} h i j (chain last group to old successor)
10122 ^new_grp_tail
10123 */
10124 *new_grp_tail = old_succs[i - 1];
10125 *old_headps[i] = new_heads[i];
10127 else
10129 /* The first inserted group -- point to new group, and leave end
10130 open.
10131 a {b c d} e f
10133 a {g h i...
10134 */
10135 *grp->grp_start = new_heads[i];
10138 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
10140 head = head->next;
10143 if (new_grp_tail)
10144 *new_grp_tail = old_succs[numgroups - 1];
10146 gcc_assert (!head);
10148 return map_at_start ? (*groups)[0].grp_start : list_p;
10151 /* DECL is supposed to have lastprivate semantics in the outer contexts
10152 of combined/composite constructs, starting with OCTX.
10153 Add the needed lastprivate, shared or map clause if no data-sharing or
10154 mapping clause is present. IMPLICIT_P is true if it is an implicit
10155 clause (IV on simd), in which case the lastprivate will not be
10156 copied to some constructs. */
10158 static void
10159 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
10160 tree decl, bool implicit_p)
10162 struct gimplify_omp_ctx *orig_octx = octx;
10163 for (; octx; octx = octx->outer_context)
10165 if ((octx->region_type == ORT_COMBINED_PARALLEL
10166 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
10167 && splay_tree_lookup (octx->variables,
10168 (splay_tree_key) decl) == NULL)
10170 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
10171 continue;
10173 if ((octx->region_type & ORT_TASK) != 0
10174 && octx->combined_loop
10175 && splay_tree_lookup (octx->variables,
10176 (splay_tree_key) decl) == NULL)
10178 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10179 continue;
10181 if (implicit_p
10182 && octx->region_type == ORT_WORKSHARE
10183 && octx->combined_loop
10184 && splay_tree_lookup (octx->variables,
10185 (splay_tree_key) decl) == NULL
10186 && octx->outer_context
10187 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
10188 && splay_tree_lookup (octx->outer_context->variables,
10189 (splay_tree_key) decl) == NULL)
10191 octx = octx->outer_context;
10192 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10193 continue;
10195 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
10196 && octx->combined_loop
10197 && splay_tree_lookup (octx->variables,
10198 (splay_tree_key) decl) == NULL
10199 && !omp_check_private (octx, decl, false))
10201 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10202 continue;
10204 if (octx->region_type == ORT_COMBINED_TARGET)
10206 splay_tree_node n = splay_tree_lookup (octx->variables,
10207 (splay_tree_key) decl);
10208 if (n == NULL)
10210 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10211 octx = octx->outer_context;
10213 else if (!implicit_p
10214 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
10216 n->value &= ~(GOVD_FIRSTPRIVATE
10217 | GOVD_FIRSTPRIVATE_IMPLICIT
10218 | GOVD_EXPLICIT);
10219 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10220 octx = octx->outer_context;
10223 break;
10225 if (octx && (implicit_p || octx != orig_octx))
10226 omp_notice_variable (octx, decl, true);
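/* For instance (a rough sketch): for the iteration variable of

     #pragma omp target parallel for simd

   this adds, outwards from the simd context, GOVD_LASTPRIVATE on the
   combined worksharing loop, GOVD_SHARED on the combined parallel and
   GOVD_MAP on the combined target, unless explicit clauses already
   cover the decl.  */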
10229 /* If we have mappings INNER and OUTER, where INNER is a component access and
10230 OUTER is a mapping of the whole containing struct, check that the mappings
10231 are compatible. We'll be deleting the inner mapping, so we need to make
10232 sure the outer mapping does (at least) the same transfers to/from the device
10233 as the inner mapping. */
10235 bool
10236 omp_check_mapping_compatibility (location_t loc,
10237 omp_mapping_group *outer,
10238 omp_mapping_group *inner)
10240 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
10242 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
10243 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
10245 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
10246 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
10248 if (outer_kind == inner_kind)
10249 return true;
10251 switch (outer_kind)
10253 case GOMP_MAP_ALWAYS_TO:
10254 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10255 || inner_kind == GOMP_MAP_ALLOC
10256 || inner_kind == GOMP_MAP_TO)
10257 return true;
10258 break;
10260 case GOMP_MAP_ALWAYS_FROM:
10261 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10262 || inner_kind == GOMP_MAP_ALLOC
10263 || inner_kind == GOMP_MAP_FROM)
10264 return true;
10265 break;
10267 case GOMP_MAP_TO:
10268 case GOMP_MAP_FROM:
10269 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10270 || inner_kind == GOMP_MAP_ALLOC)
10271 return true;
10272 break;
10274 case GOMP_MAP_ALWAYS_TOFROM:
10275 case GOMP_MAP_TOFROM:
10276 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10277 || inner_kind == GOMP_MAP_ALLOC
10278 || inner_kind == GOMP_MAP_TO
10279 || inner_kind == GOMP_MAP_FROM
10280 || inner_kind == GOMP_MAP_TOFROM)
10281 return true;
10282 break;
10284 default:
10285 break;
10288 error_at (loc, "data movement for component %qE is not compatible with "
10289 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10290 OMP_CLAUSE_DECL (first_outer));
10292 return false;
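/* E.g.: an inner "map(to: s.a)" is compatible with an outer
   "map(tofrom: s)", since the outer transfer subsumes the inner one;
   an inner "map(from: s.a)" under an outer "map(always, to: s)" is not,
   and is rejected with the diagnostic above.  */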
10295 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10296 clause dependencies we handle for now are struct element mappings and
10297 whole-struct mappings on the same directive, and duplicate clause
10298 detection. */
10300 void
10301 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10302 hash_map<tree_operand_hash_no_se,
10303 omp_mapping_group *> *grpmap)
10305 int i;
10306 omp_mapping_group *grp;
10307 hash_set<tree_operand_hash> *seen_components = NULL;
10308 hash_set<tree_operand_hash> *shown_error = NULL;
10310 FOR_EACH_VEC_ELT (*groups, i, grp)
10312 tree grp_end = grp->grp_end;
10313 tree decl = OMP_CLAUSE_DECL (grp_end);
10315 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10317 if (DECL_P (grp_end))
10318 continue;
10320 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10321 while (TREE_CODE (c) == ARRAY_REF)
10322 c = TREE_OPERAND (c, 0);
10323 if (TREE_CODE (c) != COMPONENT_REF)
10324 continue;
10325 if (!seen_components)
10326 seen_components = new hash_set<tree_operand_hash> ();
10327 if (!shown_error)
10328 shown_error = new hash_set<tree_operand_hash> ();
10329 if (seen_components->contains (c)
10330 && !shown_error->contains (c))
10332 error_at (OMP_CLAUSE_LOCATION (grp_end),
10333 "%qE appears more than once in map clauses",
10334 OMP_CLAUSE_DECL (grp_end));
10335 shown_error->add (c);
10337 else
10338 seen_components->add (c);
10340 omp_mapping_group *struct_group;
10341 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10342 && *grp->grp_start == grp_end)
10344 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10345 struct_group, grp);
10346 /* Remove the whole of this mapping -- redundant. */
10347 grp->deleted = true;
10351 if (seen_components)
10352 delete seen_components;
10353 if (shown_error)
10354 delete shown_error;
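/* Illustration: "#pragma acc enter data copyin(s.a, s.a)" triggers the
   "appears more than once" error on the second component mapping, while
   "copyin(s, s.a)" drops the redundant single-node group for "s.a"
   after checking it against the whole-struct mapping of "s".  */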
10357 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10358 is linked to the previous node pointed to by INSERT_AT. */
10360 static tree *
10361 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10363 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10364 *insert_at = newnode;
10365 return &OMP_CLAUSE_CHAIN (newnode);
10368 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10369 pointed to by chain MOVE_AFTER instead. */
10371 static void
10372 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10374 gcc_assert (node == *old_pos);
10375 *old_pos = OMP_CLAUSE_CHAIN (node);
10376 OMP_CLAUSE_CHAIN (node) = *move_after;
10377 *move_after = node;
10380 /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
10381 LAST_NODE to after the MOVE_AFTER chain. Similar to the function below, but no
10382 new nodes are prepended to the list before splicing into the new position.
10383 Return the position we should continue scanning the list at, or NULL to
10384 stay where we were. */
10386 static tree *
10387 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10388 tree *move_after)
10390 if (first_ptr == move_after)
10391 return NULL;
10393 tree tmp = *first_ptr;
10394 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10395 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10396 *move_after = tmp;
10398 return first_ptr;
10401 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10402 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10403 pointer MOVE_AFTER.
10405 The latter list was previously part of the OMP clause list, and the former
10406 (prepended) part is comprised of new nodes.
10408 We start with a list of nodes starting with a struct mapping node. We
10409 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10410 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10411 the group of mapping nodes we are currently processing (from the chain
10412 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10413 we should continue processing from, or NULL to stay where we were.
10415 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10416 different) is worked through below. Here we are processing LAST_NODE, and
10417 FIRST_PTR points at the preceding mapping clause:
10419 #. mapping node chain
10420 ---------------------------------------------------
10421 A. struct_node [->B]
10422 B. comp_1 [->C]
10423 C. comp_2 [->D (move_after)]
10424 D. map_to_3 [->E]
10425 E. attach_3 [->F (first_ptr)]
10426 F. map_to_4 [->G (continue_at)]
10427 G. attach_4 (last_node) [->H]
10428 H. ...
10430 *last_new_tail = *first_ptr;
10432 I. new_node (first_new) [->F (last_new_tail)]
10434 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10436 #. mapping node chain
10437 ----------------------------------------------------
10438 A. struct_node [->B]
10439 B. comp_1 [->C]
10440 C. comp_2 [->D (move_after)]
10441 D. map_to_3 [->E]
10442 E. attach_3 [->H (first_ptr)]
10443 F. map_to_4 [->G (continue_at)]
10444 G. attach_4 (last_node) [->H]
10445 H. ...
10447 I. new_node (first_new) [->F (last_new_tail)]
10449 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10451 #. mapping node chain
10452 ---------------------------------------------------
10453 A. struct_node [->B]
10454 B. comp_1 [->C]
10455 C. comp_2 [->D (move_after)]
10456 D. map_to_3 [->E]
10457 E. attach_3 [->H (continue_at)]
10458 F. map_to_4 [->G]
10459 G. attach_4 (last_node) [->D]
10460 H. ...
10462 I. new_node (first_new) [->F (last_new_tail)]
10464 *move_after = first_new;
10466 #. mapping node chain
10467 ---------------------------------------------------
10468 A. struct_node [->B]
10469 B. comp_1 [->C]
10470 C. comp_2 [->I (move_after)]
10471 D. map_to_3 [->E]
10472 E. attach_3 [->H (continue_at)]
10473 F. map_to_4 [->G]
10474 G. attach_4 (last_node) [->D]
10475 H. ...
10476 I. new_node (first_new) [->F (last_new_tail)]
10478 or, in order:
10480 #. mapping node chain
10481 ---------------------------------------------------
10482 A. struct_node [->B]
10483 B. comp_1 [->C]
10484 C. comp_2 [->I (move_after)]
10485 I. new_node (first_new) [->F (last_new_tail)]
10486 F. map_to_4 [->G]
10487 G. attach_4 (last_node) [->D]
10488 D. map_to_3 [->E]
10489 E. attach_3 [->H (continue_at)]
10490 H. ...
10491 */
10493 static tree *
10494 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10495 tree *first_ptr, tree last_node,
10496 tree *move_after)
10498 tree *continue_at = NULL;
10499 *last_new_tail = *first_ptr;
10500 if (first_ptr == move_after)
10501 *move_after = first_new;
10502 else
10504 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10505 continue_at = first_ptr;
10506 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10507 *move_after = first_new;
10509 return continue_at;
10512 /* Mapping struct members causes an additional set of nodes to be created,
10513 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10514 number of members being mapped, in ascending order of position (byte
10515 offset, then bit offset).
10517 We scan through the list of mapping clauses, calling this function for each
10518 struct member mapping we find, and build up the list of mappings after the
10519 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10520 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10521 moved into place in the sorted list.
10523 struct {
10524 int *a;
10525 int *b;
10526 int c;
10527 int *d;
10530 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10531 struct.d[0:n])
10533 GOMP_MAP_STRUCT (4)
10534 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10535 GOMP_MAP_ALLOC (struct.a)
10536 GOMP_MAP_ALLOC (struct.b)
10537 GOMP_MAP_TO (struct.c)
10538 GOMP_MAP_ALLOC (struct.d)
10541 In the case where we are mapping references to pointers, or in Fortran if
10542 we are mapping an array with a descriptor, additional nodes may be created
10543 after the struct node list also.
10545 The return code is either a pointer to the next node to process (if the
10546 list has been rearranged), else NULL to continue with the next node in the
10547 original list. */
10549 static tree *
10550 omp_accumulate_sibling_list (enum omp_region_type region_type,
10551 enum tree_code code,
10552 hash_map<tree_operand_hash, tree>
10553 *&struct_map_to_clause, tree *grp_start_p,
10554 tree grp_end, tree *inner)
10556 poly_offset_int coffset;
10557 poly_int64 cbitpos;
10558 tree ocd = OMP_CLAUSE_DECL (grp_end);
10559 bool openmp = !(region_type & ORT_ACC);
10560 tree *continue_at = NULL;
10562 while (TREE_CODE (ocd) == ARRAY_REF)
10563 ocd = TREE_OPERAND (ocd, 0);
10565 if (INDIRECT_REF_P (ocd))
10566 ocd = TREE_OPERAND (ocd, 0);
10568 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10570 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10571 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10572 == GOMP_MAP_ATTACH_DETACH)
10573 || (OMP_CLAUSE_MAP_KIND (grp_end)
10574 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10575 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10576 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10578 /* FIXME: If we're not mapping the base pointer in some other clause on this
10579 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10580 early-exit. */
10581 if (openmp && attach_detach)
10582 return NULL;
10584 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10586 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10587 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10589 OMP_CLAUSE_SET_MAP_KIND (l, k);
10591 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10593 OMP_CLAUSE_SIZE (l)
10594 = (!attach ? size_int (1)
10595 : (DECL_P (OMP_CLAUSE_DECL (l))
10596 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10597 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10598 if (struct_map_to_clause == NULL)
10599 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10600 struct_map_to_clause->put (base, l);
10602 if (ptr || attach_detach)
10604 tree extra_node;
10605 tree alloc_node
10606 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10607 &extra_node);
10608 OMP_CLAUSE_CHAIN (l) = alloc_node;
10610 tree *insert_node_pos = grp_start_p;
10612 if (extra_node)
10614 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10615 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10617 else
10618 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10620 *insert_node_pos = l;
10622 else
10624 gcc_assert (*grp_start_p == grp_end);
10625 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10628 tree noind = omp_strip_indirections (base);
10630 if (!openmp
10631 && (region_type & ORT_TARGET)
10632 && TREE_CODE (noind) == COMPONENT_REF)
10634 /* The base for this component access is a struct component access
10635 itself. Insert a node to be processed on the next iteration of
10636 our caller's loop, which will subsequently be turned into a new,
10637 inner GOMP_MAP_STRUCT mapping.
10639 We need to do this else the non-DECL_P base won't be
10640 rewritten correctly in the offloaded region. */
10641 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10642 OMP_CLAUSE_MAP);
10643 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10644 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10645 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10646 *inner = c2;
10647 return NULL;
10650 tree sdecl = omp_strip_components_and_deref (base);
10652 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10654 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10655 OMP_CLAUSE_MAP);
10656 bool base_ref
10657 = (INDIRECT_REF_P (base)
10658 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10659 == REFERENCE_TYPE)
10660 || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
10661 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10662 (TREE_OPERAND (base, 0), 0)))
10663 == REFERENCE_TYPE))));
10664 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10665 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10666 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10667 OMP_CLAUSE_DECL (c2) = sdecl;
10668 tree baddr = build_fold_addr_expr (base);
10669 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10670 ptrdiff_type_node, baddr);
10671 /* This isn't going to be good enough when we add support for more
10672 complicated lvalue expressions. FIXME. */
10673 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10674 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10675 sdecl = build_simple_mem_ref (sdecl);
10676 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10677 ptrdiff_type_node, sdecl);
10678 OMP_CLAUSE_SIZE (c2)
10679 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10680 ptrdiff_type_node, baddr, decladdr);
10681 /* Insert after struct node. */
10682 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10683 OMP_CLAUSE_CHAIN (l) = c2;
10686 return NULL;
10688 else if (struct_map_to_clause)
10690 tree *osc = struct_map_to_clause->get (base);
10691 tree *sc = NULL, *scp = NULL;
10692 sc = &OMP_CLAUSE_CHAIN (*osc);
10693 /* The struct mapping might be immediately followed by a
10694 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10695 indirect access or a reference, or both. (This added node is removed
10696 in omp-low.c after it has been processed there.) */
10697 if (*sc != grp_end
10698 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10699 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10700 sc = &OMP_CLAUSE_CHAIN (*sc);
10701 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10702 if ((ptr || attach_detach) && sc == grp_start_p)
10703 break;
10704 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10705 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10706 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10707 break;
10708 else
10710 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10711 poly_offset_int offset;
10712 poly_int64 bitpos;
10714 if (TREE_CODE (sc_decl) == ARRAY_REF)
10716 while (TREE_CODE (sc_decl) == ARRAY_REF)
10717 sc_decl = TREE_OPERAND (sc_decl, 0);
10718 if (TREE_CODE (sc_decl) != COMPONENT_REF
10719 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10720 break;
10722 else if (INDIRECT_REF_P (sc_decl)
10723 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10724 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10725 == REFERENCE_TYPE))
10726 sc_decl = TREE_OPERAND (sc_decl, 0);
10728 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10729 if (!base2 || !operand_equal_p (base2, base, 0))
10730 break;
10731 if (scp)
10732 continue;
10733 if (maybe_lt (coffset, offset)
10734 || (known_eq (coffset, offset)
10735 && maybe_lt (cbitpos, bitpos)))
10737 if (ptr || attach_detach)
10738 scp = sc;
10739 else
10740 break;
10744 if (!attach)
10745 OMP_CLAUSE_SIZE (*osc)
10746 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10747 if (ptr || attach_detach)
10749 tree cl = NULL_TREE, extra_node;
10750 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10751 grp_end, &extra_node);
10752 tree *tail_chain = NULL;
10754 /* Here, we have:
10756 grp_end : the last (or only) node in this group.
10757 grp_start_p : pointer to the first node in a pointer mapping group
10758 up to and including GRP_END.
10759 sc : pointer to the chain for the end of the struct component
10760 list.
10761 scp : pointer to the chain for the sorted position at which we
10762 should insert in the middle of the struct component list
10763 (else NULL to insert at end).
10764 alloc_node : the "alloc" node for the structure (pointer-type)
10765 component. We insert at SCP (if present), else SC
10766 (the end of the struct component list).
10767 extra_node : a newly-synthesized node for an additional indirect
10768 pointer mapping or a Fortran pointer set, if needed.
10769 cl : first node to prepend before grp_start_p.
10770 tail_chain : pointer to chain of last prepended node.
10772 The general idea is we move the nodes for this struct mapping
10773 together: the alloc node goes into the sorted list directly after
10774 the struct mapping, and any extra nodes (together with the nodes
10775 mapping arrays pointed to by struct components) get moved after
10776 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10777 the end of the struct component mapping list. It's important that
10778 the alloc_node comes first in that case because it's part of the
10779 sorted component mapping list (but subsequent nodes are not!). */
10781 if (scp)
10782 omp_siblist_insert_node_after (alloc_node, scp);
10784 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10785 already inserted it) and the extra_node (if it is present). The
10786 list can be empty if we added alloc_node above and there is no
10787 extra node. */
10788 if (scp && extra_node)
10790 cl = extra_node;
10791 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10793 else if (extra_node)
10795 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10796 cl = alloc_node;
10797 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10799 else if (!scp)
10801 cl = alloc_node;
10802 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10805 continue_at
10806 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10807 grp_start_p, grp_end,
10808 sc)
10809 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10811 else if (*sc != grp_end)
10813 gcc_assert (*grp_start_p == grp_end);
10815 /* We are moving the current node back to a previous struct node:
10816 the node that used to point to the current node will now point to
10817 the next node. */
10818 continue_at = grp_start_p;
10819 /* In the non-pointer case, the mapping clause itself is moved into
10820 the correct position in the struct component list, which in this
10821 case is just SC. */
10822 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10825 return continue_at;
10828 /* Scan through GROUPS, and create sorted structure sibling lists without
10829 gimplifying. */
10831 static bool
10832 omp_build_struct_sibling_lists (enum tree_code code,
10833 enum omp_region_type region_type,
10834 vec<omp_mapping_group> *groups,
10835 hash_map<tree_operand_hash_no_se,
10836 omp_mapping_group *> **grpmap,
10837 tree *list_p)
10839 unsigned i;
10840 omp_mapping_group *grp;
10841 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10842 bool success = true;
10843 tree *new_next = NULL;
10844 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10845 auto_vec<omp_mapping_group> pre_hwm_groups;
10847 FOR_EACH_VEC_ELT (*groups, i, grp)
10849 tree c = grp->grp_end;
10850 tree decl = OMP_CLAUSE_DECL (c);
10851 tree grp_end = grp->grp_end;
10852 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10854 if (new_next)
10855 grp->grp_start = new_next;
10857 new_next = NULL;
10859 tree *grp_start_p = grp->grp_start;
10861 if (DECL_P (decl))
10862 continue;
10864 /* Skip groups we marked for deletion in
10865 oacc_resolve_clause_dependencies. */
10866 if (grp->deleted)
10867 continue;
10869 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10870 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10872 /* Don't process an array descriptor that isn't inside a derived type
10873 as a struct (the GOMP_MAP_POINTER following will have the form
10874 "var.data", but such mappings are handled specially). */
10875 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10876 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10877 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10878 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10879 continue;
10882 tree d = decl;
10883 if (TREE_CODE (d) == ARRAY_REF)
10885 while (TREE_CODE (d) == ARRAY_REF)
10886 d = TREE_OPERAND (d, 0);
10887 if (TREE_CODE (d) == COMPONENT_REF
10888 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10889 decl = d;
10891 if (d == decl
10892 && INDIRECT_REF_P (decl)
10893 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10894 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10895 == REFERENCE_TYPE)
10896 && (OMP_CLAUSE_MAP_KIND (c)
10897 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10898 decl = TREE_OPERAND (decl, 0);
10900 STRIP_NOPS (decl);
10902 if (TREE_CODE (decl) != COMPONENT_REF)
10903 continue;
10905 /* If we're mapping the whole struct in another node, skip adding this
10906 node to a sibling list. */
10907 omp_mapping_group *wholestruct;
10908 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10909 &wholestruct))
10911 if (!(region_type & ORT_ACC)
10912 && *grp_start_p == grp_end)
10913 /* Remove the whole of this mapping -- redundant. */
10914 grp->deleted = true;
10916 continue;
10919 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10920 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10921 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10922 && code != OACC_UPDATE
10923 && code != OMP_TARGET_UPDATE)
10925 if (error_operand_p (decl))
10927 success = false;
10928 goto error_out;
10931 tree stype = TREE_TYPE (decl);
10932 if (TREE_CODE (stype) == REFERENCE_TYPE)
10933 stype = TREE_TYPE (stype);
10934 if (TYPE_SIZE_UNIT (stype) == NULL
10935 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10937 error_at (OMP_CLAUSE_LOCATION (c),
10938 "mapping field %qE of variable length "
10939 "structure", OMP_CLAUSE_DECL (c));
10940 success = false;
10941 goto error_out;
10944 tree inner = NULL_TREE;
10946 new_next
10947 = omp_accumulate_sibling_list (region_type, code,
10948 struct_map_to_clause, grp_start_p,
10949 grp_end, &inner);
10951 if (inner)
10953 if (new_next && *new_next == NULL_TREE)
10954 *new_next = inner;
10955 else
10956 *tail = inner;
10958 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10959 omp_mapping_group newgrp;
10960 newgrp.grp_start = new_next ? new_next : tail;
10961 newgrp.grp_end = inner;
10962 newgrp.mark = UNVISITED;
10963 newgrp.sibling = NULL;
10964 newgrp.deleted = false;
10965 newgrp.next = NULL;
10966 groups->safe_push (newgrp);
10968 /* !!! Growing GROUPS might invalidate the pointers in the group
10969 map. Rebuild it here. This is a bit inefficient, but
10970 shouldn't happen very often. */
10971 delete (*grpmap);
10972 *grpmap
10973 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10974 sentinel);
10976 tail = &OMP_CLAUSE_CHAIN (inner);
10981 /* Delete groups marked for deletion above. At this point the order of the
10982 groups may no longer correspond to the order of the underlying list,
10983 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10984 deleted nodes... */
10986 FOR_EACH_VEC_ELT (*groups, i, grp)
10987 if (grp->deleted)
10988 for (tree d = *grp->grp_start;
10989 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10990 d = OMP_CLAUSE_CHAIN (d))
10991 OMP_CLAUSE_DECL (d) = NULL_TREE;
10993 /* ...then sweep through the list removing the now-empty nodes. */
10995 tail = list_p;
10996 while (*tail)
10998 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10999 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
11000 *tail = OMP_CLAUSE_CHAIN (*tail);
11001 else
11002 tail = &OMP_CLAUSE_CHAIN (*tail);
11005 error_out:
11006 if (struct_map_to_clause)
11007 delete struct_map_to_clause;
11009 return success;
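/* A rough sketch of the effect of omp_build_struct_sibling_lists above:
   for component mappings such as

     #pragma omp target map(to: s.a, s.b)

   the per-component map clauses are accumulated onto a sibling list
   headed by a GOMP_MAP_STRUCT node for S, so that the containing
   object is laid out on the device once and consistently.  */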
11012 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
11013    omp context and noticing variables in the enclosing contexts.  */
11015 static void
11016 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
11017 enum omp_region_type region_type,
11018 enum tree_code code)
11020 struct gimplify_omp_ctx *ctx, *outer_ctx;
11021 tree c;
11022 tree *orig_list_p = list_p;
11023 int handled_depend_iterators = -1;
11024 int nowait = -1;
11026 ctx = new_omp_context (region_type);
11027 ctx->code = code;
11028 outer_ctx = ctx->outer_context;
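  /* Set up the default data-mapping categories for the new context.
     A sketch: on "omp target", scalars default to firstprivate (for
     Fortran, to map) and, for C/C++, pointers are treated as
     zero-length array sections based at the pointer.  */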
11029 if (code == OMP_TARGET)
11031 if (!lang_GNU_Fortran ())
11032 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
11033 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
11034 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
11035 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11037 if (!lang_GNU_Fortran ())
11038 switch (code)
11040 case OMP_TARGET:
11041 case OMP_TARGET_DATA:
11042 case OMP_TARGET_ENTER_DATA:
11043 case OMP_TARGET_EXIT_DATA:
11044 case OACC_DECLARE:
11045 case OACC_HOST_DATA:
11046 case OACC_PARALLEL:
11047 case OACC_KERNELS:
11048 ctx->target_firstprivatize_array_bases = true;
11049 default:
11050 break;
11053 if (code == OMP_TARGET
11054 || code == OMP_TARGET_DATA
11055 || code == OMP_TARGET_ENTER_DATA
11056 || code == OMP_TARGET_EXIT_DATA)
11058 vec<omp_mapping_group> *groups;
11059 groups = omp_gather_mapping_groups (list_p);
11060 if (groups)
11062 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
11063 grpmap = omp_index_mapping_groups (groups);
11065 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
11066 list_p);
11068 omp_mapping_group *outlist = NULL;
11070 /* Topological sorting may fail if we have duplicate nodes, which
11071 we should have detected and shown an error for already. Skip
11072 sorting in that case. */
11073 if (seen_error ())
11074 goto failure;
11076 delete grpmap;
11077 delete groups;
11079 /* Rebuild now that we have struct sibling lists.  */
11080 groups = omp_gather_mapping_groups (list_p);
11081 grpmap = omp_index_mapping_groups (groups);
11083 outlist = omp_tsort_mapping_groups (groups, grpmap);
11084 outlist = omp_segregate_mapping_groups (outlist);
11085 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
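	  /* After omp_tsort_mapping_groups, groups that other mappings
	     depend on (e.g. a base pointer) precede their dependants,
	     and the clause list is rewritten in that order (a sketch of
	     the intent).  */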
11087 failure:
11088 delete grpmap;
11089 delete groups;
11092 /* OpenMP map clauses with 'present' need to go in front of those
11093 without. */
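      /* For instance (a sketch):

	   #pragma omp target map(tofrom: a) map(present, to: b)

	 is reordered below so that the GOMP_MAP_PRESENT_TO clause for B
	 precedes the mapping of A in the clause list.  */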
11094 tree present_map_head = NULL;
11095 tree *present_map_tail_p = &present_map_head;
11096 tree *first_map_clause_p = NULL;
11098 for (tree *c_p = list_p; *c_p; )
11100 tree c = *c_p;
11101 tree *next_c_p = &OMP_CLAUSE_CHAIN (c);
11103 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11105 if (!first_map_clause_p)
11106 first_map_clause_p = c_p;
11107 switch (OMP_CLAUSE_MAP_KIND (c))
11109 case GOMP_MAP_PRESENT_ALLOC:
11110 case GOMP_MAP_PRESENT_FROM:
11111 case GOMP_MAP_PRESENT_TO:
11112 case GOMP_MAP_PRESENT_TOFROM:
11113 next_c_p = c_p;
11114 *c_p = OMP_CLAUSE_CHAIN (c);
11116 OMP_CLAUSE_CHAIN (c) = NULL;
11117 *present_map_tail_p = c;
11118 present_map_tail_p = &OMP_CLAUSE_CHAIN (c);
11120 break;
11122 default:
11123 break;
11127 c_p = next_c_p;
11129 if (first_map_clause_p && present_map_head)
11131 tree next = *first_map_clause_p;
11132 *first_map_clause_p = present_map_head;
11133 *present_map_tail_p = next;
11136 else if (region_type & ORT_ACC)
11138 vec<omp_mapping_group> *groups;
11139 groups = omp_gather_mapping_groups (list_p);
11140 if (groups)
11142 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
11143 grpmap = omp_index_mapping_groups (groups);
11145 oacc_resolve_clause_dependencies (groups, grpmap);
11146 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
11147 list_p);
11149 delete groups;
11150 delete grpmap;
11154 while ((c = *list_p) != NULL)
11156 bool remove = false;
11157 bool notice_outer = true;
11158 const char *check_non_private = NULL;
11159 unsigned int flags;
11160 tree decl;
11162 switch (OMP_CLAUSE_CODE (c))
11164 case OMP_CLAUSE_PRIVATE:
11165 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
11166 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
11168 flags |= GOVD_PRIVATE_OUTER_REF;
11169 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
11171 else
11172 notice_outer = false;
11173 goto do_add;
11174 case OMP_CLAUSE_SHARED:
11175 flags = GOVD_SHARED | GOVD_EXPLICIT;
11176 goto do_add;
11177 case OMP_CLAUSE_FIRSTPRIVATE:
11178 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11179 check_non_private = "firstprivate";
11180 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11182 gcc_assert (code == OMP_TARGET);
11183 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
11185 goto do_add;
11186 case OMP_CLAUSE_LASTPRIVATE:
11187 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11188 switch (code)
11190 case OMP_DISTRIBUTE:
11191 error_at (OMP_CLAUSE_LOCATION (c),
11192 "conditional %<lastprivate%> clause on "
11193 "%qs construct", "distribute");
11194 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11195 break;
11196 case OMP_TASKLOOP:
11197 error_at (OMP_CLAUSE_LOCATION (c),
11198 "conditional %<lastprivate%> clause on "
11199 "%qs construct", "taskloop");
11200 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11201 break;
11202 default:
11203 break;
11205 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
11206 if (code != OMP_LOOP)
11207 check_non_private = "lastprivate";
11208 decl = OMP_CLAUSE_DECL (c);
11209 if (error_operand_p (decl))
11210 goto do_add;
11211 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
11212 && !lang_hooks.decls.omp_scalar_p (decl, true))
11214 error_at (OMP_CLAUSE_LOCATION (c),
11215 "non-scalar variable %qD in conditional "
11216 "%<lastprivate%> clause", decl);
11217 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11219 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11220 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
11221 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
11222 false);
11223 goto do_add;
11224 case OMP_CLAUSE_REDUCTION:
11225 if (OMP_CLAUSE_REDUCTION_TASK (c))
11227 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11229 if (nowait == -1)
11230 nowait = omp_find_clause (*list_p,
11231 OMP_CLAUSE_NOWAIT) != NULL_TREE;
11232 if (nowait
11233 && (outer_ctx == NULL
11234 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
11236 error_at (OMP_CLAUSE_LOCATION (c),
11237 "%<task%> reduction modifier on a construct "
11238 "with a %<nowait%> clause");
11239 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
11242 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
11244 error_at (OMP_CLAUSE_LOCATION (c),
11245 "invalid %<task%> reduction modifier on construct "
11246 "other than %<parallel%>, %qs, %<sections%> or "
11247 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
11248 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
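	      /* E.g. (a sketch) the following is diagnosed just above,
		 since a "task" reduction modifier may not be combined
		 with "nowait" unless the construct is part of a
		 combined parallel:

		   #pragma omp for nowait reduction(task, +: sum)  */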
11251 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11252 switch (code)
11254 case OMP_SECTIONS:
11255 error_at (OMP_CLAUSE_LOCATION (c),
11256 "%<inscan%> %<reduction%> clause on "
11257 "%qs construct", "sections");
11258 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11259 break;
11260 case OMP_PARALLEL:
11261 error_at (OMP_CLAUSE_LOCATION (c),
11262 "%<inscan%> %<reduction%> clause on "
11263 "%qs construct", "parallel");
11264 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11265 break;
11266 case OMP_TEAMS:
11267 error_at (OMP_CLAUSE_LOCATION (c),
11268 "%<inscan%> %<reduction%> clause on "
11269 "%qs construct", "teams");
11270 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11271 break;
11272 case OMP_TASKLOOP:
11273 error_at (OMP_CLAUSE_LOCATION (c),
11274 "%<inscan%> %<reduction%> clause on "
11275 "%qs construct", "taskloop");
11276 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11277 break;
11278 case OMP_SCOPE:
11279 error_at (OMP_CLAUSE_LOCATION (c),
11280 "%<inscan%> %<reduction%> clause on "
11281 "%qs construct", "scope");
11282 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11283 break;
11284 default:
11285 break;
11287 /* FALLTHRU */
11288 case OMP_CLAUSE_IN_REDUCTION:
11289 case OMP_CLAUSE_TASK_REDUCTION:
11290 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
11291 /* OpenACC permits reductions on private variables. */
11292 if (!(region_type & ORT_ACC)
11293 /* taskgroup is actually not a worksharing region. */
11294 && code != OMP_TASKGROUP)
11295 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
11296 decl = OMP_CLAUSE_DECL (c);
11297 if (TREE_CODE (decl) == MEM_REF)
11299 tree type = TREE_TYPE (decl);
11300 bool saved_into_ssa = gimplify_ctxp->into_ssa;
11301 gimplify_ctxp->into_ssa = false;
11302 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
11303 NULL, is_gimple_val, fb_rvalue, false)
11304 == GS_ERROR)
11306 gimplify_ctxp->into_ssa = saved_into_ssa;
11307 remove = true;
11308 break;
11310 gimplify_ctxp->into_ssa = saved_into_ssa;
11311 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11312 if (DECL_P (v))
11314 omp_firstprivatize_variable (ctx, v);
11315 omp_notice_variable (ctx, v, true);
11317 decl = TREE_OPERAND (decl, 0);
11318 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11320 gimplify_ctxp->into_ssa = false;
11321 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
11322 NULL, is_gimple_val, fb_rvalue, false)
11323 == GS_ERROR)
11325 gimplify_ctxp->into_ssa = saved_into_ssa;
11326 remove = true;
11327 break;
11329 gimplify_ctxp->into_ssa = saved_into_ssa;
11330 v = TREE_OPERAND (decl, 1);
11331 if (DECL_P (v))
11333 omp_firstprivatize_variable (ctx, v);
11334 omp_notice_variable (ctx, v, true);
11336 decl = TREE_OPERAND (decl, 0);
11338 if (TREE_CODE (decl) == ADDR_EXPR
11339 || TREE_CODE (decl) == INDIRECT_REF)
11340 decl = TREE_OPERAND (decl, 0);
11342 goto do_add_decl;
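	  /* Sketch: a reduction over an array section, e.g.

	       #pragma omp parallel for reduction(+: a[0:n])

	     reaches this point as a MEM_REF; the code above gimplifies
	     the section bound and offset, firstprivatizes any DECLs they
	     use, and strips the expression down to its base decl.  */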
11343 case OMP_CLAUSE_LINEAR:
11344 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
11345 is_gimple_val, fb_rvalue) == GS_ERROR)
11347 remove = true;
11348 break;
11350 else
11352 if (code == OMP_SIMD
11353 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11355 struct gimplify_omp_ctx *octx = outer_ctx;
11356 if (octx
11357 && octx->region_type == ORT_WORKSHARE
11358 && octx->combined_loop
11359 && !octx->distribute)
11361 if (octx->outer_context
11362 && (octx->outer_context->region_type
11363 == ORT_COMBINED_PARALLEL))
11364 octx = octx->outer_context->outer_context;
11365 else
11366 octx = octx->outer_context;
11368 if (octx
11369 && octx->region_type == ORT_WORKSHARE
11370 && octx->combined_loop
11371 && octx->distribute)
11373 error_at (OMP_CLAUSE_LOCATION (c),
11374 "%<linear%> clause for variable other than "
11375 "loop iterator specified on construct "
11376 "combined with %<distribute%>");
11377 remove = true;
11378 break;
11381 /* For combined #pragma omp parallel for simd, need to put
11382 lastprivate and perhaps firstprivate too on the
11383 parallel. Similarly for #pragma omp for simd. */
11384 struct gimplify_omp_ctx *octx = outer_ctx;
11385 bool taskloop_seen = false;
11386 decl = NULL_TREE;
11389 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11390 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11391 break;
11392 decl = OMP_CLAUSE_DECL (c);
11393 if (error_operand_p (decl))
11395 decl = NULL_TREE;
11396 break;
11398 flags = GOVD_SEEN;
11399 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11400 flags |= GOVD_FIRSTPRIVATE;
11401 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11402 flags |= GOVD_LASTPRIVATE;
11403 if (octx
11404 && octx->region_type == ORT_WORKSHARE
11405 && octx->combined_loop)
11407 if (octx->outer_context
11408 && (octx->outer_context->region_type
11409 == ORT_COMBINED_PARALLEL))
11410 octx = octx->outer_context;
11411 else if (omp_check_private (octx, decl, false))
11412 break;
11414 else if (octx
11415 && (octx->region_type & ORT_TASK) != 0
11416 && octx->combined_loop)
11417 taskloop_seen = true;
11418 else if (octx
11419 && octx->region_type == ORT_COMBINED_PARALLEL
11420 && ((ctx->region_type == ORT_WORKSHARE
11421 && octx == outer_ctx)
11422 || taskloop_seen))
11423 flags = GOVD_SEEN | GOVD_SHARED;
11424 else if (octx
11425 && ((octx->region_type & ORT_COMBINED_TEAMS)
11426 == ORT_COMBINED_TEAMS))
11427 flags = GOVD_SEEN | GOVD_SHARED;
11428 else if (octx
11429 && octx->region_type == ORT_COMBINED_TARGET)
11431 if (flags & GOVD_LASTPRIVATE)
11432 flags = GOVD_SEEN | GOVD_MAP;
11434 else
11435 break;
11436 splay_tree_node on
11437 = splay_tree_lookup (octx->variables,
11438 (splay_tree_key) decl);
11439 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11441 octx = NULL;
11442 break;
11444 omp_add_variable (octx, decl, flags);
11445 if (octx->outer_context == NULL)
11446 break;
11447 octx = octx->outer_context;
11449 while (1);
11450 if (octx
11451 && decl
11452 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11453 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11454 omp_notice_variable (octx, decl, true);
11456 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11457 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11458 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11460 notice_outer = false;
11461 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11463 goto do_add;
11465 case OMP_CLAUSE_MAP:
11466 decl = OMP_CLAUSE_DECL (c);
11467 if (error_operand_p (decl))
11468 remove = true;
11469 switch (code)
11471 case OMP_TARGET:
11472 break;
11473 case OACC_DATA:
11474 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11475 break;
11476 /* FALLTHRU */
11477 case OMP_TARGET_DATA:
11478 case OMP_TARGET_ENTER_DATA:
11479 case OMP_TARGET_EXIT_DATA:
11480 case OACC_ENTER_DATA:
11481 case OACC_EXIT_DATA:
11482 case OACC_HOST_DATA:
11483 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11484 || (OMP_CLAUSE_MAP_KIND (c)
11485 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11486 /* For target {,enter ,exit } data, only the array section is
11487    mapped, not the pointer to it.  */
11488 remove = true;
11489 break;
11490 default:
11491 break;
11493 if (remove)
11494 break;
11495 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11497 struct gimplify_omp_ctx *octx;
11498 for (octx = outer_ctx; octx; octx = octx->outer_context)
11500 if (octx->region_type != ORT_ACC_HOST_DATA)
11501 break;
11502 splay_tree_node n2
11503 = splay_tree_lookup (octx->variables,
11504 (splay_tree_key) decl);
11505 if (n2)
11506 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11507 "declared in enclosing %<host_data%> region",
11508 DECL_NAME (decl));
11511 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11512 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11513 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11514 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11515 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11517 remove = true;
11518 break;
11520 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11521 || (OMP_CLAUSE_MAP_KIND (c)
11522 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11523 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11524 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11526 OMP_CLAUSE_SIZE (c)
11527 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11528 false);
11529 if ((region_type & ORT_TARGET) != 0)
11530 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11531 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11534 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11536 tree base = omp_strip_components_and_deref (decl);
11537 if (DECL_P (base))
11539 decl = base;
11540 splay_tree_node n
11541 = splay_tree_lookup (ctx->variables,
11542 (splay_tree_key) decl);
11543 if (seen_error ()
11544 && n
11545 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11547 remove = true;
11548 break;
11550 flags = GOVD_MAP | GOVD_EXPLICIT;
11552 goto do_add_decl;
11556 if (TREE_CODE (decl) == TARGET_EXPR)
11558 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11559 is_gimple_lvalue, fb_lvalue)
11560 == GS_ERROR)
11561 remove = true;
11563 else if (!DECL_P (decl))
11565 tree d = decl, *pd;
11566 if (TREE_CODE (d) == ARRAY_REF)
11568 while (TREE_CODE (d) == ARRAY_REF)
11569 d = TREE_OPERAND (d, 0);
11570 if (TREE_CODE (d) == COMPONENT_REF
11571 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11572 decl = d;
11574 pd = &OMP_CLAUSE_DECL (c);
11575 if (d == decl
11576 && TREE_CODE (decl) == INDIRECT_REF
11577 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11578 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11579 == REFERENCE_TYPE)
11580 && (OMP_CLAUSE_MAP_KIND (c)
11581 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11583 pd = &TREE_OPERAND (decl, 0);
11584 decl = TREE_OPERAND (decl, 0);
11586 /* An "attach/detach" operation on an update directive should
11587 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11588 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11589 depends on the previous mapping. */
11590 if (code == OACC_UPDATE
11591 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11592 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11594 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11596 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11597 == ARRAY_TYPE)
11598 remove = true;
11599 else
11601 gomp_map_kind k = ((code == OACC_EXIT_DATA
11602 || code == OMP_TARGET_EXIT_DATA)
11603 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11604 OMP_CLAUSE_SET_MAP_KIND (c, k);
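		  /* I.e. on "target exit data" / "acc exit data" the
		     attach/detach node becomes a plain detach, and on
		     the corresponding enter-data constructs an attach.  */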
11608 tree cref = decl;
11610 while (TREE_CODE (cref) == ARRAY_REF)
11611 cref = TREE_OPERAND (cref, 0);
11613 if (TREE_CODE (cref) == INDIRECT_REF)
11614 cref = TREE_OPERAND (cref, 0);
11616 if (TREE_CODE (cref) == COMPONENT_REF)
11618 tree base = cref;
11619 while (base && !DECL_P (base))
11621 tree innerbase = omp_get_base_pointer (base);
11622 if (!innerbase)
11623 break;
11624 base = innerbase;
11626 if (base
11627 && DECL_P (base)
11628 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11629 && POINTER_TYPE_P (TREE_TYPE (base)))
11631 splay_tree_node n
11632 = splay_tree_lookup (ctx->variables,
11633 (splay_tree_key) base);
11634 n->value |= GOVD_SEEN;
11638 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11640 /* Don't gimplify *pd fully at this point, as the base
11641 will need to be adjusted during omp lowering. */
11642 auto_vec<tree, 10> expr_stack;
11643 tree *p = pd;
11644 while (handled_component_p (*p)
11645 || TREE_CODE (*p) == INDIRECT_REF
11646 || TREE_CODE (*p) == ADDR_EXPR
11647 || TREE_CODE (*p) == MEM_REF
11648 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11650 expr_stack.safe_push (*p);
11651 p = &TREE_OPERAND (*p, 0);
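	      /* Walk back down the saved component chain, filling in and
		 gimplifying the normally implicit ARRAY_REF operands
		 (lower bound, element size) and COMPONENT_REF field
		 offsets, so omp lowering need not recompute them.  */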
11653 for (int i = expr_stack.length () - 1; i >= 0; i--)
11655 tree t = expr_stack[i];
11656 if (TREE_CODE (t) == ARRAY_REF
11657 || TREE_CODE (t) == ARRAY_RANGE_REF)
11659 if (TREE_OPERAND (t, 2) == NULL_TREE)
11661 tree low = unshare_expr (array_ref_low_bound (t));
11662 if (!is_gimple_min_invariant (low))
11664 TREE_OPERAND (t, 2) = low;
11665 if (gimplify_expr (&TREE_OPERAND (t, 2),
11666 pre_p, NULL,
11667 is_gimple_reg,
11668 fb_rvalue) == GS_ERROR)
11669 remove = true;
11672 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11673 NULL, is_gimple_reg,
11674 fb_rvalue) == GS_ERROR)
11675 remove = true;
11676 if (TREE_OPERAND (t, 3) == NULL_TREE)
11678 tree elmt_size = array_ref_element_size (t);
11679 if (!is_gimple_min_invariant (elmt_size))
11681 elmt_size = unshare_expr (elmt_size);
11682 tree elmt_type
11683 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11684 0)));
11685 tree factor
11686 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11687 elmt_size
11688 = size_binop (EXACT_DIV_EXPR, elmt_size,
11689 factor);
11690 TREE_OPERAND (t, 3) = elmt_size;
11691 if (gimplify_expr (&TREE_OPERAND (t, 3),
11692 pre_p, NULL,
11693 is_gimple_reg,
11694 fb_rvalue) == GS_ERROR)
11695 remove = true;
11698 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11699 NULL, is_gimple_reg,
11700 fb_rvalue) == GS_ERROR)
11701 remove = true;
11703 else if (TREE_CODE (t) == COMPONENT_REF)
11705 if (TREE_OPERAND (t, 2) == NULL_TREE)
11707 tree offset = component_ref_field_offset (t);
11708 if (!is_gimple_min_invariant (offset))
11710 offset = unshare_expr (offset);
11711 tree field = TREE_OPERAND (t, 1);
11712 tree factor
11713 = size_int (DECL_OFFSET_ALIGN (field)
11714 / BITS_PER_UNIT);
11715 offset = size_binop (EXACT_DIV_EXPR, offset,
11716 factor);
11717 TREE_OPERAND (t, 2) = offset;
11718 if (gimplify_expr (&TREE_OPERAND (t, 2),
11719 pre_p, NULL,
11720 is_gimple_reg,
11721 fb_rvalue) == GS_ERROR)
11722 remove = true;
11725 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11726 NULL, is_gimple_reg,
11727 fb_rvalue) == GS_ERROR)
11728 remove = true;
11731 for (; expr_stack.length () > 0; )
11733 tree t = expr_stack.pop ();
11735 if (TREE_CODE (t) == ARRAY_REF
11736 || TREE_CODE (t) == ARRAY_RANGE_REF)
11738 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11739 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11740 NULL, is_gimple_val,
11741 fb_rvalue) == GS_ERROR)
11742 remove = true;
11746 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11747 fb_lvalue) == GS_ERROR)
11749 remove = true;
11750 break;
11752 break;
11754 flags = GOVD_MAP | GOVD_EXPLICIT;
11755 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11756 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11757 flags |= GOVD_MAP_ALWAYS_TO;
11759 if ((code == OMP_TARGET
11760 || code == OMP_TARGET_DATA
11761 || code == OMP_TARGET_ENTER_DATA
11762 || code == OMP_TARGET_EXIT_DATA)
11763 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11765 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11766 octx = octx->outer_context)
11768 splay_tree_node n
11769 = splay_tree_lookup (octx->variables,
11770 (splay_tree_key) OMP_CLAUSE_DECL (c));
11771 /* If this is contained in an outer OpenMP region as a
11772 firstprivate value, remove the attach/detach. */
11773 if (n && (n->value & GOVD_FIRSTPRIVATE))
11775 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11776 goto do_add;
11780 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11781 ? GOMP_MAP_DETACH
11782 : GOMP_MAP_ATTACH);
11783 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11786 goto do_add;
11788 case OMP_CLAUSE_AFFINITY:
11789 gimplify_omp_affinity (list_p, pre_p);
11790 remove = true;
11791 break;
11792 case OMP_CLAUSE_DOACROSS:
11793 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11795 tree deps = OMP_CLAUSE_DECL (c);
11796 while (deps && TREE_CODE (deps) == TREE_LIST)
11798 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11799 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11800 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11801 pre_p, NULL, is_gimple_val, fb_rvalue);
11802 deps = TREE_CHAIN (deps);
11805 else
11806 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11807 == OMP_CLAUSE_DOACROSS_SOURCE);
11808 break;
11809 case OMP_CLAUSE_DEPEND:
11810 if (handled_depend_iterators == -1)
11811 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11812 if (handled_depend_iterators)
11814 if (handled_depend_iterators == 2)
11815 remove = true;
11816 break;
11818 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11820 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11821 NULL, is_gimple_val, fb_rvalue);
11822 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11824 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11826 remove = true;
11827 break;
11829 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11831 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11832 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11833 is_gimple_val, fb_rvalue) == GS_ERROR)
11835 remove = true;
11836 break;
11839 if (code == OMP_TASK)
11840 ctx->has_depend = true;
11841 break;
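	  /* Note (a sketch): except for the omp_all_memory case, what is
	     handed to the runtime for a depend clause is the address of
	     the object, hence the build_fold_addr_expr above.  */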
11843 case OMP_CLAUSE_TO:
11844 case OMP_CLAUSE_FROM:
11845 case OMP_CLAUSE__CACHE_:
11846 decl = OMP_CLAUSE_DECL (c);
11847 if (error_operand_p (decl))
11849 remove = true;
11850 break;
11852 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11853 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11854 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11855 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11856 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11858 remove = true;
11859 break;
11861 if (!DECL_P (decl))
11863 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11864 NULL, is_gimple_lvalue, fb_lvalue)
11865 == GS_ERROR)
11867 remove = true;
11868 break;
11870 break;
11872 goto do_notice;
11874 case OMP_CLAUSE_USE_DEVICE_PTR:
11875 case OMP_CLAUSE_USE_DEVICE_ADDR:
11876 flags = GOVD_EXPLICIT;
11877 goto do_add;
11879 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11880 decl = OMP_CLAUSE_DECL (c);
11881 while (TREE_CODE (decl) == INDIRECT_REF
11882 || TREE_CODE (decl) == ARRAY_REF)
11883 decl = TREE_OPERAND (decl, 0);
11884 flags = GOVD_EXPLICIT;
11885 goto do_add_decl;
11887 case OMP_CLAUSE_IS_DEVICE_PTR:
11888 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11889 goto do_add;
11891 do_add:
11892 decl = OMP_CLAUSE_DECL (c);
11893 do_add_decl:
11894 if (error_operand_p (decl))
11896 remove = true;
11897 break;
11899 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11901 tree t = omp_member_access_dummy_var (decl);
11902 if (t)
11904 tree v = DECL_VALUE_EXPR (decl);
11905 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11906 if (outer_ctx)
11907 omp_notice_variable (outer_ctx, t, true);
11910 if (code == OACC_DATA
11911 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11912 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11913 flags |= GOVD_MAP_0LEN_ARRAY;
11914 omp_add_variable (ctx, decl, flags);
11915 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11916 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11917 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11918 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11920 struct gimplify_omp_ctx *pctx
11921 = code == OMP_TARGET ? outer_ctx : ctx;
11922 if (pctx)
11923 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11924 GOVD_LOCAL | GOVD_SEEN);
11925 if (pctx
11926 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11927 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11928 find_decl_expr,
11929 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11930 NULL) == NULL_TREE)
11931 omp_add_variable (pctx,
11932 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11933 GOVD_LOCAL | GOVD_SEEN);
11934 gimplify_omp_ctxp = pctx;
11935 push_gimplify_context ();
11937 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11938 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11940 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11941 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11942 pop_gimplify_context
11943 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11944 push_gimplify_context ();
11945 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11946 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11947 pop_gimplify_context
11948 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11949 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11950 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11952 gimplify_omp_ctxp = outer_ctx;
11954 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11955 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11957 gimplify_omp_ctxp = ctx;
11958 push_gimplify_context ();
11959 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11961 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11962 NULL, NULL);
11963 TREE_SIDE_EFFECTS (bind) = 1;
11964 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11965 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11967 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11968 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11969 pop_gimplify_context
11970 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11971 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11973 gimplify_omp_ctxp = outer_ctx;
11975 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11976 && OMP_CLAUSE_LINEAR_STMT (c))
11978 gimplify_omp_ctxp = ctx;
11979 push_gimplify_context ();
11980 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11982 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11983 NULL, NULL);
11984 TREE_SIDE_EFFECTS (bind) = 1;
11985 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11986 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11988 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11989 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11990 pop_gimplify_context
11991 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11992 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11994 gimplify_omp_ctxp = outer_ctx;
11996 if (notice_outer)
11997 goto do_notice;
11998 break;
12000 case OMP_CLAUSE_COPYIN:
12001 case OMP_CLAUSE_COPYPRIVATE:
12002 decl = OMP_CLAUSE_DECL (c);
12003 if (error_operand_p (decl))
12005 remove = true;
12006 break;
12008 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
12009 && !remove
12010 && !omp_check_private (ctx, decl, true))
12012 remove = true;
12013 if (is_global_var (decl))
12015 if (DECL_THREAD_LOCAL_P (decl))
12016 remove = false;
12017 else if (DECL_HAS_VALUE_EXPR_P (decl))
12019 tree value = get_base_address (DECL_VALUE_EXPR (decl));
12021 if (value
12022 && DECL_P (value)
12023 && DECL_THREAD_LOCAL_P (value))
12024 remove = false;
12027 if (remove)
12028 error_at (OMP_CLAUSE_LOCATION (c),
12029 "copyprivate variable %qE is not threadprivate"
12030 " or private in outer context", DECL_NAME (decl));
12032 do_notice:
12033 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12034 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
12035 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12036 && outer_ctx
12037 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
12038 || (region_type == ORT_WORKSHARE
12039 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12040 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
12041 || code == OMP_LOOP)))
12042 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
12043 || (code == OMP_LOOP
12044 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12045 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
12046 == ORT_COMBINED_TEAMS))))
12048 splay_tree_node on
12049 = splay_tree_lookup (outer_ctx->variables,
12050 (splay_tree_key)decl);
12051 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
12053 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12054 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12055 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
12056 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12057 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
12058 == POINTER_TYPE))))
12059 omp_firstprivatize_variable (outer_ctx, decl);
12060 else
12062 omp_add_variable (outer_ctx, decl,
12063 GOVD_SEEN | GOVD_SHARED);
12064 if (outer_ctx->outer_context)
12065 omp_notice_variable (outer_ctx->outer_context, decl,
12066 true);
12070 if (outer_ctx)
12071 omp_notice_variable (outer_ctx, decl, true);
12072 if (check_non_private
12073 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
12074 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
12075 || decl == OMP_CLAUSE_DECL (c)
12076 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12077 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12078 == ADDR_EXPR
12079 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12080 == POINTER_PLUS_EXPR
12081 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
12082 (OMP_CLAUSE_DECL (c), 0), 0))
12083 == ADDR_EXPR)))))
12084 && omp_check_private (ctx, decl, false))
12086 error ("%s variable %qE is private in outer context",
12087 check_non_private, DECL_NAME (decl));
12088 remove = true;
12090 break;
12092 case OMP_CLAUSE_DETACH:
12093 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
12094 goto do_add;
12096 case OMP_CLAUSE_IF:
12097 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
12098 && OMP_CLAUSE_IF_MODIFIER (c) != code)
12100 const char *p[2];
12101 for (int i = 0; i < 2; i++)
12102 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
12104 case VOID_CST: p[i] = "cancel"; break;
12105 case OMP_PARALLEL: p[i] = "parallel"; break;
12106 case OMP_SIMD: p[i] = "simd"; break;
12107 case OMP_TASK: p[i] = "task"; break;
12108 case OMP_TASKLOOP: p[i] = "taskloop"; break;
12109 case OMP_TARGET_DATA: p[i] = "target data"; break;
12110 case OMP_TARGET: p[i] = "target"; break;
12111 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
12112 case OMP_TARGET_ENTER_DATA:
12113 p[i] = "target enter data"; break;
12114 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
12115 default: gcc_unreachable ();
12117 error_at (OMP_CLAUSE_LOCATION (c),
12118 "expected %qs %<if%> clause modifier rather than %qs",
12119 p[0], p[1]);
12120 remove = true;
12122 /* Fall through. */
12124 case OMP_CLAUSE_FINAL:
12125 OMP_CLAUSE_OPERAND (c, 0)
12126 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
12127 /* Fall through. */
12129 case OMP_CLAUSE_NUM_TEAMS:
12130 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
12131 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
12132 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
12134 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
12136 remove = true;
12137 break;
12139 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
12140 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
12141 pre_p, NULL, true);
12143 /* Fall through. */
12145 case OMP_CLAUSE_SCHEDULE:
12146 case OMP_CLAUSE_NUM_THREADS:
12147 case OMP_CLAUSE_THREAD_LIMIT:
12148 case OMP_CLAUSE_DIST_SCHEDULE:
12149 case OMP_CLAUSE_DEVICE:
12150 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
12151 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
12153 if (code != OMP_TARGET)
12155 error_at (OMP_CLAUSE_LOCATION (c),
12156 "%<device%> clause with %<ancestor%> is only "
12157 "allowed on %<target%> construct");
12158 remove = true;
12159 break;
12162 tree clauses = *orig_list_p;
12163 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
12164 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
12165 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
12166 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
12167 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
12168 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
12171 error_at (OMP_CLAUSE_LOCATION (c),
12172 "with %<ancestor%>, only the %<device%>, "
12173 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
12174 "and %<map%> clauses may appear on the "
12175 "construct");
12176 remove = true;
12177 break;
12180 /* Fall through. */
12182 case OMP_CLAUSE_PRIORITY:
12183 case OMP_CLAUSE_GRAINSIZE:
12184 case OMP_CLAUSE_NUM_TASKS:
12185 case OMP_CLAUSE_FILTER:
12186 case OMP_CLAUSE_HINT:
12187 case OMP_CLAUSE_ASYNC:
12188 case OMP_CLAUSE_WAIT:
12189 case OMP_CLAUSE_NUM_GANGS:
12190 case OMP_CLAUSE_NUM_WORKERS:
12191 case OMP_CLAUSE_VECTOR_LENGTH:
12192 case OMP_CLAUSE_WORKER:
12193 case OMP_CLAUSE_VECTOR:
12194 if (OMP_CLAUSE_OPERAND (c, 0)
12195 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
12197 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
12199 remove = true;
12200 break;
12202 /* All these clauses care about the value, not a particular decl,
12203    so try to force it into an SSA_NAME or a fresh temporary.  */
12204 OMP_CLAUSE_OPERAND (c, 0)
12205 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
12206 pre_p, NULL, true);
12208 break;
12210 case OMP_CLAUSE_GANG:
12211 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
12212 is_gimple_val, fb_rvalue) == GS_ERROR)
12213 remove = true;
12214 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
12215 is_gimple_val, fb_rvalue) == GS_ERROR)
12216 remove = true;
12217 break;
12219 case OMP_CLAUSE_NOWAIT:
12220 nowait = 1;
12221 break;
12223 case OMP_CLAUSE_ORDERED:
12224 case OMP_CLAUSE_UNTIED:
12225 case OMP_CLAUSE_COLLAPSE:
12226 case OMP_CLAUSE_TILE:
12227 case OMP_CLAUSE_AUTO:
12228 case OMP_CLAUSE_SEQ:
12229 case OMP_CLAUSE_INDEPENDENT:
12230 case OMP_CLAUSE_MERGEABLE:
12231 case OMP_CLAUSE_PROC_BIND:
12232 case OMP_CLAUSE_SAFELEN:
12233 case OMP_CLAUSE_SIMDLEN:
12234 case OMP_CLAUSE_NOGROUP:
12235 case OMP_CLAUSE_THREADS:
12236 case OMP_CLAUSE_SIMD:
12237 case OMP_CLAUSE_BIND:
12238 case OMP_CLAUSE_IF_PRESENT:
12239 case OMP_CLAUSE_FINALIZE:
12240 break;
12242 case OMP_CLAUSE_ORDER:
12243 ctx->order_concurrent = true;
12244 break;
12246 case OMP_CLAUSE_DEFAULTMAP:
12247 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
12248 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
12250 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
12251 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
12252 gdmkmin = GDMK_SCALAR;
12253 gdmkmax = GDMK_POINTER;
12254 break;
12255 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
12256 gdmkmin = GDMK_SCALAR;
12257 gdmkmax = GDMK_SCALAR_TARGET;
12258 break;
12259 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
12260 gdmkmin = gdmkmax = GDMK_AGGREGATE;
12261 break;
12262 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
12263 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
12264 break;
12265 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
12266 gdmkmin = gdmkmax = GDMK_POINTER;
12267 break;
12268 default:
12269 gcc_unreachable ();
12271 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
12272 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
12274 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
12275 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
12276 break;
12277 case OMP_CLAUSE_DEFAULTMAP_TO:
12278 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
12279 break;
12280 case OMP_CLAUSE_DEFAULTMAP_FROM:
12281 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
12282 break;
12283 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
12284 ctx->defaultmap[gdmk] = GOVD_MAP;
12285 break;
12286 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
12287 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12288 break;
12289 case OMP_CLAUSE_DEFAULTMAP_NONE:
12290 ctx->defaultmap[gdmk] = 0;
12291 break;
12292 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
12293 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
12294 break;
12295 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
12296 switch (gdmk)
12298 case GDMK_SCALAR:
12299 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12300 break;
12301 case GDMK_SCALAR_TARGET:
12302 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
12303 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
12304 break;
12305 case GDMK_AGGREGATE:
12306 case GDMK_ALLOCATABLE:
12307 ctx->defaultmap[gdmk] = GOVD_MAP;
12308 break;
12309 case GDMK_POINTER:
12310 ctx->defaultmap[gdmk] = GOVD_MAP;
12311 if (!lang_GNU_Fortran ())
12312 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
12313 break;
12314 default:
12315 gcc_unreachable ();
12317 break;
12318 default:
12319 gcc_unreachable ();
12321 break;
12323 case OMP_CLAUSE_ALIGNED:
12324 decl = OMP_CLAUSE_DECL (c);
12325 if (error_operand_p (decl))
12327 remove = true;
12328 break;
12330 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12331 is_gimple_val, fb_rvalue) == GS_ERROR)
12333 remove = true;
12334 break;
12336 if (!is_global_var (decl)
12337 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12338 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12339 break;
12341 case OMP_CLAUSE_NONTEMPORAL:
12342 decl = OMP_CLAUSE_DECL (c);
12343 if (error_operand_p (decl))
12345 remove = true;
12346 break;
12348 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12349 break;
12351 case OMP_CLAUSE_ALLOCATE:
12352 decl = OMP_CLAUSE_DECL (c);
12353 if (error_operand_p (decl))
12355 remove = true;
12356 break;
12358 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12359 is_gimple_val, fb_rvalue) == GS_ERROR)
12361 remove = true;
12362 break;
12364 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12365 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12366 == INTEGER_CST))
12368 else if (code == OMP_TASKLOOP
12369 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12370 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12371 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12372 pre_p, NULL, false);
12373 break;
12375 case OMP_CLAUSE_DEFAULT:
12376 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12377 break;
12379 case OMP_CLAUSE_INCLUSIVE:
12380 case OMP_CLAUSE_EXCLUSIVE:
12381 decl = OMP_CLAUSE_DECL (c);
12383 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12384 (splay_tree_key) decl);
12385 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12387 error_at (OMP_CLAUSE_LOCATION (c),
12388 "%qD specified in %qs clause but not in %<inscan%> "
12389 "%<reduction%> clause on the containing construct",
12390 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12391 remove = true;
12393 else
12395 n->value |= GOVD_REDUCTION_INSCAN;
12396 if (outer_ctx->region_type == ORT_SIMD
12397 && outer_ctx->outer_context
12398 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12400 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12401 (splay_tree_key) decl);
12402 if (n && (n->value & GOVD_REDUCTION) != 0)
12403 n->value |= GOVD_REDUCTION_INSCAN;
12407 break;
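	  /* E.g. (a sketch) with

	       #pragma omp scan inclusive(x)

	     X must also appear in an "inscan" reduction clause on the
	     enclosing worksharing-loop or simd construct, which is what
	     the lookup above enforces.  */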
12409 case OMP_CLAUSE_NOHOST:
12410 default:
12411 gcc_unreachable ();
12414 if (code == OACC_DATA
12415 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12416 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12417 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12418 remove = true;
12419 if (remove)
12420 *list_p = OMP_CLAUSE_CHAIN (c);
12421 else
12422 list_p = &OMP_CLAUSE_CHAIN (c);
12425 ctx->clauses = *orig_list_p;
12426 gimplify_omp_ctxp = ctx;
12429 /* Return true if DECL is a candidate for the shared-to-firstprivate
12430    optimization.  We only consider non-addressable scalars that are
12431    not too big and are not references.  */
12433 static bool
12434 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12436 if (TREE_ADDRESSABLE (decl))
12437 return false;
12438 tree type = TREE_TYPE (decl);
12439 if (!is_gimple_reg_type (type)
12440 || TREE_CODE (type) == REFERENCE_TYPE
12441 || TREE_ADDRESSABLE (type))
12442 return false;
12443 /* Don't optimize overly large decls, as each thread/task would get
12444    its own copy.  */
12445 HOST_WIDE_INT len = int_size_in_bytes (type);
12446 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12447 return false;
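  /* E.g. on a 64-bit target the cut-off above is 4 * 64 / 8 = 32 bytes,
     i.e. at most four pointer-sized words (assuming BITS_PER_UNIT is 8).  */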
12448 if (omp_privatize_by_reference (decl))
12449 return false;
12450 return true;
12453 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12454    For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
12455    mark it as GOVD_WRITTEN in the outer contexts.  */
12457 static void
12458 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12460 for (; ctx; ctx = ctx->outer_context)
12462 splay_tree_node n = splay_tree_lookup (ctx->variables,
12463 (splay_tree_key) decl);
12464 if (n == NULL)
12465 continue;
12466 else if (n->value & GOVD_SHARED)
12468 n->value |= GOVD_WRITTEN;
12469 return;
12471 else if (n->value & GOVD_DATA_SHARE_CLASS)
12472 return;
12476 /* Helper callback for walk_gimple_seq to discover possible stores
12477    to omp_shared_to_firstprivate_optimizable_decl_p decls, setting
12478    GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12479    context.  */
12481 static tree
12482 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12484 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12486 *walk_subtrees = 0;
12487 if (!wi->is_lhs)
12488 return NULL_TREE;
12490 tree op = *tp;
12493 if (handled_component_p (op))
12494 op = TREE_OPERAND (op, 0);
12495 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12496 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12497 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12498 else
12499 break;
12501 while (1);
12502 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12503 return NULL_TREE;
12505 omp_mark_stores (gimplify_omp_ctxp, op);
12506 return NULL_TREE;
12509 /* Helper callback for walk_gimple_seq to discover possible stores
12510    to omp_shared_to_firstprivate_optimizable_decl_p decls, setting
12511    GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12512    context.  */
12514 static tree
12515 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12516 bool *handled_ops_p,
12517 struct walk_stmt_info *wi)
12519 gimple *stmt = gsi_stmt (*gsi_p);
12520 switch (gimple_code (stmt))
12522 /* Don't recurse on OpenMP constructs for which
12523 gimplify_adjust_omp_clauses already handled the bodies,
12524 except handle gimple_omp_for_pre_body. */
12525 case GIMPLE_OMP_FOR:
12526 *handled_ops_p = true;
12527 if (gimple_omp_for_pre_body (stmt))
12528 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12529 omp_find_stores_stmt, omp_find_stores_op, wi);
12530 break;
12531 case GIMPLE_OMP_PARALLEL:
12532 case GIMPLE_OMP_TASK:
12533 case GIMPLE_OMP_SECTIONS:
12534 case GIMPLE_OMP_SINGLE:
12535 case GIMPLE_OMP_SCOPE:
12536 case GIMPLE_OMP_TARGET:
12537 case GIMPLE_OMP_TEAMS:
12538 case GIMPLE_OMP_CRITICAL:
12539 *handled_ops_p = true;
12540 break;
12541 default:
12542 break;
12544 return NULL_TREE;
12547 struct gimplify_adjust_omp_clauses_data
12549 tree *list_p;
12550 gimple_seq *pre_p;
12553 /* For each variable seen in the context without an explicit clause,
12554    build the implicitly determined clause and prepend it to *LIST_P.  */
12556 static int
12557 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12559 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12560 gimple_seq *pre_p
12561 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12562 tree decl = (tree) n->key;
12563 unsigned flags = n->value;
12564 enum omp_clause_code code;
12565 tree clause;
12566 bool private_debug;
12568 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12569 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12570 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12571 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12572 return 0;
12573 if ((flags & GOVD_SEEN) == 0)
12574 return 0;
12575 if (flags & GOVD_DEBUG_PRIVATE)
12577 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12578 private_debug = true;
12580 else if (flags & GOVD_MAP)
12581 private_debug = false;
12582 else
12583 private_debug
12584 = lang_hooks.decls.omp_private_debug_clause (decl,
12585 !!(flags & GOVD_SHARED));
12586 if (private_debug)
12587 code = OMP_CLAUSE_PRIVATE;
12588 else if (flags & GOVD_MAP)
12590 code = OMP_CLAUSE_MAP;
12591 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12592 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12594 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12595 return 0;
12597 if (VAR_P (decl)
12598 && DECL_IN_CONSTANT_POOL (decl)
12599 && !lookup_attribute ("omp declare target",
12600 DECL_ATTRIBUTES (decl)))
12602 tree id = get_identifier ("omp declare target");
12603 DECL_ATTRIBUTES (decl)
12604 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12605 varpool_node *node = varpool_node::get (decl);
12606 if (node)
12608 node->offloadable = 1;
12609 if (ENABLE_OFFLOADING)
12610 g->have_offload = true;
12614 else if (flags & GOVD_SHARED)
12616 if (is_global_var (decl))
12618 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12619 while (ctx != NULL)
12621 splay_tree_node on
12622 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12623 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12624 | GOVD_PRIVATE | GOVD_REDUCTION
12625 | GOVD_LINEAR | GOVD_MAP)) != 0)
12626 break;
12627 ctx = ctx->outer_context;
12629 if (ctx == NULL)
12630 return 0;
12632 code = OMP_CLAUSE_SHARED;
12633 /* Don't optimize shared into firstprivate for read-only vars
12634    on tasks with a depend clause; we shouldn't try to copy them
12635    until the dependencies are satisfied.  */
12636 if (gimplify_omp_ctxp->has_depend)
12637 flags |= GOVD_WRITTEN;
12639 else if (flags & GOVD_PRIVATE)
12640 code = OMP_CLAUSE_PRIVATE;
12641 else if (flags & GOVD_FIRSTPRIVATE)
12643 code = OMP_CLAUSE_FIRSTPRIVATE;
12644 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12645 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12646 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12648 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12649 "%<target%> construct", decl);
12650 return 0;
12653 else if (flags & GOVD_LASTPRIVATE)
12654 code = OMP_CLAUSE_LASTPRIVATE;
12655 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12656 return 0;
12657 else if (flags & GOVD_CONDTEMP)
12659 code = OMP_CLAUSE__CONDTEMP_;
12660 gimple_add_tmp_var (decl);
12662 else
12663 gcc_unreachable ();
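  /* At this point CODE is the clause kind chosen for DECL from its
     GOVD_* flags; note possible stores and build the implicit clause.  */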
12665 if (((flags & GOVD_LASTPRIVATE)
12666 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12667 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12668 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12670 tree chain = *list_p;
12671 clause = build_omp_clause (input_location, code);
12672 OMP_CLAUSE_DECL (clause) = decl;
12673 OMP_CLAUSE_CHAIN (clause) = chain;
12674 if (private_debug)
12675 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12676 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12677 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12678 else if (code == OMP_CLAUSE_SHARED
12679 && (flags & GOVD_WRITTEN) == 0
12680 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12681 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12682 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12683 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12684 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12686 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12687 OMP_CLAUSE_DECL (nc) = decl;
12688 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12689 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12690 OMP_CLAUSE_DECL (clause)
12691 = build_simple_mem_ref_loc (input_location, decl);
12692 OMP_CLAUSE_DECL (clause)
12693 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12694 build_int_cst (build_pointer_type (char_type_node), 0));
12695 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12696 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12697 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12698 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12699 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12700 OMP_CLAUSE_CHAIN (nc) = chain;
12701 OMP_CLAUSE_CHAIN (clause) = nc;
12702 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12703 gimplify_omp_ctxp = ctx->outer_context;
12704 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12705 pre_p, NULL, is_gimple_val, fb_rvalue);
12706 gimplify_omp_ctxp = ctx;
12708 else if (code == OMP_CLAUSE_MAP)
12710 int kind;
12711 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12712 switch (flags & (GOVD_MAP_TO_ONLY
12713 | GOVD_MAP_FORCE
12714 | GOVD_MAP_FORCE_PRESENT
12715 | GOVD_MAP_ALLOC_ONLY
12716 | GOVD_MAP_FROM_ONLY))
12718 case 0:
12719 kind = GOMP_MAP_TOFROM;
12720 break;
12721 case GOVD_MAP_FORCE:
12722 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12723 break;
12724 case GOVD_MAP_TO_ONLY:
12725 kind = GOMP_MAP_TO;
12726 break;
12727 case GOVD_MAP_FROM_ONLY:
12728 kind = GOMP_MAP_FROM;
12729 break;
12730 case GOVD_MAP_ALLOC_ONLY:
12731 kind = GOMP_MAP_ALLOC;
12732 break;
12733 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12734 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12735 break;
12736 case GOVD_MAP_FORCE_PRESENT:
12737 kind = GOMP_MAP_FORCE_PRESENT;
12738 break;
12739 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
12740 kind = GOMP_MAP_FORCE_PRESENT;
12741 break;
12742 default:
12743 gcc_unreachable ();
12745 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12746 /* Setting of the implicit flag for the runtime is currently disabled for
12747 OpenACC. */
12748 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12749 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
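      /* A sketch of the handling below: a decl with non-constant size has
	 a DECL_VALUE_EXPR of the form *PTR; map the pointed-to storage
	 with its run-time size and chain a companion pointer clause so
	 the base pointer itself is firstprivatized or pointer-mapped.  */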
12750 if (DECL_SIZE (decl)
12751 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12753 tree decl2 = DECL_VALUE_EXPR (decl);
12754 gcc_assert (INDIRECT_REF_P (decl2));
12755 decl2 = TREE_OPERAND (decl2, 0);
12756 gcc_assert (DECL_P (decl2));
12757 tree mem = build_simple_mem_ref (decl2);
12758 OMP_CLAUSE_DECL (clause) = mem;
12759 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12760 if (gimplify_omp_ctxp->outer_context)
12762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12763 omp_notice_variable (ctx, decl2, true);
12764 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12766 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12767 OMP_CLAUSE_MAP);
12768 OMP_CLAUSE_DECL (nc) = decl;
12769 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12770 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12771 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12772 else
12773 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12774 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12775 OMP_CLAUSE_CHAIN (clause) = nc;
12777 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12778 && omp_privatize_by_reference (decl))
12780 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12781 OMP_CLAUSE_SIZE (clause)
12782 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12783 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12784 gimplify_omp_ctxp = ctx->outer_context;
12785 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12786 pre_p, NULL, is_gimple_val, fb_rvalue);
12787 gimplify_omp_ctxp = ctx;
12788 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12789 OMP_CLAUSE_MAP);
12790 OMP_CLAUSE_DECL (nc) = decl;
12791 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12792 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12793 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12794 OMP_CLAUSE_CHAIN (clause) = nc;
12796 else
12797 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12799 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12801 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12802 OMP_CLAUSE_DECL (nc) = decl;
12803 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12804 OMP_CLAUSE_CHAIN (nc) = chain;
12805 OMP_CLAUSE_CHAIN (clause) = nc;
12806 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12807 gimplify_omp_ctxp = ctx->outer_context;
12808 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12809 (ctx->region_type & ORT_ACC) != 0);
12810 gimplify_omp_ctxp = ctx;
12812 *list_p = clause;
12813 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12814 gimplify_omp_ctxp = ctx->outer_context;
12815 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12816    in simd.  Those are only added for the local vars inside the simd
12817    body and they don't need to be e.g. default constructible.  */
12818 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12819 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12820 (ctx->region_type & ORT_ACC) != 0);
12821 if (gimplify_omp_ctxp)
12822 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12823 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12824 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12825 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12826 true);
12827 gimplify_omp_ctxp = ctx;
12828 return 0;
12831 static void
12832 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12833 enum tree_code code)
12835 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12836 tree *orig_list_p = list_p;
12837 tree c, decl;
12838 bool has_inscan_reductions = false;
12840 if (body)
12842 struct gimplify_omp_ctx *octx;
12843 for (octx = ctx; octx; octx = octx->outer_context)
12844 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12845 break;
12846 if (octx)
12848 struct walk_stmt_info wi;
12849 memset (&wi, 0, sizeof (wi));
12850 walk_gimple_seq (body, omp_find_stores_stmt,
12851 omp_find_stores_op, &wi);
12855 if (ctx->add_safelen1)
12857 /* If there are VLAs in the body of the simd loop, prevent
12858    vectorization.  */
12859 gcc_assert (ctx->region_type == ORT_SIMD);
12860 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12861 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12862 OMP_CLAUSE_CHAIN (c) = *list_p;
12863 *list_p = c;
12864 list_p = &OMP_CLAUSE_CHAIN (c);
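      /* I.e. an implicit "safelen(1)" clause is prepended, which forces
	 the simd loop's iterations to behave as if executed sequentially
	 and thereby prevents vectorization.  */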
12867 if (ctx->region_type == ORT_WORKSHARE
12868 && ctx->outer_context
12869 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12871 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12872 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12873 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12875 decl = OMP_CLAUSE_DECL (c);
12876 splay_tree_node n
12877 = splay_tree_lookup (ctx->outer_context->variables,
12878 (splay_tree_key) decl);
12879 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12880 (splay_tree_key) decl));
12881 omp_add_variable (ctx, decl, n->value);
12882 tree c2 = copy_node (c);
12883 OMP_CLAUSE_CHAIN (c2) = *list_p;
12884 *list_p = c2;
12885 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12886 continue;
12887 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12888 OMP_CLAUSE_FIRSTPRIVATE);
12889 OMP_CLAUSE_DECL (c2) = decl;
12890 OMP_CLAUSE_CHAIN (c2) = *list_p;
12891 *list_p = c2;
12895 tree attach_list = NULL_TREE;
12896 tree *attach_tail = &attach_list;
12898 while ((c = *list_p) != NULL)
12900 splay_tree_node n;
12901 bool remove = false;
12902 bool move_attach = false;
12904 switch (OMP_CLAUSE_CODE (c))
12906 case OMP_CLAUSE_FIRSTPRIVATE:
12907 if ((ctx->region_type & ORT_TARGET)
12908 && (ctx->region_type & ORT_ACC) == 0
12909 && TYPE_ATOMIC (strip_array_types
12910 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12912 error_at (OMP_CLAUSE_LOCATION (c),
12913 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12914 "%<target%> construct", OMP_CLAUSE_DECL (c));
12915 remove = true;
12916 break;
12918 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12920 decl = OMP_CLAUSE_DECL (c);
12921 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12922 if ((n->value & GOVD_MAP) != 0)
12924 remove = true;
12925 break;
12927 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12928 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12930 /* FALLTHRU */
12931 case OMP_CLAUSE_PRIVATE:
12932 case OMP_CLAUSE_SHARED:
12933 case OMP_CLAUSE_LINEAR:
12934 decl = OMP_CLAUSE_DECL (c);
12935 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12936 remove = !(n->value & GOVD_SEEN);
12937 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12938 && code == OMP_PARALLEL
12939 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12940 remove = true;
12941 if (! remove)
12943 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12944 if ((n->value & GOVD_DEBUG_PRIVATE)
12945 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12947 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12948 || ((n->value & GOVD_DATA_SHARE_CLASS)
12949 == GOVD_SHARED));
12950 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12951 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12953 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12954 && ctx->has_depend
12955 && DECL_P (decl))
12956 n->value |= GOVD_WRITTEN;
12957 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12958 && (n->value & GOVD_WRITTEN) == 0
12959 && DECL_P (decl)
12960 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12961 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12962 else if (DECL_P (decl)
12963 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12964 && (n->value & GOVD_WRITTEN) != 0)
12965 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12966 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12967 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12968 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12970 else
12971 n->value &= ~GOVD_EXPLICIT;
12972 break;
12974 case OMP_CLAUSE_LASTPRIVATE:
12975 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12976 accurately reflect the presence of a FIRSTPRIVATE clause. */
12977 decl = OMP_CLAUSE_DECL (c);
12978 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12979 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12980 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12981 if (code == OMP_DISTRIBUTE
12982 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12984 remove = true;
12985 error_at (OMP_CLAUSE_LOCATION (c),
12986 "same variable used in %<firstprivate%> and "
12987 "%<lastprivate%> clauses on %<distribute%> "
12988 "construct");
12990 if (!remove
12991 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12992 && DECL_P (decl)
12993 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12994 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12995 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12996 remove = true;
12997 break;
12999 case OMP_CLAUSE_ALIGNED:
13000 decl = OMP_CLAUSE_DECL (c);
13001 if (!is_global_var (decl))
13003 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13004 remove = n == NULL || !(n->value & GOVD_SEEN);
13005 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
13007 struct gimplify_omp_ctx *octx;
13008 if (n != NULL
13009 && (n->value & (GOVD_DATA_SHARE_CLASS
13010 & ~GOVD_FIRSTPRIVATE)))
13011 remove = true;
13012 else
13013 for (octx = ctx->outer_context; octx;
13014 octx = octx->outer_context)
13016 n = splay_tree_lookup (octx->variables,
13017 (splay_tree_key) decl);
13018 if (n == NULL)
13019 continue;
13020 if (n->value & GOVD_LOCAL)
13021 break;
13022 /* We have to avoid assigning a shared variable
13023 to itself when trying to add
13024 __builtin_assume_aligned. */
13025 if (n->value & GOVD_SHARED)
13027 remove = true;
13028 break;
13033 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
13035 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13036 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
13037 remove = true;
13039 break;
13041 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13042 decl = OMP_CLAUSE_DECL (c);
13043 while (INDIRECT_REF_P (decl)
13044 || TREE_CODE (decl) == ARRAY_REF)
13045 decl = TREE_OPERAND (decl, 0);
13046 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13047 remove = n == NULL || !(n->value & GOVD_SEEN);
13048 break;
13050 case OMP_CLAUSE_IS_DEVICE_PTR:
13051 case OMP_CLAUSE_NONTEMPORAL:
13052 decl = OMP_CLAUSE_DECL (c);
13053 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13054 remove = n == NULL || !(n->value & GOVD_SEEN);
13055 break;
13057 case OMP_CLAUSE_MAP:
13058 switch (OMP_CLAUSE_MAP_KIND (c))
13060 case GOMP_MAP_PRESENT_ALLOC:
13061 case GOMP_MAP_PRESENT_TO:
13062 case GOMP_MAP_PRESENT_FROM:
13063 case GOMP_MAP_PRESENT_TOFROM:
13064 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
13065 break;
13066 default:
13067 break;
13069 if (code == OMP_TARGET_EXIT_DATA
13070 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
13072 remove = true;
13073 break;
13075 /* If we have a target region, we can push all the attaches to the
13076 end of the list (we may have standalone "attach" operations
13077 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
13078 the attachment point AND the pointed-to block have been mapped).
13079 If we have something else, e.g. "enter data", we need to keep
13080 "attach" nodes together with the previous node they attach to so
13081 that separate "exit data" operations work properly (see
13082 libgomp/target.c). */
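/* For instance (an illustrative sketch, not from the source): given
   #pragma omp target map(tofrom: s.ptr[0:n])
   a synthesized attach operation for s.ptr must run only after both the
   GOMP_MAP_STRUCT node for s and the pointed-to block have been mapped,
   which is why it can safely be pushed to the end of the list here. */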
13083 if ((ctx->region_type & ORT_TARGET) != 0
13084 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13085 || (OMP_CLAUSE_MAP_KIND (c)
13086 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
13087 move_attach = true;
13088 decl = OMP_CLAUSE_DECL (c);
13089 /* Data clauses associated with reductions must be
13090 compatible with present_or_copy. Warn and adjust the clause
13091 if that is not the case. */
13092 if (ctx->region_type == ORT_ACC_PARALLEL
13093 || ctx->region_type == ORT_ACC_SERIAL)
13095 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
13096 n = NULL;
13098 if (DECL_P (t))
13099 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
13101 if (n && (n->value & GOVD_REDUCTION))
13103 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
13105 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
13106 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
13107 && kind != GOMP_MAP_FORCE_PRESENT
13108 && kind != GOMP_MAP_POINTER)
13110 warning_at (OMP_CLAUSE_LOCATION (c), 0,
13111 "incompatible data clause with reduction "
13112 "on %qE; promoting to %<present_or_copy%>",
13113 DECL_NAME (t));
13114 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
13118 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
13119 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
13121 remove = true;
13122 break;
13124 if (!DECL_P (decl))
13126 if ((ctx->region_type & ORT_TARGET) != 0
13127 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
13129 if (INDIRECT_REF_P (decl)
13130 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
13131 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
13132 == REFERENCE_TYPE))
13133 decl = TREE_OPERAND (decl, 0);
13134 if (TREE_CODE (decl) == COMPONENT_REF)
13136 while (TREE_CODE (decl) == COMPONENT_REF)
13137 decl = TREE_OPERAND (decl, 0);
13138 if (DECL_P (decl))
13140 n = splay_tree_lookup (ctx->variables,
13141 (splay_tree_key) decl);
13142 if (!(n->value & GOVD_SEEN))
13143 remove = true;
13147 break;
13149 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13150 if ((ctx->region_type & ORT_TARGET) != 0
13151 && !(n->value & GOVD_SEEN)
13152 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
13153 && (!is_global_var (decl)
13154 || !lookup_attribute ("omp declare target link",
13155 DECL_ATTRIBUTES (decl))))
13157 remove = true;
13158 /* For struct element mappings, if the struct is never referenced
13159 in the target block and none of the mappings has an always modifier,
13160 remove all the struct element mappings that immediately
13161 follow the GOMP_MAP_STRUCT map clause. */
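/* E.g. (a hedged illustration) for
   #pragma omp target map(s.a, s.b)
   where s itself is unused in the region, OMP_CLAUSE_SIZE of the
   GOMP_MAP_STRUCT node holds the element count (2 here), and that many
   following clauses are unlinked along with it. */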
13162 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
13164 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
13165 while (cnt--)
13166 OMP_CLAUSE_CHAIN (c)
13167 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
13170 else if (DECL_SIZE (decl)
13171 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
13172 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
13173 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
13174 && (OMP_CLAUSE_MAP_KIND (c)
13175 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
13177 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
13178 for these, TREE_CODE (DECL_SIZE (decl)) will always be
13179 INTEGER_CST. */
13180 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
13182 tree decl2 = DECL_VALUE_EXPR (decl);
13183 gcc_assert (INDIRECT_REF_P (decl2));
13184 decl2 = TREE_OPERAND (decl2, 0);
13185 gcc_assert (DECL_P (decl2));
13186 tree mem = build_simple_mem_ref (decl2);
13187 OMP_CLAUSE_DECL (c) = mem;
13188 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13189 if (ctx->outer_context)
13191 omp_notice_variable (ctx->outer_context, decl2, true);
13192 omp_notice_variable (ctx->outer_context,
13193 OMP_CLAUSE_SIZE (c), true);
13195 if (((ctx->region_type & ORT_TARGET) != 0
13196 || !ctx->target_firstprivatize_array_bases)
13197 && ((n->value & GOVD_SEEN) == 0
13198 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
13200 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13201 OMP_CLAUSE_MAP);
13202 OMP_CLAUSE_DECL (nc) = decl;
13203 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13204 if (ctx->target_firstprivatize_array_bases)
13205 OMP_CLAUSE_SET_MAP_KIND (nc,
13206 GOMP_MAP_FIRSTPRIVATE_POINTER);
13207 else
13208 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13209 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
13210 OMP_CLAUSE_CHAIN (c) = nc;
13211 c = nc;
13214 else
13216 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
13217 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
13218 gcc_assert ((n->value & GOVD_SEEN) == 0
13219 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13220 == 0));
13222 break;
13224 case OMP_CLAUSE_TO:
13225 case OMP_CLAUSE_FROM:
13226 case OMP_CLAUSE__CACHE_:
13227 decl = OMP_CLAUSE_DECL (c);
13228 if (!DECL_P (decl))
13229 break;
13230 if (DECL_SIZE (decl)
13231 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
13233 tree decl2 = DECL_VALUE_EXPR (decl);
13234 gcc_assert (INDIRECT_REF_P (decl2));
13235 decl2 = TREE_OPERAND (decl2, 0);
13236 gcc_assert (DECL_P (decl2));
13237 tree mem = build_simple_mem_ref (decl2);
13238 OMP_CLAUSE_DECL (c) = mem;
13239 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13240 if (ctx->outer_context)
13242 omp_notice_variable (ctx->outer_context, decl2, true);
13243 omp_notice_variable (ctx->outer_context,
13244 OMP_CLAUSE_SIZE (c), true);
13247 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
13248 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
13249 break;
13251 case OMP_CLAUSE_REDUCTION:
13252 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
13254 decl = OMP_CLAUSE_DECL (c);
13255 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13256 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
13258 remove = true;
13259 error_at (OMP_CLAUSE_LOCATION (c),
13260 "%qD specified in %<inscan%> %<reduction%> clause "
13261 "but not in %<scan%> directive clause", decl);
13262 break;
13264 has_inscan_reductions = true;
13266 /* FALLTHRU */
13267 case OMP_CLAUSE_IN_REDUCTION:
13268 case OMP_CLAUSE_TASK_REDUCTION:
13269 decl = OMP_CLAUSE_DECL (c);
13270 /* OpenACC reductions need a present_or_copy data clause.
13271 Add one if necessary. Emit an error when the reduction is private. */
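/* E.g. (illustrative) "#pragma acc parallel reduction(+:sum)" with no
   explicit data clause for sum receives an implicit map(tofrom:sum)
   below, so the reduction result is copied back to the host. */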
13272 if (ctx->region_type == ORT_ACC_PARALLEL
13273 || ctx->region_type == ORT_ACC_SERIAL)
13275 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13276 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13278 remove = true;
13279 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
13280 "reduction on %qE", DECL_NAME (decl));
13282 else if ((n->value & GOVD_MAP) == 0)
13284 tree next = OMP_CLAUSE_CHAIN (c);
13285 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
13286 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
13287 OMP_CLAUSE_DECL (nc) = decl;
13288 OMP_CLAUSE_CHAIN (c) = nc;
13289 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13290 (ctx->region_type
13291 & ORT_ACC) != 0);
13292 while (1)
13294 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
13295 if (OMP_CLAUSE_CHAIN (nc) == NULL)
13296 break;
13297 nc = OMP_CLAUSE_CHAIN (nc);
13299 OMP_CLAUSE_CHAIN (nc) = next;
13300 n->value |= GOVD_MAP;
13303 if (DECL_P (decl)
13304 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13305 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13306 break;
13308 case OMP_CLAUSE_ALLOCATE:
13309 decl = OMP_CLAUSE_DECL (c);
13310 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13311 if (n != NULL && !(n->value & GOVD_SEEN))
13313 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
13314 != 0
13315 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
13316 remove = true;
13318 if (!remove
13319 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13320 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
13321 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
13322 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
13323 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
13325 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13326 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
13327 if (n == NULL)
13329 enum omp_clause_default_kind default_kind
13330 = ctx->default_kind;
13331 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
13332 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13333 true);
13334 ctx->default_kind = default_kind;
13336 else
13337 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13338 true);
13340 break;
13342 case OMP_CLAUSE_COPYIN:
13343 case OMP_CLAUSE_COPYPRIVATE:
13344 case OMP_CLAUSE_IF:
13345 case OMP_CLAUSE_NUM_THREADS:
13346 case OMP_CLAUSE_NUM_TEAMS:
13347 case OMP_CLAUSE_THREAD_LIMIT:
13348 case OMP_CLAUSE_DIST_SCHEDULE:
13349 case OMP_CLAUSE_DEVICE:
13350 case OMP_CLAUSE_SCHEDULE:
13351 case OMP_CLAUSE_NOWAIT:
13352 case OMP_CLAUSE_ORDERED:
13353 case OMP_CLAUSE_DEFAULT:
13354 case OMP_CLAUSE_UNTIED:
13355 case OMP_CLAUSE_COLLAPSE:
13356 case OMP_CLAUSE_FINAL:
13357 case OMP_CLAUSE_MERGEABLE:
13358 case OMP_CLAUSE_PROC_BIND:
13359 case OMP_CLAUSE_SAFELEN:
13360 case OMP_CLAUSE_SIMDLEN:
13361 case OMP_CLAUSE_DEPEND:
13362 case OMP_CLAUSE_DOACROSS:
13363 case OMP_CLAUSE_PRIORITY:
13364 case OMP_CLAUSE_GRAINSIZE:
13365 case OMP_CLAUSE_NUM_TASKS:
13366 case OMP_CLAUSE_NOGROUP:
13367 case OMP_CLAUSE_THREADS:
13368 case OMP_CLAUSE_SIMD:
13369 case OMP_CLAUSE_FILTER:
13370 case OMP_CLAUSE_HINT:
13371 case OMP_CLAUSE_DEFAULTMAP:
13372 case OMP_CLAUSE_ORDER:
13373 case OMP_CLAUSE_BIND:
13374 case OMP_CLAUSE_DETACH:
13375 case OMP_CLAUSE_USE_DEVICE_PTR:
13376 case OMP_CLAUSE_USE_DEVICE_ADDR:
13377 case OMP_CLAUSE_ASYNC:
13378 case OMP_CLAUSE_WAIT:
13379 case OMP_CLAUSE_INDEPENDENT:
13380 case OMP_CLAUSE_NUM_GANGS:
13381 case OMP_CLAUSE_NUM_WORKERS:
13382 case OMP_CLAUSE_VECTOR_LENGTH:
13383 case OMP_CLAUSE_GANG:
13384 case OMP_CLAUSE_WORKER:
13385 case OMP_CLAUSE_VECTOR:
13386 case OMP_CLAUSE_AUTO:
13387 case OMP_CLAUSE_SEQ:
13388 case OMP_CLAUSE_TILE:
13389 case OMP_CLAUSE_IF_PRESENT:
13390 case OMP_CLAUSE_FINALIZE:
13391 case OMP_CLAUSE_INCLUSIVE:
13392 case OMP_CLAUSE_EXCLUSIVE:
13393 break;
13395 case OMP_CLAUSE_NOHOST:
13396 default:
13397 gcc_unreachable ();
13400 if (remove)
13401 *list_p = OMP_CLAUSE_CHAIN (c);
13402 else if (move_attach)
13404 /* Remove the attach node from here and separate it out into its own list. */
13405 *attach_tail = c;
13406 *list_p = OMP_CLAUSE_CHAIN (c);
13407 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13408 attach_tail = &OMP_CLAUSE_CHAIN (c);
13410 else
13411 list_p = &OMP_CLAUSE_CHAIN (c);
13414 /* Splice attach nodes at the end of the list. */
13415 if (attach_list)
13417 *list_p = attach_list;
13418 list_p = attach_tail;
13421 /* Add in any implicit data sharing. */
13422 struct gimplify_adjust_omp_clauses_data data;
13423 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13425 /* OpenMP. Implicit clauses are added at the start of the clause list,
13426 but after any non-map clauses. */
13427 tree *implicit_add_list_p = orig_list_p;
13428 while (*implicit_add_list_p
13429 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13430 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13431 data.list_p = implicit_add_list_p;
13433 else
13434 /* OpenACC. */
13435 data.list_p = list_p;
13436 data.pre_p = pre_p;
13437 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13439 if (has_inscan_reductions)
13440 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13441 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13442 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13444 error_at (OMP_CLAUSE_LOCATION (c),
13445 "%<inscan%> %<reduction%> clause used together with "
13446 "%<linear%> clause for a variable other than loop "
13447 "iterator");
13448 break;
13451 gimplify_omp_ctxp = ctx->outer_context;
13452 delete_omp_context (ctx);
13455 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
13456 -1 if not known yet (simd is involved, so it won't be known until
13457 vectorization) and 1 if they do. If SCORES is non-NULL, it should point
13458 to an array of at least 2*NCONSTRUCTS+2 ints, and will be filled with
13459 the positions of the CONSTRUCTS (position -1 if one will never match)
13460 followed by the number of constructs in the OpenMP context construct
13461 trait. If the score depends on whether it will be in a declare simd
13462 clone or not, the function returns 2 and there will be two sets of
13463 scores, the first one for the case that it is not in a declare simd
13464 clone, the other for the case that it is. */
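/* As a hypothetical example of usage: for a context selector such as
   construct={target,parallel,for}, CONSTRUCTS would be
   { OMP_TARGET, OMP_PARALLEL, OMP_FOR } with NCONSTRUCTS 3, and the
   return value says whether the surrounding gimplification context
   matches that nesting (a sketch; see omp_constructor_traits_to_codes
   for the trait-to-code mapping). */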
13466 int
13467 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13468 int *scores)
13470 int matched = 0, cnt = 0;
13471 bool simd_seen = false;
13472 bool target_seen = false;
13473 int declare_simd_cnt = -1;
13474 auto_vec<enum tree_code, 16> codes;
13475 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13477 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13478 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13479 == ORT_TARGET && ctx->code == OMP_TARGET)
13480 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13481 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13482 || (ctx->region_type == ORT_SIMD
13483 && ctx->code == OMP_SIMD
13484 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13486 ++cnt;
13487 if (scores)
13488 codes.safe_push (ctx->code);
13489 else if (matched < nconstructs && ctx->code == constructs[matched])
13491 if (ctx->code == OMP_SIMD)
13493 if (matched)
13494 return 0;
13495 simd_seen = true;
13497 ++matched;
13499 if (ctx->code == OMP_TARGET)
13501 if (scores == NULL)
13502 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13503 target_seen = true;
13504 break;
13507 else if (ctx->region_type == ORT_WORKSHARE
13508 && ctx->code == OMP_LOOP
13509 && ctx->outer_context
13510 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13511 && ctx->outer_context->outer_context
13512 && ctx->outer_context->outer_context->code == OMP_LOOP
13513 && ctx->outer_context->outer_context->distribute)
13514 ctx = ctx->outer_context->outer_context;
13515 ctx = ctx->outer_context;
13517 if (!target_seen
13518 && lookup_attribute ("omp declare simd",
13519 DECL_ATTRIBUTES (current_function_decl)))
13521 /* Declare simd is a "maybe" case; it is supposed to be added only to
13522 the clones created by omp-simd-clone.cc, not to the base function. */
13523 declare_simd_cnt = cnt++;
13524 if (scores)
13525 codes.safe_push (OMP_SIMD);
13526 else if (cnt == 0
13527 && constructs[0] == OMP_SIMD)
13529 gcc_assert (matched == 0);
13530 simd_seen = true;
13531 if (++matched == nconstructs)
13532 return -1;
13535 if (tree attr = lookup_attribute ("omp declare variant variant",
13536 DECL_ATTRIBUTES (current_function_decl)))
13538 enum tree_code variant_constructs[5];
13539 int variant_nconstructs = 0;
13540 if (!target_seen)
13541 variant_nconstructs
13542 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13543 variant_constructs);
13544 for (int i = 0; i < variant_nconstructs; i++)
13546 ++cnt;
13547 if (scores)
13548 codes.safe_push (variant_constructs[i]);
13549 else if (matched < nconstructs
13550 && variant_constructs[i] == constructs[matched])
13552 if (variant_constructs[i] == OMP_SIMD)
13554 if (matched)
13555 return 0;
13556 simd_seen = true;
13558 ++matched;
13562 if (!target_seen
13563 && lookup_attribute ("omp declare target block",
13564 DECL_ATTRIBUTES (current_function_decl)))
13566 if (scores)
13567 codes.safe_push (OMP_TARGET);
13568 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13569 ++matched;
13571 if (scores)
13573 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13575 int j = codes.length () - 1;
13576 for (int i = nconstructs - 1; i >= 0; i--)
13578 while (j >= 0
13579 && (pass != 0 || declare_simd_cnt != j)
13580 && constructs[i] != codes[j])
13581 --j;
13582 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13583 *scores++ = j - 1;
13584 else
13585 *scores++ = j;
13587 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13588 ? codes.length () - 1 : codes.length ());
13590 return declare_simd_cnt == -1 ? 1 : 2;
13592 if (matched == nconstructs)
13593 return simd_seen ? -1 : 1;
13594 return 0;
13597 /* Gimplify OACC_CACHE. */
13599 static void
13600 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13602 tree expr = *expr_p;
13604 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13605 OACC_CACHE);
13606 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13607 OACC_CACHE);
13609 /* TODO: Do something sensible with this information. */
13611 *expr_p = NULL_TREE;
13614 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
13615 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
13616 kind. The entry kind will replace the one in CLAUSE, while the exit
13617 kind will be used in a new omp_clause and returned to the caller. */
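/* E.g. (an assumed mapping, per the switch below) an OpenACC
   "declare copy(x)" arrives as GOMP_MAP_TOFROM and is split into
   GOMP_MAP_TO on entry plus a returned GOMP_MAP_FROM clause for exit. */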
13619 static tree
13620 gimplify_oacc_declare_1 (tree clause)
13622 HOST_WIDE_INT kind, new_op;
13623 bool ret = false;
13624 tree c = NULL;
13626 kind = OMP_CLAUSE_MAP_KIND (clause);
13628 switch (kind)
13630 case GOMP_MAP_ALLOC:
13631 new_op = GOMP_MAP_RELEASE;
13632 ret = true;
13633 break;
13635 case GOMP_MAP_FROM:
13636 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13637 new_op = GOMP_MAP_FROM;
13638 ret = true;
13639 break;
13641 case GOMP_MAP_TOFROM:
13642 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13643 new_op = GOMP_MAP_FROM;
13644 ret = true;
13645 break;
13647 case GOMP_MAP_DEVICE_RESIDENT:
13648 case GOMP_MAP_FORCE_DEVICEPTR:
13649 case GOMP_MAP_FORCE_PRESENT:
13650 case GOMP_MAP_LINK:
13651 case GOMP_MAP_POINTER:
13652 case GOMP_MAP_TO:
13653 break;
13655 default:
13656 gcc_unreachable ();
13657 break;
13660 if (ret)
13662 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13663 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13664 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13667 return c;
13670 /* Gimplify OACC_DECLARE. */
13672 static void
13673 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13675 tree expr = *expr_p;
13676 gomp_target *stmt;
13677 tree clauses, t, decl;
13679 clauses = OACC_DECLARE_CLAUSES (expr);
13681 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13682 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13684 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13686 decl = OMP_CLAUSE_DECL (t);
13688 if (TREE_CODE (decl) == MEM_REF)
13689 decl = TREE_OPERAND (decl, 0);
13691 if (VAR_P (decl) && !is_oacc_declared (decl))
13693 tree attr = get_identifier ("oacc declare target");
13694 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13695 DECL_ATTRIBUTES (decl));
13698 if (VAR_P (decl)
13699 && !is_global_var (decl)
13700 && DECL_CONTEXT (decl) == current_function_decl)
13702 tree c = gimplify_oacc_declare_1 (t);
13703 if (c)
13705 if (oacc_declare_returns == NULL)
13706 oacc_declare_returns = new hash_map<tree, tree>;
13708 oacc_declare_returns->put (decl, c);
13712 if (gimplify_omp_ctxp)
13713 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13716 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13717 clauses);
13719 gimplify_seq_add_stmt (pre_p, stmt);
13721 *expr_p = NULL_TREE;
13724 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13725 gimplification of the body, as well as scanning the body for used
13726 variables. We need to do this scan now, because variable-sized
13727 decls will be decomposed during gimplification. */
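/* For instance (illustrative): in
   #pragma omp parallel
   { char buf[n]; ... }
   the VLA "buf" is rewritten in terms of a pointer during
   gimplification, so the body must be scanned for used variables
   before that decomposition happens. */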
13729 static void
13730 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13732 tree expr = *expr_p;
13733 gimple *g;
13734 gimple_seq body = NULL;
13736 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13737 OMP_PARALLEL_COMBINED (expr)
13738 ? ORT_COMBINED_PARALLEL
13739 : ORT_PARALLEL, OMP_PARALLEL);
13741 push_gimplify_context ();
13743 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13744 if (gimple_code (g) == GIMPLE_BIND)
13745 pop_gimplify_context (g);
13746 else
13747 pop_gimplify_context (NULL);
13749 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13750 OMP_PARALLEL);
13752 g = gimple_build_omp_parallel (body,
13753 OMP_PARALLEL_CLAUSES (expr),
13754 NULL_TREE, NULL_TREE);
13755 if (OMP_PARALLEL_COMBINED (expr))
13756 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13757 gimplify_seq_add_stmt (pre_p, g);
13758 *expr_p = NULL_TREE;
13761 /* Gimplify the contents of an OMP_TASK statement. This involves
13762 gimplification of the body, as well as scanning the body for used
13763 variables. We need to do this scan now, because variable-sized
13764 decls will be decomposed during gimplification. */
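/* A bodyless OMP_TASK represents a stand-alone taskwait, e.g.
   (illustrative) "#pragma omp taskwait depend(in: x)"; it is lowered
   below as a task with the taskwait_p flag set rather than as a real
   task region. */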
13766 static void
13767 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13769 tree expr = *expr_p;
13770 gimple *g;
13771 gimple_seq body = NULL;
13772 bool nowait = false;
13773 bool has_depend = false;
13775 if (OMP_TASK_BODY (expr) == NULL_TREE)
13777 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13778 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13780 has_depend = true;
13781 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13783 error_at (OMP_CLAUSE_LOCATION (c),
13784 "%<mutexinoutset%> kind in %<depend%> clause on a "
13785 "%<taskwait%> construct");
13786 break;
13789 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13790 nowait = true;
13791 if (nowait && !has_depend)
13793 error_at (EXPR_LOCATION (expr),
13794 "%<taskwait%> construct with %<nowait%> clause but no "
13795 "%<depend%> clauses");
13796 *expr_p = NULL_TREE;
13797 return;
13801 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13802 omp_find_clause (OMP_TASK_CLAUSES (expr),
13803 OMP_CLAUSE_UNTIED)
13804 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13806 if (OMP_TASK_BODY (expr))
13808 push_gimplify_context ();
13810 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13811 if (gimple_code (g) == GIMPLE_BIND)
13812 pop_gimplify_context (g);
13813 else
13814 pop_gimplify_context (NULL);
13817 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13818 OMP_TASK);
13820 g = gimple_build_omp_task (body,
13821 OMP_TASK_CLAUSES (expr),
13822 NULL_TREE, NULL_TREE,
13823 NULL_TREE, NULL_TREE, NULL_TREE);
13824 if (OMP_TASK_BODY (expr) == NULL_TREE)
13825 gimple_omp_task_set_taskwait_p (g, true);
13826 gimplify_seq_add_stmt (pre_p, g);
13827 *expr_p = NULL_TREE;
13830 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13831 force it into a temporary initialized in PRE_P and add a firstprivate
13832 clause to ORIG_FOR_STMT. */
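/* E.g. (a sketch; start/end/step are hypothetical names) for
   #pragma omp taskloop
   for (i = start (); i < end (); i += step ())
   each non-constant bound or step is evaluated into a temporary ahead
   of the construct and that temporary is made firstprivate on it, so
   the created tasks all see the same values. */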
13834 static void
13835 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13836 tree orig_for_stmt)
13838 if (*tp == NULL || is_gimple_constant (*tp))
13839 return;
13841 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13842 /* A reference-to-pointer conversion is considered useless,
13843 but it is significant for the firstprivate clause. Force it
13844 here. */
13845 if (type
13846 && TREE_CODE (type) == POINTER_TYPE
13847 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13849 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13850 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13851 gimplify_and_add (m, pre_p);
13852 *tp = v;
13855 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13856 OMP_CLAUSE_DECL (c) = *tp;
13857 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13858 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13861 /* Helper function of gimplify_omp_for: find an OMP_ORDERED with a
13862 null OMP_ORDERED_BODY inside the OMP_FOR's body. */
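/* A null body distinguishes the stand-alone form, e.g. (illustrative)
   "#pragma omp ordered doacross(sink: i - 1)", from the block form
   "#pragma omp ordered { ... }". */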
13864 static tree
13865 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13867 switch (TREE_CODE (*tp))
13869 case OMP_ORDERED:
13870 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13871 return *tp;
13872 break;
13873 case OMP_SIMD:
13874 case OMP_PARALLEL:
13875 case OMP_TARGET:
13876 *walk_subtrees = 0;
13877 break;
13878 default:
13879 break;
13881 return NULL_TREE;
13884 /* Gimplify the gross structure of an OMP_FOR statement. */
13886 static enum gimplify_status
13887 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13889 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13890 enum gimplify_status ret = GS_ALL_DONE;
13891 enum gimplify_status tret;
13892 gomp_for *gfor;
13893 gimple_seq for_body, for_pre_body;
13894 int i;
13895 bitmap has_decl_expr = NULL;
13896 enum omp_region_type ort = ORT_WORKSHARE;
13897 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13899 orig_for_stmt = for_stmt = *expr_p;
13901 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13902 != NULL_TREE);
13903 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13905 tree *data[4] = { NULL, NULL, NULL, NULL };
13906 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13907 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13908 find_combined_omp_for, data, NULL);
13909 if (inner_for_stmt == NULL_TREE)
13911 gcc_assert (seen_error ());
13912 *expr_p = NULL_TREE;
13913 return GS_ERROR;
13915 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13917 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13918 &OMP_FOR_PRE_BODY (for_stmt));
13919 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13921 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13923 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13924 &OMP_FOR_PRE_BODY (for_stmt));
13925 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13928 if (data[0])
13930 /* We have some statements or variable declarations in between
13931 the composite construct directives. Move them around the
13932 inner_for_stmt. */
13933 data[0] = expr_p;
13934 for (i = 0; i < 3; i++)
13935 if (data[i])
13937 tree t = *data[i];
13938 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13939 data[i + 1] = data[i];
13940 *data[i] = OMP_BODY (t);
13941 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13942 NULL_TREE, make_node (BLOCK));
13943 OMP_BODY (t) = body;
13944 append_to_statement_list_force (inner_for_stmt,
13945 &BIND_EXPR_BODY (body));
13946 *data[3] = t;
13947 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13948 gcc_assert (*data[3] == inner_for_stmt);
13950 return GS_OK;
13953 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13954 if (!loop_p
13955 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13956 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13957 i)) == TREE_LIST
13958 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13959 i)))
13961 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13962 /* Class iterators aren't allowed on OMP_SIMD, so the only
13963 case we need to solve is distribute parallel for. They are
13964 allowed on the loop construct, but that is already handled
13965 in gimplify_omp_loop. */
13966 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13967 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13968 && data[1]);
13969 tree orig_decl = TREE_PURPOSE (orig);
13970 tree last = TREE_VALUE (orig);
13971 tree *pc;
13972 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13973 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13974 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13975 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13976 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13977 break;
13978 if (*pc == NULL_TREE)
13980 tree *spc;
13981 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13982 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13983 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13984 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13985 break;
13986 if (*spc)
13988 tree c = *spc;
13989 *spc = OMP_CLAUSE_CHAIN (c);
13990 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13991 *pc = c;
13994 if (*pc == NULL_TREE)
13996 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13998 /* A private clause will appear only on inner_for_stmt.
13999 Change it into firstprivate, and add a private clause
14000 on for_stmt. */
14001 tree c = copy_node (*pc);
14002 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14003 OMP_FOR_CLAUSES (for_stmt) = c;
14004 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
14005 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
14007 else
14009 /* A lastprivate clause will appear on both inner_for_stmt
14010 and for_stmt. Add a firstprivate clause to
14011 inner_for_stmt. */
14012 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
14013 OMP_CLAUSE_FIRSTPRIVATE);
14014 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
14015 OMP_CLAUSE_CHAIN (c) = *pc;
14016 *pc = c;
14017 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
14019 tree c = build_omp_clause (UNKNOWN_LOCATION,
14020 OMP_CLAUSE_FIRSTPRIVATE);
14021 OMP_CLAUSE_DECL (c) = last;
14022 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14023 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14024 c = build_omp_clause (UNKNOWN_LOCATION,
14025 *pc ? OMP_CLAUSE_SHARED
14026 : OMP_CLAUSE_FIRSTPRIVATE);
14027 OMP_CLAUSE_DECL (c) = orig_decl;
14028 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14029 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14031 /* Similarly, take care of C++ range-for temporaries; those should
14032 be firstprivate on the OMP_PARALLEL, if any. */
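/* E.g. (illustrative) in
   #pragma omp distribute parallel for
   for (auto v : cont)
   the compiler-generated __for_end (and, if present, __for_range)
   temporaries are given firstprivate clauses on the parallel below. */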
14033 if (data[1])
14034 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
14035 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
14036 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
14037 i)) == TREE_LIST
14038 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
14039 i)))
14041 tree orig
14042 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
14043 tree v = TREE_CHAIN (orig);
14044 tree c = build_omp_clause (UNKNOWN_LOCATION,
14045 OMP_CLAUSE_FIRSTPRIVATE);
14046 /* First add a firstprivate clause for the __for_end artificial
14047 decl. */
14048 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
14049 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
14050 == REFERENCE_TYPE)
14051 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
14052 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14053 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14054 if (TREE_VEC_ELT (v, 0))
14056 /* And now the same for the __for_range artificial decl if it
14057 exists. */
14058 c = build_omp_clause (UNKNOWN_LOCATION,
14059 OMP_CLAUSE_FIRSTPRIVATE);
14060 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
14061 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
14062 == REFERENCE_TYPE)
14063 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
14064 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14065 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14070 switch (TREE_CODE (for_stmt))
14072 case OMP_FOR:
14073 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
14075 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14076 OMP_CLAUSE_SCHEDULE))
14077 error_at (EXPR_LOCATION (for_stmt),
14078 "%qs clause may not appear on non-rectangular %qs",
14079 "schedule", lang_GNU_Fortran () ? "do" : "for");
14080 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
14081 error_at (EXPR_LOCATION (for_stmt),
14082 "%qs clause may not appear on non-rectangular %qs",
14083 "ordered", lang_GNU_Fortran () ? "do" : "for");
14085 break;
14086 case OMP_DISTRIBUTE:
14087 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
14088 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14089 OMP_CLAUSE_DIST_SCHEDULE))
14090 error_at (EXPR_LOCATION (for_stmt),
14091 "%qs clause may not appear on non-rectangular %qs",
14092 "dist_schedule", "distribute");
14093 break;
14094 case OACC_LOOP:
14095 ort = ORT_ACC;
14096 break;
14097 case OMP_TASKLOOP:
14098 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
14100 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14101 OMP_CLAUSE_GRAINSIZE))
14102 error_at (EXPR_LOCATION (for_stmt),
14103 "%qs clause may not appear on non-rectangular %qs",
14104 "grainsize", "taskloop");
14105 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14106 OMP_CLAUSE_NUM_TASKS))
14107 error_at (EXPR_LOCATION (for_stmt),
14108 "%qs clause may not appear on non-rectangular %qs",
14109 "num_tasks", "taskloop");
14111 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
14112 ort = ORT_UNTIED_TASKLOOP;
14113 else
14114 ort = ORT_TASKLOOP;
14115 break;
14116 case OMP_SIMD:
14117 ort = ORT_SIMD;
14118 break;
14119 default:
14120 gcc_unreachable ();
14123 /* Set the OMP_CLAUSE_LINEAR_NO_COPYIN flag on an explicit linear
14124 clause for the IV. */
14125 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14127 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
14128 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14129 decl = TREE_OPERAND (t, 0);
14130 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
14131 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14132 && OMP_CLAUSE_DECL (c) == decl)
14134 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14135 break;
14139 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
14140 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
14141 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
14142 ? OMP_LOOP : TREE_CODE (for_stmt));
14144 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
14145 gimplify_omp_ctxp->distribute = true;
14147 /* Handle OMP_FOR_INIT. */
14148 for_pre_body = NULL;
14149 if ((ort == ORT_SIMD
14150 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
14151 && OMP_FOR_PRE_BODY (for_stmt))
14153 has_decl_expr = BITMAP_ALLOC (NULL);
14154 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
14155 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
14157 t = OMP_FOR_PRE_BODY (for_stmt);
14158 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
14160 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
14162 tree_stmt_iterator si;
14163 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
14164 tsi_next (&si))
14166 t = tsi_stmt (si);
14167 if (TREE_CODE (t) == DECL_EXPR
14168 && VAR_P (DECL_EXPR_DECL (t)))
14169 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
14173 if (OMP_FOR_PRE_BODY (for_stmt))
14175 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
14176 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
14177 else
14179 struct gimplify_omp_ctx ctx;
14180 memset (&ctx, 0, sizeof (ctx));
14181 ctx.region_type = ORT_NONE;
14182 gimplify_omp_ctxp = &ctx;
14183 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
14184 gimplify_omp_ctxp = NULL;
14187 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
14189 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
14190 for_stmt = inner_for_stmt;
14192 /* For taskloop, we need to gimplify the start, end and step before the
14193 taskloop, outside of the taskloop omp context. */
14194 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14196 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14198 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14199 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
14200 ? pre_p : &for_pre_body);
14201 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
14202 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14204 tree v = TREE_OPERAND (t, 1);
14205 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
14206 for_pre_p, orig_for_stmt);
14207 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
14208 for_pre_p, orig_for_stmt);
14210 else
14211 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
14212 orig_for_stmt);
14214 /* Handle OMP_FOR_COND. */
14215 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14216 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14218 tree v = TREE_OPERAND (t, 1);
14219 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
14220 for_pre_p, orig_for_stmt);
14221 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
14222 for_pre_p, orig_for_stmt);
14224 else
14225 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
14226 orig_for_stmt);
14228 /* Handle OMP_FOR_INCR. */
14229 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14230 if (TREE_CODE (t) == MODIFY_EXPR)
14232 decl = TREE_OPERAND (t, 0);
14233 t = TREE_OPERAND (t, 1);
14234 tree *tp = &TREE_OPERAND (t, 1);
14235 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
14236 tp = &TREE_OPERAND (t, 0);
14238 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
14239 orig_for_stmt);
14243 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
14244 OMP_TASKLOOP);
14247 if (orig_for_stmt != for_stmt)
14248 gimplify_omp_ctxp->combined_loop = true;
14250 for_body = NULL;
14251 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14252 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
14253 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14254 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
14256 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
14257 bool is_doacross = false;
14258 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
14259 find_standalone_omp_ordered, NULL))
14261 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
14262 is_doacross = true;
14263 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
14264 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
14265 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14266 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
14268 error_at (OMP_CLAUSE_LOCATION (*pc),
14269 "%<linear%> clause may not be specified together "
14270 "with %<ordered%> clause if stand-alone %<ordered%> "
14271 "construct is nested in it");
14272 *pc = OMP_CLAUSE_CHAIN (*pc);
14274 else
14275 pc = &OMP_CLAUSE_CHAIN (*pc);
14277 int collapse = 1, tile = 0;
14278 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
14279 if (c)
14280 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
14281 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
14282 if (c)
14283 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
14284 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
14285 hash_set<tree> *allocate_uids = NULL;
14286 if (c)
14288 allocate_uids = new hash_set<tree>;
14289 for (; c; c = OMP_CLAUSE_CHAIN (c))
14290 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
14291 allocate_uids->add (OMP_CLAUSE_DECL (c));
14293 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14295 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14296 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14297 decl = TREE_OPERAND (t, 0);
14298 gcc_assert (DECL_P (decl));
14299 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
14300 || POINTER_TYPE_P (TREE_TYPE (decl)));
14301 if (is_doacross)
14303 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
14305 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14306 if (TREE_CODE (orig_decl) == TREE_LIST)
14308 orig_decl = TREE_PURPOSE (orig_decl);
14309 if (!orig_decl)
14310 orig_decl = decl;
14312 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
14314 else
14315 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14316 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14319 if (for_stmt == orig_for_stmt)
14321 tree orig_decl = decl;
14322 if (OMP_FOR_ORIG_DECLS (for_stmt))
14324 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14325 if (TREE_CODE (orig_decl) == TREE_LIST)
14327 orig_decl = TREE_PURPOSE (orig_decl);
14328 if (!orig_decl)
14329 orig_decl = decl;
14332 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
14333 error_at (EXPR_LOCATION (for_stmt),
14334 "threadprivate iteration variable %qD", orig_decl);
14337 /* Make sure the iteration variable is private. */
14338 tree c = NULL_TREE;
14339 tree c2 = NULL_TREE;
14340 if (orig_for_stmt != for_stmt)
14342 /* Preserve this information until we gimplify the inner simd. */
14343 if (has_decl_expr
14344 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14345 TREE_PRIVATE (t) = 1;
14347 else if (ort == ORT_SIMD)
14349 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14350 (splay_tree_key) decl);
14351 omp_is_private (gimplify_omp_ctxp, decl,
14352 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14353 != 1));
14354 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14356 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14357 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14358 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14359 OMP_CLAUSE_LASTPRIVATE);
14360 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14361 OMP_CLAUSE_LASTPRIVATE))
14362 if (OMP_CLAUSE_DECL (c3) == decl)
14364 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14365 "conditional %<lastprivate%> on loop "
14366 "iterator %qD ignored", decl);
14367 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14368 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14371 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14373 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14374 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14375 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14376 if ((has_decl_expr
14377 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14378 || TREE_PRIVATE (t))
14380 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14381 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14383 struct gimplify_omp_ctx *outer
14384 = gimplify_omp_ctxp->outer_context;
14385 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14387 if (outer->region_type == ORT_WORKSHARE
14388 && outer->combined_loop)
14390 n = splay_tree_lookup (outer->variables,
14391 (splay_tree_key)decl);
14392 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14394 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14395 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14397 else
14399 struct gimplify_omp_ctx *octx = outer->outer_context;
14400 if (octx
14401 && octx->region_type == ORT_COMBINED_PARALLEL
14402 && octx->outer_context
14403 && (octx->outer_context->region_type
14404 == ORT_WORKSHARE)
14405 && octx->outer_context->combined_loop)
14407 octx = octx->outer_context;
14408 n = splay_tree_lookup (octx->variables,
14409 (splay_tree_key)decl);
14410 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14412 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14413 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14420 OMP_CLAUSE_DECL (c) = decl;
14421 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14422 OMP_FOR_CLAUSES (for_stmt) = c;
14423 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14424 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14425 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14426 true);
14428 else
14430 bool lastprivate
14431 = (!has_decl_expr
14432 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14433 if (TREE_PRIVATE (t))
14434 lastprivate = false;
14435 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14437 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14438 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14439 lastprivate = false;
14442 struct gimplify_omp_ctx *outer
14443 = gimplify_omp_ctxp->outer_context;
14444 if (outer && lastprivate)
14445 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14446 true);
14448 c = build_omp_clause (input_location,
14449 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14450 : OMP_CLAUSE_PRIVATE);
14451 OMP_CLAUSE_DECL (c) = decl;
14452 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14453 OMP_FOR_CLAUSES (for_stmt) = c;
14454 omp_add_variable (gimplify_omp_ctxp, decl,
14455 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14456 | GOVD_EXPLICIT | GOVD_SEEN);
14457 c = NULL_TREE;
14460 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14462 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14463 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14464 (splay_tree_key) decl);
14465 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14466 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14467 OMP_CLAUSE_LASTPRIVATE);
14468 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14469 OMP_CLAUSE_LASTPRIVATE))
14470 if (OMP_CLAUSE_DECL (c3) == decl)
14472 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14473 "conditional %<lastprivate%> on loop "
14474 "iterator %qD ignored", decl);
14475 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14476 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14479 else
14480 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14482 /* If DECL is not a gimple register, create a temporary variable to act
14483 as an iteration counter. This is valid, since DECL cannot be
14484 modified in the body of the loop. Similarly for any iteration vars
14485 in simd with collapse > 1 where the iterator vars must be
14486 lastprivate. And similarly for vars mentioned in allocate clauses. */
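/* A sketch of the effect: with
   #pragma omp simd collapse(2)
   each original iterator DECL is replaced by a fresh temporary VAR in
   the loop control, and "DECL = VAR" is emitted at the top of the body
   so user code still sees DECL. */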
14487 if (orig_for_stmt != for_stmt)
14488 var = decl;
14489 else if (!is_gimple_reg (decl)
14490 || (ort == ORT_SIMD
14491 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14492 || (allocate_uids && allocate_uids->contains (decl)))
14494 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14495 /* Make sure omp_add_variable is not called on it prematurely.
14496 We call it ourselves a few lines later. */
14497 gimplify_omp_ctxp = NULL;
14498 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14499 gimplify_omp_ctxp = ctx;
14500 TREE_OPERAND (t, 0) = var;
14502 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14504 if (ort == ORT_SIMD
14505 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14507 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14508 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14509 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14510 OMP_CLAUSE_DECL (c2) = var;
14511 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14512 OMP_FOR_CLAUSES (for_stmt) = c2;
14513 omp_add_variable (gimplify_omp_ctxp, var,
14514 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14515 if (c == NULL_TREE)
14517 c = c2;
14518 c2 = NULL_TREE;
14521 else
14522 omp_add_variable (gimplify_omp_ctxp, var,
14523 GOVD_PRIVATE | GOVD_SEEN);
14525 else
14526 var = decl;
14528 gimplify_omp_ctxp->in_for_exprs = true;
14529 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14531 tree lb = TREE_OPERAND (t, 1);
14532 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14533 is_gimple_val, fb_rvalue, false);
14534 ret = MIN (ret, tret);
14535 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14536 is_gimple_val, fb_rvalue, false);
14538 else
14539 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14540 is_gimple_val, fb_rvalue, false);
14541 gimplify_omp_ctxp->in_for_exprs = false;
14542 ret = MIN (ret, tret);
14543 if (ret == GS_ERROR)
14544 return ret;
14546 /* Handle OMP_FOR_COND. */
14547 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14548 gcc_assert (COMPARISON_CLASS_P (t));
14549 gcc_assert (TREE_OPERAND (t, 0) == decl);
14551 gimplify_omp_ctxp->in_for_exprs = true;
14552 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14554 tree ub = TREE_OPERAND (t, 1);
14555 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14556 is_gimple_val, fb_rvalue, false);
14557 ret = MIN (ret, tret);
14558 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14559 is_gimple_val, fb_rvalue, false);
14561 else
14562 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14563 is_gimple_val, fb_rvalue, false);
14564 gimplify_omp_ctxp->in_for_exprs = false;
14565 ret = MIN (ret, tret);
14567 /* Handle OMP_FOR_INCR. */
14568 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14569 switch (TREE_CODE (t))
14571 case PREINCREMENT_EXPR:
14572 case POSTINCREMENT_EXPR:
14574 tree decl = TREE_OPERAND (t, 0);
14575 /* c_omp_for_incr_canonicalize_ptr() should have been
14576 called to massage things appropriately. */
14577 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14579 if (orig_for_stmt != for_stmt)
14580 break;
14581 t = build_int_cst (TREE_TYPE (decl), 1);
14582 if (c)
14583 OMP_CLAUSE_LINEAR_STEP (c) = t;
14584 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14585 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14586 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14587 break;
14590 case PREDECREMENT_EXPR:
14591 case POSTDECREMENT_EXPR:
14592 /* c_omp_for_incr_canonicalize_ptr() should have been
14593 called to massage things appropriately. */
14594 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14595 if (orig_for_stmt != for_stmt)
14596 break;
14597 t = build_int_cst (TREE_TYPE (decl), -1);
14598 if (c)
14599 OMP_CLAUSE_LINEAR_STEP (c) = t;
14600 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14601 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14602 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14603 break;
14605 case MODIFY_EXPR:
14606 gcc_assert (TREE_OPERAND (t, 0) == decl);
14607 TREE_OPERAND (t, 0) = var;
14609 t = TREE_OPERAND (t, 1);
14610 switch (TREE_CODE (t))
14612 case PLUS_EXPR:
14613 if (TREE_OPERAND (t, 1) == decl)
14615 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14616 TREE_OPERAND (t, 0) = var;
14617 break;
14620 /* Fallthru. */
14621 case MINUS_EXPR:
14622 case POINTER_PLUS_EXPR:
14623 gcc_assert (TREE_OPERAND (t, 0) == decl);
14624 TREE_OPERAND (t, 0) = var;
14625 break;
14626 default:
14627 gcc_unreachable ();
14630 gimplify_omp_ctxp->in_for_exprs = true;
14631 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14632 is_gimple_val, fb_rvalue, false);
14633 ret = MIN (ret, tret);
14634 if (c)
14636 tree step = TREE_OPERAND (t, 1);
14637 tree stept = TREE_TYPE (decl);
14638 if (POINTER_TYPE_P (stept))
14639 stept = sizetype;
14640 step = fold_convert (stept, step);
14641 if (TREE_CODE (t) == MINUS_EXPR)
14642 step = fold_build1 (NEGATE_EXPR, stept, step);
14643 OMP_CLAUSE_LINEAR_STEP (c) = step;
14644 if (step != TREE_OPERAND (t, 1))
14646 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14647 &for_pre_body, NULL,
14648 is_gimple_val, fb_rvalue, false);
14649 ret = MIN (ret, tret);
14652 gimplify_omp_ctxp->in_for_exprs = false;
14653 break;
14655 default:
14656 gcc_unreachable ();
14659 if (c2)
14661 gcc_assert (c);
14662 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14665 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14667 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14668 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14669 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14670 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14671 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14672 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14673 && OMP_CLAUSE_DECL (c) == decl)
14675 if (is_doacross && (collapse == 1 || i >= collapse))
14676 t = var;
14677 else
14679 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14680 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14681 gcc_assert (TREE_OPERAND (t, 0) == var);
14682 t = TREE_OPERAND (t, 1);
14683 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14684 || TREE_CODE (t) == MINUS_EXPR
14685 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14686 gcc_assert (TREE_OPERAND (t, 0) == var);
14687 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14688 is_doacross ? var : decl,
14689 TREE_OPERAND (t, 1));
14691 gimple_seq *seq;
14692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14693 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14694 else
14695 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14696 push_gimplify_context ();
14697 gimplify_assign (decl, t, seq);
14698 gimple *bind = NULL;
14699 if (gimplify_ctxp->temps)
14701 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14702 *seq = NULL;
14703 gimplify_seq_add_stmt (seq, bind);
14705 pop_gimplify_context (bind);
14708 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14709 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14711 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14712 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14713 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14714 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14715 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14716 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14717 gcc_assert (COMPARISON_CLASS_P (t));
14718 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14719 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14720 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14724 BITMAP_FREE (has_decl_expr);
14725 delete allocate_uids;
14727 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14728 || (loop_p && orig_for_stmt == for_stmt))
14730 push_gimplify_context ();
14731 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14733 OMP_FOR_BODY (orig_for_stmt)
14734 = build3 (BIND_EXPR, void_type_node, NULL,
14735 OMP_FOR_BODY (orig_for_stmt), NULL);
14736 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14740 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14741 &for_body);
14743 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14744 || (loop_p && orig_for_stmt == for_stmt))
14746 if (gimple_code (g) == GIMPLE_BIND)
14747 pop_gimplify_context (g);
14748 else
14749 pop_gimplify_context (NULL);
14752 if (orig_for_stmt != for_stmt)
14753 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14755 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14756 decl = TREE_OPERAND (t, 0);
14757 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14758 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14759 gimplify_omp_ctxp = ctx->outer_context;
14760 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14761 gimplify_omp_ctxp = ctx;
14762 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14763 TREE_OPERAND (t, 0) = var;
14764 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14765 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14766 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14767 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14768 for (int j = i + 1;
14769 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14771 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14772 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14773 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14774 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14776 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14777 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14779 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14780 gcc_assert (COMPARISON_CLASS_P (t));
14781 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14782 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14784 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14785 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14790 gimplify_adjust_omp_clauses (pre_p, for_body,
14791 &OMP_FOR_CLAUSES (orig_for_stmt),
14792 TREE_CODE (orig_for_stmt));
14794 int kind;
14795 switch (TREE_CODE (orig_for_stmt))
14797 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14798 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14799 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14800 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14801 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14802 default:
14803 gcc_unreachable ();
14805 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14807 gimplify_seq_add_seq (pre_p, for_pre_body);
14808 for_pre_body = NULL;
14810 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14811 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14812 for_pre_body);
14813 if (orig_for_stmt != for_stmt)
14814 gimple_omp_for_set_combined_p (gfor, true);
14815 if (gimplify_omp_ctxp
14816 && (gimplify_omp_ctxp->combined_loop
14817 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14818 && gimplify_omp_ctxp->outer_context
14819 && gimplify_omp_ctxp->outer_context->combined_loop)))
14821 gimple_omp_for_set_combined_into_p (gfor, true);
14822 if (gimplify_omp_ctxp->combined_loop)
14823 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14824 else
14825 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14828 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14830 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14831 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14832 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14833 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14834 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14835 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14836 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14837 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
/* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
   constructs with a GIMPLE_OMP_TASK sandwiched in between them.
   The outer taskloop is responsible for computing the number of
   iterations (and the iteration counts for collapsed loops) and for
   holding the taskloop-specific clauses.  The task construct
   represents the effect of data sharing on the explicit task it
   creates, and the inner taskloop represents the expansion of the
   static loop inside of the explicit task construct.  */
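/* As a rough sketch (not the exact generated IL), a loop like

     #pragma omp taskloop grainsize(4) firstprivate (a)
     for (i = 0; i < n; i++)
       body (i, a);

   is gimplified into a nest along the lines of

     GIMPLE_OMP_FOR (taskloop, grainsize(4))      <- outer taskloop
       GIMPLE_OMP_TASK (firstprivate (a))
         GIMPLE_OMP_FOR (taskloop)                <- inner taskloop
           for (i = 0; i < n; i++)
             body (i, a);

   with the clauses distributed between the three constructs by the
   loop below.  */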
14847 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14849 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14850 tree task_clauses = NULL_TREE;
14851 tree c = *gfor_clauses_ptr;
14852 tree *gtask_clauses_ptr = &task_clauses;
14853 tree outer_for_clauses = NULL_TREE;
14854 tree *gforo_clauses_ptr = &outer_for_clauses;
14855 bitmap lastprivate_uids = NULL;
14856 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14858 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14859 if (c)
14861 lastprivate_uids = BITMAP_ALLOC (NULL);
14862 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14863 OMP_CLAUSE_LASTPRIVATE))
14864 bitmap_set_bit (lastprivate_uids,
14865 DECL_UID (OMP_CLAUSE_DECL (c)));
14867 c = *gfor_clauses_ptr;
14869 for (; c; c = OMP_CLAUSE_CHAIN (c))
14870 switch (OMP_CLAUSE_CODE (c))
14872 /* These clauses are allowed on task, move them there. */
14873 case OMP_CLAUSE_SHARED:
14874 case OMP_CLAUSE_FIRSTPRIVATE:
14875 case OMP_CLAUSE_DEFAULT:
14876 case OMP_CLAUSE_IF:
14877 case OMP_CLAUSE_UNTIED:
14878 case OMP_CLAUSE_FINAL:
14879 case OMP_CLAUSE_MERGEABLE:
14880 case OMP_CLAUSE_PRIORITY:
14881 case OMP_CLAUSE_REDUCTION:
14882 case OMP_CLAUSE_IN_REDUCTION:
14883 *gtask_clauses_ptr = c;
14884 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14885 break;
14886 case OMP_CLAUSE_PRIVATE:
14887 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14889 /* We want private on outer for and firstprivate
14890 on task. */
14891 *gtask_clauses_ptr
14892 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14893 OMP_CLAUSE_FIRSTPRIVATE);
14894 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14895 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14896 openacc);
14897 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14898 *gforo_clauses_ptr = c;
14899 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14901 else
14903 *gtask_clauses_ptr = c;
14904 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14906 break;
/* These clauses go onto the outer taskloop.  */
14908 case OMP_CLAUSE_GRAINSIZE:
14909 case OMP_CLAUSE_NUM_TASKS:
14910 case OMP_CLAUSE_NOGROUP:
14911 *gforo_clauses_ptr = c;
14912 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14913 break;
/* The collapse clause is duplicated on both taskloops.  */
14915 case OMP_CLAUSE_COLLAPSE:
14916 *gfor_clauses_ptr = c;
14917 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14918 *gforo_clauses_ptr = copy_node (c);
14919 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14920 break;
/* For lastprivate, keep the clause on the inner taskloop, and add
   a shared clause on the task.  If the same decl is also firstprivate,
   add a firstprivate clause on the inner taskloop too.  */
14924 case OMP_CLAUSE_LASTPRIVATE:
14925 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14927 /* For taskloop C++ lastprivate IVs, we want:
14928 1) private on outer taskloop
14929 2) firstprivate and shared on task
14930 3) lastprivate on inner taskloop */
14931 *gtask_clauses_ptr
14932 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14933 OMP_CLAUSE_FIRSTPRIVATE);
14934 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14935 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14936 openacc);
14937 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14938 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14939 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14940 OMP_CLAUSE_PRIVATE);
14941 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14942 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14943 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14944 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14946 *gfor_clauses_ptr = c;
14947 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14948 *gtask_clauses_ptr
14949 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14950 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14951 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14952 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14953 gtask_clauses_ptr
14954 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14955 break;
/* The allocate clause is duplicated on the task and the inner
   taskloop if the decl is lastprivate, otherwise it is just put on
   the task.  */
14958 case OMP_CLAUSE_ALLOCATE:
14959 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14960 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
/* Additionally, put a firstprivate clause on the task
   for the allocator if it is not constant.  */
14964 *gtask_clauses_ptr
14965 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14966 OMP_CLAUSE_FIRSTPRIVATE);
14967 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14968 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14969 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14971 if (lastprivate_uids
14972 && bitmap_bit_p (lastprivate_uids,
14973 DECL_UID (OMP_CLAUSE_DECL (c))))
14975 *gfor_clauses_ptr = c;
14976 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14977 *gtask_clauses_ptr = copy_node (c);
14978 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14980 else
14982 *gtask_clauses_ptr = c;
14983 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14985 break;
14986 default:
14987 gcc_unreachable ();
14989 *gfor_clauses_ptr = NULL_TREE;
14990 *gtask_clauses_ptr = NULL_TREE;
14991 *gforo_clauses_ptr = NULL_TREE;
14992 BITMAP_FREE (lastprivate_uids);
14993 gimple_set_location (gfor, input_location);
14994 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14995 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14996 NULL_TREE, NULL_TREE, NULL_TREE);
14997 gimple_set_location (g, input_location);
14998 gimple_omp_task_set_taskloop_p (g, true);
14999 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
15000 gomp_for *gforo
15001 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
15002 gimple_omp_for_collapse (gfor),
15003 gimple_omp_for_pre_body (gfor));
15004 gimple_omp_for_set_pre_body (gfor, NULL);
15005 gimple_omp_for_set_combined_p (gforo, true);
15006 gimple_omp_for_set_combined_into_p (gfor, true);
15007 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
15009 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
15010 tree v = create_tmp_var (type);
15011 gimple_omp_for_set_index (gforo, i, v);
15012 t = unshare_expr (gimple_omp_for_initial (gfor, i));
15013 gimple_omp_for_set_initial (gforo, i, t);
15014 gimple_omp_for_set_cond (gforo, i,
15015 gimple_omp_for_cond (gfor, i));
15016 t = unshare_expr (gimple_omp_for_final (gfor, i));
15017 gimple_omp_for_set_final (gforo, i, t);
15018 t = unshare_expr (gimple_omp_for_incr (gfor, i));
15019 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
15020 TREE_OPERAND (t, 0) = v;
15021 gimple_omp_for_set_incr (gforo, i, t);
15022 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
15023 OMP_CLAUSE_DECL (t) = v;
15024 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
15025 gimple_omp_for_set_clauses (gforo, t);
15026 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
15028 tree *p1 = NULL, *p2 = NULL;
15029 t = gimple_omp_for_initial (gforo, i);
15030 if (TREE_CODE (t) == TREE_VEC)
15031 p1 = &TREE_VEC_ELT (t, 0);
15032 t = gimple_omp_for_final (gforo, i);
15033 if (TREE_CODE (t) == TREE_VEC)
15035 if (p1)
15036 p2 = &TREE_VEC_ELT (t, 0);
15037 else
15038 p1 = &TREE_VEC_ELT (t, 0);
15040 if (p1)
15042 int j;
15043 for (j = 0; j < i; j++)
15044 if (*p1 == gimple_omp_for_index (gfor, j))
15046 *p1 = gimple_omp_for_index (gforo, j);
15047 if (p2)
15048 *p2 = *p1;
15049 break;
15051 gcc_assert (j < i);
15055 gimplify_seq_add_stmt (pre_p, gforo);
15057 else
15058 gimplify_seq_add_stmt (pre_p, gfor);
15060 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
15062 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15063 unsigned lastprivate_conditional = 0;
15064 while (ctx
15065 && (ctx->region_type == ORT_TARGET_DATA
15066 || ctx->region_type == ORT_TASKGROUP))
15067 ctx = ctx->outer_context;
15068 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
15069 for (tree c = gimple_omp_for_clauses (gfor);
15070 c; c = OMP_CLAUSE_CHAIN (c))
15071 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15072 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
15073 ++lastprivate_conditional;
15074 if (lastprivate_conditional)
15076 struct omp_for_data fd;
15077 omp_extract_for_data (gfor, &fd, NULL);
15078 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
15079 lastprivate_conditional);
15080 tree var = create_tmp_var_raw (type);
15081 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
15082 OMP_CLAUSE_DECL (c) = var;
15083 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
15084 gimple_omp_for_set_clauses (gfor, c);
15085 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
15088 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
15090 unsigned lastprivate_conditional = 0;
15091 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
15092 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15093 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
15094 ++lastprivate_conditional;
15095 if (lastprivate_conditional)
15097 struct omp_for_data fd;
15098 omp_extract_for_data (gfor, &fd, NULL);
15099 tree type = unsigned_type_for (fd.iter_type);
15100 while (lastprivate_conditional--)
15102 tree c = build_omp_clause (UNKNOWN_LOCATION,
15103 OMP_CLAUSE__CONDTEMP_);
15104 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
15105 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
15106 gimple_omp_for_set_clauses (gfor, c);
15111 if (ret != GS_ALL_DONE)
15112 return GS_ERROR;
15113 *expr_p = NULL_TREE;
15114 return GS_ALL_DONE;
15117 /* Helper for gimplify_omp_loop, called through walk_tree. */
15119 static tree
15120 note_no_context_vars (tree *tp, int *, void *data)
15122 if (VAR_P (*tp)
15123 && DECL_CONTEXT (*tp) == NULL_TREE
15124 && !is_global_var (*tp))
15126 vec<tree> *d = (vec<tree> *) data;
15127 d->safe_push (*tp);
15128 DECL_CONTEXT (*tp) = current_function_decl;
15130 return NULL_TREE;
15133 /* Gimplify the gross structure of an OMP_LOOP statement. */
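/* A sketch of the overall transformation (not the exact IL): the
   OMP_LOOP itself is turned into an OMP_SIMD below, and depending on
   the bind kind it is additionally wrapped, roughly:

     bind(thread):    simd
     bind(parallel):  for simd            (inside the enclosing parallel)
     bind(teams):     distribute parallel for simd

   with privatization and reduction clauses copied onto the wrappers
   as appropriate.  */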
15135 static enum gimplify_status
15136 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
15138 tree for_stmt = *expr_p;
15139 tree clauses = OMP_FOR_CLAUSES (for_stmt);
15140 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
15141 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
15142 int i;
15144 /* If order is not present, the behavior is as if order(concurrent)
15145 appeared. */
15146 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
15147 if (order == NULL_TREE)
15149 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
15150 OMP_CLAUSE_CHAIN (order) = clauses;
15151 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
15154 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
15155 if (bind == NULL_TREE)
15157 if (!flag_openmp) /* flag_openmp_simd */
15159 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
15160 kind = OMP_CLAUSE_BIND_TEAMS;
15161 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
15162 kind = OMP_CLAUSE_BIND_PARALLEL;
15163 else
15165 for (; octx; octx = octx->outer_context)
15167 if ((octx->region_type & ORT_ACC) != 0
15168 || octx->region_type == ORT_NONE
15169 || octx->region_type == ORT_IMPLICIT_TARGET)
15170 continue;
15171 break;
15173 if (octx == NULL && !in_omp_construct)
15174 error_at (EXPR_LOCATION (for_stmt),
15175 "%<bind%> clause not specified on a %<loop%> "
15176 "construct not nested inside another OpenMP construct");
15178 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
15179 OMP_CLAUSE_CHAIN (bind) = clauses;
15180 OMP_CLAUSE_BIND_KIND (bind) = kind;
15181 OMP_FOR_CLAUSES (for_stmt) = bind;
15183 else
15184 switch (OMP_CLAUSE_BIND_KIND (bind))
15186 case OMP_CLAUSE_BIND_THREAD:
15187 break;
15188 case OMP_CLAUSE_BIND_PARALLEL:
15189 if (!flag_openmp) /* flag_openmp_simd */
15191 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15192 break;
15194 for (; octx; octx = octx->outer_context)
15195 if (octx->region_type == ORT_SIMD
15196 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
15198 error_at (EXPR_LOCATION (for_stmt),
15199 "%<bind(parallel)%> on a %<loop%> construct nested "
15200 "inside %<simd%> construct");
15201 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15202 break;
15204 kind = OMP_CLAUSE_BIND_PARALLEL;
15205 break;
15206 case OMP_CLAUSE_BIND_TEAMS:
15207 if (!flag_openmp) /* flag_openmp_simd */
15209 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15210 break;
15212 if ((octx
15213 && octx->region_type != ORT_IMPLICIT_TARGET
15214 && octx->region_type != ORT_NONE
15215 && (octx->region_type & ORT_TEAMS) == 0)
15216 || in_omp_construct)
15218 error_at (EXPR_LOCATION (for_stmt),
15219 "%<bind(teams)%> on a %<loop%> region not strictly "
15220 "nested inside of a %<teams%> region");
15221 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15222 break;
15224 kind = OMP_CLAUSE_BIND_TEAMS;
15225 break;
15226 default:
15227 gcc_unreachable ();
15230 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
15231 switch (OMP_CLAUSE_CODE (*pc))
15233 case OMP_CLAUSE_REDUCTION:
15234 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
15236 error_at (OMP_CLAUSE_LOCATION (*pc),
15237 "%<inscan%> %<reduction%> clause on "
15238 "%qs construct", "loop");
15239 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
15241 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
15243 error_at (OMP_CLAUSE_LOCATION (*pc),
15244 "invalid %<task%> reduction modifier on construct "
15245 "other than %<parallel%>, %qs or %<sections%>",
15246 lang_GNU_Fortran () ? "do" : "for");
15247 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
15249 pc = &OMP_CLAUSE_CHAIN (*pc);
15250 break;
15251 case OMP_CLAUSE_LASTPRIVATE:
15252 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15254 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15255 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15256 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
15257 break;
15258 if (OMP_FOR_ORIG_DECLS (for_stmt)
15259 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15260 i)) == TREE_LIST
15261 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15262 i)))
15264 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15265 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
15266 break;
15269 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
15271 error_at (OMP_CLAUSE_LOCATION (*pc),
15272 "%<lastprivate%> clause on a %<loop%> construct refers "
15273 "to a variable %qD which is not the loop iterator",
15274 OMP_CLAUSE_DECL (*pc));
15275 *pc = OMP_CLAUSE_CHAIN (*pc);
15276 break;
15278 pc = &OMP_CLAUSE_CHAIN (*pc);
15279 break;
15280 default:
15281 pc = &OMP_CLAUSE_CHAIN (*pc);
15282 break;
15285 TREE_SET_CODE (for_stmt, OMP_SIMD);
15287 int last;
15288 switch (kind)
15290 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
15291 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
15292 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
15294 for (int pass = 1; pass <= last; pass++)
15296 if (pass == 2)
15298 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
15299 make_node (BLOCK));
15300 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
15301 *expr_p = make_node (OMP_PARALLEL);
15302 TREE_TYPE (*expr_p) = void_type_node;
15303 OMP_PARALLEL_BODY (*expr_p) = bind;
15304 OMP_PARALLEL_COMBINED (*expr_p) = 1;
15305 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
15306 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
15307 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15308 if (OMP_FOR_ORIG_DECLS (for_stmt)
15309 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
15310 == TREE_LIST))
15312 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15313 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
15315 *pc = build_omp_clause (UNKNOWN_LOCATION,
15316 OMP_CLAUSE_FIRSTPRIVATE);
15317 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
15318 pc = &OMP_CLAUSE_CHAIN (*pc);
15322 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
15323 tree *pc = &OMP_FOR_CLAUSES (t);
15324 TREE_TYPE (t) = void_type_node;
15325 OMP_FOR_BODY (t) = *expr_p;
15326 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
15327 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15328 switch (OMP_CLAUSE_CODE (c))
15330 case OMP_CLAUSE_BIND:
15331 case OMP_CLAUSE_ORDER:
15332 case OMP_CLAUSE_COLLAPSE:
15333 *pc = copy_node (c);
15334 pc = &OMP_CLAUSE_CHAIN (*pc);
15335 break;
15336 case OMP_CLAUSE_PRIVATE:
15337 case OMP_CLAUSE_FIRSTPRIVATE:
15338 /* Only needed on innermost. */
15339 break;
15340 case OMP_CLAUSE_LASTPRIVATE:
15341 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15343 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15344 OMP_CLAUSE_FIRSTPRIVATE);
15345 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15346 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15347 pc = &OMP_CLAUSE_CHAIN (*pc);
15349 *pc = copy_node (c);
15350 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15351 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15352 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15354 if (pass != last)
15355 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15356 else
15357 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15358 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15360 pc = &OMP_CLAUSE_CHAIN (*pc);
15361 break;
15362 case OMP_CLAUSE_REDUCTION:
15363 *pc = copy_node (c);
15364 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15365 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15366 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15368 auto_vec<tree> no_context_vars;
15369 int walk_subtrees = 0;
15370 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15371 &walk_subtrees, &no_context_vars);
15372 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15373 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15374 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15375 note_no_context_vars,
15376 &no_context_vars);
15377 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15378 note_no_context_vars,
15379 &no_context_vars);
15381 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15382 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15383 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15384 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15385 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15387 hash_map<tree, tree> decl_map;
15388 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15389 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15390 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15391 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15392 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15393 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15395 copy_body_data id;
15396 memset (&id, 0, sizeof (id));
15397 id.src_fn = current_function_decl;
15398 id.dst_fn = current_function_decl;
15399 id.src_cfun = cfun;
15400 id.decl_map = &decl_map;
15401 id.copy_decl = copy_decl_no_change;
15402 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15403 id.transform_new_cfg = true;
15404 id.transform_return_to_modify = false;
15405 id.eh_lp_nr = 0;
15406 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15407 &id, NULL);
15408 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15409 &id, NULL);
15411 for (tree d : no_context_vars)
15413 DECL_CONTEXT (d) = NULL_TREE;
15414 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15417 else
15419 OMP_CLAUSE_REDUCTION_INIT (*pc)
15420 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15421 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15422 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15424 pc = &OMP_CLAUSE_CHAIN (*pc);
15425 break;
15426 default:
15427 gcc_unreachable ();
15429 *pc = NULL_TREE;
15430 *expr_p = t;
15432 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
/* Helper function of optimize_target_teams: find an OMP_TEAMS inside
   OMP_TARGET's body.  */
15439 static tree
15440 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15442 *walk_subtrees = 0;
15443 switch (TREE_CODE (*tp))
15445 case OMP_TEAMS:
15446 return *tp;
15447 case BIND_EXPR:
15448 case STATEMENT_LIST:
15449 *walk_subtrees = 1;
15450 break;
15451 default:
15452 break;
15454 return NULL_TREE;
/* Helper function of optimize_target_teams: determine whether the
   expression can be computed safely on the host before the target
   construct.  */
15460 static tree
15461 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15463 splay_tree_node n;
15465 if (TYPE_P (*tp))
15467 *walk_subtrees = 0;
15468 return NULL_TREE;
15470 switch (TREE_CODE (*tp))
15472 case VAR_DECL:
15473 case PARM_DECL:
15474 case RESULT_DECL:
15475 *walk_subtrees = 0;
15476 if (error_operand_p (*tp)
15477 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15478 || DECL_HAS_VALUE_EXPR_P (*tp)
15479 || DECL_THREAD_LOCAL_P (*tp)
15480 || TREE_SIDE_EFFECTS (*tp)
15481 || TREE_THIS_VOLATILE (*tp))
15482 return *tp;
15483 if (is_global_var (*tp)
15484 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15485 || lookup_attribute ("omp declare target link",
15486 DECL_ATTRIBUTES (*tp))))
15487 return *tp;
15488 if (VAR_P (*tp)
15489 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15490 && !is_global_var (*tp)
15491 && decl_function_context (*tp) == current_function_decl)
15492 return *tp;
15493 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15494 (splay_tree_key) *tp);
15495 if (n == NULL)
15497 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15498 return NULL_TREE;
15499 return *tp;
15501 else if (n->value & GOVD_LOCAL)
15502 return *tp;
15503 else if (n->value & GOVD_FIRSTPRIVATE)
15504 return NULL_TREE;
15505 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15506 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15507 return NULL_TREE;
15508 return *tp;
15509 case INTEGER_CST:
15510 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15511 return *tp;
15512 return NULL_TREE;
15513 case TARGET_EXPR:
15514 if (TARGET_EXPR_INITIAL (*tp)
15515 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15516 return *tp;
15517 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15518 walk_subtrees, NULL);
/* Allow some reasonable subset of integral arithmetic.  */
15520 case PLUS_EXPR:
15521 case MINUS_EXPR:
15522 case MULT_EXPR:
15523 case TRUNC_DIV_EXPR:
15524 case CEIL_DIV_EXPR:
15525 case FLOOR_DIV_EXPR:
15526 case ROUND_DIV_EXPR:
15527 case TRUNC_MOD_EXPR:
15528 case CEIL_MOD_EXPR:
15529 case FLOOR_MOD_EXPR:
15530 case ROUND_MOD_EXPR:
15531 case RDIV_EXPR:
15532 case EXACT_DIV_EXPR:
15533 case MIN_EXPR:
15534 case MAX_EXPR:
15535 case LSHIFT_EXPR:
15536 case RSHIFT_EXPR:
15537 case BIT_IOR_EXPR:
15538 case BIT_XOR_EXPR:
15539 case BIT_AND_EXPR:
15540 case NEGATE_EXPR:
15541 case ABS_EXPR:
15542 case BIT_NOT_EXPR:
15543 case NON_LVALUE_EXPR:
15544 CASE_CONVERT:
15545 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15546 return *tp;
15547 return NULL_TREE;
15548 /* And disallow anything else, except for comparisons. */
15549 default:
15550 if (COMPARISON_CLASS_P (*tp))
15551 return NULL_TREE;
15552 return *tp;
/* Try to determine whether the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially can; so can integral decls that are firstprivate
   (explicitly or implicitly) or explicitly mapped with map(always, to:)
   or map(always, tofrom:) on the target region, as well as expressions
   involving simple arithmetic on those.  Function calls are not OK,
   nor is dereferencing anything, etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for a clause not specified at all; use the implementation
   default.
   -1 stands for a value that cannot be determined easily before
   entering the target construct.
   -2 means that no explicit teams construct was specified.
   If no teams construct is present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */
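/* For example (hypothetical code for illustration):

     int n = foo ();
     #pragma omp target
     #pragma omp teams num_teams(n) thread_limit(16)
     ...

   N is an implicitly firstprivatized scalar, so its value can be
   computed on the host and used for OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR,
   and 16 for OMP_CLAUSE_THREAD_LIMIT_EXPR; with num_teams(bar ())
   instead, the call is not host-computable and -1 would be used.  */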
15574 static void
15575 optimize_target_teams (tree target, gimple_seq *pre_p)
15577 tree body = OMP_BODY (target);
15578 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15579 tree num_teams_lower = NULL_TREE;
15580 tree num_teams_upper = integer_zero_node;
15581 tree thread_limit = integer_zero_node;
15582 location_t num_teams_loc = EXPR_LOCATION (target);
15583 location_t thread_limit_loc = EXPR_LOCATION (target);
15584 tree c, *p, expr;
15585 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15587 if (teams == NULL_TREE)
15588 num_teams_upper = build_int_cst (integer_type_node, -2);
15589 else
15590 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15592 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15594 p = &num_teams_upper;
15595 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15596 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15598 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15599 if (TREE_CODE (expr) == INTEGER_CST)
15600 num_teams_lower = expr;
15601 else if (walk_tree (&expr, computable_teams_clause,
15602 NULL, NULL))
15603 num_teams_lower = integer_minus_one_node;
15604 else
15606 num_teams_lower = expr;
15607 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15608 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15609 is_gimple_val, fb_rvalue, false)
15610 == GS_ERROR)
15612 gimplify_omp_ctxp = target_ctx;
15613 num_teams_lower = integer_minus_one_node;
15615 else
15617 gimplify_omp_ctxp = target_ctx;
15618 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15619 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15620 = num_teams_lower;
15625 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15627 p = &thread_limit;
15628 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15630 else
15631 continue;
15632 expr = OMP_CLAUSE_OPERAND (c, 0);
15633 if (TREE_CODE (expr) == INTEGER_CST)
15635 *p = expr;
15636 continue;
15638 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15640 *p = integer_minus_one_node;
15641 continue;
15643 *p = expr;
15644 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15645 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15646 == GS_ERROR)
15648 gimplify_omp_ctxp = target_ctx;
15649 *p = integer_minus_one_node;
15650 continue;
15652 gimplify_omp_ctxp = target_ctx;
15653 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15654 OMP_CLAUSE_OPERAND (c, 0) = *p;
15656 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15658 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15659 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15660 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15661 OMP_TARGET_CLAUSES (target) = c;
15663 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15664 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15665 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15666 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15667 OMP_TARGET_CLAUSES (target) = c;
15670 /* Gimplify the gross structure of several OMP constructs. */
15672 static void
15673 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15675 tree expr = *expr_p;
15676 gimple *stmt;
15677 gimple_seq body = NULL;
15678 enum omp_region_type ort;
15680 switch (TREE_CODE (expr))
15682 case OMP_SECTIONS:
15683 case OMP_SINGLE:
15684 ort = ORT_WORKSHARE;
15685 break;
15686 case OMP_SCOPE:
15687 ort = ORT_TASKGROUP;
15688 break;
15689 case OMP_TARGET:
15690 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15691 break;
15692 case OACC_KERNELS:
15693 ort = ORT_ACC_KERNELS;
15694 break;
15695 case OACC_PARALLEL:
15696 ort = ORT_ACC_PARALLEL;
15697 break;
15698 case OACC_SERIAL:
15699 ort = ORT_ACC_SERIAL;
15700 break;
15701 case OACC_DATA:
15702 ort = ORT_ACC_DATA;
15703 break;
15704 case OMP_TARGET_DATA:
15705 ort = ORT_TARGET_DATA;
15706 break;
15707 case OMP_TEAMS:
15708 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15709 if (gimplify_omp_ctxp == NULL
15710 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15711 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15712 break;
15713 case OACC_HOST_DATA:
15714 ort = ORT_ACC_HOST_DATA;
15715 break;
15716 default:
15717 gcc_unreachable ();
15720 bool save_in_omp_construct = in_omp_construct;
15721 if ((ort & ORT_ACC) == 0)
15722 in_omp_construct = false;
15723 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15724 TREE_CODE (expr));
15725 if (TREE_CODE (expr) == OMP_TARGET)
15726 optimize_target_teams (expr, pre_p);
15727 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15728 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15730 push_gimplify_context ();
15731 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15732 if (gimple_code (g) == GIMPLE_BIND)
15733 pop_gimplify_context (g);
15734 else
15735 pop_gimplify_context (NULL);
15736 if ((ort & ORT_TARGET_DATA) != 0)
15738 enum built_in_function end_ix;
15739 switch (TREE_CODE (expr))
15741 case OACC_DATA:
15742 case OACC_HOST_DATA:
15743 end_ix = BUILT_IN_GOACC_DATA_END;
15744 break;
15745 case OMP_TARGET_DATA:
15746 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15747 break;
15748 default:
15749 gcc_unreachable ();
15751 tree fn = builtin_decl_explicit (end_ix);
15752 g = gimple_build_call (fn, 0);
15753 gimple_seq cleanup = NULL;
15754 gimple_seq_add_stmt (&cleanup, g);
15755 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15756 body = NULL;
15757 gimple_seq_add_stmt (&body, g);
15760 else
15761 gimplify_and_add (OMP_BODY (expr), &body);
15762 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15763 TREE_CODE (expr));
15764 in_omp_construct = save_in_omp_construct;
15766 switch (TREE_CODE (expr))
15768 case OACC_DATA:
15769 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15770 OMP_CLAUSES (expr));
15771 break;
15772 case OACC_HOST_DATA:
15773 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15775 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15776 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15777 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15780 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15781 OMP_CLAUSES (expr));
15782 break;
15783 case OACC_KERNELS:
15784 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15785 OMP_CLAUSES (expr));
15786 break;
15787 case OACC_PARALLEL:
15788 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15789 OMP_CLAUSES (expr));
15790 break;
15791 case OACC_SERIAL:
15792 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15793 OMP_CLAUSES (expr));
15794 break;
15795 case OMP_SECTIONS:
15796 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15797 break;
15798 case OMP_SINGLE:
15799 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15800 break;
15801 case OMP_SCOPE:
15802 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15803 break;
15804 case OMP_TARGET:
15805 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15806 OMP_CLAUSES (expr));
15807 break;
15808 case OMP_TARGET_DATA:
15809 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15810 to be evaluated before the use_device_{ptr,addr} clauses if they
15811 refer to the same variables. */
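/* E.g. for (an illustrative case)

     #pragma omp target data use_device_ptr(p) map(tofrom: p[0:n])

   the clause list is reordered here so that the map clause is
   evaluated first, i.e. map(tofrom: p[0:n]) use_device_ptr(p).  */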
15813 tree use_device_clauses;
15814 tree *pc, *uc = &use_device_clauses;
15815 for (pc = &OMP_CLAUSES (expr); *pc; )
15816 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15817 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15819 *uc = *pc;
15820 *pc = OMP_CLAUSE_CHAIN (*pc);
15821 uc = &OMP_CLAUSE_CHAIN (*uc);
15823 else
15824 pc = &OMP_CLAUSE_CHAIN (*pc);
15825 *uc = NULL_TREE;
15826 *pc = use_device_clauses;
15827 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15828 OMP_CLAUSES (expr));
15830 break;
15831 case OMP_TEAMS:
15832 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15833 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15834 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15835 break;
15836 default:
15837 gcc_unreachable ();
15840 gimplify_seq_add_stmt (pre_p, stmt);
15841 *expr_p = NULL_TREE;
15844 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15845 target update constructs. */
15847 static void
15848 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15850 tree expr = *expr_p;
15851 int kind;
15852 gomp_target *stmt;
15853 enum omp_region_type ort = ORT_WORKSHARE;
15855 switch (TREE_CODE (expr))
15857 case OACC_ENTER_DATA:
15858 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15859 ort = ORT_ACC;
15860 break;
15861 case OACC_EXIT_DATA:
15862 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15863 ort = ORT_ACC;
15864 break;
15865 case OACC_UPDATE:
15866 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15867 ort = ORT_ACC;
15868 break;
15869 case OMP_TARGET_UPDATE:
15870 kind = GF_OMP_TARGET_KIND_UPDATE;
15871 break;
15872 case OMP_TARGET_ENTER_DATA:
15873 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15874 break;
15875 case OMP_TARGET_EXIT_DATA:
15876 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15877 break;
15878 default:
15879 gcc_unreachable ();
15881 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15882 ort, TREE_CODE (expr));
15883 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15884 TREE_CODE (expr));
15885 if (TREE_CODE (expr) == OACC_UPDATE
15886 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15887 OMP_CLAUSE_IF_PRESENT))
15889 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15890 clause. */
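/* E.g. (illustrative)

     #pragma acc update device(x) if_present

   uses GOMP_MAP_TO instead of GOMP_MAP_FORCE_TO for x, which the
   runtime treats as "update only if already present".  */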
15891 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15892 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15893 switch (OMP_CLAUSE_MAP_KIND (c))
15895 case GOMP_MAP_FORCE_TO:
15896 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15897 break;
15898 case GOMP_MAP_FORCE_FROM:
15899 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15900 break;
15901 default:
15902 break;
15905 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15906 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15907 OMP_CLAUSE_FINALIZE))
15909 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15910 semantics. */
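/* E.g. (illustrative)

     #pragma acc exit data copyout(x) delete(y) finalize

   turns GOMP_MAP_FROM for x into GOMP_MAP_FORCE_FROM and
   GOMP_MAP_RELEASE for y into GOMP_MAP_DELETE.  */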
15911 bool have_clause = false;
15912 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15913 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15914 switch (OMP_CLAUSE_MAP_KIND (c))
15916 case GOMP_MAP_FROM:
15917 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15918 have_clause = true;
15919 break;
15920 case GOMP_MAP_RELEASE:
15921 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15922 have_clause = true;
15923 break;
15924 case GOMP_MAP_TO_PSET:
15925 /* Fortran arrays with descriptors must map that descriptor when
15926 doing standalone "attach" operations (in OpenACC). In that
15927 case GOMP_MAP_TO_PSET appears by itself with no preceding
15928 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15929 break;
15930 case GOMP_MAP_POINTER:
15931 /* TODO PR92929: we may see these here, but they'll always follow
15932 one of the clauses above, and will be handled by libgomp as
15933 one group, so no handling required here. */
15934 gcc_assert (have_clause);
15935 break;
15936 case GOMP_MAP_DETACH:
15937 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15938 have_clause = false;
15939 break;
15940 case GOMP_MAP_STRUCT:
15941 have_clause = false;
15942 break;
15943 default:
15944 gcc_unreachable ();
15947 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15949 gimplify_seq_add_stmt (pre_p, stmt);
15950 *expr_p = NULL_TREE;
15953 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15954 stabilized the lhs of the atomic operation as *ADDR. Return true if
15955 EXPR is this stabilized form. */
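/* For instance, with ADDR == &x, both x itself and *&x (possibly
   wrapped in useless type conversions, e.g. casts to a volatile
   variant of the type) are recognized as the stabilized lhs.  */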
15957 static bool
15958 goa_lhs_expr_p (tree expr, tree addr)
15960 /* Also include casts to other type variants. The C front end is fond
15961 of adding these for e.g. volatile variables. This is like
15962 STRIP_TYPE_NOPS but includes the main variant lookup. */
15963 STRIP_USELESS_TYPE_CONVERSION (expr);
15965 if (INDIRECT_REF_P (expr))
15967 expr = TREE_OPERAND (expr, 0);
15968 while (expr != addr
15969 && (CONVERT_EXPR_P (expr)
15970 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15971 && TREE_CODE (expr) == TREE_CODE (addr)
15972 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15974 expr = TREE_OPERAND (expr, 0);
15975 addr = TREE_OPERAND (addr, 0);
15977 if (expr == addr)
15978 return true;
15979 return (TREE_CODE (addr) == ADDR_EXPR
15980 && TREE_CODE (expr) == ADDR_EXPR
15981 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15983 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15984 return true;
15985 return false;
15988 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15989 expression does not involve the lhs, evaluate it into a temporary.
15990 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15991 or -1 if an error was encountered. */
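/* As an illustrative example, for

     #pragma omp atomic
     x = x + foo ();

   with LHS_ADDR == &x, the foo () call does not involve the lhs, so
   it is evaluated into a temporary in PRE_P, while the occurrence of
   x is replaced by LHS_VAR; the function then returns 1 because the
   lhs appeared as a subexpression.  */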
15993 static int
15994 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15995 tree lhs_var, tree &target_expr, bool rhs, int depth)
15997 tree expr = *expr_p;
15998 int saw_lhs = 0;
16000 if (goa_lhs_expr_p (expr, lhs_addr))
16002 if (pre_p)
16003 *expr_p = lhs_var;
16004 return 1;
16006 if (is_gimple_val (expr))
16007 return 0;
/* The maximum depth at which the lhs can occur in the expression
   corresponds to a pattern like
   __builtin_clear_padding (...), __builtin_clear_padding (...),
   __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;  */
16012 if (++depth > 7)
16013 goto finish;
16015 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
16017 case tcc_binary:
16018 case tcc_comparison:
16019 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
16020 lhs_var, target_expr, true, depth);
16021 /* FALLTHRU */
16022 case tcc_unary:
16023 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
16024 lhs_var, target_expr, true, depth);
16025 break;
16026 case tcc_expression:
16027 switch (TREE_CODE (expr))
16029 case TRUTH_ANDIF_EXPR:
16030 case TRUTH_ORIF_EXPR:
16031 case TRUTH_AND_EXPR:
16032 case TRUTH_OR_EXPR:
16033 case TRUTH_XOR_EXPR:
16034 case BIT_INSERT_EXPR:
16035 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
16036 lhs_addr, lhs_var, target_expr, true,
16037 depth);
16038 /* FALLTHRU */
16039 case TRUTH_NOT_EXPR:
16040 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16041 lhs_addr, lhs_var, target_expr, true,
16042 depth);
16043 break;
16044 case MODIFY_EXPR:
16045 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
16046 target_expr, true, depth))
16047 break;
16048 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
16049 lhs_addr, lhs_var, target_expr, true,
16050 depth);
16051 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16052 lhs_addr, lhs_var, target_expr, false,
16053 depth);
16054 break;
16055 /* FALLTHRU */
16056 case ADDR_EXPR:
16057 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
16058 target_expr, true, depth))
16059 break;
16060 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16061 lhs_addr, lhs_var, target_expr, false,
16062 depth);
16063 break;
16064 case COMPOUND_EXPR:
16065 /* Break out any preevaluations from cp_build_modify_expr. */
16066 for (; TREE_CODE (expr) == COMPOUND_EXPR;
16067 expr = TREE_OPERAND (expr, 1))
16069 /* Special-case __builtin_clear_padding call before
16070 __builtin_memcmp. */
16071 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
16073 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
16074 if (fndecl
16075 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
16076 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
16077 && (!pre_p
16078 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
16079 lhs_addr, lhs_var,
16080 target_expr, true, depth)))
16082 if (pre_p)
16083 *expr_p = expr;
16084 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
16085 pre_p, lhs_addr, lhs_var,
16086 target_expr, true, depth);
16087 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
16088 pre_p, lhs_addr, lhs_var,
16089 target_expr, rhs, depth);
16090 return saw_lhs;
16094 if (pre_p)
16095 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
16097 if (!pre_p)
16098 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
16099 target_expr, rhs, depth);
16100 *expr_p = expr;
16101 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
16102 target_expr, rhs, depth);
16103 case COND_EXPR:
16104 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
16105 lhs_var, target_expr, true, depth))
16106 break;
16107 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16108 lhs_addr, lhs_var, target_expr, true,
16109 depth);
16110 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
16111 lhs_addr, lhs_var, target_expr, true,
16112 depth);
16113 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
16114 lhs_addr, lhs_var, target_expr, true,
16115 depth);
16116 break;
16117 case TARGET_EXPR:
16118 if (TARGET_EXPR_INITIAL (expr))
16120 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
16121 lhs_var, target_expr, true,
16122 depth))
16123 break;
16124 if (expr == target_expr)
16125 saw_lhs = 1;
16126 else
16128 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
16129 pre_p, lhs_addr, lhs_var,
16130 target_expr, true, depth);
16131 if (saw_lhs && target_expr == NULL_TREE && pre_p)
16132 target_expr = expr;
16135 break;
16136 default:
16137 break;
16139 break;
16140 case tcc_reference:
16141 if (TREE_CODE (expr) == BIT_FIELD_REF
16142 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
16143 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16144 lhs_addr, lhs_var, target_expr, true,
16145 depth);
16146 break;
16147 case tcc_vl_exp:
16148 if (TREE_CODE (expr) == CALL_EXPR)
16150 if (tree fndecl = get_callee_fndecl (expr))
16151 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
16152 BUILT_IN_MEMCMP))
16154 int nargs = call_expr_nargs (expr);
16155 for (int i = 0; i < nargs; i++)
16156 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
16157 pre_p, lhs_addr, lhs_var,
16158 target_expr, true, depth);
16161 break;
16162 default:
16163 break;
16166 finish:
16167 if (saw_lhs == 0 && pre_p)
16169 enum gimplify_status gs;
16170 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
16172 gimplify_stmt (&expr, pre_p);
16173 return saw_lhs;
16175 else if (rhs)
16176 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
16177 else
16178 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
16179 if (gs != GS_ALL_DONE)
16180 saw_lhs = -1;
16183 return saw_lhs;
16186 /* Gimplify an OMP_ATOMIC statement. */
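/* As a rough sketch of the resulting IL:

     #pragma omp atomic capture
     v = x += 3;

   becomes approximately

     tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>;
     tmp2 = tmp + 3;
     GIMPLE_OMP_ATOMIC_STORE <tmp2>;   (need_value set)
     v = tmp2;

   with the capture kind (old vs. new value) selecting what is left
   in *EXPR_P.  */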
16188 static enum gimplify_status
16189 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
16191 tree addr = TREE_OPERAND (*expr_p, 0);
16192 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
16193 ? NULL : TREE_OPERAND (*expr_p, 1);
16194 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
16195 tree tmp_load;
16196 gomp_atomic_load *loadstmt;
16197 gomp_atomic_store *storestmt;
16198 tree target_expr = NULL_TREE;
16200 tmp_load = create_tmp_reg (type);
16201 if (rhs
16202 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
16203 true, 0) < 0)
16204 return GS_ERROR;
16206 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
16207 != GS_ALL_DONE)
16208 return GS_ERROR;
16210 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
16211 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16212 gimplify_seq_add_stmt (pre_p, loadstmt);
16213 if (rhs)
16215 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
16216 representatives. Use BIT_FIELD_REF on the lhs instead. */
16217 tree rhsarg = rhs;
16218 if (TREE_CODE (rhs) == COND_EXPR)
16219 rhsarg = TREE_OPERAND (rhs, 1);
16220 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
16221 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
16223 tree bitpos = TREE_OPERAND (rhsarg, 2);
16224 tree op1 = TREE_OPERAND (rhsarg, 1);
16225 tree bitsize;
16226 tree tmp_store = tmp_load;
16227 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
16228 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
16229 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
16230 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
16231 else
16232 bitsize = TYPE_SIZE (TREE_TYPE (op1));
16233 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
16234 tree t = build2_loc (EXPR_LOCATION (rhsarg),
16235 MODIFY_EXPR, void_type_node,
16236 build3_loc (EXPR_LOCATION (rhsarg),
16237 BIT_FIELD_REF, TREE_TYPE (op1),
16238 tmp_store, bitsize, bitpos), op1);
16239 if (TREE_CODE (rhs) == COND_EXPR)
16240 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
16241 TREE_OPERAND (rhs, 0), t, void_node);
16242 gimplify_and_add (t, pre_p);
16243 rhs = tmp_store;
16245 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
16246 if (TREE_CODE (rhs) == COND_EXPR)
16247 gimplify_ctxp->allow_rhs_cond_expr = true;
16248 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
16249 is_gimple_val, fb_rvalue);
16250 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
16251 if (gs != GS_ALL_DONE)
16252 return GS_ERROR;
16255 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
16256 rhs = tmp_load;
16257 storestmt
16258 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16259 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
16261 gimple_omp_atomic_set_weak (loadstmt);
16262 gimple_omp_atomic_set_weak (storestmt);
16264 gimplify_seq_add_stmt (pre_p, storestmt);
16265 switch (TREE_CODE (*expr_p))
16267 case OMP_ATOMIC_READ:
16268 case OMP_ATOMIC_CAPTURE_OLD:
16269 *expr_p = tmp_load;
16270 gimple_omp_atomic_set_need_value (loadstmt);
16271 break;
16272 case OMP_ATOMIC_CAPTURE_NEW:
16273 *expr_p = rhs;
16274 gimple_omp_atomic_set_need_value (storestmt);
16275 break;
16276 default:
16277 *expr_p = NULL;
16278 break;
16281 return GS_ALL_DONE;
16284 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
16285 body, and adding some EH bits. */
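/* For instance, a statement like

     __transaction_atomic { x++; }

   is lowered here to a GIMPLE_TRANSACTION tuple whose body holds the
   gimplified x++ inside a GIMPLE_BIND; GTMA_IS_OUTER or
   GTMA_IS_RELAXED is recorded in the subcode for
   __transaction_atomic [[outer]] and __transaction_relaxed
   respectively.  */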
16287 static enum gimplify_status
16288 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
16290 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
16291 gimple *body_stmt;
16292 gtransaction *trans_stmt;
16293 gimple_seq body = NULL;
16294 int subcode = 0;
/* Wrap the transaction body in a BIND_EXPR so we have a context
   in which to put decls for OMP.  */
16298 if (TREE_CODE (tbody) != BIND_EXPR)
16300 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
16301 TREE_SIDE_EFFECTS (bind) = 1;
16302 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
16303 TRANSACTION_EXPR_BODY (expr) = bind;
16306 push_gimplify_context ();
16307 temp = voidify_wrapper_expr (*expr_p, NULL);
16309 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
16310 pop_gimplify_context (body_stmt);
16312 trans_stmt = gimple_build_transaction (body);
16313 if (TRANSACTION_EXPR_OUTER (expr))
16314 subcode = GTMA_IS_OUTER;
16315 else if (TRANSACTION_EXPR_RELAXED (expr))
16316 subcode = GTMA_IS_RELAXED;
16317 gimple_transaction_set_subcode (trans_stmt, subcode);
16319 gimplify_seq_add_stmt (pre_p, trans_stmt);
16321 if (temp)
16323 *expr_p = temp;
16324 return GS_OK;
16327 *expr_p = NULL_TREE;
16328 return GS_ALL_DONE;
16331 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
16332 is the OMP_BODY of the original EXPR (which has already been
16333 gimplified so it's not present in the EXPR).
16335 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
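/* For a doacross loop nest such as (illustrative only)

     #pragma omp for ordered(1)
     for (i = 0; i < n; i++)
       {
         #pragma omp ordered depend(sink: i - 1)
         ...
         #pragma omp ordered depend(source)
       }

   this verifies that the sink variables match the iteration variables
   of the enclosing loop nest and remaps them to the corresponding
   gimplified iterator decls, diagnosing mismatches.  */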
16337 static gimple *
16338 gimplify_omp_ordered (tree expr, gimple_seq body)
16340 tree c, decls;
16341 int failures = 0;
16342 unsigned int i;
16343 tree source_c = NULL_TREE;
16344 tree sink_c = NULL_TREE;
16346 if (gimplify_omp_ctxp)
16348 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16349 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16350 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16352 error_at (OMP_CLAUSE_LOCATION (c),
16353 "%<ordered%> construct with %qs clause must be "
16354 "closely nested inside a loop with %<ordered%> clause",
16355 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16356 failures++;
16358 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16359 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16361 bool fail = false;
16362 sink_c = c;
16363 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16364 continue; /* omp_cur_iteration - 1 */
16365 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16366 decls && TREE_CODE (decls) == TREE_LIST;
16367 decls = TREE_CHAIN (decls), ++i)
16368 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16369 continue;
16370 else if (TREE_VALUE (decls)
16371 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16373 error_at (OMP_CLAUSE_LOCATION (c),
16374 "variable %qE is not an iteration "
16375 "of outermost loop %d, expected %qE",
16376 TREE_VALUE (decls), i + 1,
16377 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16378 fail = true;
16379 failures++;
16381 else
16382 TREE_VALUE (decls)
16383 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16384 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16386 error_at (OMP_CLAUSE_LOCATION (c),
16387 "number of variables in %qs clause with "
16388 "%<sink%> modifier does not match number of "
16389 "iteration variables",
16390 OMP_CLAUSE_DOACROSS_DEPEND (c)
16391 ? "depend" : "doacross");
16392 failures++;
16395 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16396 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16398 if (source_c)
16400 error_at (OMP_CLAUSE_LOCATION (c),
16401 "more than one %qs clause with %<source%> "
16402 "modifier on an %<ordered%> construct",
16403 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16404 ? "depend" : "doacross");
16405 failures++;
16407 else
16408 source_c = c;
16411 if (source_c && sink_c)
16413 error_at (OMP_CLAUSE_LOCATION (source_c),
16414 "%qs clause with %<source%> modifier specified "
16415 "together with %qs clauses with %<sink%> modifier "
16416 "on the same construct",
16417 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16418 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16419 failures++;
16422 if (failures)
16423 return gimple_build_nop ();
16424 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16427 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16428 expression produces a value to be used as an operand inside a GIMPLE
16429 statement, the value will be stored back in *EXPR_P. This value will
16430 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16431 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16432 emitted in PRE_P and POST_P.
16434 Additionally, this process may overwrite parts of the input
16435 expression during gimplification. Ideally, it should be
16436 possible to do non-destructive gimplification.
16438 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16439 the expression needs to evaluate to a value to be used as
16440 an operand in a GIMPLE statement, this value will be stored in
16441 *EXPR_P on exit. This happens when the caller specifies one
16442 of fb_lvalue or fb_rvalue fallback flags.
16444 PRE_P will contain the sequence of GIMPLE statements corresponding
16445 to the evaluation of EXPR and all the side-effects that must
16446 be executed before the main expression. On exit, the last
16447 statement of PRE_P is the core statement being gimplified. For
16448 instance, when gimplifying 'if (++a)' the last statement in
16449 PRE_P will be 'if (t.1)' where t.1 is the result of
16450 pre-incrementing 'a'.
16452 POST_P will contain the sequence of GIMPLE statements corresponding
16453 to the evaluation of all the side-effects that must be executed
16454 after the main expression. If this is NULL, the post
16455 side-effects are stored at the end of PRE_P.
16457 The reason why the output is split in two is to handle post
16458 side-effects explicitly. In some cases, an expression may have
16459 inner and outer post side-effects which need to be emitted in
16460 an order different from the one given by the recursive
16461 traversal. For instance, for the expression (*p--)++ the post
16462 side-effects of '--' must actually occur *after* the post
16463 side-effects of '++'. However, gimplification will first visit
16464 the inner expression, so if a separate POST sequence was not
16465 used, the resulting sequence would be:
16467 1 t.1 = *p
16468 2 p = p - 1
16469 3 t.2 = t.1 + 1
16470 4 *p = t.2
16472 However, the post-decrement operation in line #2 must not be
16473 evaluated until after the store to *p at line #4, so the
16474 correct sequence should be:
16476 1 t.1 = *p
16477 2 t.2 = t.1 + 1
16478 3 *p = t.2
16479 4 p = p - 1
16481 So, by specifying a separate post queue, it is possible
16482 to emit the post side-effects in the correct order.
16483 If POST_P is NULL, an internal queue will be used. Before
16484 returning to the caller, the sequence POST_P is appended to
16485 the main output sequence PRE_P.
16487 GIMPLE_TEST_F points to a function that takes a tree T and
16488 returns nonzero if T is in the GIMPLE form requested by the
16489 caller. The GIMPLE predicates are in gimple.cc.
16491 FALLBACK tells the function what sort of a temporary we want if
16492 gimplification cannot produce an expression that complies with
16493 GIMPLE_TEST_F.
16495 fb_none means that no temporary should be generated
16496 fb_rvalue means that an rvalue is OK to generate
16497 fb_lvalue means that an lvalue is OK to generate
16498 fb_either means that either is OK, but an lvalue is preferable.
16499 fb_mayfail means that gimplification may fail (in which case
16500 GS_ERROR will be returned)
16502 The return value is either GS_ERROR or GS_ALL_DONE, since this
16503 function iterates until EXPR is completely gimplified or an error
16504 occurs. */
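/* A minimal caller-side sketch (OP, PRE_P and POST_P stand for the
   caller's state): to force OP into a GIMPLE value one writes

     enum gimplify_status gs
       = gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue);

   after which, unless GS is GS_ERROR, OP satisfies is_gimple_val and
   the statements computing it have been appended to *PRE_P.  This is
   the pattern used by the recursive calls throughout this function.  */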
16506 enum gimplify_status
16507 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16508 bool (*gimple_test_f) (tree), fallback_t fallback)
16510 tree tmp;
16511 gimple_seq internal_pre = NULL;
16512 gimple_seq internal_post = NULL;
16513 tree save_expr;
16514 bool is_statement;
16515 location_t saved_location;
16516 enum gimplify_status ret;
16517 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16518 tree label;
16520 save_expr = *expr_p;
16521 if (save_expr == NULL_TREE)
16522 return GS_ALL_DONE;
16524 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16525 is_statement = gimple_test_f == is_gimple_stmt;
16526 if (is_statement)
16527 gcc_assert (pre_p);
16529 /* Consistency checks. */
16530 if (gimple_test_f == is_gimple_reg)
16531 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16532 else if (gimple_test_f == is_gimple_val
16533 || gimple_test_f == is_gimple_call_addr
16534 || gimple_test_f == is_gimple_condexpr_for_cond
16535 || gimple_test_f == is_gimple_mem_rhs
16536 || gimple_test_f == is_gimple_mem_rhs_or_call
16537 || gimple_test_f == is_gimple_reg_rhs
16538 || gimple_test_f == is_gimple_reg_rhs_or_call
16539 || gimple_test_f == is_gimple_asm_val
16540 || gimple_test_f == is_gimple_mem_ref_addr)
16541 gcc_assert (fallback & fb_rvalue);
16542 else if (gimple_test_f == is_gimple_min_lval
16543 || gimple_test_f == is_gimple_lvalue)
16544 gcc_assert (fallback & fb_lvalue);
16545 else if (gimple_test_f == is_gimple_addressable)
16546 gcc_assert (fallback & fb_either);
16547 else if (gimple_test_f == is_gimple_stmt)
16548 gcc_assert (fallback == fb_none);
16549 else
16551 /* We should have recognized the GIMPLE_TEST_F predicate to
16552 know what kind of fallback to use in case a temporary is
16553 needed to hold the value or address of *EXPR_P. */
16554 gcc_unreachable ();
16557 /* We used to check the predicate here and return immediately if it
16558 succeeds. This is wrong; the design is for gimplification to be
16559 idempotent, and for the predicates to only test for valid forms, not
16560 whether they are fully simplified. */
16561 if (pre_p == NULL)
16562 pre_p = &internal_pre;
16564 if (post_p == NULL)
16565 post_p = &internal_post;
16567 /* Remember the last statements added to PRE_P and POST_P. Every
16568 new statement added by the gimplification helpers needs to be
16569 annotated with location information. To centralize the
16570 responsibility, we remember the last statement that had been
16571 added to both queues before gimplifying *EXPR_P. If
16572 gimplification produces new statements in PRE_P and POST_P, those
16573 statements will be annotated with the same location information
16574 as *EXPR_P. */
16575 pre_last_gsi = gsi_last (*pre_p);
16576 post_last_gsi = gsi_last (*post_p);
16578 saved_location = input_location;
16579 if (save_expr != error_mark_node
16580 && EXPR_HAS_LOCATION (*expr_p))
16581 input_location = EXPR_LOCATION (*expr_p);
16583 /* Loop over the specific gimplifiers until the toplevel node
16584 remains the same. */
16587 /* Strip away as many useless type conversions as possible
16588 at the toplevel. */
16589 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16591 /* Remember the expr. */
16592 save_expr = *expr_p;
16594 /* Die, die, die, my darling. */
16595 if (error_operand_p (save_expr))
16597 ret = GS_ERROR;
16598 break;
16601 /* Do any language-specific gimplification. */
16602 ret = ((enum gimplify_status)
16603 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16604 if (ret == GS_OK)
16606 if (*expr_p == NULL_TREE)
16607 break;
16608 if (*expr_p != save_expr)
16609 continue;
16611 else if (ret != GS_UNHANDLED)
16612 break;
16614 /* Make sure that all the cases set 'ret' appropriately. */
16615 ret = GS_UNHANDLED;
16616 switch (TREE_CODE (*expr_p))
16618 /* First deal with the special cases. */
16620 case POSTINCREMENT_EXPR:
16621 case POSTDECREMENT_EXPR:
16622 case PREINCREMENT_EXPR:
16623 case PREDECREMENT_EXPR:
16624 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16625 fallback != fb_none,
16626 TREE_TYPE (*expr_p));
16627 break;
16629 case VIEW_CONVERT_EXPR:
16630 if ((fallback & fb_rvalue)
16631 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16632 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16634 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16635 post_p, is_gimple_val, fb_rvalue);
16636 recalculate_side_effects (*expr_p);
16637 break;
16639 /* Fallthru. */
16641 case ARRAY_REF:
16642 case ARRAY_RANGE_REF:
16643 case REALPART_EXPR:
16644 case IMAGPART_EXPR:
16645 case COMPONENT_REF:
16646 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16647 fallback ? fallback : fb_rvalue);
16648 break;
16650 case COND_EXPR:
16651 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16653 /* C99 code may assign to an array in a structure value of a
16654 conditional expression, and this has undefined behavior
16655 only on execution, so create a temporary if an lvalue is
16656 required. */
16657 if (fallback == fb_lvalue)
16659 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16660 mark_addressable (*expr_p);
16661 ret = GS_OK;
16663 break;
16665 case CALL_EXPR:
16666 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16668 /* C99 code may assign to an array in a structure returned
16669 from a function, and this has undefined behavior only on
16670 execution, so create a temporary if an lvalue is
16671 required. */
16672 if (fallback == fb_lvalue)
16674 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16675 mark_addressable (*expr_p);
16676 ret = GS_OK;
16678 break;
16680 case TREE_LIST:
16681 gcc_unreachable ();
16683 case COMPOUND_EXPR:
16684 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16685 break;
16687 case COMPOUND_LITERAL_EXPR:
16688 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16689 gimple_test_f, fallback);
16690 break;
16692 case MODIFY_EXPR:
16693 case INIT_EXPR:
16694 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16695 fallback != fb_none);
16696 break;
16698 case TRUTH_ANDIF_EXPR:
16699 case TRUTH_ORIF_EXPR:
16701 /* Preserve the original type of the expression and the
16702 source location of the outer expression. */
16703 tree org_type = TREE_TYPE (*expr_p);
16704 *expr_p = gimple_boolify (*expr_p);
16705 *expr_p = build3_loc (input_location, COND_EXPR,
16706 org_type, *expr_p,
16707 fold_convert_loc
16708 (input_location,
16709 org_type, boolean_true_node),
16710 fold_convert_loc
16711 (input_location,
16712 org_type, boolean_false_node));
16713 ret = GS_OK;
16714 break;
16717 case TRUTH_NOT_EXPR:
16719 tree type = TREE_TYPE (*expr_p);
16720 /* The parsers are careful to generate TRUTH_NOT_EXPR
16721 only with operands that are always zero or one.
16722 We do not fold here but handle the only interesting case
16723 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
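	/* Concretely, !B becomes ~B when the boolified type has
	   precision 1, and B ^ 1 otherwise; neither form gets folded
	   back into a TRUTH_NOT_EXPR.  */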
16724 *expr_p = gimple_boolify (*expr_p);
16725 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16726 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16727 TREE_TYPE (*expr_p),
16728 TREE_OPERAND (*expr_p, 0));
16729 else
16730 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16731 TREE_TYPE (*expr_p),
16732 TREE_OPERAND (*expr_p, 0),
16733 build_int_cst (TREE_TYPE (*expr_p), 1));
16734 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16735 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16736 ret = GS_OK;
16737 break;
16740 case ADDR_EXPR:
16741 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16742 break;
16744 case ANNOTATE_EXPR:
16746 tree cond = TREE_OPERAND (*expr_p, 0);
16747 tree kind = TREE_OPERAND (*expr_p, 1);
16748 tree data = TREE_OPERAND (*expr_p, 2);
16749 tree type = TREE_TYPE (cond);
16750 if (!INTEGRAL_TYPE_P (type))
16752 *expr_p = cond;
16753 ret = GS_OK;
16754 break;
16756 tree tmp = create_tmp_var (type);
16757 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16758 gcall *call
16759 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16760 gimple_call_set_lhs (call, tmp);
16761 gimplify_seq_add_stmt (pre_p, call);
16762 *expr_p = tmp;
16763 ret = GS_ALL_DONE;
16764 break;
16767 case VA_ARG_EXPR:
16768 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16769 break;
16771 CASE_CONVERT:
16772 if (IS_EMPTY_STMT (*expr_p))
16774 ret = GS_ALL_DONE;
16775 break;
16778 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16779 || fallback == fb_none)
16781 /* Just strip a conversion to void (or in void context) and
16782 try again. */
16783 *expr_p = TREE_OPERAND (*expr_p, 0);
16784 ret = GS_OK;
16785 break;
16788 ret = gimplify_conversion (expr_p);
16789 if (ret == GS_ERROR)
16790 break;
16791 if (*expr_p != save_expr)
16792 break;
16793 /* FALLTHRU */
16795 case FIX_TRUNC_EXPR:
16796 /* unary_expr: ... | '(' cast ')' val | ... */
16797 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16798 is_gimple_val, fb_rvalue);
16799 recalculate_side_effects (*expr_p);
16800 break;
16802 case INDIRECT_REF:
16804 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16805 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16806 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16808 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16809 if (*expr_p != save_expr)
16811 ret = GS_OK;
16812 break;
16815 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16816 is_gimple_reg, fb_rvalue);
16817 if (ret == GS_ERROR)
16818 break;
16820 recalculate_side_effects (*expr_p);
16821 *expr_p = fold_build2_loc (input_location, MEM_REF,
16822 TREE_TYPE (*expr_p),
16823 TREE_OPERAND (*expr_p, 0),
16824 build_int_cst (saved_ptr_type, 0));
16825 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16826 TREE_THIS_NOTRAP (*expr_p) = notrap;
16827 ret = GS_OK;
16828 break;
16831 /* We arrive here through the various re-gimplification paths. */
16832 case MEM_REF:
16833 /* First try re-folding the whole thing. */
16834 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16835 TREE_OPERAND (*expr_p, 0),
16836 TREE_OPERAND (*expr_p, 1));
16837 if (tmp)
16839 REF_REVERSE_STORAGE_ORDER (tmp)
16840 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16841 *expr_p = tmp;
16842 recalculate_side_effects (*expr_p);
16843 ret = GS_OK;
16844 break;
16846 /* Avoid re-gimplifying the address operand if it is already
16847 in suitable form. Re-gimplifying would mark the address
16848 operand addressable. Always gimplify when not in SSA form
16849 as we still may have to gimplify decls with value-exprs. */
16850 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16851 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16853 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16854 is_gimple_mem_ref_addr, fb_rvalue);
16855 if (ret == GS_ERROR)
16856 break;
16858 recalculate_side_effects (*expr_p);
16859 ret = GS_ALL_DONE;
16860 break;
16862 /* Constants need not be gimplified. */
16863 case INTEGER_CST:
16864 case REAL_CST:
16865 case FIXED_CST:
16866 case STRING_CST:
16867 case COMPLEX_CST:
16868 case VECTOR_CST:
16869 /* Drop the overflow flag on constants, we do not want
16870 that in the GIMPLE IL. */
16871 if (TREE_OVERFLOW_P (*expr_p))
16872 *expr_p = drop_tree_overflow (*expr_p);
16873 ret = GS_ALL_DONE;
16874 break;
16876 case CONST_DECL:
16877 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16878 CONST_DECL node. Otherwise the decl is replaceable by its
16879 value. */
16880 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16881 if (fallback & fb_lvalue)
16882 ret = GS_ALL_DONE;
16883 else
16885 *expr_p = DECL_INITIAL (*expr_p);
16886 ret = GS_OK;
16888 break;
16890 case DECL_EXPR:
16891 ret = gimplify_decl_expr (expr_p, pre_p);
16892 break;
16894 case BIND_EXPR:
16895 ret = gimplify_bind_expr (expr_p, pre_p);
16896 break;
16898 case LOOP_EXPR:
16899 ret = gimplify_loop_expr (expr_p, pre_p);
16900 break;
16902 case SWITCH_EXPR:
16903 ret = gimplify_switch_expr (expr_p, pre_p);
16904 break;
16906 case EXIT_EXPR:
16907 ret = gimplify_exit_expr (expr_p);
16908 break;
16910 case GOTO_EXPR:
16911 /* If the target is not a LABEL_DECL, then it is a computed jump
16912 and the target needs to be gimplified. */
16913 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16915 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16916 NULL, is_gimple_val, fb_rvalue);
16917 if (ret == GS_ERROR)
16918 break;
16920 gimplify_seq_add_stmt (pre_p,
16921 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16922 ret = GS_ALL_DONE;
16923 break;
16925 case PREDICT_EXPR:
16926 gimplify_seq_add_stmt (pre_p,
16927 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16928 PREDICT_EXPR_OUTCOME (*expr_p)));
16929 ret = GS_ALL_DONE;
16930 break;
16932 case LABEL_EXPR:
16933 ret = gimplify_label_expr (expr_p, pre_p);
16934 label = LABEL_EXPR_LABEL (*expr_p);
16935 gcc_assert (decl_function_context (label) == current_function_decl);
16937 /* If the label is used in a goto statement, or the address of the
16938 label is taken, we need to unpoison all variables that were seen
16939 so far.  Doing so prevents us from reporting false positives. */
16940 if (asan_poisoned_variables
16941 && asan_used_labels != NULL
16942 && asan_used_labels->contains (label)
16943 && !gimplify_omp_ctxp)
16944 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16945 break;
16947 case CASE_LABEL_EXPR:
16948 ret = gimplify_case_label_expr (expr_p, pre_p);
16950 if (gimplify_ctxp->live_switch_vars)
16951 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16952 pre_p);
16953 break;
16955 case RETURN_EXPR:
16956 ret = gimplify_return_expr (*expr_p, pre_p);
16957 break;
16959 case CONSTRUCTOR:
16960 /* Don't reduce this in place; let gimplify_init_constructor work its
16961 magic.  But if we're just elaborating this for side effects,
16962 gimplify any element that has side-effects. */
16963 if (fallback == fb_none)
16965 unsigned HOST_WIDE_INT ix;
16966 tree val;
16967 tree temp = NULL_TREE;
16968 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16969 if (TREE_SIDE_EFFECTS (val))
16970 append_to_statement_list (val, &temp);
16972 *expr_p = temp;
16973 ret = temp ? GS_OK : GS_ALL_DONE;
16975 /* C99 code may assign to an array in a constructed
16976 structure or union, and this has undefined behavior only
16977 on execution, so create a temporary if an lvalue is
16978 required. */
16979 else if (fallback == fb_lvalue)
16981 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16982 mark_addressable (*expr_p);
16983 ret = GS_OK;
16985 else
16986 ret = GS_ALL_DONE;
16987 break;
16989 /* The following are special cases that are not handled by the
16990 original GIMPLE grammar. */
16992 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16993 eliminated. */
16994 case SAVE_EXPR:
16995 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16996 break;
16998 case BIT_FIELD_REF:
16999 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17000 post_p, is_gimple_lvalue, fb_either);
17001 recalculate_side_effects (*expr_p);
17002 break;
17004 case TARGET_MEM_REF:
17006 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
17008 if (TMR_BASE (*expr_p))
17009 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
17010 post_p, is_gimple_mem_ref_addr, fb_either);
17011 if (TMR_INDEX (*expr_p))
17012 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
17013 post_p, is_gimple_val, fb_rvalue);
17014 if (TMR_INDEX2 (*expr_p))
17015 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
17016 post_p, is_gimple_val, fb_rvalue);
17017 /* TMR_STEP and TMR_OFFSET are always integer constants. */
17018 ret = MIN (r0, r1);
17020 break;
17022 case NON_LVALUE_EXPR:
17023 /* This should have been stripped above. */
17024 gcc_unreachable ();
17026 case ASM_EXPR:
17027 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
17028 break;
17030 case TRY_FINALLY_EXPR:
17031 case TRY_CATCH_EXPR:
17033 gimple_seq eval, cleanup;
17034 gtry *try_;
17036 /* Calls to destructors are generated automatically in FINALLY/CATCH
17037 block. They should have location as UNKNOWN_LOCATION. However,
17038 gimplify_call_expr will reset these call stmts to input_location
17039 if it finds stmt's location is unknown. To prevent resetting for
17040 destructors, we set the input_location to unknown.
17041 Note that this only affects the destructor calls in FINALLY/CATCH
17042 block, and will automatically reset to its original value by the
17043 end of gimplify_expr. */
17044 input_location = UNKNOWN_LOCATION;
17045 eval = cleanup = NULL;
17046 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
17047 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
17048 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
17050 gimple_seq n = NULL, e = NULL;
17051 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
17052 0), &n);
17053 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
17054 1), &e);
17055 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
17057 geh_else *stmt = gimple_build_eh_else (n, e);
17058 gimple_seq_add_stmt (&cleanup, stmt);
17061 else
17062 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
17063 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
17064 if (gimple_seq_empty_p (cleanup))
17066 gimple_seq_add_seq (pre_p, eval);
17067 ret = GS_ALL_DONE;
17068 break;
17070 try_ = gimple_build_try (eval, cleanup,
17071 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
17072 ? GIMPLE_TRY_FINALLY
17073 : GIMPLE_TRY_CATCH);
17074 if (EXPR_HAS_LOCATION (save_expr))
17075 gimple_set_location (try_, EXPR_LOCATION (save_expr));
17076 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
17077 gimple_set_location (try_, saved_location);
17078 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
17079 gimple_try_set_catch_is_cleanup (try_,
17080 TRY_CATCH_IS_CLEANUP (*expr_p));
17081 gimplify_seq_add_stmt (pre_p, try_);
17082 ret = GS_ALL_DONE;
17083 break;
17086 case CLEANUP_POINT_EXPR:
17087 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
17088 break;
17090 case TARGET_EXPR:
17091 ret = gimplify_target_expr (expr_p, pre_p, post_p);
17092 break;
17094 case CATCH_EXPR:
17096 gimple *c;
17097 gimple_seq handler = NULL;
17098 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
17099 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
17100 gimplify_seq_add_stmt (pre_p, c);
17101 ret = GS_ALL_DONE;
17102 break;
17105 case EH_FILTER_EXPR:
17107 gimple *ehf;
17108 gimple_seq failure = NULL;
17110 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
17111 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
17112 copy_warning (ehf, *expr_p);
17113 gimplify_seq_add_stmt (pre_p, ehf);
17114 ret = GS_ALL_DONE;
17115 break;
17118 case OBJ_TYPE_REF:
17120 enum gimplify_status r0, r1;
17121 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
17122 post_p, is_gimple_val, fb_rvalue);
17123 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
17124 post_p, is_gimple_val, fb_rvalue);
17125 TREE_SIDE_EFFECTS (*expr_p) = 0;
17126 ret = MIN (r0, r1);
17128 break;
17130 case LABEL_DECL:
17131 /* We get here when taking the address of a label. We mark
17132 the label as "forced", meaning it can never be removed and
17133 it is a potential target for any computed goto. */
17134 FORCED_LABEL (*expr_p) = 1;
17135 ret = GS_ALL_DONE;
17136 break;
17138 case STATEMENT_LIST:
17139 ret = gimplify_statement_list (expr_p, pre_p);
17140 break;
17142 case WITH_SIZE_EXPR:
17144 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17145 post_p == &internal_post ? NULL : post_p,
17146 gimple_test_f, fallback);
17147 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17148 is_gimple_val, fb_rvalue);
17149 ret = GS_ALL_DONE;
17151 break;
17153 case VAR_DECL:
17154 case PARM_DECL:
17155 ret = gimplify_var_or_parm_decl (expr_p);
17156 break;
17158 case RESULT_DECL:
17159 /* When within an OMP context, notice uses of variables. */
17160 if (gimplify_omp_ctxp)
17161 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
17162 ret = GS_ALL_DONE;
17163 break;
17165 case DEBUG_EXPR_DECL:
17166 gcc_unreachable ();
17168 case DEBUG_BEGIN_STMT:
17169 gimplify_seq_add_stmt (pre_p,
17170 gimple_build_debug_begin_stmt
17171 (TREE_BLOCK (*expr_p),
17172 EXPR_LOCATION (*expr_p)));
17173 ret = GS_ALL_DONE;
17174 *expr_p = NULL;
17175 break;
17177 case SSA_NAME:
17178 /* Allow callbacks into the gimplifier during optimization. */
17179 ret = GS_ALL_DONE;
17180 break;
17182 case OMP_PARALLEL:
17183 gimplify_omp_parallel (expr_p, pre_p);
17184 ret = GS_ALL_DONE;
17185 break;
17187 case OMP_TASK:
17188 gimplify_omp_task (expr_p, pre_p);
17189 ret = GS_ALL_DONE;
17190 break;
17192 case OMP_SIMD:
17194 /* Temporarily disable into_ssa, as scan_omp_simd
17195 which calls copy_gimple_seq_and_replace_locals can't deal
17196 with SSA_NAMEs defined outside of the body properly. */
17197 bool saved_into_ssa = gimplify_ctxp->into_ssa;
17198 gimplify_ctxp->into_ssa = false;
17199 ret = gimplify_omp_for (expr_p, pre_p);
17200 gimplify_ctxp->into_ssa = saved_into_ssa;
17201 break;
17204 case OMP_FOR:
17205 case OMP_DISTRIBUTE:
17206 case OMP_TASKLOOP:
17207 case OACC_LOOP:
17208 ret = gimplify_omp_for (expr_p, pre_p);
17209 break;
17211 case OMP_LOOP:
17212 ret = gimplify_omp_loop (expr_p, pre_p);
17213 break;
17215 case OACC_CACHE:
17216 gimplify_oacc_cache (expr_p, pre_p);
17217 ret = GS_ALL_DONE;
17218 break;
17220 case OACC_DECLARE:
17221 gimplify_oacc_declare (expr_p, pre_p);
17222 ret = GS_ALL_DONE;
17223 break;
17225 case OACC_HOST_DATA:
17226 case OACC_DATA:
17227 case OACC_KERNELS:
17228 case OACC_PARALLEL:
17229 case OACC_SERIAL:
17230 case OMP_SCOPE:
17231 case OMP_SECTIONS:
17232 case OMP_SINGLE:
17233 case OMP_TARGET:
17234 case OMP_TARGET_DATA:
17235 case OMP_TEAMS:
17236 gimplify_omp_workshare (expr_p, pre_p);
17237 ret = GS_ALL_DONE;
17238 break;
17240 case OACC_ENTER_DATA:
17241 case OACC_EXIT_DATA:
17242 case OACC_UPDATE:
17243 case OMP_TARGET_UPDATE:
17244 case OMP_TARGET_ENTER_DATA:
17245 case OMP_TARGET_EXIT_DATA:
17246 gimplify_omp_target_update (expr_p, pre_p);
17247 ret = GS_ALL_DONE;
17248 break;
17250 case OMP_SECTION:
17251 case OMP_STRUCTURED_BLOCK:
17252 case OMP_MASTER:
17253 case OMP_MASKED:
17254 case OMP_ORDERED:
17255 case OMP_CRITICAL:
17256 case OMP_SCAN:
17258 gimple_seq body = NULL;
17259 gimple *g;
17260 bool saved_in_omp_construct = in_omp_construct;
17262 in_omp_construct = true;
17263 gimplify_and_add (OMP_BODY (*expr_p), &body);
17264 in_omp_construct = saved_in_omp_construct;
17265 switch (TREE_CODE (*expr_p))
17267 case OMP_SECTION:
17268 g = gimple_build_omp_section (body);
17269 break;
17270 case OMP_STRUCTURED_BLOCK:
17271 g = gimple_build_omp_structured_block (body);
17272 break;
17273 case OMP_MASTER:
17274 g = gimple_build_omp_master (body);
17275 break;
17276 case OMP_ORDERED:
17277 g = gimplify_omp_ordered (*expr_p, body);
17278 if (OMP_BODY (*expr_p) == NULL_TREE
17279 && gimple_code (g) == GIMPLE_OMP_ORDERED)
17280 gimple_omp_ordered_standalone (g);
17281 break;
17282 case OMP_MASKED:
17283 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
17284 pre_p, ORT_WORKSHARE, OMP_MASKED);
17285 gimplify_adjust_omp_clauses (pre_p, body,
17286 &OMP_MASKED_CLAUSES (*expr_p),
17287 OMP_MASKED);
17288 g = gimple_build_omp_masked (body,
17289 OMP_MASKED_CLAUSES (*expr_p));
17290 break;
17291 case OMP_CRITICAL:
17292 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
17293 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
17294 gimplify_adjust_omp_clauses (pre_p, body,
17295 &OMP_CRITICAL_CLAUSES (*expr_p),
17296 OMP_CRITICAL);
17297 g = gimple_build_omp_critical (body,
17298 OMP_CRITICAL_NAME (*expr_p),
17299 OMP_CRITICAL_CLAUSES (*expr_p));
17300 break;
17301 case OMP_SCAN:
17302 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
17303 pre_p, ORT_WORKSHARE, OMP_SCAN);
17304 gimplify_adjust_omp_clauses (pre_p, body,
17305 &OMP_SCAN_CLAUSES (*expr_p),
17306 OMP_SCAN);
17307 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
17308 break;
17309 default:
17310 gcc_unreachable ();
17312 gimplify_seq_add_stmt (pre_p, g);
17313 ret = GS_ALL_DONE;
17314 break;
17317 case OMP_TASKGROUP:
17319 gimple_seq body = NULL;
17321 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
17322 bool saved_in_omp_construct = in_omp_construct;
17323 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
17324 OMP_TASKGROUP);
17325 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
17327 in_omp_construct = true;
17328 gimplify_and_add (OMP_BODY (*expr_p), &body);
17329 in_omp_construct = saved_in_omp_construct;
17330 gimple_seq cleanup = NULL;
17331 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
17332 gimple *g = gimple_build_call (fn, 0);
17333 gimple_seq_add_stmt (&cleanup, g);
17334 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
17335 body = NULL;
17336 gimple_seq_add_stmt (&body, g);
17337 g = gimple_build_omp_taskgroup (body, *pclauses);
17338 gimplify_seq_add_stmt (pre_p, g);
17339 ret = GS_ALL_DONE;
17340 break;
17343 case OMP_ATOMIC:
17344 case OMP_ATOMIC_READ:
17345 case OMP_ATOMIC_CAPTURE_OLD:
17346 case OMP_ATOMIC_CAPTURE_NEW:
17347 ret = gimplify_omp_atomic (expr_p, pre_p);
17348 break;
17350 case TRANSACTION_EXPR:
17351 ret = gimplify_transaction (expr_p, pre_p);
17352 break;
17354 case TRUTH_AND_EXPR:
17355 case TRUTH_OR_EXPR:
17356 case TRUTH_XOR_EXPR:
17358 tree orig_type = TREE_TYPE (*expr_p);
17359 tree new_type, xop0, xop1;
17360 *expr_p = gimple_boolify (*expr_p);
17361 new_type = TREE_TYPE (*expr_p);
17362 if (!useless_type_conversion_p (orig_type, new_type))
17364 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17365 ret = GS_OK;
17366 break;
17369 /* Boolified binary truth expressions are semantically equivalent
17370 to bitwise binary expressions. Canonicalize them to the
17371 bitwise variant. */
17372 switch (TREE_CODE (*expr_p))
17374 case TRUTH_AND_EXPR:
17375 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17376 break;
17377 case TRUTH_OR_EXPR:
17378 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17379 break;
17380 case TRUTH_XOR_EXPR:
17381 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17382 break;
17383 default:
17384 break;
17386 /* Now make sure that operands have compatible type to
17387 expression's new_type. */
17388 xop0 = TREE_OPERAND (*expr_p, 0);
17389 xop1 = TREE_OPERAND (*expr_p, 1);
17390 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17391 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17392 new_type,
17393 xop0);
17394 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17395 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17396 new_type,
17397 xop1);
17398 /* Continue classified as tcc_binary. */
17399 goto expr_2;
17402 case VEC_COND_EXPR:
17403 goto expr_3;
17405 case VEC_PERM_EXPR:
17406 /* Classified as tcc_expression. */
17407 goto expr_3;
17409 case BIT_INSERT_EXPR:
17410 /* Argument 3 is a constant. */
17411 goto expr_2;
17413 case POINTER_PLUS_EXPR:
17415 enum gimplify_status r0, r1;
17416 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17417 post_p, is_gimple_val, fb_rvalue);
17418 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17419 post_p, is_gimple_val, fb_rvalue);
17420 recalculate_side_effects (*expr_p);
17421 ret = MIN (r0, r1);
17422 break;
17425 default:
17426 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17428 case tcc_comparison:
17429 /* Handle comparison of non-scalar-mode aggregate objects with
17430 a call to memcmp.  It would be nice to only have to do
17431 this for variable-sized objects, but then we'd have to allow
17432 the same nest of reference nodes we allow for MODIFY_EXPR and
17433 that's too complex.
17435 Compare scalar mode aggregates as scalar mode values. Using
17436 memcmp for them would be very inefficient at best, and is
17437 plain wrong if bitfields are involved. */
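	    /* Summarizing the dispatch below on the type of the
	       compared operands: vector comparisons pass through
	       unchanged, non-aggregate comparisons are boolified,
	       scalar mode aggregates go through
	       gimplify_scalar_mode_aggregate_compare, and only BLKmode
	       aggregates reach gimplify_variable_sized_compare's
	       memcmp path.  */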
17438 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17439 ret = GS_ERROR;
17440 else
17442 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17444 /* Vector comparisons need no boolification. */
17445 if (TREE_CODE (type) == VECTOR_TYPE)
17446 goto expr_2;
17447 else if (!AGGREGATE_TYPE_P (type))
17449 tree org_type = TREE_TYPE (*expr_p);
17450 *expr_p = gimple_boolify (*expr_p);
17451 if (!useless_type_conversion_p (org_type,
17452 TREE_TYPE (*expr_p)))
17454 *expr_p = fold_convert_loc (input_location,
17455 org_type, *expr_p);
17456 ret = GS_OK;
17458 else
17459 goto expr_2;
17461 else if (TYPE_MODE (type) != BLKmode)
17462 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17463 else
17464 ret = gimplify_variable_sized_compare (expr_p);
17466 break;
17468 /* If *EXPR_P does not need to be special-cased, handle it
17469 according to its class. */
17470 case tcc_unary:
17471 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17472 post_p, is_gimple_val, fb_rvalue);
17473 break;
17475 case tcc_binary:
17476 expr_2:
17478 enum gimplify_status r0, r1;
17480 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17481 post_p, is_gimple_val, fb_rvalue);
17482 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17483 post_p, is_gimple_val, fb_rvalue);
17485 ret = MIN (r0, r1);
17486 break;
17489 expr_3:
17491 enum gimplify_status r0, r1, r2;
17493 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17494 post_p, is_gimple_val, fb_rvalue);
17495 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17496 post_p, is_gimple_val, fb_rvalue);
17497 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17498 post_p, is_gimple_val, fb_rvalue);
17500 ret = MIN (MIN (r0, r1), r2);
17501 break;
17504 case tcc_declaration:
17505 case tcc_constant:
17506 ret = GS_ALL_DONE;
17507 goto dont_recalculate;
17509 default:
17510 gcc_unreachable ();
17513 recalculate_side_effects (*expr_p);
17515 dont_recalculate:
17516 break;
17519 gcc_assert (*expr_p || ret != GS_OK);
17521 while (ret == GS_OK);
17523 /* If we encountered an error_mark somewhere nested inside, either
17524 stub out the statement or propagate the error back out. */
17525 if (ret == GS_ERROR)
17527 if (is_statement)
17528 *expr_p = NULL;
17529 goto out;
17532 /* This was only valid as a return value from the langhook, which
17533 we handled. Make sure it doesn't escape from any other context. */
17534 gcc_assert (ret != GS_UNHANDLED);
17536 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17538 /* We aren't looking for a value, and we don't have a valid
17539 statement. If it doesn't have side-effects, throw it away.
17540 We can also get here with code such as "*&&L;", where L is
17541 a LABEL_DECL that is marked as FORCED_LABEL. */
17542 if (TREE_CODE (*expr_p) == LABEL_DECL
17543 || !TREE_SIDE_EFFECTS (*expr_p))
17544 *expr_p = NULL;
17545 else if (!TREE_THIS_VOLATILE (*expr_p))
17547 /* This is probably a _REF that contains something nested that
17548 has side effects. Recurse through the operands to find it. */
17549 enum tree_code code = TREE_CODE (*expr_p);
17551 switch (code)
17553 case COMPONENT_REF:
17554 case REALPART_EXPR:
17555 case IMAGPART_EXPR:
17556 case VIEW_CONVERT_EXPR:
17557 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17558 gimple_test_f, fallback);
17559 break;
17561 case ARRAY_REF:
17562 case ARRAY_RANGE_REF:
17563 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17564 gimple_test_f, fallback);
17565 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17566 gimple_test_f, fallback);
17567 break;
17569 default:
17570 /* Anything else with side-effects must be converted to
17571 a valid statement before we get here. */
17572 gcc_unreachable ();
17575 *expr_p = NULL;
17577 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17578 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17579 && !is_empty_type (TREE_TYPE (*expr_p)))
17581 /* Historically, the compiler has treated a bare reference
17582 to a non-BLKmode volatile lvalue as forcing a load. */
17583 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17585 /* Normally, we do not want to create a temporary for a
17586 TREE_ADDRESSABLE type because such a type should not be
17587 copied by bitwise-assignment. However, we make an
17588 exception here, as all we are doing here is ensuring that
17589 we read the bytes that make up the type. We use
17590 create_tmp_var_raw because create_tmp_var will abort when
17591 given a TREE_ADDRESSABLE type. */
17592 tree tmp = create_tmp_var_raw (type, "vol");
17593 gimple_add_tmp_var (tmp);
17594 gimplify_assign (tmp, *expr_p, pre_p);
17595 *expr_p = NULL;
17597 else
17598 /* We can't do anything useful with a volatile reference to
17599 an incomplete type, so just throw it away. Likewise for
17600 a BLKmode type, since any implicit inner load should
17601 already have been turned into an explicit one by the
17602 gimplification process. */
17603 *expr_p = NULL;
17606 /* If we are gimplifying at the statement level, we're done. Tack
17607 everything together and return. */
17608 if (fallback == fb_none || is_statement)
17610 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17611 it out for GC to reclaim it. */
17612 *expr_p = NULL_TREE;
17614 if (!gimple_seq_empty_p (internal_pre)
17615 || !gimple_seq_empty_p (internal_post))
17617 gimplify_seq_add_seq (&internal_pre, internal_post);
17618 gimplify_seq_add_seq (pre_p, internal_pre);
17621 /* The result of gimplifying *EXPR_P is going to be the last few
17622 statements in *PRE_P and *POST_P. Add location information
17623 to all the statements that were added by the gimplification
17624 helpers. */
17625 if (!gimple_seq_empty_p (*pre_p))
17626 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17628 if (!gimple_seq_empty_p (*post_p))
17629 annotate_all_with_location_after (*post_p, post_last_gsi,
17630 input_location);
17632 goto out;
17635 #ifdef ENABLE_GIMPLE_CHECKING
17636 if (*expr_p)
17638 enum tree_code code = TREE_CODE (*expr_p);
17639 /* These expressions should already be in gimple IR form. */
17640 gcc_assert (code != MODIFY_EXPR
17641 && code != ASM_EXPR
17642 && code != BIND_EXPR
17643 && code != CATCH_EXPR
17644 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17645 && code != EH_FILTER_EXPR
17646 && code != GOTO_EXPR
17647 && code != LABEL_EXPR
17648 && code != LOOP_EXPR
17649 && code != SWITCH_EXPR
17650 && code != TRY_FINALLY_EXPR
17651 && code != EH_ELSE_EXPR
17652 && code != OACC_PARALLEL
17653 && code != OACC_KERNELS
17654 && code != OACC_SERIAL
17655 && code != OACC_DATA
17656 && code != OACC_HOST_DATA
17657 && code != OACC_DECLARE
17658 && code != OACC_UPDATE
17659 && code != OACC_ENTER_DATA
17660 && code != OACC_EXIT_DATA
17661 && code != OACC_CACHE
17662 && code != OMP_CRITICAL
17663 && code != OMP_FOR
17664 && code != OACC_LOOP
17665 && code != OMP_MASTER
17666 && code != OMP_MASKED
17667 && code != OMP_TASKGROUP
17668 && code != OMP_ORDERED
17669 && code != OMP_PARALLEL
17670 && code != OMP_SCAN
17671 && code != OMP_SECTIONS
17672 && code != OMP_SECTION
17673 && code != OMP_STRUCTURED_BLOCK
17674 && code != OMP_SINGLE
17675 && code != OMP_SCOPE);
17677 #endif
17679 /* Otherwise we're gimplifying a subexpression, so the resulting
17680 value is interesting. If it's a valid operand that matches
17681 GIMPLE_TEST_F, we're done. Unless we are handling some
17682 post-effects internally; if that's the case, we need to copy into
17683 a temporary before adding the post-effects to POST_P. */
17684 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17685 goto out;
17687 /* Otherwise, we need to create a new temporary for the gimplified
17688 expression. */
17690 /* We can't return an lvalue if we have an internal postqueue. The
17691 object the lvalue refers to would (probably) be modified by the
17692 postqueue; we need to copy the value out first, which means an
17693 rvalue. */
17694 if ((fallback & fb_lvalue)
17695 && gimple_seq_empty_p (internal_post)
17696 && is_gimple_addressable (*expr_p))
17698 /* An lvalue will do. Take the address of the expression, store it
17699 in a temporary, and replace the expression with an INDIRECT_REF of
17700 that temporary. */
17701 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17702 unsigned int ref_align = get_object_alignment (*expr_p);
17703 tree ref_type = TREE_TYPE (*expr_p);
17704 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17705 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17706 if (TYPE_ALIGN (ref_type) != ref_align)
17707 ref_type = build_aligned_type (ref_type, ref_align);
17708 *expr_p = build2 (MEM_REF, ref_type,
17709 tmp, build_zero_cst (ref_alias_type));
17711 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17713 /* An rvalue will do. Assign the gimplified expression into a
17714 new temporary TMP and replace the original expression with
17715 TMP. First, make sure that the expression has a type so that
17716 it can be assigned into a temporary. */
17717 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17718 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17720 else
17722 #ifdef ENABLE_GIMPLE_CHECKING
17723 if (!(fallback & fb_mayfail))
17725 fprintf (stderr, "gimplification failed:\n");
17726 print_generic_expr (stderr, *expr_p);
17727 debug_tree (*expr_p);
17728 internal_error ("gimplification failed");
17730 #endif
17731 gcc_assert (fallback & fb_mayfail);
17733 /* If this is an asm statement, and the user asked for the
17734 impossible, don't die. Fail and let gimplify_asm_expr
17735 issue an error. */
17736 ret = GS_ERROR;
17737 goto out;
17740 /* Make sure the temporary matches our predicate. */
17741 gcc_assert ((*gimple_test_f) (*expr_p));
17743 if (!gimple_seq_empty_p (internal_post))
17745 annotate_all_with_location (internal_post, input_location);
17746 gimplify_seq_add_seq (pre_p, internal_post);
17749 out:
17750 input_location = saved_location;
17751 return ret;
17754 /* Like gimplify_expr but make sure the gimplified result is not itself
17755 an SSA name (it is copied into a temporary decl if it would be).
17756 Temporaries required by evaluating *EXPR_P may still be SSA names. */
17758 static enum gimplify_status
17759 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17760 bool (*gimple_test_f) (tree), fallback_t fallback,
17761 bool allow_ssa)
17763 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17764 gimple_test_f, fallback);
17765 if (! allow_ssa
17766 && TREE_CODE (*expr_p) == SSA_NAME)
17767 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17768 return ret;
17771 /* Look through TYPE for variable-sized objects and gimplify each such
17772 size that we find. Add to LIST_P any statements generated. */
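/* For instance, given a C99 VLA type such as 'int a[n][m]', both the
   array bounds and TYPE_SIZE contain expressions in N and M; they are
   gimplified here so that every later use of the sizes sees a simple
   gimple value.  */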
17774 void
17775 gimplify_type_sizes (tree type, gimple_seq *list_p)
17777 if (type == NULL || type == error_mark_node)
17778 return;
17780 const bool ignored_p
17781 = TYPE_NAME (type)
17782 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17783 && DECL_IGNORED_P (TYPE_NAME (type));
17784 tree t;
17786 /* We first do the main variant, then copy into any other variants. */
17787 type = TYPE_MAIN_VARIANT (type);
17789 /* Avoid infinite recursion. */
17790 if (TYPE_SIZES_GIMPLIFIED (type))
17791 return;
17793 TYPE_SIZES_GIMPLIFIED (type) = 1;
17795 switch (TREE_CODE (type))
17797 case INTEGER_TYPE:
17798 case ENUMERAL_TYPE:
17799 case BOOLEAN_TYPE:
17800 case REAL_TYPE:
17801 case FIXED_POINT_TYPE:
17802 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17803 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17805 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17807 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17808 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17810 break;
17812 case ARRAY_TYPE:
17813 /* These types may not have declarations, so handle them here. */
17814 gimplify_type_sizes (TREE_TYPE (type), list_p);
17815 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17816 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
17817 with assigned stack slots, for -O1+ -g they should be tracked
17818 by VTA. */
17819 if (!ignored_p
17820 && TYPE_DOMAIN (type)
17821 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17823 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17824 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17825 DECL_IGNORED_P (t) = 0;
17826 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17827 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17828 DECL_IGNORED_P (t) = 0;
17830 break;
17832 case RECORD_TYPE:
17833 case UNION_TYPE:
17834 case QUAL_UNION_TYPE:
17835 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17836 if (TREE_CODE (field) == FIELD_DECL)
17838 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17839 /* Likewise, ensure variable offsets aren't removed. */
17840 if (!ignored_p
17841 && (t = DECL_FIELD_OFFSET (field))
17842 && VAR_P (t)
17843 && DECL_ARTIFICIAL (t))
17844 DECL_IGNORED_P (t) = 0;
17845 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17846 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17847 gimplify_type_sizes (TREE_TYPE (field), list_p);
17849 break;
17851 case POINTER_TYPE:
17852 case REFERENCE_TYPE:
17853 /* We used to recurse on the pointed-to type here, which turned out to
17854 be incorrect because its definition might refer to variables not
17855 yet initialized at this point if a forward declaration is involved.
17857 It was actually useful for anonymous pointed-to types to ensure
17858 that the sizes evaluation dominates every possible later use of the
17859 values. Restricting to such types here would be safe since there
17860 is no possible forward declaration around, but would introduce an
17861 undesirable middle-end semantic to anonymity. We then defer to
17862 front-ends the responsibility of ensuring that the sizes are
17863 evaluated both early and late enough, e.g. by attaching artificial
17864 type declarations to the tree. */
17865 break;
17867 default:
17868 break;
17871 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17872 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17874 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17876 TYPE_SIZE (t) = TYPE_SIZE (type);
17877 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17878 TYPE_SIZES_GIMPLIFIED (t) = 1;
17882 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17883 a size or position, has had all of its SAVE_EXPRs evaluated.
17884 We add any required statements to *STMT_P. */
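/* For example, a variable DECL_SIZE such as 'n * 32' is unshared and
   gimplified down to a single gimple value here, with the computation
   emitted into *STMT_P.  */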
17886 void
17887 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17889 tree expr = *expr_p;
17891 /* We don't do anything if the value isn't there, is constant, or contains
17892 a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
17893 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17894 will want to replace it with a new variable, but that will cause problems
17895 if this type is from outside the function. It's OK to have that here. */
17896 if (expr == NULL_TREE
17897 || is_gimple_constant (expr)
17898 || VAR_P (expr)
17899 || CONTAINS_PLACEHOLDER_P (expr))
17900 return;
17902 *expr_p = unshare_expr (expr);
17904 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17905 if the def vanishes. */
17906 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17908 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17909 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
17910 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17911 if (is_gimple_constant (*expr_p))
17912 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17915 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17916 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17917 is true, also gimplify the parameters. */
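/* On return, DECL_SAVED_TREE (FNDECL) has been cleared and the
   returned GIMPLE_BIND holds the whole lowered body, ready to be
   installed with gimple_set_body (see gimplify_function_tree
   below).  */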
17919 gbind *
17920 gimplify_body (tree fndecl, bool do_parms)
17922 location_t saved_location = input_location;
17923 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17924 gimple *outer_stmt;
17925 gbind *outer_bind;
17927 timevar_push (TV_TREE_GIMPLIFY);
17929 init_tree_ssa (cfun);
17931 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17932 gimplification. */
17933 default_rtl_profile ();
17935 gcc_assert (gimplify_ctxp == NULL);
17936 push_gimplify_context (true);
17938 if (flag_openacc || flag_openmp)
17940 gcc_assert (gimplify_omp_ctxp == NULL);
17941 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17942 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17945 /* Unshare most shared trees in the body and in that of any nested functions.
17946 It would seem we don't have to do this for nested functions because
17947 they are supposed to be output and then the outer function gimplified
17948 first, but the g++ front end doesn't always do it that way. */
17949 unshare_body (fndecl);
17950 unvisit_body (fndecl);
17952 /* Make sure input_location isn't set to something weird. */
17953 input_location = DECL_SOURCE_LOCATION (fndecl);
17955 /* Resolve callee-copies. This has to be done before processing
17956 the body so that DECL_VALUE_EXPR gets processed correctly. */
17957 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17959 /* Gimplify the function's body. */
17960 seq = NULL;
17961 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17962 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17963 if (!outer_stmt)
17965 outer_stmt = gimple_build_nop ();
17966 gimplify_seq_add_stmt (&seq, outer_stmt);
17969 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17970 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17971 if (gimple_code (outer_stmt) == GIMPLE_BIND
17972 && (gimple_seq_first_nondebug_stmt (seq)
17973 == gimple_seq_last_nondebug_stmt (seq)))
17975 outer_bind = as_a <gbind *> (outer_stmt);
17976 if (gimple_seq_first_stmt (seq) != outer_stmt
17977 || gimple_seq_last_stmt (seq) != outer_stmt)
17979 /* If there are debug stmts before or after outer_stmt, move them
17980 into the body of outer_bind. */
17981 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17982 gimple_seq second_seq = NULL;
17983 if (gimple_seq_first_stmt (seq) != outer_stmt
17984 && gimple_seq_last_stmt (seq) != outer_stmt)
17986 second_seq = gsi_split_seq_after (gsi);
17987 gsi_remove (&gsi, false);
17989 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17990 gsi_remove (&gsi, false);
17991 else
17993 gsi_remove (&gsi, false);
17994 second_seq = seq;
17995 seq = NULL;
17997 gimple_seq_add_seq_without_update (&seq,
17998 gimple_bind_body (outer_bind));
17999 gimple_seq_add_seq_without_update (&seq, second_seq);
18000 gimple_bind_set_body (outer_bind, seq);
18003 else
18004 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
18006 DECL_SAVED_TREE (fndecl) = NULL_TREE;
18008 /* If we had callee-copies statements, insert them at the beginning
18009 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
18010 if (!gimple_seq_empty_p (parm_stmts))
18012 tree parm;
18014 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
18015 if (parm_cleanup)
18017 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
18018 GIMPLE_TRY_FINALLY);
18019 parm_stmts = NULL;
18020 gimple_seq_add_stmt (&parm_stmts, g);
18022 gimple_bind_set_body (outer_bind, parm_stmts);
18024 for (parm = DECL_ARGUMENTS (current_function_decl);
18025 parm; parm = DECL_CHAIN (parm))
18026 if (DECL_HAS_VALUE_EXPR_P (parm))
18028 DECL_HAS_VALUE_EXPR_P (parm) = 0;
18029 DECL_IGNORED_P (parm) = 0;
18033 if ((flag_openacc || flag_openmp || flag_openmp_simd)
18034 && gimplify_omp_ctxp)
18036 delete_omp_context (gimplify_omp_ctxp);
18037 gimplify_omp_ctxp = NULL;
18040 pop_gimplify_context (outer_bind);
18041 gcc_assert (gimplify_ctxp == NULL);
18043 if (flag_checking && !seen_error ())
18044 verify_gimple_in_seq (gimple_bind_body (outer_bind));
18046 timevar_pop (TV_TREE_GIMPLIFY);
18047 input_location = saved_location;
18049 return outer_bind;
18052 typedef char *char_p; /* For DEF_VEC_P. */
18054 /* Return whether we should exclude FNDECL from instrumentation. */
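/* The two lists consulted here come from
   -finstrument-functions-exclude-function-list= and
   -finstrument-functions-exclude-file-list=.  Note that the match is
   done with strstr, i.e. plain substring matching, so excluding "foo"
   also excludes "foobar".  */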
18056 static bool
18057 flag_instrument_functions_exclude_p (tree fndecl)
18059 vec<char_p> *v;
18061 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
18062 if (v && v->length () > 0)
18064 const char *name;
18065 int i;
18066 char *s;
18068 name = lang_hooks.decl_printable_name (fndecl, 1);
18069 FOR_EACH_VEC_ELT (*v, i, s)
18070 if (strstr (name, s) != NULL)
18071 return true;
18074 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
18075 if (v && v->length () > 0)
18077 const char *name;
18078 int i;
18079 char *s;
18081 name = DECL_SOURCE_FILE (fndecl);
18082 FOR_EACH_VEC_ELT (*v, i, s)
18083 if (strstr (name, s) != NULL)
18084 return true;
18087 return false;
18090 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
18091 If COND_VAR is not NULL, it is a boolean variable guarding the call to
18092 the instrumentation function.  If STMT is not NULL, it is a statement
18093 to be executed just before the call to the instrumentation function. */
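/* A sketch of the sequence emitted when COND_VAR is given (label and
   temporary names are illustrative only):

     if (COND_VAR == 0) goto L_call; else goto L_skip;
   L_call:
     [STMT]
     return_addr = __builtin_return_address (0);
     FNCODE (&current_function, return_addr);
   L_skip:

   where FNCODE is e.g. __cyg_profile_func_enter.  */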
18095 static void
18096 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
18097 tree cond_var, gimple *stmt)
18099 /* The instrumentation hooks aren't going to call the instrumented
18100 function and the address they receive is expected to be matchable
18101 against symbol addresses. Make sure we don't create a trampoline,
18102 in case the current function is nested. */
18103 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
18104 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
18106 tree label_true, label_false;
18107 if (cond_var)
18109 label_true = create_artificial_label (UNKNOWN_LOCATION);
18110 label_false = create_artificial_label (UNKNOWN_LOCATION);
18111 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
18112 label_true, label_false);
18113 gimplify_seq_add_stmt (seq, cond);
18114 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
18115 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
18116 NOT_TAKEN));
18119 if (stmt)
18120 gimplify_seq_add_stmt (seq, stmt);
18122 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
18123 gcall *call = gimple_build_call (x, 1, integer_zero_node);
18124 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
18125 gimple_call_set_lhs (call, tmp_var);
18126 gimplify_seq_add_stmt (seq, call);
18127 x = builtin_decl_implicit (fncode);
18128 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
18129 gimplify_seq_add_stmt (seq, call);
18131 if (cond_var)
18132 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
18135 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
18136 node for the function we want to gimplify.
18138 The resulting sequence of GIMPLE statements is installed as the body
18139 of FNDECL via gimple_set_body; nothing is returned. */
void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      gimple_seq body = NULL, cleanup = NULL;
      gassign *assign;
      tree cond_var;

      /* If -finstrument-functions-once is specified, generate:

	   static volatile bool C.0 = false;
	   bool tmp_called;

	   tmp_called = C.0;
	   if (!tmp_called)
	     {
	       C.0 = true;
	       [call profiling enter function]
	     }

	 without specific protection for data races.  */
      if (flag_instrument_function_entry_exit > 1)
	{
	  tree first_var
	    = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			  VAR_DECL,
			  create_tmp_var_name ("C"),
			  boolean_type_node);
	  DECL_ARTIFICIAL (first_var) = 1;
	  DECL_IGNORED_P (first_var) = 1;
	  TREE_STATIC (first_var) = 1;
	  TREE_THIS_VOLATILE (first_var) = 1;
	  TREE_USED (first_var) = 1;
	  DECL_INITIAL (first_var) = boolean_false_node;
	  varpool_node::add (first_var);

	  cond_var = create_tmp_var (boolean_type_node, "tmp_called");
	  assign = gimple_build_assign (cond_var, first_var);
	  gimplify_seq_add_stmt (&body, assign);

	  assign = gimple_build_assign (first_var, boolean_true_node);
	}
      else
	{
	  cond_var = NULL_TREE;
	  assign = NULL;
	}

      build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
				  cond_var, assign);

      /* If -finstrument-functions-once is specified, generate:

	   if (!tmp_called)
	     [call profiling exit function]

	 without specific protection for data races.  */
      build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
				  cond_var, NULL);

      gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
      gimplify_seq_add_stmt (&body, tf);
      gbind *new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }
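
  /* An instrumented function body now has roughly this shape (a sketch,
     assuming plain -finstrument-functions; the BUILT_IN_PROFILE_FUNC_ENTER
     and _EXIT builtins correspond to the __cyg_profile_func_enter/exit
     user hooks):

       __cyg_profile_func_enter (<this_fn>, __builtin_return_address (0));
       try
	 {
	   <original body>
	 }
       finally
	 {
	   __cyg_profile_func_exit (<this_fn>, __builtin_return_address (0));
	 }
   */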
  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }
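
  /* IFN_TSAN_FUNC_EXIT is only a marker at this point; the tsan pass
     (tsan.cc) is expected to replace it later with an actual call to
     __tsan_func_exit, once the precise exit points are known.  */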
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
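
/* Note that the MEM_REF above dereferences a null pointer constant.  That
   is harmless here: the result is only used as a placeholder on error
   paths (or after a generated trap), purely so that downstream code sees
   an expression with the right type and mode.  */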
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call __builtin_trap to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the trap, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
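
  /* For instance (an illustrative source-level example, not from this
     file):

       char c = va_arg (ap, char);

     is diagnosed by the code above, because a char argument is promoted
     to int when passed through `...', so the matching va_arg type must
     be int.  */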

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
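
/* The IFN_VA_ARG call emitted above is not expanded here: functions that
   lack PROP_gimple_lva are expected to be picked up later by the stdarg
   lowering pass (tree-stdarg.cc), which rewrites IFN_VA_ARG into
   target-specific va_list accesses.  */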

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
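
/* Example use (a hypothetical caller; LHS_VAR and RHS_EXPR are made-up
   names):

     gimple_seq stmts = NULL;
     gimple *g = gimplify_assign (lhs_var, rhs_expr, &stmts);

   Any statements needed to gimplify RHS_EXPR are appended to STMTS ahead
   of the final assignment, and G points at that assignment.  */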

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}
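
/* These hooks back the per-context table of formal temporaries (see
   lookup_tmp_var earlier in this file), so that gimplifying the same
   expression twice can reuse a single temporary rather than creating a
   new one each time.  */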