/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "diagnostic.h"		/* For errorcount.  */
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
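
/* For illustration: a variable listed in an explicit "private" clause is
   recorded in its region's splay tree by combining a data-sharing class
   with modifier bits, e.g.

     omp_add_variable (ctx, decl, GOVD_PRIVATE | GOVD_EXPLICIT);

   Modifier flags such as GOVD_MAP_TO_ONLY are only meaningful together
   with the base flag they refine (here GOVD_MAP).  */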

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				/* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	/* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,	/* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
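
/* For illustration: when optimizing, two formal-temporary requests for an
   equal VAL reuse one temporary, e.g.

     tree t1 = lookup_tmp_var (val, true, false);
     tree t2 = lookup_tmp_var (val, true, false);  // t2 == t1

   because the second call finds the entry the first one inserted into
   temp_htab.  At -O0, or when VAL has side effects, each request gets a
   fresh temporary.  */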

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
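
/* For illustration, consider a GENERIC expression in which the front-end
   reused one node N for both operands of a PLUS_EXPR.  The walk sets
   TREE_VISITED on N when reaching the first operand; on reaching the
   second operand, TREE_VISITED is already set, so copy_if_shared_r below
   replaces that reference with a fresh copy made by mostly_copy_tree_r.
   Afterward the two operands are distinct nodes, and in-place
   gimplification of one can no longer corrupt the other.  */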

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}


/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
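
/* For illustration: a statement expression used as an initializer, e.g.

     int x = ({ int i = f (); i + 1; });

   arrives here as a BIND_EXPR of type int.  voidify_wrapper_expr gives
   the BIND_EXPR (and every wrapper on the way to its last statement)
   void type, creates a "retval" temporary, and rewrites the last
   statement into an assignment of i + 1 to that temporary, which is
   returned to the caller.  (A sketch; actual temporary names are
   compiler-generated.)  */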

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
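
/* For illustration, the pair built above appears in gimplified code
   roughly as (a sketch of an -fdump-tree-gimple dump):

     saved_stack.N = __builtin_stack_save ();
     try
       {
	 ... body that uses VLAs ...
       }
     finally
       {
	 __builtin_stack_restore (saved_stack.N);
       }

   See the use in gimplify_bind_expr below.  */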

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
   on the POISON flag, the shadow memory of the variable DECL.  The call
   is put at the location identified by the iterator IT, where the
   BEFORE flag determines on which side of the iterator the statement is
   inserted.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES
   depending on the POISON flag.  The created statements are appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in
	 order to prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must
		 specify an allocator clause unless a requires directive with
		 the dynamic_allocators clause is present in the same
		 compilation unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and
		     type(C_funptr); note that normal proc pointers are
		     rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match
		     TYPE_SIZE_UNIT; hence, for some decls, a size variable is
		     saved in the attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr)) is set
		     and GOMP_FREE added here, or that
		     DECL_HAS_VALUE_EXPR_P (t) is set and used in a condition
		     much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not properly nest
			 declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the
			 GOMP_alloc at the top, unless an allocator or size
			 expression requires putting it afterward; note that
			 the size is always later in generated code; for
			 strings, no size expr but still an expr might be
			 available.  As LTO does not handle a statement list,
			 'sl' has to be removed; done so by removing the
			 attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);

		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is
			 added here; for C/C++ it will be added in the
			 'cleanup' section after gimplification.  But Fortran
			 already has a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_EOL);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      ++e;
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 means that the GOMP_free call has already been added above;
		 and "omp allocate" is removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_EOL);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, this is an early return; add a
     PREDICT statement marking it as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
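
/* For illustration, in a function returning int,

     return a + b;

   is gimplified roughly into (a sketch of the dump)

     D.N = a + b;
     return D.N;

   where D.N is the per-function return_temp, so every return statement
   in the function funnels through the same temporary.  */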

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
1936 /* A helper function to be called via walk_tree. Mark all labels under *TP
1937 as being forced. To be called for DECL_INITIAL of static variables. */
1939 static tree
1940 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1942 if (TYPE_P (*tp))
1943 *walk_subtrees = 0;
1944 if (TREE_CODE (*tp) == LABEL_DECL)
1946 FORCED_LABEL (*tp) = 1;
1947 cfun->has_forced_label_in_static = 1;
1950 return NULL_TREE;
1953 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1954 Build a call to internal const function DEFERRED_INIT:
1955 1st argument: SIZE of the DECL;
1956 2nd argument: INIT_TYPE;
1957 3rd argument: NAME of the DECL;
1959 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
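/* (An illustrative example with assumed values: under
   -ftrivial-auto-var-init=zero, a local "int i;" would be initialized
   roughly as

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   where 4 is sizeof (int), 2 encodes the zero INIT_TYPE, and "i" is the
   decl's name, kept only for diagnostics.)  */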
1961 static void
1962 gimple_add_init_for_auto_var (tree decl,
1963 enum auto_init_type init_type,
1964 gimple_seq *seq_p)
1966 gcc_assert (auto_var_p (decl));
1967 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1968 location_t loc = EXPR_LOCATION (decl);
1969 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1971 tree init_type_node
1972 = build_int_cst (integer_type_node, (int) init_type);
1974 tree decl_name = NULL_TREE;
1975 if (DECL_NAME (decl))
1977 decl_name = build_string_literal (DECL_NAME (decl));
1979 else
1981 char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
1982 sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
1983 decl_name = build_string_literal (decl_name_anonymous);
1986 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
1987 TREE_TYPE (decl), 3,
1988 decl_size, init_type_node,
1989 decl_name);
1991 gimplify_assign (decl, call, seq_p);
1994 /* Generate padding initialization for automatic variable DECL.
1995 C guarantees that brace-initialization of an aggregate with fewer
1996 initializers than members will initialize the rest of the aggregate
1997 as-if it were static initialization. In turn, static initialization
1998 guarantees that padding is initialized to zeroes. So, we always
1999 initialize paddings to zeroes regardless of INIT_TYPE.
2000 To do the padding initialization, we insert a call to
2001 __builtin_clear_padding (&decl, 0, for_auto_init = true).
2002 Note, we add an additional dummy argument, 'for_auto_init', for
2003 __builtin_clear_padding to distinguish whether this call is for
2004 automatic variable initialization or not.
2005 */
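/* (An illustrative sketch: under -ftrivial-auto-var-init=pattern, a
   "long double x;" would additionally get, roughly,

     __builtin_clear_padding (&x, 1);

   where the trailing constant is the internal 'for_auto_init' dummy
   argument described above, not something a user writes.)  */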
2006 static void
2007 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
2008 gimple_seq *seq_p)
2010 tree addr_of_decl = NULL_TREE;
2011 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
2013 if (is_vla)
2015 /* The temporary address variable for this vla should be
2016 created in gimplify_vla_decl. */
2017 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
2018 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
2019 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
2021 else
2023 mark_addressable (decl);
2024 addr_of_decl = build_fold_addr_expr (decl);
2027 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
2028 build_one_cst (TREE_TYPE (addr_of_decl)));
2029 gimplify_seq_add_stmt (seq_p, call);
2032 /* Return true if DECL needs to be automatically initialized by the
2033 compiler. */
2034 static bool
2035 is_var_need_auto_init (tree decl)
2037 if (auto_var_p (decl)
2038 && (TREE_CODE (decl) != VAR_DECL
2039 || !DECL_HARD_REGISTER (decl))
2040 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2041 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
2042 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
2043 && !is_empty_type (TREE_TYPE (decl)))
2044 return true;
2045 return false;
2048 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2049 and initialization explicit. */
2051 static enum gimplify_status
2052 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
2054 tree stmt = *stmt_p;
2055 tree decl = DECL_EXPR_DECL (stmt);
2057 *stmt_p = NULL_TREE;
2059 if (TREE_TYPE (decl) == error_mark_node)
2060 return GS_ERROR;
2062 if ((TREE_CODE (decl) == TYPE_DECL
2063 || VAR_P (decl))
2064 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
2066 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
2067 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2068 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
2071 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2072 in case its size expressions contain problematic nodes like CALL_EXPR. */
2073 if (TREE_CODE (decl) == TYPE_DECL
2074 && DECL_ORIGINAL_TYPE (decl)
2075 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
2077 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
2078 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
2079 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
2082 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
2084 tree init = DECL_INITIAL (decl);
2085 bool is_vla = false;
2086 /* Check whether the decl has an FE-created VALUE_EXPR here, BEFORE
2087 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
2088 If the decl has a VALUE_EXPR that was created by the FE (usually
2089 the C++ FE), it's a proxy variable whose VALUE_EXPR the FE has
2090 already initialized, so we should not initialize it again. */
2091 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
2093 poly_uint64 size;
2094 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
2095 || (!TREE_STATIC (decl)
2096 && flag_stack_check == GENERIC_STACK_CHECK
2097 && maybe_gt (size,
2098 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
2100 gimplify_vla_decl (decl, seq_p);
2101 is_vla = true;
2104 if (asan_poisoned_variables
2105 && !is_vla
2106 && TREE_ADDRESSABLE (decl)
2107 && !TREE_STATIC (decl)
2108 && !DECL_HAS_VALUE_EXPR_P (decl)
2109 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
2110 && dbg_cnt (asan_use_after_scope)
2111 && !gimplify_omp_ctxp
2112 /* GNAT introduces temporaries to hold return values of calls in
2113 initializers of variables defined in other units, so the
2114 declaration of the variable is discarded completely. We do not
2115 want to issue poison calls for such dropped variables. */
2116 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
2117 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
2119 asan_poisoned_variables->add (decl);
2120 asan_poison_variable (decl, false, seq_p);
2121 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
2122 gimplify_ctxp->live_switch_vars->add (decl);
2125 /* Some front ends do not explicitly declare all anonymous
2126 artificial variables. We compensate here by declaring the
2127 variables, though it would be better if the front ends would
2128 explicitly declare them. */
2129 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
2130 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
2131 gimple_add_tmp_var (decl);
2133 if (init && init != error_mark_node)
2135 if (!TREE_STATIC (decl))
2137 DECL_INITIAL (decl) = NULL_TREE;
2138 init = build2 (INIT_EXPR, void_type_node, decl, init);
2139 gimplify_and_add (init, seq_p);
2140 ggc_free (init);
2141 /* Clear TREE_READONLY if we really have an initialization. */
2142 if (!DECL_INITIAL (decl)
2143 && !omp_privatize_by_reference (decl))
2144 TREE_READONLY (decl) = 0;
2146 else
2147 /* We must still examine initializers for static variables
2148 as they may contain a label address. */
2149 walk_tree (&init, force_labels_r, NULL, NULL);
2151 /* When there is no explicit initializer, and the user has
2152 requested it, insert an artificial initializer for this
2153 automatic variable. */
2154 else if (is_var_need_auto_init (decl)
2155 && !decl_had_value_expr_p)
2157 gimple_add_init_for_auto_var (decl,
2158 flag_auto_var_init,
2159 seq_p);
2160 /* The expansion of a call to the above .DEFERRED_INIT will apply
2161 block initialization to the whole space covered by this variable.
2162 As a result, all the paddings will be initialized to zeroes
2163 for zero initialization, and to 0xFE byte-repeatable patterns for
2164 pattern initialization.
2165 In order to make the paddings zeroes for pattern init too, we
2166 should add a call to __builtin_clear_padding to clear the
2167 paddings to zero, compatible with Clang's behavior.
2168 We cannot insert this call if the variable is a gimple register,
2169 since __builtin_clear_padding will take the address of the
2170 variable. As a result, if a long double/_Complex long double
2171 variable is spilled to the stack later, its padding is 0xFE. */
2172 if (flag_auto_var_init == AUTO_INIT_PATTERN
2173 && !is_gimple_reg (decl)
2174 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
2175 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
2179 return GS_ALL_DONE;
2182 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2183 and replacing the LOOP_EXPR with goto, but if the loop contains an
2184 EXIT_EXPR, we need to append a label for it to jump to. */
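/* (Illustratively, with assumed label names,

     LOOP_EXPR <body>

   becomes

     start:
       body
       goto start;
     exit:            <- only emitted if body contained an EXIT_EXPR

   with the exit label communicated via gimplify_ctxp->exit_label.)  */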
2186 static enum gimplify_status
2187 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2189 tree saved_label = gimplify_ctxp->exit_label;
2190 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2192 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
2194 gimplify_ctxp->exit_label = NULL_TREE;
2196 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
2198 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
2200 if (gimplify_ctxp->exit_label)
2201 gimplify_seq_add_stmt (pre_p,
2202 gimple_build_label (gimplify_ctxp->exit_label));
2204 gimplify_ctxp->exit_label = saved_label;
2206 *expr_p = NULL;
2207 return GS_ALL_DONE;
2210 /* Gimplify a statement list onto a sequence. These may be created either
2211 by an enlightened front-end, or by shortcut_cond_expr. */
2213 static enum gimplify_status
2214 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2216 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2218 tree_stmt_iterator i = tsi_start (*expr_p);
2220 while (!tsi_end_p (i))
2222 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2223 tsi_delink (&i);
2226 if (temp)
2228 *expr_p = temp;
2229 return GS_OK;
2232 return GS_ALL_DONE;
2236 /* Emit a warning for the unreachable statement STMT if needed.
2237 Return the statement itself when the warning is emitted, otherwise
2238 return NULL. */
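/* (An assumed example of what is diagnosed here:

     switch (x)
       {
         foo ();    <- warned: "statement will never be executed"
       case 1:
         ...
       }

   Compiler-generated gotos and -ftrivial-auto-var-init initializations
   in that position are exempted below.)  */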
2239 static gimple *
2240 emit_warn_switch_unreachable (gimple *stmt)
2242 if (gimple_code (stmt) == GIMPLE_GOTO
2243 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2244 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2245 /* Don't warn for compiler-generated gotos. These occur
2246 in Duff's devices, for example. */
2247 return NULL;
2248 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2249 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2250 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2251 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2252 || (is_gimple_assign (stmt)
2253 && gimple_assign_single_p (stmt)
2254 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2255 && gimple_call_internal_p (
2256 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2257 IFN_DEFERRED_INIT))))
2258 /* Don't warn for compiler-generated initializations for
2259 -ftrivial-auto-var-init.
2260 There are 3 cases:
2261 case 1: a call to .DEFERRED_INIT;
2262 case 2: a call to __builtin_clear_padding whose 2nd argument is
2263 present and non-zero;
2264 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2265 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2266 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2267 i1 = _1. */
2268 return NULL;
2269 else
2270 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2271 "statement will never be executed");
2272 return stmt;
2275 /* Callback for walk_gimple_seq. */
2277 static tree
2278 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2279 bool *handled_ops_p,
2280 struct walk_stmt_info *wi)
2282 gimple *stmt = gsi_stmt (*gsi_p);
2283 bool unreachable_issued = wi->info != NULL;
2285 *handled_ops_p = true;
2286 switch (gimple_code (stmt))
2288 case GIMPLE_TRY:
2289 /* A compiler-generated cleanup or a user-written try block.
2290 If it's empty, don't dive into it--that would result in
2291 worse location info. */
2292 if (gimple_try_eval (stmt) == NULL)
2294 if (warn_switch_unreachable && !unreachable_issued)
2295 wi->info = emit_warn_switch_unreachable (stmt);
2297 /* Stop when the auto-var-init warning is not on. */
2298 if (!warn_trivial_auto_var_init)
2299 return integer_zero_node;
2301 /* Fall through. */
2302 case GIMPLE_BIND:
2303 case GIMPLE_CATCH:
2304 case GIMPLE_EH_FILTER:
2305 case GIMPLE_TRANSACTION:
2306 /* Walk the sub-statements. */
2307 *handled_ops_p = false;
2308 break;
2310 case GIMPLE_DEBUG:
2311 /* Ignore these. We may generate them before declarations that
2312 are never executed. If there's something to warn about,
2313 there will be non-debug stmts too, and we'll catch those. */
2314 break;
2316 case GIMPLE_LABEL:
2317 /* Stop at the first label. */
2318 return integer_zero_node;
2319 case GIMPLE_CALL:
2320 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2322 *handled_ops_p = false;
2323 break;
2325 if (warn_trivial_auto_var_init
2326 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2327 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2329 /* Get the variable name from the 3rd argument of the call. */
2330 tree var_name = gimple_call_arg (stmt, 2);
2331 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2332 const char *var_name_str = TREE_STRING_POINTER (var_name);
2334 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2335 "%qs cannot be initialized with"
2336 "%<-ftrivial-auto-var_init%>",
2337 var_name_str);
2338 break;
2341 /* Fall through. */
2342 default:
2343 /* Check the first "real" statement (not a decl/lexical scope/...);
2344 issue a warning if needed. */
2345 if (warn_switch_unreachable && !unreachable_issued)
2346 wi->info = emit_warn_switch_unreachable (stmt);
2347 /* Stop when the auto-var-init warning is not on. */
2348 if (!warn_trivial_auto_var_init)
2349 return integer_zero_node;
2350 break;
2352 return NULL_TREE;
2356 /* Possibly warn about unreachable statements between a switch's
2357 controlling expression and the first case. Also warn when
2358 -ftrivial-auto-var-init cannot initialize an auto variable in
2359 such a situation. SEQ is the body of a switch expression. */
2361 static void
2362 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2364 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2365 /* This warning doesn't play well with Fortran when optimizations
2366 are on. */
2367 || lang_GNU_Fortran ()
2368 || seq == NULL)
2369 return;
2371 struct walk_stmt_info wi;
2373 memset (&wi, 0, sizeof (wi));
2374 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2378 /* A label entry that pairs label and a location. */
2379 struct label_entry
2381 tree label;
2382 location_t loc;
2385 /* Find LABEL in vector of label entries VEC. */
2387 static struct label_entry *
2388 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2390 unsigned int i;
2391 struct label_entry *l;
2393 FOR_EACH_VEC_ELT (*vec, i, l)
2394 if (l->label == label)
2395 return l;
2396 return NULL;
2399 /* Return true if LABEL, a LABEL_DECL, represents a case label
2400 in a vector of labels CASES. */
2402 static bool
2403 case_label_p (const vec<tree> *cases, tree label)
2405 unsigned int i;
2406 tree l;
2408 FOR_EACH_VEC_ELT (*cases, i, l)
2409 if (CASE_LABEL (l) == label)
2410 return true;
2411 return false;
2414 /* Find the last nondebug statement in a scope STMT. */
2416 static gimple *
2417 last_stmt_in_scope (gimple *stmt)
2419 if (!stmt)
2420 return NULL;
2422 switch (gimple_code (stmt))
2424 case GIMPLE_BIND:
2426 gbind *bind = as_a <gbind *> (stmt);
2427 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2428 return last_stmt_in_scope (stmt);
2431 case GIMPLE_TRY:
2433 gtry *try_stmt = as_a <gtry *> (stmt);
2434 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2435 gimple *last_eval = last_stmt_in_scope (stmt);
2436 if (gimple_stmt_may_fallthru (last_eval)
2437 && (last_eval == NULL
2438 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2439 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2441 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2442 return last_stmt_in_scope (stmt);
2444 else
2445 return last_eval;
2448 case GIMPLE_DEBUG:
2449 gcc_unreachable ();
2451 default:
2452 return stmt;
2456 /* Collect labels that may fall through into LABELS and return the statement
2457 preceding another case label, or a user-defined label. Store a location
2458 useful to give warnings at *PREVLOC (usually the location of the returned
2459 statement or of its surrounding scope). */
2461 static gimple *
2462 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2463 auto_vec <struct label_entry> *labels,
2464 location_t *prevloc)
2466 gimple *prev = NULL;
2468 *prevloc = UNKNOWN_LOCATION;
2471 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2473 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2474 which starts on a GIMPLE_SWITCH and ends with a break label.
2475 Handle that as a single statement that can fall through. */
2476 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2477 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2478 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2479 if (last
2480 && gimple_code (first) == GIMPLE_SWITCH
2481 && gimple_code (last) == GIMPLE_LABEL)
2483 tree label = gimple_label_label (as_a <glabel *> (last));
2484 if (SWITCH_BREAK_LABEL_P (label))
2486 prev = bind;
2487 gsi_next (gsi_p);
2488 continue;
2492 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2493 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2495 /* Nested scope. Only look at the last statement of
2496 the innermost scope. */
2497 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2498 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2499 if (last)
2501 prev = last;
2502 /* It might be a label without a location. Use the
2503 location of the scope then. */
2504 if (!gimple_has_location (prev))
2505 *prevloc = bind_loc;
2507 gsi_next (gsi_p);
2508 continue;
2511 /* Ifs are tricky. */
2512 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2514 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2515 tree false_lab = gimple_cond_false_label (cond_stmt);
2516 location_t if_loc = gimple_location (cond_stmt);
2518 /* If we have e.g.
2519 if (i > 1) goto <D.2259>; else goto D;
2520 we can't do much with the else-branch. */
2521 if (!DECL_ARTIFICIAL (false_lab))
2522 break;
2524 /* Go on until the false label, then one step back. */
2525 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2527 gimple *stmt = gsi_stmt (*gsi_p);
2528 if (gimple_code (stmt) == GIMPLE_LABEL
2529 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2530 break;
2533 /* Not found? Oops. */
2534 if (gsi_end_p (*gsi_p))
2535 break;
2537 /* A dead label can't fall through. */
2538 if (!UNUSED_LABEL_P (false_lab))
2540 struct label_entry l = { false_lab, if_loc };
2541 labels->safe_push (l);
2544 /* Go to the last statement of the then branch. */
2545 gsi_prev (gsi_p);
2547 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2548 <D.1759>:
2549 <stmt>;
2550 goto <D.1761>;
2551 <D.1760>:
2552 */
2553 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2554 && !gimple_has_location (gsi_stmt (*gsi_p)))
2556 /* Look at the statement before, it might be
2557 attribute fallthrough, in which case don't warn. */
2558 gsi_prev (gsi_p);
2559 bool fallthru_before_dest
2560 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2561 gsi_next (gsi_p);
2562 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2563 if (!fallthru_before_dest)
2565 struct label_entry l = { goto_dest, if_loc };
2566 labels->safe_push (l);
2569 /* This case is about
2570 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2571 <D.2022>:
2572 n = n + 1; // #1
2573 <D.2023>: // #2
2574 <D.1988>: // #3
2575 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2576 through to #3. So set PREV to #1. */
2577 else if (UNUSED_LABEL_P (false_lab))
2578 prev = gsi_stmt (*gsi_p);
2580 /* And move back. */
2581 gsi_next (gsi_p);
2584 /* Remember the last statement. Skip labels that are of no interest
2585 to us. */
2586 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2588 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2589 if (find_label_entry (labels, label))
2590 prev = gsi_stmt (*gsi_p);
2592 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2594 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2596 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2597 prev = gsi_stmt (*gsi_p);
2598 gsi_next (gsi_p);
2600 while (!gsi_end_p (*gsi_p)
2601 /* Stop if we find a case or a user-defined label. */
2602 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2603 || !gimple_has_location (gsi_stmt (*gsi_p))));
2605 if (prev && gimple_has_location (prev))
2606 *prevloc = gimple_location (prev);
2607 return prev;
2610 /* Return true if the switch fallthrough warning should occur. LABEL is
2611 the label statement that we're falling through to. */
2613 static bool
2614 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2616 gimple_stmt_iterator gsi = *gsi_p;
2618 /* Don't warn if the label is marked with a "falls through" comment. */
2619 if (FALLTHROUGH_LABEL_P (label))
2620 return false;
2622 /* Don't warn for non-case labels followed by a statement:
2623 case 0:
2624 foo ();
2625 label:
2626 bar ();
2627 as these are likely intentional. */
2628 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2630 tree l;
2631 while (!gsi_end_p (gsi)
2632 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2633 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2634 && !case_label_p (&gimplify_ctxp->case_labels, l))
2635 gsi_next_nondebug (&gsi);
2636 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2637 return false;
2640 /* Don't warn for terminated branches, i.e. when the subsequent case
2641 label immediately breaks. */
2642 gsi = *gsi_p;
2644 /* Skip all immediately following labels. */
2645 while (!gsi_end_p (gsi)
2646 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2647 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2648 gsi_next_nondebug (&gsi);
2650 /* { ... something; default:; } */
2651 if (gsi_end_p (gsi)
2652 /* { ... something; default: break; } or
2653 { ... something; default: goto L; } */
2654 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2655 /* { ... something; default: return; } */
2656 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2657 return false;
2659 return true;
2662 /* Callback for walk_gimple_seq. */
2664 static tree
2665 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2666 struct walk_stmt_info *)
2668 gimple *stmt = gsi_stmt (*gsi_p);
2670 *handled_ops_p = true;
2671 switch (gimple_code (stmt))
2673 case GIMPLE_TRY:
2674 case GIMPLE_BIND:
2675 case GIMPLE_CATCH:
2676 case GIMPLE_EH_FILTER:
2677 case GIMPLE_TRANSACTION:
2678 /* Walk the sub-statements. */
2679 *handled_ops_p = false;
2680 break;
2682 /* Find a sequence of the form:
2684 GIMPLE_LABEL
2685 [...]
2686 <may fallthru stmt>
2687 GIMPLE_LABEL
2689 and possibly warn. */
2690 case GIMPLE_LABEL:
2692 /* Found a label. Skip all immediately following labels. */
2693 while (!gsi_end_p (*gsi_p)
2694 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2695 gsi_next_nondebug (gsi_p);
2697 /* There might be no more statements. */
2698 if (gsi_end_p (*gsi_p))
2699 return integer_zero_node;
2701 /* Vector of labels that fall through. */
2702 auto_vec <struct label_entry> labels;
2703 location_t prevloc;
2704 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2706 /* There might be no more statements. */
2707 if (gsi_end_p (*gsi_p))
2708 return integer_zero_node;
2710 gimple *next = gsi_stmt (*gsi_p);
2711 tree label;
2712 /* If what follows is a label, then we may have a fallthrough. */
2713 if (gimple_code (next) == GIMPLE_LABEL
2714 && gimple_has_location (next)
2715 && (label = gimple_label_label (as_a <glabel *> (next)))
2716 && prev != NULL)
2718 struct label_entry *l;
2719 bool warned_p = false;
2720 auto_diagnostic_group d;
2721 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2722 /* Quiet. */;
2723 else if (gimple_code (prev) == GIMPLE_LABEL
2724 && (label = gimple_label_label (as_a <glabel *> (prev)))
2725 && (l = find_label_entry (&labels, label)))
2726 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2727 "this statement may fall through");
2728 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2729 /* Try to be clever and don't warn when the statement
2730 can't actually fall through. */
2731 && gimple_stmt_may_fallthru (prev)
2732 && prevloc != UNKNOWN_LOCATION)
2733 warned_p = warning_at (prevloc,
2734 OPT_Wimplicit_fallthrough_,
2735 "this statement may fall through");
2736 if (warned_p)
2737 inform (gimple_location (next), "here");
2739 /* Mark this label as processed so as to prevent multiple
2740 warnings in nested switches. */
2741 FALLTHROUGH_LABEL_P (label) = true;
2743 /* So that the next warn_implicit_fallthrough_r invocation starts
2744 looking for a new sequence beginning with this label. */
2745 gsi_prev (gsi_p);
2748 break;
2749 default:
2750 break;
2752 return NULL_TREE;
2755 /* Warn when a switch case falls through. */
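/* (An assumed example of what is diagnosed:

     switch (x)
       {
       case 1:
         n++;       <- "this statement may fall through"
       case 2:      <- "here"
         ...
       }

   No warning is issued when the fall-through is annotated, e.g. with
   [[fallthrough]]; before "case 2:".)  */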
2757 static void
2758 maybe_warn_implicit_fallthrough (gimple_seq seq)
2760 if (!warn_implicit_fallthrough)
2761 return;
2763 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2764 if (!(lang_GNU_C ()
2765 || lang_GNU_CXX ()
2766 || lang_GNU_OBJC ()))
2767 return;
2769 struct walk_stmt_info wi;
2770 memset (&wi, 0, sizeof (wi));
2771 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2774 /* Callback for walk_gimple_seq. */
2776 static tree
2777 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2778 struct walk_stmt_info *wi)
2780 gimple *stmt = gsi_stmt (*gsi_p);
2782 *handled_ops_p = true;
2783 switch (gimple_code (stmt))
2785 case GIMPLE_TRY:
2786 case GIMPLE_BIND:
2787 case GIMPLE_CATCH:
2788 case GIMPLE_EH_FILTER:
2789 case GIMPLE_TRANSACTION:
2790 /* Walk the sub-statements. */
2791 *handled_ops_p = false;
2792 break;
2793 case GIMPLE_CALL:
2794 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2795 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2797 location_t loc = gimple_location (stmt);
2798 gsi_remove (gsi_p, true);
2799 wi->removed_stmt = true;
2801 /* The nothrow flag is added by genericize_c_loop to mark a fallthrough
2802 statement at the end of some loop's body. Those should always
2803 be diagnosed, either because they indeed don't precede
2804 a case label or default label, or because the next statement
2805 is not within the same iteration statement. */
2806 if ((stmt->subcode & GF_CALL_NOTHROW) != 0)
2808 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2809 "a case label or default label");
2810 break;
2813 if (gsi_end_p (*gsi_p))
2815 static_cast<location_t *>(wi->info)[0] = BUILTINS_LOCATION;
2816 static_cast<location_t *>(wi->info)[1] = loc;
2817 break;
2820 bool found = false;
2822 gimple_stmt_iterator gsi2 = *gsi_p;
2823 stmt = gsi_stmt (gsi2);
2824 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2826 /* Go on until the artificial label. */
2827 tree goto_dest = gimple_goto_dest (stmt);
2828 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2830 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2831 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2832 == goto_dest)
2833 break;
2836 /* Not found? Stop. */
2837 if (gsi_end_p (gsi2))
2838 break;
2840 /* Look one past it. */
2841 gsi_next (&gsi2);
2844 /* We're looking for a case label or default label here. */
2845 while (!gsi_end_p (gsi2))
2847 stmt = gsi_stmt (gsi2);
2848 if (gimple_code (stmt) == GIMPLE_LABEL)
2850 tree label = gimple_label_label (as_a <glabel *> (stmt));
2851 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2853 found = true;
2854 break;
2857 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2859 else if (!is_gimple_debug (stmt))
2860 /* Anything else is not expected. */
2861 break;
2862 gsi_next (&gsi2);
2864 if (!found)
2865 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2866 "a case label or default label");
2868 break;
2869 default:
2870 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2871 break;
2873 return NULL_TREE;
2876 /* Expand all FALLTHROUGH () calls in SEQ. */
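/* (Illustrative sketch: a user-written

     case 1:
       ...
       __attribute__((fallthrough));
     case 2:

   reaches this point as an IFN_FALLTHROUGH call; the call is removed,
   and it is diagnosed when no case or default label in fact follows.)  */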
2878 static void
2879 expand_FALLTHROUGH (gimple_seq *seq_p)
2881 struct walk_stmt_info wi;
2882 location_t loc[2];
2883 memset (&wi, 0, sizeof (wi));
2884 loc[0] = UNKNOWN_LOCATION;
2885 loc[1] = UNKNOWN_LOCATION;
2886 wi.info = (void *) &loc[0];
2887 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2888 if (loc[0] != UNKNOWN_LOCATION)
2889 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2890 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2891 pedwarn (loc[1], 0, "attribute %<fallthrough%> not preceding "
2892 "a case label or default label");
2896 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2897 branch to. */
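/* (An illustrative result with assumed label names: for

     switch (x) { case 1: f (); break; default: g (); }

   we emit roughly

     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>: f (); goto <D.2>;
     <D.3>: g ();
     <D.2>:

   where <D.2> is the break label.)  */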
2899 static enum gimplify_status
2900 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2902 tree switch_expr = *expr_p;
2903 gimple_seq switch_body_seq = NULL;
2904 enum gimplify_status ret;
2905 tree index_type = TREE_TYPE (switch_expr);
2906 if (index_type == NULL_TREE)
2907 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2909 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2910 fb_rvalue);
2911 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2912 return ret;
2914 if (SWITCH_BODY (switch_expr))
2916 vec<tree> labels;
2917 vec<tree> saved_labels;
2918 hash_set<tree> *saved_live_switch_vars = NULL;
2919 tree default_case = NULL_TREE;
2920 gswitch *switch_stmt;
2922 /* Save old labels, get new ones from body, then restore the old
2923 labels. Save all the things from the switch body to append after. */
2924 saved_labels = gimplify_ctxp->case_labels;
2925 gimplify_ctxp->case_labels.create (8);
2927 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2928 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2929 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2930 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2931 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2932 else
2933 gimplify_ctxp->live_switch_vars = NULL;
2935 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2936 gimplify_ctxp->in_switch_expr = true;
2938 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2940 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2941 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2942 maybe_warn_implicit_fallthrough (switch_body_seq);
2943 /* Only do this for the outermost GIMPLE_SWITCH. */
2944 if (!gimplify_ctxp->in_switch_expr)
2945 expand_FALLTHROUGH (&switch_body_seq);
2947 labels = gimplify_ctxp->case_labels;
2948 gimplify_ctxp->case_labels = saved_labels;
2950 if (gimplify_ctxp->live_switch_vars)
2952 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2953 delete gimplify_ctxp->live_switch_vars;
2955 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2957 preprocess_case_label_vec_for_gimple (labels, index_type,
2958 &default_case);
2960 bool add_bind = false;
2961 if (!default_case)
2963 glabel *new_default;
2965 default_case
2966 = build_case_label (NULL_TREE, NULL_TREE,
2967 create_artificial_label (UNKNOWN_LOCATION));
2968 if (old_in_switch_expr)
2970 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2971 add_bind = true;
2973 new_default = gimple_build_label (CASE_LABEL (default_case));
2974 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2976 else if (old_in_switch_expr)
2978 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2979 if (last && gimple_code (last) == GIMPLE_LABEL)
2981 tree label = gimple_label_label (as_a <glabel *> (last));
2982 if (SWITCH_BREAK_LABEL_P (label))
2983 add_bind = true;
2987 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2988 default_case, labels);
2989 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2990 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2991 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2992 so that we can easily find the start and end of the switch
2993 statement. */
2994 if (add_bind)
2996 gimple_seq bind_body = NULL;
2997 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2998 gimple_seq_add_seq (&bind_body, switch_body_seq);
2999 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
3000 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
3001 gimplify_seq_add_stmt (pre_p, bind);
3003 else
3005 gimplify_seq_add_stmt (pre_p, switch_stmt);
3006 gimplify_seq_add_seq (pre_p, switch_body_seq);
3008 labels.release ();
3010 else
3011 gcc_unreachable ();
3013 return GS_ALL_DONE;
3016 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
3018 static enum gimplify_status
3019 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
3021 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
3022 == current_function_decl);
3024 tree label = LABEL_EXPR_LABEL (*expr_p);
3025 glabel *label_stmt = gimple_build_label (label);
3026 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3027 gimplify_seq_add_stmt (pre_p, label_stmt);
3029 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3030 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3031 NOT_TAKEN));
3032 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3033 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3034 TAKEN));
3036 return GS_ALL_DONE;
3039 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
3041 static enum gimplify_status
3042 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
3044 struct gimplify_ctx *ctxp;
3045 glabel *label_stmt;
3047 /* Invalid programs can play Duff's Device type games with, for example,
3048 #pragma omp parallel. At least in the C front end, we don't
3049 detect such invalid branches until after gimplification, in the
3050 diagnose_omp_blocks pass. */
3051 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
3052 if (ctxp->case_labels.exists ())
3053 break;
3055 tree label = CASE_LABEL (*expr_p);
3056 label_stmt = gimple_build_label (label);
3057 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3058 ctxp->case_labels.safe_push (*expr_p);
3059 gimplify_seq_add_stmt (pre_p, label_stmt);
3061 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3062 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3063 NOT_TAKEN));
3064 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3065 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3066 TAKEN));
3068 return GS_ALL_DONE;
3071 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
3072 if necessary. */
3074 tree
3075 build_and_jump (tree *label_p)
3077 if (label_p == NULL)
3078 /* If there's nowhere to jump, just fall through. */
3079 return NULL_TREE;
3081 if (*label_p == NULL_TREE)
3083 tree label = create_artificial_label (UNKNOWN_LOCATION);
3084 *label_p = label;
3087 return build1 (GOTO_EXPR, void_type_node, *label_p);
3090 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3091 This also involves building a label to jump to and communicating it to
3092 gimplify_loop_expr through gimplify_ctxp->exit_label. */
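/* (That is, illustratively, EXIT_EXPR <cond> becomes

     if (cond) goto exit_label; else ;

   with exit_label later emitted by the enclosing gimplify_loop_expr.)  */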
3094 static enum gimplify_status
3095 gimplify_exit_expr (tree *expr_p)
3097 tree cond = TREE_OPERAND (*expr_p, 0);
3098 tree expr;
3100 expr = build_and_jump (&gimplify_ctxp->exit_label);
3101 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
3102 *expr_p = expr;
3104 return GS_OK;
3107 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3108 different from its canonical type, wrap the whole thing inside a
3109 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3110 type.
3112 The canonical type of a COMPONENT_REF is the type of the field being
3113 referenced--unless the field is a bit-field which can be read directly
3114 in a smaller mode, in which case the canonical type is the
3115 sign-appropriate type corresponding to that mode. */
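/* (An assumed example: reading "s.f" where f is declared "int f : 8"
   may get the canonical type "signed char" here, since such a bit-field
   can be read directly in QImode; the NOP_EXPR back to "int" then makes
   the widening explicit.)  */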
3117 static void
3118 canonicalize_component_ref (tree *expr_p)
3120 tree expr = *expr_p;
3121 tree type;
3123 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
3125 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
3126 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
3127 else
3128 type = TREE_TYPE (TREE_OPERAND (expr, 1));
3130 /* One could argue that all the stuff below is not necessary for
3131 the non-bitfield case and declare it a FE error if type
3132 adjustment would be needed. */
3133 if (TREE_TYPE (expr) != type)
3135 #ifdef ENABLE_TYPES_CHECKING
3136 tree old_type = TREE_TYPE (expr);
3137 #endif
3138 int type_quals;
3140 /* We need to preserve qualifiers and propagate them from
3141 operand 0. */
3142 type_quals = TYPE_QUALS (type)
3143 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
3144 if (TYPE_QUALS (type) != type_quals)
3145 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
3147 /* Set the type of the COMPONENT_REF to the underlying type. */
3148 TREE_TYPE (expr) = type;
3150 #ifdef ENABLE_TYPES_CHECKING
3151 /* It is now a FE error, if the conversion from the canonical
3152 type to the original expression type is not useless. */
3153 gcc_assert (useless_type_conversion_p (old_type, type));
3154 #endif
3158 /* If a NOP conversion is changing a pointer to array of foo to a pointer
3159 to foo, embed that change in the ADDR_EXPR by converting
3160 T array[U];
3161 (T *)&array
3162 ==>
3163 &array[L]
3164 where L is the lower bound. For simplicity, only do this for constant
3165 lower bound.
3166 The constraint is that the type of &array[L] is trivially convertible
3167 to T *. */
3169 static void
3170 canonicalize_addr_expr (tree *expr_p)
3172 tree expr = *expr_p;
3173 tree addr_expr = TREE_OPERAND (expr, 0);
3174 tree datype, ddatype, pddatype;
3176 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3177 if (!POINTER_TYPE_P (TREE_TYPE (expr))
3178 || TREE_CODE (addr_expr) != ADDR_EXPR)
3179 return;
3181 /* The addr_expr type should be a pointer to an array. */
3182 datype = TREE_TYPE (TREE_TYPE (addr_expr));
3183 if (TREE_CODE (datype) != ARRAY_TYPE)
3184 return;
3186 /* The pointer to element type shall be trivially convertible to
3187 the expression pointer type. */
3188 ddatype = TREE_TYPE (datype);
3189 pddatype = build_pointer_type (ddatype);
3190 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3191 pddatype))
3192 return;
3194 /* The lower bound and element sizes must be constant. */
3195 if (!TYPE_SIZE_UNIT (ddatype)
3196 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3197 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3198 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3199 return;
3201 /* All checks succeeded. Build a new node to merge the cast. */
3202 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3203 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3204 NULL_TREE, NULL_TREE);
3205 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3207 /* We can have stripped a required restrict qualifier above. */
3208 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3209 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
3212 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3213 underneath as appropriate. */
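/* (Illustratively: in "(T) (U) x" the inner sign-preserving conversions
   are stripped first; if the remaining outermost conversion is useless
   it is dropped entirely, and a conversion to a non-register type is
   turned into a VIEW_CONVERT_EXPR below.)  */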
3215 static enum gimplify_status
3216 gimplify_conversion (tree *expr_p)
3218 location_t loc = EXPR_LOCATION (*expr_p);
3219 gcc_assert (CONVERT_EXPR_P (*expr_p));
3221 /* Then strip away all but the outermost conversion. */
3222 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3224 /* And remove the outermost conversion if it's useless. */
3225 if (tree_ssa_useless_type_conversion (*expr_p))
3226 *expr_p = TREE_OPERAND (*expr_p, 0);
3228 /* If we still have a conversion at the toplevel,
3229 then canonicalize some constructs. */
3230 if (CONVERT_EXPR_P (*expr_p))
3232 tree sub = TREE_OPERAND (*expr_p, 0);
3234 /* If a NOP conversion is changing the type of a COMPONENT_REF
3235 expression, then canonicalize its type now in order to expose more
3236 redundant conversions. */
3237 if (TREE_CODE (sub) == COMPONENT_REF)
3238 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3240 /* If a NOP conversion is changing a pointer to array of foo
3241 to a pointer to foo, embed that change in the ADDR_EXPR. */
3242 else if (TREE_CODE (sub) == ADDR_EXPR)
3243 canonicalize_addr_expr (expr_p);
3246 /* If we have a conversion to a non-register type force the
3247 use of a VIEW_CONVERT_EXPR instead. */
3248 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3249 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3250 TREE_OPERAND (*expr_p, 0));
3252 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3253 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3254 TREE_SET_CODE (*expr_p, NOP_EXPR);
3256 return GS_OK;
3259 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3260 DECL_VALUE_EXPR, and it's worth re-examining things. */
3262 static enum gimplify_status
3263 gimplify_var_or_parm_decl (tree *expr_p)
3265 tree decl = *expr_p;
3267 /* ??? If this is a local variable, and it has not been seen in any
3268 outer BIND_EXPR, then it's probably the result of a duplicate
3269 declaration, for which we've already issued an error. It would
3270 be really nice if the front end wouldn't leak these at all.
3271 Currently the only known culprit is C++ destructors, as seen
3272 in g++.old-deja/g++.jason/binding.C.
3273 Other possible culprits are size expressions for variably modified
3274 types which are lost in the FE or not gimplified correctly. */
3275 if (VAR_P (decl)
3276 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3277 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3278 && decl_function_context (decl) == current_function_decl)
3280 gcc_assert (seen_error ());
3281 return GS_ERROR;
3284 /* When within an OMP context, notice uses of variables. */
3285 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3286 return GS_ALL_DONE;
3288 /* If the decl is an alias for another expression, substitute it now. */
3289 if (DECL_HAS_VALUE_EXPR_P (decl))
3291 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3292 return GS_OK;
3295 return GS_ALL_DONE;
3298 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3300 static void
3301 recalculate_side_effects (tree t)
3303 enum tree_code code = TREE_CODE (t);
3304 int len = TREE_OPERAND_LENGTH (t);
3305 int i;
3307 switch (TREE_CODE_CLASS (code))
3309 case tcc_expression:
3310 switch (code)
3312 case INIT_EXPR:
3313 case MODIFY_EXPR:
3314 case VA_ARG_EXPR:
3315 case PREDECREMENT_EXPR:
3316 case PREINCREMENT_EXPR:
3317 case POSTDECREMENT_EXPR:
3318 case POSTINCREMENT_EXPR:
3319 /* All of these have side-effects, no matter what their
3320 operands are. */
3321 return;
3323 default:
3324 break;
3326 /* Fall through. */
3328 case tcc_comparison: /* a comparison expression */
3329 case tcc_unary: /* a unary arithmetic expression */
3330 case tcc_binary: /* a binary arithmetic expression */
3331 case tcc_reference: /* a reference */
3332 case tcc_vl_exp: /* a function call */
3333 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3334 for (i = 0; i < len; ++i)
3336 tree op = TREE_OPERAND (t, i);
3337 if (op && TREE_SIDE_EFFECTS (op))
3338 TREE_SIDE_EFFECTS (t) = 1;
3340 break;
3342 case tcc_constant:
3343 /* No side-effects. */
3344 return;
3346 default:
3347 gcc_unreachable ();
3351 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3352 node *EXPR_P.
3354 compound_lval
3355 : min_lval '[' val ']'
3356 | min_lval '.' ID
3357 | compound_lval '[' val ']'
3358 | compound_lval '.' ID
3360 This is not part of the original SIMPLE definition, which separates
3361 array and member references, but it seems reasonable to handle them
3362 together. Also, this way we don't run into problems with union
3363 aliasing; gcc requires that for accesses through a union to alias, the
3364 union reference must be explicit, which was not always the case when we
3365 were splitting up array and member refs.
3367 PRE_P points to the sequence where side effects that must happen before
3368 *EXPR_P should be stored.
3370 POST_P points to the sequence where side effects that must happen after
3371 *EXPR_P should be stored. */
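/* (An illustrative walk with an assumed temporary: for "a[i].f" with
   constant bounds and sizes, the base "a" is gimplified first, then the
   index, roughly

     i.0 = i;
     ... a[i.0].f ...

   following the three steps described above.)  */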
3373 static enum gimplify_status
3374 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3375 fallback_t fallback)
3377 tree *p;
3378 enum gimplify_status ret = GS_ALL_DONE, tret;
3379 int i;
3380 location_t loc = EXPR_LOCATION (*expr_p);
3381 tree expr = *expr_p;
3383 /* Create a stack of the subexpressions so later we can walk them in
3384 order from inner to outer. */
3385 auto_vec<tree, 10> expr_stack;
3387 /* We can handle anything that get_inner_reference can deal with. */
3388 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3390 restart:
3391 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3392 if (TREE_CODE (*p) == INDIRECT_REF)
3393 *p = fold_indirect_ref_loc (loc, *p);
3395 if (handled_component_p (*p))
3397 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3398 additional COMPONENT_REFs. */
3399 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3400 && gimplify_var_or_parm_decl (p) == GS_OK)
3401 goto restart;
3402 else
3403 break;
3405 expr_stack.safe_push (*p);
3408 gcc_assert (expr_stack.length ());
3410 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3411 walked through and P points to the innermost expression.
3413 Java requires that we elaborate nodes in source order. That
3414 means we must gimplify the inner expression followed by each of
3415 the indices, in order. But we can't gimplify the inner
3416 expression until we deal with any variable bounds, sizes, or
3417 positions in order to deal with PLACEHOLDER_EXPRs.
3419 The base expression may contain a statement expression that
3420 has declarations used in size expressions, so has to be
3421 gimplified before gimplifying the size expressions.
3423 So we do this in three steps. First we deal with variable
3424 bounds, sizes, and positions, then we gimplify the base and
3425 ensure it is memory if needed, then we deal with the annotations
3426 for any variables in the components and any indices, from left
3427 to right. */
3429 bool need_non_reg = false;
3430 for (i = expr_stack.length () - 1; i >= 0; i--)
3432 tree t = expr_stack[i];
3434 if (error_operand_p (TREE_OPERAND (t, 0)))
3435 return GS_ERROR;
3437 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3439 /* Deal with the low bound and element type size and put them into
3440 the ARRAY_REF. If these values are set, they have already been
3441 gimplified. */
3442 if (TREE_OPERAND (t, 2) == NULL_TREE)
3444 tree low = unshare_expr (array_ref_low_bound (t));
3445 if (!is_gimple_min_invariant (low))
3447 TREE_OPERAND (t, 2) = low;
3451 if (TREE_OPERAND (t, 3) == NULL_TREE)
3453 tree elmt_size = array_ref_element_size (t);
3454 if (!is_gimple_min_invariant (elmt_size))
3456 elmt_size = unshare_expr (elmt_size);
3457 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3458 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3460 /* Divide the element size by the alignment of the element
3461 type (above). */
3462 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3463 elmt_size, factor);
3465 TREE_OPERAND (t, 3) = elmt_size;
3468 need_non_reg = true;
3470 else if (TREE_CODE (t) == COMPONENT_REF)
3472 /* Set the field offset into T and gimplify it. */
3473 if (TREE_OPERAND (t, 2) == NULL_TREE)
3475 tree offset = component_ref_field_offset (t);
3476 if (!is_gimple_min_invariant (offset))
3478 offset = unshare_expr (offset);
3479 tree field = TREE_OPERAND (t, 1);
3480 tree factor
3481 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3483 /* Divide the offset by its alignment. */
3484 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3485 offset, factor);
3487 TREE_OPERAND (t, 2) = offset;
3490 need_non_reg = true;
3492 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3493 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3494 is a non-register type then require the base object to be a
3495 non-register as well. */
3496 need_non_reg = true;
3499 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3500 so as to match the min_lval predicate. Failure to do so may result
3501 in the creation of large aggregate temporaries. */
3502 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3503 fallback | fb_lvalue);
3504 ret = MIN (ret, tret);
3505 if (ret == GS_ERROR)
3506 return GS_ERROR;
3508 /* Step 2a: if we have component references we do not support on
3509 registers then make sure the base isn't a register. Of course
3510 we can only do so if an rvalue is OK. */
3511 if (need_non_reg && (fallback & fb_rvalue))
3512 prepare_gimple_addressable (p, pre_p);
3515 /* Step 3: gimplify size expressions and the indices and operands of
3516 ARRAY_REF. During this loop we also remove any useless conversions.
3517 If we operate on a register also make sure to properly gimplify
3518 to individual operations. */
3520 bool reg_operations = is_gimple_reg (*p);
3521 for (; expr_stack.length () > 0; )
3523 tree t = expr_stack.pop ();
3525 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3527 gcc_assert (!reg_operations);
3529 /* Gimplify the low bound and element type size. */
3530 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3531 is_gimple_reg, fb_rvalue);
3532 ret = MIN (ret, tret);
3534 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3535 is_gimple_reg, fb_rvalue);
3536 ret = MIN (ret, tret);
3538 /* Gimplify the dimension. */
3539 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3540 is_gimple_val, fb_rvalue);
3541 ret = MIN (ret, tret);
3543 else if (TREE_CODE (t) == COMPONENT_REF)
3545 gcc_assert (!reg_operations);
3547 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3548 is_gimple_reg, fb_rvalue);
3549 ret = MIN (ret, tret);
3551 else if (reg_operations)
3553 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3554 is_gimple_val, fb_rvalue);
3555 ret = MIN (ret, tret);
3558 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3560 /* The innermost expression P may have originally had
3561 TREE_SIDE_EFFECTS set which would have caused all the outer
3562 expressions in *EXPR_P leading to P to also have had
3563 TREE_SIDE_EFFECTS set. */
3564 recalculate_side_effects (t);
3567 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3568 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3570 canonicalize_component_ref (expr_p);
3573 expr_stack.release ();
3575 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3577 return ret;
3580 /* Gimplify the self modifying expression pointed to by EXPR_P
3581 (++, --, +=, -=).
3583 PRE_P points to the list where side effects that must happen before
3584 *EXPR_P should be stored.
3586 POST_P points to the list where side effects that must happen after
3587 *EXPR_P should be stored.
3589 WANT_VALUE is nonzero iff we want to use the value of this expression
3590 in another expression.
3592 ARITH_TYPE is the type the computation should be performed in. */
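/* (An illustrative example with an assumed temporary name: with
   WANT_VALUE set, "b = i++" gimplifies roughly to

     i.0 = i;
     i = i.0 + 1;
     b = i.0;

   whereas a postfix use whose value is unused is handled as the cheaper
   prefix form.)  */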
3594 enum gimplify_status
3595 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3596 bool want_value, tree arith_type)
3598 enum tree_code code;
3599 tree lhs, lvalue, rhs, t1;
3600 gimple_seq post = NULL, *orig_post_p = post_p;
3601 bool postfix;
3602 enum tree_code arith_code;
3603 enum gimplify_status ret;
3604 location_t loc = EXPR_LOCATION (*expr_p);
3606 code = TREE_CODE (*expr_p);
3608 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3609 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3611 /* Prefix or postfix? */
3612 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3613 /* Faster to treat as prefix if result is not used. */
3614 postfix = want_value;
3615 else
3616 postfix = false;
3618 /* For postfix, make sure the inner expression's post side effects
3619 are executed after side effects from this expression. */
3620 if (postfix)
3621 post_p = &post;
3623 /* Add or subtract? */
3624 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3625 arith_code = PLUS_EXPR;
3626 else
3627 arith_code = MINUS_EXPR;
3629 /* Gimplify the LHS into a GIMPLE lvalue. */
3630 lvalue = TREE_OPERAND (*expr_p, 0);
3631 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3632 if (ret == GS_ERROR)
3633 return ret;
3635 /* Extract the operands to the arithmetic operation. */
3636 lhs = lvalue;
3637 rhs = TREE_OPERAND (*expr_p, 1);
3639 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3640 that as the result value and in the postqueue operation. */
3641 if (postfix)
3643 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3644 if (ret == GS_ERROR)
3645 return ret;
3647 lhs = get_initialized_tmp_var (lhs, pre_p);
3650 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3651 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3653 rhs = convert_to_ptrofftype_loc (loc, rhs);
3654 if (arith_code == MINUS_EXPR)
3655 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3656 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3658 else
3659 t1 = fold_convert (TREE_TYPE (*expr_p),
3660 fold_build2 (arith_code, arith_type,
3661 fold_convert (arith_type, lhs),
3662 fold_convert (arith_type, rhs)));
3664 if (postfix)
3666 gimplify_assign (lvalue, t1, pre_p);
3667 gimplify_seq_add_seq (orig_post_p, post);
3668 *expr_p = lhs;
3669 return GS_ALL_DONE;
3671 else
3673 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3674 return GS_OK;
3678 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
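/* (I.e., illustratively, for EXPR of a variably sized TYPE this builds

     WITH_SIZE_EXPR <EXPR, TYPE_SIZE_UNIT (TYPE)>

   after substituting any PLACEHOLDER_EXPRs in the size from EXPR.)  */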
3680 static void
3681 maybe_with_size_expr (tree *expr_p)
3683 tree expr = *expr_p;
3684 tree type = TREE_TYPE (expr);
3685 tree size;
3687 /* If we've already wrapped this or the type is error_mark_node, we can't do
3688 anything. */
3689 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3690 || type == error_mark_node)
3691 return;
3693 /* If the size isn't known or is a constant, we have nothing to do. */
3694 size = TYPE_SIZE_UNIT (type);
3695 if (!size || poly_int_tree_p (size))
3696 return;
3698 /* Otherwise, make a WITH_SIZE_EXPR. */
3699 size = unshare_expr (size);
3700 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3701 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3704 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3705 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3706 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3707 gimplified to an SSA name. */
3709 enum gimplify_status
3710 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3711 bool allow_ssa)
3713 bool (*test) (tree);
3714 fallback_t fb;
3716 /* In general, we allow lvalues for function arguments to avoid
3717 extra overhead of copying large aggregates out of even larger
3718 aggregates into temporaries only to copy the temporaries to
3719 the argument list. Make optimizers happy by pulling out to
3720 temporaries those types that fit in registers. */
3721 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3722 test = is_gimple_val, fb = fb_rvalue;
3723 else
3725 test = is_gimple_lvalue, fb = fb_either;
3726 /* Also strip a TARGET_EXPR that would force an extra copy. */
3727 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3729 tree init = TARGET_EXPR_INITIAL (*arg_p);
3730 if (init
3731 && !VOID_TYPE_P (TREE_TYPE (init)))
3732 *arg_p = init;
3736 /* If this is a variable sized type, we must remember the size. */
3737 maybe_with_size_expr (arg_p);
3739 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3740 /* Make sure arguments have the same location as the function call
3741 itself. */
3742 protected_set_expr_location (*arg_p, call_location);
3744 /* There is a sequence point before a function call. Side effects in
3745 the argument list must occur before the actual call. So, when
3746 gimplifying arguments, force gimplify_expr to use an internal
3747 post queue which is then appended to the end of PRE_P. */
3748 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
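/* For example (temporary name invented), gimplifying the argument of
   `foo (i++)' may not leave the increment pending in a post queue; it
   is forced onto PRE_P, giving roughly

     i.0 = i;
     i = i.0 + 1;
     foo (i.0);

   so the side effect is complete before the call executes.  */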
3751 /* Don't fold inside offloading or taskreg regions: it can break code by
3752 adding decl references that weren't in the source. We'll do it during
3753 omplower pass instead. */
3755 static bool
3756 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3758 struct gimplify_omp_ctx *ctx;
3759 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3760 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3761 return false;
3762 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3763 return false;
3764 /* Delay folding of builtins until the IL is in a consistent state
3765 so the diagnostic machinery can do a better job. */
3766 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3767 return false;
3768 return fold_stmt (gsi);
3771 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3772 WANT_VALUE is true if the result of the call is desired. */
3774 static enum gimplify_status
3775 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3777 tree fndecl, parms, p, fnptrtype;
3778 enum gimplify_status ret;
3779 int i, nargs;
3780 gcall *call;
3781 bool builtin_va_start_p = false;
3782 location_t loc = EXPR_LOCATION (*expr_p);
3784 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3786 /* For reliable diagnostics during inlining, it is necessary that
3787 every call_expr be annotated with file and line. */
3788 if (! EXPR_HAS_LOCATION (*expr_p))
3789 SET_EXPR_LOCATION (*expr_p, input_location);
3791 /* Gimplify internal functions created in the FEs. */
3792 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3794 if (want_value)
3795 return GS_ALL_DONE;
3797 nargs = call_expr_nargs (*expr_p);
3798 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3799 auto_vec<tree> vargs (nargs);
3801 if (ifn == IFN_ASSUME)
3803 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3805 /* If the [[assume (cond)]]; condition is simple
3806 enough and can be evaluated unconditionally
3807 without side-effects, expand it as
3808 if (!cond) __builtin_unreachable (); */
3809 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3810 *expr_p = build3 (COND_EXPR, void_type_node,
3811 CALL_EXPR_ARG (*expr_p, 0), void_node,
3812 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3813 fndecl, 0));
3814 return GS_OK;
3816 /* If not optimizing, ignore the assumptions. */
3817 if (!optimize || seen_error ())
3819 *expr_p = NULL_TREE;
3820 return GS_ALL_DONE;
3822 /* Temporarily, until gimple lowering, transform
3823 .ASSUME (cond);
3824 into:
3825 [[assume (guard)]]
3826 {
3827 guard = cond;
3828 }
3829 such that gimple lowering can outline the condition into
3830 a separate function easily. */
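/* A concrete (illustrative) instance: `[[assume (f (x))]]' becomes

     [[assume (D.1)]]
     {
       D.1 = f (x);
     }

   where D.1 stands for the boolean guard temporary created below; the
   name is invented here, the shape is the transform just described.  */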
3831 tree guard = create_tmp_var (boolean_type_node);
3832 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3833 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3834 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3835 push_gimplify_context ();
3836 gimple_seq body = NULL;
3837 gimple *g = gimplify_and_return_first (*expr_p, &body);
3838 pop_gimplify_context (g);
3839 g = gimple_build_assume (guard, body);
3840 gimple_set_location (g, loc);
3841 gimplify_seq_add_stmt (pre_p, g);
3842 *expr_p = NULL_TREE;
3843 return GS_ALL_DONE;
3846 for (i = 0; i < nargs; i++)
3848 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3849 EXPR_LOCATION (*expr_p));
3850 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3853 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3854 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3855 gimplify_seq_add_stmt (pre_p, call);
3856 return GS_ALL_DONE;
3859 /* This may be a call to a builtin function.
3861 Builtin function calls may be transformed into different
3862 (and more efficient) builtin function calls under certain
3863 circumstances. Unfortunately, gimplification can muck things
3864 up enough that the builtin expanders are not aware that certain
3865 transformations are still valid.
3867 So we attempt transformation/gimplification of the call before
3868 we gimplify the CALL_EXPR. At this time we do not manage to
3869 transform all calls in the same manner as the expanders do, but
3870 we do transform most of them. */
3871 fndecl = get_callee_fndecl (*expr_p);
3872 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3873 switch (DECL_FUNCTION_CODE (fndecl))
3875 CASE_BUILT_IN_ALLOCA:
3876 /* If the call has been built for a variable-sized object, then we
3877 want to restore the stack level when the enclosing BIND_EXPR is
3878 exited to reclaim the allocated space; otherwise, we precisely
3879 need to do the opposite and preserve the latest stack level. */
3880 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3881 gimplify_ctxp->save_stack = true;
3882 else
3883 gimplify_ctxp->keep_stack = true;
3884 break;
3886 case BUILT_IN_VA_START:
3888 builtin_va_start_p = true;
3889 if (call_expr_nargs (*expr_p) < 2)
3891 error ("too few arguments to function %<va_start%>");
3892 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3893 return GS_OK;
3896 if (fold_builtin_next_arg (*expr_p, true))
3898 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3899 return GS_OK;
3901 break;
3904 case BUILT_IN_EH_RETURN:
3905 cfun->calls_eh_return = true;
3906 break;
3908 case BUILT_IN_CLEAR_PADDING:
3909 if (call_expr_nargs (*expr_p) == 1)
3911 /* Remember the original type of the argument in an internal
3912 dummy second argument, as pointer conversions are useless in
3913 GIMPLE. Also mark this call as not for automatic
3914 initialization in the internal dummy third argument. */
3915 p = CALL_EXPR_ARG (*expr_p, 0);
3916 *expr_p
3917 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3918 build_zero_cst (TREE_TYPE (p)));
3919 return GS_OK;
3921 break;
3923 default:
3926 if (fndecl && fndecl_built_in_p (fndecl))
3928 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3929 if (new_tree && new_tree != *expr_p)
3931 /* There was a transformation of this call which computes the
3932 same value, but in a more efficient way. Return and try
3933 again. */
3934 *expr_p = new_tree;
3935 return GS_OK;
3939 /* Remember the original function pointer type. */
3940 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3942 if (flag_openmp
3943 && fndecl
3944 && cfun
3945 && (cfun->curr_properties & PROP_gimple_any) == 0)
3947 tree variant = omp_resolve_declare_variant (fndecl);
3948 if (variant != fndecl)
3949 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3952 /* There is a sequence point before the call, so any side effects in
3953 the calling expression must occur before the actual call. Force
3954 gimplify_expr to use an internal post queue. */
3955 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3956 is_gimple_call_addr, fb_rvalue);
3958 if (ret == GS_ERROR)
3959 return GS_ERROR;
3961 nargs = call_expr_nargs (*expr_p);
3963 /* Get argument types for verification. */
3964 fndecl = get_callee_fndecl (*expr_p);
3965 parms = NULL_TREE;
3966 if (fndecl)
3967 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3968 else
3969 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3971 if (fndecl && DECL_ARGUMENTS (fndecl))
3972 p = DECL_ARGUMENTS (fndecl);
3973 else if (parms)
3974 p = parms;
3975 else
3976 p = NULL_TREE;
3977 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3980 /* If the last argument is __builtin_va_arg_pack () and it is not
3981 passed as a named argument, decrease the number of CALL_EXPR
3982 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3983 if (!p
3984 && i < nargs
3985 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3987 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3988 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3990 if (last_arg_fndecl
3991 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3993 tree call = *expr_p;
3995 --nargs;
3996 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3997 CALL_EXPR_FN (call),
3998 nargs, CALL_EXPR_ARGP (call));
4000 /* Copy all CALL_EXPR flags, location and block, except
4001 CALL_EXPR_VA_ARG_PACK flag. */
4002 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
4003 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
4004 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
4005 = CALL_EXPR_RETURN_SLOT_OPT (call);
4006 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
4007 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
4009 /* Set CALL_EXPR_VA_ARG_PACK. */
4010 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
4014 /* If the call returns twice then after building the CFG the call
4015 argument computations will no longer dominate the call because
4016 we add an abnormal incoming edge to the call. So do not use SSA
4017 vars there. */
4018 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
4020 /* Gimplify the function arguments. */
4021 if (nargs > 0)
4023 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
4024 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
4025 PUSH_ARGS_REVERSED ? i-- : i++)
4027 enum gimplify_status t;
4029 /* Avoid gimplifying the second argument to va_start, which needs to
4030 be the plain PARM_DECL. */
4031 if ((i != 1) || !builtin_va_start_p)
4033 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
4034 EXPR_LOCATION (*expr_p), ! returns_twice);
4036 if (t == GS_ERROR)
4037 ret = GS_ERROR;
4042 /* Gimplify the static chain. */
4043 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
4045 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
4046 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
4047 else
4049 enum gimplify_status t;
4050 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
4051 EXPR_LOCATION (*expr_p), ! returns_twice);
4052 if (t == GS_ERROR)
4053 ret = GS_ERROR;
4057 /* Verify the function result. */
4058 if (want_value && fndecl
4059 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
4061 error_at (loc, "using result of function returning %<void%>");
4062 ret = GS_ERROR;
4065 /* Try this again in case gimplification exposed something. */
4066 if (ret != GS_ERROR)
4068 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
4070 if (new_tree && new_tree != *expr_p)
4072 /* There was a transformation of this call which computes the
4073 same value, but in a more efficient way. Return and try
4074 again. */
4075 *expr_p = new_tree;
4076 return GS_OK;
4079 else
4081 *expr_p = error_mark_node;
4082 return GS_ERROR;
4085 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on
4086 the call expression. This allows us to eliminate redundant or useless
4087 calls to "const" functions. */
4088 if (TREE_CODE (*expr_p) == CALL_EXPR)
4090 int flags = call_expr_flags (*expr_p);
4091 if (flags & (ECF_CONST | ECF_PURE)
4092 /* An infinite loop is considered a side effect. */
4093 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
4094 TREE_SIDE_EFFECTS (*expr_p) = 0;
4097 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
4098 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
4099 form and delegate the creation of a GIMPLE_CALL to
4100 gimplify_modify_expr. This is always possible because when
4101 WANT_VALUE is true, the caller wants the result of this call into
4102 a temporary, which means that we will emit an INIT_EXPR in
4103 internal_get_tmp_var which will then be handled by
4104 gimplify_modify_expr. */
4105 if (!want_value)
4107 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
4108 have to do is replicate it as a GIMPLE_CALL tuple. */
4109 gimple_stmt_iterator gsi;
4110 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
4111 notice_special_calls (call);
4112 gimplify_seq_add_stmt (pre_p, call);
4113 gsi = gsi_last (*pre_p);
4114 maybe_fold_stmt (&gsi);
4115 *expr_p = NULL_TREE;
4117 else
4118 /* Remember the original function type. */
4119 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
4120 CALL_EXPR_FN (*expr_p));
4122 return ret;
4125 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
4126 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
4128 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
4129 condition is true or false, respectively. If null, we should generate
4130 our own to skip over the evaluation of this specific expression.
4132 LOCUS is the source location of the COND_EXPR.
4134 This function is the tree equivalent of do_jump.
4136 shortcut_cond_r should only be called by shortcut_cond_expr. */
4138 static tree
4139 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
4140 location_t locus)
4142 tree local_label = NULL_TREE;
4143 tree t, expr = NULL;
4145 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
4146 retain the shortcut semantics. Just insert the gotos here;
4147 shortcut_cond_expr will append the real blocks later. */
4148 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4150 location_t new_locus;
4152 /* Turn if (a && b) into
4154 if (a); else goto no;
4155 if (b) goto yes; else goto no;
4156 (no:) */
4158 if (false_label_p == NULL)
4159 false_label_p = &local_label;
4161 /* Keep the original source location on the first 'if'. */
4162 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
4163 append_to_statement_list (t, &expr);
4165 /* Set the source location of the && on the second 'if'. */
4166 new_locus = rexpr_location (pred, locus);
4167 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4168 new_locus);
4169 append_to_statement_list (t, &expr);
4171 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4173 location_t new_locus;
4175 /* Turn if (a || b) into
4177 if (a) goto yes;
4178 if (b) goto yes; else goto no;
4179 (yes:) */
4181 if (true_label_p == NULL)
4182 true_label_p = &local_label;
4184 /* Keep the original source location on the first 'if'. */
4185 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
4186 append_to_statement_list (t, &expr);
4188 /* Set the source location of the || on the second 'if'. */
4189 new_locus = rexpr_location (pred, locus);
4190 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4191 new_locus);
4192 append_to_statement_list (t, &expr);
4194 else if (TREE_CODE (pred) == COND_EXPR
4195 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
4196 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
4198 location_t new_locus;
4200 /* As long as we're messing with gotos, turn if (a ? b : c) into
4201 if (a)
4202 if (b) goto yes; else goto no;
4203 else
4204 if (c) goto yes; else goto no;
4206 Don't do this if one of the arms has void type, which can happen
4207 in C++ when the arm is throw. */
4209 /* Keep the original source location on the first 'if'. Set the source
4210 location of the ? on the second 'if'. */
4211 new_locus = rexpr_location (pred, locus);
4212 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
4213 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
4214 false_label_p, locus),
4215 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
4216 false_label_p, new_locus));
4218 else
4220 expr = build3 (COND_EXPR, void_type_node, pred,
4221 build_and_jump (true_label_p),
4222 build_and_jump (false_label_p));
4223 SET_EXPR_LOCATION (expr, locus);
4226 if (local_label)
4228 t = build1 (LABEL_EXPR, void_type_node, local_label);
4229 append_to_statement_list (t, &expr);
4232 return expr;
4235 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4236 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4237 statement, if it is the last one. Otherwise, return NULL. */
4239 static tree
4240 find_goto (tree expr)
4242 if (!expr)
4243 return NULL_TREE;
4245 if (TREE_CODE (expr) == GOTO_EXPR)
4246 return expr;
4248 if (TREE_CODE (expr) != STATEMENT_LIST)
4249 return NULL_TREE;
4251 tree_stmt_iterator i = tsi_start (expr);
4253 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4254 tsi_next (&i);
4256 if (!tsi_one_before_end_p (i))
4257 return NULL_TREE;
4259 return find_goto (tsi_stmt (i));
4262 /* Same as find_goto, except that it returns NULL if the destination
4263 is not a LABEL_DECL. */
4265 static inline tree
4266 find_goto_label (tree expr)
4268 tree dest = find_goto (expr);
4269 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4270 return dest;
4271 return NULL_TREE;
4274 /* Given a conditional expression EXPR with short-circuit boolean
4275 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4276 predicate apart into the equivalent sequence of conditionals. */
4278 static tree
4279 shortcut_cond_expr (tree expr)
4281 tree pred = TREE_OPERAND (expr, 0);
4282 tree then_ = TREE_OPERAND (expr, 1);
4283 tree else_ = TREE_OPERAND (expr, 2);
4284 tree true_label, false_label, end_label, t;
4285 tree *true_label_p;
4286 tree *false_label_p;
4287 bool emit_end, emit_false, jump_over_else;
4288 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4289 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4291 /* First do simple transformations. */
4292 if (!else_se)
4294 /* If there is no 'else', turn
4295 if (a && b) then c
4296 into
4297 if (a) if (b) then c. */
4298 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4300 /* Keep the original source location on the first 'if'. */
4301 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4302 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4303 /* Set the source location of the && on the second 'if'. */
4304 if (rexpr_has_location (pred))
4305 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4306 then_ = shortcut_cond_expr (expr);
4307 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4308 pred = TREE_OPERAND (pred, 0);
4309 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4310 SET_EXPR_LOCATION (expr, locus);
4314 if (!then_se)
4316 /* If there is no 'then', turn
4317 if (a || b); else d
4318 into
4319 if (a); else if (b); else d. */
4320 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4322 /* Keep the original source location on the first 'if'. */
4323 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4324 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4325 /* Set the source location of the || on the second 'if'. */
4326 if (rexpr_has_location (pred))
4327 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4328 else_ = shortcut_cond_expr (expr);
4329 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4330 pred = TREE_OPERAND (pred, 0);
4331 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4332 SET_EXPR_LOCATION (expr, locus);
4336 /* If we're done, great. */
4337 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4338 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4339 return expr;
4341 /* Otherwise we need to mess with gotos. Change
4342 if (a) c; else d;
4343 to
4344 if (a); else goto no;
4345 c; goto end;
4346 no: d; end:
4347 and recursively gimplify the condition. */
4349 true_label = false_label = end_label = NULL_TREE;
4351 /* If our arms just jump somewhere, hijack those labels so we don't
4352 generate jumps to jumps. */
4354 if (tree then_goto = find_goto_label (then_))
4356 true_label = GOTO_DESTINATION (then_goto);
4357 then_ = NULL;
4358 then_se = false;
4361 if (tree else_goto = find_goto_label (else_))
4363 false_label = GOTO_DESTINATION (else_goto);
4364 else_ = NULL;
4365 else_se = false;
4368 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4369 if (true_label)
4370 true_label_p = &true_label;
4371 else
4372 true_label_p = NULL;
4374 /* The 'else' branch also needs a label if it contains interesting code. */
4375 if (false_label || else_se)
4376 false_label_p = &false_label;
4377 else
4378 false_label_p = NULL;
4380 /* If there was nothing else in our arms, just forward the label(s). */
4381 if (!then_se && !else_se)
4382 return shortcut_cond_r (pred, true_label_p, false_label_p,
4383 EXPR_LOC_OR_LOC (expr, input_location));
4385 /* If our last subexpression already has a terminal label, reuse it. */
4386 if (else_se)
4387 t = expr_last (else_);
4388 else if (then_se)
4389 t = expr_last (then_);
4390 else
4391 t = NULL;
4392 if (t && TREE_CODE (t) == LABEL_EXPR)
4393 end_label = LABEL_EXPR_LABEL (t);
4395 /* If we don't care about jumping to the 'else' branch, jump to the end
4396 if the condition is false. */
4397 if (!false_label_p)
4398 false_label_p = &end_label;
4400 /* We only want to emit these labels if we aren't hijacking them. */
4401 emit_end = (end_label == NULL_TREE);
4402 emit_false = (false_label == NULL_TREE);
4404 /* We only emit the jump over the else clause if we have to--if the
4405 then clause may fall through. Otherwise we can wind up with a
4406 useless jump and a useless label at the end of gimplified code,
4407 which will cause us to think that this conditional as a whole
4408 falls through even if it doesn't. If we then inline a function
4409 which ends with such a condition, that can cause us to issue an
4410 inappropriate warning about control reaching the end of a
4411 non-void function. */
4412 jump_over_else = block_may_fallthru (then_);
4414 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4415 EXPR_LOC_OR_LOC (expr, input_location));
4417 expr = NULL;
4418 append_to_statement_list (pred, &expr);
4420 append_to_statement_list (then_, &expr);
4421 if (else_se)
4423 if (jump_over_else)
4425 tree last = expr_last (expr);
4426 t = build_and_jump (&end_label);
4427 if (rexpr_has_location (last))
4428 SET_EXPR_LOCATION (t, rexpr_location (last));
4429 append_to_statement_list (t, &expr);
4431 if (emit_false)
4433 t = build1 (LABEL_EXPR, void_type_node, false_label);
4434 append_to_statement_list (t, &expr);
4436 append_to_statement_list (else_, &expr);
4438 if (emit_end && end_label)
4440 t = build1 (LABEL_EXPR, void_type_node, end_label);
4441 append_to_statement_list (t, &expr);
4444 return expr;
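/* Worked example (labels invented): `if (a && b) x = 1; else x = 2;'
   is rewritten along the lines of

     if (a) ; else goto no;
     if (b) ; else goto no;
     x = 1;
     goto end;
   no:
     x = 2;
   end:

   with shortcut_cond_r expanding the predicate and the arm and label
   handling above supplying the gotos and label statements.  */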
4447 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4449 tree
4450 gimple_boolify (tree expr)
4452 tree type = TREE_TYPE (expr);
4453 location_t loc = EXPR_LOCATION (expr);
4455 if (TREE_CODE (expr) == NE_EXPR
4456 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4457 && integer_zerop (TREE_OPERAND (expr, 1)))
4459 tree call = TREE_OPERAND (expr, 0);
4460 tree fn = get_callee_fndecl (call);
4462 /* For __builtin_expect ((long) (x), y) recurse into x as well
4463 if x is truth_value_p. */
4464 if (fn
4465 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4466 && call_expr_nargs (call) == 2)
4468 tree arg = CALL_EXPR_ARG (call, 0);
4469 if (arg)
4471 if (TREE_CODE (arg) == NOP_EXPR
4472 && TREE_TYPE (arg) == TREE_TYPE (call))
4473 arg = TREE_OPERAND (arg, 0);
4474 if (truth_value_p (TREE_CODE (arg)))
4476 arg = gimple_boolify (arg);
4477 CALL_EXPR_ARG (call, 0)
4478 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4484 switch (TREE_CODE (expr))
4486 case TRUTH_AND_EXPR:
4487 case TRUTH_OR_EXPR:
4488 case TRUTH_XOR_EXPR:
4489 case TRUTH_ANDIF_EXPR:
4490 case TRUTH_ORIF_EXPR:
4491 /* Also boolify the arguments of truth exprs. */
4492 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4493 /* FALLTHRU */
4495 case TRUTH_NOT_EXPR:
4496 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4498 /* These expressions always produce boolean results. */
4499 if (TREE_CODE (type) != BOOLEAN_TYPE)
4500 TREE_TYPE (expr) = boolean_type_node;
4501 return expr;
4503 case ANNOTATE_EXPR:
4504 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4506 case annot_expr_ivdep_kind:
4507 case annot_expr_unroll_kind:
4508 case annot_expr_no_vector_kind:
4509 case annot_expr_vector_kind:
4510 case annot_expr_parallel_kind:
4511 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4512 if (TREE_CODE (type) != BOOLEAN_TYPE)
4513 TREE_TYPE (expr) = boolean_type_node;
4514 return expr;
4515 default:
4516 gcc_unreachable ();
4519 default:
4520 if (COMPARISON_CLASS_P (expr))
4522 /* These expressions always produce boolean results. */
4523 if (TREE_CODE (type) != BOOLEAN_TYPE)
4524 TREE_TYPE (expr) = boolean_type_node;
4525 return expr;
4527 /* Other expressions that get here must have boolean values, but
4528 might need to be converted to the appropriate mode. */
4529 if (TREE_CODE (type) == BOOLEAN_TYPE)
4530 return expr;
4531 return fold_convert_loc (loc, boolean_type_node, expr);
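/* Two illustrative cases (not exhaustive): a comparison such as
   `a < b' that was given integer type by a front end simply has its
   type retargeted to boolean_type_node, while a plain `int n' used as
   a predicate falls through to the final conversion, conceptually
   equivalent to the test `n != 0', so every caller can rely on a
   BOOLEAN_TYPE result.  */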
4535 /* Given a conditional expression *EXPR_P without side effects, gimplify
4536 its operands. New statements are inserted to PRE_P. */
4538 static enum gimplify_status
4539 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4541 tree expr = *expr_p, cond;
4542 enum gimplify_status ret, tret;
4543 enum tree_code code;
4545 cond = gimple_boolify (COND_EXPR_COND (expr));
4547 /* We need to handle && and || specially, as their gimplification
4548 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4549 code = TREE_CODE (cond);
4550 if (code == TRUTH_ANDIF_EXPR)
4551 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4552 else if (code == TRUTH_ORIF_EXPR)
4553 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4554 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4555 COND_EXPR_COND (*expr_p) = cond;
4557 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4558 is_gimple_val, fb_rvalue);
4559 ret = MIN (ret, tret);
4560 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4561 is_gimple_val, fb_rvalue);
4563 return MIN (ret, tret);
4566 /* Return true if evaluating EXPR could trap.
4567 EXPR is GENERIC, while tree_could_trap_p can be called
4568 only on GIMPLE. */
4570 bool
4571 generic_expr_could_trap_p (tree expr)
4573 unsigned i, n;
4575 if (!expr || is_gimple_val (expr))
4576 return false;
4578 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4579 return true;
4581 n = TREE_OPERAND_LENGTH (expr);
4582 for (i = 0; i < n; i++)
4583 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4584 return true;
4586 return false;
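/* Illustrative cases: `x + 1' on ordinary integers cannot trap and so
   may be evaluated unconditionally, whereas `x / y' (possible division
   by zero) or `*p' (possible bad dereference) report true here;
   gimplify_cond_expr uses this to decide whether both arms may be
   evaluated speculatively as a pure rvalue COND_EXPR.  */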
4589 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4590 into
4592 if (p)                  if (p)
4593   t1 = a;                 a;
4594 else            or      else
4595   t1 = b;                 b;
4596 t1;
4598 The second form is used when *EXPR_P is of type void.
4600 PRE_P points to the list where side effects that must happen before
4601 *EXPR_P should be stored. */
4603 static enum gimplify_status
4604 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4606 tree expr = *expr_p;
4607 tree type = TREE_TYPE (expr);
4608 location_t loc = EXPR_LOCATION (expr);
4609 tree tmp, arm1, arm2;
4610 enum gimplify_status ret;
4611 tree label_true, label_false, label_cont;
4612 bool have_then_clause_p, have_else_clause_p;
4613 gcond *cond_stmt;
4614 enum tree_code pred_code;
4615 gimple_seq seq = NULL;
4617 /* If this COND_EXPR has a value, copy the values into a temporary within
4618 the arms. */
4619 if (!VOID_TYPE_P (type))
4621 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4622 tree result;
4624 /* If either an rvalue is ok or we do not require an lvalue, create the
4625 temporary. But we cannot do that if the type is addressable. */
4626 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4627 && !TREE_ADDRESSABLE (type))
4629 if (gimplify_ctxp->allow_rhs_cond_expr
4630 /* If either branch has side effects or could trap, it can't be
4631 evaluated unconditionally. */
4632 && !TREE_SIDE_EFFECTS (then_)
4633 && !generic_expr_could_trap_p (then_)
4634 && !TREE_SIDE_EFFECTS (else_)
4635 && !generic_expr_could_trap_p (else_))
4636 return gimplify_pure_cond_expr (expr_p, pre_p);
4638 tmp = create_tmp_var (type, "iftmp");
4639 result = tmp;
4642 /* Otherwise, only create and copy references to the values. */
4643 else
4645 type = build_pointer_type (type);
4647 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4648 then_ = build_fold_addr_expr_loc (loc, then_);
4650 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4651 else_ = build_fold_addr_expr_loc (loc, else_);
4653 expr
4654 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4656 tmp = create_tmp_var (type, "iftmp");
4657 result = build_simple_mem_ref_loc (loc, tmp);
4660 /* Build the new then clause, `tmp = then_;'. But don't build the
4661 assignment if the value is void; in C++ that happens when the arm is a throw. */
4662 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4663 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4665 /* Similarly, build the new else clause, `tmp = else_;'. */
4666 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4667 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4669 TREE_TYPE (expr) = void_type_node;
4670 recalculate_side_effects (expr);
4672 /* Move the COND_EXPR to the prequeue. */
4673 gimplify_stmt (&expr, pre_p);
4675 *expr_p = result;
4676 return GS_ALL_DONE;
4679 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4680 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4681 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4682 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4684 /* Make sure the condition has BOOLEAN_TYPE. */
4685 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4687 /* Break apart && and || conditions. */
4688 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4689 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4691 expr = shortcut_cond_expr (expr);
4693 if (expr != *expr_p)
4695 *expr_p = expr;
4697 /* We can't rely on gimplify_expr to re-gimplify the expanded
4698 form properly, as cleanups might cause the target labels to be
4699 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4700 set up a conditional context. */
4701 gimple_push_condition ();
4702 gimplify_stmt (expr_p, &seq);
4703 gimple_pop_condition (pre_p);
4704 gimple_seq_add_seq (pre_p, seq);
4706 return GS_ALL_DONE;
4710 /* Now do the normal gimplification. */
4712 /* Gimplify condition. */
4713 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4714 is_gimple_condexpr_for_cond, fb_rvalue);
4715 if (ret == GS_ERROR)
4716 return GS_ERROR;
4717 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4719 gimple_push_condition ();
4721 have_then_clause_p = have_else_clause_p = false;
4722 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4723 if (label_true
4724 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4725 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4726 have different locations, otherwise we end up with incorrect
4727 location information on the branches. */
4728 && (optimize
4729 || !EXPR_HAS_LOCATION (expr)
4730 || !rexpr_has_location (label_true)
4731 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4733 have_then_clause_p = true;
4734 label_true = GOTO_DESTINATION (label_true);
4736 else
4737 label_true = create_artificial_label (UNKNOWN_LOCATION);
4738 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4739 if (label_false
4740 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4741 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4742 have different locations, otherwise we end up with incorrect
4743 location information on the branches. */
4744 && (optimize
4745 || !EXPR_HAS_LOCATION (expr)
4746 || !rexpr_has_location (label_false)
4747 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4749 have_else_clause_p = true;
4750 label_false = GOTO_DESTINATION (label_false);
4752 else
4753 label_false = create_artificial_label (UNKNOWN_LOCATION);
4755 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4756 &arm2);
4757 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4758 label_false);
4759 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4760 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4761 gimplify_seq_add_stmt (&seq, cond_stmt);
4762 gimple_stmt_iterator gsi = gsi_last (seq);
4763 maybe_fold_stmt (&gsi);
4765 label_cont = NULL_TREE;
4766 if (!have_then_clause_p)
4768 /* For if (...) {} else { code; } put label_true after
4769 the else block. */
4770 if (TREE_OPERAND (expr, 1) == NULL_TREE
4771 && !have_else_clause_p
4772 && TREE_OPERAND (expr, 2) != NULL_TREE)
4774 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4775 handling that label_cont == label_true can be only reached
4776 through fallthrough from { code; }. */
4777 if (integer_zerop (COND_EXPR_COND (expr)))
4778 UNUSED_LABEL_P (label_true) = 1;
4779 label_cont = label_true;
4781 else
4783 bool then_side_effects
4784 = (TREE_OPERAND (expr, 1)
4785 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4786 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4787 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4788 /* For if (...) { code; } else {} or
4789 if (...) { code; } else goto label; or
4790 if (...) { code; return; } else { ... }
4791 label_cont isn't needed. */
4792 if (!have_else_clause_p
4793 && TREE_OPERAND (expr, 2) != NULL_TREE
4794 && gimple_seq_may_fallthru (seq))
4796 gimple *g;
4797 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4799 /* For if (0) { non-side-effect-code } else { code }
4800 tell -Wimplicit-fallthrough handling that label_cont can
4801 be only reached through fallthrough from { code }. */
4802 if (integer_zerop (COND_EXPR_COND (expr)))
4804 UNUSED_LABEL_P (label_true) = 1;
4805 if (!then_side_effects)
4806 UNUSED_LABEL_P (label_cont) = 1;
4809 g = gimple_build_goto (label_cont);
4811 /* GIMPLE_COND's are very low level; they have embedded
4812 gotos. This particular embedded goto should not be marked
4813 with the location of the original COND_EXPR, as it would
4814 correspond to the COND_EXPR's condition, not the ELSE or the
4815 THEN arms. To avoid marking it with the wrong location, flag
4816 it as "no location". */
4817 gimple_set_do_not_emit_location (g);
4819 gimplify_seq_add_stmt (&seq, g);
4823 if (!have_else_clause_p)
4825 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4826 tell -Wimplicit-fallthrough handling that label_false can be only
4827 reached through fallthrough from { code }. */
4828 if (integer_nonzerop (COND_EXPR_COND (expr))
4829 && (TREE_OPERAND (expr, 2) == NULL_TREE
4830 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4831 UNUSED_LABEL_P (label_false) = 1;
4832 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4833 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4835 if (label_cont)
4836 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4838 gimple_pop_condition (pre_p);
4839 gimple_seq_add_seq (pre_p, seq);
4841 if (ret == GS_ERROR)
4842 ; /* Do nothing. */
4843 else if (have_then_clause_p || have_else_clause_p)
4844 ret = GS_ALL_DONE;
4845 else
4847 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4848 expr = TREE_OPERAND (expr, 0);
4849 gimplify_stmt (&expr, pre_p);
4852 *expr_p = NULL;
4853 return ret;
4856 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4857 to be marked addressable.
4859 We cannot rely on such an expression being directly markable if a temporary
4860 has been created by the gimplification. In this case, we create another
4861 temporary and initialize it with a copy, which will become a store after we
4862 mark it addressable. This can happen if the front-end passed us something
4863 that it could not mark addressable yet, like a Fortran pass-by-reference
4864 parameter (int) floatvar. */
4866 static void
4867 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4869 while (handled_component_p (*expr_p))
4870 expr_p = &TREE_OPERAND (*expr_p, 0);
4872 /* Do not allow an SSA name as the temporary. */
4873 if (is_gimple_reg (*expr_p))
4874 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4877 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4878 a call to __builtin_memcpy. */
4880 static enum gimplify_status
4881 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4882 gimple_seq *seq_p)
4884 tree t, to, to_ptr, from, from_ptr;
4885 gcall *gs;
4886 location_t loc = EXPR_LOCATION (*expr_p);
4888 to = TREE_OPERAND (*expr_p, 0);
4889 from = TREE_OPERAND (*expr_p, 1);
4890 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to)))
4891 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))));
4893 /* Mark the RHS addressable. Beware that it may not be possible to do so
4894 directly if a temporary has been created by the gimplification. */
4895 prepare_gimple_addressable (&from, seq_p);
4897 mark_addressable (from);
4898 from_ptr = build_fold_addr_expr_loc (loc, from);
4899 gimplify_arg (&from_ptr, seq_p, loc);
4901 mark_addressable (to);
4902 to_ptr = build_fold_addr_expr_loc (loc, to);
4903 gimplify_arg (&to_ptr, seq_p, loc);
4905 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4907 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4908 gimple_call_set_alloca_for_var (gs, true);
4910 if (want_value)
4912 /* tmp = memcpy() */
4913 t = create_tmp_var (TREE_TYPE (to_ptr));
4914 gimple_call_set_lhs (gs, t);
4915 gimplify_seq_add_stmt (seq_p, gs);
4917 *expr_p = build_simple_mem_ref (t);
4918 return GS_ALL_DONE;
4921 gimplify_seq_add_stmt (seq_p, gs);
4922 *expr_p = NULL;
4923 return GS_ALL_DONE;
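/* Illustration (names invented): assigning one variable-sized object to
   another, `dst_obj = src_obj' with runtime size SZ, ends up as roughly

     __builtin_memcpy (&dst_obj, &src_obj, SZ);

   and when WANT_VALUE is set the call's returned pointer is stored in a
   temporary and dereferenced to stand for the assigned object.  */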
4926 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4927 a call to __builtin_memset. In this case we know that the RHS is
4928 a CONSTRUCTOR with an empty element list. */
4930 static enum gimplify_status
4931 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4932 gimple_seq *seq_p)
4934 tree t, from, to, to_ptr;
4935 gcall *gs;
4936 location_t loc = EXPR_LOCATION (*expr_p);
4938 /* Assert our assumptions, to abort instead of producing wrong code
4939 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4940 not be immediately exposed. */
4941 from = TREE_OPERAND (*expr_p, 1);
4942 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4943 from = TREE_OPERAND (from, 0);
4945 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4946 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4948 /* Now proceed. */
4949 to = TREE_OPERAND (*expr_p, 0);
4950 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to))));
4952 to_ptr = build_fold_addr_expr_loc (loc, to);
4953 gimplify_arg (&to_ptr, seq_p, loc);
4954 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4956 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4958 if (want_value)
4960 /* tmp = memset() */
4961 t = create_tmp_var (TREE_TYPE (to_ptr));
4962 gimple_call_set_lhs (gs, t);
4963 gimplify_seq_add_stmt (seq_p, gs);
4965 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4966 return GS_ALL_DONE;
4969 gimplify_seq_add_stmt (seq_p, gs);
4970 *expr_p = NULL;
4971 return GS_ALL_DONE;
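/* Illustration: clearing an object `obj' of runtime size SZ from an
   empty CONSTRUCTOR becomes roughly

     __builtin_memset (&obj, 0, SZ);

   the assertion above guarantees the RHS really has no elements, so a
   byte-wise clear is equivalent to the element-wise stores.  */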
4974 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4975 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4976 assignment. Return non-null if we detect a potential overlap. */
4978 struct gimplify_init_ctor_preeval_data
4980 /* The base decl of the lhs object. May be NULL, in which case we
4981 have to assume the lhs is indirect. */
4982 tree lhs_base_decl;
4984 /* The alias set of the lhs object. */
4985 alias_set_type lhs_alias_set;
4988 static tree
4989 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4991 struct gimplify_init_ctor_preeval_data *data
4992 = (struct gimplify_init_ctor_preeval_data *) xdata;
4993 tree t = *tp;
4995 /* If we find the base object, obviously we have overlap. */
4996 if (data->lhs_base_decl == t)
4997 return t;
4999 /* If the constructor component is indirect, determine if we have a
5000 potential overlap with the lhs. The only bits of information we
5001 have to go on at this point are addressability and alias sets. */
5002 if ((INDIRECT_REF_P (t)
5003 || TREE_CODE (t) == MEM_REF)
5004 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5005 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
5006 return t;
5008 /* If the constructor component is a call, determine if it can hide a
5009 potential overlap with the lhs through an INDIRECT_REF like above.
5010 ??? Ugh - this is completely broken. In fact this whole analysis
5011 doesn't look conservative. */
5012 if (TREE_CODE (t) == CALL_EXPR)
5014 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
5016 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
5017 if (POINTER_TYPE_P (TREE_VALUE (type))
5018 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5019 && alias_sets_conflict_p (data->lhs_alias_set,
5020 get_alias_set
5021 (TREE_TYPE (TREE_VALUE (type)))))
5022 return t;
5025 if (IS_TYPE_OR_DECL_P (t))
5026 *walk_subtrees = 0;
5027 return NULL;
5030 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5031 force values that overlap with the lhs (as described by *DATA)
5032 into temporaries. */
5034 static void
5035 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5036 struct gimplify_init_ctor_preeval_data *data)
5038 enum gimplify_status one;
5040 /* If the value is constant, then there's nothing to pre-evaluate. */
5041 if (TREE_CONSTANT (*expr_p))
5043 /* Ensure it does not have side effects, it might contain a reference to
5044 the object we're initializing. */
5045 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
5046 return;
5049 /* If the type has non-trivial constructors, we can't pre-evaluate. */
5050 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
5051 return;
5053 /* Recurse for nested constructors. */
5054 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
5056 unsigned HOST_WIDE_INT ix;
5057 constructor_elt *ce;
5058 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
5060 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
5061 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
5063 return;
5066 /* If this is a variable sized type, we must remember the size. */
5067 maybe_with_size_expr (expr_p);
5069 /* Gimplify the constructor element to something appropriate for the rhs
5070 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
5071 the gimplifier will consider this a store to memory. Doing this
5072 gimplification now means that we won't have to deal with complicated
5073 language-specific trees, nor trees like SAVE_EXPR that can induce
5074 exponential search behavior. */
5075 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
5076 if (one == GS_ERROR)
5078 *expr_p = NULL;
5079 return;
5082 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
5083 with the lhs, since "a = { .x=a }" doesn't make sense. This will
5084 always be true for all scalars, since is_gimple_mem_rhs insists on a
5085 temporary variable for them. */
5086 if (DECL_P (*expr_p))
5087 return;
5089 /* If this is of variable size, we have no choice but to assume it doesn't
5090 overlap since we can't make a temporary for it. */
5091 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
5092 return;
5094 /* Otherwise, we must search for overlap ... */
5095 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
5096 return;
5098 /* ... and if found, force the value into a temporary. */
5099 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
5102 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
5103 a RANGE_EXPR in a CONSTRUCTOR for an array.
5105 var = lower;
5106 loop_entry:
5107 object[var] = value;
5108 if (var == upper)
5109 goto loop_exit;
5110 var = var + 1;
5111 goto loop_entry;
5112 loop_exit:
5114 We increment var _after_ the loop exit check because we might otherwise
5115 fail if upper == TYPE_MAX_VALUE (TREE_TYPE (upper)).
5117 Note that we never have to deal with SAVE_EXPRs here, because this has
5118 already been taken care of for us, in gimplify_init_ctor_preeval(). */
5120 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
5121 gimple_seq *, bool);
5123 static void
5124 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
5125 tree value, tree array_elt_type,
5126 gimple_seq *pre_p, bool cleared)
5128 tree loop_entry_label, loop_exit_label, fall_thru_label;
5129 tree var, var_type, cref, tmp;
5131 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
5132 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
5133 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
5135 /* Create and initialize the index variable. */
5136 var_type = TREE_TYPE (upper);
5137 var = create_tmp_var (var_type);
5138 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
5140 /* Add the loop entry label. */
5141 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
5143 /* Build the reference. */
5144 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5145 var, NULL_TREE, NULL_TREE);
5147 /* If we are a constructor, just call gimplify_init_ctor_eval to do
5148 the store. Otherwise just assign value to the reference. */
5150 if (TREE_CODE (value) == CONSTRUCTOR)
5151 /* NB we might have to call ourself recursively through
5152 gimplify_init_ctor_eval if the value is a constructor. */
5153 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5154 pre_p, cleared);
5155 else
5157 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
5158 != GS_ERROR)
5159 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
5162 /* We exit the loop when the index var is equal to the upper bound. */
5163 gimplify_seq_add_stmt (pre_p,
5164 gimple_build_cond (EQ_EXPR, var, upper,
5165 loop_exit_label, fall_thru_label));
5167 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
5169 /* Otherwise, increment the index var... */
5170 tmp = build2 (PLUS_EXPR, var_type, var,
5171 fold_convert (var_type, integer_one_node));
5172 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
5174 /* ...and jump back to the loop entry. */
5175 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
5177 /* Add the loop exit label. */
5178 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
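/* Illustration (hypothetical initializer): for
     int a[100] = { [0 ... 99] = 5 };
   the RANGE_EXPR element expands into the loop shape documented above:

     var = 0;
   entry:
     a[var] = 5;
     if (var == 99) goto exit;
     var = var + 1;
     goto entry;
   exit:

   incrementing only after the exit test, which is what keeps the index
   from wrapping when upper is the type's maximum value.  */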
5181 /* A subroutine of gimplify_init_constructor. Generate individual
5182 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
5183 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
5184 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5185 zeroed first. */
5187 static void
5188 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
5189 gimple_seq *pre_p, bool cleared)
5191 tree array_elt_type = NULL;
5192 unsigned HOST_WIDE_INT ix;
5193 tree purpose, value;
5195 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
5196 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
5198 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
5200 tree cref;
5202 /* NULL values are created above for gimplification errors. */
5203 if (value == NULL)
5204 continue;
5206 if (cleared && initializer_zerop (value))
5207 continue;
5209 /* ??? Here's to hoping the front end fills in all of the indices,
5210 so we don't have to figure out what's missing ourselves. */
5211 gcc_assert (purpose);
5213 /* Skip zero-sized fields, unless value has side-effects. This can
5214 happen with calls to functions returning an empty type, which
5215 we shouldn't discard. As a number of downstream passes don't
5216 expect sets of empty type fields, we rely on the gimplification of
5217 the MODIFY_EXPR we make below to drop the assignment statement. */
5218 if (!TREE_SIDE_EFFECTS (value)
5219 && TREE_CODE (purpose) == FIELD_DECL
5220 && is_empty_type (TREE_TYPE (purpose)))
5221 continue;
5223 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5224 whole range. */
5225 if (TREE_CODE (purpose) == RANGE_EXPR)
5227 tree lower = TREE_OPERAND (purpose, 0);
5228 tree upper = TREE_OPERAND (purpose, 1);
5230 /* If the lower bound is equal to upper, just treat it as if
5231 upper was the index. */
5232 if (simple_cst_equal (lower, upper))
5233 purpose = upper;
5234 else
5236 gimplify_init_ctor_eval_range (object, lower, upper, value,
5237 array_elt_type, pre_p, cleared);
5238 continue;
5242 if (array_elt_type)
5244 /* Do not use bitsizetype for ARRAY_REF indices. */
5245 if (TYPE_DOMAIN (TREE_TYPE (object)))
5246 purpose
5247 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5248 purpose);
5249 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5250 purpose, NULL_TREE, NULL_TREE);
5252 else
5254 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5255 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5256 unshare_expr (object), purpose, NULL_TREE);
5259 if (TREE_CODE (value) == CONSTRUCTOR
5260 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5261 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5262 pre_p, cleared);
5263 else
5265 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5266 gimplify_and_add (init, pre_p);
5267 ggc_free (init);
5272 /* Return the appropriate RHS predicate for this LHS. */
5274 gimple_predicate
5275 rhs_predicate_for (tree lhs)
5277 if (is_gimple_reg (lhs))
5278 return is_gimple_reg_rhs_or_call;
5279 else
5280 return is_gimple_mem_rhs_or_call;
5283 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5284 before the LHS has been gimplified. */
5286 static gimple_predicate
5287 initial_rhs_predicate_for (tree lhs)
5289 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5290 return is_gimple_reg_rhs_or_call;
5291 else
5292 return is_gimple_mem_rhs_or_call;
5295 /* Gimplify a C99 compound literal expression. This just means adding
5296 the DECL_EXPR before the current statement and using its anonymous
5297 decl instead. */
5299 static enum gimplify_status
5300 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5301 bool (*gimple_test_f) (tree),
5302 fallback_t fallback)
5304 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5305 tree decl = DECL_EXPR_DECL (decl_s);
5306 tree init = DECL_INITIAL (decl);
5307 /* Mark the decl as addressable if the compound literal
5308 expression is addressable now, otherwise it is marked too late
5309 after we gimplify the initialization expression. */
5310 if (TREE_ADDRESSABLE (*expr_p))
5311 TREE_ADDRESSABLE (decl) = 1;
5312 /* Otherwise, if we don't need an lvalue and have a literal directly
5313 substitute it. Check if it matches the gimple predicate, as
5314 otherwise we'd generate a new temporary, and we can as well just
5315 use the decl we already have. */
5316 else if (!TREE_ADDRESSABLE (decl)
5317 && !TREE_THIS_VOLATILE (decl)
5318 && init
5319 && (fallback & fb_lvalue) == 0
5320 && gimple_test_f (init))
5322 *expr_p = init;
5323 return GS_OK;
5326 /* If the decl is not addressable, then it is being used in some
5327 expression or on the right hand side of a statement, and it can
5328 be put into a readonly data section. */
5329 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5330 TREE_READONLY (decl) = 1;
5332 /* This decl isn't mentioned in the enclosing block, so add it to the
5333 list of temps. FIXME it seems a bit of a kludge to say that
5334 anonymous artificial vars aren't pushed, but everything else is. */
5335 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5336 gimple_add_tmp_var (decl);
5338 gimplify_and_add (decl_s, pre_p);
5339 *expr_p = decl;
5340 return GS_OK;
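/* Illustration (decl name invented): for `s = (struct S) { 1, 2 };'
   the DECL_EXPR for the literal's anonymous decl is emitted first,
   initializing roughly

     D.2 = { 1, 2 };

   and *EXPR_P is replaced by D.2, so the enclosing assignment becomes
   `s = D.2'; when no lvalue is needed and the initializer already
   satisfies the gimple predicate, the initializer is substituted
   directly instead.  */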
5343 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5344 return a new CONSTRUCTOR if something changed. */
5346 static tree
5347 optimize_compound_literals_in_ctor (tree orig_ctor)
5349 tree ctor = orig_ctor;
5350 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5351 unsigned int idx, num = vec_safe_length (elts);
5353 for (idx = 0; idx < num; idx++)
5355 tree value = (*elts)[idx].value;
5356 tree newval = value;
5357 if (TREE_CODE (value) == CONSTRUCTOR)
5358 newval = optimize_compound_literals_in_ctor (value);
5359 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5361 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5362 tree decl = DECL_EXPR_DECL (decl_s);
5363 tree init = DECL_INITIAL (decl);
5365 if (!TREE_ADDRESSABLE (value)
5366 && !TREE_ADDRESSABLE (decl)
5367 && init
5368 && TREE_CODE (init) == CONSTRUCTOR)
5369 newval = optimize_compound_literals_in_ctor (init);
5371 if (newval == value)
5372 continue;
5374 if (ctor == orig_ctor)
5376 ctor = copy_node (orig_ctor);
5377 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5378 elts = CONSTRUCTOR_ELTS (ctor);
5380 (*elts)[idx].value = newval;
5382 return ctor;
5385 /* A subroutine of gimplify_modify_expr. Break out elements of a
5386 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5388 Note that we still need to clear any elements that don't have explicit
5389 initializers, so if not all elements are initialized we keep the
5390 original MODIFY_EXPR, we just remove all of the constructor elements.
5392 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5393 GS_ERROR if we would have to create a temporary when gimplifying
5394 this constructor. Otherwise, return GS_OK.
5396 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5398 static enum gimplify_status
5399 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5400 bool want_value, bool notify_temp_creation)
5402 tree object, ctor, type;
5403 enum gimplify_status ret;
5404 vec<constructor_elt, va_gc> *elts;
5405 bool cleared = false;
5406 bool is_empty_ctor = false;
5407 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5409 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5411 if (!notify_temp_creation)
5413 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5414 is_gimple_lvalue, fb_lvalue);
5415 if (ret == GS_ERROR)
5416 return ret;
5419 object = TREE_OPERAND (*expr_p, 0);
5420 ctor = TREE_OPERAND (*expr_p, 1)
5421 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5422 type = TREE_TYPE (ctor);
5423 elts = CONSTRUCTOR_ELTS (ctor);
5424 ret = GS_ALL_DONE;
5426 switch (TREE_CODE (type))
5428 case RECORD_TYPE:
5429 case UNION_TYPE:
5430 case QUAL_UNION_TYPE:
5431 case ARRAY_TYPE:
5433 /* Use readonly data for initializers of this or smaller size
5434 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5435 ratio. */
5436 const HOST_WIDE_INT min_unique_size = 64;
5437 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5438 is smaller than this, use readonly data. */
5439 const int unique_nonzero_ratio = 8;
5440 /* True if a single access of the object must be ensured. This is the
5441 case if the target is volatile, the type is non-addressable and more
5442 than one field needs to be assigned. */
5443 const bool ensure_single_access
5444 = TREE_THIS_VOLATILE (object)
5445 && !TREE_ADDRESSABLE (type)
5446 && vec_safe_length (elts) > 1;
5447 struct gimplify_init_ctor_preeval_data preeval_data;
5448 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5449 HOST_WIDE_INT num_unique_nonzero_elements;
5450 bool complete_p, valid_const_initializer;
5452 /* Aggregate types must lower constructors to initialization of
5453 individual elements. The exception is that a CONSTRUCTOR node
5454 with no elements indicates zero-initialization of the whole. */
5455 if (vec_safe_is_empty (elts))
5457 if (notify_temp_creation)
5458 return GS_OK;
5460 /* The var will be initialized and so appear on lhs of
5461 assignment, it can't be TREE_READONLY anymore. */
5462 if (VAR_P (object))
5463 TREE_READONLY (object) = 0;
5465 is_empty_ctor = true;
5466 break;
5469 /* Fetch information about the constructor to direct later processing.
5470 We might want to make static versions of it in various cases, and
5471 can only do so if it is known to be a valid constant initializer. */
5472 valid_const_initializer
5473 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5474 &num_unique_nonzero_elements,
5475 &num_ctor_elements, &complete_p);
5477 /* If a const aggregate variable is being initialized, then it
5478 should never be a loss to promote the variable to be static. */
5479 if (valid_const_initializer
5480 && num_nonzero_elements > 1
5481 && TREE_READONLY (object)
5482 && VAR_P (object)
5483 && !DECL_REGISTER (object)
5484 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5485 || DECL_MERGEABLE (object))
5486 /* For ctors that have many repeated nonzero elements
5487 represented through RANGE_EXPRs, prefer initializing
5488 those through runtime loops over copies of large amounts
5489 of data from the readonly data section. */
5490 && (num_unique_nonzero_elements
5491 > num_nonzero_elements / unique_nonzero_ratio
5492 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5493 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5495 if (notify_temp_creation)
5496 return GS_ERROR;
5498 DECL_INITIAL (object) = ctor;
5499 TREE_STATIC (object) = 1;
5500 if (!DECL_NAME (object))
5501 DECL_NAME (object) = create_tmp_var_name ("C");
5502 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5504 /* ??? C++ doesn't automatically append a .<number> to the
5505 assembler name, and even when it does, it looks at FE private
5506 data structures to figure out what that number should be,
5507 which are not set for this variable. I suppose this is
5508 important for local statics for inline functions, which aren't
5509 "local" in the object file sense. So in order to get a unique
5510 TU-local symbol, we must invoke the lhd version now. */
5511 lhd_set_decl_assembler_name (object);
5513 *expr_p = NULL_TREE;
5514 break;
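/* Schematically, the promotion above turns e.g.

     void f (void) { const int a[4] = { 1, 2, 3, 4 }; ... }

   into the equivalent of

     void f (void) { static const int a[4] = { 1, 2, 3, 4 }; ... }

   so the initializer lives in readonly data and no per-element
   stores are emitted at runtime.  */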
5517 /* The var will be initialized and so appear on the lhs of an
5518 assignment; it can't be TREE_READONLY anymore. */
5519 if (VAR_P (object) && !notify_temp_creation)
5520 TREE_READONLY (object) = 0;
5522 /* If there are "lots" of initialized elements, even discounting
5523 those that are not address constants (and thus *must* be
5524 computed at runtime), then partition the constructor into
5525 constant and non-constant parts. Block copy the constant
5526 parts in, then generate code for the non-constant parts. */
5527 /* TODO. There's code in cp/typeck.cc to do this. */
5529 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5530 /* store_constructor will ignore the clearing of variable-sized
5531 objects. Initializers for such objects must explicitly set
5532 every field that needs to be set. */
5533 cleared = false;
5534 else if (!complete_p)
5535 /* If the constructor isn't complete, clear the whole object
5536 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5538 ??? This ought not to be needed. For any element not present
5539 in the initializer, we should simply set it to zero. Except
5540 we'd need to *find* the elements that are not present, and that
5541 requires trickery to avoid quadratic compile-time behavior in
5542 large cases or excessive memory use in small cases. */
5543 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5544 else if (num_ctor_elements - num_nonzero_elements
5545 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5546 && num_nonzero_elements < num_ctor_elements / 4)
5547 /* If there are "lots" of zeros, it's more efficient to clear
5548 the memory and then set the nonzero elements. */
5549 cleared = true;
5550 else if (ensure_single_access && num_nonzero_elements == 0)
5551 /* If a single access to the target must be ensured and all elements
5552 are zero, then it's optimal to clear whatever their number. */
5553 cleared = true;
5554 else
5555 cleared = false;
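/* E.g. for "int a[100] = { [3] = 1 };" CLEARED ends up true (here via
   the incomplete-constructor case): the whole object is block-cleared
   first and only a[3] is stored explicitly afterwards.  */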
5557 /* If there are "lots" of initialized elements, and all of them
5558 are valid address constants, then the entire initializer can
5559 be dropped to memory, and then memcpy'd out. Don't do this
5560 for sparse arrays, though, as it's more efficient to follow
5561 the standard CONSTRUCTOR behavior of memset followed by
5562 individual element initialization. Also don't do this for small
5563 all-zero initializers (which aren't big enough to merit
5564 clearing), and don't try to make bitwise copies of
5565 TREE_ADDRESSABLE types. */
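/* When the test below succeeds, an initializer such as

     int a[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };

   is emitted once as an anonymous readonly constant, and the
   statement becomes a plain block copy from that constant, which
   the expanders may in turn implement as a memcpy.  */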
5566 if (valid_const_initializer
5567 && complete_p
5568 && !(cleared || num_nonzero_elements == 0)
5569 && !TREE_ADDRESSABLE (type))
5571 HOST_WIDE_INT size = int_size_in_bytes (type);
5572 unsigned int align;
5574 /* ??? We can still get unbounded array types, at least
5575 from the C++ front end. This seems wrong, but attempt
5576 to work around it for now. */
5577 if (size < 0)
5579 size = int_size_in_bytes (TREE_TYPE (object));
5580 if (size >= 0)
5581 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5584 /* Find the maximum alignment we can assume for the object. */
5585 /* ??? Make use of DECL_OFFSET_ALIGN. */
5586 if (DECL_P (object))
5587 align = DECL_ALIGN (object);
5588 else
5589 align = TYPE_ALIGN (type);
5591 /* Do a block move either if the size is so small as to make
5592 each individual move a sub-unit move on average, or if it
5593 is so large as to make individual moves inefficient. */
5594 if (size > 0
5595 && num_nonzero_elements > 1
5596 /* For ctors that have many repeated nonzero elements
5597 represented through RANGE_EXPRs, prefer initializing
5598 those through runtime loops over copies of large amounts
5599 of data from the readonly data section. */
5600 && (num_unique_nonzero_elements
5601 > num_nonzero_elements / unique_nonzero_ratio
5602 || size <= min_unique_size)
5603 && (size < num_nonzero_elements
5604 || !can_move_by_pieces (size, align)))
5606 if (notify_temp_creation)
5607 return GS_ERROR;
5609 walk_tree (&ctor, force_labels_r, NULL, NULL);
5610 ctor = tree_output_constant_def (ctor);
5611 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5612 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5613 TREE_OPERAND (*expr_p, 1) = ctor;
5615 /* This is no longer an assignment of a CONSTRUCTOR, but
5616 we still may have processing to do on the LHS. So
5617 pretend we didn't do anything here to let that happen. */
5618 return GS_UNHANDLED;
5622 /* If a single access to the target must be ensured and there are
5623 nonzero elements or the zero elements are not assigned en masse,
5624 initialize the target from a temporary. */
5625 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5627 if (notify_temp_creation)
5628 return GS_ERROR;
5630 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5631 TREE_OPERAND (*expr_p, 0) = temp;
5632 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5633 *expr_p,
5634 build2 (MODIFY_EXPR, void_type_node,
5635 object, temp));
5636 return GS_OK;
5639 if (notify_temp_creation)
5640 return GS_OK;
5642 /* If there are nonzero elements and if needed, pre-evaluate to capture
5643 elements overlapping with the lhs into temporaries. We must do this
5644 before clearing to fetch the values before they are zeroed-out. */
5645 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5647 preeval_data.lhs_base_decl = get_base_address (object);
5648 if (!DECL_P (preeval_data.lhs_base_decl))
5649 preeval_data.lhs_base_decl = NULL;
5650 preeval_data.lhs_alias_set = get_alias_set (object);
5652 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5653 pre_p, post_p, &preeval_data);
5656 bool ctor_has_side_effects_p
5657 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5659 if (cleared)
5661 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5662 Note that we still have to gimplify, in order to handle the
5663 case of variable sized types. Avoid shared tree structures. */
5664 CONSTRUCTOR_ELTS (ctor) = NULL;
5665 TREE_SIDE_EFFECTS (ctor) = 0;
5666 object = unshare_expr (object);
5667 gimplify_stmt (expr_p, pre_p);
5670 /* If we have not block cleared the object, or if there are nonzero
5671 elements in the constructor, or if the constructor has side effects,
5672 add assignments to the individual scalar fields of the object. */
5673 if (!cleared
5674 || num_nonzero_elements > 0
5675 || ctor_has_side_effects_p)
5676 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5678 *expr_p = NULL_TREE;
5680 break;
5682 case COMPLEX_TYPE:
5684 tree r, i;
5686 if (notify_temp_creation)
5687 return GS_OK;
5689 /* Extract the real and imaginary parts out of the ctor. */
5690 gcc_assert (elts->length () == 2);
5691 r = (*elts)[0].value;
5692 i = (*elts)[1].value;
5693 if (r == NULL || i == NULL)
5695 tree zero = build_zero_cst (TREE_TYPE (type));
5696 if (r == NULL)
5697 r = zero;
5698 if (i == NULL)
5699 i = zero;
5702 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5703 represent creation of a complex value. */
5704 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5706 ctor = build_complex (type, r, i);
5707 TREE_OPERAND (*expr_p, 1) = ctor;
5709 else
5711 ctor = build2 (COMPLEX_EXPR, type, r, i);
5712 TREE_OPERAND (*expr_p, 1) = ctor;
5713 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5714 pre_p,
5715 post_p,
5716 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5717 fb_rvalue);
5720 break;
5722 case VECTOR_TYPE:
5724 unsigned HOST_WIDE_INT ix;
5725 constructor_elt *ce;
5727 if (notify_temp_creation)
5728 return GS_OK;
5730 /* Vector types use CONSTRUCTOR all the way through gimple
5731 compilation as a general initializer. */
5732 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5734 enum gimplify_status tret;
5735 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5736 fb_rvalue);
5737 if (tret == GS_ERROR)
5738 ret = GS_ERROR;
5739 else if (TREE_STATIC (ctor)
5740 && !initializer_constant_valid_p (ce->value,
5741 TREE_TYPE (ce->value)))
5742 TREE_STATIC (ctor) = 0;
5744 recompute_constructor_flags (ctor);
5746 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5747 if (TREE_CONSTANT (ctor))
5749 bool constant_p = true;
5750 tree value;
5752 /* Even when ctor is constant, it might contain non-*_CST
5753 elements, such as addresses or trapping values like
5754 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5755 in VECTOR_CST nodes. */
5756 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5757 if (!CONSTANT_CLASS_P (value))
5759 constant_p = false;
5760 break;
5763 if (constant_p)
5765 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5766 break;
5770 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5771 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5773 break;
5775 default:
5776 /* So how did we get a CONSTRUCTOR for a scalar type? */
5777 gcc_unreachable ();
5780 if (ret == GS_ERROR)
5781 return GS_ERROR;
5782 /* If we have gimplified both sides of the initializer but have
5783 not emitted an assignment, do so now. */
5784 if (*expr_p
5785 /* If the type is an empty type, we don't need to emit the
5786 assignment. */
5787 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5789 tree lhs = TREE_OPERAND (*expr_p, 0);
5790 tree rhs = TREE_OPERAND (*expr_p, 1);
5791 if (want_value && object == lhs)
5792 lhs = unshare_expr (lhs);
5793 gassign *init = gimple_build_assign (lhs, rhs);
5794 gimplify_seq_add_stmt (pre_p, init);
5796 if (want_value)
5798 *expr_p = object;
5799 ret = GS_OK;
5801 else
5803 *expr_p = NULL;
5804 ret = GS_ALL_DONE;
5807 /* If the user requests initialization of automatic variables, we
5808 should also initialize the padding inside the variable. Add a call to
5809 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5810 initialize the padding of object to zero regardless of
5811 INIT_TYPE. Note, we will not insert this call if the aggregate
5812 variable has been completely cleared already or it's initialized
5813 with an empty constructor. We cannot insert this call if the
5814 variable is a gimple register, since __builtin_clear_padding will take
5815 the address of the variable. As a result, if a long double/_Complex long
5816 double variable is spilled onto the stack later, its padding cannot
5817 be cleared with __builtin_clear_padding; we should clear its padding
5818 when it is spilled into memory. */
5819 if (is_init_expr
5820 && !is_gimple_reg (object)
5821 && clear_padding_type_may_have_padding_p (type)
5822 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5823 || !AGGREGATE_TYPE_P (type))
5824 && is_var_need_auto_init (object))
5825 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5827 return ret;
5830 /* Given a pointer value OP0, return a simplified version of an
5831 indirection through OP0, or NULL_TREE if no simplification is
5832 possible. This may only be applied to a rhs of an expression.
5833 Note that the resulting type may differ from the pointed-to type,
5834 but only in ways that remain compatible from the langhooks
5835 point of view. */
5837 static tree
5838 gimple_fold_indirect_ref_rhs (tree t)
5840 return gimple_fold_indirect_ref (t);
5843 /* Subroutine of gimplify_modify_expr to do simplifications of
5844 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5845 something changes. */
5847 static enum gimplify_status
5848 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5849 gimple_seq *pre_p, gimple_seq *post_p,
5850 bool want_value)
5852 enum gimplify_status ret = GS_UNHANDLED;
5853 bool changed;
5857 changed = false;
5858 switch (TREE_CODE (*from_p))
5860 case VAR_DECL:
5861 /* If we're assigning from a read-only variable initialized with
5862 a constructor and not volatile, do the direct assignment from
5863 the constructor, but only if the target is not volatile either
5864 since this latter assignment might end up being done on a
5865 per-field basis. However, if the target is volatile and the type
5866 is aggregate and non-addressable, gimplify_init_constructor
5867 knows that it needs to ensure a single access to the target
5868 and it will return GS_OK only in this case. */
5869 if (TREE_READONLY (*from_p)
5870 && DECL_INITIAL (*from_p)
5871 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5872 && !TREE_THIS_VOLATILE (*from_p)
5873 && (!TREE_THIS_VOLATILE (*to_p)
5874 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5875 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5877 tree old_from = *from_p;
5878 enum gimplify_status subret;
5880 /* Move the constructor into the RHS. */
5881 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5883 /* Let's see if gimplify_init_constructor will need to put
5884 it in memory. */
5885 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5886 false, true);
5887 if (subret == GS_ERROR)
5889 /* If so, revert the change. */
5890 *from_p = old_from;
5892 else
5894 ret = GS_OK;
5895 changed = true;
5898 break;
5899 case INDIRECT_REF:
5900 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5901 /* If we have code like
5903 *(const A*)(A*)&x
5905 where the type of "x" is a (possibly cv-qualified) variant
5906 of "A", treat the entire expression as identical to "x".
5907 This kind of code arises in C++ when an object is bound
5908 to a const reference, and if "x" is a TARGET_EXPR we want
5909 to take advantage of the optimization below. But not if
5910 the type is TREE_ADDRESSABLE; then C++17 says that the
5911 TARGET_EXPR needs to be a temporary. */
5912 if (tree t
5913 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5915 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5916 if (TREE_THIS_VOLATILE (t) != volatile_p)
5918 if (DECL_P (t))
5919 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5920 build_fold_addr_expr (t));
5921 if (REFERENCE_CLASS_P (t))
5922 TREE_THIS_VOLATILE (t) = volatile_p;
5924 *from_p = t;
5925 ret = GS_OK;
5926 changed = true;
5928 break;
5930 case TARGET_EXPR:
5932 /* If we are initializing something from a TARGET_EXPR, strip the
5933 TARGET_EXPR and initialize it directly, if possible. This can't
5934 be done if the initializer is void, since that implies that the
5935 temporary is set in some non-trivial way.
5937 ??? What about code that pulls out the temp and uses it
5938 elsewhere? I think that such code never uses the TARGET_EXPR as
5939 an initializer. If I'm wrong, we'll die because the temp won't
5940 have any RTL. In that case, I guess we'll need to replace
5941 references somehow. */
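/* Schematically, the code below rewrites

     x = TARGET_EXPR <D.1234, init>

   into plain

     x = init

   so "x" is initialized directly and the D.1234 temporary is elided
   (temporary name invented for illustration).  */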
5942 tree init = TARGET_EXPR_INITIAL (*from_p);
5944 if (init
5945 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5946 || !TARGET_EXPR_NO_ELIDE (*from_p))
5947 && !VOID_TYPE_P (TREE_TYPE (init)))
5949 *from_p = init;
5950 ret = GS_OK;
5951 changed = true;
5954 break;
5956 case COMPOUND_EXPR:
5957 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5958 caught. */
5959 gimplify_compound_expr (from_p, pre_p, true);
5960 ret = GS_OK;
5961 changed = true;
5962 break;
5964 case CONSTRUCTOR:
5965 /* If we already made some changes, let the front end have a
5966 crack at this before we break it down. */
5967 if (ret != GS_UNHANDLED)
5968 break;
5970 /* If we're initializing from a CONSTRUCTOR, break this into
5971 individual MODIFY_EXPRs. */
5972 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5973 false);
5974 return ret;
5976 case COND_EXPR:
5977 /* If we're assigning to a non-register type, push the assignment
5978 down into the branches. This is mandatory for ADDRESSABLE types,
5979 since we cannot generate temporaries for such, but it saves a
5980 copy in other cases as well. */
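/* Roughly, for a non-register type this turns

     x = b ? f () : g ();

   into

     if (b) x = f (); else x = g ();

   so no temporary of the aggregate type is needed.  */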
5981 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5983 /* This code should mirror the code in gimplify_cond_expr. */
5984 enum tree_code code = TREE_CODE (*expr_p);
5985 tree cond = *from_p;
5986 tree result = *to_p;
5988 ret = gimplify_expr (&result, pre_p, post_p,
5989 is_gimple_lvalue, fb_lvalue);
5990 if (ret != GS_ERROR)
5991 ret = GS_OK;
5993 /* If we are going to write RESULT more than once, clear
5994 TREE_READONLY flag, otherwise we might incorrectly promote
5995 the variable to static const and initialize it at compile
5996 time in one of the branches. */
5997 if (VAR_P (result)
5998 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5999 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6000 TREE_READONLY (result) = 0;
6001 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
6002 TREE_OPERAND (cond, 1)
6003 = build2 (code, void_type_node, result,
6004 TREE_OPERAND (cond, 1));
6005 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6006 TREE_OPERAND (cond, 2)
6007 = build2 (code, void_type_node, unshare_expr (result),
6008 TREE_OPERAND (cond, 2));
6010 TREE_TYPE (cond) = void_type_node;
6011 recalculate_side_effects (cond);
6013 if (want_value)
6015 gimplify_and_add (cond, pre_p);
6016 *expr_p = unshare_expr (result);
6018 else
6019 *expr_p = cond;
6020 return ret;
6022 break;
6024 case CALL_EXPR:
6025 /* For calls that return in memory, give *to_p as the CALL_EXPR's
6026 return slot so that we don't generate a temporary. */
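/* E.g. for "struct big x; ... x = f ();" where f returns in memory,
   marking the return slot lets f construct its result directly in
   "x" instead of in a temporary that would be copied afterwards.  */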
6027 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
6028 && aggregate_value_p (*from_p, *from_p))
6030 bool use_target;
6032 if (!(rhs_predicate_for (*to_p))(*from_p))
6033 /* If we need a temporary, *to_p isn't accurate. */
6034 use_target = false;
6035 /* It's OK to use the return slot directly unless it's an NRV. */
6036 else if (TREE_CODE (*to_p) == RESULT_DECL
6037 && DECL_NAME (*to_p) == NULL_TREE
6038 && needs_to_live_in_memory (*to_p))
6039 use_target = true;
6040 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
6041 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
6042 /* Don't force regs into memory. */
6043 use_target = false;
6044 else if (TREE_CODE (*expr_p) == INIT_EXPR)
6045 /* It's OK to use the target directly if it's being
6046 initialized. */
6047 use_target = true;
6048 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
6049 != INTEGER_CST)
6050 /* Always use the target and thus RSO for variable-sized types.
6051 GIMPLE cannot deal with a variable-sized assignment
6052 embedded in a call statement. */
6053 use_target = true;
6054 else if (TREE_CODE (*to_p) != SSA_NAME
6055 && (!is_gimple_variable (*to_p)
6056 || needs_to_live_in_memory (*to_p)))
6057 /* Don't use the original target if it's already addressable;
6058 if its address escapes, and the called function uses the
6059 NRV optimization, a conforming program could see *to_p
6060 change before the called function returns; see c++/19317.
6061 When optimizing, the return_slot pass marks more functions
6062 as safe after we have escape info. */
6063 use_target = false;
6064 else
6065 use_target = true;
6067 if (use_target)
6069 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
6070 mark_addressable (*to_p);
6073 break;
6075 case WITH_SIZE_EXPR:
6076 /* Likewise for calls that return an aggregate of non-constant size,
6077 since we would not be able to generate a temporary at all. */
6078 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
6080 *from_p = TREE_OPERAND (*from_p, 0);
6081 /* We don't change ret in this case because the
6082 WITH_SIZE_EXPR might have been added in
6083 gimplify_modify_expr, so returning GS_OK would lead to an
6084 infinite loop. */
6085 changed = true;
6087 break;
6089 /* If we're initializing from a container, push the initialization
6090 inside it. */
6091 case CLEANUP_POINT_EXPR:
6092 case BIND_EXPR:
6093 case STATEMENT_LIST:
6095 tree wrap = *from_p;
6096 tree t;
6098 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
6099 fb_lvalue);
6100 if (ret != GS_ERROR)
6101 ret = GS_OK;
6103 t = voidify_wrapper_expr (wrap, *expr_p);
6104 gcc_assert (t == *expr_p);
6106 if (want_value)
6108 gimplify_and_add (wrap, pre_p);
6109 *expr_p = unshare_expr (*to_p);
6111 else
6112 *expr_p = wrap;
6113 return GS_OK;
6116 case NOP_EXPR:
6117 /* Pull out compound literal expressions from a NOP_EXPR.
6118 Those are created in the C FE to drop qualifiers during
6119 lvalue conversion. */
6120 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
6121 && tree_ssa_useless_type_conversion (*from_p))
6123 *from_p = TREE_OPERAND (*from_p, 0);
6124 ret = GS_OK;
6125 changed = true;
6127 break;
6129 case COMPOUND_LITERAL_EXPR:
6131 tree complit = TREE_OPERAND (*expr_p, 1);
6132 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
6133 tree decl = DECL_EXPR_DECL (decl_s);
6134 tree init = DECL_INITIAL (decl);
6136 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
6137 into struct T x = { 0, 1, 2 } if the address of the
6138 compound literal has never been taken. */
6139 if (!TREE_ADDRESSABLE (complit)
6140 && !TREE_ADDRESSABLE (decl)
6141 && init)
6143 *expr_p = copy_node (*expr_p);
6144 TREE_OPERAND (*expr_p, 1) = init;
6145 return GS_OK;
6149 default:
6150 break;
6153 while (changed);
6155 return ret;
6159 /* Return true if T looks like a valid GIMPLE statement. */
6161 static bool
6162 is_gimple_stmt (tree t)
6164 const enum tree_code code = TREE_CODE (t);
6166 switch (code)
6168 case NOP_EXPR:
6169 /* The only valid NOP_EXPR is the empty statement. */
6170 return IS_EMPTY_STMT (t);
6172 case BIND_EXPR:
6173 case COND_EXPR:
6174 /* These are only valid if they're void. */
6175 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
6177 case SWITCH_EXPR:
6178 case GOTO_EXPR:
6179 case RETURN_EXPR:
6180 case LABEL_EXPR:
6181 case CASE_LABEL_EXPR:
6182 case TRY_CATCH_EXPR:
6183 case TRY_FINALLY_EXPR:
6184 case EH_FILTER_EXPR:
6185 case CATCH_EXPR:
6186 case ASM_EXPR:
6187 case STATEMENT_LIST:
6188 case OACC_PARALLEL:
6189 case OACC_KERNELS:
6190 case OACC_SERIAL:
6191 case OACC_DATA:
6192 case OACC_HOST_DATA:
6193 case OACC_DECLARE:
6194 case OACC_UPDATE:
6195 case OACC_ENTER_DATA:
6196 case OACC_EXIT_DATA:
6197 case OACC_CACHE:
6198 case OMP_PARALLEL:
6199 case OMP_FOR:
6200 case OMP_SIMD:
6201 case OMP_DISTRIBUTE:
6202 case OMP_LOOP:
6203 case OACC_LOOP:
6204 case OMP_SCAN:
6205 case OMP_SCOPE:
6206 case OMP_SECTIONS:
6207 case OMP_SECTION:
6208 case OMP_STRUCTURED_BLOCK:
6209 case OMP_SINGLE:
6210 case OMP_MASTER:
6211 case OMP_MASKED:
6212 case OMP_TASKGROUP:
6213 case OMP_ORDERED:
6214 case OMP_CRITICAL:
6215 case OMP_TASK:
6216 case OMP_TARGET:
6217 case OMP_TARGET_DATA:
6218 case OMP_TARGET_UPDATE:
6219 case OMP_TARGET_ENTER_DATA:
6220 case OMP_TARGET_EXIT_DATA:
6221 case OMP_TASKLOOP:
6222 case OMP_TEAMS:
6223 /* These are always void. */
6224 return true;
6226 case CALL_EXPR:
6227 case MODIFY_EXPR:
6228 case PREDICT_EXPR:
6229 /* These are valid regardless of their type. */
6230 return true;
6232 default:
6233 return false;
6238 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6239 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6241 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6242 other, unmodified part of the complex object just before the total store.
6243 As a consequence, if the object is still uninitialized, an undefined value
6244 will be loaded into a register, which may result in a spurious exception
6245 if the register is floating-point and the value happens to be a signaling
6246 NaN for example. Then the fully-fledged complex operations lowering pass
6247 followed by a DCE pass are necessary in order to fix things up. */
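/* Schematically, the partial store

     __real z = x;

   becomes the total store

     D.1 = __imag z;
     z = COMPLEX_EXPR <x, D.1>;

   (temporary name invented), which keeps "z" a gimple register.  */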
6249 static enum gimplify_status
6250 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6251 bool want_value)
6253 enum tree_code code, ocode;
6254 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6256 lhs = TREE_OPERAND (*expr_p, 0);
6257 rhs = TREE_OPERAND (*expr_p, 1);
6258 code = TREE_CODE (lhs);
6259 lhs = TREE_OPERAND (lhs, 0);
6261 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6262 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6263 suppress_warning (other);
6264 other = get_formal_tmp_var (other, pre_p);
6266 realpart = code == REALPART_EXPR ? rhs : other;
6267 imagpart = code == REALPART_EXPR ? other : rhs;
6269 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6270 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6271 else
6272 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6274 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6275 *expr_p = (want_value) ? rhs : NULL_TREE;
6277 return GS_ALL_DONE;
6280 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6282 modify_expr
6283 : varname '=' rhs
6284 | '*' ID '=' rhs
6286 PRE_P points to the list where side effects that must happen before
6287 *EXPR_P should be stored.
6289 POST_P points to the list where side effects that must happen after
6290 *EXPR_P should be stored.
6292 WANT_VALUE is nonzero iff we want to use the value of this expression
6293 in another expression. */
6295 static enum gimplify_status
6296 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6297 bool want_value)
6299 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6300 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6301 enum gimplify_status ret = GS_UNHANDLED;
6302 gimple *assign;
6303 location_t loc = EXPR_LOCATION (*expr_p);
6304 gimple_stmt_iterator gsi;
6306 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6307 return GS_ERROR;
6309 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6310 || TREE_CODE (*expr_p) == INIT_EXPR);
6312 /* Trying to simplify a clobber using normal logic doesn't work,
6313 so handle it here. */
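/* E.g. the end-of-life marker

     x = {CLOBBER};

   is emitted directly as a GIMPLE assignment from the clobber rather
   than going through the generic rhs simplifications.  */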
6314 if (TREE_CLOBBER_P (*from_p))
6316 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6317 if (ret == GS_ERROR)
6318 return ret;
6319 gcc_assert (!want_value);
6320 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6322 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6323 pre_p, post_p);
6324 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6326 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6327 *expr_p = NULL;
6328 return GS_ALL_DONE;
6331 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6332 memset. */
6333 if (TREE_TYPE (*from_p) != error_mark_node
6334 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6335 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6336 && TREE_CODE (*from_p) == CONSTRUCTOR
6337 && CONSTRUCTOR_NELTS (*from_p) == 0)
6339 maybe_with_size_expr (from_p);
6340 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6341 return gimplify_modify_expr_to_memset (expr_p,
6342 TREE_OPERAND (*from_p, 1),
6343 want_value, pre_p);
6346 /* Insert pointer conversions required by the middle-end that are not
6347 required by the frontend. This fixes middle-end type checking,
6348 for example for gcc.dg/redecl-6.c. */
6349 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6351 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6352 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6353 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6356 /* See if any simplifications can be done based on what the RHS is. */
6357 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6358 want_value);
6359 if (ret != GS_UNHANDLED)
6360 return ret;
6362 /* For empty types only gimplify the left hand side and right hand
6363 side as statements and throw away the assignment. Do this after
6364 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6365 types properly. */
6366 if (is_empty_type (TREE_TYPE (*from_p))
6367 && !want_value
6368 /* Don't do this for calls that return addressable types; expand_call
6369 relies on those having a lhs. */
6370 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6371 && TREE_CODE (*from_p) == CALL_EXPR))
6373 gimplify_stmt (from_p, pre_p);
6374 gimplify_stmt (to_p, pre_p);
6375 *expr_p = NULL_TREE;
6376 return GS_ALL_DONE;
6379 /* If the value being copied is of variable width, compute the length
6380 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6381 before gimplifying any of the operands so that we can resolve any
6382 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6383 the size of the expression to be copied, not of the destination, so
6384 that is what we must do here. */
6385 maybe_with_size_expr (from_p);
6387 /* As a special case, we have to temporarily allow for assignments
6388 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6389 a toplevel statement, when gimplifying the GENERIC expression
6390 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6391 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6393 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6394 prevent gimplify_expr from trying to create a new temporary for
6395 foo's LHS, we tell it that it should only gimplify until it
6396 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6397 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6398 and all we need to do here is set 'a' to be its LHS. */
6400 /* Gimplify the RHS first for C++17 and bug 71104. */
6401 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6402 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6403 if (ret == GS_ERROR)
6404 return ret;
6406 /* Then gimplify the LHS. */
6407 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6408 twice we have to make sure to gimplify into non-SSA as otherwise
6409 the abnormal edge added later will make those defs not dominate
6410 their uses.
6411 ??? Technically this applies only to the registers used in the
6412 resulting non-register *TO_P. */
6413 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6414 if (saved_into_ssa
6415 && TREE_CODE (*from_p) == CALL_EXPR
6416 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6417 gimplify_ctxp->into_ssa = false;
6418 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6419 gimplify_ctxp->into_ssa = saved_into_ssa;
6420 if (ret == GS_ERROR)
6421 return ret;
6423 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6424 guess for the predicate was wrong. */
6425 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6426 if (final_pred != initial_pred)
6428 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6429 if (ret == GS_ERROR)
6430 return ret;
6433 /* In case of va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6434 size as argument to the call. */
6435 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6437 tree call = TREE_OPERAND (*from_p, 0);
6438 tree vlasize = TREE_OPERAND (*from_p, 1);
6440 if (TREE_CODE (call) == CALL_EXPR
6441 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6443 int nargs = call_expr_nargs (call);
6444 tree type = TREE_TYPE (call);
6445 tree ap = CALL_EXPR_ARG (call, 0);
6446 tree tag = CALL_EXPR_ARG (call, 1);
6447 tree aptag = CALL_EXPR_ARG (call, 2);
6448 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6449 IFN_VA_ARG, type,
6450 nargs + 1, ap, tag,
6451 aptag, vlasize);
6452 TREE_OPERAND (*from_p, 0) = newcall;
6456 /* Now see if the above changed *from_p to something we handle specially. */
6457 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6458 want_value);
6459 if (ret != GS_UNHANDLED)
6460 return ret;
6462 /* If we've got a variable-sized assignment between two lvalues (i.e. one
6463 that does not involve a call), then we can make things a bit more
6464 straightforward by converting the assignment to memcpy or memset. */
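/* E.g. for two variable-length arrays "a" and "b" of the same size,
   "a = b" arrives here wrapped in a WITH_SIZE_EXPR and becomes,
   roughly,

     __builtin_memcpy (&a, &b, size);

   while an assignment from an empty CONSTRUCTOR becomes a memset.  */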
6465 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6467 tree from = TREE_OPERAND (*from_p, 0);
6468 tree size = TREE_OPERAND (*from_p, 1);
6470 if (TREE_CODE (from) == CONSTRUCTOR)
6471 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6472 else if (is_gimple_addressable (from)
6473 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
6474 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
6476 *from_p = from;
6477 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6478 pre_p);
6482 /* Transform partial stores to non-addressable complex variables into
6483 total stores. This allows us to use real instead of virtual operands
6484 for these variables, which improves optimization. */
6485 if ((TREE_CODE (*to_p) == REALPART_EXPR
6486 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6487 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6488 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6490 /* Try to alleviate the effects of the gimplification creating artificial
6491 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6492 make sure not to create DECL_DEBUG_EXPR links across functions. */
6493 if (!gimplify_ctxp->into_ssa
6494 && VAR_P (*from_p)
6495 && DECL_IGNORED_P (*from_p)
6496 && DECL_P (*to_p)
6497 && !DECL_IGNORED_P (*to_p)
6498 && decl_function_context (*to_p) == current_function_decl
6499 && decl_function_context (*from_p) == current_function_decl)
6501 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6502 DECL_NAME (*from_p)
6503 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6504 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6505 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6508 if (want_value && TREE_THIS_VOLATILE (*to_p))
6509 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6511 if (TREE_CODE (*from_p) == CALL_EXPR)
6513 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6514 instead of a GIMPLE_ASSIGN. */
6515 gcall *call_stmt;
6516 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6518 /* Gimplify internal functions created in the FEs. */
6519 int nargs = call_expr_nargs (*from_p), i;
6520 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6521 auto_vec<tree> vargs (nargs);
6523 for (i = 0; i < nargs; i++)
6525 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6526 EXPR_LOCATION (*from_p));
6527 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6529 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6530 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6531 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6533 else
6535 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6536 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6537 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6538 tree fndecl = get_callee_fndecl (*from_p);
6539 if (fndecl
6540 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6541 && call_expr_nargs (*from_p) == 3)
6542 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6543 CALL_EXPR_ARG (*from_p, 0),
6544 CALL_EXPR_ARG (*from_p, 1),
6545 CALL_EXPR_ARG (*from_p, 2));
6546 else
6548 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6551 notice_special_calls (call_stmt);
6552 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6553 gimple_call_set_lhs (call_stmt, *to_p);
6554 else if (TREE_CODE (*to_p) == SSA_NAME)
6555 /* The above is somewhat premature; avoid ICEing later for a
6556 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6557 ??? This doesn't make it a default-def. */
6558 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6560 assign = call_stmt;
6562 else
6564 assign = gimple_build_assign (*to_p, *from_p);
6565 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6566 if (COMPARISON_CLASS_P (*from_p))
6567 copy_warning (assign, *from_p);
6570 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6572 /* We should have got an SSA name from the start. */
6573 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6574 || ! gimple_in_ssa_p (cfun));
6577 gimplify_seq_add_stmt (pre_p, assign);
6578 gsi = gsi_last (*pre_p);
6579 maybe_fold_stmt (&gsi);
6581 if (want_value)
6583 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6584 return GS_OK;
6586 else
6587 *expr_p = NULL;
6589 return GS_ALL_DONE;
6592 /* Gimplify a comparison between two variable-sized objects. Do this
6593 with a call to BUILT_IN_MEMCMP. */
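/* Roughly, for two variable-sized objects "a" and "b",

     a == b

   is rewritten as

     __builtin_memcmp (&a, &b, size) == 0

   with the size computed from TYPE_SIZE_UNIT of the type of "a".  */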
6595 static enum gimplify_status
6596 gimplify_variable_sized_compare (tree *expr_p)
6598 location_t loc = EXPR_LOCATION (*expr_p);
6599 tree op0 = TREE_OPERAND (*expr_p, 0);
6600 tree op1 = TREE_OPERAND (*expr_p, 1);
6601 tree t, arg, dest, src, expr;
6603 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6604 arg = unshare_expr (arg);
6605 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6606 src = build_fold_addr_expr_loc (loc, op1);
6607 dest = build_fold_addr_expr_loc (loc, op0);
6608 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6609 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6611 expr
6612 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6613 SET_EXPR_LOCATION (expr, loc);
6614 *expr_p = expr;
6616 return GS_OK;
6619 /* Gimplify a comparison between two aggregate objects of integral scalar
6620 mode as a comparison between the bitwise equivalent scalar values. */
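/* E.g. if "struct S { short lo, hi; }" has SImode, then "x == y"
   becomes, schematically,

     VIEW_CONVERT_EXPR<int>(x) == VIEW_CONVERT_EXPR<int>(y)

   where "int" stands for whatever scalar type matches the mode.  */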
6622 static enum gimplify_status
6623 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6625 location_t loc = EXPR_LOCATION (*expr_p);
6626 tree op0 = TREE_OPERAND (*expr_p, 0);
6627 tree op1 = TREE_OPERAND (*expr_p, 1);
6629 tree type = TREE_TYPE (op0);
6630 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6632 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6633 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6635 *expr_p
6636 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6638 return GS_OK;
6641 /* Gimplify an expression sequence. This function gimplifies each
6642 expression and rewrites the original expression with the last
6643 expression of the sequence in GIMPLE form.
6645 PRE_P points to the list where the side effects for all the
6646 expressions in the sequence will be emitted.
6648 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
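/* E.g. gimplifying "(a = 1, b = 2, c)" for value emits

     a = 1;
     b = 2;

   into PRE_P and leaves "c" as the resulting expression.  */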
6650 static enum gimplify_status
6651 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6653 tree t = *expr_p;
6657 tree *sub_p = &TREE_OPERAND (t, 0);
6659 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6660 gimplify_compound_expr (sub_p, pre_p, false);
6661 else
6662 gimplify_stmt (sub_p, pre_p);
6664 t = TREE_OPERAND (t, 1);
6666 while (TREE_CODE (t) == COMPOUND_EXPR);
6668 *expr_p = t;
6669 if (want_value)
6670 return GS_OK;
6671 else
6673 gimplify_stmt (expr_p, pre_p);
6674 return GS_ALL_DONE;
6678 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6679 gimplify. After gimplification, EXPR_P will point to a new temporary
6680 that holds the original value of the SAVE_EXPR node.
6682 PRE_P points to the list where side effects that must happen before
6683 *EXPR_P should be stored. */
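/* E.g. a SAVE_EXPR <f ()> shared between several uses is evaluated
   once into a temporary, roughly

     save.1 = f ();

   and every use of the SAVE_EXPR then refers to save.1 (temporary
   name invented for illustration).  */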
6685 static enum gimplify_status
6686 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6688 enum gimplify_status ret = GS_ALL_DONE;
6689 tree val;
6691 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6692 val = TREE_OPERAND (*expr_p, 0);
6694 if (val && TREE_TYPE (val) == error_mark_node)
6695 return GS_ERROR;
6697 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6698 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6700 /* The operand may be a void-valued expression. It is
6701 being executed only for its side-effects. */
6702 if (TREE_TYPE (val) == void_type_node)
6704 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6705 is_gimple_stmt, fb_none);
6706 val = NULL;
6708 else
6709 /* The temporary may not be an SSA name as later abnormal and EH
6710 control flow may invalidate use/def domination. When in SSA
6711 form, assume there are no such issues and SAVE_EXPRs only
6712 appear via GENERIC foldings. */
6713 val = get_initialized_tmp_var (val, pre_p, post_p,
6714 gimple_in_ssa_p (cfun));
6716 TREE_OPERAND (*expr_p, 0) = val;
6717 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6720 *expr_p = val;
6722 return ret;
6725 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6727 unary_expr
6728 : ...
6729 | '&' varname
6732 PRE_P points to the list where side effects that must happen before
6733 *EXPR_P should be stored.
6735 POST_P points to the list where side effects that must happen after
6736 *EXPR_P should be stored. */
6738 static enum gimplify_status
6739 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6741 tree expr = *expr_p;
6742 tree op0 = TREE_OPERAND (expr, 0);
6743 enum gimplify_status ret;
6744 location_t loc = EXPR_LOCATION (*expr_p);
6746 switch (TREE_CODE (op0))
6748 case INDIRECT_REF:
6749 do_indirect_ref:
6750 /* Check if we are dealing with an expression of the form '&*ptr'.
6751 While the front end folds away '&*ptr' into 'ptr', these
6752 expressions may be generated internally by the compiler (e.g.,
6753 builtins like __builtin_va_end). */
6754 /* Caution: the silent array decomposition semantics we allow for
6755 ADDR_EXPR mean we can't always discard the pair. */
6756 /* Gimplification of the ADDR_EXPR operand may drop
6757 cv-qualification conversions, so make sure we add them if
6758 needed. */
6760 tree op00 = TREE_OPERAND (op0, 0);
6761 tree t_expr = TREE_TYPE (expr);
6762 tree t_op00 = TREE_TYPE (op00);
6764 if (!useless_type_conversion_p (t_expr, t_op00))
6765 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6766 *expr_p = op00;
6767 ret = GS_OK;
6769 break;
6771 case VIEW_CONVERT_EXPR:
6772 /* Take the address of our operand and then convert it to the type of
6773 this ADDR_EXPR.
6775 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6776 all clear. The impact of this transformation is even less clear. */
6778 /* If the operand is a useless conversion, look through it. Doing so
6779 guarantees that the ADDR_EXPR and its operand will remain of the
6780 same type. */
6781 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6782 op0 = TREE_OPERAND (op0, 0);
6784 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6785 build_fold_addr_expr_loc (loc,
6786 TREE_OPERAND (op0, 0)));
6787 ret = GS_OK;
6788 break;
6790 case MEM_REF:
6791 if (integer_zerop (TREE_OPERAND (op0, 1)))
6792 goto do_indirect_ref;
6794 /* fall through */
6796 default:
6797 /* If we see a call to a declared builtin or see its address
6798 being taken (we can unify those cases here) then we can mark
6799 the builtin for implicit generation by GCC. */
6800 if (TREE_CODE (op0) == FUNCTION_DECL
6801 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6802 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6803 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6805 /* We use fb_either here because the C frontend sometimes takes
6806 the address of a call that returns a struct; see
6807 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6808 the implied temporary explicit. */
6810 /* Make the operand addressable. */
6811 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6812 is_gimple_addressable, fb_either);
6813 if (ret == GS_ERROR)
6814 break;
6816 /* Then mark it. Beware that it may not be possible to do so directly
6817 if a temporary has been created by the gimplification. */
6818 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6820 op0 = TREE_OPERAND (expr, 0);
6822 /* For various reasons, the gimplification of the expression
6823 may have made a new INDIRECT_REF. */
6824 if (INDIRECT_REF_P (op0)
6825 || (TREE_CODE (op0) == MEM_REF
6826 && integer_zerop (TREE_OPERAND (op0, 1))))
6827 goto do_indirect_ref;
6829 mark_addressable (TREE_OPERAND (expr, 0));
6831 /* The FEs may end up building ADDR_EXPRs early on a decl with
6832 an incomplete type. Re-build ADDR_EXPRs in canonical form
6833 here. */
6834 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6835 *expr_p = build_fold_addr_expr (op0);
6837 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6838 recompute_tree_invariant_for_addr_expr (*expr_p);
6840 /* If we re-built the ADDR_EXPR add a conversion to the original type
6841 if required. */
6842 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6843 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6845 break;
6848 return ret;
6851 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6852 value; output operands should be a gimple lvalue. */
6854 static enum gimplify_status
6855 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6857 tree expr;
6858 int noutputs;
6859 const char **oconstraints;
6860 int i;
6861 tree link;
6862 const char *constraint;
6863 bool allows_mem, allows_reg, is_inout;
6864 enum gimplify_status ret, tret;
6865 gasm *stmt;
6866 vec<tree, va_gc> *inputs;
6867 vec<tree, va_gc> *outputs;
6868 vec<tree, va_gc> *clobbers;
6869 vec<tree, va_gc> *labels;
6870 tree link_next;
6872 expr = *expr_p;
6873 noutputs = list_length (ASM_OUTPUTS (expr));
6874 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6876 inputs = NULL;
6877 outputs = NULL;
6878 clobbers = NULL;
6879 labels = NULL;
6881 ret = GS_ALL_DONE;
6882 link_next = NULL_TREE;
6883 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6885 bool ok;
6886 size_t constraint_len;
6888 link_next = TREE_CHAIN (link);
6890 oconstraints[i]
6891 = constraint
6892 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6893 constraint_len = strlen (constraint);
6894 if (constraint_len == 0)
6895 continue;
6897 ok = parse_output_constraint (&constraint, i, 0, 0,
6898 &allows_mem, &allows_reg, &is_inout);
6899 if (!ok)
6901 ret = GS_ERROR;
6902 is_inout = false;
6905 /* If we can't make copies, we can only accept memory.
6906 Similarly for VLAs. */
6907 tree outtype = TREE_TYPE (TREE_VALUE (link));
6908 if (outtype != error_mark_node
6909 && (TREE_ADDRESSABLE (outtype)
6910 || !COMPLETE_TYPE_P (outtype)
6911 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6913 if (allows_mem)
6914 allows_reg = 0;
6915 else
6917 error ("impossible constraint in %<asm%>");
6918 error ("non-memory output %d must stay in memory", i);
6919 return GS_ERROR;
6923 if (!allows_reg && allows_mem)
6924 mark_addressable (TREE_VALUE (link));
6926 tree orig = TREE_VALUE (link);
6927 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6928 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6929 fb_lvalue | fb_mayfail);
6930 if (tret == GS_ERROR)
6932 if (orig != error_mark_node)
6933 error ("invalid lvalue in %<asm%> output %d", i);
6934 ret = tret;
6937 /* If the constraint does not allow memory, make sure we gimplify
6938 it to a register if it is not one already but its base is. This
6939 happens for complex and vector components. */
6940 if (!allows_mem)
6942 tree op = TREE_VALUE (link);
6943 if (! is_gimple_val (op)
6944 && is_gimple_reg_type (TREE_TYPE (op))
6945 && is_gimple_reg (get_base_address (op)))
6947 tree tem = create_tmp_reg (TREE_TYPE (op));
6948 tree ass;
6949 if (is_inout)
6951 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6952 tem, unshare_expr (op));
6953 gimplify_and_add (ass, pre_p);
6955 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6956 gimplify_and_add (ass, post_p);
6958 TREE_VALUE (link) = tem;
6959 tret = GS_OK;
6963 vec_safe_push (outputs, link);
6964 TREE_CHAIN (link) = NULL_TREE;
6966 if (is_inout)
6968 /* An input/output operand. To give the optimizers more
6969 flexibility, split it into separate input and output
6970 operands. */
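/* E.g. an "+r" (x) operand is rewritten below as the output

     "=r" (x)

   plus the matching input

     "0" (x)

   so the optimizers see the read and the write separately.  */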
6971 tree input;
6972 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6973 char buf[11];
6975 /* Turn the in/out constraint into an output constraint. */
6976 char *p = xstrdup (constraint);
6977 p[0] = '=';
6978 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6980 /* And add a matching input constraint. */
6981 if (allows_reg)
6983 sprintf (buf, "%u", i);
6985 /* If there are multiple alternatives in the constraint,
6986 handle each of them individually. Those that allow a register
6987 will be replaced with the operand number; the others will stay
6988 unchanged. */
6989 if (strchr (p, ',') != NULL)
6991 size_t len = 0, buflen = strlen (buf);
6992 char *beg, *end, *str, *dst;
6994 for (beg = p + 1;;)
6996 end = strchr (beg, ',');
6997 if (end == NULL)
6998 end = strchr (beg, '\0');
6999 if ((size_t) (end - beg) < buflen)
7000 len += buflen + 1;
7001 else
7002 len += end - beg + 1;
7003 if (*end)
7004 beg = end + 1;
7005 else
7006 break;
7009 str = (char *) alloca (len);
7010 for (beg = p + 1, dst = str;;)
7012 const char *tem;
7013 bool mem_p, reg_p, inout_p;
7015 end = strchr (beg, ',');
7016 if (end)
7017 *end = '\0';
7018 beg[-1] = '=';
7019 tem = beg - 1;
7020 parse_output_constraint (&tem, i, 0, 0,
7021 &mem_p, &reg_p, &inout_p);
7022 if (dst != str)
7023 *dst++ = ',';
7024 if (reg_p)
7026 memcpy (dst, buf, buflen);
7027 dst += buflen;
7029 else
7031 if (end)
7032 len = end - beg;
7033 else
7034 len = strlen (beg);
7035 memcpy (dst, beg, len);
7036 dst += len;
7038 if (end)
7039 beg = end + 1;
7040 else
7041 break;
7043 *dst = '\0';
7044 input = build_string (dst - str, str);
7046 else
7047 input = build_string (strlen (buf), buf);
7049 else
7050 input = build_string (constraint_len - 1, constraint + 1);
7052 free (p);
7054 input = build_tree_list (build_tree_list (NULL_TREE, input),
7055 unshare_expr (TREE_VALUE (link)));
7056 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
7060 link_next = NULL_TREE;
7061 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
7063 link_next = TREE_CHAIN (link);
7064 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7065 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7066 oconstraints, &allows_mem, &allows_reg);
7068 /* If we can't make copies, we can only accept memory. */
7069 tree intype = TREE_TYPE (TREE_VALUE (link));
7070 if (intype != error_mark_node
7071 && (TREE_ADDRESSABLE (intype)
7072 || !COMPLETE_TYPE_P (intype)
7073 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
7075 if (allows_mem)
7076 allows_reg = 0;
7077 else
7079 error ("impossible constraint in %<asm%>");
7080 error ("non-memory input %d must stay in memory", i);
7081 return GS_ERROR;
7085 /* If the operand is a memory input, it should be an lvalue. */
7086 if (!allows_reg && allows_mem)
7088 tree inputv = TREE_VALUE (link);
7089 STRIP_NOPS (inputv);
7090 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
7091 || TREE_CODE (inputv) == PREINCREMENT_EXPR
7092 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
7093 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
7094 || TREE_CODE (inputv) == MODIFY_EXPR)
7095 TREE_VALUE (link) = error_mark_node;
7096 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7097 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7098 if (tret != GS_ERROR)
7100 /* Unlike output operands, memory inputs are not guaranteed
7101 to be lvalues by the FE, and while the expressions are
7102 marked addressable there, if it is e.g. a statement
7103 expression, temporaries in it might not end up being
7104 addressable. They might already be used in the IL and thus
7105 it is too late to make them addressable now though. */
7106 tree x = TREE_VALUE (link);
7107 while (handled_component_p (x))
7108 x = TREE_OPERAND (x, 0);
7109 if (TREE_CODE (x) == MEM_REF
7110 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
7111 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
7112 if ((VAR_P (x)
7113 || TREE_CODE (x) == PARM_DECL
7114 || TREE_CODE (x) == RESULT_DECL)
7115 && !TREE_ADDRESSABLE (x)
7116 && is_gimple_reg (x))
7118 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
7119 input_location), 0,
7120 "memory input %d is not directly addressable",
7122 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
7125 mark_addressable (TREE_VALUE (link));
7126 if (tret == GS_ERROR)
7128 if (inputv != error_mark_node)
7129 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
7130 "memory input %d is not directly addressable", i);
7131 ret = tret;
7134 else
7136 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7137 is_gimple_asm_val, fb_rvalue);
7138 if (tret == GS_ERROR)
7139 ret = tret;
7142 TREE_CHAIN (link) = NULL_TREE;
7143 vec_safe_push (inputs, link);
7146 link_next = NULL_TREE;
7147 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
7149 link_next = TREE_CHAIN (link);
7150 TREE_CHAIN (link) = NULL_TREE;
7151 vec_safe_push (clobbers, link);
7154 link_next = NULL_TREE;
7155 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
7157 link_next = TREE_CHAIN (link);
7158 TREE_CHAIN (link) = NULL_TREE;
7159 vec_safe_push (labels, link);
7162 /* Do not add ASMs with errors to the gimple IL stream. */
7163 if (ret != GS_ERROR)
7165 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
7166 inputs, outputs, clobbers, labels);
7168 /* An asm is volatile if the user marked it volatile, if there
7169 are no outputs, or if it is an asm goto. */
7170 gimple_asm_set_volatile (stmt,
7171 ASM_VOLATILE_P (expr)
7172 || noutputs == 0
7173 || labels);
7174 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
7175 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
7177 gimplify_seq_add_stmt (pre_p, stmt);
7180 return ret;
7183 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
7184 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
7185 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7186 return to this function.
7188 FIXME should we complexify the prequeue handling instead? Or use flags
7189 for all the cleanups and let the optimizer tighten them up? The current
7190 code seems pretty fragile; it will break on a cleanup within any
7191 non-conditional nesting. But any such nesting would be broken, anyway;
7192 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7193 and continues out of it. We can do that at the RTL level, though, so
7194 having an optimizer to tighten up try/finally regions would be a Good
7195 Thing. */
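/* Schematically, a body gimplified to

     stmt1;
     WCE <cleanup>;
     stmt2;

   is rewritten below as

     stmt1;
     try { stmt2; } finally { cleanup; }

   (or a try/catch if the cleanup is EH-only).  */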
7197 static enum gimplify_status
7198 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
7200 gimple_stmt_iterator iter;
7201 gimple_seq body_sequence = NULL;
7203 tree temp = voidify_wrapper_expr (*expr_p, NULL);
7205 /* We only care about the number of conditions between the innermost
7206 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7207 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7208 int old_conds = gimplify_ctxp->conditions;
7209 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
7210 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
7211 gimplify_ctxp->conditions = 0;
7212 gimplify_ctxp->conditional_cleanups = NULL;
7213 gimplify_ctxp->in_cleanup_point_expr = true;
7215 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
7217 gimplify_ctxp->conditions = old_conds;
7218 gimplify_ctxp->conditional_cleanups = old_cleanups;
7219 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
7221 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
7223 gimple *wce = gsi_stmt (iter);
7225 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
7227 if (gsi_one_before_end_p (iter))
7229 /* Note that gsi_insert_seq_before and gsi_remove do not
7230 scan operands, unlike some other sequence mutators. */
7231 if (!gimple_wce_cleanup_eh_only (wce))
7232 gsi_insert_seq_before_without_update (&iter,
7233 gimple_wce_cleanup (wce),
7234 GSI_SAME_STMT);
7235 gsi_remove (&iter, true);
7236 break;
7238 else
7240 gtry *gtry;
7241 gimple_seq seq;
7242 enum gimple_try_flags kind;
7244 if (gimple_wce_cleanup_eh_only (wce))
7245 kind = GIMPLE_TRY_CATCH;
7246 else
7247 kind = GIMPLE_TRY_FINALLY;
7248 seq = gsi_split_seq_after (iter);
7250 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7251 /* Do not use gsi_replace here, as it may scan operands.
7252 We want to do a simple structural modification only. */
7253 gsi_set_stmt (&iter, gtry);
7254 iter = gsi_start (gtry->eval);
7257 else
7258 gsi_next (&iter);
7261 gimplify_seq_add_seq (pre_p, body_sequence);
7262 if (temp)
7264 *expr_p = temp;
7265 return GS_OK;
7267 else
7269 *expr_p = NULL;
7270 return GS_ALL_DONE;
7274 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7275 is the cleanup action required. EH_ONLY is true if the cleanup should
7276 only be executed if an exception is thrown, not on normal exit.
7277 If FORCE_UNCOND is true, perform the cleanup unconditionally; this is
7278 only valid for clobbers. */
7280 static void
7281 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7282 bool force_uncond = false)
7284 gimple *wce;
7285 gimple_seq cleanup_stmts = NULL;
7287 /* Errors can result in improperly nested cleanups, which in turn cause
7288 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7289 if (seen_error ())
7290 return;
7292 if (gimple_conditional_context ())
7294 /* If we're in a conditional context, this is more complex. We only
7295 want to run the cleanup if we actually ran the initialization that
7296 necessitates it, but we want to run it after the end of the
7297 conditional context. So we wrap the try/finally around the
7298 condition and use a flag to determine whether or not to actually
7299 run the destructor. Thus
7301 test ? f(A()) : 0
7303 becomes (approximately)
7305 flag = 0;
7306 try {
7307 if (test) { A::A(temp); flag = 1; val = f(temp); }
7308 else { val = 0; }
7309 } finally {
7310 if (flag) A::~A(temp);
7314 if (force_uncond)
7316 gimplify_stmt (&cleanup, &cleanup_stmts);
7317 wce = gimple_build_wce (cleanup_stmts);
7318 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7320 else
7322 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7323 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7324 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7326 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7327 gimplify_stmt (&cleanup, &cleanup_stmts);
7328 wce = gimple_build_wce (cleanup_stmts);
7329 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7331 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7332 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7333 gimplify_seq_add_stmt (pre_p, ftrue);
7335 /* Because of this manipulation, and the EH edges that jump
7336 threading cannot redirect, the temporary (VAR) will appear
7337 to be used uninitialized. Don't warn. */
7338 suppress_warning (var, OPT_Wuninitialized);
7341 else
7343 gimplify_stmt (&cleanup, &cleanup_stmts);
7344 wce = gimple_build_wce (cleanup_stmts);
7345 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7346 gimplify_seq_add_stmt (pre_p, wce);
7350 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
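/* As a hedged example (hypothetical tree dump), for a C++ call such as
   `foo (A ())' the argument temporary is represented as

       TARGET_EXPR <D.1234, A::A (&D.1234)>

   and this function registers the slot D.1234 as a local temporary,
   gimplifies the initializer into the pre-queue, pushes any required
   cleanup or clobber for the slot, and finally replaces the whole
   TARGET_EXPR by D.1234.  The name D.1234 is illustrative only.  */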
7352 static enum gimplify_status
7353 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7355 tree targ = *expr_p;
7356 tree temp = TARGET_EXPR_SLOT (targ);
7357 tree init = TARGET_EXPR_INITIAL (targ);
7358 enum gimplify_status ret;
7360 bool unpoison_empty_seq = false;
7361 gimple_stmt_iterator unpoison_it;
7363 if (init)
7365 gimple_seq init_pre_p = NULL;
7367 /* TARGET_EXPR temps aren't part of the enclosing block, so add them
7368 to the temps list. Also handle variable-length TARGET_EXPRs. */
7369 if (!poly_int_tree_p (DECL_SIZE (temp)))
7371 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7372 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7373 /* FIXME: this is correct only when the size of the type does
7374 not depend on expressions evaluated in init. */
7375 gimplify_vla_decl (temp, &init_pre_p);
7377 else
7379 /* Save the location where we need to place the unpoisoning; it's
7380 possible that the variable will later satisfy needs_to_live_in_memory. */
7381 unpoison_it = gsi_last (*pre_p);
7382 unpoison_empty_seq = gsi_end_p (unpoison_it);
7384 gimple_add_tmp_var (temp);
7387 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7388 expression is supposed to initialize the slot. */
7389 if (VOID_TYPE_P (TREE_TYPE (init)))
7390 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7391 fb_none);
7392 else
7394 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7395 init = init_expr;
7396 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7397 fb_none);
7398 init = NULL;
7399 ggc_free (init_expr);
7401 if (ret == GS_ERROR)
7403 /* PR c++/28266 Make sure this is expanded only once. */
7404 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7405 return GS_ERROR;
7408 if (init)
7409 gimplify_and_add (init, &init_pre_p);
7411 /* Add a clobber for the temporary going out of scope, like
7412 gimplify_bind_expr. But only if we did not promote the
7413 temporary to static storage. */
7414 if (gimplify_ctxp->in_cleanup_point_expr
7415 && !TREE_STATIC (temp)
7416 && needs_to_live_in_memory (temp))
7418 if (flag_stack_reuse == SR_ALL)
7420 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7421 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7422 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7424 if (asan_poisoned_variables
7425 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7426 && !TREE_STATIC (temp)
7427 && dbg_cnt (asan_use_after_scope)
7428 && !gimplify_omp_ctxp)
7430 tree asan_cleanup = build_asan_poison_call_expr (temp);
7431 if (asan_cleanup)
7433 if (unpoison_empty_seq)
7434 unpoison_it = gsi_start (*pre_p);
7436 asan_poison_variable (temp, false, &unpoison_it,
7437 unpoison_empty_seq);
7438 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7443 gimple_seq_add_seq (pre_p, init_pre_p);
7445 /* If needed, push the cleanup for the temp. */
7446 if (TARGET_EXPR_CLEANUP (targ))
7447 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7448 CLEANUP_EH_ONLY (targ), pre_p);
7450 /* Only expand this once. */
7451 TREE_OPERAND (targ, 3) = init;
7452 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7454 else
7455 /* We should have expanded this before. */
7456 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7458 *expr_p = temp;
7459 return GS_OK;
7462 /* Gimplification of expression trees. */
7464 /* Gimplify an expression which appears at statement context. The
7465 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7466 NULL, a new sequence is allocated.
7468 Return true if we actually added a statement to the queue. */
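/* A typical caller pattern (illustrative only):

       gimple_seq seq = NULL;
       tree stmt = ...;
       if (gimplify_stmt (&stmt, &seq))
         ... at least one statement was emitted into SEQ ...

   The return value is computed below simply by comparing the last node
   of *SEQ_P before and after gimplification.  */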
7470 bool
7471 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7473 gimple_seq_node last;
7475 last = gimple_seq_last (*seq_p);
7476 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7477 return last != gimple_seq_last (*seq_p);
7480 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
7481 parallels. If entries already exist, force them to be some flavor of
7482 private. If there is no enclosing parallel, do nothing. */
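/* For instance (hypothetical user code), given a VLA whose bound is
   needed inside a parallel region:

       void f (int n)
       {
       #pragma omp parallel
         {
           int a[n];
           ...
         }
       }

   the gimplified size temporaries for A's type are added as
   FIRSTPRIVATE on the enclosing parallel so the region can evaluate
   them.  */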
7484 void
7485 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7487 splay_tree_node n;
7489 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7490 return;
7494 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7495 if (n != NULL)
7497 if (n->value & GOVD_SHARED)
7498 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7499 else if (n->value & GOVD_MAP)
7500 n->value |= GOVD_MAP_TO_ONLY;
7501 else
7502 return;
7504 else if ((ctx->region_type & ORT_TARGET) != 0)
7506 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7507 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7508 else
7509 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7511 else if (ctx->region_type != ORT_WORKSHARE
7512 && ctx->region_type != ORT_TASKGROUP
7513 && ctx->region_type != ORT_SIMD
7514 && ctx->region_type != ORT_ACC
7515 && !(ctx->region_type & ORT_TARGET_DATA))
7516 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7518 ctx = ctx->outer_context;
7520 while (ctx);
7523 /* Similarly for each of the type sizes of TYPE. */
7525 static void
7526 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7528 if (type == NULL || type == error_mark_node)
7529 return;
7530 type = TYPE_MAIN_VARIANT (type);
7532 if (ctx->privatized_types->add (type))
7533 return;
7535 switch (TREE_CODE (type))
7537 case INTEGER_TYPE:
7538 case ENUMERAL_TYPE:
7539 case BOOLEAN_TYPE:
7540 case REAL_TYPE:
7541 case FIXED_POINT_TYPE:
7542 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7543 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7544 break;
7546 case ARRAY_TYPE:
7547 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7548 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7549 break;
7551 case RECORD_TYPE:
7552 case UNION_TYPE:
7553 case QUAL_UNION_TYPE:
7555 tree field;
7556 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7557 if (TREE_CODE (field) == FIELD_DECL)
7559 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7560 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7563 break;
7565 case POINTER_TYPE:
7566 case REFERENCE_TYPE:
7567 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7568 break;
7570 default:
7571 break;
7574 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7575 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7576 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7579 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7581 static void
7582 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7584 splay_tree_node n;
7585 unsigned int nflags;
7586 tree t;
7588 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7589 return;
7591 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7592 there are constructors involved somewhere. The exception is a shared
7593 clause; nothing is privatized in that case. */
7594 if ((flags & GOVD_SHARED) == 0
7595 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7596 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7597 flags |= GOVD_SEEN;
7599 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7600 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7602 /* We shouldn't be re-adding the decl with the same data
7603 sharing class. */
7604 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7605 nflags = n->value | flags;
7606 /* The only combination of data sharing classes we should see is
7607 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7608 reduction variables to be used in data sharing clauses. */
7609 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7610 || ((nflags & GOVD_DATA_SHARE_CLASS)
7611 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7612 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7613 n->value = nflags;
7614 return;
7617 /* When adding a variable-sized variable, we have to handle all sorts
7618 of additional bits of data: the pointer replacement variable, and
7619 the parameters of the type. */
7620 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7622 /* Add the pointer replacement variable as PRIVATE if the variable
7623 replacement is private, else FIRSTPRIVATE since we'll need the
7624 address of the original variable either for SHARED, or for the
7625 copy into or out of the context. */
7626 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7628 if (flags & GOVD_MAP)
7629 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7630 else if (flags & GOVD_PRIVATE)
7631 nflags = GOVD_PRIVATE;
7632 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7633 && (flags & GOVD_FIRSTPRIVATE))
7634 || (ctx->region_type == ORT_TARGET_DATA
7635 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7636 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7637 else
7638 nflags = GOVD_FIRSTPRIVATE;
7639 nflags |= flags & GOVD_SEEN;
7640 t = DECL_VALUE_EXPR (decl);
7641 gcc_assert (INDIRECT_REF_P (t));
7642 t = TREE_OPERAND (t, 0);
7643 gcc_assert (DECL_P (t));
7644 omp_add_variable (ctx, t, nflags);
7647 /* Add all of the variable and type parameters (which should have
7648 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7649 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7650 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7651 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7653 /* The variable-sized variable itself is never SHARED, only some form
7654 of PRIVATE. The sharing would take place via the pointer variable
7655 which we remapped above. */
7656 if (flags & GOVD_SHARED)
7657 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7658 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7660 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7661 alloca statement we generate for the variable, so make sure it
7662 is available. This isn't automatically needed for the SHARED
7663 case, since we won't be allocating local storage then.
7664 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7665 in that case omp_notice_variable will be called later,
7666 when it is gimplified. */
7667 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7668 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7669 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7671 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7672 && omp_privatize_by_reference (decl))
7674 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7676 /* Similar to the direct variable sized case above, we'll need the
7677 size of references being privatized. */
7678 if ((flags & GOVD_SHARED) == 0)
7680 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7681 if (t && DECL_P (t))
7682 omp_notice_variable (ctx, t, true);
7686 if (n != NULL)
7687 n->value |= flags;
7688 else
7689 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7691 /* For reductions clauses in OpenACC loop directives, by default create a
7692 copy clause on the enclosing parallel construct for carrying back the
7693 results. */
7694 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7696 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7697 while (outer_ctx)
7699 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7700 if (n != NULL)
7702 /* Ignore local variables and explicitly declared clauses. */
7703 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7704 break;
7705 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7707 /* According to the OpenACC spec, such a reduction variable
7708 should already have a copy map on a kernels construct;
7709 verify that here. */
7710 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7711 && (n->value & GOVD_MAP));
7713 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7715 /* Remove firstprivate and make it a copy map. */
7716 n->value &= ~GOVD_FIRSTPRIVATE;
7717 n->value |= GOVD_MAP;
7720 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7722 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7723 GOVD_MAP | GOVD_SEEN);
7724 break;
7726 outer_ctx = outer_ctx->outer_context;
8731 /* Notice a threadprivate variable DECL used in OMP context CTX.
8732 This just prints out diagnostics about threadprivate variable uses
8733 in target regions, order(concurrent) regions and untied tasks. If
8734 DECL2 is non-NULL, prevent this warning on that variable. */
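/* Illustrative (hypothetical) user code triggering these diagnostics:

       int t;
       #pragma omp threadprivate (t)

       #pragma omp target
       ... use of T here: "threadprivate variable ... used in target region"

       #pragma omp task untied
       ... use of T here: "threadprivate variable ... used in untied task"  */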
7736 static bool
7737 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7738 tree decl2)
7740 splay_tree_node n;
7741 struct gimplify_omp_ctx *octx;
7743 for (octx = ctx; octx; octx = octx->outer_context)
7744 if ((octx->region_type & ORT_TARGET) != 0
7745 || octx->order_concurrent)
7747 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7748 if (n == NULL)
7750 if (octx->order_concurrent)
7752 error ("threadprivate variable %qE used in a region with"
7753 " %<order(concurrent)%> clause", DECL_NAME (decl));
7754 inform (octx->location, "enclosing region");
7756 else
7758 error ("threadprivate variable %qE used in target region",
7759 DECL_NAME (decl));
7760 inform (octx->location, "enclosing target region");
7762 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7764 if (decl2)
7765 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7768 if (ctx->region_type != ORT_UNTIED_TASK)
7769 return false;
7770 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7771 if (n == NULL)
7773 error ("threadprivate variable %qE used in untied task",
7774 DECL_NAME (decl));
7775 inform (ctx->location, "enclosing task");
7776 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7778 if (decl2)
7779 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7780 return false;
7783 /* Return true if global var DECL is device resident. */
7785 static bool
7786 device_resident_p (tree decl)
7788 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7790 if (!attr)
7791 return false;
7793 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7795 tree c = TREE_VALUE (t);
7796 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7797 return true;
7800 return false;
7803 /* Return true if DECL has an ACC DECLARE attribute. */
7805 static bool
7806 is_oacc_declared (tree decl)
7808 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7809 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7810 return declared != NULL_TREE;
7813 /* Determine outer default flags for DECL mentioned in an OMP region
7814 but not declared in an enclosing clause.
7816 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7817 remapped firstprivate instead of shared. To some extent this is
7818 addressed in omp_firstprivatize_type_sizes, but not
7819 effectively. */
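/* A minimal example (hypothetical user code) of the default(none) case
   handled below:

       int x = 0;
       #pragma omp parallel default(none)
       x++;

   yields "'x' not specified in enclosing 'parallel'", whereas with
   default(shared) X would simply receive GOVD_SHARED.  */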
7821 static unsigned
7822 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7823 bool in_code, unsigned flags)
7825 enum omp_clause_default_kind default_kind = ctx->default_kind;
7826 enum omp_clause_default_kind kind;
7828 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7829 if (ctx->region_type & ORT_TASK)
7831 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7833 /* The event-handle specified by a detach clause should always be firstprivate,
7834 regardless of the current default. */
7835 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7836 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7838 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7839 default_kind = kind;
7840 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7841 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7842 /* For C/C++ default({,first}private), variables with static storage duration
7843 declared in a namespace or global scope and referenced in the construct
7844 must be specified explicitly, i.e. this acts as default(none). */
7845 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7846 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7847 && VAR_P (decl)
7848 && is_global_var (decl)
7849 && (DECL_FILE_SCOPE_P (decl)
7850 || (DECL_CONTEXT (decl)
7851 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7852 && !lang_GNU_Fortran ())
7853 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7855 switch (default_kind)
7857 case OMP_CLAUSE_DEFAULT_NONE:
7859 const char *rtype;
7861 if (ctx->region_type & ORT_PARALLEL)
7862 rtype = "parallel";
7863 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7864 rtype = "taskloop";
7865 else if (ctx->region_type & ORT_TASK)
7866 rtype = "task";
7867 else if (ctx->region_type & ORT_TEAMS)
7868 rtype = "teams";
7869 else
7870 gcc_unreachable ();
7872 error ("%qE not specified in enclosing %qs",
7873 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7874 inform (ctx->location, "enclosing %qs", rtype);
7876 /* FALLTHRU */
7877 case OMP_CLAUSE_DEFAULT_SHARED:
7878 flags |= GOVD_SHARED;
7879 break;
7880 case OMP_CLAUSE_DEFAULT_PRIVATE:
7881 flags |= GOVD_PRIVATE;
7882 break;
7883 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7884 flags |= GOVD_FIRSTPRIVATE;
7885 break;
7886 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7887 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7888 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7889 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7891 omp_notice_variable (octx, decl, in_code);
7892 for (; octx; octx = octx->outer_context)
7894 splay_tree_node n2;
7896 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7897 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7898 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7899 continue;
7900 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7902 flags |= GOVD_FIRSTPRIVATE;
7903 goto found_outer;
7905 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7907 flags |= GOVD_SHARED;
7908 goto found_outer;
7913 if (TREE_CODE (decl) == PARM_DECL
7914 || (!is_global_var (decl)
7915 && DECL_CONTEXT (decl) == current_function_decl))
7916 flags |= GOVD_FIRSTPRIVATE;
7917 else
7918 flags |= GOVD_SHARED;
7919 found_outer:
7920 break;
7922 default:
7923 gcc_unreachable ();
7926 return flags;
7929 /* Return string name for types of OpenACC constructs from ORT_* values. */
7931 static const char *
7932 oacc_region_type_name (enum omp_region_type region_type)
7934 switch (region_type)
7936 case ORT_ACC_DATA:
7937 return "data";
7938 case ORT_ACC_PARALLEL:
7939 return "parallel";
7940 case ORT_ACC_KERNELS:
7941 return "kernels";
7942 case ORT_ACC_SERIAL:
7943 return "serial";
7944 default:
7945 gcc_unreachable ();
7949 /* Determine outer default flags for DECL mentioned in an OACC region
7950 but not declared in an enclosing clause. */
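/* A sketch of the defaults computed below, for hypothetical user code:

       double a[100];
       double s = 0.0;
       #pragma acc parallel
       for (int i = 0; i < 100; i++)
         s += a[i];

   With no 'default' clause, the aggregate A defaults to GOVD_MAP
   (present_or_copy) and the scalar S to GOVD_FIRSTPRIVATE; on a
   'kernels' construct S would instead default to 'copy'
   (GOVD_MAP | GOVD_MAP_FORCE).  */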
7952 static unsigned
7953 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7955 struct gimplify_omp_ctx *ctx_default = ctx;
7956 /* If no 'default' clause appears on this compute construct... */
7957 if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
7959 /* ..., see if one appears on a lexically containing 'data'
7960 construct. */
7961 while ((ctx_default = ctx_default->outer_context))
7963 if (ctx_default->region_type == ORT_ACC_DATA
7964 && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
7965 break;
7967 /* If not, reset. */
7968 if (!ctx_default)
7969 ctx_default = ctx;
7972 bool on_device = false;
7973 bool is_private = false;
7974 bool declared = is_oacc_declared (decl);
7975 tree type = TREE_TYPE (decl);
7977 if (omp_privatize_by_reference (decl))
7978 type = TREE_TYPE (type);
7980 /* For Fortran COMMON blocks, only the variables used in those blocks are
7981 transferred and remapped. The block itself will have a private clause to
7982 avoid transferring the data twice.
7983 The hook evaluates to false by default. For a variable in Fortran's COMMON
7984 or EQUIVALENCE block, it returns 'true' (as we have shared=false), because
7985 only the variables in such a COMMON/EQUIVALENCE block shall be privatized,
7986 not the whole block. For C++ and Fortran, it can also be true under certain
7987 other conditions, if DECL_HAS_VALUE_EXPR. */
7988 if (RECORD_OR_UNION_TYPE_P (type))
7989 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7991 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7992 && is_global_var (decl)
7993 && device_resident_p (decl)
7994 && !is_private)
7996 on_device = true;
7997 flags |= GOVD_MAP_TO_ONLY;
8000 switch (ctx->region_type)
8002 case ORT_ACC_KERNELS:
8003 if (is_private)
8004 flags |= GOVD_FIRSTPRIVATE;
8005 else if (AGGREGATE_TYPE_P (type))
8007 /* Aggregates default to 'present_or_copy', or 'present'. */
8008 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8009 flags |= GOVD_MAP;
8010 else
8011 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8013 else
8014 /* Scalars default to 'copy'. */
8015 flags |= GOVD_MAP | GOVD_MAP_FORCE;
8017 break;
8019 case ORT_ACC_PARALLEL:
8020 case ORT_ACC_SERIAL:
8021 if (is_private)
8022 flags |= GOVD_FIRSTPRIVATE;
8023 else if (on_device || declared)
8024 flags |= GOVD_MAP;
8025 else if (AGGREGATE_TYPE_P (type))
8027 /* Aggregates default to 'present_or_copy', or 'present'. */
8028 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8029 flags |= GOVD_MAP;
8030 else
8031 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8033 else
8034 /* Scalars default to 'firstprivate'. */
8035 flags |= GOVD_FIRSTPRIVATE;
8037 break;
8039 default:
8040 gcc_unreachable ();
8043 if (DECL_ARTIFICIAL (decl))
8044 ; /* We can get compiler-generated decls, and should not complain
8045 about them. */
8046 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
8048 error ("%qE not specified in enclosing OpenACC %qs construct",
8049 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
8050 oacc_region_type_name (ctx->region_type));
8051 if (ctx_default != ctx)
8052 inform (ctx->location, "enclosing OpenACC %qs construct and",
8053 oacc_region_type_name (ctx->region_type));
8054 inform (ctx_default->location,
8055 "enclosing OpenACC %qs construct with %qs clause",
8056 oacc_region_type_name (ctx_default->region_type),
8057 "default(none)");
8059 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
8060 ; /* Handled above. */
8061 else
8062 gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
8064 return flags;
8067 /* Record the fact that DECL was used within the OMP context CTX.
8068 IN_CODE is true when real code uses DECL, and false when we should
8069 merely emit default(none) errors. Return true if DECL is going to
8070 be remapped and thus DECL shouldn't be gimplified into its
8071 DECL_VALUE_EXPR (if any). */
8073 static bool
8074 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
8076 splay_tree_node n;
8077 unsigned flags = in_code ? GOVD_SEEN : 0;
8078 bool ret = false, shared;
8080 if (error_operand_p (decl))
8081 return false;
8083 if (DECL_ARTIFICIAL (decl))
8085 tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
8086 if (attr)
8087 decl = TREE_VALUE (TREE_VALUE (attr));
8090 if (ctx->region_type == ORT_NONE)
8091 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
8093 if (is_global_var (decl))
8095 /* Threadprivate variables are predetermined. */
8096 if (DECL_THREAD_LOCAL_P (decl))
8097 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
8099 if (DECL_HAS_VALUE_EXPR_P (decl))
8101 if (ctx->region_type & ORT_ACC)
8102 /* For OpenACC, defer expansion of the value to avoid transferring
8103 privatized common-block data instead of the implicitly or explicitly
8104 transferred variables that are in common blocks. */
8106 else
8108 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8110 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
8111 return omp_notice_threadprivate_variable (ctx, decl, value);
8115 if (gimplify_omp_ctxp->outer_context == NULL
8116 && VAR_P (decl)
8117 && oacc_get_fn_attrib (current_function_decl))
8119 location_t loc = DECL_SOURCE_LOCATION (decl);
8121 if (lookup_attribute ("omp declare target link",
8122 DECL_ATTRIBUTES (decl)))
8124 error_at (loc,
8125 "%qE with %<link%> clause used in %<routine%> function",
8126 DECL_NAME (decl));
8127 return false;
8129 else if (!lookup_attribute ("omp declare target",
8130 DECL_ATTRIBUTES (decl)))
8132 error_at (loc,
8133 "%qE requires a %<declare%> directive for use "
8134 "in a %<routine%> function", DECL_NAME (decl));
8135 return false;
8140 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8141 if ((ctx->region_type & ORT_TARGET) != 0)
8143 if (ctx->region_type & ORT_ACC)
8144 /* For OpenACC, as remarked above, defer expansion. */
8145 shared = false;
8146 else
8147 shared = true;
8149 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8150 if (n == NULL)
8152 unsigned nflags = flags;
8153 if ((ctx->region_type & ORT_ACC) == 0)
8155 bool is_declare_target = false;
8156 if (is_global_var (decl)
8157 && varpool_node::get_create (decl)->offloadable)
8159 struct gimplify_omp_ctx *octx;
8160 for (octx = ctx->outer_context;
8161 octx; octx = octx->outer_context)
8163 n = splay_tree_lookup (octx->variables,
8164 (splay_tree_key)decl);
8165 if (n
8166 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
8167 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8168 break;
8170 is_declare_target = octx == NULL;
8172 if (!is_declare_target)
8174 int gdmk;
8175 enum omp_clause_defaultmap_kind kind;
8176 if (lang_hooks.decls.omp_allocatable_p (decl))
8177 gdmk = GDMK_ALLOCATABLE;
8178 else if (lang_hooks.decls.omp_scalar_target_p (decl))
8179 gdmk = GDMK_SCALAR_TARGET;
8180 else if (lang_hooks.decls.omp_scalar_p (decl, false))
8181 gdmk = GDMK_SCALAR;
8182 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
8183 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8184 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
8185 == POINTER_TYPE)))
8186 gdmk = GDMK_POINTER;
8187 else
8188 gdmk = GDMK_AGGREGATE;
8189 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
8190 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
8192 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
8193 nflags |= GOVD_FIRSTPRIVATE;
8194 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
8195 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
8196 else
8197 gcc_unreachable ();
8199 else if (ctx->defaultmap[gdmk] == 0)
8201 tree d = lang_hooks.decls.omp_report_decl (decl);
8202 error ("%qE not specified in enclosing %<target%>",
8203 DECL_NAME (d));
8204 inform (ctx->location, "enclosing %<target%>");
8206 else if (ctx->defaultmap[gdmk]
8207 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
8208 nflags |= ctx->defaultmap[gdmk];
8209 else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
8211 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8212 nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
8214 else
8216 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8217 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
8222 struct gimplify_omp_ctx *octx = ctx->outer_context;
8223 if ((ctx->region_type & ORT_ACC) && octx)
8225 /* Look in outer OpenACC contexts, to see if there's a
8226 data attribute for this variable. */
8227 omp_notice_variable (octx, decl, in_code);
8229 for (; octx; octx = octx->outer_context)
8231 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
8232 break;
8233 splay_tree_node n2
8234 = splay_tree_lookup (octx->variables,
8235 (splay_tree_key) decl);
8236 if (n2)
8238 if (octx->region_type == ORT_ACC_HOST_DATA)
8239 error ("variable %qE declared in enclosing "
8240 "%<host_data%> region", DECL_NAME (decl));
8241 nflags |= GOVD_MAP;
8242 if (octx->region_type == ORT_ACC_DATA
8243 && (n2->value & GOVD_MAP_0LEN_ARRAY))
8244 nflags |= GOVD_MAP_0LEN_ARRAY;
8245 goto found_outer;
8250 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
8251 | GOVD_MAP_ALLOC_ONLY)) == flags)
8253 tree type = TREE_TYPE (decl);
8255 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8256 && omp_privatize_by_reference (decl))
8257 type = TREE_TYPE (type);
8258 if (!omp_mappable_type (type))
8260 error ("%qD referenced in target region does not have "
8261 "a mappable type", decl);
8262 nflags |= GOVD_MAP | GOVD_EXPLICIT;
8264 else
8266 if ((ctx->region_type & ORT_ACC) != 0)
8267 nflags = oacc_default_clause (ctx, decl, flags);
8268 else
8269 nflags |= GOVD_MAP;
8272 found_outer:
8273 omp_add_variable (ctx, decl, nflags);
8275 else
8277 /* If nothing changed, there's nothing left to do. */
8278 if ((n->value & flags) == flags)
8279 return ret;
8280 flags |= n->value;
8281 n->value = flags;
8283 goto do_outer;
8286 if (n == NULL)
8288 if (ctx->region_type == ORT_WORKSHARE
8289 || ctx->region_type == ORT_TASKGROUP
8290 || ctx->region_type == ORT_SIMD
8291 || ctx->region_type == ORT_ACC
8292 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8293 goto do_outer;
8295 flags = omp_default_clause (ctx, decl, in_code, flags);
8297 if ((flags & GOVD_PRIVATE)
8298 && lang_hooks.decls.omp_private_outer_ref (decl))
8299 flags |= GOVD_PRIVATE_OUTER_REF;
8301 omp_add_variable (ctx, decl, flags);
8303 shared = (flags & GOVD_SHARED) != 0;
8304 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8305 goto do_outer;
8308 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8309 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
8310 if (ctx->region_type == ORT_SIMD
8311 && ctx->in_for_exprs
8312 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8313 == GOVD_PRIVATE))
8314 flags &= ~GOVD_SEEN;
8316 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8317 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8318 && DECL_SIZE (decl))
8320 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8322 splay_tree_node n2;
8323 tree t = DECL_VALUE_EXPR (decl);
8324 gcc_assert (INDIRECT_REF_P (t));
8325 t = TREE_OPERAND (t, 0);
8326 gcc_assert (DECL_P (t));
8327 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8328 n2->value |= GOVD_SEEN;
8330 else if (omp_privatize_by_reference (decl)
8331 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8332 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8333 != INTEGER_CST))
8335 splay_tree_node n2;
8336 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8337 gcc_assert (DECL_P (t));
8338 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8339 if (n2)
8340 omp_notice_variable (ctx, t, true);
8344 if (ctx->region_type & ORT_ACC)
8345 /* For OpenACC, as remarked above, defer expansion. */
8346 shared = false;
8347 else
8348 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8349 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8351 /* If nothing changed, there's nothing left to do. */
8352 if ((n->value & flags) == flags)
8353 return ret;
8354 flags |= n->value;
8355 n->value = flags;
8357 do_outer:
8358 /* If the variable is private in the current context, then we don't
8359 need to propagate anything to an outer context. */
8360 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8361 return ret;
8362 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8363 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8364 return ret;
8365 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8366 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8367 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8368 return ret;
8369 if (ctx->outer_context
8370 && omp_notice_variable (ctx->outer_context, decl, in_code))
8371 return true;
8372 return ret;
8375 /* Verify that DECL is private within CTX. If there's specific information
8376 to the contrary in the innermost scope, generate an error. */
8378 static bool
8379 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8381 splay_tree_node n;
8383 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8384 if (n != NULL)
8386 if (n->value & GOVD_SHARED)
8388 if (ctx == gimplify_omp_ctxp)
8390 if (simd)
8391 error ("iteration variable %qE is predetermined linear",
8392 DECL_NAME (decl));
8393 else
8394 error ("iteration variable %qE should be private",
8395 DECL_NAME (decl));
8396 n->value = GOVD_PRIVATE;
8397 return true;
8399 else
8400 return false;
8402 else if ((n->value & GOVD_EXPLICIT) != 0
8403 && (ctx == gimplify_omp_ctxp
8404 || (ctx->region_type == ORT_COMBINED_PARALLEL
8405 && gimplify_omp_ctxp->outer_context == ctx)))
8407 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8408 error ("iteration variable %qE should not be firstprivate",
8409 DECL_NAME (decl));
8410 else if ((n->value & GOVD_REDUCTION) != 0)
8411 error ("iteration variable %qE should not be reduction",
8412 DECL_NAME (decl));
8413 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8414 error ("iteration variable %qE should not be linear",
8415 DECL_NAME (decl));
8417 return (ctx == gimplify_omp_ctxp
8418 || (ctx->region_type == ORT_COMBINED_PARALLEL
8419 && gimplify_omp_ctxp->outer_context == ctx));
8422 if (ctx->region_type != ORT_WORKSHARE
8423 && ctx->region_type != ORT_TASKGROUP
8424 && ctx->region_type != ORT_SIMD
8425 && ctx->region_type != ORT_ACC)
8426 return false;
8427 else if (ctx->outer_context)
8428 return omp_is_private (ctx->outer_context, decl, simd);
8429 return false;
8432 /* Return true if DECL is private within a parallel region
8433 that binds to the current construct's context or in parallel
8434 region's REDUCTION clause. */
8436 static bool
8437 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8439 splay_tree_node n;
8443 ctx = ctx->outer_context;
8444 if (ctx == NULL)
8446 if (is_global_var (decl))
8447 return false;
8449 /* References might be private, but they might be shared too.
8450 When checking for copyprivate, assume they might be
8451 private; otherwise assume they might be shared. */
8452 if (copyprivate)
8453 return true;
8455 if (omp_privatize_by_reference (decl))
8456 return false;
8458 /* Treat C++ privatized non-static data members outside
8459 of the privatization region the same way. */
8460 if (omp_member_access_dummy_var (decl))
8461 return false;
8463 return true;
8466 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8468 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8469 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8471 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8472 || n == NULL
8473 || (n->value & GOVD_MAP) == 0)
8474 continue;
8475 return false;
8478 if (n != NULL)
8480 if ((n->value & GOVD_LOCAL) != 0
8481 && omp_member_access_dummy_var (decl))
8482 return false;
8483 return (n->value & GOVD_SHARED) == 0;
8486 if (ctx->region_type == ORT_WORKSHARE
8487 || ctx->region_type == ORT_TASKGROUP
8488 || ctx->region_type == ORT_SIMD
8489 || ctx->region_type == ORT_ACC)
8490 continue;
8492 break;
8494 while (1);
8495 return false;
8498 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8500 static tree
8501 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8503 tree t = *tp;
8505 /* If this is the DECL_EXPR for the decl we are looking for, return it. */
8506 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8507 return t;
8509 if (IS_TYPE_OR_DECL_P (t))
8510 *walk_subtrees = 0;
8511 return NULL_TREE;
8515 /* Gimplify the affinity clause but effectively ignore it.
8516 Generate:
8517 var = begin;
8518 if ((step > 0) ? var <= end : var > end)
8519 locator_var_expr; */
8521 static void
8522 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8524 tree last_iter = NULL_TREE;
8525 tree last_bind = NULL_TREE;
8526 tree label = NULL_TREE;
8527 tree *last_body = NULL;
8528 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8529 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8531 tree t = OMP_CLAUSE_DECL (c);
8532 if (TREE_CODE (t) == TREE_LIST
8533 && TREE_PURPOSE (t)
8534 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8536 if (TREE_VALUE (t) == null_pointer_node)
8537 continue;
8538 if (TREE_PURPOSE (t) != last_iter)
8540 if (last_bind)
8542 append_to_statement_list (label, last_body);
8543 gimplify_and_add (last_bind, pre_p);
8544 last_bind = NULL_TREE;
8546 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8548 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8549 is_gimple_val, fb_rvalue) == GS_ERROR
8550 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8551 is_gimple_val, fb_rvalue) == GS_ERROR
8552 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8553 is_gimple_val, fb_rvalue) == GS_ERROR
8554 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8555 is_gimple_val, fb_rvalue)
8556 == GS_ERROR))
8557 return;
8559 last_iter = TREE_PURPOSE (t);
8560 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8561 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8562 NULL, block);
8563 last_body = &BIND_EXPR_BODY (last_bind);
8564 tree cond = NULL_TREE;
8565 location_t loc = OMP_CLAUSE_LOCATION (c);
8566 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8568 tree var = TREE_VEC_ELT (it, 0);
8569 tree begin = TREE_VEC_ELT (it, 1);
8570 tree end = TREE_VEC_ELT (it, 2);
8571 tree step = TREE_VEC_ELT (it, 3);
8572 loc = DECL_SOURCE_LOCATION (var);
8573 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8574 var, begin);
8575 append_to_statement_list_force (tem, last_body);
8577 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8578 step, build_zero_cst (TREE_TYPE (step)));
8579 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8580 var, end);
8581 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8582 var, end);
8583 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8584 cond1, cond2, cond3);
8585 if (cond)
8586 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8587 boolean_type_node, cond, cond1);
8588 else
8589 cond = cond1;
8591 tree cont_label = create_artificial_label (loc);
8592 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8593 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8594 void_node,
8595 build_and_jump (&cont_label));
8596 append_to_statement_list_force (tem, last_body);
8598 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8600 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8601 last_body);
8602 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8604 if (error_operand_p (TREE_VALUE (t)))
8605 return;
8606 append_to_statement_list_force (TREE_VALUE (t), last_body);
8607 TREE_VALUE (t) = null_pointer_node;
8609 else
8611 if (last_bind)
8613 append_to_statement_list (label, last_body);
8614 gimplify_and_add (last_bind, pre_p);
8615 last_bind = NULL_TREE;
8617 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8619 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8620 NULL, is_gimple_val, fb_rvalue);
8621 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8623 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8624 return;
8625 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8626 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8627 return;
8628 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8631 if (last_bind)
8633 append_to_statement_list (label, last_body);
8634 gimplify_and_add (last_bind, pre_p);
8636 return;
8639 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8640 lower all the depend clauses by populating the corresponding depend
8641 array. Return 0 if there are no such depend clauses, 2 if all
8642 depend clauses should be removed, and 1 otherwise. */
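/* A sketch, derived from the code below, of the array layout in the new
   (non-"is_old") format; indices are illustrative:

       array[0] = 0
       array[1] = total number of depend addresses
       array[2] = count of out/inout entries      (counts[0])
       array[3] = count of mutexinoutset entries  (counts[1])
       array[4] = count of in entries             (counts[2])
       array[5]... = the addresses themselves, grouped by kind, with
                     inoutset entries taking an extra trailing slot each.

   In the old format only { total, out/inout count, addresses... } is
   emitted.  */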
8644 static int
8645 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8647 tree c;
8648 gimple *g;
8649 size_t n[5] = { 0, 0, 0, 0, 0 };
8650 bool unused[5];
8651 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8652 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8653 size_t i, j;
8654 location_t first_loc = UNKNOWN_LOCATION;
8656 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8657 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8659 switch (OMP_CLAUSE_DEPEND_KIND (c))
8661 case OMP_CLAUSE_DEPEND_IN:
8662 i = 2;
8663 break;
8664 case OMP_CLAUSE_DEPEND_OUT:
8665 case OMP_CLAUSE_DEPEND_INOUT:
8666 i = 0;
8667 break;
8668 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8669 i = 1;
8670 break;
8671 case OMP_CLAUSE_DEPEND_DEPOBJ:
8672 i = 3;
8673 break;
8674 case OMP_CLAUSE_DEPEND_INOUTSET:
8675 i = 4;
8676 break;
8677 default:
8678 gcc_unreachable ();
8680 tree t = OMP_CLAUSE_DECL (c);
8681 if (first_loc == UNKNOWN_LOCATION)
8682 first_loc = OMP_CLAUSE_LOCATION (c);
8683 if (TREE_CODE (t) == TREE_LIST
8684 && TREE_PURPOSE (t)
8685 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8687 if (TREE_PURPOSE (t) != last_iter)
8689 tree tcnt = size_one_node;
8690 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8692 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8693 is_gimple_val, fb_rvalue) == GS_ERROR
8694 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8695 is_gimple_val, fb_rvalue) == GS_ERROR
8696 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8697 is_gimple_val, fb_rvalue) == GS_ERROR
8698 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8699 is_gimple_val, fb_rvalue)
8700 == GS_ERROR))
8701 return 2;
8702 tree var = TREE_VEC_ELT (it, 0);
8703 tree begin = TREE_VEC_ELT (it, 1);
8704 tree end = TREE_VEC_ELT (it, 2);
8705 tree step = TREE_VEC_ELT (it, 3);
8706 tree orig_step = TREE_VEC_ELT (it, 4);
8707 tree type = TREE_TYPE (var);
8708 tree stype = TREE_TYPE (step);
8709 location_t loc = DECL_SOURCE_LOCATION (var);
8710 tree endmbegin;
8711 /* Compute count for this iterator as
8712 orig_step > 0
8713 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8714 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8715 and compute product of those for the entire depend
8716 clause. */
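/* Worked example with illustrative numbers: begin=0, end=10, step=3 and
   orig_step > 0 gives count = (10 - 0 + (3 - 1)) / 3 = 4, matching the
   iterations 0, 3, 6 and 9; the per-iterator counts are multiplied
   into TCNT for the whole depend clause.  */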
8717 if (POINTER_TYPE_P (type))
8718 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8719 stype, end, begin);
8720 else
8721 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8722 end, begin);
8723 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8724 step,
8725 build_int_cst (stype, 1));
8726 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8727 build_int_cst (stype, 1));
8728 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8729 unshare_expr (endmbegin),
8730 stepm1);
8731 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8732 pos, step);
8733 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8734 endmbegin, stepp1);
8735 if (TYPE_UNSIGNED (stype))
8737 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8738 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8740 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8741 neg, step);
8742 step = NULL_TREE;
8743 tree cond = fold_build2_loc (loc, LT_EXPR,
8744 boolean_type_node,
8745 begin, end);
8746 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8747 build_int_cst (stype, 0));
8748 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8749 end, begin);
8750 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8751 build_int_cst (stype, 0));
8752 tree osteptype = TREE_TYPE (orig_step);
8753 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8754 orig_step,
8755 build_int_cst (osteptype, 0));
8756 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8757 cond, pos, neg);
8758 cnt = fold_convert_loc (loc, sizetype, cnt);
8759 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8760 fb_rvalue) == GS_ERROR)
8761 return 2;
8762 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8764 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8765 fb_rvalue) == GS_ERROR)
8766 return 2;
8767 last_iter = TREE_PURPOSE (t);
8768 last_count = tcnt;
8770 if (counts[i] == NULL_TREE)
8771 counts[i] = last_count;
8772 else
8773 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8774 PLUS_EXPR, counts[i], last_count);
8776 else
8777 n[i]++;
8779 for (i = 0; i < 5; i++)
8780 if (counts[i])
8781 break;
8782 if (i == 5)
8783 return 0;
8785 tree total = size_zero_node;
8786 for (i = 0; i < 5; i++)
8788 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8789 if (counts[i] == NULL_TREE)
8790 counts[i] = size_zero_node;
8791 if (n[i])
8792 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8793 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8794 fb_rvalue) == GS_ERROR)
8795 return 2;
8796 total = size_binop (PLUS_EXPR, total, counts[i]);
8799 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8800 == GS_ERROR)
8801 return 2;
8802 bool is_old = unused[1] && unused[3] && unused[4];
8803 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8804 size_int (is_old ? 1 : 4));
8805 if (!unused[4])
8806 totalpx = size_binop (PLUS_EXPR, totalpx,
8807 size_binop (MULT_EXPR, counts[4], size_int (2)));
8808 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8809 tree array = create_tmp_var_raw (type);
8810 TREE_ADDRESSABLE (array) = 1;
8811 if (!poly_int_tree_p (totalpx))
8813 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8814 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8815 if (gimplify_omp_ctxp)
8817 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8818 while (ctx
8819 && (ctx->region_type == ORT_WORKSHARE
8820 || ctx->region_type == ORT_TASKGROUP
8821 || ctx->region_type == ORT_SIMD
8822 || ctx->region_type == ORT_ACC))
8823 ctx = ctx->outer_context;
8824 if (ctx)
8825 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8827 gimplify_vla_decl (array, pre_p);
8829 else
8830 gimple_add_tmp_var (array);
8831 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8832 NULL_TREE);
8833 tree tem;
8834 if (!is_old)
8836 tem = build2 (MODIFY_EXPR, void_type_node, r,
8837 build_int_cst (ptr_type_node, 0));
8838 gimplify_and_add (tem, pre_p);
8839 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8840 NULL_TREE);
8842 tem = build2 (MODIFY_EXPR, void_type_node, r,
8843 fold_convert (ptr_type_node, total));
8844 gimplify_and_add (tem, pre_p);
8845 for (i = 1; i < (is_old ? 2 : 4); i++)
8847 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8848 NULL_TREE, NULL_TREE);
8849 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8850 gimplify_and_add (tem, pre_p);
8853 tree cnts[6];
8854 for (j = 5; j; j--)
8855 if (!unused[j - 1])
8856 break;
8857 for (i = 0; i < 5; i++)
8859 if (i && (i >= j || unused[i - 1]))
8861 cnts[i] = cnts[i - 1];
8862 continue;
8864 cnts[i] = create_tmp_var (sizetype);
8865 if (i == 0)
8866 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8867 else
8869 tree t;
8870 if (is_old)
8871 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8872 else
8873 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8874 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8875 == GS_ERROR)
8876 return 2;
8877 g = gimple_build_assign (cnts[i], t);
8879 gimple_seq_add_stmt (pre_p, g);
8881 if (unused[4])
8882 cnts[5] = NULL_TREE;
8883 else
8885 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8886 cnts[5] = create_tmp_var (sizetype);
8887 g = gimple_build_assign (cnts[5], t);
8888 gimple_seq_add_stmt (pre_p, g);
8891 last_iter = NULL_TREE;
8892 tree last_bind = NULL_TREE;
8893 tree *last_body = NULL;
8894 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8895 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8897 switch (OMP_CLAUSE_DEPEND_KIND (c))
8899 case OMP_CLAUSE_DEPEND_IN:
8900 i = 2;
8901 break;
8902 case OMP_CLAUSE_DEPEND_OUT:
8903 case OMP_CLAUSE_DEPEND_INOUT:
8904 i = 0;
8905 break;
8906 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8907 i = 1;
8908 break;
8909 case OMP_CLAUSE_DEPEND_DEPOBJ:
8910 i = 3;
8911 break;
8912 case OMP_CLAUSE_DEPEND_INOUTSET:
8913 i = 4;
8914 break;
8915 default:
8916 gcc_unreachable ();
8918 tree t = OMP_CLAUSE_DECL (c);
8919 if (TREE_CODE (t) == TREE_LIST
8920 && TREE_PURPOSE (t)
8921 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8923 if (TREE_PURPOSE (t) != last_iter)
8925 if (last_bind)
8926 gimplify_and_add (last_bind, pre_p);
8927 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8928 last_bind = build3 (BIND_EXPR, void_type_node,
8929 BLOCK_VARS (block), NULL, block);
8930 TREE_SIDE_EFFECTS (last_bind) = 1;
8931 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8932 tree *p = &BIND_EXPR_BODY (last_bind);
8933 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8935 tree var = TREE_VEC_ELT (it, 0);
8936 tree begin = TREE_VEC_ELT (it, 1);
8937 tree end = TREE_VEC_ELT (it, 2);
8938 tree step = TREE_VEC_ELT (it, 3);
8939 tree orig_step = TREE_VEC_ELT (it, 4);
8940 tree type = TREE_TYPE (var);
8941 location_t loc = DECL_SOURCE_LOCATION (var);
8942 /* Emit:
8943 var = begin;
8944 goto cond_label;
8945 beg_label:
8947 var = var + step;
8948 cond_label:
8949 if (orig_step > 0) {
8950 if (var < end) goto beg_label;
8951 } else {
8952 if (var > end) goto beg_label;
8954 for each iterator, with inner iterators added to
8955 the ... above. */
8956 tree beg_label = create_artificial_label (loc);
8957 tree cond_label = NULL_TREE;
8958 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8959 var, begin);
8960 append_to_statement_list_force (tem, p);
8961 tem = build_and_jump (&cond_label);
8962 append_to_statement_list_force (tem, p);
8963 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8964 append_to_statement_list (tem, p);
8965 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8966 NULL_TREE, NULL_TREE);
8967 TREE_SIDE_EFFECTS (bind) = 1;
8968 SET_EXPR_LOCATION (bind, loc);
8969 append_to_statement_list_force (bind, p);
8970 if (POINTER_TYPE_P (type))
8971 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8972 var, fold_convert_loc (loc, sizetype,
8973 step));
8974 else
8975 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8976 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8977 var, tem);
8978 append_to_statement_list_force (tem, p);
8979 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8980 append_to_statement_list (tem, p);
8981 tree cond = fold_build2_loc (loc, LT_EXPR,
8982 boolean_type_node,
8983 var, end);
8984 tree pos
8985 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8986 cond, build_and_jump (&beg_label),
8987 void_node);
8988 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8989 var, end);
8990 tree neg
8991 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8992 cond, build_and_jump (&beg_label),
8993 void_node);
8994 tree osteptype = TREE_TYPE (orig_step);
8995 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8996 orig_step,
8997 build_int_cst (osteptype, 0));
8998 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8999 cond, pos, neg);
9000 append_to_statement_list_force (tem, p);
9001 p = &BIND_EXPR_BODY (bind);
9003 last_body = p;
9005 last_iter = TREE_PURPOSE (t);
9006 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
9008 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
9009 0), last_body);
9010 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
9012 if (error_operand_p (TREE_VALUE (t)))
9013 return 2;
9014 if (TREE_VALUE (t) != null_pointer_node)
9015 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
9016 if (i == 4)
9018 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9019 NULL_TREE, NULL_TREE);
9020 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9021 NULL_TREE, NULL_TREE);
9022 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9023 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9024 void_type_node, r, r2);
9025 append_to_statement_list_force (tem, last_body);
9026 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9027 void_type_node, cnts[i],
9028 size_binop (PLUS_EXPR, cnts[i],
9029 size_int (1)));
9030 append_to_statement_list_force (tem, last_body);
9031 i = 5;
9033 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9034 NULL_TREE, NULL_TREE);
9035 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9036 void_type_node, r, TREE_VALUE (t));
9037 append_to_statement_list_force (tem, last_body);
9038 if (i == 5)
9040 r = build4 (ARRAY_REF, ptr_type_node, array,
9041 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9042 NULL_TREE, NULL_TREE);
9043 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9044 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9045 void_type_node, r, tem);
9046 append_to_statement_list_force (tem, last_body);
9048 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9049 void_type_node, cnts[i],
9050 size_binop (PLUS_EXPR, cnts[i],
9051 size_int (1 + (i == 5))));
9052 append_to_statement_list_force (tem, last_body);
9053 TREE_VALUE (t) = null_pointer_node;
9055 else
9057 if (last_bind)
9059 gimplify_and_add (last_bind, pre_p);
9060 last_bind = NULL_TREE;
9062 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9064 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9065 NULL, is_gimple_val, fb_rvalue);
9066 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9068 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9069 return 2;
9070 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
9071 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9072 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9073 is_gimple_val, fb_rvalue) == GS_ERROR)
9074 return 2;
9075 if (i == 4)
9077 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9078 NULL_TREE, NULL_TREE);
9079 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9080 NULL_TREE, NULL_TREE);
9081 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9082 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
9083 gimplify_and_add (tem, pre_p);
9084 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
9085 cnts[i],
9086 size_int (1)));
9087 gimple_seq_add_stmt (pre_p, g);
9088 i = 5;
9090 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9091 NULL_TREE, NULL_TREE);
9092 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
9093 gimplify_and_add (tem, pre_p);
9094 if (i == 5)
9096 r = build4 (ARRAY_REF, ptr_type_node, array,
9097 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9098 NULL_TREE, NULL_TREE);
9099 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9100 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
9101 append_to_statement_list_force (tem, last_body);
9102 gimplify_and_add (tem, pre_p);
9104 g = gimple_build_assign (cnts[i],
9105 size_binop (PLUS_EXPR, cnts[i],
9106 size_int (1 + (i == 5))));
9107 gimple_seq_add_stmt (pre_p, g);
9110 if (last_bind)
9111 gimplify_and_add (last_bind, pre_p);
9112 tree cond = boolean_false_node;
9113 if (is_old)
9115 if (!unused[0])
9116 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
9117 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
9118 size_int (2)));
9119 if (!unused[2])
9120 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9121 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9122 cnts[2],
9123 size_binop_loc (first_loc, PLUS_EXPR,
9124 totalpx,
9125 size_int (1))));
9127 else
9129 tree prev = size_int (5);
9130 for (i = 0; i < 5; i++)
9132 if (unused[i])
9133 continue;
9134 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
9135 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9136 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9137 cnts[i], unshare_expr (prev)));
9140 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
9141 build_call_expr_loc (first_loc,
9142 builtin_decl_explicit (BUILT_IN_TRAP),
9143 0), void_node);
9144 gimplify_and_add (tem, pre_p);
9145 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9146 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
9147 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9148 OMP_CLAUSE_CHAIN (c) = *list_p;
9149 *list_p = c;
9150 return 1;
9153 /* For a set of mappings describing an array section pointed to by a struct
9154 (or derived type, etc.) component, create an "alloc" or "release" node to
9155 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9156 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9157 be created that is inserted into the list of mapping nodes attached to the
9158 directive being processed -- not part of the sorted list of nodes after
9159 GOMP_MAP_STRUCT.
9161 CODE is the code of the directive being processed. GRP_START and GRP_END
9162 are the first and last of two or three nodes representing this array section
9163 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9164 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9165 filled with the additional node described above, if needed.
9167 This function does not add the new nodes to any lists itself. It is the
9168 responsibility of the caller to do that. */
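/* A sketch for illustration (assumed clause shapes, not an exact dump):
   for a C mapping "map(from: s.p[0:n])" on "omp target exit data", the
   two-node group

     GOMP_MAP_FROM           s.p[0:n]   <-- GRP_START
     GOMP_MAP_ATTACH_DETACH  s.p        <-- GRP_END

   yields a GOMP_MAP_RELEASE node for "s.p" of pointer size (the return
   value), and *EXTRA_NODE is left NULL since there is no middle node.  */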
9170 static tree
9171 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
9172 tree *extra_node)
9174 enum gomp_map_kind mkind
9175 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
9176 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
9178 gcc_assert (grp_start != grp_end);
9180 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9181 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9182 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
9183 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
9184 tree grp_mid = NULL_TREE;
9185 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
9186 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
9188 if (grp_mid
9189 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9190 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
9191 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
9192 else
9193 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
9195 if (grp_mid
9196 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9197 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
9198 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
9200 tree c3
9201 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9202 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
9203 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
9204 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
9205 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
9207 *extra_node = c3;
9209 else
9210 *extra_node = NULL_TREE;
9212 return c2;
9215 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
9216 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the
9217 access relative to that object (the byte offset also accounts for any
9218 constant bit offset, rounded down to bytes). Return the containing
9219 object. */
9222 static tree
9223 extract_base_bit_offset (tree base, poly_int64 *bitposp,
9224 poly_offset_int *poffsetp)
9226 tree offset;
9227 poly_int64 bitsize, bitpos;
9228 machine_mode mode;
9229 int unsignedp, reversep, volatilep = 0;
9230 poly_offset_int poffset;
9232 STRIP_NOPS (base);
9234 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
9235 &unsignedp, &reversep, &volatilep);
9237 STRIP_NOPS (base);
9239 if (offset && poly_int_tree_p (offset))
9241 poffset = wi::to_poly_offset (offset);
9242 offset = NULL_TREE;
9244 else
9245 poffset = 0;
9247 if (maybe_ne (bitpos, 0))
9248 poffset += bits_to_bytes_round_down (bitpos);
9250 *bitposp = bitpos;
9251 *poffsetp = poffset;
9253 return base;
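/* For example (illustrative, assuming a typical LP64 target): given
   "struct S { int a; int b; } s;", a call on "s.b" returns "s" with
   *BITPOSP == 32 and *POFFSETP == 4.  */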
9256 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
9257 started processing the group yet. The TEMPORARY mark is used when we first
9258 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9259 when we have processed all the group's children (i.e. all the base pointers
9260 referred to by the group's mapping nodes, recursively). */
9262 enum omp_tsort_mark {
9263 UNVISITED,
9264 TEMPORARY,
9265 PERMANENT
9268 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
9269 but ignores side effects in the equality comparisons. */
9271 struct tree_operand_hash_no_se : tree_operand_hash
9273 static inline bool equal (const value_type &,
9274 const compare_type &);
9277 inline bool
9278 tree_operand_hash_no_se::equal (const value_type &t1,
9279 const compare_type &t2)
9281 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9284 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9285 clause. */
9287 struct omp_mapping_group {
9288 tree *grp_start;
9289 tree grp_end;
9290 omp_tsort_mark mark;
9291 /* If we've removed the group but need to reindex, mark the group as
9292 deleted. */
9293 bool deleted;
9294 struct omp_mapping_group *sibling;
9295 struct omp_mapping_group *next;
9298 DEBUG_FUNCTION void
9299 debug_mapping_group (omp_mapping_group *grp)
9301 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9302 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9303 debug_generic_expr (*grp->grp_start);
9304 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9307 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9308 isn't one. */
9310 static tree
9311 omp_get_base_pointer (tree expr)
9313 while (TREE_CODE (expr) == ARRAY_REF
9314 || TREE_CODE (expr) == COMPONENT_REF)
9315 expr = TREE_OPERAND (expr, 0);
9317 if (INDIRECT_REF_P (expr)
9318 || (TREE_CODE (expr) == MEM_REF
9319 && integer_zerop (TREE_OPERAND (expr, 1))))
9321 expr = TREE_OPERAND (expr, 0);
9322 while (TREE_CODE (expr) == COMPOUND_EXPR)
9323 expr = TREE_OPERAND (expr, 1);
9324 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9325 expr = TREE_OPERAND (expr, 0);
9326 if (TREE_CODE (expr) == SAVE_EXPR)
9327 expr = TREE_OPERAND (expr, 0);
9328 STRIP_NOPS (expr);
9329 return expr;
9332 return NULL_TREE;
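/* E.g. (illustrative): for "p->arr[3].f" the COMPONENT_REFs and ARRAY_REF
   are peeled off to reveal the indirection "*p", so "p" is returned; for
   a direct access "s.x" there is no indirection, so the result is
   NULL_TREE.  */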
9335 /* Remove COMPONENT_REFS and indirections from EXPR. */
9337 static tree
9338 omp_strip_components_and_deref (tree expr)
9340 while (TREE_CODE (expr) == COMPONENT_REF
9341 || INDIRECT_REF_P (expr)
9342 || (TREE_CODE (expr) == MEM_REF
9343 && integer_zerop (TREE_OPERAND (expr, 1)))
9344 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9345 || TREE_CODE (expr) == COMPOUND_EXPR)
9346 if (TREE_CODE (expr) == COMPOUND_EXPR)
9347 expr = TREE_OPERAND (expr, 1);
9348 else
9349 expr = TREE_OPERAND (expr, 0);
9351 STRIP_NOPS (expr);
9353 return expr;
9356 static tree
9357 omp_strip_indirections (tree expr)
9359 while (INDIRECT_REF_P (expr)
9360 || (TREE_CODE (expr) == MEM_REF
9361 && integer_zerop (TREE_OPERAND (expr, 1))))
9362 expr = TREE_OPERAND (expr, 0);
9364 return expr;
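/* Illustrative examples for the two strippers above:
   omp_strip_components_and_deref ("p->a.b") peels the COMPONENT_REFs and
   the indirection down to "p", whereas omp_strip_indirections only
   removes outer indirections, e.g. "**q" becomes "q" but "s.a" is
   returned unchanged.  */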
9367 /* An attach or detach operation depends directly on the address being
9368 attached/detached. Return that address, or none if there are no
9369 attachments/detachments. */
9371 static tree
9372 omp_get_attachment (omp_mapping_group *grp)
9374 tree node = *grp->grp_start;
9376 switch (OMP_CLAUSE_MAP_KIND (node))
9378 case GOMP_MAP_TO:
9379 case GOMP_MAP_FROM:
9380 case GOMP_MAP_TOFROM:
9381 case GOMP_MAP_ALWAYS_FROM:
9382 case GOMP_MAP_ALWAYS_TO:
9383 case GOMP_MAP_ALWAYS_TOFROM:
9384 case GOMP_MAP_FORCE_FROM:
9385 case GOMP_MAP_FORCE_TO:
9386 case GOMP_MAP_FORCE_TOFROM:
9387 case GOMP_MAP_FORCE_PRESENT:
9388 case GOMP_MAP_PRESENT_ALLOC:
9389 case GOMP_MAP_PRESENT_FROM:
9390 case GOMP_MAP_PRESENT_TO:
9391 case GOMP_MAP_PRESENT_TOFROM:
9392 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9393 case GOMP_MAP_ALWAYS_PRESENT_TO:
9394 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9395 case GOMP_MAP_ALLOC:
9396 case GOMP_MAP_RELEASE:
9397 case GOMP_MAP_DELETE:
9398 case GOMP_MAP_FORCE_ALLOC:
9399 if (node == grp->grp_end)
9400 return NULL_TREE;
9402 node = OMP_CLAUSE_CHAIN (node);
9403 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9405 gcc_assert (node != grp->grp_end);
9406 node = OMP_CLAUSE_CHAIN (node);
9408 if (node)
9409 switch (OMP_CLAUSE_MAP_KIND (node))
9411 case GOMP_MAP_POINTER:
9412 case GOMP_MAP_ALWAYS_POINTER:
9413 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9414 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9415 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9416 return NULL_TREE;
9418 case GOMP_MAP_ATTACH_DETACH:
9419 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9420 return OMP_CLAUSE_DECL (node);
9422 default:
9423 internal_error ("unexpected mapping node");
9425 return error_mark_node;
9427 case GOMP_MAP_TO_PSET:
9428 gcc_assert (node != grp->grp_end);
9429 node = OMP_CLAUSE_CHAIN (node);
9430 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9431 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9432 return OMP_CLAUSE_DECL (node);
9433 else
9434 internal_error ("unexpected mapping node");
9435 return error_mark_node;
9437 case GOMP_MAP_ATTACH:
9438 case GOMP_MAP_DETACH:
9439 node = OMP_CLAUSE_CHAIN (node);
9440 if (!node || *grp->grp_start == grp->grp_end)
9441 return OMP_CLAUSE_DECL (*grp->grp_start);
9442 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9443 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9444 return OMP_CLAUSE_DECL (*grp->grp_start);
9445 else
9446 internal_error ("unexpected mapping node");
9447 return error_mark_node;
9449 case GOMP_MAP_STRUCT:
9450 case GOMP_MAP_FORCE_DEVICEPTR:
9451 case GOMP_MAP_DEVICE_RESIDENT:
9452 case GOMP_MAP_LINK:
9453 case GOMP_MAP_IF_PRESENT:
9454 case GOMP_MAP_FIRSTPRIVATE:
9455 case GOMP_MAP_FIRSTPRIVATE_INT:
9456 case GOMP_MAP_USE_DEVICE_PTR:
9457 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9458 return NULL_TREE;
9460 default:
9461 internal_error ("unexpected mapping node");
9464 return error_mark_node;
9467 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9468 mappings, return the chain pointer to the end of that group in the list. */
9470 static tree *
9471 omp_group_last (tree *start_p)
9473 tree c = *start_p, nc, *grp_last_p = start_p;
9475 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9477 nc = OMP_CLAUSE_CHAIN (c);
9479 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9480 return grp_last_p;
9482 switch (OMP_CLAUSE_MAP_KIND (c))
9484 default:
9485 while (nc
9486 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9487 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9488 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9489 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9490 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9491 || (OMP_CLAUSE_MAP_KIND (nc)
9492 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9493 || (OMP_CLAUSE_MAP_KIND (nc)
9494 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9495 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9496 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9498 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9499 c = nc;
9500 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9501 if (nc2
9502 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9503 && (OMP_CLAUSE_MAP_KIND (nc)
9504 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9505 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9507 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9508 c = nc2;
9509 nc2 = OMP_CLAUSE_CHAIN (nc2);
9511 nc = nc2;
9513 break;
9515 case GOMP_MAP_ATTACH:
9516 case GOMP_MAP_DETACH:
9517 /* This is a weird artifact of how directives are parsed: bare attach or
9518 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9519 FIRSTPRIVATE_REFERENCE node. FIXME. */
9520 if (nc
9521 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9522 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9523 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9524 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9525 break;
9527 case GOMP_MAP_TO_PSET:
9528 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9529 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9530 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9531 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9532 break;
9534 case GOMP_MAP_STRUCT:
9536 unsigned HOST_WIDE_INT num_mappings
9537 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9538 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9539 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9540 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9541 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9542 for (unsigned i = 0; i < num_mappings; i++)
9543 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9545 break;
9548 return grp_last_p;
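/* For instance (a hypothetical clause list), given

     GOMP_MAP_TOFROM         s.p[0:n]
     GOMP_MAP_ATTACH_DETACH  s.p
     GOMP_MAP_TO             x

   a call with START_P pointing at the TOFROM node returns the address of
   its chain pointer, whose target -- the ATTACH_DETACH node -- is the last
   member of the group; the unrelated TO mapping starts a new group.  */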
9551 /* Walk through LIST_P up to GATHER_SENTINEL (if non-NULL), gathering groups
9552 of related mappings (e.g. OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM}
9553 followed by one or two associated GOMP_MAP_POINTER mappings) into GROUPS.
9554 The wrapper below returns the vector if at least one group was found. */
9556 static void
9557 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9558 tree gather_sentinel)
9560 for (tree *cp = list_p;
9561 *cp && *cp != gather_sentinel;
9562 cp = &OMP_CLAUSE_CHAIN (*cp))
9564 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9565 continue;
9567 tree *grp_last_p = omp_group_last (cp);
9568 omp_mapping_group grp;
9570 grp.grp_start = cp;
9571 grp.grp_end = *grp_last_p;
9572 grp.mark = UNVISITED;
9573 grp.sibling = NULL;
9574 grp.deleted = false;
9575 grp.next = NULL;
9576 groups->safe_push (grp);
9578 cp = grp_last_p;
9582 static vec<omp_mapping_group> *
9583 omp_gather_mapping_groups (tree *list_p)
9585 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9587 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9589 if (groups->length () > 0)
9590 return groups;
9591 else
9593 delete groups;
9594 return NULL;
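/* E.g. (hypothetical): for the clause list "map(to: a) firstprivate(b)
   map(from: c)", the non-map clause is skipped and two single-node groups
   are gathered, one for "a" and one for "c".  */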
9598 /* A pointer mapping group GRP may define a block of memory starting at some
9599 base address, and maybe also define a firstprivate pointer or firstprivate
9600 reference that points to that block. The return value is a node containing
9601 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9602 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9603 return the number of consecutive chained nodes in CHAINED. */
9605 static tree
9606 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9607 tree *firstprivate)
9609 tree node = *grp->grp_start;
9611 *firstprivate = NULL_TREE;
9612 *chained = 1;
9614 switch (OMP_CLAUSE_MAP_KIND (node))
9616 case GOMP_MAP_TO:
9617 case GOMP_MAP_FROM:
9618 case GOMP_MAP_TOFROM:
9619 case GOMP_MAP_ALWAYS_FROM:
9620 case GOMP_MAP_ALWAYS_TO:
9621 case GOMP_MAP_ALWAYS_TOFROM:
9622 case GOMP_MAP_FORCE_FROM:
9623 case GOMP_MAP_FORCE_TO:
9624 case GOMP_MAP_FORCE_TOFROM:
9625 case GOMP_MAP_FORCE_PRESENT:
9626 case GOMP_MAP_PRESENT_ALLOC:
9627 case GOMP_MAP_PRESENT_FROM:
9628 case GOMP_MAP_PRESENT_TO:
9629 case GOMP_MAP_PRESENT_TOFROM:
9630 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9631 case GOMP_MAP_ALWAYS_PRESENT_TO:
9632 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9633 case GOMP_MAP_ALLOC:
9634 case GOMP_MAP_RELEASE:
9635 case GOMP_MAP_DELETE:
9636 case GOMP_MAP_FORCE_ALLOC:
9637 case GOMP_MAP_IF_PRESENT:
9638 if (node == grp->grp_end)
9639 return node;
9641 node = OMP_CLAUSE_CHAIN (node);
9642 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9644 if (node == grp->grp_end)
9645 return *grp->grp_start;
9646 node = OMP_CLAUSE_CHAIN (node);
9648 if (node)
9649 switch (OMP_CLAUSE_MAP_KIND (node))
9651 case GOMP_MAP_POINTER:
9652 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9653 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9654 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9655 *firstprivate = OMP_CLAUSE_DECL (node);
9656 return *grp->grp_start;
9658 case GOMP_MAP_ALWAYS_POINTER:
9659 case GOMP_MAP_ATTACH_DETACH:
9660 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9661 return *grp->grp_start;
9663 default:
9664 internal_error ("unexpected mapping node");
9666 else
9667 internal_error ("unexpected mapping node");
9668 return error_mark_node;
9670 case GOMP_MAP_TO_PSET:
9671 gcc_assert (node != grp->grp_end);
9672 node = OMP_CLAUSE_CHAIN (node);
9673 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9674 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9675 return NULL_TREE;
9676 else
9677 internal_error ("unexpected mapping node");
9678 return error_mark_node;
9680 case GOMP_MAP_ATTACH:
9681 case GOMP_MAP_DETACH:
9682 node = OMP_CLAUSE_CHAIN (node);
9683 if (!node || *grp->grp_start == grp->grp_end)
9684 return NULL_TREE;
9685 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9686 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9688 /* We're mapping the base pointer itself in a bare attach or detach
9689 node. This is a side effect of how parsing works, and the mapping
9690 will be removed anyway (at least for enter/exit data directives).
9691 We should ignore the mapping here. FIXME. */
9692 return NULL_TREE;
9694 else
9695 internal_error ("unexpected mapping node");
9696 return error_mark_node;
9698 case GOMP_MAP_STRUCT:
9700 unsigned HOST_WIDE_INT num_mappings
9701 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9702 node = OMP_CLAUSE_CHAIN (node);
9703 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9704 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9706 *firstprivate = OMP_CLAUSE_DECL (node);
9707 node = OMP_CLAUSE_CHAIN (node);
9709 *chained = num_mappings;
9710 return node;
9713 case GOMP_MAP_FORCE_DEVICEPTR:
9714 case GOMP_MAP_DEVICE_RESIDENT:
9715 case GOMP_MAP_LINK:
9716 case GOMP_MAP_FIRSTPRIVATE:
9717 case GOMP_MAP_FIRSTPRIVATE_INT:
9718 case GOMP_MAP_USE_DEVICE_PTR:
9719 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9720 return NULL_TREE;
9722 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9723 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9724 case GOMP_MAP_POINTER:
9725 case GOMP_MAP_ALWAYS_POINTER:
9726 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9727 /* These shouldn't appear by themselves. */
9728 if (!seen_error ())
9729 internal_error ("unexpected pointer mapping node");
9730 return error_mark_node;
9732 default:
9733 gcc_unreachable ();
9736 return error_mark_node;
9739 /* Given a vector of omp_mapping_groups, build a hash table mapping each base
9740 expression (compared with tree_operand_hash_no_se) to its group. */
9742 static void
9743 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9744 omp_mapping_group *> *grpmap,
9745 vec<omp_mapping_group> *groups,
9746 tree reindex_sentinel)
9748 omp_mapping_group *grp;
9749 unsigned int i;
9750 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9752 FOR_EACH_VEC_ELT (*groups, i, grp)
9754 if (reindexing && *grp->grp_start == reindex_sentinel)
9755 above_hwm = true;
9757 if (reindexing && !above_hwm)
9758 continue;
9760 tree fpp;
9761 unsigned int chained;
9762 tree node = omp_group_base (grp, &chained, &fpp);
9764 if (node == error_mark_node || (!node && !fpp))
9765 continue;
9767 for (unsigned j = 0;
9768 node && j < chained;
9769 node = OMP_CLAUSE_CHAIN (node), j++)
9771 tree decl = OMP_CLAUSE_DECL (node);
9772 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9773 meaning node-hash lookups don't work. This is a workaround for
9774 that, but ideally we should just create the INDIRECT_REF at
9775 source instead. FIXME. */
9776 if (TREE_CODE (decl) == MEM_REF
9777 && integer_zerop (TREE_OPERAND (decl, 1)))
9778 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9780 omp_mapping_group **prev = grpmap->get (decl);
9782 if (prev && *prev == grp)
9783 /* Empty. */;
9784 else if (prev)
9786 /* Mapping the same thing twice is normally diagnosed as an error,
9787 but can happen under some circumstances, e.g. in pr99928-16.c,
9788 the directive:
9790 #pragma omp target simd reduction(+:a[:3]) \
9791 map(always, tofrom: a[:6])
9794 will result in two "a[0]" mappings (of different sizes). */
9796 grp->sibling = (*prev)->sibling;
9797 (*prev)->sibling = grp;
9799 else
9800 grpmap->put (decl, grp);
9803 if (!fpp)
9804 continue;
9806 omp_mapping_group **prev = grpmap->get (fpp);
9807 if (prev && *prev != grp)
9809 grp->sibling = (*prev)->sibling;
9810 (*prev)->sibling = grp;
9812 else
9813 grpmap->put (fpp, grp);
9817 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9818 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9820 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9821 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9823 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9825 return grpmap;
9828 /* Rebuild group map from partially-processed clause list (during
9829 omp_build_struct_sibling_lists). We have already processed nodes up until
9830 a high-water mark (HWM). This is a bit tricky because the list is being
9831 reordered as it is scanned, but we know:
9833 1. The list after HWM has not been touched yet, so we can reindex it safely.
9835 2. The list before and including HWM has been altered, but remains
9836 well-formed throughout the sibling-list building operation.
9838 So we can do the reindexing in two parts: first on the processed half of
9839 the list, then on the unprocessed half. */
9841 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9842 omp_reindex_mapping_groups (tree *list_p,
9843 vec<omp_mapping_group> *groups,
9844 vec<omp_mapping_group> *processed_groups,
9845 tree sentinel)
9847 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9848 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9850 processed_groups->truncate (0);
9852 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9853 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9854 if (sentinel)
9855 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9857 return grpmap;
9860 /* Find the immediately-containing struct for a component ref (etc.)
9861 expression EXPR. */
9863 static tree
9864 omp_containing_struct (tree expr)
9866 tree expr0 = expr;
9868 STRIP_NOPS (expr);
9870 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9871 component ref. */
9872 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9873 return expr0;
9875 while (TREE_CODE (expr) == ARRAY_REF)
9876 expr = TREE_OPERAND (expr, 0);
9878 if (TREE_CODE (expr) == COMPONENT_REF)
9879 expr = TREE_OPERAND (expr, 0);
9881 return expr;
9884 /* Return TRUE if DECL describes a component that is part of a whole structure
9885 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9886 that maps that structure, if present. */
9888 static bool
9889 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9890 omp_mapping_group *> *grpmap,
9891 tree decl,
9892 omp_mapping_group **mapped_by_group)
9894 tree wsdecl = NULL_TREE;
9896 *mapped_by_group = NULL;
9898 while (true)
9900 wsdecl = omp_containing_struct (decl);
9901 if (wsdecl == decl)
9902 break;
9903 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9904 if (!wholestruct
9905 && TREE_CODE (wsdecl) == MEM_REF
9906 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9908 tree deref = TREE_OPERAND (wsdecl, 0);
9909 deref = build_fold_indirect_ref (deref);
9910 wholestruct = grpmap->get (deref);
9912 if (wholestruct)
9914 *mapped_by_group = *wholestruct;
9915 return true;
9917 decl = wsdecl;
9920 return false;
9923 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9924 FALSE on error. */
9926 static bool
9927 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9928 vec<omp_mapping_group> *groups,
9929 hash_map<tree_operand_hash_no_se,
9930 omp_mapping_group *> *grpmap,
9931 omp_mapping_group *grp)
9933 if (grp->mark == PERMANENT)
9934 return true;
9935 if (grp->mark == TEMPORARY)
9937 fprintf (stderr, "when processing group:\n");
9938 debug_mapping_group (grp);
9939 internal_error ("base pointer cycle detected");
9940 return false;
9942 grp->mark = TEMPORARY;
9944 tree attaches_to = omp_get_attachment (grp);
9946 if (attaches_to)
9948 omp_mapping_group **basep = grpmap->get (attaches_to);
9950 if (basep && *basep != grp)
9952 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9953 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9954 return false;
9958 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9960 while (decl)
9962 tree base = omp_get_base_pointer (decl);
9964 if (!base)
9965 break;
9967 omp_mapping_group **innerp = grpmap->get (base);
9968 omp_mapping_group *wholestruct;
9970 /* We should treat whole-structure mappings as if all (pointer, in this
9971 case) members are mapped as individual list items. Check if we have
9972 such a whole-structure mapping, if we don't have an explicit reference
9973 to the pointer member itself. */
9974 if (!innerp
9975 && TREE_CODE (base) == COMPONENT_REF
9976 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9977 innerp = &wholestruct;
9979 if (innerp && *innerp != grp)
9981 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9982 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9983 return false;
9984 break;
9987 decl = base;
9990 grp->mark = PERMANENT;
9992 /* Emit grp to output list. */
9994 **outlist = grp;
9995 *outlist = &grp->next;
9997 return true;
10000 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
10001 before mappings that use those pointers. This is an implementation of the
10002 depth-first search algorithm, described e.g. at:
10004 https://en.wikipedia.org/wiki/Topological_sorting
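/* For instance (hypothetical), with "map(to: p->x[0:n]) map(to: p)" the
   group mapping "p" is the base pointer of the group mapping "p->x[0:n]",
   so the depth-first search emits the "p" group into the output list
   first.  */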
10007 static omp_mapping_group *
10008 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
10009 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
10010 *grpmap)
10012 omp_mapping_group *grp, *outlist = NULL, **cursor;
10013 unsigned int i;
10015 cursor = &outlist;
10017 FOR_EACH_VEC_ELT (*groups, i, grp)
10019 if (grp->mark != PERMANENT)
10020 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
10021 return NULL;
10024 return outlist;
10027 /* Split INLIST into two parts, moving groups corresponding to
10028 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
10029 The former list is then appended to the latter. Each sub-list retains the
10030 order of the original list.
10031 Note that ATTACH nodes are later moved to the end of the list in
10032 gimplify_adjust_omp_clauses, for target regions. */
10034 static omp_mapping_group *
10035 omp_segregate_mapping_groups (omp_mapping_group *inlist)
10037 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
10038 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
10040 for (omp_mapping_group *w = inlist; w;)
10042 tree c = *w->grp_start;
10043 omp_mapping_group *next = w->next;
10045 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
10047 switch (OMP_CLAUSE_MAP_KIND (c))
10049 case GOMP_MAP_ALLOC:
10050 case GOMP_MAP_RELEASE:
10051 case GOMP_MAP_DELETE:
10052 *ard_tail = w;
10053 w->next = NULL;
10054 ard_tail = &w->next;
10055 break;
10057 default:
10058 *tf_tail = w;
10059 w->next = NULL;
10060 tf_tail = &w->next;
10063 w = next;
10066 /* Now splice the lists together... */
10067 *tf_tail = ard_groups;
10069 return tf_groups;
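/* E.g. (illustrative): an input list of groups in the order
   {to: a} {alloc: b} {tofrom: c} is returned as
   {to: a} {tofrom: c} {alloc: b} -- each sub-list keeps its relative
   order, with the alloc/release/delete groups moved to the end.  */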
10072 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10073 those groups based on the output list of omp_tsort_mapping_groups --
10074 singly-linked, threaded through each element's NEXT pointer starting at
10075 HEAD. Each list element appears exactly once in that linked list.
10077 Each element of GROUPS may correspond to one or several mapping nodes.
10078 Node groups are kept together, and in the reordered list, the positions of
10079 the original groups are reused for the positions of the reordered list.
10080 Hence if we have e.g.
10082 {to ptr ptr} firstprivate {tofrom ptr} ...
10083 ^ ^ ^
10084 first group non-"map" second group
10086 and say the second group contains a base pointer for the first so must be
10087 moved before it, the resulting list will contain:
10089 {tofrom ptr} firstprivate {to ptr ptr} ...
10090 ^ prev. second group ^ prev. first group
10093 static tree *
10094 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
10095 omp_mapping_group *head,
10096 tree *list_p)
10098 omp_mapping_group *grp;
10099 unsigned int i;
10100 unsigned numgroups = groups->length ();
10101 auto_vec<tree> old_heads (numgroups);
10102 auto_vec<tree *> old_headps (numgroups);
10103 auto_vec<tree> new_heads (numgroups);
10104 auto_vec<tree> old_succs (numgroups);
10105 bool map_at_start = (list_p == (*groups)[0].grp_start);
10107 tree *new_grp_tail = NULL;
10109 /* Stash the start & end nodes of each mapping group before we start
10110 modifying the list. */
10111 FOR_EACH_VEC_ELT (*groups, i, grp)
10113 old_headps.quick_push (grp->grp_start);
10114 old_heads.quick_push (*grp->grp_start);
10115 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
10118 /* And similarly, the heads of the groups in the order we want to rearrange
10119 the list to. */
10120 for (omp_mapping_group *w = head; w; w = w->next)
10121 new_heads.quick_push (*w->grp_start);
10123 FOR_EACH_VEC_ELT (*groups, i, grp)
10125 gcc_assert (head);
10127 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
10129 /* a {b c d} {e f g} h i j (original)
10131 a {k l m} {e f g} h i j (inserted new group on last iter)
10133 a {k l m} {n o p} h i j (this time, chain last group to new one)
10134 ^new_grp_tail
10136 *new_grp_tail = new_heads[i];
10138 else if (new_grp_tail)
10140 /* a {b c d} e {f g h} i j k (original)
10142 a {l m n} e {f g h} i j k (gap after last iter's group)
10144 a {l m n} e {o p q} h i j (chain last group to old successor)
10145 ^new_grp_tail
10147 *new_grp_tail = old_succs[i - 1];
10148 *old_headps[i] = new_heads[i];
10150 else
10152 /* The first inserted group -- point to new group, and leave end
10153 open.
10154 a {b c d} e f
10156 a {g h i...
10158 *grp->grp_start = new_heads[i];
10161 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
10163 head = head->next;
10166 if (new_grp_tail)
10167 *new_grp_tail = old_succs[numgroups - 1];
10169 gcc_assert (!head);
10171 return map_at_start ? (*groups)[0].grp_start : list_p;
10174 /* DECL is supposed to have lastprivate semantics in the outer contexts
10175 of combined/composite constructs, starting with OCTX.
10176 Add the needed lastprivate, shared or map clause if no data-sharing or
10177 mapping clause is present. IMPLICIT_P is true if it is an implicit
10178 clause (e.g. the IV of a simd loop), in which case the lastprivate will
10179 not be copied to some constructs. */
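/* E.g. (illustrative): for "#pragma omp parallel for lastprivate(x)"
   gimplified as a combined construct, X gets an implicit "shared" on the
   ORT_COMBINED_PARALLEL context so the value assigned in the final
   iteration is visible after the region.  */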
10181 static void
10182 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
10183 tree decl, bool implicit_p)
10185 struct gimplify_omp_ctx *orig_octx = octx;
10186 for (; octx; octx = octx->outer_context)
10188 if ((octx->region_type == ORT_COMBINED_PARALLEL
10189 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
10190 && splay_tree_lookup (octx->variables,
10191 (splay_tree_key) decl) == NULL)
10193 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
10194 continue;
10196 if ((octx->region_type & ORT_TASK) != 0
10197 && octx->combined_loop
10198 && splay_tree_lookup (octx->variables,
10199 (splay_tree_key) decl) == NULL)
10201 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10202 continue;
10204 if (implicit_p
10205 && octx->region_type == ORT_WORKSHARE
10206 && octx->combined_loop
10207 && splay_tree_lookup (octx->variables,
10208 (splay_tree_key) decl) == NULL
10209 && octx->outer_context
10210 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
10211 && splay_tree_lookup (octx->outer_context->variables,
10212 (splay_tree_key) decl) == NULL)
10214 octx = octx->outer_context;
10215 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10216 continue;
10218 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
10219 && octx->combined_loop
10220 && splay_tree_lookup (octx->variables,
10221 (splay_tree_key) decl) == NULL
10222 && !omp_check_private (octx, decl, false))
10224 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10225 continue;
10227 if (octx->region_type == ORT_COMBINED_TARGET)
10229 splay_tree_node n = splay_tree_lookup (octx->variables,
10230 (splay_tree_key) decl);
10231 if (n == NULL)
10233 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10234 octx = octx->outer_context;
10236 else if (!implicit_p
10237 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
10239 n->value &= ~(GOVD_FIRSTPRIVATE
10240 | GOVD_FIRSTPRIVATE_IMPLICIT
10241 | GOVD_EXPLICIT);
10242 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10243 octx = octx->outer_context;
10246 break;
10248 if (octx && (implicit_p || octx != orig_octx))
10249 omp_notice_variable (octx, decl, true);
10252 /* If we have mappings INNER and OUTER, where INNER is a component access and
10253 OUTER is a mapping of the whole containing struct, check that the mappings
10254 are compatible. We'll be deleting the inner mapping, so we need to make
10255 sure the outer mapping does (at least) the same transfers to/from the device
10256 as the inner mapping. */
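/* E.g. (illustrative): an outer "map(tofrom: s)" subsumes an inner
   "map(to: s.a[0:n])", so the pair is compatible; an outer "map(to: s)"
   with an inner "map(from: s.a[0:n])" is rejected because the outer
   mapping would not copy the component back from the device.  */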
10258 bool
10259 omp_check_mapping_compatibility (location_t loc,
10260 omp_mapping_group *outer,
10261 omp_mapping_group *inner)
10263 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
10265 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
10266 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
10268 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
10269 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
10271 if (outer_kind == inner_kind)
10272 return true;
10274 switch (outer_kind)
10276 case GOMP_MAP_ALWAYS_TO:
10277 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10278 || inner_kind == GOMP_MAP_ALLOC
10279 || inner_kind == GOMP_MAP_TO)
10280 return true;
10281 break;
10283 case GOMP_MAP_ALWAYS_FROM:
10284 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10285 || inner_kind == GOMP_MAP_ALLOC
10286 || inner_kind == GOMP_MAP_FROM)
10287 return true;
10288 break;
10290 case GOMP_MAP_TO:
10291 case GOMP_MAP_FROM:
10292 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10293 || inner_kind == GOMP_MAP_ALLOC)
10294 return true;
10295 break;
10297 case GOMP_MAP_ALWAYS_TOFROM:
10298 case GOMP_MAP_TOFROM:
10299 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10300 || inner_kind == GOMP_MAP_ALLOC
10301 || inner_kind == GOMP_MAP_TO
10302 || inner_kind == GOMP_MAP_FROM
10303 || inner_kind == GOMP_MAP_TOFROM)
10304 return true;
10305 break;
10307 default:
10311 error_at (loc, "data movement for component %qE is not compatible with "
10312 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10313 OMP_CLAUSE_DECL (first_outer));
10315 return false;
10318 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10319 clause dependencies we handle for now are struct element mappings and
10320 whole-struct mappings on the same directive, and duplicate clause
10321 detection. */
10323 void
10324 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10325 hash_map<tree_operand_hash_no_se,
10326 omp_mapping_group *> *grpmap)
10328 int i;
10329 omp_mapping_group *grp;
10330 hash_set<tree_operand_hash> *seen_components = NULL;
10331 hash_set<tree_operand_hash> *shown_error = NULL;
10333 FOR_EACH_VEC_ELT (*groups, i, grp)
10335 tree grp_end = grp->grp_end;
10336 tree decl = OMP_CLAUSE_DECL (grp_end);
10338 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10340 if (DECL_P (decl))
10341 continue;
10343 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10344 while (TREE_CODE (c) == ARRAY_REF)
10345 c = TREE_OPERAND (c, 0);
10346 if (TREE_CODE (c) != COMPONENT_REF)
10347 continue;
10348 if (!seen_components)
10349 seen_components = new hash_set<tree_operand_hash> ();
10350 if (!shown_error)
10351 shown_error = new hash_set<tree_operand_hash> ();
10352 if (seen_components->contains (c)
10353 && !shown_error->contains (c))
10355 error_at (OMP_CLAUSE_LOCATION (grp_end),
10356 "%qE appears more than once in map clauses",
10357 OMP_CLAUSE_DECL (grp_end));
10358 shown_error->add (c);
10360 else
10361 seen_components->add (c);
10363 omp_mapping_group *struct_group;
10364 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10365 && *grp->grp_start == grp_end)
10367 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10368 struct_group, grp);
10369 /* Remove the whole of this mapping -- redundant. */
10370 grp->deleted = true;
10374 if (seen_components)
10375 delete seen_components;
10376 if (shown_error)
10377 delete shown_error;
10380 /* Link NEWNODE into the list so that chain INSERT_AT points to it;
10381 NEWNODE's chain is set to the node INSERT_AT previously pointed to. */
10383 static tree *
10384 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10386 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10387 *insert_at = newnode;
10388 return &OMP_CLAUSE_CHAIN (newnode);
10391 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10392 pointed to by chain MOVE_AFTER instead. */
10394 static void
10395 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10397 gcc_assert (node == *old_pos);
10398 *old_pos = OMP_CLAUSE_CHAIN (node);
10399 OMP_CLAUSE_CHAIN (node) = *move_after;
10400 *move_after = node;
10403 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
10404 through LAST_NODE so they follow the MOVE_AFTER chain. Similar to the
10405 function below, but no new nodes are prepended to the list before splicing
10406 it into the new position. Return the position we should continue scanning
10407 the list at, or NULL to stay where we were. */
10409 static tree *
10410 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10411 tree *move_after)
10413 if (first_ptr == move_after)
10414 return NULL;
10416 tree tmp = *first_ptr;
10417 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10418 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10419 *move_after = tmp;
10421 return first_ptr;
10424 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10425 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10426 pointer MOVE_AFTER.
10428 The latter list was previously part of the OMP clause list, and the former
10429 (prepended) part consists of new nodes.
10431 We start with a list of nodes starting with a struct mapping node. We
10432 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10433 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10434 the group of mapping nodes we are currently processing (from the chain
10435 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10436 we should continue processing from, or NULL to stay where we were.
10438 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10439 different) is worked through below. Here we are processing LAST_NODE, and
10440 FIRST_PTR points at the preceding mapping clause:
10442 #. mapping node chain
10443 ---------------------------------------------------
10444 A. struct_node [->B]
10445 B. comp_1 [->C]
10446 C. comp_2 [->D (move_after)]
10447 D. map_to_3 [->E]
10448 E. attach_3 [->F (first_ptr)]
10449 F. map_to_4 [->G (continue_at)]
10450 G. attach_4 (last_node) [->H]
10451 H. ...
10453 *last_new_tail = *first_ptr;
10455 I. new_node (first_new) [->F (last_new_tail)]
10457 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10459 #. mapping node chain
10460 ----------------------------------------------------
10461 A. struct_node [->B]
10462 B. comp_1 [->C]
10463 C. comp_2 [->D (move_after)]
10464 D. map_to_3 [->E]
10465 E. attach_3 [->H (first_ptr)]
10466 F. map_to_4 [->G (continue_at)]
10467 G. attach_4 (last_node) [->H]
10468 H. ...
10470 I. new_node (first_new) [->F (last_new_tail)]
10472 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10474 #. mapping node chain
10475 ---------------------------------------------------
10476 A. struct_node [->B]
10477 B. comp_1 [->C]
10478 C. comp_2 [->D (move_after)]
10479 D. map_to_3 [->E]
10480 E. attach_3 [->H (continue_at)]
10481 F. map_to_4 [->G]
10482 G. attach_4 (last_node) [->D]
10483 H. ...
10485 I. new_node (first_new) [->F (last_new_tail)]
10487 *move_after = first_new;
10489 #. mapping node chain
10490 ---------------------------------------------------
10491 A. struct_node [->B]
10492 B. comp_1 [->C]
10493 C. comp_2 [->I (move_after)]
10494 D. map_to_3 [->E]
10495 E. attach_3 [->H (continue_at)]
10496 F. map_to_4 [->G]
10497 G. attach_4 (last_node) [->D]
10498 H. ...
10499 I. new_node (first_new) [->F (last_new_tail)]
10501 or, in order:
10503 #. mapping node chain
10504 ---------------------------------------------------
10505 A. struct_node [->B]
10506 B. comp_1 [->C]
10507 C. comp_2 [->I (move_after)]
10508 I. new_node (first_new) [->F (last_new_tail)]
10509 F. map_to_4 [->G]
10510 G. attach_4 (last_node) [->D]
10511 D. map_to_3 [->E]
10512 E. attach_3 [->H (continue_at)]
10513 H. ...
10516 static tree *
10517 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10518 tree *first_ptr, tree last_node,
10519 tree *move_after)
10521 tree *continue_at = NULL;
10522 *last_new_tail = *first_ptr;
10523 if (first_ptr == move_after)
10524 *move_after = first_new;
10525 else
10527 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10528 continue_at = first_ptr;
10529 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10530 *move_after = first_new;
10532 return continue_at;
10535 /* Mapping struct members causes an additional set of nodes to be created,
10536 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10537 number of members being mapped, in order of ascending position (byte
10538 offset, then bit offset for bit-fields).
10540 We scan through the list of mapping clauses, calling this function for each
10541 struct member mapping we find, and build up the list of mappings after the
10542 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10543 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10544 moved into place in the sorted list.
10546 struct {
10547 int *a;
10548 int *b;
10549 int c;
10550 int *d;
10553 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10554 struct.d[0:n])
10556 GOMP_MAP_STRUCT (4)
10557 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10558 GOMP_MAP_ALLOC (struct.a)
10559 GOMP_MAP_ALLOC (struct.b)
10560 GOMP_MAP_TO (struct.c)
10561 GOMP_MAP_ALLOC (struct.d)
10564 In the case where we are mapping references to pointers, or in Fortran if
10565 we are mapping an array with a descriptor, additional nodes may be created
10566 after the struct node list also.
10568 The return code is either a pointer to the next node to process (if the
10569 list has been rearranged), else NULL to continue with the next node in the
10570 original list. */
10572 static tree *
10573 omp_accumulate_sibling_list (enum omp_region_type region_type,
10574 enum tree_code code,
10575 hash_map<tree_operand_hash, tree>
10576 *&struct_map_to_clause, tree *grp_start_p,
10577 tree grp_end, tree *inner)
10579 poly_offset_int coffset;
10580 poly_int64 cbitpos;
10581 tree ocd = OMP_CLAUSE_DECL (grp_end);
10582 bool openmp = !(region_type & ORT_ACC);
10583 tree *continue_at = NULL;
10585 while (TREE_CODE (ocd) == ARRAY_REF)
10586 ocd = TREE_OPERAND (ocd, 0);
10588 if (INDIRECT_REF_P (ocd))
10589 ocd = TREE_OPERAND (ocd, 0);
10591 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10593 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10594 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10595 == GOMP_MAP_ATTACH_DETACH)
10596 || (OMP_CLAUSE_MAP_KIND (grp_end)
10597 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10598 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10599 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10601 /* FIXME: If we're not mapping the base pointer in some other clause on this
10602 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10603 early-exit. */
10604 if (openmp && attach_detach)
10605 return NULL;
10607 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10609 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10610 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10612 OMP_CLAUSE_SET_MAP_KIND (l, k);
10614 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10616 OMP_CLAUSE_SIZE (l)
10617 = (!attach ? size_int (1)
10618 : (DECL_P (OMP_CLAUSE_DECL (l))
10619 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10620 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10621 if (struct_map_to_clause == NULL)
10622 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10623 struct_map_to_clause->put (base, l);
10625 if (ptr || attach_detach)
10627 tree extra_node;
10628 tree alloc_node
10629 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10630 &extra_node);
10631 OMP_CLAUSE_CHAIN (l) = alloc_node;
10633 tree *insert_node_pos = grp_start_p;
10635 if (extra_node)
10637 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10638 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10640 else
10641 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10643 *insert_node_pos = l;
10645 else
10647 gcc_assert (*grp_start_p == grp_end);
10648 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10651 tree noind = omp_strip_indirections (base);
10653 if (!openmp
10654 && (region_type & ORT_TARGET)
10655 && TREE_CODE (noind) == COMPONENT_REF)
10657 /* The base for this component access is a struct component access
10658 itself. Insert a node to be processed on the next iteration of
10659 our caller's loop, which will subsequently be turned into a new,
10660 inner GOMP_MAP_STRUCT mapping.
10662 We need to do this else the non-DECL_P base won't be
10663 rewritten correctly in the offloaded region. */
10664 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10665 OMP_CLAUSE_MAP);
10666 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10667 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10668 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10669 *inner = c2;
10670 return NULL;
10673 tree sdecl = omp_strip_components_and_deref (base);
10675 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10677 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10678 OMP_CLAUSE_MAP);
10679 bool base_ref
10680 = (INDIRECT_REF_P (base)
10681 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10682 == REFERENCE_TYPE)
10683 || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
10684 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10685 (TREE_OPERAND (base, 0), 0)))
10686 == REFERENCE_TYPE))));
10687 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10688 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10689 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10690 OMP_CLAUSE_DECL (c2) = sdecl;
10691 tree baddr = build_fold_addr_expr (base);
10692 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10693 ptrdiff_type_node, baddr);
10694 /* This isn't going to be good enough when we add support for more
10695 complicated lvalue expressions. FIXME. */
10696 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10697 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10698 sdecl = build_simple_mem_ref (sdecl);
10699 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10700 ptrdiff_type_node, sdecl);
10701 OMP_CLAUSE_SIZE (c2)
10702 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10703 ptrdiff_type_node, baddr, decladdr);
10704 /* Insert after struct node. */
10705 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10706 OMP_CLAUSE_CHAIN (l) = c2;
10709 return NULL;
10711 else if (struct_map_to_clause)
10713 tree *osc = struct_map_to_clause->get (base);
10714 tree *sc = NULL, *scp = NULL;
10715 sc = &OMP_CLAUSE_CHAIN (*osc);
10716 /* The struct mapping might be immediately followed by a
10717 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10718 indirect access or a reference, or both. (This added node is removed
10719 in omp-low.c after it has been processed there.) */
10720 if (*sc != grp_end
10721 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10722 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10723 sc = &OMP_CLAUSE_CHAIN (*sc);
10724 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10725 if ((ptr || attach_detach) && sc == grp_start_p)
10726 break;
10727 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10728 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10729 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10730 break;
10731 else
10733 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10734 poly_offset_int offset;
10735 poly_int64 bitpos;
10737 if (TREE_CODE (sc_decl) == ARRAY_REF)
10739 while (TREE_CODE (sc_decl) == ARRAY_REF)
10740 sc_decl = TREE_OPERAND (sc_decl, 0);
10741 if (TREE_CODE (sc_decl) != COMPONENT_REF
10742 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10743 break;
10745 else if (INDIRECT_REF_P (sc_decl)
10746 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10747 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10748 == REFERENCE_TYPE))
10749 sc_decl = TREE_OPERAND (sc_decl, 0);
10751 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10752 if (!base2 || !operand_equal_p (base2, base, 0))
10753 break;
10754 if (scp)
10755 continue;
10756 if (maybe_lt (coffset, offset)
10757 || (known_eq (coffset, offset)
10758 && maybe_lt (cbitpos, bitpos)))
10760 if (ptr || attach_detach)
10761 scp = sc;
10762 else
10763 break;
10767 if (!attach)
10768 OMP_CLAUSE_SIZE (*osc)
10769 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10770 if (ptr || attach_detach)
10772 tree cl = NULL_TREE, extra_node;
10773 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10774 grp_end, &extra_node);
10775 tree *tail_chain = NULL;
10777 /* Here, we have:
10779 grp_end : the last (or only) node in this group.
10780 grp_start_p : pointer to the first node in a pointer mapping group
10781 up to and including GRP_END.
10782 sc : pointer to the chain for the end of the struct component
10783 list.
10784 scp : pointer to the chain for the sorted position at which we
10785 should insert in the middle of the struct component list
10786 (else NULL to insert at end).
10787 alloc_node : the "alloc" node for the structure (pointer-type)
10788 component. We insert at SCP (if present), else SC
10789 (the end of the struct component list).
10790 extra_node : a newly-synthesized node for an additional indirect
10791 pointer mapping or a Fortran pointer set, if needed.
10792 cl : first node to prepend before grp_start_p.
10793 tail_chain : pointer to chain of last prepended node.
10795 The general idea is we move the nodes for this struct mapping
10796 together: the alloc node goes into the sorted list directly after
10797 the struct mapping, and any extra nodes (together with the nodes
10798 mapping arrays pointed to by struct components) get moved after
10799 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10800 the end of the struct component mapping list. It's important that
10801 the alloc_node comes first in that case because it's part of the
10802 sorted component mapping list (but subsequent nodes are not!). */
10804 if (scp)
10805 omp_siblist_insert_node_after (alloc_node, scp);
10807 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10808 already inserted it) and the extra_node (if it is present). The
10809 list can be empty if we added alloc_node above and there is no
10810 extra node. */
10811 if (scp && extra_node)
10813 cl = extra_node;
10814 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10816 else if (extra_node)
10818 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10819 cl = alloc_node;
10820 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10822 else if (!scp)
10824 cl = alloc_node;
10825 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10828 continue_at
10829 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10830 grp_start_p, grp_end,
10832 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10834 else if (*sc != grp_end)
10836 gcc_assert (*grp_start_p == grp_end);
10838 /* We are moving the current node back to a previous struct node:
10839 the node that used to point to the current node will now point to
10840 the next node. */
10841 continue_at = grp_start_p;
10842 /* In the non-pointer case, the mapping clause itself is moved into
10843 the correct position in the struct component list, which in this
10844 case is just SC. */
10845 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10848 return continue_at;
10851 /* Scan through GROUPS, and create sorted structure sibling lists without
10852 gimplifying. */
10854 static bool
10855 omp_build_struct_sibling_lists (enum tree_code code,
10856 enum omp_region_type region_type,
10857 vec<omp_mapping_group> *groups,
10858 hash_map<tree_operand_hash_no_se,
10859 omp_mapping_group *> **grpmap,
10860 tree *list_p)
10862 unsigned i;
10863 omp_mapping_group *grp;
10864 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10865 bool success = true;
10866 tree *new_next = NULL;
10867 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10868 auto_vec<omp_mapping_group> pre_hwm_groups;
10870 FOR_EACH_VEC_ELT (*groups, i, grp)
10872 tree c = grp->grp_end;
10873 tree decl = OMP_CLAUSE_DECL (c);
10874 tree grp_end = grp->grp_end;
10875 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10877 if (new_next)
10878 grp->grp_start = new_next;
10880 new_next = NULL;
10882 tree *grp_start_p = grp->grp_start;
10884 if (DECL_P (decl))
10885 continue;
10887 /* Skip groups we marked for deletion in
10888 oacc_resolve_clause_dependencies. */
10889 if (grp->deleted)
10890 continue;
10892 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10893 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10895 /* Don't process an array descriptor that isn't inside a derived type
10896 as a struct (the GOMP_MAP_POINTER following will have the form
10897 "var.data", but such mappings are handled specially). */
10898 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10899 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10900 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10901 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10902 continue;
10905 tree d = decl;
10906 if (TREE_CODE (d) == ARRAY_REF)
10908 while (TREE_CODE (d) == ARRAY_REF)
10909 d = TREE_OPERAND (d, 0);
10910 if (TREE_CODE (d) == COMPONENT_REF
10911 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10912 decl = d;
10914 if (d == decl
10915 && INDIRECT_REF_P (decl)
10916 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10917 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10918 == REFERENCE_TYPE)
10919 && (OMP_CLAUSE_MAP_KIND (c)
10920 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10921 decl = TREE_OPERAND (decl, 0);
10923 STRIP_NOPS (decl);
10925 if (TREE_CODE (decl) != COMPONENT_REF)
10926 continue;
10928 /* If we're mapping the whole struct in another node, skip adding this
10929 node to a sibling list. */
10930 omp_mapping_group *wholestruct;
10931 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10932 &wholestruct))
10934 if (!(region_type & ORT_ACC)
10935 && *grp_start_p == grp_end)
10936 /* Remove the whole of this mapping -- redundant. */
10937 grp->deleted = true;
10939 continue;
10942 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10943 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10944 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10945 && code != OACC_UPDATE
10946 && code != OMP_TARGET_UPDATE)
10948 if (error_operand_p (decl))
10950 success = false;
10951 goto error_out;
10954 tree stype = TREE_TYPE (decl);
10955 if (TREE_CODE (stype) == REFERENCE_TYPE)
10956 stype = TREE_TYPE (stype);
10957 if (TYPE_SIZE_UNIT (stype) == NULL
10958 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10960 error_at (OMP_CLAUSE_LOCATION (c),
10961 "mapping field %qE of variable length "
10962 "structure", OMP_CLAUSE_DECL (c));
10963 success = false;
10964 goto error_out;
10967 tree inner = NULL_TREE;
10969 new_next
10970 = omp_accumulate_sibling_list (region_type, code,
10971 struct_map_to_clause, grp_start_p,
10972 grp_end, &inner);
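/* If omp_accumulate_sibling_list synthesized an extra clause node, it is
   returned in INNER: splice it into the clause chain and record it as a
   mapping group of its own.  */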
10974 if (inner)
10976 if (new_next && *new_next == NULL_TREE)
10977 *new_next = inner;
10978 else
10979 *tail = inner;
10981 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10982 omp_mapping_group newgrp;
10983 newgrp.grp_start = new_next ? new_next : tail;
10984 newgrp.grp_end = inner;
10985 newgrp.mark = UNVISITED;
10986 newgrp.sibling = NULL;
10987 newgrp.deleted = false;
10988 newgrp.next = NULL;
10989 groups->safe_push (newgrp);
10991 /* !!! Growing GROUPS might invalidate the pointers in the group
10992 map. Rebuild it here. This is a bit inefficient, but
10993 shouldn't happen very often. */
10994 delete (*grpmap);
10995 *grpmap
10996 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10997 sentinel);
10999 tail = &OMP_CLAUSE_CHAIN (inner);
11004 /* Delete groups marked for deletion above. At this point the order of the
11005 groups may no longer correspond to the order of the underlying list,
11006 which complicates this a little. First clear out OMP_CLAUSE_DECL for
11007 deleted nodes... */
11009 FOR_EACH_VEC_ELT (*groups, i, grp)
11010 if (grp->deleted)
11011 for (tree d = *grp->grp_start;
11012 d != OMP_CLAUSE_CHAIN (grp->grp_end);
11013 d = OMP_CLAUSE_CHAIN (d))
11014 OMP_CLAUSE_DECL (d) = NULL_TREE;
11016 /* ...then sweep through the list removing the now-empty nodes. */
11018 tail = list_p;
11019 while (*tail)
11021 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
11022 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
11023 *tail = OMP_CLAUSE_CHAIN (*tail);
11024 else
11025 tail = &OMP_CLAUSE_CHAIN (*tail);
11028 error_out:
11029 if (struct_map_to_clause)
11030 delete struct_map_to_clause;
11032 return success;
11035 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
11036 omp context and, where required, into enclosing contexts. */
11038 static void
11039 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
11040 enum omp_region_type region_type,
11041 enum tree_code code)
11043 struct gimplify_omp_ctx *ctx, *outer_ctx;
11044 tree c;
11045 tree *orig_list_p = list_p;
11046 int handled_depend_iterators = -1;
11047 int nowait = -1;
11049 ctx = new_omp_context (region_type);
11050 ctx->code = code;
11051 outer_ctx = ctx->outer_context;
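/* Seed the implicit data-mapping rules for "target": non-Fortran
   pointers map as zero-length array sections, GDMK_SCALAR defaults to
   firstprivate, and GDMK_SCALAR_TARGET to map for Fortran and to
   firstprivate otherwise.  */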
11052 if (code == OMP_TARGET)
11054 if (!lang_GNU_Fortran ())
11055 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
11056 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
11057 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
11058 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11060 if (!lang_GNU_Fortran ())
11061 switch (code)
11063 case OMP_TARGET:
11064 case OMP_TARGET_DATA:
11065 case OMP_TARGET_ENTER_DATA:
11066 case OMP_TARGET_EXIT_DATA:
11067 case OACC_DECLARE:
11068 case OACC_HOST_DATA:
11069 case OACC_PARALLEL:
11070 case OACC_KERNELS:
11071 ctx->target_firstprivatize_array_bases = true;
11072 default:
11073 break;
11076 if (code == OMP_TARGET
11077 || code == OMP_TARGET_DATA
11078 || code == OMP_TARGET_ENTER_DATA
11079 || code == OMP_TARGET_EXIT_DATA)
11081 vec<omp_mapping_group> *groups;
11082 groups = omp_gather_mapping_groups (list_p);
11083 if (groups)
11085 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
11086 grpmap = omp_index_mapping_groups (groups);
11088 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
11089 list_p);
11091 omp_mapping_group *outlist = NULL;
11093 /* Topological sorting may fail if we have duplicate nodes, which
11094 we should have detected and shown an error for already. Skip
11095 sorting in that case. */
11096 if (seen_error ())
11097 goto failure;
11099 delete grpmap;
11100 delete groups;
11102 /* Rebuild now we have struct sibling lists. */
11103 groups = omp_gather_mapping_groups (list_p);
11104 grpmap = omp_index_mapping_groups (groups);
11106 outlist = omp_tsort_mapping_groups (groups, grpmap);
11107 outlist = omp_segregate_mapping_groups (outlist);
11108 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
11110 failure:
11111 delete grpmap;
11112 delete groups;
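/* Barring earlier errors, the map clauses have now been grouped, given
   struct sibling lists, topologically sorted, segregated and reordered
   in place.  */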
11115 /* OpenMP map clauses with 'present' need to go in front of those
11116 without. */
11117 tree present_map_head = NULL;
11118 tree *present_map_tail_p = &present_map_head;
11119 tree *first_map_clause_p = NULL;
11121 for (tree *c_p = list_p; *c_p; )
11123 tree c = *c_p;
11124 tree *next_c_p = &OMP_CLAUSE_CHAIN (c);
11126 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11128 if (!first_map_clause_p)
11129 first_map_clause_p = c_p;
11130 switch (OMP_CLAUSE_MAP_KIND (c))
11132 case GOMP_MAP_PRESENT_ALLOC:
11133 case GOMP_MAP_PRESENT_FROM:
11134 case GOMP_MAP_PRESENT_TO:
11135 case GOMP_MAP_PRESENT_TOFROM:
11136 next_c_p = c_p;
11137 *c_p = OMP_CLAUSE_CHAIN (c);
11139 OMP_CLAUSE_CHAIN (c) = NULL;
11140 *present_map_tail_p = c;
11141 present_map_tail_p = &OMP_CLAUSE_CHAIN (c);
11143 break;
11145 default:
11146 break;
11150 c_p = next_c_p;
11152 if (first_map_clause_p && present_map_head)
11154 tree next = *first_map_clause_p;
11155 *first_map_clause_p = present_map_head;
11156 *present_map_tail_p = next;
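/* The detached "present" maps are spliced back in ahead of the first
   map clause, so that e.g. (roughly) for
     #pragma omp target map(to: a) map(present, to: b)
   the clause for B is processed before the one for A.  */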
11159 else if (region_type & ORT_ACC)
11161 vec<omp_mapping_group> *groups;
11162 groups = omp_gather_mapping_groups (list_p);
11163 if (groups)
11165 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
11166 grpmap = omp_index_mapping_groups (groups);
11168 oacc_resolve_clause_dependencies (groups, grpmap);
11169 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
11170 list_p);
11172 delete groups;
11173 delete grpmap;
11177 while ((c = *list_p) != NULL)
11179 bool remove = false;
11180 bool notice_outer = true;
11181 const char *check_non_private = NULL;
11182 unsigned int flags;
11183 tree decl;
11185 switch (OMP_CLAUSE_CODE (c))
11187 case OMP_CLAUSE_PRIVATE:
11188 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
11189 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
11191 flags |= GOVD_PRIVATE_OUTER_REF;
11192 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
11194 else
11195 notice_outer = false;
11196 goto do_add;
11197 case OMP_CLAUSE_SHARED:
11198 flags = GOVD_SHARED | GOVD_EXPLICIT;
11199 goto do_add;
11200 case OMP_CLAUSE_FIRSTPRIVATE:
11201 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11202 check_non_private = "firstprivate";
11203 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11205 gcc_assert (code == OMP_TARGET);
11206 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
11208 goto do_add;
11209 case OMP_CLAUSE_LASTPRIVATE:
11210 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11211 switch (code)
11213 case OMP_DISTRIBUTE:
11214 error_at (OMP_CLAUSE_LOCATION (c),
11215 "conditional %<lastprivate%> clause on "
11216 "%qs construct", "distribute");
11217 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11218 break;
11219 case OMP_TASKLOOP:
11220 error_at (OMP_CLAUSE_LOCATION (c),
11221 "conditional %<lastprivate%> clause on "
11222 "%qs construct", "taskloop");
11223 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11224 break;
11225 default:
11226 break;
11228 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
11229 if (code != OMP_LOOP)
11230 check_non_private = "lastprivate";
11231 decl = OMP_CLAUSE_DECL (c);
11232 if (error_operand_p (decl))
11233 goto do_add;
11234 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
11235 && !lang_hooks.decls.omp_scalar_p (decl, true))
11237 error_at (OMP_CLAUSE_LOCATION (c),
11238 "non-scalar variable %qD in conditional "
11239 "%<lastprivate%> clause", decl);
11240 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11242 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11243 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
11244 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
11245 false);
11246 goto do_add;
11247 case OMP_CLAUSE_REDUCTION:
11248 if (OMP_CLAUSE_REDUCTION_TASK (c))
11250 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11252 if (nowait == -1)
11253 nowait = omp_find_clause (*list_p,
11254 OMP_CLAUSE_NOWAIT) != NULL_TREE;
11255 if (nowait
11256 && (outer_ctx == NULL
11257 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
11259 error_at (OMP_CLAUSE_LOCATION (c),
11260 "%<task%> reduction modifier on a construct "
11261 "with a %<nowait%> clause");
11262 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
11265 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
11267 error_at (OMP_CLAUSE_LOCATION (c),
11268 "invalid %<task%> reduction modifier on construct "
11269 "other than %<parallel%>, %qs, %<sections%> or "
11270 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
11271 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
11274 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11275 switch (code)
11277 case OMP_SECTIONS:
11278 error_at (OMP_CLAUSE_LOCATION (c),
11279 "%<inscan%> %<reduction%> clause on "
11280 "%qs construct", "sections");
11281 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11282 break;
11283 case OMP_PARALLEL:
11284 error_at (OMP_CLAUSE_LOCATION (c),
11285 "%<inscan%> %<reduction%> clause on "
11286 "%qs construct", "parallel");
11287 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11288 break;
11289 case OMP_TEAMS:
11290 error_at (OMP_CLAUSE_LOCATION (c),
11291 "%<inscan%> %<reduction%> clause on "
11292 "%qs construct", "teams");
11293 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11294 break;
11295 case OMP_TASKLOOP:
11296 error_at (OMP_CLAUSE_LOCATION (c),
11297 "%<inscan%> %<reduction%> clause on "
11298 "%qs construct", "taskloop");
11299 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11300 break;
11301 case OMP_SCOPE:
11302 error_at (OMP_CLAUSE_LOCATION (c),
11303 "%<inscan%> %<reduction%> clause on "
11304 "%qs construct", "scope");
11305 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11306 break;
11307 default:
11308 break;
11310 /* FALLTHRU */
11311 case OMP_CLAUSE_IN_REDUCTION:
11312 case OMP_CLAUSE_TASK_REDUCTION:
11313 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
11314 /* OpenACC permits reductions on private variables. */
11315 if (!(region_type & ORT_ACC)
11316 /* taskgroup is actually not a worksharing region. */
11317 && code != OMP_TASKGROUP)
11318 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
11319 decl = OMP_CLAUSE_DECL (c);
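/* Reductions over array sections arrive here as MEM_REFs: gimplify the
   section length (TYPE_MAX_VALUE of the domain) and any
   POINTER_PLUS_EXPR offset, firstprivatizing DECLs used in them.  */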
11320 if (TREE_CODE (decl) == MEM_REF)
11322 tree type = TREE_TYPE (decl);
11323 bool saved_into_ssa = gimplify_ctxp->into_ssa;
11324 gimplify_ctxp->into_ssa = false;
11325 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
11326 NULL, is_gimple_val, fb_rvalue, false)
11327 == GS_ERROR)
11329 gimplify_ctxp->into_ssa = saved_into_ssa;
11330 remove = true;
11331 break;
11333 gimplify_ctxp->into_ssa = saved_into_ssa;
11334 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11335 if (DECL_P (v))
11337 omp_firstprivatize_variable (ctx, v);
11338 omp_notice_variable (ctx, v, true);
11340 decl = TREE_OPERAND (decl, 0);
11341 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11343 gimplify_ctxp->into_ssa = false;
11344 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
11345 NULL, is_gimple_val, fb_rvalue, false)
11346 == GS_ERROR)
11348 gimplify_ctxp->into_ssa = saved_into_ssa;
11349 remove = true;
11350 break;
11352 gimplify_ctxp->into_ssa = saved_into_ssa;
11353 v = TREE_OPERAND (decl, 1);
11354 if (DECL_P (v))
11356 omp_firstprivatize_variable (ctx, v);
11357 omp_notice_variable (ctx, v, true);
11359 decl = TREE_OPERAND (decl, 0);
11361 if (TREE_CODE (decl) == ADDR_EXPR
11362 || TREE_CODE (decl) == INDIRECT_REF)
11363 decl = TREE_OPERAND (decl, 0);
11365 goto do_add_decl;
11366 case OMP_CLAUSE_LINEAR:
11367 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
11368 is_gimple_val, fb_rvalue) == GS_ERROR)
11370 remove = true;
11371 break;
11373 else
11375 if (code == OMP_SIMD
11376 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11378 struct gimplify_omp_ctx *octx = outer_ctx;
11379 if (octx
11380 && octx->region_type == ORT_WORKSHARE
11381 && octx->combined_loop
11382 && !octx->distribute)
11384 if (octx->outer_context
11385 && (octx->outer_context->region_type
11386 == ORT_COMBINED_PARALLEL))
11387 octx = octx->outer_context->outer_context;
11388 else
11389 octx = octx->outer_context;
11391 if (octx
11392 && octx->region_type == ORT_WORKSHARE
11393 && octx->combined_loop
11394 && octx->distribute)
11396 error_at (OMP_CLAUSE_LOCATION (c),
11397 "%<linear%> clause for variable other than "
11398 "loop iterator specified on construct "
11399 "combined with %<distribute%>");
11400 remove = true;
11401 break;
11404 /* For combined #pragma omp parallel for simd, we need to put
11405 lastprivate and perhaps firstprivate too on the
11406 parallel. Similarly for #pragma omp for simd. */
11407 struct gimplify_omp_ctx *octx = outer_ctx;
11408 bool taskloop_seen = false;
11409 decl = NULL_TREE;
11412 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11413 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11414 break;
11415 decl = OMP_CLAUSE_DECL (c);
11416 if (error_operand_p (decl))
11418 decl = NULL_TREE;
11419 break;
11421 flags = GOVD_SEEN;
11422 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11423 flags |= GOVD_FIRSTPRIVATE;
11424 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11425 flags |= GOVD_LASTPRIVATE;
11426 if (octx
11427 && octx->region_type == ORT_WORKSHARE
11428 && octx->combined_loop)
11430 if (octx->outer_context
11431 && (octx->outer_context->region_type
11432 == ORT_COMBINED_PARALLEL))
11433 octx = octx->outer_context;
11434 else if (omp_check_private (octx, decl, false))
11435 break;
11437 else if (octx
11438 && (octx->region_type & ORT_TASK) != 0
11439 && octx->combined_loop)
11440 taskloop_seen = true;
11441 else if (octx
11442 && octx->region_type == ORT_COMBINED_PARALLEL
11443 && ((ctx->region_type == ORT_WORKSHARE
11444 && octx == outer_ctx)
11445 || taskloop_seen))
11446 flags = GOVD_SEEN | GOVD_SHARED;
11447 else if (octx
11448 && ((octx->region_type & ORT_COMBINED_TEAMS)
11449 == ORT_COMBINED_TEAMS))
11450 flags = GOVD_SEEN | GOVD_SHARED;
11451 else if (octx
11452 && octx->region_type == ORT_COMBINED_TARGET)
11454 if (flags & GOVD_LASTPRIVATE)
11455 flags = GOVD_SEEN | GOVD_MAP;
11457 else
11458 break;
11459 splay_tree_node on
11460 = splay_tree_lookup (octx->variables,
11461 (splay_tree_key) decl);
11462 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11464 octx = NULL;
11465 break;
11467 omp_add_variable (octx, decl, flags);
11468 if (octx->outer_context == NULL)
11469 break;
11470 octx = octx->outer_context;
11472 while (1);
11473 if (octx
11474 && decl
11475 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11476 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11477 omp_notice_variable (octx, decl, true);
11479 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11480 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11481 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11483 notice_outer = false;
11484 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11486 goto do_add;
11488 case OMP_CLAUSE_MAP:
11489 decl = OMP_CLAUSE_DECL (c);
11490 if (error_operand_p (decl))
11491 remove = true;
11492 switch (code)
11494 case OMP_TARGET:
11495 break;
11496 case OACC_DATA:
11497 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11498 break;
11499 /* FALLTHRU */
11500 case OMP_TARGET_DATA:
11501 case OMP_TARGET_ENTER_DATA:
11502 case OMP_TARGET_EXIT_DATA:
11503 case OACC_ENTER_DATA:
11504 case OACC_EXIT_DATA:
11505 case OACC_HOST_DATA:
11506 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11507 || (OMP_CLAUSE_MAP_KIND (c)
11508 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11509 /* For target {,enter ,exit }data only the array slice is
11510 mapped, but not the pointer to it. */
11511 remove = true;
11512 break;
11513 default:
11514 break;
11516 if (remove)
11517 break;
11518 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11520 struct gimplify_omp_ctx *octx;
11521 for (octx = outer_ctx; octx; octx = octx->outer_context)
11523 if (octx->region_type != ORT_ACC_HOST_DATA)
11524 break;
11525 splay_tree_node n2
11526 = splay_tree_lookup (octx->variables,
11527 (splay_tree_key) decl);
11528 if (n2)
11529 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11530 "declared in enclosing %<host_data%> region",
11531 DECL_NAME (decl));
11534 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11535 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11536 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11537 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11538 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11540 remove = true;
11541 break;
11543 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11544 || (OMP_CLAUSE_MAP_KIND (c)
11545 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11546 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11547 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11549 OMP_CLAUSE_SIZE (c)
11550 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11551 false);
11552 if ((region_type & ORT_TARGET) != 0)
11553 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11554 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11557 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11559 tree base = omp_strip_components_and_deref (decl);
11560 if (DECL_P (base))
11562 decl = base;
11563 splay_tree_node n
11564 = splay_tree_lookup (ctx->variables,
11565 (splay_tree_key) decl);
11566 if (seen_error ()
11567 && n
11568 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11570 remove = true;
11571 break;
11573 flags = GOVD_MAP | GOVD_EXPLICIT;
11575 goto do_add_decl;
11579 if (TREE_CODE (decl) == TARGET_EXPR)
11581 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11582 is_gimple_lvalue, fb_lvalue)
11583 == GS_ERROR)
11584 remove = true;
11586 else if (!DECL_P (decl))
11588 tree d = decl, *pd;
11589 if (TREE_CODE (d) == ARRAY_REF)
11591 while (TREE_CODE (d) == ARRAY_REF)
11592 d = TREE_OPERAND (d, 0);
11593 if (TREE_CODE (d) == COMPONENT_REF
11594 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11595 decl = d;
11597 pd = &OMP_CLAUSE_DECL (c);
11598 if (d == decl
11599 && TREE_CODE (decl) == INDIRECT_REF
11600 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11601 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11602 == REFERENCE_TYPE)
11603 && (OMP_CLAUSE_MAP_KIND (c)
11604 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11606 pd = &TREE_OPERAND (decl, 0);
11607 decl = TREE_OPERAND (decl, 0);
11609 /* An "attach/detach" operation on an update directive should
11610 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11611 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11612 depends on the previous mapping. */
11613 if (code == OACC_UPDATE
11614 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11615 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11617 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11619 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11620 == ARRAY_TYPE)
11621 remove = true;
11622 else
11624 gomp_map_kind k = ((code == OACC_EXIT_DATA
11625 || code == OMP_TARGET_EXIT_DATA)
11626 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11627 OMP_CLAUSE_SET_MAP_KIND (c, k);
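/* For an "always" map of a component whose base is a pointer DECL,
   mark that base as GOVD_SEEN in the current context so it stays
   live.  */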
11631 tree cref = decl;
11633 while (TREE_CODE (cref) == ARRAY_REF)
11634 cref = TREE_OPERAND (cref, 0);
11636 if (TREE_CODE (cref) == INDIRECT_REF)
11637 cref = TREE_OPERAND (cref, 0);
11639 if (TREE_CODE (cref) == COMPONENT_REF)
11641 tree base = cref;
11642 while (base && !DECL_P (base))
11644 tree innerbase = omp_get_base_pointer (base);
11645 if (!innerbase)
11646 break;
11647 base = innerbase;
11649 if (base
11650 && DECL_P (base)
11651 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11652 && POINTER_TYPE_P (TREE_TYPE (base)))
11654 splay_tree_node n
11655 = splay_tree_lookup (ctx->variables,
11656 (splay_tree_key) base);
11657 n->value |= GOVD_SEEN;
11661 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11663 /* Don't gimplify *pd fully at this point, as the base
11664 will need to be adjusted during omp lowering. */
11665 auto_vec<tree, 10> expr_stack;
11666 tree *p = pd;
11667 while (handled_component_p (*p)
11668 || TREE_CODE (*p) == INDIRECT_REF
11669 || TREE_CODE (*p) == ADDR_EXPR
11670 || TREE_CODE (*p) == MEM_REF
11671 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11673 expr_stack.safe_push (*p);
11674 p = &TREE_OPERAND (*p, 0);
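/* EXPR_STACK now records the reference chain outermost-first.  Walk it
   from the base outwards, storing gimplified low bounds and element
   sizes (ARRAY_REF operands 2 and 3) and field offsets (COMPONENT_REF
   operand 2); the array indices themselves are gimplified in the loop
   that follows.  */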
11676 for (int i = expr_stack.length () - 1; i >= 0; i--)
11678 tree t = expr_stack[i];
11679 if (TREE_CODE (t) == ARRAY_REF
11680 || TREE_CODE (t) == ARRAY_RANGE_REF)
11682 if (TREE_OPERAND (t, 2) == NULL_TREE)
11684 tree low = unshare_expr (array_ref_low_bound (t));
11685 if (!is_gimple_min_invariant (low))
11687 TREE_OPERAND (t, 2) = low;
11688 if (gimplify_expr (&TREE_OPERAND (t, 2),
11689 pre_p, NULL,
11690 is_gimple_reg,
11691 fb_rvalue) == GS_ERROR)
11692 remove = true;
11695 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11696 NULL, is_gimple_reg,
11697 fb_rvalue) == GS_ERROR)
11698 remove = true;
11699 if (TREE_OPERAND (t, 3) == NULL_TREE)
11701 tree elmt_size = array_ref_element_size (t);
11702 if (!is_gimple_min_invariant (elmt_size))
11704 elmt_size = unshare_expr (elmt_size);
11705 tree elmt_type
11706 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11707 0)));
11708 tree factor
11709 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11710 elmt_size
11711 = size_binop (EXACT_DIV_EXPR, elmt_size,
11712 factor);
11713 TREE_OPERAND (t, 3) = elmt_size;
11714 if (gimplify_expr (&TREE_OPERAND (t, 3),
11715 pre_p, NULL,
11716 is_gimple_reg,
11717 fb_rvalue) == GS_ERROR)
11718 remove = true;
11721 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11722 NULL, is_gimple_reg,
11723 fb_rvalue) == GS_ERROR)
11724 remove = true;
11726 else if (TREE_CODE (t) == COMPONENT_REF)
11728 if (TREE_OPERAND (t, 2) == NULL_TREE)
11730 tree offset = component_ref_field_offset (t);
11731 if (!is_gimple_min_invariant (offset))
11733 offset = unshare_expr (offset);
11734 tree field = TREE_OPERAND (t, 1);
11735 tree factor
11736 = size_int (DECL_OFFSET_ALIGN (field)
11737 / BITS_PER_UNIT);
11738 offset = size_binop (EXACT_DIV_EXPR, offset,
11739 factor);
11740 TREE_OPERAND (t, 2) = offset;
11741 if (gimplify_expr (&TREE_OPERAND (t, 2),
11742 pre_p, NULL,
11743 is_gimple_reg,
11744 fb_rvalue) == GS_ERROR)
11745 remove = true;
11748 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11749 NULL, is_gimple_reg,
11750 fb_rvalue) == GS_ERROR)
11751 remove = true;
11754 for (; expr_stack.length () > 0; )
11756 tree t = expr_stack.pop ();
11758 if (TREE_CODE (t) == ARRAY_REF
11759 || TREE_CODE (t) == ARRAY_RANGE_REF)
11761 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11762 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11763 NULL, is_gimple_val,
11764 fb_rvalue) == GS_ERROR)
11765 remove = true;
11769 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11770 fb_lvalue) == GS_ERROR)
11772 remove = true;
11773 break;
11775 break;
11777 flags = GOVD_MAP | GOVD_EXPLICIT;
11778 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11779 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11780 flags |= GOVD_MAP_ALWAYS_TO;
11782 if ((code == OMP_TARGET
11783 || code == OMP_TARGET_DATA
11784 || code == OMP_TARGET_ENTER_DATA
11785 || code == OMP_TARGET_EXIT_DATA)
11786 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11788 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11789 octx = octx->outer_context)
11791 splay_tree_node n
11792 = splay_tree_lookup (octx->variables,
11793 (splay_tree_key) OMP_CLAUSE_DECL (c));
11794 /* If this is contained in an outer OpenMP region as a
11795 firstprivate value, remove the attach/detach. */
11796 if (n && (n->value & GOVD_FIRSTPRIVATE))
11798 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11799 goto do_add;
11803 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11804 ? GOMP_MAP_DETACH
11805 : GOMP_MAP_ATTACH);
11806 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11809 goto do_add;
11811 case OMP_CLAUSE_AFFINITY:
11812 gimplify_omp_affinity (list_p, pre_p);
11813 remove = true;
11814 break;
11815 case OMP_CLAUSE_DOACROSS:
11816 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11818 tree deps = OMP_CLAUSE_DECL (c);
11819 while (deps && TREE_CODE (deps) == TREE_LIST)
11821 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11822 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11823 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11824 pre_p, NULL, is_gimple_val, fb_rvalue);
11825 deps = TREE_CHAIN (deps);
11828 else
11829 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11830 == OMP_CLAUSE_DOACROSS_SOURCE);
11831 break;
11832 case OMP_CLAUSE_DEPEND:
11833 if (handled_depend_iterators == -1)
11834 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11835 if (handled_depend_iterators)
11837 if (handled_depend_iterators == 2)
11838 remove = true;
11839 break;
11841 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11843 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11844 NULL, is_gimple_val, fb_rvalue);
11845 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11847 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11849 remove = true;
11850 break;
11852 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11854 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11855 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11856 is_gimple_val, fb_rvalue) == GS_ERROR)
11858 remove = true;
11859 break;
11862 if (code == OMP_TASK)
11863 ctx->has_depend = true;
11864 break;
11866 case OMP_CLAUSE_TO:
11867 case OMP_CLAUSE_FROM:
11868 case OMP_CLAUSE__CACHE_:
11869 decl = OMP_CLAUSE_DECL (c);
11870 if (error_operand_p (decl))
11872 remove = true;
11873 break;
11875 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11876 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11877 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11878 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11879 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11881 remove = true;
11882 break;
11884 if (!DECL_P (decl))
11886 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11887 NULL, is_gimple_lvalue, fb_lvalue)
11888 == GS_ERROR)
11890 remove = true;
11891 break;
11893 break;
11895 goto do_notice;
11897 case OMP_CLAUSE_USE_DEVICE_PTR:
11898 case OMP_CLAUSE_USE_DEVICE_ADDR:
11899 flags = GOVD_EXPLICIT;
11900 goto do_add;
11902 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11903 decl = OMP_CLAUSE_DECL (c);
11904 while (TREE_CODE (decl) == INDIRECT_REF
11905 || TREE_CODE (decl) == ARRAY_REF)
11906 decl = TREE_OPERAND (decl, 0);
11907 flags = GOVD_EXPLICIT;
11908 goto do_add_decl;
11910 case OMP_CLAUSE_IS_DEVICE_PTR:
11911 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11912 goto do_add;
11914 do_add:
11915 decl = OMP_CLAUSE_DECL (c);
11916 do_add_decl:
11917 if (error_operand_p (decl))
11919 remove = true;
11920 break;
11922 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11924 tree t = omp_member_access_dummy_var (decl);
11925 if (t)
11927 tree v = DECL_VALUE_EXPR (decl);
11928 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11929 if (outer_ctx)
11930 omp_notice_variable (outer_ctx, t, true);
11933 if (code == OACC_DATA
11934 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11935 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11936 flags |= GOVD_MAP_0LEN_ARRAY;
11937 omp_add_variable (ctx, decl, flags);
11938 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11939 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11940 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11941 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11943 struct gimplify_omp_ctx *pctx
11944 = code == OMP_TARGET ? outer_ctx : ctx;
11945 if (pctx)
11946 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11947 GOVD_LOCAL | GOVD_SEEN);
11948 if (pctx
11949 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11950 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11951 find_decl_expr,
11952 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11953 NULL) == NULL_TREE)
11954 omp_add_variable (pctx,
11955 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11956 GOVD_LOCAL | GOVD_SEEN);
11957 gimplify_omp_ctxp = pctx;
11958 push_gimplify_context ();
11960 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11961 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11963 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11964 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11965 pop_gimplify_context
11966 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11967 push_gimplify_context ();
11968 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11969 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11970 pop_gimplify_context
11971 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11972 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11973 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11975 gimplify_omp_ctxp = outer_ctx;
11977 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11978 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11980 gimplify_omp_ctxp = ctx;
11981 push_gimplify_context ();
11982 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11984 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11985 NULL, NULL);
11986 TREE_SIDE_EFFECTS (bind) = 1;
11987 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11988 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11990 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11991 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11992 pop_gimplify_context
11993 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11994 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11996 gimplify_omp_ctxp = outer_ctx;
11998 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11999 && OMP_CLAUSE_LINEAR_STMT (c))
12001 gimplify_omp_ctxp = ctx;
12002 push_gimplify_context ();
12003 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
12005 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
12006 NULL, NULL);
12007 TREE_SIDE_EFFECTS (bind) = 1;
12008 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
12009 OMP_CLAUSE_LINEAR_STMT (c) = bind;
12011 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
12012 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
12013 pop_gimplify_context
12014 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
12015 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
12017 gimplify_omp_ctxp = outer_ctx;
12019 if (notice_outer)
12020 goto do_notice;
12021 break;
12023 case OMP_CLAUSE_COPYIN:
12024 case OMP_CLAUSE_COPYPRIVATE:
12025 decl = OMP_CLAUSE_DECL (c);
12026 if (error_operand_p (decl))
12028 remove = true;
12029 break;
12031 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
12032 && !remove
12033 && !omp_check_private (ctx, decl, true))
12035 remove = true;
12036 if (is_global_var (decl))
12038 if (DECL_THREAD_LOCAL_P (decl))
12039 remove = false;
12040 else if (DECL_HAS_VALUE_EXPR_P (decl))
12042 tree value = get_base_address (DECL_VALUE_EXPR (decl));
12044 if (value
12045 && DECL_P (value)
12046 && DECL_THREAD_LOCAL_P (value))
12047 remove = false;
12050 if (remove)
12051 error_at (OMP_CLAUSE_LOCATION (c),
12052 "copyprivate variable %qE is not threadprivate"
12053 " or private in outer context", DECL_NAME (decl));
12055 do_notice:
12056 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12057 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
12058 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12059 && outer_ctx
12060 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
12061 || (region_type == ORT_WORKSHARE
12062 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12063 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
12064 || code == OMP_LOOP)))
12065 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
12066 || (code == OMP_LOOP
12067 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12068 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
12069 == ORT_COMBINED_TEAMS))))
12071 splay_tree_node on
12072 = splay_tree_lookup (outer_ctx->variables,
12073 (splay_tree_key)decl);
12074 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
12076 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12077 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12078 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
12079 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12080 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
12081 == POINTER_TYPE))))
12082 omp_firstprivatize_variable (outer_ctx, decl);
12083 else
12085 omp_add_variable (outer_ctx, decl,
12086 GOVD_SEEN | GOVD_SHARED);
12087 if (outer_ctx->outer_context)
12088 omp_notice_variable (outer_ctx->outer_context, decl,
12089 true);
12093 if (outer_ctx)
12094 omp_notice_variable (outer_ctx, decl, true);
12095 if (check_non_private
12096 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
12097 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
12098 || decl == OMP_CLAUSE_DECL (c)
12099 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12100 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12101 == ADDR_EXPR
12102 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12103 == POINTER_PLUS_EXPR
12104 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
12105 (OMP_CLAUSE_DECL (c), 0), 0))
12106 == ADDR_EXPR)))))
12107 && omp_check_private (ctx, decl, false))
12109 error ("%s variable %qE is private in outer context",
12110 check_non_private, DECL_NAME (decl));
12111 remove = true;
12113 break;
12115 case OMP_CLAUSE_DETACH:
12116 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
12117 goto do_add;
12119 case OMP_CLAUSE_IF:
12120 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
12121 && OMP_CLAUSE_IF_MODIFIER (c) != code)
12123 const char *p[2];
12124 for (int i = 0; i < 2; i++)
12125 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
12127 case VOID_CST: p[i] = "cancel"; break;
12128 case OMP_PARALLEL: p[i] = "parallel"; break;
12129 case OMP_SIMD: p[i] = "simd"; break;
12130 case OMP_TASK: p[i] = "task"; break;
12131 case OMP_TASKLOOP: p[i] = "taskloop"; break;
12132 case OMP_TARGET_DATA: p[i] = "target data"; break;
12133 case OMP_TARGET: p[i] = "target"; break;
12134 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
12135 case OMP_TARGET_ENTER_DATA:
12136 p[i] = "target enter data"; break;
12137 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
12138 default: gcc_unreachable ();
12140 error_at (OMP_CLAUSE_LOCATION (c),
12141 "expected %qs %<if%> clause modifier rather than %qs",
12142 p[0], p[1]);
12143 remove = true;
12145 /* Fall through. */
12147 case OMP_CLAUSE_SELF:
12148 case OMP_CLAUSE_FINAL:
12149 OMP_CLAUSE_OPERAND (c, 0)
12150 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
12151 /* Fall through. */
12153 case OMP_CLAUSE_NUM_TEAMS:
12154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
12155 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
12156 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
12158 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
12160 remove = true;
12161 break;
12163 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
12164 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
12165 pre_p, NULL, true);
12167 /* Fall through. */
12169 case OMP_CLAUSE_SCHEDULE:
12170 case OMP_CLAUSE_NUM_THREADS:
12171 case OMP_CLAUSE_THREAD_LIMIT:
12172 case OMP_CLAUSE_DIST_SCHEDULE:
12173 case OMP_CLAUSE_DEVICE:
12174 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
12175 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
12177 if (code != OMP_TARGET)
12179 error_at (OMP_CLAUSE_LOCATION (c),
12180 "%<device%> clause with %<ancestor%> is only "
12181 "allowed on %<target%> construct");
12182 remove = true;
12183 break;
12186 tree clauses = *orig_list_p;
12187 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
12188 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
12189 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
12190 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
12191 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
12192 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
12195 error_at (OMP_CLAUSE_LOCATION (c),
12196 "with %<ancestor%>, only the %<device%>, "
12197 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
12198 "and %<map%> clauses may appear on the "
12199 "construct");
12200 remove = true;
12201 break;
12204 /* Fall through. */
12206 case OMP_CLAUSE_PRIORITY:
12207 case OMP_CLAUSE_GRAINSIZE:
12208 case OMP_CLAUSE_NUM_TASKS:
12209 case OMP_CLAUSE_FILTER:
12210 case OMP_CLAUSE_HINT:
12211 case OMP_CLAUSE_ASYNC:
12212 case OMP_CLAUSE_WAIT:
12213 case OMP_CLAUSE_NUM_GANGS:
12214 case OMP_CLAUSE_NUM_WORKERS:
12215 case OMP_CLAUSE_VECTOR_LENGTH:
12216 case OMP_CLAUSE_WORKER:
12217 case OMP_CLAUSE_VECTOR:
12218 if (OMP_CLAUSE_OPERAND (c, 0)
12219 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
12221 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
12223 remove = true;
12224 break;
12226 /* All these clauses care about value, not a particular decl,
12227 so try to force it into an SSA_NAME or a fresh temporary. */
12228 OMP_CLAUSE_OPERAND (c, 0)
12229 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
12230 pre_p, NULL, true);
12232 break;
12234 case OMP_CLAUSE_GANG:
12235 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
12236 is_gimple_val, fb_rvalue) == GS_ERROR)
12237 remove = true;
12238 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
12239 is_gimple_val, fb_rvalue) == GS_ERROR)
12240 remove = true;
12241 break;
12243 case OMP_CLAUSE_NOWAIT:
12244 nowait = 1;
12245 break;
12247 case OMP_CLAUSE_ORDERED:
12248 case OMP_CLAUSE_UNTIED:
12249 case OMP_CLAUSE_COLLAPSE:
12250 case OMP_CLAUSE_TILE:
12251 case OMP_CLAUSE_AUTO:
12252 case OMP_CLAUSE_SEQ:
12253 case OMP_CLAUSE_INDEPENDENT:
12254 case OMP_CLAUSE_MERGEABLE:
12255 case OMP_CLAUSE_PROC_BIND:
12256 case OMP_CLAUSE_SAFELEN:
12257 case OMP_CLAUSE_SIMDLEN:
12258 case OMP_CLAUSE_NOGROUP:
12259 case OMP_CLAUSE_THREADS:
12260 case OMP_CLAUSE_SIMD:
12261 case OMP_CLAUSE_BIND:
12262 case OMP_CLAUSE_IF_PRESENT:
12263 case OMP_CLAUSE_FINALIZE:
12264 break;
12266 case OMP_CLAUSE_ORDER:
12267 ctx->order_concurrent = true;
12268 break;
12270 case OMP_CLAUSE_DEFAULTMAP:
12271 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
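/* Translate the clause's category into a range of GDMK_* slots and its
   behavior into GOVD_* flags, so that e.g. (roughly)
     defaultmap(tofrom: scalar)
   sets both the GDMK_SCALAR and GDMK_SCALAR_TARGET entries to
   GOVD_MAP.  */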
12272 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
12274 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
12275 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
12276 gdmkmin = GDMK_SCALAR;
12277 gdmkmax = GDMK_POINTER;
12278 break;
12279 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
12280 gdmkmin = GDMK_SCALAR;
12281 gdmkmax = GDMK_SCALAR_TARGET;
12282 break;
12283 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
12284 gdmkmin = gdmkmax = GDMK_AGGREGATE;
12285 break;
12286 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
12287 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
12288 break;
12289 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
12290 gdmkmin = gdmkmax = GDMK_POINTER;
12291 break;
12292 default:
12293 gcc_unreachable ();
12295 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
12296 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
12298 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
12299 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
12300 break;
12301 case OMP_CLAUSE_DEFAULTMAP_TO:
12302 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
12303 break;
12304 case OMP_CLAUSE_DEFAULTMAP_FROM:
12305 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
12306 break;
12307 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
12308 ctx->defaultmap[gdmk] = GOVD_MAP;
12309 break;
12310 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
12311 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12312 break;
12313 case OMP_CLAUSE_DEFAULTMAP_NONE:
12314 ctx->defaultmap[gdmk] = 0;
12315 break;
12316 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
12317 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
12318 break;
12319 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
12320 switch (gdmk)
12322 case GDMK_SCALAR:
12323 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12324 break;
12325 case GDMK_SCALAR_TARGET:
12326 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
12327 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
12328 break;
12329 case GDMK_AGGREGATE:
12330 case GDMK_ALLOCATABLE:
12331 ctx->defaultmap[gdmk] = GOVD_MAP;
12332 break;
12333 case GDMK_POINTER:
12334 ctx->defaultmap[gdmk] = GOVD_MAP;
12335 if (!lang_GNU_Fortran ())
12336 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
12337 break;
12338 default:
12339 gcc_unreachable ();
12341 break;
12342 default:
12343 gcc_unreachable ();
12345 break;
12347 case OMP_CLAUSE_ALIGNED:
12348 decl = OMP_CLAUSE_DECL (c);
12349 if (error_operand_p (decl))
12351 remove = true;
12352 break;
12354 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12355 is_gimple_val, fb_rvalue) == GS_ERROR)
12357 remove = true;
12358 break;
12360 if (!is_global_var (decl)
12361 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12362 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12363 break;
12365 case OMP_CLAUSE_NONTEMPORAL:
12366 decl = OMP_CLAUSE_DECL (c);
12367 if (error_operand_p (decl))
12369 remove = true;
12370 break;
12372 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12373 break;
12375 case OMP_CLAUSE_ALLOCATE:
12376 decl = OMP_CLAUSE_DECL (c);
12377 if (error_operand_p (decl))
12379 remove = true;
12380 break;
12382 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12383 is_gimple_val, fb_rvalue) == GS_ERROR)
12385 remove = true;
12386 break;
12388 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12389 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12390 == INTEGER_CST))
12392 else if (code == OMP_TASKLOOP
12393 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12394 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12395 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12396 pre_p, NULL, false);
12397 break;
12399 case OMP_CLAUSE_DEFAULT:
12400 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12401 break;
12403 case OMP_CLAUSE_INCLUSIVE:
12404 case OMP_CLAUSE_EXCLUSIVE:
12405 decl = OMP_CLAUSE_DECL (c);
12407 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12408 (splay_tree_key) decl);
12409 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12411 error_at (OMP_CLAUSE_LOCATION (c),
12412 "%qD specified in %qs clause but not in %<inscan%> "
12413 "%<reduction%> clause on the containing construct",
12414 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12415 remove = true;
12417 else
12419 n->value |= GOVD_REDUCTION_INSCAN;
12420 if (outer_ctx->region_type == ORT_SIMD
12421 && outer_ctx->outer_context
12422 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12424 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12425 (splay_tree_key) decl);
12426 if (n && (n->value & GOVD_REDUCTION) != 0)
12427 n->value |= GOVD_REDUCTION_INSCAN;
12431 break;
12433 case OMP_CLAUSE_NOHOST:
12434 default:
12435 gcc_unreachable ();
12438 if (code == OACC_DATA
12439 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12440 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12441 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12442 remove = true;
12443 if (remove)
12444 *list_p = OMP_CLAUSE_CHAIN (c);
12445 else
12446 list_p = &OMP_CLAUSE_CHAIN (c);
12449 ctx->clauses = *orig_list_p;
12450 gimplify_omp_ctxp = ctx;
12453 /* Return true if DECL is a candidate for the shared-to-firstprivate
12454 optimization: only non-addressable scalars that are not references
12455 and not too large qualify. */
12457 static bool
12458 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12460 if (TREE_ADDRESSABLE (decl))
12461 return false;
12462 tree type = TREE_TYPE (decl);
12463 if (!is_gimple_reg_type (type)
12464 || TREE_CODE (type) == REFERENCE_TYPE
12465 || TREE_ADDRESSABLE (type))
12466 return false;
12467 /* Don't optimize overly large decls, as each thread/task would get
12468 its own copy. */
12469 HOST_WIDE_INT len = int_size_in_bytes (type);
12470 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12471 return false;
12472 if (omp_privatize_by_reference (decl))
12473 return false;
12474 return true;
12477 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12478 For a DECL that satisfies omp_shared_to_firstprivate_optimizable_decl_p,
12479 mark it as GOVD_WRITTEN in the innermost enclosing context that shares it. */
12481 static void
12482 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12484 for (; ctx; ctx = ctx->outer_context)
12486 splay_tree_node n = splay_tree_lookup (ctx->variables,
12487 (splay_tree_key) decl);
12488 if (n == NULL)
12489 continue;
12490 else if (n->value & GOVD_SHARED)
12492 n->value |= GOVD_WRITTEN;
12493 return;
12495 else if (n->value & GOVD_DATA_SHARE_CLASS)
12496 return;
12500 /* Helper callback for walk_gimple_seq to discover possible stores
12501 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12502 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12503 for those. */
12505 static tree
12506 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12508 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12510 *walk_subtrees = 0;
12511 if (!wi->is_lhs)
12512 return NULL_TREE;
12514 tree op = *tp;
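/* Peel off handled components and MEM_REF/TARGET_MEM_REF wrappers
   around an ADDR_EXPR until the underlying DECL, if any, is
   exposed.  */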
12517 if (handled_component_p (op))
12518 op = TREE_OPERAND (op, 0);
12519 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12520 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12521 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12522 else
12523 break;
12525 while (1);
12526 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12527 return NULL_TREE;
12529 omp_mark_stores (gimplify_omp_ctxp, op);
12530 return NULL_TREE;
12533 /* Helper callback for walk_gimple_seq to discover possible stores
12534 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12535 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12536 for those. */
12538 static tree
12539 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12540 bool *handled_ops_p,
12541 struct walk_stmt_info *wi)
12543 gimple *stmt = gsi_stmt (*gsi_p);
12544 switch (gimple_code (stmt))
12546 /* Don't recurse on OpenMP constructs for which
12547 gimplify_adjust_omp_clauses already handled the bodies,
12548 except for gimple_omp_for_pre_body, which is walked here. */
12549 case GIMPLE_OMP_FOR:
12550 *handled_ops_p = true;
12551 if (gimple_omp_for_pre_body (stmt))
12552 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12553 omp_find_stores_stmt, omp_find_stores_op, wi);
12554 break;
12555 case GIMPLE_OMP_PARALLEL:
12556 case GIMPLE_OMP_TASK:
12557 case GIMPLE_OMP_SECTIONS:
12558 case GIMPLE_OMP_SINGLE:
12559 case GIMPLE_OMP_SCOPE:
12560 case GIMPLE_OMP_TARGET:
12561 case GIMPLE_OMP_TEAMS:
12562 case GIMPLE_OMP_CRITICAL:
12563 *handled_ops_p = true;
12564 break;
12565 default:
12566 break;
12568 return NULL_TREE;
12571 struct gimplify_adjust_omp_clauses_data
12573 tree *list_p;
12574 gimple_seq *pre_p;
12577 /* For all variables that were not actually used within the context,
12578 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12580 static int
12581 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12583 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12584 gimple_seq *pre_p
12585 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12586 tree decl = (tree) n->key;
12587 unsigned flags = n->value;
12588 enum omp_clause_code code;
12589 tree clause;
12590 bool private_debug;
12592 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12593 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12594 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12595 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12596 return 0;
12597 if ((flags & GOVD_SEEN) == 0)
12598 return 0;
12599 if (flags & GOVD_DEBUG_PRIVATE)
12601 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12602 private_debug = true;
12604 else if (flags & GOVD_MAP)
12605 private_debug = false;
12606 else
12607 private_debug
12608 = lang_hooks.decls.omp_private_debug_clause (decl,
12609 !!(flags & GOVD_SHARED));
12610 if (private_debug)
12611 code = OMP_CLAUSE_PRIVATE;
12612 else if (flags & GOVD_MAP)
12614 code = OMP_CLAUSE_MAP;
12615 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12616 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12618 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12619 return 0;
12621 if (VAR_P (decl)
12622 && DECL_IN_CONSTANT_POOL (decl)
12623 && !lookup_attribute ("omp declare target",
12624 DECL_ATTRIBUTES (decl)))
12626 tree id = get_identifier ("omp declare target");
12627 DECL_ATTRIBUTES (decl)
12628 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12629 varpool_node *node = varpool_node::get (decl);
12630 if (node)
12632 node->offloadable = 1;
12633 if (ENABLE_OFFLOADING)
12634 g->have_offload = true;
12638 else if (flags & GOVD_SHARED)
12640 if (is_global_var (decl))
12642 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12643 while (ctx != NULL)
12645 splay_tree_node on
12646 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12647 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12648 | GOVD_PRIVATE | GOVD_REDUCTION
12649 | GOVD_LINEAR | GOVD_MAP)) != 0)
12650 break;
12651 ctx = ctx->outer_context;
12653 if (ctx == NULL)
12654 return 0;
12656 code = OMP_CLAUSE_SHARED;
12657 /* Don't optimize shared into firstprivate for read-only vars
12658 on tasks with a depend clause; we shouldn't try to copy them
12659 until the dependencies are satisfied. */
12660 if (gimplify_omp_ctxp->has_depend)
12661 flags |= GOVD_WRITTEN;
12663 else if (flags & GOVD_PRIVATE)
12664 code = OMP_CLAUSE_PRIVATE;
12665 else if (flags & GOVD_FIRSTPRIVATE)
12667 code = OMP_CLAUSE_FIRSTPRIVATE;
12668 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12669 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12670 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12672 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12673 "%<target%> construct", decl);
12674 return 0;
12677 else if (flags & GOVD_LASTPRIVATE)
12678 code = OMP_CLAUSE_LASTPRIVATE;
12679 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12680 return 0;
12681 else if (flags & GOVD_CONDTEMP)
12683 code = OMP_CLAUSE__CONDTEMP_;
12684 gimple_add_tmp_var (decl);
12686 else
12687 gcc_unreachable ();
12689 if (((flags & GOVD_LASTPRIVATE)
12690 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12691 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12692 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12694 tree chain = *list_p;
12695 clause = build_omp_clause (input_location, code);
12696 OMP_CLAUSE_DECL (clause) = decl;
12697 OMP_CLAUSE_CHAIN (clause) = chain;
12698 if (private_debug)
12699 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12700 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12701 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12702 else if (code == OMP_CLAUSE_SHARED
12703 && (flags & GOVD_WRITTEN) == 0
12704 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12705 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12706 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12707 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
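/* Implicit zero-length array section: build a zero-sized GOMP_MAP_ALLOC
   of *DECL (marked as a maybe-zero-length section) and pass the pointer
   itself via GOMP_MAP_FIRSTPRIVATE_POINTER.  */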
12708 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12710 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12711 OMP_CLAUSE_DECL (nc) = decl;
12712 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12713 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12714 OMP_CLAUSE_DECL (clause)
12715 = build_simple_mem_ref_loc (input_location, decl);
12716 OMP_CLAUSE_DECL (clause)
12717 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12718 build_int_cst (build_pointer_type (char_type_node), 0));
12719 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12720 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12721 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12722 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12723 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12724 OMP_CLAUSE_CHAIN (nc) = chain;
12725 OMP_CLAUSE_CHAIN (clause) = nc;
12726 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12727 gimplify_omp_ctxp = ctx->outer_context;
12728 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12729 pre_p, NULL, is_gimple_val, fb_rvalue);
12730 gimplify_omp_ctxp = ctx;
12732 else if (code == OMP_CLAUSE_MAP)
12734 int kind;
12735 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12736 switch (flags & (GOVD_MAP_TO_ONLY
12737 | GOVD_MAP_FORCE
12738 | GOVD_MAP_FORCE_PRESENT
12739 | GOVD_MAP_ALLOC_ONLY
12740 | GOVD_MAP_FROM_ONLY))
12742 case 0:
12743 kind = GOMP_MAP_TOFROM;
12744 break;
12745 case GOVD_MAP_FORCE:
12746 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12747 break;
12748 case GOVD_MAP_TO_ONLY:
12749 kind = GOMP_MAP_TO;
12750 break;
12751 case GOVD_MAP_FROM_ONLY:
12752 kind = GOMP_MAP_FROM;
12753 break;
12754 case GOVD_MAP_ALLOC_ONLY:
12755 kind = GOMP_MAP_ALLOC;
12756 break;
12757 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12758 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12759 break;
12760 case GOVD_MAP_FORCE_PRESENT:
12761 kind = GOMP_MAP_FORCE_PRESENT;
12762 break;
12763 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
12764 kind = GOMP_MAP_FORCE_PRESENT;
12765 break;
12766 default:
12767 gcc_unreachable ();
12769 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12770 /* Setting of the implicit flag for the runtime is currently disabled for
12771 OpenACC. */
12772 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12773 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
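/* Variable-sized decls live behind a pointer recorded in their
   DECL_VALUE_EXPR: map the pointed-to storage, and add a trailing
   GOMP_MAP_FIRSTPRIVATE_POINTER (or GOMP_MAP_POINTER) clause for the
   base pointer.  */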
12774 if (DECL_SIZE (decl)
12775 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12777 tree decl2 = DECL_VALUE_EXPR (decl);
12778 gcc_assert (INDIRECT_REF_P (decl2));
12779 decl2 = TREE_OPERAND (decl2, 0);
12780 gcc_assert (DECL_P (decl2));
12781 tree mem = build_simple_mem_ref (decl2);
12782 OMP_CLAUSE_DECL (clause) = mem;
12783 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12784 if (gimplify_omp_ctxp->outer_context)
12786 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12787 omp_notice_variable (ctx, decl2, true);
12788 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12790 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12791 OMP_CLAUSE_MAP);
12792 OMP_CLAUSE_DECL (nc) = decl;
12793 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12794 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12795 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12796 else
12797 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12798 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12799 OMP_CLAUSE_CHAIN (clause) = nc;
12801 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12802 && omp_privatize_by_reference (decl))
12804 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12805 OMP_CLAUSE_SIZE (clause)
12806 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12807 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12808 gimplify_omp_ctxp = ctx->outer_context;
12809 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12810 pre_p, NULL, is_gimple_val, fb_rvalue);
12811 gimplify_omp_ctxp = ctx;
12812 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12813 OMP_CLAUSE_MAP);
12814 OMP_CLAUSE_DECL (nc) = decl;
12815 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12816 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12817 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12818 OMP_CLAUSE_CHAIN (clause) = nc;
12820 else
12821 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12823 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12825 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12826 OMP_CLAUSE_DECL (nc) = decl;
12827 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12828 OMP_CLAUSE_CHAIN (nc) = chain;
12829 OMP_CLAUSE_CHAIN (clause) = nc;
12830 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12831 gimplify_omp_ctxp = ctx->outer_context;
12832 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12833 (ctx->region_type & ORT_ACC) != 0);
12834 gimplify_omp_ctxp = ctx;
12836 *list_p = clause;
12837 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12838 gimplify_omp_ctxp = ctx->outer_context;
12839 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12840 in simd. Those are only added for the local vars inside the simd body
12841 and they don't need to be e.g. default constructible. */
12842 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12843 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12844 (ctx->region_type & ORT_ACC) != 0);
12845 if (gimplify_omp_ctxp)
12846 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12847 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12848 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12849 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12850 true);
12851 gimplify_omp_ctxp = ctx;
12852 return 0;
12855 static void
12856 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12857 enum tree_code code)
12859 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12860 tree *orig_list_p = list_p;
12861 tree c, decl;
12862 bool has_inscan_reductions = false;
12864 if (body)
12866 struct gimplify_omp_ctx *octx;
12867 for (octx = ctx; octx; octx = octx->outer_context)
12868 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12869 break;
12870 if (octx)
12872 struct walk_stmt_info wi;
12873 memset (&wi, 0, sizeof (wi));
12874 walk_gimple_seq (body, omp_find_stores_stmt,
12875 omp_find_stores_op, &wi);
12879 if (ctx->add_safelen1)
12881 /* If there are VLAs in the body of the simd loop, prevent
12882 vectorization by adding an artificial safelen(1) clause. */
12883 gcc_assert (ctx->region_type == ORT_SIMD);
12884 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12885 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12886 OMP_CLAUSE_CHAIN (c) = *list_p;
12887 *list_p = c;
12888 list_p = &OMP_CLAUSE_CHAIN (c);
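/* Propagate conditional lastprivate clauses from the enclosing combined
   parallel down to this worksharing region, adding a corresponding
   firstprivate clause whenever the outer context firstprivatizes the
   variable as well. */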
12891 if (ctx->region_type == ORT_WORKSHARE
12892 && ctx->outer_context
12893 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12895 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12896 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12897 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12899 decl = OMP_CLAUSE_DECL (c);
12900 splay_tree_node n
12901 = splay_tree_lookup (ctx->outer_context->variables,
12902 (splay_tree_key) decl);
12903 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12904 (splay_tree_key) decl));
12905 omp_add_variable (ctx, decl, n->value);
12906 tree c2 = copy_node (c);
12907 OMP_CLAUSE_CHAIN (c2) = *list_p;
12908 *list_p = c2;
12909 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12910 continue;
12911 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12912 OMP_CLAUSE_FIRSTPRIVATE);
12913 OMP_CLAUSE_DECL (c2) = decl;
12914 OMP_CLAUSE_CHAIN (c2) = *list_p;
12915 *list_p = c2;
12919 tree attach_list = NULL_TREE;
12920 tree *attach_tail = &attach_list;
12922 while ((c = *list_p) != NULL)
12924 splay_tree_node n;
12925 bool remove = false;
12926 bool move_attach = false;
12928 switch (OMP_CLAUSE_CODE (c))
12930 case OMP_CLAUSE_FIRSTPRIVATE:
12931 if ((ctx->region_type & ORT_TARGET)
12932 && (ctx->region_type & ORT_ACC) == 0
12933 && TYPE_ATOMIC (strip_array_types
12934 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12936 error_at (OMP_CLAUSE_LOCATION (c),
12937 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12938 "%<target%> construct", OMP_CLAUSE_DECL (c));
12939 remove = true;
12940 break;
12942 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12944 decl = OMP_CLAUSE_DECL (c);
12945 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12946 if ((n->value & GOVD_MAP) != 0)
12948 remove = true;
12949 break;
12951 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12952 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12954 /* FALLTHRU */
12955 case OMP_CLAUSE_PRIVATE:
12956 case OMP_CLAUSE_SHARED:
12957 case OMP_CLAUSE_LINEAR:
12958 decl = OMP_CLAUSE_DECL (c);
12959 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12960 remove = !(n->value & GOVD_SEEN);
12961 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12962 && code == OMP_PARALLEL
12963 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12964 remove = true;
12965 if (! remove)
12967 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12968 if ((n->value & GOVD_DEBUG_PRIVATE)
12969 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12971 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12972 || ((n->value & GOVD_DATA_SHARE_CLASS)
12973 == GOVD_SHARED));
12974 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12975 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12977 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12978 && ctx->has_depend
12979 && DECL_P (decl))
12980 n->value |= GOVD_WRITTEN;
12981 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12982 && (n->value & GOVD_WRITTEN) == 0
12983 && DECL_P (decl)
12984 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12985 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12986 else if (DECL_P (decl)
12987 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12988 && (n->value & GOVD_WRITTEN) != 0)
12989 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12990 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12991 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12992 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12994 else
12995 n->value &= ~GOVD_EXPLICIT;
12996 break;
12998 case OMP_CLAUSE_LASTPRIVATE:
12999 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
13000 accurately reflect the presence of a FIRSTPRIVATE clause. */
13001 decl = OMP_CLAUSE_DECL (c);
13002 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13003 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
13004 = (n->value & GOVD_FIRSTPRIVATE) != 0;
13005 if (code == OMP_DISTRIBUTE
13006 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
13008 remove = true;
13009 error_at (OMP_CLAUSE_LOCATION (c),
13010 "same variable used in %<firstprivate%> and "
13011 "%<lastprivate%> clauses on %<distribute%> "
13012 "construct");
13014 if (!remove
13015 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13016 && DECL_P (decl)
13017 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13018 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13019 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
13020 remove = true;
13021 break;
13023 case OMP_CLAUSE_ALIGNED:
13024 decl = OMP_CLAUSE_DECL (c);
13025 if (!is_global_var (decl))
13027 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13028 remove = n == NULL || !(n->value & GOVD_SEEN);
13029 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
13031 struct gimplify_omp_ctx *octx;
13032 if (n != NULL
13033 && (n->value & (GOVD_DATA_SHARE_CLASS
13034 & ~GOVD_FIRSTPRIVATE)))
13035 remove = true;
13036 else
13037 for (octx = ctx->outer_context; octx;
13038 octx = octx->outer_context)
13040 n = splay_tree_lookup (octx->variables,
13041 (splay_tree_key) decl);
13042 if (n == NULL)
13043 continue;
13044 if (n->value & GOVD_LOCAL)
13045 break;
13046 /* We have to avoid assigning a shared variable
13047 to itself when trying to add
13048 __builtin_assume_aligned. */
13049 if (n->value & GOVD_SHARED)
13051 remove = true;
13052 break;
13057 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
13059 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13060 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
13061 remove = true;
13063 break;
13065 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13066 decl = OMP_CLAUSE_DECL (c);
13067 while (INDIRECT_REF_P (decl)
13068 || TREE_CODE (decl) == ARRAY_REF)
13069 decl = TREE_OPERAND (decl, 0);
13070 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13071 remove = n == NULL || !(n->value & GOVD_SEEN);
13072 break;
13074 case OMP_CLAUSE_IS_DEVICE_PTR:
13075 case OMP_CLAUSE_NONTEMPORAL:
13076 decl = OMP_CLAUSE_DECL (c);
13077 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13078 remove = n == NULL || !(n->value & GOVD_SEEN);
13079 break;
13081 case OMP_CLAUSE_MAP:
13082 switch (OMP_CLAUSE_MAP_KIND (c))
13084 case GOMP_MAP_PRESENT_ALLOC:
13085 case GOMP_MAP_PRESENT_TO:
13086 case GOMP_MAP_PRESENT_FROM:
13087 case GOMP_MAP_PRESENT_TOFROM:
13088 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
13089 break;
13090 default:
13091 break;
13093 if (code == OMP_TARGET_EXIT_DATA
13094 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
13096 remove = true;
13097 break;
13099 /* If we have a target region, we can push all the attaches to the
13100 end of the list (we may have standalone "attach" operations
13101 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
13102 the attachment point AND the pointed-to block have been mapped).
13103 If we have something else, e.g. "enter data", we need to keep
13104 "attach" nodes together with the previous node they attach to so
13105 that separate "exit data" operations work properly (see
13106 libgomp/target.c). */
13107 if ((ctx->region_type & ORT_TARGET) != 0
13108 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13109 || (OMP_CLAUSE_MAP_KIND (c)
13110 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
13111 move_attach = true;
13112 decl = OMP_CLAUSE_DECL (c);
13113 /* Data clauses associated with reductions must be
13114 compatible with present_or_copy. Warn and adjust the clause
13115 if that is not the case. */
13116 if (ctx->region_type == ORT_ACC_PARALLEL
13117 || ctx->region_type == ORT_ACC_SERIAL)
13119 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
13120 n = NULL;
13122 if (DECL_P (t))
13123 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
13125 if (n && (n->value & GOVD_REDUCTION))
13127 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
13129 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
13130 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
13131 && kind != GOMP_MAP_FORCE_PRESENT
13132 && kind != GOMP_MAP_POINTER)
13134 warning_at (OMP_CLAUSE_LOCATION (c), 0,
13135 "incompatible data clause with reduction "
13136 "on %qE; promoting to %<present_or_copy%>",
13137 DECL_NAME (t));
13138 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
13142 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
13143 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
13145 remove = true;
13146 break;
13148 if (!DECL_P (decl))
13150 if ((ctx->region_type & ORT_TARGET) != 0
13151 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
13153 if (INDIRECT_REF_P (decl)
13154 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
13155 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
13156 == REFERENCE_TYPE))
13157 decl = TREE_OPERAND (decl, 0);
13158 if (TREE_CODE (decl) == COMPONENT_REF)
13160 while (TREE_CODE (decl) == COMPONENT_REF)
13161 decl = TREE_OPERAND (decl, 0);
13162 if (DECL_P (decl))
13164 n = splay_tree_lookup (ctx->variables,
13165 (splay_tree_key) decl);
13166 if (!(n->value & GOVD_SEEN))
13167 remove = true;
13171 break;
13173 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13174 if ((ctx->region_type & ORT_TARGET) != 0
13175 && !(n->value & GOVD_SEEN)
13176 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
13177 && (!is_global_var (decl)
13178 || !lookup_attribute ("omp declare target link",
13179 DECL_ATTRIBUTES (decl))))
13181 remove = true;
13182 /* For struct element mappings, if the struct is never referenced
13183 in the target block and none of the mappings has an always
13184 modifier, remove all the struct element mappings, which
13185 immediately follow the GOMP_MAP_STRUCT map clause. */
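/* The OMP_CLAUSE_SIZE of a GOMP_MAP_STRUCT clause records how many
   element mappings follow it, so removing them amounts to unlinking
   that many entries from the clause chain. */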
13186 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
13188 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
13189 while (cnt--)
13190 OMP_CLAUSE_CHAIN (c)
13191 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
13194 else if (DECL_SIZE (decl)
13195 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
13196 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
13197 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
13198 && (OMP_CLAUSE_MAP_KIND (c)
13199 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
13201 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
13202 for these, TREE_CODE (DECL_SIZE (decl)) will always be
13203 INTEGER_CST. */
13204 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
13206 tree decl2 = DECL_VALUE_EXPR (decl);
13207 gcc_assert (INDIRECT_REF_P (decl2));
13208 decl2 = TREE_OPERAND (decl2, 0);
13209 gcc_assert (DECL_P (decl2));
13210 tree mem = build_simple_mem_ref (decl2);
13211 OMP_CLAUSE_DECL (c) = mem;
13212 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13213 if (ctx->outer_context)
13215 omp_notice_variable (ctx->outer_context, decl2, true);
13216 omp_notice_variable (ctx->outer_context,
13217 OMP_CLAUSE_SIZE (c), true);
13219 if (((ctx->region_type & ORT_TARGET) != 0
13220 || !ctx->target_firstprivatize_array_bases)
13221 && ((n->value & GOVD_SEEN) == 0
13222 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
13224 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13225 OMP_CLAUSE_MAP);
13226 OMP_CLAUSE_DECL (nc) = decl;
13227 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13228 if (ctx->target_firstprivatize_array_bases)
13229 OMP_CLAUSE_SET_MAP_KIND (nc,
13230 GOMP_MAP_FIRSTPRIVATE_POINTER);
13231 else
13232 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13233 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
13234 OMP_CLAUSE_CHAIN (c) = nc;
13235 c = nc;
13238 else
13240 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
13241 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
13242 gcc_assert ((n->value & GOVD_SEEN) == 0
13243 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13244 == 0));
13246 break;
13248 case OMP_CLAUSE_TO:
13249 case OMP_CLAUSE_FROM:
13250 case OMP_CLAUSE__CACHE_:
13251 decl = OMP_CLAUSE_DECL (c);
13252 if (!DECL_P (decl))
13253 break;
13254 if (DECL_SIZE (decl)
13255 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
13257 tree decl2 = DECL_VALUE_EXPR (decl);
13258 gcc_assert (INDIRECT_REF_P (decl2));
13259 decl2 = TREE_OPERAND (decl2, 0);
13260 gcc_assert (DECL_P (decl2));
13261 tree mem = build_simple_mem_ref (decl2);
13262 OMP_CLAUSE_DECL (c) = mem;
13263 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13264 if (ctx->outer_context)
13266 omp_notice_variable (ctx->outer_context, decl2, true);
13267 omp_notice_variable (ctx->outer_context,
13268 OMP_CLAUSE_SIZE (c), true);
13271 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
13272 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
13273 break;
13275 case OMP_CLAUSE_REDUCTION:
13276 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
13278 decl = OMP_CLAUSE_DECL (c);
13279 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13280 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
13282 remove = true;
13283 error_at (OMP_CLAUSE_LOCATION (c),
13284 "%qD specified in %<inscan%> %<reduction%> clause "
13285 "but not in %<scan%> directive clause", decl);
13286 break;
13288 has_inscan_reductions = true;
13290 /* FALLTHRU */
13291 case OMP_CLAUSE_IN_REDUCTION:
13292 case OMP_CLAUSE_TASK_REDUCTION:
13293 decl = OMP_CLAUSE_DECL (c);
13294 /* OpenACC reductions need a present_or_copy data clause.
13295 Add one if necessary. Emit an error when the reduction variable is private. */
13296 if (ctx->region_type == ORT_ACC_PARALLEL
13297 || ctx->region_type == ORT_ACC_SERIAL)
13299 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13300 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13302 remove = true;
13303 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
13304 "reduction on %qE", DECL_NAME (decl));
13306 else if ((n->value & GOVD_MAP) == 0)
13308 tree next = OMP_CLAUSE_CHAIN (c);
13309 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
13310 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
13311 OMP_CLAUSE_DECL (nc) = decl;
13312 OMP_CLAUSE_CHAIN (c) = nc;
13313 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13314 (ctx->region_type
13315 & ORT_ACC) != 0);
13316 while (1)
13318 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
13319 if (OMP_CLAUSE_CHAIN (nc) == NULL)
13320 break;
13321 nc = OMP_CLAUSE_CHAIN (nc);
13323 OMP_CLAUSE_CHAIN (nc) = next;
13324 n->value |= GOVD_MAP;
13327 if (DECL_P (decl)
13328 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13329 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13330 break;
13332 case OMP_CLAUSE_ALLOCATE:
13333 decl = OMP_CLAUSE_DECL (c);
13334 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13335 if (n != NULL && !(n->value & GOVD_SEEN))
13337 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
13338 != 0
13339 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
13340 remove = true;
13342 if (!remove
13343 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13344 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
13345 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
13346 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
13347 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
13349 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13350 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
13351 if (n == NULL)
13353 enum omp_clause_default_kind default_kind
13354 = ctx->default_kind;
13355 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
13356 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13357 true);
13358 ctx->default_kind = default_kind;
13360 else
13361 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13362 true);
13364 break;
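/* The remaining clause kinds carry no data-sharing state that needs
   adjusting here; they are kept as-is. */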
13366 case OMP_CLAUSE_COPYIN:
13367 case OMP_CLAUSE_COPYPRIVATE:
13368 case OMP_CLAUSE_IF:
13369 case OMP_CLAUSE_SELF:
13370 case OMP_CLAUSE_NUM_THREADS:
13371 case OMP_CLAUSE_NUM_TEAMS:
13372 case OMP_CLAUSE_THREAD_LIMIT:
13373 case OMP_CLAUSE_DIST_SCHEDULE:
13374 case OMP_CLAUSE_DEVICE:
13375 case OMP_CLAUSE_SCHEDULE:
13376 case OMP_CLAUSE_NOWAIT:
13377 case OMP_CLAUSE_ORDERED:
13378 case OMP_CLAUSE_DEFAULT:
13379 case OMP_CLAUSE_UNTIED:
13380 case OMP_CLAUSE_COLLAPSE:
13381 case OMP_CLAUSE_FINAL:
13382 case OMP_CLAUSE_MERGEABLE:
13383 case OMP_CLAUSE_PROC_BIND:
13384 case OMP_CLAUSE_SAFELEN:
13385 case OMP_CLAUSE_SIMDLEN:
13386 case OMP_CLAUSE_DEPEND:
13387 case OMP_CLAUSE_DOACROSS:
13388 case OMP_CLAUSE_PRIORITY:
13389 case OMP_CLAUSE_GRAINSIZE:
13390 case OMP_CLAUSE_NUM_TASKS:
13391 case OMP_CLAUSE_NOGROUP:
13392 case OMP_CLAUSE_THREADS:
13393 case OMP_CLAUSE_SIMD:
13394 case OMP_CLAUSE_FILTER:
13395 case OMP_CLAUSE_HINT:
13396 case OMP_CLAUSE_DEFAULTMAP:
13397 case OMP_CLAUSE_ORDER:
13398 case OMP_CLAUSE_BIND:
13399 case OMP_CLAUSE_DETACH:
13400 case OMP_CLAUSE_USE_DEVICE_PTR:
13401 case OMP_CLAUSE_USE_DEVICE_ADDR:
13402 case OMP_CLAUSE_ASYNC:
13403 case OMP_CLAUSE_WAIT:
13404 case OMP_CLAUSE_INDEPENDENT:
13405 case OMP_CLAUSE_NUM_GANGS:
13406 case OMP_CLAUSE_NUM_WORKERS:
13407 case OMP_CLAUSE_VECTOR_LENGTH:
13408 case OMP_CLAUSE_GANG:
13409 case OMP_CLAUSE_WORKER:
13410 case OMP_CLAUSE_VECTOR:
13411 case OMP_CLAUSE_AUTO:
13412 case OMP_CLAUSE_SEQ:
13413 case OMP_CLAUSE_TILE:
13414 case OMP_CLAUSE_IF_PRESENT:
13415 case OMP_CLAUSE_FINALIZE:
13416 case OMP_CLAUSE_INCLUSIVE:
13417 case OMP_CLAUSE_EXCLUSIVE:
13418 break;
13420 case OMP_CLAUSE_NOHOST:
13421 default:
13422 gcc_unreachable ();
13425 if (remove)
13426 *list_p = OMP_CLAUSE_CHAIN (c);
13427 else if (move_attach)
13429 /* Remove the attach node from here; separate it out into its own list. */
13430 *attach_tail = c;
13431 *list_p = OMP_CLAUSE_CHAIN (c);
13432 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13433 attach_tail = &OMP_CLAUSE_CHAIN (c);
13435 else
13436 list_p = &OMP_CLAUSE_CHAIN (c);
13439 /* Splice attach nodes at the end of the list. */
13440 if (attach_list)
13442 *list_p = attach_list;
13443 list_p = attach_tail;
13446 /* Add in any implicit data sharing. */
13447 struct gimplify_adjust_omp_clauses_data data;
13448 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13450 /* OpenMP. Implicit clauses are added at the start of the clause list,
13451 but after any non-map clauses. */
13452 tree *implicit_add_list_p = orig_list_p;
13453 while (*implicit_add_list_p
13454 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13455 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13456 data.list_p = implicit_add_list_p;
13458 else
13459 /* OpenACC. */
13460 data.list_p = list_p;
13461 data.pre_p = pre_p;
13462 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13464 if (has_inscan_reductions)
13465 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13466 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13467 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13469 error_at (OMP_CLAUSE_LOCATION (c),
13470 "%<inscan%> %<reduction%> clause used together with "
13471 "%<linear%> clause for a variable other than loop "
13472 "iterator");
13473 break;
13476 gimplify_omp_ctxp = ctx->outer_context;
13477 delete_omp_context (ctx);
13480 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
13481 -1 if unknown yet (simd is involved, won't be known until vectorization)
13482 and 1 if they do. If SCORES is non-NULL, it should point to an array
13483 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
13484 of the CONSTRUCTS (position -1 if it will never match) followed by
13485 number of constructs in the OpenMP context construct trait. If the
13486 score depends on whether it will be in a declare simd clone or not,
13487 the function returns 2 and there will be two sets of the scores, the
13488 first one for the case that it is not in a declare simd clone, the
13489 second one for the case that it is. */
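/* For example, with NCONSTRUCTS == 2 and a return value of 2, SCORES
   holds two { position, position, count } triples, i.e. 2*2+2 ints:
   the first triple for the non-clone case, the second for the declare
   simd clone case. */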
13491 int
13492 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13493 int *scores)
13495 int matched = 0, cnt = 0;
13496 bool simd_seen = false;
13497 bool target_seen = false;
13498 int declare_simd_cnt = -1;
13499 auto_vec<enum tree_code, 16> codes;
13500 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13502 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13503 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13504 == ORT_TARGET && ctx->code == OMP_TARGET)
13505 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13506 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13507 || (ctx->region_type == ORT_SIMD
13508 && ctx->code == OMP_SIMD
13509 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13511 ++cnt;
13512 if (scores)
13513 codes.safe_push (ctx->code);
13514 else if (matched < nconstructs && ctx->code == constructs[matched])
13516 if (ctx->code == OMP_SIMD)
13518 if (matched)
13519 return 0;
13520 simd_seen = true;
13522 ++matched;
13524 if (ctx->code == OMP_TARGET)
13526 if (scores == NULL)
13527 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13528 target_seen = true;
13529 break;
13532 else if (ctx->region_type == ORT_WORKSHARE
13533 && ctx->code == OMP_LOOP
13534 && ctx->outer_context
13535 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13536 && ctx->outer_context->outer_context
13537 && ctx->outer_context->outer_context->code == OMP_LOOP
13538 && ctx->outer_context->outer_context->distribute)
13539 ctx = ctx->outer_context->outer_context;
13540 ctx = ctx->outer_context;
13542 if (!target_seen
13543 && lookup_attribute ("omp declare simd",
13544 DECL_ATTRIBUTES (current_function_decl)))
13546 /* Declare simd is a "maybe" case: it is supposed to be added only to
13547 the clones created by omp-simd-clone.cc, not to the base function. */
13548 declare_simd_cnt = cnt++;
13549 if (scores)
13550 codes.safe_push (OMP_SIMD);
13551 else if (cnt == 0
13552 && constructs[0] == OMP_SIMD)
13554 gcc_assert (matched == 0);
13555 simd_seen = true;
13556 if (++matched == nconstructs)
13557 return -1;
13560 if (tree attr = lookup_attribute ("omp declare variant variant",
13561 DECL_ATTRIBUTES (current_function_decl)))
13563 enum tree_code variant_constructs[5];
13564 int variant_nconstructs = 0;
13565 if (!target_seen)
13566 variant_nconstructs
13567 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13568 variant_constructs);
13569 for (int i = 0; i < variant_nconstructs; i++)
13571 ++cnt;
13572 if (scores)
13573 codes.safe_push (variant_constructs[i]);
13574 else if (matched < nconstructs
13575 && variant_constructs[i] == constructs[matched])
13577 if (variant_constructs[i] == OMP_SIMD)
13579 if (matched)
13580 return 0;
13581 simd_seen = true;
13583 ++matched;
13587 if (!target_seen
13588 && lookup_attribute ("omp declare target block",
13589 DECL_ATTRIBUTES (current_function_decl)))
13591 if (scores)
13592 codes.safe_push (OMP_TARGET);
13593 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13594 ++matched;
13596 if (scores)
13598 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13600 int j = codes.length () - 1;
13601 for (int i = nconstructs - 1; i >= 0; i--)
13603 while (j >= 0
13604 && (pass != 0 || declare_simd_cnt != j)
13605 && constructs[i] != codes[j])
13606 --j;
13607 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13608 *scores++ = j - 1;
13609 else
13610 *scores++ = j;
13612 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13613 ? codes.length () - 1 : codes.length ());
13615 return declare_simd_cnt == -1 ? 1 : 2;
13617 if (matched == nconstructs)
13618 return simd_seen ? -1 : 1;
13619 return 0;
13622 /* Gimplify OACC_CACHE. */
13624 static void
13625 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13627 tree expr = *expr_p;
13629 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13630 OACC_CACHE);
13631 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13632 OACC_CACHE);
13634 /* TODO: Do something sensible with this information. */
13636 *expr_p = NULL_TREE;
13639 /* Helper function of gimplify_oacc_declare. If required, translate the
13640 map kind in CLAUSE into an 'entry' kind and an 'exit' kind. The entry
13641 kind will replace the one in CLAUSE, while the exit kind will be used
13642 in a new omp_clause and returned to the caller. */
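/* For example, GOMP_MAP_TOFROM becomes GOMP_MAP_TO on entry with a new
   GOMP_MAP_FROM clause returned for the exit, while GOMP_MAP_ALLOC keeps
   its entry kind and returns GOMP_MAP_RELEASE for the exit. */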
13644 static tree
13645 gimplify_oacc_declare_1 (tree clause)
13647 HOST_WIDE_INT kind, new_op;
13648 bool ret = false;
13649 tree c = NULL;
13651 kind = OMP_CLAUSE_MAP_KIND (clause);
13653 switch (kind)
13655 case GOMP_MAP_ALLOC:
13656 new_op = GOMP_MAP_RELEASE;
13657 ret = true;
13658 break;
13660 case GOMP_MAP_FROM:
13661 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13662 new_op = GOMP_MAP_FROM;
13663 ret = true;
13664 break;
13666 case GOMP_MAP_TOFROM:
13667 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13668 new_op = GOMP_MAP_FROM;
13669 ret = true;
13670 break;
13672 case GOMP_MAP_DEVICE_RESIDENT:
13673 case GOMP_MAP_FORCE_DEVICEPTR:
13674 case GOMP_MAP_FORCE_PRESENT:
13675 case GOMP_MAP_LINK:
13676 case GOMP_MAP_POINTER:
13677 case GOMP_MAP_TO:
13678 break;
13680 default:
13681 gcc_unreachable ();
13682 break;
13685 if (ret)
13687 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13688 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13689 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13692 return c;
13695 /* Gimplify OACC_DECLARE. */
13697 static void
13698 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13700 tree expr = *expr_p;
13701 gomp_target *stmt;
13702 tree clauses, t, decl;
13704 clauses = OACC_DECLARE_CLAUSES (expr);
13706 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13707 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13709 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13711 decl = OMP_CLAUSE_DECL (t);
13713 if (TREE_CODE (decl) == MEM_REF)
13714 decl = TREE_OPERAND (decl, 0);
13716 if (VAR_P (decl) && !is_oacc_declared (decl))
13718 tree attr = get_identifier ("oacc declare target");
13719 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13720 DECL_ATTRIBUTES (decl));
13723 if (VAR_P (decl)
13724 && !is_global_var (decl)
13725 && DECL_CONTEXT (decl) == current_function_decl)
13727 tree c = gimplify_oacc_declare_1 (t);
13728 if (c)
13730 if (oacc_declare_returns == NULL)
13731 oacc_declare_returns = new hash_map<tree, tree>;
13733 oacc_declare_returns->put (decl, c);
13737 if (gimplify_omp_ctxp)
13738 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13741 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13742 clauses);
13744 gimplify_seq_add_stmt (pre_p, stmt);
13746 *expr_p = NULL_TREE;
13749 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13750 gimplification of the body, as well as scanning the body for used
13751 variables. We need to do this scan now, because variable-sized
13752 decls will be decomposed during gimplification. */
13754 static void
13755 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13757 tree expr = *expr_p;
13758 gimple *g;
13759 gimple_seq body = NULL;
13761 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13762 OMP_PARALLEL_COMBINED (expr)
13763 ? ORT_COMBINED_PARALLEL
13764 : ORT_PARALLEL, OMP_PARALLEL);
13766 push_gimplify_context ();
13768 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13769 if (gimple_code (g) == GIMPLE_BIND)
13770 pop_gimplify_context (g);
13771 else
13772 pop_gimplify_context (NULL);
13774 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13775 OMP_PARALLEL);
13777 g = gimple_build_omp_parallel (body,
13778 OMP_PARALLEL_CLAUSES (expr),
13779 NULL_TREE, NULL_TREE);
13780 if (OMP_PARALLEL_COMBINED (expr))
13781 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13782 gimplify_seq_add_stmt (pre_p, g);
13783 *expr_p = NULL_TREE;
13786 /* Gimplify the contents of an OMP_TASK statement. This involves
13787 gimplification of the body, as well as scanning the body for used
13788 variables. We need to do this scan now, because variable-sized
13789 decls will be decomposed during gimplification. */
13791 static void
13792 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13794 tree expr = *expr_p;
13795 gimple *g;
13796 gimple_seq body = NULL;
13797 bool nowait = false;
13798 bool has_depend = false;
13800 if (OMP_TASK_BODY (expr) == NULL_TREE)
13802 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13805 has_depend = true;
13806 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13808 error_at (OMP_CLAUSE_LOCATION (c),
13809 "%<mutexinoutset%> kind in %<depend%> clause on a "
13810 "%<taskwait%> construct");
13811 break;
13814 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13815 nowait = true;
13816 if (nowait && !has_depend)
13818 error_at (EXPR_LOCATION (expr),
13819 "%<taskwait%> construct with %<nowait%> clause but no "
13820 "%<depend%> clauses");
13821 *expr_p = NULL_TREE;
13822 return;
13826 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13827 omp_find_clause (OMP_TASK_CLAUSES (expr),
13828 OMP_CLAUSE_UNTIED)
13829 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13831 if (OMP_TASK_BODY (expr))
13833 push_gimplify_context ();
13835 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13836 if (gimple_code (g) == GIMPLE_BIND)
13837 pop_gimplify_context (g);
13838 else
13839 pop_gimplify_context (NULL);
13842 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13843 OMP_TASK);
13845 g = gimple_build_omp_task (body,
13846 OMP_TASK_CLAUSES (expr),
13847 NULL_TREE, NULL_TREE,
13848 NULL_TREE, NULL_TREE, NULL_TREE);
13849 if (OMP_TASK_BODY (expr) == NULL_TREE)
13850 gimple_omp_task_set_taskwait_p (g, true);
13851 gimplify_seq_add_stmt (pre_p, g);
13852 *expr_p = NULL_TREE;
13855 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13856 force it into a temporary initialized in PRE_P and add a firstprivate
13857 clause to ORIG_FOR_STMT. */
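/* E.g. a non-constant taskloop bound is evaluated once into a temporary
   before the construct and handed to it via firstprivate, so all created
   tasks observe the same value. */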
13859 static void
13860 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13861 tree orig_for_stmt)
13863 if (*tp == NULL || is_gimple_constant (*tp))
13864 return;
13866 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13867 /* A reference-to-pointer conversion is considered useless,
13868 but it is significant for the firstprivate clause. Force it
13869 here. */
13870 if (type
13871 && TREE_CODE (type) == POINTER_TYPE
13872 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13874 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13875 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13876 gimplify_and_add (m, pre_p);
13877 *tp = v;
13880 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13881 OMP_CLAUSE_DECL (c) = *tp;
13882 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13883 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13886 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
13887 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
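/* Such a bodyless OMP_ORDERED corresponds to the stand-alone
   '#pragma omp ordered doacross(...)' (formerly 'depend(source)' /
   'depend(sink : ...)') form. */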
13889 static tree
13890 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13892 switch (TREE_CODE (*tp))
13894 case OMP_ORDERED:
13895 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13896 return *tp;
13897 break;
13898 case OMP_SIMD:
13899 case OMP_PARALLEL:
13900 case OMP_TARGET:
13901 *walk_subtrees = 0;
13902 break;
13903 default:
13904 break;
13906 return NULL_TREE;
13909 /* Gimplify the gross structure of an OMP_FOR statement. */
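/* Besides OMP_FOR this also handles OMP_SIMD, OMP_DISTRIBUTE, OMP_TASKLOOP
   and OACC_LOOP, as well as combined constructs where the outer statement's
   OMP_FOR_INIT is empty and the actual loop is found inside the body. */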
13911 static enum gimplify_status
13912 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13914 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13915 enum gimplify_status ret = GS_ALL_DONE;
13916 enum gimplify_status tret;
13917 gomp_for *gfor;
13918 gimple_seq for_body, for_pre_body;
13919 int i;
13920 bitmap has_decl_expr = NULL;
13921 enum omp_region_type ort = ORT_WORKSHARE;
13922 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13924 orig_for_stmt = for_stmt = *expr_p;
13926 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13927 != NULL_TREE);
13928 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13930 tree *data[4] = { NULL, NULL, NULL, NULL };
13931 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13932 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13933 find_combined_omp_for, data, NULL);
13934 if (inner_for_stmt == NULL_TREE)
13936 gcc_assert (seen_error ());
13937 *expr_p = NULL_TREE;
13938 return GS_ERROR;
13940 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13942 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13943 &OMP_FOR_PRE_BODY (for_stmt));
13944 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13946 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13948 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13949 &OMP_FOR_PRE_BODY (for_stmt));
13950 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13953 if (data[0])
13955 /* We have some statements or variable declarations in between
13956 the composite construct directives. Move them around the
13957 inner_for_stmt. */
13958 data[0] = expr_p;
13959 for (i = 0; i < 3; i++)
13960 if (data[i])
13962 tree t = *data[i];
13963 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13964 data[i + 1] = data[i];
13965 *data[i] = OMP_BODY (t);
13966 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13967 NULL_TREE, make_node (BLOCK));
13968 OMP_BODY (t) = body;
13969 append_to_statement_list_force (inner_for_stmt,
13970 &BIND_EXPR_BODY (body));
13971 *data[3] = t;
13972 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13973 gcc_assert (*data[3] == inner_for_stmt);
13975 return GS_OK;
13978 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13979 if (!loop_p
13980 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13981 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13982 i)) == TREE_LIST
13983 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13984 i)))
13986 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13987 /* Class iterators aren't allowed on OMP_SIMD, so the only
13988 case we need to solve is distribute parallel for. They are
13989 allowed on the loop construct, but that is already handled
13990 in gimplify_omp_loop. */
13991 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13992 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13993 && data[1]);
13994 tree orig_decl = TREE_PURPOSE (orig);
13995 tree last = TREE_VALUE (orig);
13996 tree *pc;
13997 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13998 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13999 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
14000 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
14001 && OMP_CLAUSE_DECL (*pc) == orig_decl)
14002 break;
14003 if (*pc == NULL_TREE)
14005 tree *spc;
14006 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
14007 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
14008 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
14009 && OMP_CLAUSE_DECL (*spc) == orig_decl)
14010 break;
14011 if (*spc)
14013 tree c = *spc;
14014 *spc = OMP_CLAUSE_CHAIN (c);
14015 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
14016 *pc = c;
14019 if (*pc == NULL_TREE)
14021 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
14023 /* A private clause will appear only on inner_for_stmt.
14024 Change it into firstprivate, and add a private clause
14025 on for_stmt. */
14026 tree c = copy_node (*pc);
14027 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14028 OMP_FOR_CLAUSES (for_stmt) = c;
14029 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
14030 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
14032 else
14034 /* A lastprivate clause will appear on both inner_for_stmt
14035 and for_stmt. Add a firstprivate clause to
14036 inner_for_stmt. */
14037 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
14038 OMP_CLAUSE_FIRSTPRIVATE);
14039 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
14040 OMP_CLAUSE_CHAIN (c) = *pc;
14041 *pc = c;
14042 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
14044 tree c = build_omp_clause (UNKNOWN_LOCATION,
14045 OMP_CLAUSE_FIRSTPRIVATE);
14046 OMP_CLAUSE_DECL (c) = last;
14047 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14048 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14049 c = build_omp_clause (UNKNOWN_LOCATION,
14050 *pc ? OMP_CLAUSE_SHARED
14051 : OMP_CLAUSE_FIRSTPRIVATE);
14052 OMP_CLAUSE_DECL (c) = orig_decl;
14053 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14054 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14056 /* Similarly, take care of C++ range-for temporaries; those should
14057 be firstprivate on the OMP_PARALLEL if there is one. */
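/* In a range-based 'for (decl : expr)' loop the frontend introduces
   __for_range and __for_end temporaries; the code below adds firstprivate
   clauses for them on the enclosing OMP_PARALLEL. */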
14058 if (data[1])
14059 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
14060 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
14061 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
14062 i)) == TREE_LIST
14063 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
14064 i)))
14066 tree orig
14067 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
14068 tree v = TREE_CHAIN (orig);
14069 tree c = build_omp_clause (UNKNOWN_LOCATION,
14070 OMP_CLAUSE_FIRSTPRIVATE);
14071 /* First add a firstprivate clause for the __for_end artificial
14072 decl. */
14073 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
14074 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
14075 == REFERENCE_TYPE)
14076 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
14077 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14078 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14079 if (TREE_VEC_ELT (v, 0))
14081 /* And now the same for the __for_range artificial decl if it
14082 exists. */
14083 c = build_omp_clause (UNKNOWN_LOCATION,
14084 OMP_CLAUSE_FIRSTPRIVATE);
14085 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
14086 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
14087 == REFERENCE_TYPE)
14088 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
14089 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
14090 OMP_PARALLEL_CLAUSES (*data[1]) = c;
14095 switch (TREE_CODE (for_stmt))
14097 case OMP_FOR:
14098 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
14100 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14101 OMP_CLAUSE_SCHEDULE))
14102 error_at (EXPR_LOCATION (for_stmt),
14103 "%qs clause may not appear on non-rectangular %qs",
14104 "schedule", lang_GNU_Fortran () ? "do" : "for");
14105 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
14106 error_at (EXPR_LOCATION (for_stmt),
14107 "%qs clause may not appear on non-rectangular %qs",
14108 "ordered", lang_GNU_Fortran () ? "do" : "for");
14110 break;
14111 case OMP_DISTRIBUTE:
14112 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
14113 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14114 OMP_CLAUSE_DIST_SCHEDULE))
14115 error_at (EXPR_LOCATION (for_stmt),
14116 "%qs clause may not appear on non-rectangular %qs",
14117 "dist_schedule", "distribute");
14118 break;
14119 case OACC_LOOP:
14120 ort = ORT_ACC;
14121 break;
14122 case OMP_TASKLOOP:
14123 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
14125 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14126 OMP_CLAUSE_GRAINSIZE))
14127 error_at (EXPR_LOCATION (for_stmt),
14128 "%qs clause may not appear on non-rectangular %qs",
14129 "grainsize", "taskloop");
14130 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14131 OMP_CLAUSE_NUM_TASKS))
14132 error_at (EXPR_LOCATION (for_stmt),
14133 "%qs clause may not appear on non-rectangular %qs",
14134 "num_tasks", "taskloop");
14136 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
14137 ort = ORT_UNTIED_TASKLOOP;
14138 else
14139 ort = ORT_TASKLOOP;
14140 break;
14141 case OMP_SIMD:
14142 ort = ORT_SIMD;
14143 break;
14144 default:
14145 gcc_unreachable ();
14148 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
14149 clause for the IV. */
14150 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14152 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
14153 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14154 decl = TREE_OPERAND (t, 0);
14155 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
14156 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14157 && OMP_CLAUSE_DECL (c) == decl)
14159 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14160 break;
14164 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
14165 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
14166 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
14167 ? OMP_LOOP : TREE_CODE (for_stmt));
14169 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
14170 gimplify_omp_ctxp->distribute = true;
14172 /* Handle OMP_FOR_INIT. */
14173 for_pre_body = NULL;
14174 if ((ort == ORT_SIMD
14175 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
14176 && OMP_FOR_PRE_BODY (for_stmt))
14178 has_decl_expr = BITMAP_ALLOC (NULL);
14179 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
14180 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
14182 t = OMP_FOR_PRE_BODY (for_stmt);
14183 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
14185 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
14187 tree_stmt_iterator si;
14188 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
14189 tsi_next (&si))
14191 t = tsi_stmt (si);
14192 if (TREE_CODE (t) == DECL_EXPR
14193 && VAR_P (DECL_EXPR_DECL (t)))
14194 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
14198 if (OMP_FOR_PRE_BODY (for_stmt))
14200 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
14201 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
14202 else
14204 struct gimplify_omp_ctx ctx;
14205 memset (&ctx, 0, sizeof (ctx));
14206 ctx.region_type = ORT_NONE;
14207 gimplify_omp_ctxp = &ctx;
14208 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
14209 gimplify_omp_ctxp = NULL;
14212 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
14214 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
14215 for_stmt = inner_for_stmt;
14217 /* For taskloop, we need to gimplify the start, end and step before the
14218 taskloop, outside of the taskloop omp context. */
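/* E.g. for '#pragma omp taskloop' over 'for (i = f1 (); i < f2 (); i += f3 ())'
   the three calls must be evaluated up front, before the task is created. */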
14219 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14221 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14223 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14224 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
14225 ? pre_p : &for_pre_body);
14226 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
14227 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14229 tree v = TREE_OPERAND (t, 1);
14230 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
14231 for_pre_p, orig_for_stmt);
14232 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
14233 for_pre_p, orig_for_stmt);
14235 else
14236 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
14237 orig_for_stmt);
14239 /* Handle OMP_FOR_COND. */
14240 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14241 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14243 tree v = TREE_OPERAND (t, 1);
14244 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
14245 for_pre_p, orig_for_stmt);
14246 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
14247 for_pre_p, orig_for_stmt);
14249 else
14250 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
14251 orig_for_stmt);
14253 /* Handle OMP_FOR_INCR. */
14254 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14255 if (TREE_CODE (t) == MODIFY_EXPR)
14257 decl = TREE_OPERAND (t, 0);
14258 t = TREE_OPERAND (t, 1);
14259 tree *tp = &TREE_OPERAND (t, 1);
14260 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
14261 tp = &TREE_OPERAND (t, 0);
14263 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
14264 orig_for_stmt);
14268 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
14269 OMP_TASKLOOP);
14272 if (orig_for_stmt != for_stmt)
14273 gimplify_omp_ctxp->combined_loop = true;
14275 for_body = NULL;
14276 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14277 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
14278 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14279 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
14281 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
14282 bool is_doacross = false;
14283 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
14284 find_standalone_omp_ordered, NULL))
14286 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
14287 is_doacross = true;
14288 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
14289 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
14290 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14291 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
14293 error_at (OMP_CLAUSE_LOCATION (*pc),
14294 "%<linear%> clause may not be specified together "
14295 "with %<ordered%> clause if stand-alone %<ordered%> "
14296 "construct is nested in it");
14297 *pc = OMP_CLAUSE_CHAIN (*pc);
14299 else
14300 pc = &OMP_CLAUSE_CHAIN (*pc);
14302 int collapse = 1, tile = 0;
14303 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
14304 if (c)
14305 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
14306 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
14307 if (c)
14308 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
14309 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
14310 hash_set<tree> *allocate_uids = NULL;
14311 if (c)
14313 allocate_uids = new hash_set<tree>;
14314 for (; c; c = OMP_CLAUSE_CHAIN (c))
14315 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
14316 allocate_uids->add (OMP_CLAUSE_DECL (c));
14318 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14320 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14321 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14322 decl = TREE_OPERAND (t, 0);
14323 gcc_assert (DECL_P (decl));
14324 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
14325 || POINTER_TYPE_P (TREE_TYPE (decl)));
14326 if (is_doacross)
14328 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
14330 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14331 if (TREE_CODE (orig_decl) == TREE_LIST)
14333 orig_decl = TREE_PURPOSE (orig_decl);
14334 if (!orig_decl)
14335 orig_decl = decl;
14337 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
14339 else
14340 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14341 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14344 if (for_stmt == orig_for_stmt)
14346 tree orig_decl = decl;
14347 if (OMP_FOR_ORIG_DECLS (for_stmt))
14349 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14350 if (TREE_CODE (orig_decl) == TREE_LIST)
14352 orig_decl = TREE_PURPOSE (orig_decl);
14353 if (!orig_decl)
14354 orig_decl = decl;
14357 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
14358 error_at (EXPR_LOCATION (for_stmt),
14359 "threadprivate iteration variable %qD", orig_decl);
14362 /* Make sure the iteration variable is private. */
14363 tree c = NULL_TREE;
14364 tree c2 = NULL_TREE;
14365 if (orig_for_stmt != for_stmt)
14367 /* Preserve this information until we gimplify the inner simd. */
14368 if (has_decl_expr
14369 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14370 TREE_PRIVATE (t) = 1;
14372 else if (ort == ORT_SIMD)
14374 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14375 (splay_tree_key) decl);
14376 omp_is_private (gimplify_omp_ctxp, decl,
14377 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14378 != 1));
14379 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14381 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14382 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14383 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14384 OMP_CLAUSE_LASTPRIVATE);
14385 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14386 OMP_CLAUSE_LASTPRIVATE))
14387 if (OMP_CLAUSE_DECL (c3) == decl)
14389 warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
14390 "conditional %<lastprivate%> on loop "
14391 "iterator %qD ignored", decl);
14392 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14393 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14396 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14398 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14399 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14400 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14401 if ((has_decl_expr
14402 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14403 || TREE_PRIVATE (t))
14405 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14406 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14408 struct gimplify_omp_ctx *outer
14409 = gimplify_omp_ctxp->outer_context;
14410 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14412 if (outer->region_type == ORT_WORKSHARE
14413 && outer->combined_loop)
14415 n = splay_tree_lookup (outer->variables,
14416 (splay_tree_key)decl);
14417 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14419 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14420 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14422 else
14424 struct gimplify_omp_ctx *octx = outer->outer_context;
14425 if (octx
14426 && octx->region_type == ORT_COMBINED_PARALLEL
14427 && octx->outer_context
14428 && (octx->outer_context->region_type
14429 == ORT_WORKSHARE)
14430 && octx->outer_context->combined_loop)
14432 octx = octx->outer_context;
14433 n = splay_tree_lookup (octx->variables,
14434 (splay_tree_key)decl);
14435 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14437 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14438 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14445 OMP_CLAUSE_DECL (c) = decl;
14446 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14447 OMP_FOR_CLAUSES (for_stmt) = c;
14448 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14449 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14450 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14451 true);
14453 else
14455 bool lastprivate
14456 = (!has_decl_expr
14457 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14458 if (TREE_PRIVATE (t))
14459 lastprivate = false;
14460 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14462 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14463 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14464 lastprivate = false;
14467 struct gimplify_omp_ctx *outer
14468 = gimplify_omp_ctxp->outer_context;
14469 if (outer && lastprivate)
14470 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14471 true);
14473 c = build_omp_clause (input_location,
14474 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14475 : OMP_CLAUSE_PRIVATE);
14476 OMP_CLAUSE_DECL (c) = decl;
14477 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14478 OMP_FOR_CLAUSES (for_stmt) = c;
14479 omp_add_variable (gimplify_omp_ctxp, decl,
14480 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14481 | GOVD_EXPLICIT | GOVD_SEEN);
14482 c = NULL_TREE;
14485 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14487 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14488 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14489 (splay_tree_key) decl);
14490 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14491 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14492 OMP_CLAUSE_LASTPRIVATE);
14493 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14494 OMP_CLAUSE_LASTPRIVATE))
14495 if (OMP_CLAUSE_DECL (c3) == decl)
14497 warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
14498 "conditional %<lastprivate%> on loop "
14499 "iterator %qD ignored", decl);
14500 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14501 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14504 else
14505 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14507 /* If DECL is not a gimple register, create a temporary variable to act
14508 as an iteration counter. This is valid, since DECL cannot be
14509 modified in the body of the loop. Similarly for any iteration vars
14510 in simd with collapse > 1 where the iterator vars must be
14511 lastprivate. And similarly for vars mentioned in allocate clauses. */
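/* E.g. with 'collapse(2)' on a simd construct both iteration variables
   are replaced by such temporaries, and the original decls are assigned
   from them at the top of the loop body. */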
14512 if (orig_for_stmt != for_stmt)
14513 var = decl;
14514 else if (!is_gimple_reg (decl)
14515 || (ort == ORT_SIMD
14516 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14517 || (allocate_uids && allocate_uids->contains (decl)))
14519 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14520 /* Make sure omp_add_variable is not called on it prematurely.
14521 We call it ourselves a few lines later. */
14522 gimplify_omp_ctxp = NULL;
14523 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14524 gimplify_omp_ctxp = ctx;
14525 TREE_OPERAND (t, 0) = var;
14527 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14529 if (ort == ORT_SIMD
14530 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14532 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14533 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14534 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14535 OMP_CLAUSE_DECL (c2) = var;
14536 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14537 OMP_FOR_CLAUSES (for_stmt) = c2;
14538 omp_add_variable (gimplify_omp_ctxp, var,
14539 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14540 if (c == NULL_TREE)
14542 c = c2;
14543 c2 = NULL_TREE;
14546 else
14547 omp_add_variable (gimplify_omp_ctxp, var,
14548 GOVD_PRIVATE | GOVD_SEEN);
14550 else
14551 var = decl;
14553 gimplify_omp_ctxp->in_for_exprs = true;
14554 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14556 tree lb = TREE_OPERAND (t, 1);
14557 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14558 is_gimple_val, fb_rvalue, false);
14559 ret = MIN (ret, tret);
14560 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14561 is_gimple_val, fb_rvalue, false);
14563 else
14564 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14565 is_gimple_val, fb_rvalue, false);
14566 gimplify_omp_ctxp->in_for_exprs = false;
14567 ret = MIN (ret, tret);
14568 if (ret == GS_ERROR)
14569 return ret;
14571 /* Handle OMP_FOR_COND. */
14572 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14573 gcc_assert (COMPARISON_CLASS_P (t));
14574 gcc_assert (TREE_OPERAND (t, 0) == decl);
14576 gimplify_omp_ctxp->in_for_exprs = true;
14577 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14579 tree ub = TREE_OPERAND (t, 1);
14580 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14581 is_gimple_val, fb_rvalue, false);
14582 ret = MIN (ret, tret);
14583 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14584 is_gimple_val, fb_rvalue, false);
14586 else
14587 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14588 is_gimple_val, fb_rvalue, false);
14589 gimplify_omp_ctxp->in_for_exprs = false;
14590 ret = MIN (ret, tret);
14592 /* Handle OMP_FOR_INCR. */
14593 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14594 switch (TREE_CODE (t))
14596 case PREINCREMENT_EXPR:
14597 case POSTINCREMENT_EXPR:
14599 tree decl = TREE_OPERAND (t, 0);
14600 /* c_omp_for_incr_canonicalize_ptr() should have been
14601 called to massage things appropriately. */
14602 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14604 if (orig_for_stmt != for_stmt)
14605 break;
14606 t = build_int_cst (TREE_TYPE (decl), 1);
14607 if (c)
14608 OMP_CLAUSE_LINEAR_STEP (c) = t;
14609 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14610 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14611 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14612 break;
14615 case PREDECREMENT_EXPR:
14616 case POSTDECREMENT_EXPR:
14617 /* c_omp_for_incr_canonicalize_ptr() should have been
14618 called to massage things appropriately. */
14619 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14620 if (orig_for_stmt != for_stmt)
14621 break;
14622 t = build_int_cst (TREE_TYPE (decl), -1);
14623 if (c)
14624 OMP_CLAUSE_LINEAR_STEP (c) = t;
14625 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14626 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14627 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14628 break;
14630 case MODIFY_EXPR:
14631 gcc_assert (TREE_OPERAND (t, 0) == decl);
14632 TREE_OPERAND (t, 0) = var;
14634 t = TREE_OPERAND (t, 1);
14635 switch (TREE_CODE (t))
14637 case PLUS_EXPR:
14638 if (TREE_OPERAND (t, 1) == decl)
14640 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14641 TREE_OPERAND (t, 0) = var;
14642 break;
14645 /* Fallthru. */
14646 case MINUS_EXPR:
14647 case POINTER_PLUS_EXPR:
14648 gcc_assert (TREE_OPERAND (t, 0) == decl);
14649 TREE_OPERAND (t, 0) = var;
14650 break;
14651 default:
14652 gcc_unreachable ();
14655 gimplify_omp_ctxp->in_for_exprs = true;
14656 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14657 is_gimple_val, fb_rvalue, false);
14658 ret = MIN (ret, tret);
14659 if (c)
14661 tree step = TREE_OPERAND (t, 1);
14662 tree stept = TREE_TYPE (decl);
14663 if (POINTER_TYPE_P (stept))
14664 stept = sizetype;
14665 step = fold_convert (stept, step);
14666 if (TREE_CODE (t) == MINUS_EXPR)
14667 step = fold_build1 (NEGATE_EXPR, stept, step);
14668 OMP_CLAUSE_LINEAR_STEP (c) = step;
14669 if (step != TREE_OPERAND (t, 1))
14671 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14672 &for_pre_body, NULL,
14673 is_gimple_val, fb_rvalue, false);
14674 ret = MIN (ret, tret);
14677 gimplify_omp_ctxp->in_for_exprs = false;
14678 break;
14680 default:
14681 gcc_unreachable ();
14684 if (c2)
14686 gcc_assert (c);
14687 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14690 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14692 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14693 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14694 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14695 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14696 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14697 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14698 && OMP_CLAUSE_DECL (c) == decl)
14700 if (is_doacross && (collapse == 1 || i >= collapse))
14701 t = var;
14702 else
14704 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14705 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14706 gcc_assert (TREE_OPERAND (t, 0) == var);
14707 t = TREE_OPERAND (t, 1);
14708 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14709 || TREE_CODE (t) == MINUS_EXPR
14710 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14711 gcc_assert (TREE_OPERAND (t, 0) == var);
14712 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14713 is_doacross ? var : decl,
14714 TREE_OPERAND (t, 1));
14716 gimple_seq *seq;
14717 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14718 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14719 else
14720 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14721 push_gimplify_context ();
14722 gimplify_assign (decl, t, seq);
14723 gimple *bind = NULL;
14724 if (gimplify_ctxp->temps)
14726 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14727 *seq = NULL;
14728 gimplify_seq_add_stmt (seq, bind);
14730 pop_gimplify_context (bind);
14733 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14734 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14736 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14737 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14738 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14739 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14740 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14741 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14742 gcc_assert (COMPARISON_CLASS_P (t));
14743 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14744 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14745 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14749 BITMAP_FREE (has_decl_expr);
14750 delete allocate_uids;
14752 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14753 || (loop_p && orig_for_stmt == for_stmt))
14755 push_gimplify_context ();
14756 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14758 OMP_FOR_BODY (orig_for_stmt)
14759 = build3 (BIND_EXPR, void_type_node, NULL,
14760 OMP_FOR_BODY (orig_for_stmt), NULL);
14761 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14765 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14766 &for_body);
14768 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14769 || (loop_p && orig_for_stmt == for_stmt))
14771 if (gimple_code (g) == GIMPLE_BIND)
14772 pop_gimplify_context (g);
14773 else
14774 pop_gimplify_context (NULL);
14777 if (orig_for_stmt != for_stmt)
14778 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14780 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14781 decl = TREE_OPERAND (t, 0);
14782 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14783 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14784 gimplify_omp_ctxp = ctx->outer_context;
14785 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14786 gimplify_omp_ctxp = ctx;
14787 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14788 TREE_OPERAND (t, 0) = var;
14789 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14790 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14791 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14792 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14793 for (int j = i + 1;
14794 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14796 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14797 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14798 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14799 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14801 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14802 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14804 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14805 gcc_assert (COMPARISON_CLASS_P (t));
14806 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14807 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14809 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14810 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14815 gimplify_adjust_omp_clauses (pre_p, for_body,
14816 &OMP_FOR_CLAUSES (orig_for_stmt),
14817 TREE_CODE (orig_for_stmt));
14819 int kind;
14820 switch (TREE_CODE (orig_for_stmt))
14822 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14823 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14824 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14825 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14826 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14827 default:
14828 gcc_unreachable ();
14830 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14832 gimplify_seq_add_seq (pre_p, for_pre_body);
14833 for_pre_body = NULL;
14835 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14836 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14837 for_pre_body);
14838 if (orig_for_stmt != for_stmt)
14839 gimple_omp_for_set_combined_p (gfor, true);
14840 if (gimplify_omp_ctxp
14841 && (gimplify_omp_ctxp->combined_loop
14842 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14843 && gimplify_omp_ctxp->outer_context
14844 && gimplify_omp_ctxp->outer_context->combined_loop)))
14846 gimple_omp_for_set_combined_into_p (gfor, true);
14847 if (gimplify_omp_ctxp->combined_loop)
14848 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14849 else
14850 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14853 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14855 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14856 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14857 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14858 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14859 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14860 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14861 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14862 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14865 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14866 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
14867 The outer taskloop computes the number of iterations and the counts
14868 for collapsed loops, and holds the taskloop-specific clauses.
14869 The task construct stands for the effect of data sharing on the
14870 explicit task it creates, and the inner taskloop stands for the
14871 expansion of the static loop inside the explicit task construct. */
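/* As a rough illustration (schematic, not the exact trees built here):
   for

     #pragma omp taskloop grainsize(4) lastprivate(x)
     for (i = 0; i < n; i++) ...

   the grainsize clause ends up on the outer taskloop, a shared (and
   possibly firstprivate) clause for x on the task, and the lastprivate
   clause on the inner taskloop; a collapse clause, if present, would be
   duplicated on both taskloops.  */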
14872 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14874 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14875 tree task_clauses = NULL_TREE;
14876 tree c = *gfor_clauses_ptr;
14877 tree *gtask_clauses_ptr = &task_clauses;
14878 tree outer_for_clauses = NULL_TREE;
14879 tree *gforo_clauses_ptr = &outer_for_clauses;
14880 bitmap lastprivate_uids = NULL;
14881 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14883 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14884 if (c)
14886 lastprivate_uids = BITMAP_ALLOC (NULL);
14887 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14888 OMP_CLAUSE_LASTPRIVATE))
14889 bitmap_set_bit (lastprivate_uids,
14890 DECL_UID (OMP_CLAUSE_DECL (c)));
14892 c = *gfor_clauses_ptr;
14894 for (; c; c = OMP_CLAUSE_CHAIN (c))
14895 switch (OMP_CLAUSE_CODE (c))
14897 /* These clauses are allowed on task; move them there. */
14898 case OMP_CLAUSE_SHARED:
14899 case OMP_CLAUSE_FIRSTPRIVATE:
14900 case OMP_CLAUSE_DEFAULT:
14901 case OMP_CLAUSE_IF:
14902 case OMP_CLAUSE_UNTIED:
14903 case OMP_CLAUSE_FINAL:
14904 case OMP_CLAUSE_MERGEABLE:
14905 case OMP_CLAUSE_PRIORITY:
14906 case OMP_CLAUSE_REDUCTION:
14907 case OMP_CLAUSE_IN_REDUCTION:
14908 *gtask_clauses_ptr = c;
14909 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14910 break;
14911 case OMP_CLAUSE_PRIVATE:
14912 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14914 /* We want private on outer for and firstprivate
14915 on task. */
14916 *gtask_clauses_ptr
14917 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14918 OMP_CLAUSE_FIRSTPRIVATE);
14919 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14920 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14921 openacc);
14922 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14923 *gforo_clauses_ptr = c;
14924 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14926 else
14928 *gtask_clauses_ptr = c;
14929 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14931 break;
14932 /* These clauses go into outer taskloop clauses. */
14933 case OMP_CLAUSE_GRAINSIZE:
14934 case OMP_CLAUSE_NUM_TASKS:
14935 case OMP_CLAUSE_NOGROUP:
14936 *gforo_clauses_ptr = c;
14937 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14938 break;
14939 /* Collapse clause we duplicate on both taskloops. */
14940 case OMP_CLAUSE_COLLAPSE:
14941 *gfor_clauses_ptr = c;
14942 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14943 *gforo_clauses_ptr = copy_node (c);
14944 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14945 break;
14946 /* For lastprivate, keep the clause on the inner taskloop and add
14947 a shared clause on the task.  If the same decl is also firstprivate,
14948 also add a firstprivate clause on the inner taskloop. */
14949 case OMP_CLAUSE_LASTPRIVATE:
14950 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14952 /* For taskloop C++ lastprivate IVs, we want:
14953 1) private on outer taskloop
14954 2) firstprivate and shared on task
14955 3) lastprivate on inner taskloop */
14956 *gtask_clauses_ptr
14957 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14958 OMP_CLAUSE_FIRSTPRIVATE);
14959 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14960 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14961 openacc);
14962 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14963 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14964 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14965 OMP_CLAUSE_PRIVATE);
14966 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14967 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14968 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14969 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14971 *gfor_clauses_ptr = c;
14972 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14973 *gtask_clauses_ptr
14974 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14975 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14976 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14977 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14978 gtask_clauses_ptr
14979 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14980 break;
14981 /* The allocate clause we duplicate on the task and the inner taskloop
14982 if the decl is lastprivate, otherwise we just put it on the task. */
14983 case OMP_CLAUSE_ALLOCATE:
14984 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14985 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14987 /* Additionally, put firstprivate clause on task
14988 for the allocator if it is not constant. */
14989 *gtask_clauses_ptr
14990 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14991 OMP_CLAUSE_FIRSTPRIVATE);
14992 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14993 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14994 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14996 if (lastprivate_uids
14997 && bitmap_bit_p (lastprivate_uids,
14998 DECL_UID (OMP_CLAUSE_DECL (c))))
15000 *gfor_clauses_ptr = c;
15001 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
15002 *gtask_clauses_ptr = copy_node (c);
15003 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
15005 else
15007 *gtask_clauses_ptr = c;
15008 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
15010 break;
15011 default:
15012 gcc_unreachable ();
15014 *gfor_clauses_ptr = NULL_TREE;
15015 *gtask_clauses_ptr = NULL_TREE;
15016 *gforo_clauses_ptr = NULL_TREE;
15017 BITMAP_FREE (lastprivate_uids);
15018 gimple_set_location (gfor, input_location);
15019 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
15020 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
15021 NULL_TREE, NULL_TREE, NULL_TREE);
15022 gimple_set_location (g, input_location);
15023 gimple_omp_task_set_taskloop_p (g, true);
15024 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
15025 gomp_for *gforo
15026 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
15027 gimple_omp_for_collapse (gfor),
15028 gimple_omp_for_pre_body (gfor));
15029 gimple_omp_for_set_pre_body (gfor, NULL);
15030 gimple_omp_for_set_combined_p (gforo, true);
15031 gimple_omp_for_set_combined_into_p (gfor, true);
15032 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
15034 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
15035 tree v = create_tmp_var (type);
15036 gimple_omp_for_set_index (gforo, i, v);
15037 t = unshare_expr (gimple_omp_for_initial (gfor, i));
15038 gimple_omp_for_set_initial (gforo, i, t);
15039 gimple_omp_for_set_cond (gforo, i,
15040 gimple_omp_for_cond (gfor, i));
15041 t = unshare_expr (gimple_omp_for_final (gfor, i));
15042 gimple_omp_for_set_final (gforo, i, t);
15043 t = unshare_expr (gimple_omp_for_incr (gfor, i));
15044 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
15045 TREE_OPERAND (t, 0) = v;
15046 gimple_omp_for_set_incr (gforo, i, t);
15047 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
15048 OMP_CLAUSE_DECL (t) = v;
15049 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
15050 gimple_omp_for_set_clauses (gforo, t);
15051 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
15053 tree *p1 = NULL, *p2 = NULL;
15054 t = gimple_omp_for_initial (gforo, i);
15055 if (TREE_CODE (t) == TREE_VEC)
15056 p1 = &TREE_VEC_ELT (t, 0);
15057 t = gimple_omp_for_final (gforo, i);
15058 if (TREE_CODE (t) == TREE_VEC)
15060 if (p1)
15061 p2 = &TREE_VEC_ELT (t, 0);
15062 else
15063 p1 = &TREE_VEC_ELT (t, 0);
15065 if (p1)
15067 int j;
15068 for (j = 0; j < i; j++)
15069 if (*p1 == gimple_omp_for_index (gfor, j))
15071 *p1 = gimple_omp_for_index (gforo, j);
15072 if (p2)
15073 *p2 = *p1;
15074 break;
15076 gcc_assert (j < i);
15080 gimplify_seq_add_stmt (pre_p, gforo);
15082 else
15083 gimplify_seq_add_stmt (pre_p, gfor);
15085 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
15087 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15088 unsigned lastprivate_conditional = 0;
15089 while (ctx
15090 && (ctx->region_type == ORT_TARGET_DATA
15091 || ctx->region_type == ORT_TASKGROUP))
15092 ctx = ctx->outer_context;
15093 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
15094 for (tree c = gimple_omp_for_clauses (gfor);
15095 c; c = OMP_CLAUSE_CHAIN (c))
15096 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15097 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
15098 ++lastprivate_conditional;
15099 if (lastprivate_conditional)
15101 struct omp_for_data fd;
15102 omp_extract_for_data (gfor, &fd, NULL);
15103 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
15104 lastprivate_conditional);
15105 tree var = create_tmp_var_raw (type);
15106 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
15107 OMP_CLAUSE_DECL (c) = var;
15108 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
15109 gimple_omp_for_set_clauses (gfor, c);
15110 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
15113 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
15115 unsigned lastprivate_conditional = 0;
15116 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
15117 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15118 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
15119 ++lastprivate_conditional;
15120 if (lastprivate_conditional)
15122 struct omp_for_data fd;
15123 omp_extract_for_data (gfor, &fd, NULL);
15124 tree type = unsigned_type_for (fd.iter_type);
15125 while (lastprivate_conditional--)
15127 tree c = build_omp_clause (UNKNOWN_LOCATION,
15128 OMP_CLAUSE__CONDTEMP_);
15129 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
15130 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
15131 gimple_omp_for_set_clauses (gfor, c);
15136 if (ret != GS_ALL_DONE)
15137 return GS_ERROR;
15138 *expr_p = NULL_TREE;
15139 return GS_ALL_DONE;
15142 /* Helper for gimplify_omp_loop, called through walk_tree. */
15144 static tree
15145 note_no_context_vars (tree *tp, int *, void *data)
15147 if (VAR_P (*tp)
15148 && DECL_CONTEXT (*tp) == NULL_TREE
15149 && !is_global_var (*tp))
15151 vec<tree> *d = (vec<tree> *) data;
15152 d->safe_push (*tp);
15153 DECL_CONTEXT (*tp) = current_function_decl;
15155 return NULL_TREE;
15158 /* Gimplify the gross structure of an OMP_LOOP statement. */
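/* A schematic sketch (illustration only, not literal trees): the loop
   construct itself is rewritten to OMP_SIMD below, and the passes at
   the end then wrap it depending on the effective bind kind:

     bind(thread)    loop  ->  simd
     bind(parallel)  loop  ->  for simd
     bind(teams)     loop  ->  distribute parallel for simd

   (for bind(teams) the enclosing teams region already exists).  */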
15160 static enum gimplify_status
15161 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
15163 tree for_stmt = *expr_p;
15164 tree clauses = OMP_FOR_CLAUSES (for_stmt);
15165 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
15166 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
15167 int i;
15169 /* If order is not present, the behavior is as if order(concurrent)
15170 appeared. */
15171 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
15172 if (order == NULL_TREE)
15174 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
15175 OMP_CLAUSE_CHAIN (order) = clauses;
15176 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
15179 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
15180 if (bind == NULL_TREE)
15182 if (!flag_openmp) /* flag_openmp_simd */
15184 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
15185 kind = OMP_CLAUSE_BIND_TEAMS;
15186 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
15187 kind = OMP_CLAUSE_BIND_PARALLEL;
15188 else
15190 for (; octx; octx = octx->outer_context)
15192 if ((octx->region_type & ORT_ACC) != 0
15193 || octx->region_type == ORT_NONE
15194 || octx->region_type == ORT_IMPLICIT_TARGET)
15195 continue;
15196 break;
15198 if (octx == NULL && !in_omp_construct)
15199 error_at (EXPR_LOCATION (for_stmt),
15200 "%<bind%> clause not specified on a %<loop%> "
15201 "construct not nested inside another OpenMP construct");
15203 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
15204 OMP_CLAUSE_CHAIN (bind) = clauses;
15205 OMP_CLAUSE_BIND_KIND (bind) = kind;
15206 OMP_FOR_CLAUSES (for_stmt) = bind;
15208 else
15209 switch (OMP_CLAUSE_BIND_KIND (bind))
15211 case OMP_CLAUSE_BIND_THREAD:
15212 break;
15213 case OMP_CLAUSE_BIND_PARALLEL:
15214 if (!flag_openmp) /* flag_openmp_simd */
15216 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15217 break;
15219 for (; octx; octx = octx->outer_context)
15220 if (octx->region_type == ORT_SIMD
15221 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
15223 error_at (EXPR_LOCATION (for_stmt),
15224 "%<bind(parallel)%> on a %<loop%> construct nested "
15225 "inside %<simd%> construct");
15226 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15227 break;
15229 kind = OMP_CLAUSE_BIND_PARALLEL;
15230 break;
15231 case OMP_CLAUSE_BIND_TEAMS:
15232 if (!flag_openmp) /* flag_openmp_simd */
15234 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15235 break;
15237 if ((octx
15238 && octx->region_type != ORT_IMPLICIT_TARGET
15239 && octx->region_type != ORT_NONE
15240 && (octx->region_type & ORT_TEAMS) == 0)
15241 || in_omp_construct)
15243 error_at (EXPR_LOCATION (for_stmt),
15244 "%<bind(teams)%> on a %<loop%> region not strictly "
15245 "nested inside of a %<teams%> region");
15246 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15247 break;
15249 kind = OMP_CLAUSE_BIND_TEAMS;
15250 break;
15251 default:
15252 gcc_unreachable ();
15255 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
15256 switch (OMP_CLAUSE_CODE (*pc))
15258 case OMP_CLAUSE_REDUCTION:
15259 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
15261 error_at (OMP_CLAUSE_LOCATION (*pc),
15262 "%<inscan%> %<reduction%> clause on "
15263 "%qs construct", "loop");
15264 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
15266 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
15268 error_at (OMP_CLAUSE_LOCATION (*pc),
15269 "invalid %<task%> reduction modifier on construct "
15270 "other than %<parallel%>, %qs or %<sections%>",
15271 lang_GNU_Fortran () ? "do" : "for");
15272 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
15274 pc = &OMP_CLAUSE_CHAIN (*pc);
15275 break;
15276 case OMP_CLAUSE_LASTPRIVATE:
15277 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15279 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15280 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15281 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
15282 break;
15283 if (OMP_FOR_ORIG_DECLS (for_stmt)
15284 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15285 i)) == TREE_LIST
15286 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15287 i)))
15289 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15290 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
15291 break;
15294 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
15296 error_at (OMP_CLAUSE_LOCATION (*pc),
15297 "%<lastprivate%> clause on a %<loop%> construct refers "
15298 "to a variable %qD which is not the loop iterator",
15299 OMP_CLAUSE_DECL (*pc));
15300 *pc = OMP_CLAUSE_CHAIN (*pc);
15301 break;
15303 pc = &OMP_CLAUSE_CHAIN (*pc);
15304 break;
15305 default:
15306 pc = &OMP_CLAUSE_CHAIN (*pc);
15307 break;
15310 TREE_SET_CODE (for_stmt, OMP_SIMD);
15312 int last;
15313 switch (kind)
15315 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
15316 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
15317 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
15319 for (int pass = 1; pass <= last; pass++)
15321 if (pass == 2)
15323 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
15324 make_node (BLOCK));
15325 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
15326 *expr_p = make_node (OMP_PARALLEL);
15327 TREE_TYPE (*expr_p) = void_type_node;
15328 OMP_PARALLEL_BODY (*expr_p) = bind;
15329 OMP_PARALLEL_COMBINED (*expr_p) = 1;
15330 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
15331 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
15332 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15333 if (OMP_FOR_ORIG_DECLS (for_stmt)
15334 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
15335 == TREE_LIST))
15337 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15338 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
15340 *pc = build_omp_clause (UNKNOWN_LOCATION,
15341 OMP_CLAUSE_FIRSTPRIVATE);
15342 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
15343 pc = &OMP_CLAUSE_CHAIN (*pc);
15347 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
15348 tree *pc = &OMP_FOR_CLAUSES (t);
15349 TREE_TYPE (t) = void_type_node;
15350 OMP_FOR_BODY (t) = *expr_p;
15351 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
15352 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15353 switch (OMP_CLAUSE_CODE (c))
15355 case OMP_CLAUSE_BIND:
15356 case OMP_CLAUSE_ORDER:
15357 case OMP_CLAUSE_COLLAPSE:
15358 *pc = copy_node (c);
15359 pc = &OMP_CLAUSE_CHAIN (*pc);
15360 break;
15361 case OMP_CLAUSE_PRIVATE:
15362 case OMP_CLAUSE_FIRSTPRIVATE:
15363 /* Only needed on innermost. */
15364 break;
15365 case OMP_CLAUSE_LASTPRIVATE:
15366 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15368 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15369 OMP_CLAUSE_FIRSTPRIVATE);
15370 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15371 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15372 pc = &OMP_CLAUSE_CHAIN (*pc);
15374 *pc = copy_node (c);
15375 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15376 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15377 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15379 if (pass != last)
15380 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15381 else
15382 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15383 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15385 pc = &OMP_CLAUSE_CHAIN (*pc);
15386 break;
15387 case OMP_CLAUSE_REDUCTION:
15388 *pc = copy_node (c);
15389 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15390 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15391 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15393 auto_vec<tree> no_context_vars;
15394 int walk_subtrees = 0;
15395 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15396 &walk_subtrees, &no_context_vars);
15397 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15398 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15399 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15400 note_no_context_vars,
15401 &no_context_vars);
15402 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15403 note_no_context_vars,
15404 &no_context_vars);
15406 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15407 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15408 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15409 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15410 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15412 hash_map<tree, tree> decl_map;
15413 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15414 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15415 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15416 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15417 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15418 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15420 copy_body_data id;
15421 memset (&id, 0, sizeof (id));
15422 id.src_fn = current_function_decl;
15423 id.dst_fn = current_function_decl;
15424 id.src_cfun = cfun;
15425 id.decl_map = &decl_map;
15426 id.copy_decl = copy_decl_no_change;
15427 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15428 id.transform_new_cfg = true;
15429 id.transform_return_to_modify = false;
15430 id.eh_lp_nr = 0;
15431 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15432 &id, NULL);
15433 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15434 &id, NULL);
15436 for (tree d : no_context_vars)
15438 DECL_CONTEXT (d) = NULL_TREE;
15439 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15442 else
15444 OMP_CLAUSE_REDUCTION_INIT (*pc)
15445 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15446 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15447 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15449 pc = &OMP_CLAUSE_CHAIN (*pc);
15450 break;
15451 default:
15452 gcc_unreachable ();
15454 *pc = NULL_TREE;
15455 *expr_p = t;
15457 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
15461 /* Helper function of optimize_target_teams; find the OMP_TEAMS inside
15462 OMP_TARGET's body. */
15464 static tree
15465 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15467 *walk_subtrees = 0;
15468 switch (TREE_CODE (*tp))
15470 case OMP_TEAMS:
15471 return *tp;
15472 case BIND_EXPR:
15473 case STATEMENT_LIST:
15474 *walk_subtrees = 1;
15475 break;
15476 default:
15477 break;
15479 return NULL_TREE;
15482 /* Helper function of optimize_target_teams; determine whether the
15483 expression can be computed safely on the host before the target construct. */
15485 static tree
15486 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15488 splay_tree_node n;
15490 if (TYPE_P (*tp))
15492 *walk_subtrees = 0;
15493 return NULL_TREE;
15495 switch (TREE_CODE (*tp))
15497 case VAR_DECL:
15498 case PARM_DECL:
15499 case RESULT_DECL:
15500 *walk_subtrees = 0;
15501 if (error_operand_p (*tp)
15502 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15503 || DECL_HAS_VALUE_EXPR_P (*tp)
15504 || DECL_THREAD_LOCAL_P (*tp)
15505 || TREE_SIDE_EFFECTS (*tp)
15506 || TREE_THIS_VOLATILE (*tp))
15507 return *tp;
15508 if (is_global_var (*tp)
15509 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15510 || lookup_attribute ("omp declare target link",
15511 DECL_ATTRIBUTES (*tp))))
15512 return *tp;
15513 if (VAR_P (*tp)
15514 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15515 && !is_global_var (*tp)
15516 && decl_function_context (*tp) == current_function_decl)
15517 return *tp;
15518 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15519 (splay_tree_key) *tp);
15520 if (n == NULL)
15522 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15523 return NULL_TREE;
15524 return *tp;
15526 else if (n->value & GOVD_LOCAL)
15527 return *tp;
15528 else if (n->value & GOVD_FIRSTPRIVATE)
15529 return NULL_TREE;
15530 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15531 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15532 return NULL_TREE;
15533 return *tp;
15534 case INTEGER_CST:
15535 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15536 return *tp;
15537 return NULL_TREE;
15538 case TARGET_EXPR:
15539 if (TARGET_EXPR_INITIAL (*tp)
15540 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15541 return *tp;
15542 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15543 walk_subtrees, NULL);
15544 /* Allow some reasonable subset of integral arithmetic. */
15545 case PLUS_EXPR:
15546 case MINUS_EXPR:
15547 case MULT_EXPR:
15548 case TRUNC_DIV_EXPR:
15549 case CEIL_DIV_EXPR:
15550 case FLOOR_DIV_EXPR:
15551 case ROUND_DIV_EXPR:
15552 case TRUNC_MOD_EXPR:
15553 case CEIL_MOD_EXPR:
15554 case FLOOR_MOD_EXPR:
15555 case ROUND_MOD_EXPR:
15556 case RDIV_EXPR:
15557 case EXACT_DIV_EXPR:
15558 case MIN_EXPR:
15559 case MAX_EXPR:
15560 case LSHIFT_EXPR:
15561 case RSHIFT_EXPR:
15562 case BIT_IOR_EXPR:
15563 case BIT_XOR_EXPR:
15564 case BIT_AND_EXPR:
15565 case NEGATE_EXPR:
15566 case ABS_EXPR:
15567 case BIT_NOT_EXPR:
15568 case NON_LVALUE_EXPR:
15569 CASE_CONVERT:
15570 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15571 return *tp;
15572 return NULL_TREE;
15573 /* And disallow anything else, except for comparisons. */
15574 default:
15575 if (COMPARISON_CLASS_P (*tp))
15576 return NULL_TREE;
15577 return *tp;
15581 /* Try to determine if the num_teams and/or thread_limit expressions
15582 can have their values determined already before entering the
15583 target construct.
15584 INTEGER_CSTs trivially can; so can integral decls that are firstprivate
15585 (explicitly or implicitly) or explicitly map(always, to:) or
15586 map(always, tofrom:) on the target region, as well as expressions
15587 involving simple arithmetic on those.  Function calls are not OK,
15588 nor is dereferencing something, etc.
15589 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15590 EXPR based on what we find:
15591 0 stands for a clause not specified at all (use the implementation
15592 default);
15593 -1 stands for a value that can't be determined easily before entering
15594 the target construct;
15595 -2 means that no explicit teams construct was specified.
15596 If the teams construct is not present at all, use 1 for num_teams
15597 and 0 for thread_limit (only one team is involved, and the thread limit is implementation defined). */
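/* For example (illustrative): for #pragma omp target teams num_teams(4)
   a NUM_TEAMS clause with upper expression 4 is added to the target;
   for num_teams(foo ()) the value can't be precomputed and -1 is used;
   and with no teams construct inside the target at all, -2 is used.  */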
15599 static void
15600 optimize_target_teams (tree target, gimple_seq *pre_p)
15602 tree body = OMP_BODY (target);
15603 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15604 tree num_teams_lower = NULL_TREE;
15605 tree num_teams_upper = integer_zero_node;
15606 tree thread_limit = integer_zero_node;
15607 location_t num_teams_loc = EXPR_LOCATION (target);
15608 location_t thread_limit_loc = EXPR_LOCATION (target);
15609 tree c, *p, expr;
15610 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15612 if (teams == NULL_TREE)
15613 num_teams_upper = build_int_cst (integer_type_node, -2);
15614 else
15615 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15617 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15619 p = &num_teams_upper;
15620 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15621 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15623 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15624 if (TREE_CODE (expr) == INTEGER_CST)
15625 num_teams_lower = expr;
15626 else if (walk_tree (&expr, computable_teams_clause,
15627 NULL, NULL))
15628 num_teams_lower = integer_minus_one_node;
15629 else
15631 num_teams_lower = expr;
15632 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15633 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15634 is_gimple_val, fb_rvalue, false)
15635 == GS_ERROR)
15637 gimplify_omp_ctxp = target_ctx;
15638 num_teams_lower = integer_minus_one_node;
15640 else
15642 gimplify_omp_ctxp = target_ctx;
15643 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15644 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15645 = num_teams_lower;
15650 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15652 p = &thread_limit;
15653 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15655 else
15656 continue;
15657 expr = OMP_CLAUSE_OPERAND (c, 0);
15658 if (TREE_CODE (expr) == INTEGER_CST)
15660 *p = expr;
15661 continue;
15663 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15665 *p = integer_minus_one_node;
15666 continue;
15668 *p = expr;
15669 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15670 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15671 == GS_ERROR)
15673 gimplify_omp_ctxp = target_ctx;
15674 *p = integer_minus_one_node;
15675 continue;
15677 gimplify_omp_ctxp = target_ctx;
15678 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15679 OMP_CLAUSE_OPERAND (c, 0) = *p;
15681 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15683 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15684 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15685 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15686 OMP_TARGET_CLAUSES (target) = c;
15688 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15689 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15690 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15691 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15692 OMP_TARGET_CLAUSES (target) = c;
15695 /* Gimplify the gross structure of several OMP constructs. */
15697 static void
15698 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15700 tree expr = *expr_p;
15701 gimple *stmt;
15702 gimple_seq body = NULL;
15703 enum omp_region_type ort;
15705 switch (TREE_CODE (expr))
15707 case OMP_SECTIONS:
15708 case OMP_SINGLE:
15709 ort = ORT_WORKSHARE;
15710 break;
15711 case OMP_SCOPE:
15712 ort = ORT_TASKGROUP;
15713 break;
15714 case OMP_TARGET:
15715 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15716 break;
15717 case OACC_KERNELS:
15718 ort = ORT_ACC_KERNELS;
15719 break;
15720 case OACC_PARALLEL:
15721 ort = ORT_ACC_PARALLEL;
15722 break;
15723 case OACC_SERIAL:
15724 ort = ORT_ACC_SERIAL;
15725 break;
15726 case OACC_DATA:
15727 ort = ORT_ACC_DATA;
15728 break;
15729 case OMP_TARGET_DATA:
15730 ort = ORT_TARGET_DATA;
15731 break;
15732 case OMP_TEAMS:
15733 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15734 if (gimplify_omp_ctxp == NULL
15735 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15736 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15737 break;
15738 case OACC_HOST_DATA:
15739 ort = ORT_ACC_HOST_DATA;
15740 break;
15741 default:
15742 gcc_unreachable ();
15745 bool save_in_omp_construct = in_omp_construct;
15746 if ((ort & ORT_ACC) == 0)
15747 in_omp_construct = false;
15748 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15749 TREE_CODE (expr));
15750 if (TREE_CODE (expr) == OMP_TARGET)
15751 optimize_target_teams (expr, pre_p);
15752 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15753 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15755 push_gimplify_context ();
15756 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15757 if (gimple_code (g) == GIMPLE_BIND)
15758 pop_gimplify_context (g);
15759 else
15760 pop_gimplify_context (NULL);
15761 if ((ort & ORT_TARGET_DATA) != 0)
15763 enum built_in_function end_ix;
15764 switch (TREE_CODE (expr))
15766 case OACC_DATA:
15767 case OACC_HOST_DATA:
15768 end_ix = BUILT_IN_GOACC_DATA_END;
15769 break;
15770 case OMP_TARGET_DATA:
15771 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15772 break;
15773 default:
15774 gcc_unreachable ();
15776 tree fn = builtin_decl_explicit (end_ix);
15777 g = gimple_build_call (fn, 0);
15778 gimple_seq cleanup = NULL;
15779 gimple_seq_add_stmt (&cleanup, g);
15780 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15781 body = NULL;
15782 gimple_seq_add_stmt (&body, g);
15785 else
15786 gimplify_and_add (OMP_BODY (expr), &body);
15787 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15788 TREE_CODE (expr));
15789 in_omp_construct = save_in_omp_construct;
15791 switch (TREE_CODE (expr))
15793 case OACC_DATA:
15794 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15795 OMP_CLAUSES (expr));
15796 break;
15797 case OACC_HOST_DATA:
15798 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15800 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15802 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15805 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15806 OMP_CLAUSES (expr));
15807 break;
15808 case OACC_KERNELS:
15809 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15810 OMP_CLAUSES (expr));
15811 break;
15812 case OACC_PARALLEL:
15813 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15814 OMP_CLAUSES (expr));
15815 break;
15816 case OACC_SERIAL:
15817 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15818 OMP_CLAUSES (expr));
15819 break;
15820 case OMP_SECTIONS:
15821 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15822 break;
15823 case OMP_SINGLE:
15824 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15825 break;
15826 case OMP_SCOPE:
15827 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15828 break;
15829 case OMP_TARGET:
15830 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15831 OMP_CLAUSES (expr));
15832 break;
15833 case OMP_TARGET_DATA:
15834 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15835 to be evaluated before the use_device_{ptr,addr} clauses if they
15836 refer to the same variables. */
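/* E.g. (hypothetical clause list): map(tofrom:a) use_device_ptr(p)
   map(to:b) is reordered to map(tofrom:a) map(to:b) use_device_ptr(p)
   by the splicing below.  */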
15838 tree use_device_clauses;
15839 tree *pc, *uc = &use_device_clauses;
15840 for (pc = &OMP_CLAUSES (expr); *pc; )
15841 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15842 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15844 *uc = *pc;
15845 *pc = OMP_CLAUSE_CHAIN (*pc);
15846 uc = &OMP_CLAUSE_CHAIN (*uc);
15848 else
15849 pc = &OMP_CLAUSE_CHAIN (*pc);
15850 *uc = NULL_TREE;
15851 *pc = use_device_clauses;
15852 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15853 OMP_CLAUSES (expr));
15855 break;
15856 case OMP_TEAMS:
15857 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15858 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15859 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15860 break;
15861 default:
15862 gcc_unreachable ();
15865 gimplify_seq_add_stmt (pre_p, stmt);
15866 *expr_p = NULL_TREE;
15869 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15870 target update constructs. */
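/* E.g. (illustrative): #pragma omp target enter data map(to: a) becomes
   a body-less GIMPLE_OMP_TARGET statement with kind
   GF_OMP_TARGET_KIND_ENTER_DATA carrying the map clauses.  */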
15872 static void
15873 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15875 tree expr = *expr_p;
15876 int kind;
15877 gomp_target *stmt;
15878 enum omp_region_type ort = ORT_WORKSHARE;
15880 switch (TREE_CODE (expr))
15882 case OACC_ENTER_DATA:
15883 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15884 ort = ORT_ACC;
15885 break;
15886 case OACC_EXIT_DATA:
15887 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15888 ort = ORT_ACC;
15889 break;
15890 case OACC_UPDATE:
15891 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15892 ort = ORT_ACC;
15893 break;
15894 case OMP_TARGET_UPDATE:
15895 kind = GF_OMP_TARGET_KIND_UPDATE;
15896 break;
15897 case OMP_TARGET_ENTER_DATA:
15898 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15899 break;
15900 case OMP_TARGET_EXIT_DATA:
15901 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15902 break;
15903 default:
15904 gcc_unreachable ();
15906 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15907 ort, TREE_CODE (expr));
15908 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15909 TREE_CODE (expr));
15910 if (TREE_CODE (expr) == OACC_UPDATE
15911 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15912 OMP_CLAUSE_IF_PRESENT))
15914 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15915 clause. */
15916 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15917 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15918 switch (OMP_CLAUSE_MAP_KIND (c))
15920 case GOMP_MAP_FORCE_TO:
15921 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15922 break;
15923 case GOMP_MAP_FORCE_FROM:
15924 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15925 break;
15926 default:
15927 break;
15930 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15931 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15932 OMP_CLAUSE_FINALIZE))
15934 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15935 semantics. */
15936 bool have_clause = false;
15937 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15938 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15939 switch (OMP_CLAUSE_MAP_KIND (c))
15941 case GOMP_MAP_FROM:
15942 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15943 have_clause = true;
15944 break;
15945 case GOMP_MAP_RELEASE:
15946 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15947 have_clause = true;
15948 break;
15949 case GOMP_MAP_TO_PSET:
15950 /* Fortran arrays with descriptors must map that descriptor when
15951 doing standalone "attach" operations (in OpenACC). In that
15952 case GOMP_MAP_TO_PSET appears by itself with no preceding
15953 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15954 break;
15955 case GOMP_MAP_POINTER:
15956 /* TODO PR92929: we may see these here, but they'll always follow
15957 one of the clauses above, and will be handled by libgomp as
15958 one group, so no handling required here. */
15959 gcc_assert (have_clause);
15960 break;
15961 case GOMP_MAP_DETACH:
15962 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15963 have_clause = false;
15964 break;
15965 case GOMP_MAP_STRUCT:
15966 have_clause = false;
15967 break;
15968 default:
15969 gcc_unreachable ();
15972 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15974 gimplify_seq_add_stmt (pre_p, stmt);
15975 *expr_p = NULL_TREE;
15978 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15979 stabilized the lhs of the atomic operation as *ADDR. Return true if
15980 EXPR is this stabilized form. */
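/* E.g. if ADDR is &x, then x itself as well as *&x (possibly wrapped
   in useless type conversions) are recognized as the stabilized lhs
   (illustration only).  */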
15982 static bool
15983 goa_lhs_expr_p (tree expr, tree addr)
15985 /* Also include casts to other type variants. The C front end is fond
15986 of adding these for e.g. volatile variables. This is like
15987 STRIP_TYPE_NOPS but includes the main variant lookup. */
15988 STRIP_USELESS_TYPE_CONVERSION (expr);
15990 if (INDIRECT_REF_P (expr))
15992 expr = TREE_OPERAND (expr, 0);
15993 while (expr != addr
15994 && (CONVERT_EXPR_P (expr)
15995 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15996 && TREE_CODE (expr) == TREE_CODE (addr)
15997 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15999 expr = TREE_OPERAND (expr, 0);
16000 addr = TREE_OPERAND (addr, 0);
16002 if (expr == addr)
16003 return true;
16004 return (TREE_CODE (addr) == ADDR_EXPR
16005 && TREE_CODE (expr) == ADDR_EXPR
16006 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
16008 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
16009 return true;
16010 return false;
16013 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
16014 expression does not involve the lhs, evaluate it into a temporary.
16015 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
16016 or -1 if an error was encountered. */
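/* As a sketch (illustrative): for an update like x = x + foo (), the
   x operand is replaced by LHS_VAR (the atomically loaded value),
   while foo () does not involve the lhs and is gimplified into a
   temporary in PRE_P; 1 is returned because the lhs appeared.  */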
16018 static int
16019 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
16020 tree lhs_var, tree &target_expr, bool rhs, int depth)
16022 tree expr = *expr_p;
16023 int saw_lhs = 0;
16025 if (goa_lhs_expr_p (expr, lhs_addr))
16027 if (pre_p)
16028 *expr_p = lhs_var;
16029 return 1;
16031 if (is_gimple_val (expr))
16032 return 0;
16034 /* The maximum depth at which the lhs can appear in an expression is
16035 reached for __builtin_clear_padding (...), __builtin_clear_padding (...),
16036 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
16037 if (++depth > 7)
16038 goto finish;
16040 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
16042 case tcc_binary:
16043 case tcc_comparison:
16044 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
16045 lhs_var, target_expr, true, depth);
16046 /* FALLTHRU */
16047 case tcc_unary:
16048 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
16049 lhs_var, target_expr, true, depth);
16050 break;
16051 case tcc_expression:
16052 switch (TREE_CODE (expr))
16054 case TRUTH_ANDIF_EXPR:
16055 case TRUTH_ORIF_EXPR:
16056 case TRUTH_AND_EXPR:
16057 case TRUTH_OR_EXPR:
16058 case TRUTH_XOR_EXPR:
16059 case BIT_INSERT_EXPR:
16060 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
16061 lhs_addr, lhs_var, target_expr, true,
16062 depth);
16063 /* FALLTHRU */
16064 case TRUTH_NOT_EXPR:
16065 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16066 lhs_addr, lhs_var, target_expr, true,
16067 depth);
16068 break;
16069 case MODIFY_EXPR:
16070 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
16071 target_expr, true, depth))
16072 break;
16073 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
16074 lhs_addr, lhs_var, target_expr, true,
16075 depth);
16076 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16077 lhs_addr, lhs_var, target_expr, false,
16078 depth);
16079 break;
16080 /* FALLTHRU */
16081 case ADDR_EXPR:
16082 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
16083 target_expr, true, depth))
16084 break;
16085 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16086 lhs_addr, lhs_var, target_expr, false,
16087 depth);
16088 break;
16089 case COMPOUND_EXPR:
16090 /* Break out any preevaluations from cp_build_modify_expr. */
16091 for (; TREE_CODE (expr) == COMPOUND_EXPR;
16092 expr = TREE_OPERAND (expr, 1))
16094 /* Special-case __builtin_clear_padding call before
16095 __builtin_memcmp. */
16096 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
16098 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
16099 if (fndecl
16100 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
16101 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
16102 && (!pre_p
16103 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
16104 lhs_addr, lhs_var,
16105 target_expr, true, depth)))
16107 if (pre_p)
16108 *expr_p = expr;
16109 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
16110 pre_p, lhs_addr, lhs_var,
16111 target_expr, true, depth);
16112 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
16113 pre_p, lhs_addr, lhs_var,
16114 target_expr, rhs, depth);
16115 return saw_lhs;
16119 if (pre_p)
16120 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
16122 if (!pre_p)
16123 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
16124 target_expr, rhs, depth);
16125 *expr_p = expr;
16126 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
16127 target_expr, rhs, depth);
16128 case COND_EXPR:
16129 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
16130 lhs_var, target_expr, true, depth))
16131 break;
16132 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16133 lhs_addr, lhs_var, target_expr, true,
16134 depth);
16135 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
16136 lhs_addr, lhs_var, target_expr, true,
16137 depth);
16138 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
16139 lhs_addr, lhs_var, target_expr, true,
16140 depth);
16141 break;
16142 case TARGET_EXPR:
16143 if (TARGET_EXPR_INITIAL (expr))
16145 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
16146 lhs_var, target_expr, true,
16147 depth))
16148 break;
16149 if (expr == target_expr)
16150 saw_lhs = 1;
16151 else
16153 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
16154 pre_p, lhs_addr, lhs_var,
16155 target_expr, true, depth);
16156 if (saw_lhs && target_expr == NULL_TREE && pre_p)
16157 target_expr = expr;
16160 break;
16161 default:
16162 break;
16164 break;
16165 case tcc_reference:
16166 if (TREE_CODE (expr) == BIT_FIELD_REF
16167 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
16168 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
16169 lhs_addr, lhs_var, target_expr, true,
16170 depth);
16171 break;
16172 case tcc_vl_exp:
16173 if (TREE_CODE (expr) == CALL_EXPR)
16175 if (tree fndecl = get_callee_fndecl (expr))
16176 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
16177 BUILT_IN_MEMCMP))
16179 int nargs = call_expr_nargs (expr);
16180 for (int i = 0; i < nargs; i++)
16181 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
16182 pre_p, lhs_addr, lhs_var,
16183 target_expr, true, depth);
16186 break;
16187 default:
16188 break;
16191 finish:
16192 if (saw_lhs == 0 && pre_p)
16194 enum gimplify_status gs;
16195 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
16197 gimplify_stmt (&expr, pre_p);
16198 return saw_lhs;
16200 else if (rhs)
16201 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
16202 else
16203 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
16204 if (gs != GS_ALL_DONE)
16205 saw_lhs = -1;
16208 return saw_lhs;
16211 /* Gimplify an OMP_ATOMIC statement. */
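/* Schematically (illustration only):

     #pragma omp atomic capture
     { v = x; x = x + 1; }

   is lowered to roughly

     GIMPLE_OMP_ATOMIC_LOAD  (tmp, &x)
     GIMPLE_OMP_ATOMIC_STORE (tmp + 1)

   with the load marked as needing its value (OMP_ATOMIC_CAPTURE_OLD),
   so that v can be set from it.  */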
16213 static enum gimplify_status
16214 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
16216 tree addr = TREE_OPERAND (*expr_p, 0);
16217 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
16218 ? NULL : TREE_OPERAND (*expr_p, 1);
16219 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
16220 tree tmp_load;
16221 gomp_atomic_load *loadstmt;
16222 gomp_atomic_store *storestmt;
16223 tree target_expr = NULL_TREE;
16225 tmp_load = create_tmp_reg (type);
16226 if (rhs
16227 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
16228 true, 0) < 0)
16229 return GS_ERROR;
16231 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
16232 != GS_ALL_DONE)
16233 return GS_ERROR;
16235 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
16236 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16237 gimplify_seq_add_stmt (pre_p, loadstmt);
16238 if (rhs)
16240 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
16241 representatives. Use BIT_FIELD_REF on the lhs instead. */
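/* Schematically: tmp_load = BIT_INSERT_EXPR <tmp_load, op1, pos> is
   rewritten below as BIT_FIELD_REF <store, size, pos> = op1 (an
   illustration of the shape, not literal trees).  */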
16242 tree rhsarg = rhs;
16243 if (TREE_CODE (rhs) == COND_EXPR)
16244 rhsarg = TREE_OPERAND (rhs, 1);
16245 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
16246 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
16248 tree bitpos = TREE_OPERAND (rhsarg, 2);
16249 tree op1 = TREE_OPERAND (rhsarg, 1);
16250 tree bitsize;
16251 tree tmp_store = tmp_load;
16252 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
16253 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
16254 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
16255 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
16256 else
16257 bitsize = TYPE_SIZE (TREE_TYPE (op1));
16258 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
16259 tree t = build2_loc (EXPR_LOCATION (rhsarg),
16260 MODIFY_EXPR, void_type_node,
16261 build3_loc (EXPR_LOCATION (rhsarg),
16262 BIT_FIELD_REF, TREE_TYPE (op1),
16263 tmp_store, bitsize, bitpos), op1);
16264 if (TREE_CODE (rhs) == COND_EXPR)
16265 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
16266 TREE_OPERAND (rhs, 0), t, void_node);
16267 gimplify_and_add (t, pre_p);
16268 rhs = tmp_store;
16270 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
16271 if (TREE_CODE (rhs) == COND_EXPR)
16272 gimplify_ctxp->allow_rhs_cond_expr = true;
16273 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
16274 is_gimple_val, fb_rvalue);
16275 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
16276 if (gs != GS_ALL_DONE)
16277 return GS_ERROR;
16280 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
16281 rhs = tmp_load;
16282 storestmt
16283 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16284 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
16286 gimple_omp_atomic_set_weak (loadstmt);
16287 gimple_omp_atomic_set_weak (storestmt);
16289 gimplify_seq_add_stmt (pre_p, storestmt);
16290 switch (TREE_CODE (*expr_p))
16292 case OMP_ATOMIC_READ:
16293 case OMP_ATOMIC_CAPTURE_OLD:
16294 *expr_p = tmp_load;
16295 gimple_omp_atomic_set_need_value (loadstmt);
16296 break;
16297 case OMP_ATOMIC_CAPTURE_NEW:
16298 *expr_p = rhs;
16299 gimple_omp_atomic_set_need_value (storestmt);
16300 break;
16301 default:
16302 *expr_p = NULL;
16303 break;
16306 return GS_ALL_DONE;
16309 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
16310 body, and adding some EH bits. */
16312 static enum gimplify_status
16313 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
16315 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
16316 gimple *body_stmt;
16317 gtransaction *trans_stmt;
16318 gimple_seq body = NULL;
16319 int subcode = 0;
16321 /* Wrap the transaction body in a BIND_EXPR so we have a context
16322 where to put decls for OMP. */
16323 if (TREE_CODE (tbody) != BIND_EXPR)
16325 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
16326 TREE_SIDE_EFFECTS (bind) = 1;
16327 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
16328 TRANSACTION_EXPR_BODY (expr) = bind;
16331 push_gimplify_context ();
16332 temp = voidify_wrapper_expr (*expr_p, NULL);
16334 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
16335 pop_gimplify_context (body_stmt);
16337 trans_stmt = gimple_build_transaction (body);
16338 if (TRANSACTION_EXPR_OUTER (expr))
16339 subcode = GTMA_IS_OUTER;
16340 else if (TRANSACTION_EXPR_RELAXED (expr))
16341 subcode = GTMA_IS_RELAXED;
16342 gimple_transaction_set_subcode (trans_stmt, subcode);
16344 gimplify_seq_add_stmt (pre_p, trans_stmt);
16346 if (temp)
16348 *expr_p = temp;
16349 return GS_OK;
16352 *expr_p = NULL_TREE;
16353 return GS_ALL_DONE;
16356 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
16357 is the OMP_BODY of the original EXPR (which has already been
16358 gimplified so it's not present in the EXPR).
16360 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
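/* E.g. (illustrative): inside a doacross loop nest with ordered(2),
   a clause like depend(sink: i-1, j) is checked below so that i and j
   name the iteration variables of the two outermost associated loops,
   and each is then remapped to the corresponding internal iteration
   variable.  */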
16362 static gimple *
16363 gimplify_omp_ordered (tree expr, gimple_seq body)
16365 tree c, decls;
16366 int failures = 0;
16367 unsigned int i;
16368 tree source_c = NULL_TREE;
16369 tree sink_c = NULL_TREE;
16371 if (gimplify_omp_ctxp)
16373 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16374 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16375 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16377 error_at (OMP_CLAUSE_LOCATION (c),
16378 "%<ordered%> construct with %qs clause must be "
16379 "closely nested inside a loop with %<ordered%> clause",
16380 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16381 failures++;
16383 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16384 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16386 bool fail = false;
16387 sink_c = c;
16388 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16389 continue; /* omp_cur_iteration - 1 */
16390 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16391 decls && TREE_CODE (decls) == TREE_LIST;
16392 decls = TREE_CHAIN (decls), ++i)
16393 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16394 continue;
16395 else if (TREE_VALUE (decls)
16396 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16398 error_at (OMP_CLAUSE_LOCATION (c),
16399 "variable %qE is not an iteration "
16400 "of outermost loop %d, expected %qE",
16401 TREE_VALUE (decls), i + 1,
16402 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16403 fail = true;
16404 failures++;
16406 else
16407 TREE_VALUE (decls)
16408 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16409 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16411 error_at (OMP_CLAUSE_LOCATION (c),
16412 "number of variables in %qs clause with "
16413 "%<sink%> modifier does not match number of "
16414 "iteration variables",
16415 OMP_CLAUSE_DOACROSS_DEPEND (c)
16416 ? "depend" : "doacross");
16417 failures++;
16420 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16421 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16423 if (source_c)
16425 error_at (OMP_CLAUSE_LOCATION (c),
16426 "more than one %qs clause with %<source%> "
16427 "modifier on an %<ordered%> construct",
16428 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16429 ? "depend" : "doacross");
16430 failures++;
16432 else
16433 source_c = c;
16436 if (source_c && sink_c)
16438 error_at (OMP_CLAUSE_LOCATION (source_c),
16439 "%qs clause with %<source%> modifier specified "
16440 "together with %qs clauses with %<sink%> modifier "
16441 "on the same construct",
16442 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16443 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16444 failures++;
16447 if (failures)
16448 return gimple_build_nop ();
16449 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16452 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16453 expression produces a value to be used as an operand inside a GIMPLE
16454 statement, the value will be stored back in *EXPR_P. This value will
16455 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16456 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16457 emitted in PRE_P and POST_P.
16459 Additionally, this process may overwrite parts of the input
16460 expression during gimplification. Ideally, it should be
16461 possible to do non-destructive gimplification.
16463 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16464 the expression needs to evaluate to a value to be used as
16465 an operand in a GIMPLE statement, this value will be stored in
16466 *EXPR_P on exit. This happens when the caller specifies one
16467 of fb_lvalue or fb_rvalue fallback flags.
16469 PRE_P will contain the sequence of GIMPLE statements corresponding
16470 to the evaluation of EXPR and all the side-effects that must
16471 be executed before the main expression. On exit, the last
16472 statement of PRE_P is the core statement being gimplified. For
16473 instance, when gimplifying 'if (++a)' the last statement in
16474 PRE_P will be 'if (t.1)' where t.1 is the result of
16475 pre-incrementing 'a'.
16477 POST_P will contain the sequence of GIMPLE statements corresponding
16478 to the evaluation of all the side-effects that must be executed
16479 after the main expression. If this is NULL, the post
16480 side-effects are stored at the end of PRE_P.
16482 The reason why the output is split in two is to handle post
16483 side-effects explicitly. In some cases, an expression may have
16484 inner and outer post side-effects which need to be emitted in
16485 an order different from the one given by the recursive
16486 traversal. For instance, for the expression (*p--)++ the post
16487 side-effects of '--' must actually occur *after* the post
16488 side-effects of '++'. However, gimplification will first visit
16489 the inner expression, so if a separate POST sequence was not
16490 used, the resulting sequence would be:
16492 1 t.1 = *p
16493 2 p = p - 1
16494 3 t.2 = t.1 + 1
16495 4 *p = t.2
16497 However, the post-decrement operation in line #2 must not be
16498 evaluated until after the store to *p at line #4, so the
16499 correct sequence should be:
16501 1 t.1 = *p
16502 2 t.2 = t.1 + 1
16503 3 *p = t.2
16504 4 p = p - 1
16506 So, by specifying a separate post queue, it is possible
16507 to emit the post side-effects in the correct order.
16508 If POST_P is NULL, an internal queue will be used. Before
16509 returning to the caller, the sequence POST_P is appended to
16510 the main output sequence PRE_P.
16512 GIMPLE_TEST_F points to a function that takes a tree T and
16513 returns nonzero if T is in the GIMPLE form requested by the
16514 caller. The GIMPLE predicates are in gimple.cc.
16516 FALLBACK tells the function what sort of a temporary we want if
16517 gimplification cannot produce an expression that complies with
16518 GIMPLE_TEST_F.
16520 fb_none means that no temporary should be generated
16521 fb_rvalue means that an rvalue is OK to generate
16522 fb_lvalue means that an lvalue is OK to generate
16523 fb_either means that either is OK, but an lvalue is preferable.
16524 fb_mayfail means that gimplification may fail (in which case
16525 GS_ERROR will be returned)
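As an illustration (a typical internal use, not part of the contract),
a caller that needs operand 0 as a GIMPLE value can write

ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
is_gimple_val, fb_rvalue);

which queues any required statements on PRE_P/POST_P and leaves a
valid rvalue in the operand slot.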
16527 The return value is either GS_ERROR or GS_ALL_DONE, since this
16528 function iterates until EXPR is completely gimplified or an error
16529 occurs. */
16531 enum gimplify_status
16532 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16533 bool (*gimple_test_f) (tree), fallback_t fallback)
16535 tree tmp;
16536 gimple_seq internal_pre = NULL;
16537 gimple_seq internal_post = NULL;
16538 tree save_expr;
16539 bool is_statement;
16540 location_t saved_location;
16541 enum gimplify_status ret;
16542 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16543 tree label;
16545 save_expr = *expr_p;
16546 if (save_expr == NULL_TREE)
16547 return GS_ALL_DONE;
16549 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16550 is_statement = gimple_test_f == is_gimple_stmt;
16551 if (is_statement)
16552 gcc_assert (pre_p);
16554 /* Consistency checks. */
16555 if (gimple_test_f == is_gimple_reg)
16556 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16557 else if (gimple_test_f == is_gimple_val
16558 || gimple_test_f == is_gimple_call_addr
16559 || gimple_test_f == is_gimple_condexpr_for_cond
16560 || gimple_test_f == is_gimple_mem_rhs
16561 || gimple_test_f == is_gimple_mem_rhs_or_call
16562 || gimple_test_f == is_gimple_reg_rhs
16563 || gimple_test_f == is_gimple_reg_rhs_or_call
16564 || gimple_test_f == is_gimple_asm_val
16565 || gimple_test_f == is_gimple_mem_ref_addr)
16566 gcc_assert (fallback & fb_rvalue);
16567 else if (gimple_test_f == is_gimple_min_lval
16568 || gimple_test_f == is_gimple_lvalue)
16569 gcc_assert (fallback & fb_lvalue);
16570 else if (gimple_test_f == is_gimple_addressable)
16571 gcc_assert (fallback & fb_either);
16572 else if (gimple_test_f == is_gimple_stmt)
16573 gcc_assert (fallback == fb_none);
16574 else
16576 /* We should have recognized the GIMPLE_TEST_F predicate to
16577 know what kind of fallback to use in case a temporary is
16578 needed to hold the value or address of *EXPR_P. */
16579 gcc_unreachable ();
16582 /* We used to check the predicate here and return immediately if it
16583 succeeds. This is wrong; the design is for gimplification to be
16584 idempotent, and for the predicates to only test for valid forms, not
16585 whether they are fully simplified. */
16586 if (pre_p == NULL)
16587 pre_p = &internal_pre;
16589 if (post_p == NULL)
16590 post_p = &internal_post;
16592 /* Remember the last statements added to PRE_P and POST_P. Every
16593 new statement added by the gimplification helpers needs to be
16594 annotated with location information. To centralize the
16595 responsibility, we remember the last statement that had been
16596 added to both queues before gimplifying *EXPR_P. If
16597 gimplification produces new statements in PRE_P and POST_P, those
16598 statements will be annotated with the same location information
16599 as *EXPR_P. */
16600 pre_last_gsi = gsi_last (*pre_p);
16601 post_last_gsi = gsi_last (*post_p);
16603 saved_location = input_location;
16604 if (save_expr != error_mark_node
16605 && EXPR_HAS_LOCATION (*expr_p))
16606 input_location = EXPR_LOCATION (*expr_p);
16608 /* Loop over the specific gimplifiers until the toplevel node
16609 remains the same. */
16612 /* Strip away as many useless type conversions as possible
16613 at the toplevel. */
16614 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16616 /* Remember the expr. */
16617 save_expr = *expr_p;
16619 /* Die, die, die, my darling. */
16620 if (error_operand_p (save_expr))
16622 ret = GS_ERROR;
16623 break;
16626 /* Do any language-specific gimplification. */
16627 ret = ((enum gimplify_status)
16628 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16629 if (ret == GS_OK)
16631 if (*expr_p == NULL_TREE)
16632 break;
16633 if (*expr_p != save_expr)
16634 continue;
16636 else if (ret != GS_UNHANDLED)
16637 break;
16639 /* Make sure that all the cases set 'ret' appropriately. */
16640 ret = GS_UNHANDLED;
16641 switch (TREE_CODE (*expr_p))
16643 /* First deal with the special cases. */
16645 case POSTINCREMENT_EXPR:
16646 case POSTDECREMENT_EXPR:
16647 case PREINCREMENT_EXPR:
16648 case PREDECREMENT_EXPR:
16649 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16650 fallback != fb_none,
16651 TREE_TYPE (*expr_p));
16652 break;
16654 case VIEW_CONVERT_EXPR:
16655 if ((fallback & fb_rvalue)
16656 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16657 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16659 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16660 post_p, is_gimple_val, fb_rvalue);
16661 recalculate_side_effects (*expr_p);
16662 break;
16664 /* Fallthru. */
16666 case ARRAY_REF:
16667 case ARRAY_RANGE_REF:
16668 case REALPART_EXPR:
16669 case IMAGPART_EXPR:
16670 case COMPONENT_REF:
16671 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16672 fallback ? fallback : fb_rvalue);
16673 break;
16675 case COND_EXPR:
16676 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16678 /* C99 code may assign to an array in a structure value of a
16679 conditional expression, and this has undefined behavior
16680 only on execution, so create a temporary if an lvalue is
16681 required. */
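/* Illustrative C99 example (not from the original source):
(c ? s1 : s2).a[0] = v; -- the conditional yields a struct
rvalue, so we copy it into an addressable temporary before
the array store. */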
16682 if (fallback == fb_lvalue)
16684 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16685 mark_addressable (*expr_p);
16686 ret = GS_OK;
16688 break;
16690 case CALL_EXPR:
16691 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16693 /* C99 code may assign to an array in a structure returned
16694 from a function, and this has undefined behavior only on
16695 execution, so create a temporary if an lvalue is
16696 required. */
16697 if (fallback == fb_lvalue)
16699 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16700 mark_addressable (*expr_p);
16701 ret = GS_OK;
16703 break;
16705 case TREE_LIST:
16706 gcc_unreachable ();
16708 case COMPOUND_EXPR:
16709 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16710 break;
16712 case COMPOUND_LITERAL_EXPR:
16713 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16714 gimple_test_f, fallback);
16715 break;
16717 case MODIFY_EXPR:
16718 case INIT_EXPR:
16719 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16720 fallback != fb_none);
16721 break;
16723 case TRUTH_ANDIF_EXPR:
16724 case TRUTH_ORIF_EXPR:
16726 /* Preserve the original type of the expression and the
16727 source location of the outer expression. */
16728 tree org_type = TREE_TYPE (*expr_p);
16729 *expr_p = gimple_boolify (*expr_p);
16730 *expr_p = build3_loc (input_location, COND_EXPR,
16731 org_type, *expr_p,
16732 fold_convert_loc
16733 (input_location,
16734 org_type, boolean_true_node),
16735 fold_convert_loc
16736 (input_location,
16737 org_type, boolean_false_node));
16738 ret = GS_OK;
16739 break;
16742 case TRUTH_NOT_EXPR:
16744 tree type = TREE_TYPE (*expr_p);
16745 /* The parsers are careful to generate TRUTH_NOT_EXPR
16746 only with operands that are always zero or one.
16747 We do not fold here but handle the only interesting case
16748 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
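/* Illustratively: for boolean-valued X, !X becomes ~X when the
boolified type has precision 1, and X ^ 1 otherwise; no
TRUTH_NOT_EXPR survives in either case. */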
16749 *expr_p = gimple_boolify (*expr_p);
16750 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16751 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16752 TREE_TYPE (*expr_p),
16753 TREE_OPERAND (*expr_p, 0));
16754 else
16755 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16756 TREE_TYPE (*expr_p),
16757 TREE_OPERAND (*expr_p, 0),
16758 build_int_cst (TREE_TYPE (*expr_p), 1));
16759 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16760 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16761 ret = GS_OK;
16762 break;
16765 case ADDR_EXPR:
16766 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16767 break;
16769 case ANNOTATE_EXPR:
16771 tree cond = TREE_OPERAND (*expr_p, 0);
16772 tree kind = TREE_OPERAND (*expr_p, 1);
16773 tree data = TREE_OPERAND (*expr_p, 2);
16774 tree type = TREE_TYPE (cond);
16775 if (!INTEGRAL_TYPE_P (type))
16777 *expr_p = cond;
16778 ret = GS_OK;
16779 break;
16781 tree tmp = create_tmp_var (type);
16782 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16783 gcall *call
16784 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16785 gimple_call_set_lhs (call, tmp);
16786 gimplify_seq_add_stmt (pre_p, call);
16787 *expr_p = tmp;
16788 ret = GS_ALL_DONE;
16789 break;
16792 case VA_ARG_EXPR:
16793 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16794 break;
16796 CASE_CONVERT:
16797 if (IS_EMPTY_STMT (*expr_p))
16799 ret = GS_ALL_DONE;
16800 break;
16803 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16804 || fallback == fb_none)
16806 /* Just strip a conversion to void (or in void context) and
16807 try again. */
16808 *expr_p = TREE_OPERAND (*expr_p, 0);
16809 ret = GS_OK;
16810 break;
16813 ret = gimplify_conversion (expr_p);
16814 if (ret == GS_ERROR)
16815 break;
16816 if (*expr_p != save_expr)
16817 break;
16818 /* FALLTHRU */
16820 case FIX_TRUNC_EXPR:
16821 /* unary_expr: ... | '(' cast ')' val | ... */
16822 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16823 is_gimple_val, fb_rvalue);
16824 recalculate_side_effects (*expr_p);
16825 break;
16827 case INDIRECT_REF:
16829 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16830 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16831 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16833 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16834 if (*expr_p != save_expr)
16836 ret = GS_OK;
16837 break;
16840 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16841 is_gimple_reg, fb_rvalue);
16842 if (ret == GS_ERROR)
16843 break;
16845 recalculate_side_effects (*expr_p);
16846 *expr_p = fold_build2_loc (input_location, MEM_REF,
16847 TREE_TYPE (*expr_p),
16848 TREE_OPERAND (*expr_p, 0),
16849 build_int_cst (saved_ptr_type, 0));
16850 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16851 TREE_THIS_NOTRAP (*expr_p) = notrap;
16852 ret = GS_OK;
16853 break;
16856 /* We arrive here through the various re-gimplification paths. */
16857 case MEM_REF:
16858 /* First try re-folding the whole thing. */
16859 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16860 TREE_OPERAND (*expr_p, 0),
16861 TREE_OPERAND (*expr_p, 1));
16862 if (tmp)
16864 REF_REVERSE_STORAGE_ORDER (tmp)
16865 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16866 *expr_p = tmp;
16867 recalculate_side_effects (*expr_p);
16868 ret = GS_OK;
16869 break;
16871 /* Avoid re-gimplifying the address operand if it is already
16872 in suitable form. Re-gimplifying would mark the address
16873 operand addressable. Always gimplify when not in SSA form
16874 as we still may have to gimplify decls with value-exprs. */
16875 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16876 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16878 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16879 is_gimple_mem_ref_addr, fb_rvalue);
16880 if (ret == GS_ERROR)
16881 break;
16883 recalculate_side_effects (*expr_p);
16884 ret = GS_ALL_DONE;
16885 break;
16887 /* Constants need not be gimplified. */
16888 case INTEGER_CST:
16889 case REAL_CST:
16890 case FIXED_CST:
16891 case STRING_CST:
16892 case COMPLEX_CST:
16893 case VECTOR_CST:
16894 /* Drop the overflow flag on constants; we do not want
16895 that in the GIMPLE IL. */
16896 if (TREE_OVERFLOW_P (*expr_p))
16897 *expr_p = drop_tree_overflow (*expr_p);
16898 ret = GS_ALL_DONE;
16899 break;
16901 case CONST_DECL:
16902 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16903 CONST_DECL node. Otherwise the decl is replaceable by its
16904 value. */
16905 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16906 if (fallback & fb_lvalue)
16907 ret = GS_ALL_DONE;
16908 else
16910 *expr_p = DECL_INITIAL (*expr_p);
16911 ret = GS_OK;
16913 break;
16915 case DECL_EXPR:
16916 ret = gimplify_decl_expr (expr_p, pre_p);
16917 break;
16919 case BIND_EXPR:
16920 ret = gimplify_bind_expr (expr_p, pre_p);
16921 break;
16923 case LOOP_EXPR:
16924 ret = gimplify_loop_expr (expr_p, pre_p);
16925 break;
16927 case SWITCH_EXPR:
16928 ret = gimplify_switch_expr (expr_p, pre_p);
16929 break;
16931 case EXIT_EXPR:
16932 ret = gimplify_exit_expr (expr_p);
16933 break;
16935 case GOTO_EXPR:
16936 /* If the target is not a LABEL_DECL, then it is a computed jump
16937 and the target needs to be gimplified. */
16938 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16940 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16941 NULL, is_gimple_val, fb_rvalue);
16942 if (ret == GS_ERROR)
16943 break;
16945 gimplify_seq_add_stmt (pre_p,
16946 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16947 ret = GS_ALL_DONE;
16948 break;
16950 case PREDICT_EXPR:
16951 gimplify_seq_add_stmt (pre_p,
16952 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16953 PREDICT_EXPR_OUTCOME (*expr_p)));
16954 ret = GS_ALL_DONE;
16955 break;
16957 case LABEL_EXPR:
16958 ret = gimplify_label_expr (expr_p, pre_p);
16959 label = LABEL_EXPR_LABEL (*expr_p);
16960 gcc_assert (decl_function_context (label) == current_function_decl);
16962 /* If the label is used in a goto statement, or the address of the
16963 label is taken, we need to unpoison all variables that were seen
16964 so far. Doing so prevents us from reporting false positives. */
16965 if (asan_poisoned_variables
16966 && asan_used_labels != NULL
16967 && asan_used_labels->contains (label)
16968 && !gimplify_omp_ctxp)
16969 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16970 break;
16972 case CASE_LABEL_EXPR:
16973 ret = gimplify_case_label_expr (expr_p, pre_p);
16975 if (gimplify_ctxp->live_switch_vars)
16976 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16977 pre_p);
16978 break;
16980 case RETURN_EXPR:
16981 ret = gimplify_return_expr (*expr_p, pre_p);
16982 break;
16984 case CONSTRUCTOR:
16985 /* Don't reduce this in place; let gimplify_init_constructor work its
16986 magic. But if we're just elaborating this for side effects, just
16987 gimplify any element that has side-effects. */
16988 if (fallback == fb_none)
16990 unsigned HOST_WIDE_INT ix;
16991 tree val;
16992 tree temp = NULL_TREE;
16993 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16994 if (TREE_SIDE_EFFECTS (val))
16995 append_to_statement_list (val, &temp);
16997 *expr_p = temp;
16998 ret = temp ? GS_OK : GS_ALL_DONE;
17000 /* C99 code may assign to an array in a constructed
17001 structure or union, and this has undefined behavior only
17002 on execution, so create a temporary if an lvalue is
17003 required. */
17004 else if (fallback == fb_lvalue)
17006 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
17007 mark_addressable (*expr_p);
17008 ret = GS_OK;
17010 else
17011 ret = GS_ALL_DONE;
17012 break;
17014 /* The following are special cases that are not handled by the
17015 original GIMPLE grammar. */
17017 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
17018 eliminated. */
17019 case SAVE_EXPR:
17020 ret = gimplify_save_expr (expr_p, pre_p, post_p);
17021 break;
17023 case BIT_FIELD_REF:
17024 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17025 post_p, is_gimple_lvalue, fb_either);
17026 recalculate_side_effects (*expr_p);
17027 break;
17029 case TARGET_MEM_REF:
17031 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
17033 if (TMR_BASE (*expr_p))
17034 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
17035 post_p, is_gimple_mem_ref_addr, fb_either);
17036 if (TMR_INDEX (*expr_p))
17037 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
17038 post_p, is_gimple_val, fb_rvalue);
17039 if (TMR_INDEX2 (*expr_p))
17040 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
17041 post_p, is_gimple_val, fb_rvalue);
17042 /* TMR_STEP and TMR_OFFSET are always integer constants. */
17043 ret = MIN (r0, r1);
17045 break;
17047 case NON_LVALUE_EXPR:
17048 /* This should have been stripped above. */
17049 gcc_unreachable ();
17051 case ASM_EXPR:
17052 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
17053 break;
17055 case TRY_FINALLY_EXPR:
17056 case TRY_CATCH_EXPR:
17058 gimple_seq eval, cleanup;
17059 gtry *try_;
17061 /* Calls to destructors are generated automatically in the FINALLY/CATCH
17062 block. They should have UNKNOWN_LOCATION as their location. However,
17063 gimplify_call_expr will reset such call stmts to input_location
17064 if it finds a stmt's location is unknown. To prevent that resetting
17065 for destructors, we set input_location to UNKNOWN_LOCATION here.
17066 Note that this only affects the destructor calls in the FINALLY/CATCH
17067 block; input_location is automatically restored to its original
17068 value by the end of gimplify_expr. */
17069 input_location = UNKNOWN_LOCATION;
17070 eval = cleanup = NULL;
17071 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
17072 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
17073 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
17075 gimple_seq n = NULL, e = NULL;
17076 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
17077 0), &n);
17078 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
17079 1), &e);
17080 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
17082 geh_else *stmt = gimple_build_eh_else (n, e);
17083 gimple_seq_add_stmt (&cleanup, stmt);
17086 else
17087 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
17088 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
17089 if (gimple_seq_empty_p (cleanup))
17091 gimple_seq_add_seq (pre_p, eval);
17092 ret = GS_ALL_DONE;
17093 break;
17095 try_ = gimple_build_try (eval, cleanup,
17096 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
17097 ? GIMPLE_TRY_FINALLY
17098 : GIMPLE_TRY_CATCH);
17099 if (EXPR_HAS_LOCATION (save_expr))
17100 gimple_set_location (try_, EXPR_LOCATION (save_expr));
17101 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
17102 gimple_set_location (try_, saved_location);
17103 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
17104 gimple_try_set_catch_is_cleanup (try_,
17105 TRY_CATCH_IS_CLEANUP (*expr_p));
17106 gimplify_seq_add_stmt (pre_p, try_);
17107 ret = GS_ALL_DONE;
17108 break;
17111 case CLEANUP_POINT_EXPR:
17112 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
17113 break;
17115 case TARGET_EXPR:
17116 ret = gimplify_target_expr (expr_p, pre_p, post_p);
17117 break;
17119 case CATCH_EXPR:
17121 gimple *c;
17122 gimple_seq handler = NULL;
17123 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
17124 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
17125 gimplify_seq_add_stmt (pre_p, c);
17126 ret = GS_ALL_DONE;
17127 break;
17130 case EH_FILTER_EXPR:
17132 gimple *ehf;
17133 gimple_seq failure = NULL;
17135 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
17136 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
17137 copy_warning (ehf, *expr_p);
17138 gimplify_seq_add_stmt (pre_p, ehf);
17139 ret = GS_ALL_DONE;
17140 break;
17143 case OBJ_TYPE_REF:
17145 enum gimplify_status r0, r1;
17146 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
17147 post_p, is_gimple_val, fb_rvalue);
17148 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
17149 post_p, is_gimple_val, fb_rvalue);
17150 TREE_SIDE_EFFECTS (*expr_p) = 0;
17151 ret = MIN (r0, r1);
17153 break;
17155 case LABEL_DECL:
17156 /* We get here when taking the address of a label. We mark
17157 the label as "forced", meaning it can never be removed and
17158 it is a potential target for any computed goto. */
17159 FORCED_LABEL (*expr_p) = 1;
17160 ret = GS_ALL_DONE;
17161 break;
17163 case STATEMENT_LIST:
17164 ret = gimplify_statement_list (expr_p, pre_p);
17165 break;
17167 case WITH_SIZE_EXPR:
17169 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17170 post_p == &internal_post ? NULL : post_p,
17171 gimple_test_f, fallback);
17172 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17173 is_gimple_val, fb_rvalue);
17174 ret = GS_ALL_DONE;
17176 break;
17178 case VAR_DECL:
17179 case PARM_DECL:
17180 ret = gimplify_var_or_parm_decl (expr_p);
17181 break;
17183 case RESULT_DECL:
17184 /* When within an OMP context, notice uses of variables. */
17185 if (gimplify_omp_ctxp)
17186 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
17187 ret = GS_ALL_DONE;
17188 break;
17190 case DEBUG_EXPR_DECL:
17191 gcc_unreachable ();
17193 case DEBUG_BEGIN_STMT:
17194 gimplify_seq_add_stmt (pre_p,
17195 gimple_build_debug_begin_stmt
17196 (TREE_BLOCK (*expr_p),
17197 EXPR_LOCATION (*expr_p)));
17198 ret = GS_ALL_DONE;
17199 *expr_p = NULL;
17200 break;
17202 case SSA_NAME:
17203 /* Allow callbacks into the gimplifier during optimization. */
17204 ret = GS_ALL_DONE;
17205 break;
17207 case OMP_PARALLEL:
17208 gimplify_omp_parallel (expr_p, pre_p);
17209 ret = GS_ALL_DONE;
17210 break;
17212 case OMP_TASK:
17213 gimplify_omp_task (expr_p, pre_p);
17214 ret = GS_ALL_DONE;
17215 break;
17217 case OMP_SIMD:
17219 /* Temporarily disable into_ssa, as scan_omp_simd
17220 (which calls copy_gimple_seq_and_replace_locals) can't properly
17221 deal with SSA_NAMEs defined outside of the body. */
17222 bool saved_into_ssa = gimplify_ctxp->into_ssa;
17223 gimplify_ctxp->into_ssa = false;
17224 ret = gimplify_omp_for (expr_p, pre_p);
17225 gimplify_ctxp->into_ssa = saved_into_ssa;
17226 break;
17229 case OMP_FOR:
17230 case OMP_DISTRIBUTE:
17231 case OMP_TASKLOOP:
17232 case OACC_LOOP:
17233 ret = gimplify_omp_for (expr_p, pre_p);
17234 break;
17236 case OMP_LOOP:
17237 ret = gimplify_omp_loop (expr_p, pre_p);
17238 break;
17240 case OACC_CACHE:
17241 gimplify_oacc_cache (expr_p, pre_p);
17242 ret = GS_ALL_DONE;
17243 break;
17245 case OACC_DECLARE:
17246 gimplify_oacc_declare (expr_p, pre_p);
17247 ret = GS_ALL_DONE;
17248 break;
17250 case OACC_HOST_DATA:
17251 case OACC_DATA:
17252 case OACC_KERNELS:
17253 case OACC_PARALLEL:
17254 case OACC_SERIAL:
17255 case OMP_SCOPE:
17256 case OMP_SECTIONS:
17257 case OMP_SINGLE:
17258 case OMP_TARGET:
17259 case OMP_TARGET_DATA:
17260 case OMP_TEAMS:
17261 gimplify_omp_workshare (expr_p, pre_p);
17262 ret = GS_ALL_DONE;
17263 break;
17265 case OACC_ENTER_DATA:
17266 case OACC_EXIT_DATA:
17267 case OACC_UPDATE:
17268 case OMP_TARGET_UPDATE:
17269 case OMP_TARGET_ENTER_DATA:
17270 case OMP_TARGET_EXIT_DATA:
17271 gimplify_omp_target_update (expr_p, pre_p);
17272 ret = GS_ALL_DONE;
17273 break;
17275 case OMP_SECTION:
17276 case OMP_STRUCTURED_BLOCK:
17277 case OMP_MASTER:
17278 case OMP_MASKED:
17279 case OMP_ORDERED:
17280 case OMP_CRITICAL:
17281 case OMP_SCAN:
17283 gimple_seq body = NULL;
17284 gimple *g;
17285 bool saved_in_omp_construct = in_omp_construct;
17287 in_omp_construct = true;
17288 gimplify_and_add (OMP_BODY (*expr_p), &body);
17289 in_omp_construct = saved_in_omp_construct;
17290 switch (TREE_CODE (*expr_p))
17292 case OMP_SECTION:
17293 g = gimple_build_omp_section (body);
17294 break;
17295 case OMP_STRUCTURED_BLOCK:
17296 g = gimple_build_omp_structured_block (body);
17297 break;
17298 case OMP_MASTER:
17299 g = gimple_build_omp_master (body);
17300 break;
17301 case OMP_ORDERED:
17302 g = gimplify_omp_ordered (*expr_p, body);
17303 if (OMP_BODY (*expr_p) == NULL_TREE
17304 && gimple_code (g) == GIMPLE_OMP_ORDERED)
17305 gimple_omp_ordered_standalone (g);
17306 break;
17307 case OMP_MASKED:
17308 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
17309 pre_p, ORT_WORKSHARE, OMP_MASKED);
17310 gimplify_adjust_omp_clauses (pre_p, body,
17311 &OMP_MASKED_CLAUSES (*expr_p),
17312 OMP_MASKED);
17313 g = gimple_build_omp_masked (body,
17314 OMP_MASKED_CLAUSES (*expr_p));
17315 break;
17316 case OMP_CRITICAL:
17317 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
17318 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
17319 gimplify_adjust_omp_clauses (pre_p, body,
17320 &OMP_CRITICAL_CLAUSES (*expr_p),
17321 OMP_CRITICAL);
17322 g = gimple_build_omp_critical (body,
17323 OMP_CRITICAL_NAME (*expr_p),
17324 OMP_CRITICAL_CLAUSES (*expr_p));
17325 break;
17326 case OMP_SCAN:
17327 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
17328 pre_p, ORT_WORKSHARE, OMP_SCAN);
17329 gimplify_adjust_omp_clauses (pre_p, body,
17330 &OMP_SCAN_CLAUSES (*expr_p),
17331 OMP_SCAN);
17332 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
17333 break;
17334 default:
17335 gcc_unreachable ();
17337 gimplify_seq_add_stmt (pre_p, g);
17338 ret = GS_ALL_DONE;
17339 break;
17342 case OMP_TASKGROUP:
17344 gimple_seq body = NULL;
17346 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
17347 bool saved_in_omp_construct = in_omp_construct;
17348 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
17349 OMP_TASKGROUP);
17350 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
17352 in_omp_construct = true;
17353 gimplify_and_add (OMP_BODY (*expr_p), &body);
17354 in_omp_construct = saved_in_omp_construct;
17355 gimple_seq cleanup = NULL;
17356 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
17357 gimple *g = gimple_build_call (fn, 0);
17358 gimple_seq_add_stmt (&cleanup, g);
17359 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
17360 body = NULL;
17361 gimple_seq_add_stmt (&body, g);
17362 g = gimple_build_omp_taskgroup (body, *pclauses);
17363 gimplify_seq_add_stmt (pre_p, g);
17364 ret = GS_ALL_DONE;
17365 break;
17368 case OMP_ATOMIC:
17369 case OMP_ATOMIC_READ:
17370 case OMP_ATOMIC_CAPTURE_OLD:
17371 case OMP_ATOMIC_CAPTURE_NEW:
17372 ret = gimplify_omp_atomic (expr_p, pre_p);
17373 break;
17375 case TRANSACTION_EXPR:
17376 ret = gimplify_transaction (expr_p, pre_p);
17377 break;
17379 case TRUTH_AND_EXPR:
17380 case TRUTH_OR_EXPR:
17381 case TRUTH_XOR_EXPR:
17383 tree orig_type = TREE_TYPE (*expr_p);
17384 tree new_type, xop0, xop1;
17385 *expr_p = gimple_boolify (*expr_p);
17386 new_type = TREE_TYPE (*expr_p);
17387 if (!useless_type_conversion_p (orig_type, new_type))
17389 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17390 ret = GS_OK;
17391 break;
17394 /* Boolified binary truth expressions are semantically equivalent
17395 to bitwise binary expressions. Canonicalize them to the
17396 bitwise variant. */
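/* Illustratively: TRUTH_AND_EXPR <a, b> on boolified operands
becomes BIT_AND_EXPR <a, b>; unlike TRUTH_ANDIF_EXPR, both
operands are evaluated, so the rewrite is safe. */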
17397 switch (TREE_CODE (*expr_p))
17399 case TRUTH_AND_EXPR:
17400 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17401 break;
17402 case TRUTH_OR_EXPR:
17403 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17404 break;
17405 case TRUTH_XOR_EXPR:
17406 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17407 break;
17408 default:
17409 break;
17411 /* Now make sure that the operands have types compatible with
17412 the expression's new_type. */
17413 xop0 = TREE_OPERAND (*expr_p, 0);
17414 xop1 = TREE_OPERAND (*expr_p, 1);
17415 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17416 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17417 new_type,
17418 xop0);
17419 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17420 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17421 new_type,
17422 xop1);
17423 /* Continue classified as tcc_binary. */
17424 goto expr_2;
17427 case VEC_COND_EXPR:
17428 goto expr_3;
17430 case VEC_PERM_EXPR:
17431 /* Classified as tcc_expression. */
17432 goto expr_3;
17434 case BIT_INSERT_EXPR:
17435 /* Argument 3 is a constant. */
17436 goto expr_2;
17438 case POINTER_PLUS_EXPR:
17440 enum gimplify_status r0, r1;
17441 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17442 post_p, is_gimple_val, fb_rvalue);
17443 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17444 post_p, is_gimple_val, fb_rvalue);
17445 recalculate_side_effects (*expr_p);
17446 ret = MIN (r0, r1);
17447 break;
17450 default:
17451 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17453 case tcc_comparison:
17454 /* Handle comparison of objects of non-scalar-mode aggregate type
17455 with a call to memcmp. It would be nice to only have to do
17456 this for variable-sized objects, but then we'd have to allow
17457 the same nest of reference nodes we allow for MODIFY_EXPR and
17458 that's too complex.
17460 Compare scalar mode aggregates as scalar mode values. Using
17461 memcmp for them would be very inefficient at best, and is
17462 plain wrong if bitfields are involved. */
17463 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17464 ret = GS_ERROR;
17465 else
17467 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17469 /* Vector comparisons need no boolification. */
17470 if (TREE_CODE (type) == VECTOR_TYPE)
17471 goto expr_2;
17472 else if (!AGGREGATE_TYPE_P (type))
17474 tree org_type = TREE_TYPE (*expr_p);
17475 *expr_p = gimple_boolify (*expr_p);
17476 if (!useless_type_conversion_p (org_type,
17477 TREE_TYPE (*expr_p)))
17479 *expr_p = fold_convert_loc (input_location,
17480 org_type, *expr_p);
17481 ret = GS_OK;
17483 else
17484 goto expr_2;
17486 else if (TYPE_MODE (type) != BLKmode)
17487 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17488 else
17489 ret = gimplify_variable_sized_compare (expr_p);
17491 break;
17493 /* If *EXPR_P does not need to be special-cased, handle it
17494 according to its class. */
17495 case tcc_unary:
17496 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17497 post_p, is_gimple_val, fb_rvalue);
17498 break;
17500 case tcc_binary:
17501 expr_2:
17503 enum gimplify_status r0, r1;
17505 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17506 post_p, is_gimple_val, fb_rvalue);
17507 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17508 post_p, is_gimple_val, fb_rvalue);
17510 ret = MIN (r0, r1);
17511 break;
17514 expr_3:
17516 enum gimplify_status r0, r1, r2;
17518 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17519 post_p, is_gimple_val, fb_rvalue);
17520 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17521 post_p, is_gimple_val, fb_rvalue);
17522 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17523 post_p, is_gimple_val, fb_rvalue);
17525 ret = MIN (MIN (r0, r1), r2);
17526 break;
17529 case tcc_declaration:
17530 case tcc_constant:
17531 ret = GS_ALL_DONE;
17532 goto dont_recalculate;
17534 default:
17535 gcc_unreachable ();
17538 recalculate_side_effects (*expr_p);
17540 dont_recalculate:
17541 break;
17544 gcc_assert (*expr_p || ret != GS_OK);
17546 while (ret == GS_OK);
17548 /* If we encountered an error_mark somewhere nested inside, either
17549 stub out the statement or propagate the error back out. */
17550 if (ret == GS_ERROR)
17552 if (is_statement)
17553 *expr_p = NULL;
17554 goto out;
17557 /* This was only valid as a return value from the langhook, which
17558 we handled. Make sure it doesn't escape from any other context. */
17559 gcc_assert (ret != GS_UNHANDLED);
17561 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17563 /* We aren't looking for a value, and we don't have a valid
17564 statement. If it doesn't have side-effects, throw it away.
17565 We can also get here with code such as "*&&L;", where L is
17566 a LABEL_DECL that is marked as FORCED_LABEL. */
17567 if (TREE_CODE (*expr_p) == LABEL_DECL
17568 || !TREE_SIDE_EFFECTS (*expr_p))
17569 *expr_p = NULL;
17570 else if (!TREE_THIS_VOLATILE (*expr_p))
17572 /* This is probably a _REF that contains something nested that
17573 has side effects. Recurse through the operands to find it. */
17574 enum tree_code code = TREE_CODE (*expr_p);
17576 switch (code)
17578 case COMPONENT_REF:
17579 case REALPART_EXPR:
17580 case IMAGPART_EXPR:
17581 case VIEW_CONVERT_EXPR:
17582 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17583 gimple_test_f, fallback);
17584 break;
17586 case ARRAY_REF:
17587 case ARRAY_RANGE_REF:
17588 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17589 gimple_test_f, fallback);
17590 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17591 gimple_test_f, fallback);
17592 break;
17594 default:
17595 /* Anything else with side-effects must be converted to
17596 a valid statement before we get here. */
17597 gcc_unreachable ();
17600 *expr_p = NULL;
17602 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17603 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17604 && !is_empty_type (TREE_TYPE (*expr_p)))
17606 /* Historically, the compiler has treated a bare reference
17607 to a non-BLKmode volatile lvalue as forcing a load. */
17608 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17610 /* Normally, we do not want to create a temporary for a
17611 TREE_ADDRESSABLE type because such a type should not be
17612 copied by bitwise-assignment. However, we make an
17613 exception here, as all we are doing here is ensuring that
17614 we read the bytes that make up the type. We use
17615 create_tmp_var_raw because create_tmp_var will abort when
17616 given a TREE_ADDRESSABLE type. */
17617 tree tmp = create_tmp_var_raw (type, "vol");
17618 gimple_add_tmp_var (tmp);
17619 gimplify_assign (tmp, *expr_p, pre_p);
17620 *expr_p = NULL;
17622 else
17623 /* We can't do anything useful with a volatile reference to
17624 an incomplete type, so just throw it away. Likewise for
17625 a BLKmode type, since any implicit inner load should
17626 already have been turned into an explicit one by the
17627 gimplification process. */
17628 *expr_p = NULL;
17631 /* If we are gimplifying at the statement level, we're done. Tack
17632 everything together and return. */
17633 if (fallback == fb_none || is_statement)
17635 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17636 it out for GC to reclaim it. */
17637 *expr_p = NULL_TREE;
17639 if (!gimple_seq_empty_p (internal_pre)
17640 || !gimple_seq_empty_p (internal_post))
17642 gimplify_seq_add_seq (&internal_pre, internal_post);
17643 gimplify_seq_add_seq (pre_p, internal_pre);
17646 /* The result of gimplifying *EXPR_P is going to be the last few
17647 statements in *PRE_P and *POST_P. Add location information
17648 to all the statements that were added by the gimplification
17649 helpers. */
17650 if (!gimple_seq_empty_p (*pre_p))
17651 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17653 if (!gimple_seq_empty_p (*post_p))
17654 annotate_all_with_location_after (*post_p, post_last_gsi,
17655 input_location);
17657 goto out;
17660 #ifdef ENABLE_GIMPLE_CHECKING
17661 if (*expr_p)
17663 enum tree_code code = TREE_CODE (*expr_p);
17664 /* These expressions should already be in gimple IR form. */
17665 gcc_assert (code != MODIFY_EXPR
17666 && code != ASM_EXPR
17667 && code != BIND_EXPR
17668 && code != CATCH_EXPR
17669 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17670 && code != EH_FILTER_EXPR
17671 && code != GOTO_EXPR
17672 && code != LABEL_EXPR
17673 && code != LOOP_EXPR
17674 && code != SWITCH_EXPR
17675 && code != TRY_FINALLY_EXPR
17676 && code != EH_ELSE_EXPR
17677 && code != OACC_PARALLEL
17678 && code != OACC_KERNELS
17679 && code != OACC_SERIAL
17680 && code != OACC_DATA
17681 && code != OACC_HOST_DATA
17682 && code != OACC_DECLARE
17683 && code != OACC_UPDATE
17684 && code != OACC_ENTER_DATA
17685 && code != OACC_EXIT_DATA
17686 && code != OACC_CACHE
17687 && code != OMP_CRITICAL
17688 && code != OMP_FOR
17689 && code != OACC_LOOP
17690 && code != OMP_MASTER
17691 && code != OMP_MASKED
17692 && code != OMP_TASKGROUP
17693 && code != OMP_ORDERED
17694 && code != OMP_PARALLEL
17695 && code != OMP_SCAN
17696 && code != OMP_SECTIONS
17697 && code != OMP_SECTION
17698 && code != OMP_STRUCTURED_BLOCK
17699 && code != OMP_SINGLE
17700 && code != OMP_SCOPE);
17702 #endif
17704 /* Otherwise we're gimplifying a subexpression, so the resulting
17705 value is interesting. If it's a valid operand that matches
17706 GIMPLE_TEST_F, we're done. Unless we are handling some
17707 post-effects internally; if that's the case, we need to copy into
17708 a temporary before adding the post-effects to POST_P. */
17709 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17710 goto out;
17712 /* Otherwise, we need to create a new temporary for the gimplified
17713 expression. */
17715 /* We can't return an lvalue if we have an internal postqueue. The
17716 object the lvalue refers to would (probably) be modified by the
17717 postqueue; we need to copy the value out first, which means an
17718 rvalue. */
17719 if ((fallback & fb_lvalue)
17720 && gimple_seq_empty_p (internal_post)
17721 && is_gimple_addressable (*expr_p))
17723 /* An lvalue will do. Take the address of the expression, store it
17724 in a temporary, and replace the expression with an INDIRECT_REF of
17725 that temporary. */
17726 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17727 unsigned int ref_align = get_object_alignment (*expr_p);
17728 tree ref_type = TREE_TYPE (*expr_p);
17729 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17730 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17731 if (TYPE_ALIGN (ref_type) != ref_align)
17732 ref_type = build_aligned_type (ref_type, ref_align);
17733 *expr_p = build2 (MEM_REF, ref_type,
17734 tmp, build_zero_cst (ref_alias_type));
17736 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17738 /* An rvalue will do. Assign the gimplified expression into a
17739 new temporary TMP and replace the original expression with
17740 TMP. First, make sure that the expression has a type so that
17741 it can be assigned into a temporary. */
17742 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17743 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17745 else
17747 #ifdef ENABLE_GIMPLE_CHECKING
17748 if (!(fallback & fb_mayfail))
17750 fprintf (stderr, "gimplification failed:\n");
17751 print_generic_expr (stderr, *expr_p);
17752 debug_tree (*expr_p);
17753 internal_error ("gimplification failed");
17755 #endif
17756 gcc_assert (fallback & fb_mayfail);
17758 /* If this is an asm statement, and the user asked for the
17759 impossible, don't die. Fail and let gimplify_asm_expr
17760 issue an error. */
17761 ret = GS_ERROR;
17762 goto out;
17765 /* Make sure the temporary matches our predicate. */
17766 gcc_assert ((*gimple_test_f) (*expr_p));
17768 if (!gimple_seq_empty_p (internal_post))
17770 annotate_all_with_location (internal_post, input_location);
17771 gimplify_seq_add_seq (pre_p, internal_post);
17774 out:
17775 input_location = saved_location;
17776 return ret;
17779 /* Like gimplify_expr, but make sure the gimplified result is not itself
17780 an SSA name (it is copied to a decl if it would be). Temporaries
17781 required by evaluating *EXPR_P may still be SSA names. */
17783 static enum gimplify_status
17784 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17785 bool (*gimple_test_f) (tree), fallback_t fallback,
17786 bool allow_ssa)
17788 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17789 gimple_test_f, fallback);
17790 if (! allow_ssa
17791 && TREE_CODE (*expr_p) == SSA_NAME)
17792 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17793 return ret;
17796 /* Look through TYPE for variable-sized objects and gimplify each such
17797 size that we find. Add to LIST_P any statements generated. */
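/* Illustrative example (not from the original source): for a C99 VLA
   declaration such as "char buf[n + 1];", the bound n + 1 and the
   computed TYPE_SIZE contain non-constant expressions that are
   gimplified here, so later uses of the size are valid GIMPLE
   operands.  */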
17799 void
17800 gimplify_type_sizes (tree type, gimple_seq *list_p)
17802 if (type == NULL || type == error_mark_node)
17803 return;
17805 const bool ignored_p
17806 = TYPE_NAME (type)
17807 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17808 && DECL_IGNORED_P (TYPE_NAME (type));
17809 tree t;
17811 /* We first do the main variant, then copy into any other variants. */
17812 type = TYPE_MAIN_VARIANT (type);
17814 /* Avoid infinite recursion. */
17815 if (TYPE_SIZES_GIMPLIFIED (type))
17816 return;
17818 TYPE_SIZES_GIMPLIFIED (type) = 1;
17820 switch (TREE_CODE (type))
17822 case INTEGER_TYPE:
17823 case ENUMERAL_TYPE:
17824 case BOOLEAN_TYPE:
17825 case REAL_TYPE:
17826 case FIXED_POINT_TYPE:
17827 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17828 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17830 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17832 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17833 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17835 break;
17837 case ARRAY_TYPE:
17838 /* These types may not have declarations, so handle them here. */
17839 gimplify_type_sizes (TREE_TYPE (type), list_p);
17840 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17841 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
17842 with assigned stack slots; for -O1+ -g they should be tracked
17843 by VTA. */
17844 if (!ignored_p
17845 && TYPE_DOMAIN (type)
17846 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17848 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17849 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17850 DECL_IGNORED_P (t) = 0;
17851 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17852 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17853 DECL_IGNORED_P (t) = 0;
17855 break;
17857 case RECORD_TYPE:
17858 case UNION_TYPE:
17859 case QUAL_UNION_TYPE:
17860 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17861 if (TREE_CODE (field) == FIELD_DECL)
17863 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17864 /* Likewise, ensure variable offsets aren't removed. */
17865 if (!ignored_p
17866 && (t = DECL_FIELD_OFFSET (field))
17867 && VAR_P (t)
17868 && DECL_ARTIFICIAL (t))
17869 DECL_IGNORED_P (t) = 0;
17870 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17871 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17872 gimplify_type_sizes (TREE_TYPE (field), list_p);
17874 break;
17876 case POINTER_TYPE:
17877 case REFERENCE_TYPE:
17878 /* We used to recurse on the pointed-to type here, which turned out to
17879 be incorrect because its definition might refer to variables not
17880 yet initialized at this point if a forward declaration is involved.
17882 It was actually useful for anonymous pointed-to types to ensure
17883 that the sizes evaluation dominates every possible later use of the
17884 values. Restricting to such types here would be safe since there
17885 is no possible forward declaration around, but would introduce an
17886 undesirable middle-end semantic to anonymity. We then defer to
17887 front-ends the responsibility of ensuring that the sizes are
17888 evaluated both early and late enough, e.g. by attaching artificial
17889 type declarations to the tree. */
17890 break;
17892 default:
17893 break;
17896 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17897 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17899 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17901 TYPE_SIZE (t) = TYPE_SIZE (type);
17902 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17903 TYPE_SIZES_GIMPLIFIED (t) = 1;
17907 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17908 a size or position, has had all of its SAVE_EXPRs evaluated.
17909 We add any required statements to *STMT_P. */
17911 void
17912 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17914 tree expr = *expr_p;
17916 /* We don't do anything if the value isn't there, is constant, or contains
17917 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17918 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17919 will want to replace it with a new variable, but that will cause problems
17920 if this type is from outside the function. It's OK to have that here. */
17921 if (expr == NULL_TREE
17922 || is_gimple_constant (expr)
17923 || VAR_P (expr)
17924 || CONTAINS_PLACEHOLDER_P (expr))
17925 return;
17927 *expr_p = unshare_expr (expr);
17929 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17930 if the def vanishes. */
17931 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17933 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17934 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
17935 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17936 if (is_gimple_constant (*expr_p))
17937 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17940 /* Gimplify the statement body of FNDECL and return a GIMPLE_BIND node
17941 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17942 is true, also gimplify the parameters. */
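/* For example, gimplify_function_tree below calls
   "bind = gimplify_body (fndecl, true)" so that the parameters are
   gimplified together with the body.  */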
17944 gbind *
17945 gimplify_body (tree fndecl, bool do_parms)
17947 location_t saved_location = input_location;
17948 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17949 gimple *outer_stmt;
17950 gbind *outer_bind;
17952 timevar_push (TV_TREE_GIMPLIFY);
17954 init_tree_ssa (cfun);
17956 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17957 gimplification. */
17958 default_rtl_profile ();
17960 gcc_assert (gimplify_ctxp == NULL);
17961 push_gimplify_context (true);
17963 if (flag_openacc || flag_openmp)
17965 gcc_assert (gimplify_omp_ctxp == NULL);
17966 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17967 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17970 /* Unshare most shared trees in the body and in that of any nested functions.
17971 It would seem we don't have to do this for nested functions because
17972 they are supposed to be output and then the outer function gimplified
17973 first, but the g++ front end doesn't always do it that way. */
17974 unshare_body (fndecl);
17975 unvisit_body (fndecl);
17977 /* Make sure input_location isn't set to something weird. */
17978 input_location = DECL_SOURCE_LOCATION (fndecl);
17980 /* Resolve callee-copies. This has to be done before processing
17981 the body so that DECL_VALUE_EXPR gets processed correctly. */
17982 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17984 /* Gimplify the function's body. */
17985 seq = NULL;
17986 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17987 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17988 if (!outer_stmt)
17990 outer_stmt = gimple_build_nop ();
17991 gimplify_seq_add_stmt (&seq, outer_stmt);
17994 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17995 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17996 if (gimple_code (outer_stmt) == GIMPLE_BIND
17997 && (gimple_seq_first_nondebug_stmt (seq)
17998 == gimple_seq_last_nondebug_stmt (seq)))
18000 outer_bind = as_a <gbind *> (outer_stmt);
18001 if (gimple_seq_first_stmt (seq) != outer_stmt
18002 || gimple_seq_last_stmt (seq) != outer_stmt)
18004 /* If there are debug stmts before or after outer_stmt, move them
18005 inside of outer_bind body. */
18006 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
18007 gimple_seq second_seq = NULL;
18008 if (gimple_seq_first_stmt (seq) != outer_stmt
18009 && gimple_seq_last_stmt (seq) != outer_stmt)
18011 second_seq = gsi_split_seq_after (gsi);
18012 gsi_remove (&gsi, false);
18014 else if (gimple_seq_first_stmt (seq) != outer_stmt)
18015 gsi_remove (&gsi, false);
18016 else
18018 gsi_remove (&gsi, false);
18019 second_seq = seq;
18020 seq = NULL;
18022 gimple_seq_add_seq_without_update (&seq,
18023 gimple_bind_body (outer_bind));
18024 gimple_seq_add_seq_without_update (&seq, second_seq);
18025 gimple_bind_set_body (outer_bind, seq);
18028 else
18029 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
18031 DECL_SAVED_TREE (fndecl) = NULL_TREE;
18033 /* If we had callee-copies statements, insert them at the beginning
18034 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
18035 if (!gimple_seq_empty_p (parm_stmts))
18037 tree parm;
18039 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
18040 if (parm_cleanup)
18042 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
18043 GIMPLE_TRY_FINALLY);
18044 parm_stmts = NULL;
18045 gimple_seq_add_stmt (&parm_stmts, g);
18047 gimple_bind_set_body (outer_bind, parm_stmts);
18049 for (parm = DECL_ARGUMENTS (current_function_decl);
18050 parm; parm = DECL_CHAIN (parm))
18051 if (DECL_HAS_VALUE_EXPR_P (parm))
18053 DECL_HAS_VALUE_EXPR_P (parm) = 0;
18054 DECL_IGNORED_P (parm) = 0;
18058 if ((flag_openacc || flag_openmp || flag_openmp_simd)
18059 && gimplify_omp_ctxp)
18061 delete_omp_context (gimplify_omp_ctxp);
18062 gimplify_omp_ctxp = NULL;
18065 pop_gimplify_context (outer_bind);
18066 gcc_assert (gimplify_ctxp == NULL);
18068 if (flag_checking && !seen_error ())
18069 verify_gimple_in_seq (gimple_bind_body (outer_bind));
18071 timevar_pop (TV_TREE_GIMPLIFY);
18072 input_location = saved_location;
18074 return outer_bind;
18077 typedef char *char_p; /* For DEF_VEC_P. */
18079 /* Return whether we should exclude FNDECL from instrumentation. */
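/* Illustrative example:
   -finstrument-functions-exclude-function-list=foo,bar excludes any
   function whose printable name contains "foo" or "bar" as a
   substring; -finstrument-functions-exclude-file-list matches
   DECL_SOURCE_FILE the same way (see the strstr checks below).  */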
18081 static bool
18082 flag_instrument_functions_exclude_p (tree fndecl)
18084 vec<char_p> *v;
18086 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
18087 if (v && v->length () > 0)
18089 const char *name;
18090 int i;
18091 char *s;
18093 name = lang_hooks.decl_printable_name (fndecl, 1);
18094 FOR_EACH_VEC_ELT (*v, i, s)
18095 if (strstr (name, s) != NULL)
18096 return true;
18099 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
18100 if (v && v->length () > 0)
18102 const char *name;
18103 int i;
18104 char *s;
18106 name = DECL_SOURCE_FILE (fndecl);
18107 FOR_EACH_VEC_ELT (*v, i, s)
18108 if (strstr (name, s) != NULL)
18109 return true;
18112 return false;
18115 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
18116 If COND_VAR is not NULL, it is a boolean variable guarding the call to
18117 the instrumentation function. If STMT is not NULL, it is a statement
18118 to be executed just before the call to the instrumentation function. */
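/* Illustrative sketch of the guarded shape built below when COND_VAR
   is non-NULL (label_true/label_false are the labels created in this
   function; a cold-label prediction after label_true is omitted here):

     if (cond_var == 0) goto label_true; else goto label_false;
     label_true:
       [STMT, if any]
       return_addr = __builtin_return_address (0);
       <FNCODE> (this_fn_addr, return_addr);
     label_false:

   Without COND_VAR, the same calls are emitted unconditionally.  */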
18120 static void
18121 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
18122 tree cond_var, gimple *stmt)
18124 /* The instrumentation hooks aren't going to call the instrumented
18125 function and the address they receive is expected to be matchable
18126 against symbol addresses. Make sure we don't create a trampoline,
18127 in case the current function is nested. */
18128 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
18129 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
18131 tree label_true, label_false;
18132 if (cond_var)
18134 label_true = create_artificial_label (UNKNOWN_LOCATION);
18135 label_false = create_artificial_label (UNKNOWN_LOCATION);
18136 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
18137 label_true, label_false);
18138 gimplify_seq_add_stmt (seq, cond);
18139 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
18140 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
18141 NOT_TAKEN));
18144 if (stmt)
18145 gimplify_seq_add_stmt (seq, stmt);
18147 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
18148 gcall *call = gimple_build_call (x, 1, integer_zero_node);
18149 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
18150 gimple_call_set_lhs (call, tmp_var);
18151 gimplify_seq_add_stmt (seq, call);
18152 x = builtin_decl_implicit (fncode);
18153 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
18154 gimplify_seq_add_stmt (seq, call);
18156 if (cond_var)
18157 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
18160 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
18161 node for the function we want to gimplify.
18163 The sequence of GIMPLE statements corresponding to the body of
18164 FNDECL is stored as the function's GIMPLE body. */
void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      gimple_seq body = NULL, cleanup = NULL;
      gassign *assign;
      tree cond_var;

      /* If -finstrument-functions-once is specified, generate:

	   static volatile bool C.0 = false;
	   bool tmp_called;

	   tmp_called = C.0;
	   if (!tmp_called)
	     {
	       C.0 = true;
	       [call profiling enter function]
	     }

	 without specific protection for data races.  */
      if (flag_instrument_function_entry_exit > 1)
	{
	  tree first_var
	    = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			  VAR_DECL,
			  create_tmp_var_name ("C"),
			  boolean_type_node);
	  DECL_ARTIFICIAL (first_var) = 1;
	  DECL_IGNORED_P (first_var) = 1;
	  TREE_STATIC (first_var) = 1;
	  TREE_THIS_VOLATILE (first_var) = 1;
	  TREE_USED (first_var) = 1;
	  DECL_INITIAL (first_var) = boolean_false_node;
	  varpool_node::add (first_var);

	  cond_var = create_tmp_var (boolean_type_node, "tmp_called");
	  assign = gimple_build_assign (cond_var, first_var);
	  gimplify_seq_add_stmt (&body, assign);

	  assign = gimple_build_assign (first_var, boolean_true_node);
	}
      else
	{
	  cond_var = NULL_TREE;
	  assign = NULL;
	}

      build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
				  cond_var, assign);

      /* If -finstrument-functions-once is specified, generate:

	   if (!tmp_called)
	     [call profiling exit function]

	 without specific protection for data races.  */
      build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
				  cond_var, NULL);

      gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
      gimplify_seq_add_stmt (&body, tf);
      gbind *new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
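
/* Illustrative sketch (not part of the original source): with
   -finstrument-functions, the gimplified body produced above is
   shaped as

     __cyg_profile_func_enter (&fn, __builtin_return_address (0));
     try
       {
	 <original body>
       }
     finally
       {
	 __cyg_profile_func_exit (&fn, __builtin_return_address (0));
       }

   The try/finally wrapper guarantees the exit hook runs on every path
   out of the function, including exceptional ones.  */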
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
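
/* Editorial note (not in the original source): the MEM_REF built above
   is in effect "*(TYPE *)0", a dereference of a null constant.  It only
   appears in dead or already-diagnosed code, so that downstream passes
   still see an expression of the right type and mode.  */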
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */
enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
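
/* Illustrative example (not part of the original source) of the
   promotion diagnostic above:

     char c = va_arg (ap, char);         // warns: 'char' is promoted
					 // to 'int' through '...'
     char c = (char) va_arg (ap, int);   // correct

   Variadic arguments undergo the default argument promotions at the
   call site, so requesting the unpromoted type from va_arg has
   undefined behavior.  */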
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */
gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
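
/* Illustrative usage sketch (not part of the original source); LHS, A
   and B stand for previously built trees of compatible types:

     gimple_seq seq = NULL;
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), a, b);
     gimple *last = gimplify_assign (lhs, sum, &seq);

   Gimplifying SUM may append temporaries and helper statements to SEQ;
   LAST is the trailing GIMPLE_ASSIGN that finally stores into LHS.  */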
inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}
inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}