gcc/gimplify.cc
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2023 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
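/* As a rough illustration of this lowering (an illustrative example,
   not a verbatim dump): a GENERIC assignment such as

     a = b + c * d;

   is flattened into GIMPLE tuples whose operands are registers or
   constants, introducing a temporary for the inner subexpression:

     _1 = c * d;
     a = b + _1;  */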
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-iterator.h"
46 #include "gimple-fold.h"
47 #include "tree-eh.h"
48 #include "gimplify.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "tree-hash-traits.h"
57 #include "omp-general.h"
58 #include "omp-low.h"
59 #include "gimple-low.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "dbgcnt.h"
69 #include "omp-offload.h"
70 #include "context.h"
71 #include "tree-nested.h"
73 /* Hash set of poisoned variables in a bind expr. */
74 static hash_set<tree> *asan_poisoned_variables = NULL;
76 enum gimplify_omp_var_data
78 GOVD_SEEN = 0x000001,
79 GOVD_EXPLICIT = 0x000002,
80 GOVD_SHARED = 0x000004,
81 GOVD_PRIVATE = 0x000008,
82 GOVD_FIRSTPRIVATE = 0x000010,
83 GOVD_LASTPRIVATE = 0x000020,
84 GOVD_REDUCTION = 0x000040,
85 GOVD_LOCAL = 0x000080,
86 GOVD_MAP = 0x000100,
87 GOVD_DEBUG_PRIVATE = 0x000200,
88 GOVD_PRIVATE_OUTER_REF = 0x000400,
89 GOVD_LINEAR = 0x000800,
90 GOVD_ALIGNED = 0x001000,
92 /* Flag for GOVD_MAP: don't copy back. */
93 GOVD_MAP_TO_ONLY = 0x002000,
95 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
96 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,
98 GOVD_MAP_0LEN_ARRAY = 0x008000,
100 /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping. */
101 GOVD_MAP_ALWAYS_TO = 0x010000,
103 /* Flag for shared vars that are or might be stored to in the region. */
104 GOVD_WRITTEN = 0x020000,
106 /* Flag for GOVD_MAP, if it is a forced mapping. */
107 GOVD_MAP_FORCE = 0x040000,
109 /* Flag for GOVD_MAP: must be present already. */
110 GOVD_MAP_FORCE_PRESENT = 0x080000,
112 /* Flag for GOVD_MAP: only allocate. */
113 GOVD_MAP_ALLOC_ONLY = 0x100000,
115 /* Flag for GOVD_MAP: only copy back. */
116 GOVD_MAP_FROM_ONLY = 0x200000,
118 GOVD_NONTEMPORAL = 0x400000,
120 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
121 GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,
123 GOVD_CONDTEMP = 0x1000000,
125 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
126 GOVD_REDUCTION_INSCAN = 0x2000000,
128 /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
129 GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,
131 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
132 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
133 | GOVD_LOCAL)
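/* The low GOVD_* bits record the data-sharing class of a decl and the
   remaining bits qualify it.  For instance (illustrative), a variable
   named in an explicit firstprivate clause and then referenced in the
   region is recorded in the context's splay tree as

     GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN  */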
137 enum omp_region_type
139 ORT_WORKSHARE = 0x00,
140 ORT_TASKGROUP = 0x01,
141 ORT_SIMD = 0x04,
143 ORT_PARALLEL = 0x08,
144 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
146 ORT_TASK = 0x10,
147 ORT_UNTIED_TASK = ORT_TASK | 1,
148 ORT_TASKLOOP = ORT_TASK | 2,
149 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
151 ORT_TEAMS = 0x20,
152 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
153 ORT_HOST_TEAMS = ORT_TEAMS | 2,
154 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
156 /* Data region. */
157 ORT_TARGET_DATA = 0x40,
159 /* Data region with offloading. */
160 ORT_TARGET = 0x80,
161 ORT_COMBINED_TARGET = ORT_TARGET | 1,
162 ORT_IMPLICIT_TARGET = ORT_TARGET | 2,
164 /* OpenACC variants. */
165 ORT_ACC = 0x100, /* A generic OpenACC region. */
166 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
167 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct. */
168 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
169 ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4, /* Serial construct. */
170 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
172 /* Dummy OpenMP region, used to disable expansion of
173 DECL_VALUE_EXPRs in taskloop pre body. */
174 ORT_NONE = 0x200
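/* For instance (illustrative), gimplifying a combined construct such as

     #pragma omp parallel for

   creates an ORT_COMBINED_PARALLEL context for the parallel part,
   whereas a standalone "#pragma omp parallel" gets ORT_PARALLEL.  */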
177 /* Gimplify hashtable helper. */
179 struct gimplify_hasher : free_ptr_hash <elt_t>
181 static inline hashval_t hash (const elt_t *);
182 static inline bool equal (const elt_t *, const elt_t *);
185 struct gimplify_ctx
187 struct gimplify_ctx *prev_context;
189 vec<gbind *> bind_expr_stack;
190 tree temps;
191 gimple_seq conditional_cleanups;
192 tree exit_label;
193 tree return_temp;
195 vec<tree> case_labels;
196 hash_set<tree> *live_switch_vars;
197 /* The formal temporary table. Should this be persistent? */
198 hash_table<gimplify_hasher> *temp_htab;
200 int conditions;
201 unsigned into_ssa : 1;
202 unsigned allow_rhs_cond_expr : 1;
203 unsigned in_cleanup_point_expr : 1;
204 unsigned keep_stack : 1;
205 unsigned save_stack : 1;
206 unsigned in_switch_expr : 1;
209 enum gimplify_defaultmap_kind
211 GDMK_SCALAR,
212 GDMK_SCALAR_TARGET, /* With Fortran's TARGET attr, implicit mapping only. */
213 GDMK_AGGREGATE,
214 GDMK_ALLOCATABLE,
215 GDMK_POINTER
218 struct gimplify_omp_ctx
220 struct gimplify_omp_ctx *outer_context;
221 splay_tree variables;
222 hash_set<tree> *privatized_types;
223 tree clauses;
224 /* Iteration variables in an OMP_FOR. */
225 vec<tree> loop_iter_var;
226 location_t location;
227 enum omp_clause_default_kind default_kind;
228 enum omp_region_type region_type;
229 enum tree_code code;
230 bool combined_loop;
231 bool distribute;
232 bool target_firstprivatize_array_bases;
233 bool add_safelen1;
234 bool order_concurrent;
235 bool has_depend;
236 bool in_for_exprs;
237 int defaultmap[5];
240 static struct gimplify_ctx *gimplify_ctxp;
241 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
242 static bool in_omp_construct;
244 /* Forward declaration. */
245 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
246 static hash_map<tree, tree> *oacc_declare_returns;
247 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
248 bool (*) (tree), fallback_t, bool);
249 static void prepare_gimple_addressable (tree *, gimple_seq *);
251 /* Shorter alias name for the above function for use in gimplify.cc
252 only. */
254 static inline void
255 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
257 gimple_seq_add_stmt_without_update (seq_p, gs);
260 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
261 NULL, a new sequence is allocated. This function is
262 similar to gimple_seq_add_seq, but does not scan the operands.
263 During gimplification, we need to manipulate statement sequences
264 before the def/use vectors have been constructed. */
266 static void
267 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
269 gimple_stmt_iterator si;
271 if (src == NULL)
272 return;
274 si = gsi_last (*dst_p);
275 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
279 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
280 and popping gimplify contexts. */
282 static struct gimplify_ctx *ctx_pool = NULL;
284 /* Return a gimplify context struct from the pool. */
286 static inline struct gimplify_ctx *
287 ctx_alloc (void)
289 struct gimplify_ctx * c = ctx_pool;
291 if (c)
292 ctx_pool = c->prev_context;
293 else
294 c = XNEW (struct gimplify_ctx);
296 memset (c, '\0', sizeof (*c));
297 return c;
300 /* Put gimplify context C back into the pool. */
302 static inline void
303 ctx_free (struct gimplify_ctx *c)
305 c->prev_context = ctx_pool;
306 ctx_pool = c;
309 /* Free allocated ctx stack memory. */
311 void
312 free_gimplify_stack (void)
314 struct gimplify_ctx *c;
316 while ((c = ctx_pool))
318 ctx_pool = c->prev_context;
319 free (c);
324 /* Set up a context for the gimplifier. */
326 void
327 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
329 struct gimplify_ctx *c = ctx_alloc ();
331 c->prev_context = gimplify_ctxp;
332 gimplify_ctxp = c;
333 gimplify_ctxp->into_ssa = in_ssa;
334 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
337 /* Tear down a context for the gimplifier. If BODY is non-null, then
338 put the temporaries into the outer BIND_EXPR. Otherwise, put them
339 in the local_decls.
341 BODY is not a sequence, but the first tuple in a sequence. */
343 void
344 pop_gimplify_context (gimple *body)
346 struct gimplify_ctx *c = gimplify_ctxp;
348 gcc_assert (c
349 && (!c->bind_expr_stack.exists ()
350 || c->bind_expr_stack.is_empty ()));
351 c->bind_expr_stack.release ();
352 gimplify_ctxp = c->prev_context;
354 if (body)
355 declare_vars (c->temps, body, false);
356 else
357 record_vars (c->temps);
359 delete c->temp_htab;
360 c->temp_htab = NULL;
361 ctx_free (c);
364 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
366 static void
367 gimple_push_bind_expr (gbind *bind_stmt)
369 gimplify_ctxp->bind_expr_stack.reserve (8);
370 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
373 /* Pop the top element off the stack of bindings. */
375 static void
376 gimple_pop_bind_expr (void)
378 gimplify_ctxp->bind_expr_stack.pop ();
381 /* Return the top element of the stack of bindings. */
383 gbind *
384 gimple_current_bind_expr (void)
386 return gimplify_ctxp->bind_expr_stack.last ();
389 /* Return the stack of bindings created during gimplification. */
391 vec<gbind *>
392 gimple_bind_expr_stack (void)
394 return gimplify_ctxp->bind_expr_stack;
397 /* Return true iff there is a COND_EXPR between us and the innermost
398 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
400 static bool
401 gimple_conditional_context (void)
403 return gimplify_ctxp->conditions > 0;
406 /* Note that we've entered a COND_EXPR. */
408 static void
409 gimple_push_condition (void)
411 #ifdef ENABLE_GIMPLE_CHECKING
412 if (gimplify_ctxp->conditions == 0)
413 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
414 #endif
415 ++(gimplify_ctxp->conditions);
418 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
419 now, add any conditional cleanups we've seen to the prequeue. */
421 static void
422 gimple_pop_condition (gimple_seq *pre_p)
424 int conds = --(gimplify_ctxp->conditions);
426 gcc_assert (conds >= 0);
427 if (conds == 0)
429 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
430 gimplify_ctxp->conditional_cleanups = NULL;
434 /* A stable comparison routine for use with splay trees and DECLs. */
436 static int
437 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
439 tree a = (tree) xa;
440 tree b = (tree) xb;
442 return DECL_UID (a) - DECL_UID (b);
445 /* Create a new omp construct that deals with variable remapping. */
447 static struct gimplify_omp_ctx *
448 new_omp_context (enum omp_region_type region_type)
450 struct gimplify_omp_ctx *c;
452 c = XCNEW (struct gimplify_omp_ctx);
453 c->outer_context = gimplify_omp_ctxp;
454 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
455 c->privatized_types = new hash_set<tree>;
456 c->location = input_location;
457 c->region_type = region_type;
458 if ((region_type & ORT_TASK) == 0)
459 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
460 else
461 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
462 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
463 c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
464 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
465 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
466 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
468 return c;
471 /* Destroy an omp construct that deals with variable remapping. */
473 static void
474 delete_omp_context (struct gimplify_omp_ctx *c)
476 splay_tree_delete (c->variables);
477 delete c->privatized_types;
478 c->loop_iter_var.release ();
479 XDELETE (c);
482 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
483 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
485 /* Both gimplify the statement T and append it to *SEQ_P. This function
486 behaves exactly as gimplify_stmt, but you don't have to pass T as a
487 reference. */
489 void
490 gimplify_and_add (tree t, gimple_seq *seq_p)
492 gimplify_stmt (&t, seq_p);
495 /* Gimplify statement T into sequence *SEQ_P, and return the first
496 tuple in the sequence of generated tuples for this statement.
497 Return NULL if gimplifying T produced no tuples. */
499 static gimple *
500 gimplify_and_return_first (tree t, gimple_seq *seq_p)
502 gimple_stmt_iterator last = gsi_last (*seq_p);
504 gimplify_and_add (t, seq_p);
506 if (!gsi_end_p (last))
508 gsi_next (&last);
509 return gsi_stmt (last);
511 else
512 return gimple_seq_first_stmt (*seq_p);
515 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
516 LHS, or for a call argument. */
518 static bool
519 is_gimple_mem_rhs (tree t)
521 /* If we're dealing with a renamable type, either source or dest must be
522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t)))
524 return is_gimple_val (t);
525 else
526 return is_gimple_val (t) || is_gimple_lvalue (t);
529 /* Return true if T is a CALL_EXPR or an expression that can be
530 assigned to a temporary. Note that this predicate should only be
531 used during gimplification. See the rationale for this in
532 gimplify_modify_expr. */
534 static bool
535 is_gimple_reg_rhs_or_call (tree t)
537 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
538 || TREE_CODE (t) == CALL_EXPR);
541 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
542 this predicate should only be used during gimplification. See the
543 rationale for this in gimplify_modify_expr. */
545 static bool
546 is_gimple_mem_rhs_or_call (tree t)
548 /* If we're dealing with a renamable type, either source or dest must be
549 a renamed variable. */
550 if (is_gimple_reg_type (TREE_TYPE (t)))
551 return is_gimple_val (t);
552 else
553 return (is_gimple_val (t)
554 || is_gimple_lvalue (t)
555 || TREE_CLOBBER_P (t)
556 || TREE_CODE (t) == CALL_EXPR);
559 /* Create a temporary with a name derived from VAL. Subroutine of
560 lookup_tmp_var; nobody else should call this function. */
562 static inline tree
563 create_tmp_from_val (tree val)
565 /* Drop all qualifiers and address-space information from the value type. */
566 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
567 tree var = create_tmp_var (type, get_name (val));
568 return var;
571 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
572 an existing expression temporary. If NOT_GIMPLE_REG, mark it as such. */
574 static tree
575 lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
577 tree ret;
579 /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P. */
580 gcc_assert (!is_formal || !not_gimple_reg);
582 /* If not optimizing, never really reuse a temporary. local-alloc
583 won't allocate any variable that is used in more than one basic
584 block, which means it will go into memory, causing much extra
585 work in reload and final and poorer code generation, outweighing
586 the extra memory allocation here. */
587 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
589 ret = create_tmp_from_val (val);
590 DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
592 else
594 elt_t elt, *elt_p;
595 elt_t **slot;
597 elt.val = val;
598 if (!gimplify_ctxp->temp_htab)
599 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
600 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
601 if (*slot == NULL)
603 elt_p = XNEW (elt_t);
604 elt_p->val = val;
605 elt_p->temp = ret = create_tmp_from_val (val);
606 *slot = elt_p;
608 else
610 elt_p = *slot;
611 ret = elt_p->temp;
615 return ret;
618 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
620 static tree
621 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
622 bool is_formal, bool allow_ssa, bool not_gimple_reg)
624 tree t, mod;
626 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
627 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
628 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
629 fb_rvalue);
631 if (allow_ssa
632 && gimplify_ctxp->into_ssa
633 && is_gimple_reg_type (TREE_TYPE (val)))
635 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
636 if (! gimple_in_ssa_p (cfun))
638 const char *name = get_name (val);
639 if (name)
640 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
643 else
644 t = lookup_tmp_var (val, is_formal, not_gimple_reg);
646 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
648 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
650 /* gimplify_modify_expr might want to reduce this further. */
651 gimplify_and_add (mod, pre_p);
652 ggc_free (mod);
654 return t;
657 /* Return a formal temporary variable initialized with VAL. PRE_P is as
658 in gimplify_expr. Only use this function if:
660 1) The value of the unfactored expression represented by VAL will not
661 change between the initialization and use of the temporary, and
662 2) The temporary will not be otherwise modified.
664 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
665 and #2 means it is inappropriate for && temps.
667 For other cases, use get_initialized_tmp_var instead. */
669 tree
670 get_formal_tmp_var (tree val, gimple_seq *pre_p)
672 return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
675 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
676 are as in gimplify_expr. */
678 tree
679 get_initialized_tmp_var (tree val, gimple_seq *pre_p,
680 gimple_seq *post_p /* = NULL */,
681 bool allow_ssa /* = true */)
683 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
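/* For instance (illustrative), calling get_initialized_tmp_var on
   VAL = a + b appends

     D.1234 = a + b;

   to *PRE_P and returns the temporary D.1234, which can then be used
   as a gimple value wherever the original expression was needed.  */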
686 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
687 generate debug info for them; otherwise don't. */
689 void
690 declare_vars (tree vars, gimple *gs, bool debug_info)
692 tree last = vars;
693 if (last)
695 tree temps, block;
697 gbind *scope = as_a <gbind *> (gs);
699 temps = nreverse (last);
701 block = gimple_bind_block (scope);
702 gcc_assert (!block || TREE_CODE (block) == BLOCK);
703 if (!block || !debug_info)
705 DECL_CHAIN (last) = gimple_bind_vars (scope);
706 gimple_bind_set_vars (scope, temps);
708 else
710 /* We need to attach the nodes both to the BIND_EXPR and to its
711 associated BLOCK for debugging purposes. The key point here
712 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
713 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
714 if (BLOCK_VARS (block))
715 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
716 else
718 gimple_bind_set_vars (scope,
719 chainon (gimple_bind_vars (scope), temps));
720 BLOCK_VARS (block) = temps;
726 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
727 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
728 no such upper bound can be obtained. */
730 static void
731 force_constant_size (tree var)
733 /* The only attempt we make is by querying the maximum size of objects
734 of the variable's type. */
736 HOST_WIDE_INT max_size;
738 gcc_assert (VAR_P (var));
740 max_size = max_int_size_in_bytes (TREE_TYPE (var));
742 gcc_assert (max_size >= 0);
744 DECL_SIZE_UNIT (var)
745 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
746 DECL_SIZE (var)
747 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
750 /* Push the temporary variable TMP into the current binding. */
752 void
753 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
755 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
757 /* Later processing assumes that the object size is constant, which might
758 not be true at this point. Force the use of a constant upper bound in
759 this case. */
760 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
761 force_constant_size (tmp);
763 DECL_CONTEXT (tmp) = fn->decl;
764 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
766 record_vars_into (tmp, fn->decl);
769 /* Push the temporary variable TMP into the current binding. */
771 void
772 gimple_add_tmp_var (tree tmp)
774 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
776 /* Later processing assumes that the object size is constant, which might
777 not be true at this point. Force the use of a constant upper bound in
778 this case. */
779 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
780 force_constant_size (tmp);
782 DECL_CONTEXT (tmp) = current_function_decl;
783 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
785 if (gimplify_ctxp)
787 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
788 gimplify_ctxp->temps = tmp;
790 /* Mark temporaries local within the nearest enclosing parallel. */
791 if (gimplify_omp_ctxp)
793 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
794 int flag = GOVD_LOCAL | GOVD_SEEN;
795 while (ctx
796 && (ctx->region_type == ORT_WORKSHARE
797 || ctx->region_type == ORT_TASKGROUP
798 || ctx->region_type == ORT_SIMD
799 || ctx->region_type == ORT_ACC))
801 if (ctx->region_type == ORT_SIMD
802 && TREE_ADDRESSABLE (tmp)
803 && !TREE_STATIC (tmp))
805 if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
806 ctx->add_safelen1 = true;
807 else if (ctx->in_for_exprs)
808 flag = GOVD_PRIVATE;
809 else
810 flag = GOVD_PRIVATE | GOVD_SEEN;
811 break;
813 ctx = ctx->outer_context;
815 if (ctx)
816 omp_add_variable (ctx, tmp, flag);
819 else if (cfun)
820 record_vars (tmp);
821 else
823 gimple_seq body_seq;
825 /* This case is for nested functions. We need to expose the locals
826 they create. */
827 body_seq = gimple_body (current_function_decl);
828 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
834 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
835 nodes that are referenced more than once in GENERIC functions. This is
836 necessary because gimplification (translation into GIMPLE) is performed
837 by modifying tree nodes in-place, so gimplification of a shared node in a
838 first context could generate an invalid GIMPLE form in a second context.
840 This is achieved with a simple mark/copy/unmark algorithm that walks the
841 GENERIC representation top-down, marks nodes with TREE_VISITED the first
842 time it encounters them, duplicates them if they already have TREE_VISITED
843 set, and finally removes the TREE_VISITED marks it has set.
845 The algorithm works only at the function level, i.e. it generates a GENERIC
846 representation of a function with no nodes shared within the function when
847 passed a GENERIC function (except for nodes that are allowed to be shared).
849 At the global level, it is also necessary to unshare tree nodes that are
850 referenced in more than one function, for the same aforementioned reason.
851 This requires some cooperation from the front-end. There are 2 strategies:
853 1. Manual unsharing. The front-end needs to call unshare_expr on every
854 expression that might end up being shared across functions.
856 2. Deep unsharing. This is an extension of regular unsharing. Instead
857 of calling unshare_expr on expressions that might be shared across
858 functions, the front-end pre-marks them with TREE_VISITED. This will
859 ensure that they are unshared on the first reference within functions
860 when the regular unsharing algorithm runs. The counterpart is that
861 this algorithm must look deeper than for manual unsharing, which is
862 specified by LANG_HOOKS_DEEP_UNSHARING.
864 If there are only a few specific cases of node sharing across functions, it is
865 probably easier for a front-end to unshare the expressions manually. Conversely,
866 if the expressions generated at the global level are as widespread
867 as expressions generated within functions, deep unsharing is very likely the
868 way to go. */
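/* A sketch of why this matters: if one PLUS_EXPR node <a + b> were
   referenced from two statements, gimplifying the first reference
   would rewrite the node in place, and the second reference would then
   see the already-lowered form rather than the original expression.
   The mark/copy/unmark walk below duplicates the node at its second
   occurrence to avoid this.  */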
870 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
871 These nodes model computations that must be done once. If we were to
872 unshare something like SAVE_EXPR(i++), the gimplification process would
873 create wrong code. However, if DATA is non-null, it must hold a pointer
874 set that is used to unshare the subtrees of these nodes. */
876 static tree
877 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
879 tree t = *tp;
880 enum tree_code code = TREE_CODE (t);
882 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
883 copy their subtrees if we can make sure to do it only once. */
884 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
886 if (data && !((hash_set<tree> *)data)->add (t))
888 else
889 *walk_subtrees = 0;
892 /* Stop at types, decls, constants like copy_tree_r. */
893 else if (TREE_CODE_CLASS (code) == tcc_type
894 || TREE_CODE_CLASS (code) == tcc_declaration
895 || TREE_CODE_CLASS (code) == tcc_constant)
896 *walk_subtrees = 0;
898 /* Cope with the statement expression extension. */
899 else if (code == STATEMENT_LIST)
902 /* Leave the bulk of the work to copy_tree_r itself. */
903 else
904 copy_tree_r (tp, walk_subtrees, NULL);
906 return NULL_TREE;
909 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
910 If *TP has been visited already, then *TP is deeply copied by calling
911 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
913 static tree
914 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
916 tree t = *tp;
917 enum tree_code code = TREE_CODE (t);
919 /* Skip types, decls, and constants. But we do want to look at their
920 types and the bounds of types. Mark them as visited so we properly
921 unmark their subtrees on the unmark pass. If we've already seen them,
922 don't look down further. */
923 if (TREE_CODE_CLASS (code) == tcc_type
924 || TREE_CODE_CLASS (code) == tcc_declaration
925 || TREE_CODE_CLASS (code) == tcc_constant)
927 if (TREE_VISITED (t))
928 *walk_subtrees = 0;
929 else
930 TREE_VISITED (t) = 1;
933 /* If this node has been visited already, unshare it and don't look
934 any deeper. */
935 else if (TREE_VISITED (t))
937 walk_tree (tp, mostly_copy_tree_r, data, NULL);
938 *walk_subtrees = 0;
941 /* Otherwise, mark the node as visited and keep looking. */
942 else
943 TREE_VISITED (t) = 1;
945 return NULL_TREE;
948 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
949 copy_if_shared_r callback unmodified. */
951 void
952 copy_if_shared (tree *tp, void *data)
954 walk_tree (tp, copy_if_shared_r, data, NULL);
957 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
958 any nested functions. */
960 static void
961 unshare_body (tree fndecl)
963 struct cgraph_node *cgn = cgraph_node::get (fndecl);
964 /* If the language requires deep unsharing, we need a pointer set to make
965 sure we don't repeatedly unshare subtrees of unshareable nodes. */
966 hash_set<tree> *visited
967 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
969 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
970 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
971 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
973 delete visited;
975 if (cgn)
976 for (cgn = first_nested_function (cgn); cgn;
977 cgn = next_nested_function (cgn))
978 unshare_body (cgn->decl);
981 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
982 Subtrees are walked until the first unvisited node is encountered. */
984 static tree
985 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
987 tree t = *tp;
989 /* If this node has been visited, unmark it and keep looking. */
990 if (TREE_VISITED (t))
991 TREE_VISITED (t) = 0;
993 /* Otherwise, don't look any deeper. */
994 else
995 *walk_subtrees = 0;
997 return NULL_TREE;
1000 /* Unmark the visited trees rooted at *TP. */
1002 static inline void
1003 unmark_visited (tree *tp)
1005 walk_tree (tp, unmark_visited_r, NULL, NULL);
1008 /* Likewise, but mark all trees as not visited. */
1010 static void
1011 unvisit_body (tree fndecl)
1013 struct cgraph_node *cgn = cgraph_node::get (fndecl);
1015 unmark_visited (&DECL_SAVED_TREE (fndecl));
1016 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1017 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1019 if (cgn)
1020 for (cgn = first_nested_function (cgn);
1021 cgn; cgn = next_nested_function (cgn))
1022 unvisit_body (cgn->decl);
1025 /* Unconditionally make an unshared copy of EXPR. This is used when using
1026 stored expressions which span multiple functions, such as BINFO_VTABLE,
1027 as the normal unsharing process can't tell that they're shared. */
1029 tree
1030 unshare_expr (tree expr)
1032 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1033 return expr;
1036 /* Worker for unshare_expr_without_location. */
1038 static tree
1039 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1041 if (EXPR_P (*tp))
1042 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1043 else
1044 *walk_subtrees = 0;
1045 return NULL_TREE;
1048 /* Similar to unshare_expr but also prune all expression locations
1049 from EXPR. */
1051 tree
1052 unshare_expr_without_location (tree expr)
1054 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1055 if (EXPR_P (expr))
1056 walk_tree (&expr, prune_expr_location, NULL, NULL);
1057 return expr;
1060 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1061 one, OR_ELSE otherwise. The location of a STATEMENT_LIST
1062 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1063 EXPR is the location of the EXPR. */
1065 static location_t
1066 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1068 if (!expr)
1069 return or_else;
1071 if (EXPR_HAS_LOCATION (expr))
1072 return EXPR_LOCATION (expr);
1074 if (TREE_CODE (expr) != STATEMENT_LIST)
1075 return or_else;
1077 tree_stmt_iterator i = tsi_start (expr);
1079 bool found = false;
1080 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1082 found = true;
1083 tsi_next (&i);
1086 if (!found || !tsi_one_before_end_p (i))
1087 return or_else;
1089 return rexpr_location (tsi_stmt (i), or_else);
1092 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1093 rexpr_location for the potential recursion. */
1095 static inline bool
1096 rexpr_has_location (tree expr)
1098 return rexpr_location (expr) != UNKNOWN_LOCATION;
1102 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1103 contain statements and have a value. Assign its value to a temporary
1104 and give it void_type_node. Return the temporary, or NULL_TREE if
1105 WRAPPER was already void. */
1107 tree
1108 voidify_wrapper_expr (tree wrapper, tree temp)
1110 tree type = TREE_TYPE (wrapper);
1111 if (type && !VOID_TYPE_P (type))
1113 tree *p;
1115 /* Set p to point to the body of the wrapper. Loop until we find
1116 something that isn't a wrapper. */
1117 for (p = &wrapper; p && *p; )
1119 switch (TREE_CODE (*p))
1121 case BIND_EXPR:
1122 TREE_SIDE_EFFECTS (*p) = 1;
1123 TREE_TYPE (*p) = void_type_node;
1124 /* For a BIND_EXPR, the body is operand 1. */
1125 p = &BIND_EXPR_BODY (*p);
1126 break;
1128 case CLEANUP_POINT_EXPR:
1129 case TRY_FINALLY_EXPR:
1130 case TRY_CATCH_EXPR:
1131 TREE_SIDE_EFFECTS (*p) = 1;
1132 TREE_TYPE (*p) = void_type_node;
1133 p = &TREE_OPERAND (*p, 0);
1134 break;
1136 case STATEMENT_LIST:
1138 tree_stmt_iterator i = tsi_last (*p);
1139 TREE_SIDE_EFFECTS (*p) = 1;
1140 TREE_TYPE (*p) = void_type_node;
1141 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1143 break;
1145 case COMPOUND_EXPR:
1146 /* Advance to the last statement. Set all container types to
1147 void. */
1148 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1150 TREE_SIDE_EFFECTS (*p) = 1;
1151 TREE_TYPE (*p) = void_type_node;
1153 break;
1155 case TRANSACTION_EXPR:
1156 TREE_SIDE_EFFECTS (*p) = 1;
1157 TREE_TYPE (*p) = void_type_node;
1158 p = &TRANSACTION_EXPR_BODY (*p);
1159 break;
1161 default:
1162 /* Assume that any tree upon which voidify_wrapper_expr is
1163 directly called is a wrapper, and that its body is op0. */
1164 if (p == &wrapper)
1166 TREE_SIDE_EFFECTS (*p) = 1;
1167 TREE_TYPE (*p) = void_type_node;
1168 p = &TREE_OPERAND (*p, 0);
1169 break;
1171 goto out;
1175 out:
1176 if (p == NULL || IS_EMPTY_STMT (*p))
1177 temp = NULL_TREE;
1178 else if (temp)
1180 /* The wrapper is on the RHS of an assignment that we're pushing
1181 down. */
1182 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1183 || TREE_CODE (temp) == MODIFY_EXPR);
1184 TREE_OPERAND (temp, 1) = *p;
1185 *p = temp;
1187 else
1189 temp = create_tmp_var (type, "retval");
1190 *p = build2 (INIT_EXPR, type, temp, *p);
1193 return temp;
1196 return NULL_TREE;
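/* For instance (illustrative), for a GNU statement expression used as
   an rvalue,

     x = ({ int i = f (); i + 1; });

   the wrapper is given void type and the value computation is pushed
   down onto a temporary, roughly:

     { int i = f (); retval = i + 1; }
     x = retval;  */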
1199 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1200 a temporary through which they communicate. */
1202 static void
1203 build_stack_save_restore (gcall **save, gcall **restore)
1205 tree tmp_var;
1207 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1208 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1209 gimple_call_set_lhs (*save, tmp_var);
1211 *restore
1212 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1213 1, tmp_var);
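/* The generated calls dump roughly as (illustrative):

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   and are wrapped around a block containing VLAs by gimplify_bind_expr
   below.  */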
1216 /* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the DECL variable. */
1218 static tree
1219 build_asan_poison_call_expr (tree decl)
1221 /* Do not poison variables that have size equal to zero. */
1222 tree unit_size = DECL_SIZE_UNIT (decl);
1223 if (zerop (unit_size))
1224 return NULL_TREE;
1226 tree base = build_fold_addr_expr (decl);
1228 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1229 void_type_node, 3,
1230 build_int_cst (integer_type_node,
1231 ASAN_MARK_POISON),
1232 base, unit_size);
1235 /* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
1236 on the POISON flag, the shadow memory of the DECL variable. The call
1237 is inserted at the position identified by the iterator IT; the BEFORE
1238 flag selects whether it goes before or after that position. */
1240 static void
1241 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1242 bool before)
1244 tree unit_size = DECL_SIZE_UNIT (decl);
1245 tree base = build_fold_addr_expr (decl);
1247 /* Do not poison variables that have size equal to zero. */
1248 if (zerop (unit_size))
1249 return;
1251 /* It's necessary to have all stack variables aligned to the ASAN
1252 shadow granularity. */
1253 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1254 unsigned shadow_granularity
1255 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
1256 if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
1257 SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);
1259 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1261 gimple *g
1262 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1263 build_int_cst (integer_type_node, flags),
1264 base, unit_size);
1266 if (before)
1267 gsi_insert_before (it, g, GSI_NEW_STMT);
1268 else
1269 gsi_insert_after (it, g, GSI_NEW_STMT);
1272 /* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
1273 flag, either poisons or unpoisons DECL. The created statement is
1274 appended to the SEQ_P gimple sequence. */
1276 static void
1277 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1279 gimple_stmt_iterator it = gsi_last (*seq_p);
1280 bool before = false;
1282 if (gsi_end_p (it))
1283 before = true;
1285 asan_poison_variable (decl, poison, &it, before);
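/* For instance (illustrative), poisoning a 4-byte variable x appends
   an internal call that dumps as

     .ASAN_MARK (POISON, &x, 4);  */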
1288 /* A qsort comparator: order VAR_DECLs A and B by DECL_UID. */
1290 static int
1291 sort_by_decl_uid (const void *a, const void *b)
1293 const tree *t1 = (const tree *)a;
1294 const tree *t2 = (const tree *)b;
1296 int uid1 = DECL_UID (*t1);
1297 int uid2 = DECL_UID (*t2);
1299 if (uid1 < uid2)
1300 return -1;
1301 else if (uid1 > uid2)
1302 return 1;
1303 else
1304 return 0;
1307 /* Generate IFN_ASAN_MARK internal calls for all VARIABLES,
1308 poisoning or unpoisoning them depending on the POISON flag. The
1309 created statements are appended to the SEQ_P gimple sequence. */
1311 static void
1312 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1314 unsigned c = variables->elements ();
1315 if (c == 0)
1316 return;
1318 auto_vec<tree> sorted_variables (c);
1320 for (hash_set<tree>::iterator it = variables->begin ();
1321 it != variables->end (); ++it)
1322 sorted_variables.safe_push (*it);
1324 sorted_variables.qsort (sort_by_decl_uid);
1326 unsigned i;
1327 tree var;
1328 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1330 asan_poison_variable (var, poison, seq_p);
1332 /* Add the use_after_scope_memory attribute to the variable in order
1333 to prevent it from being rewritten into SSA form. */
1334 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1335 DECL_ATTRIBUTES (var)))
1336 DECL_ATTRIBUTES (var)
1337 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1338 integer_one_node,
1339 DECL_ATTRIBUTES (var));
1343 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1345 static enum gimplify_status
1346 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1348 tree bind_expr = *expr_p;
1349 bool old_keep_stack = gimplify_ctxp->keep_stack;
1350 bool old_save_stack = gimplify_ctxp->save_stack;
1351 tree t;
1352 gbind *bind_stmt;
1353 gimple_seq body, cleanup;
1354 gcall *stack_save;
1355 location_t start_locus = 0, end_locus = 0;
1356 tree ret_clauses = NULL;
1358 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1360 /* Mark variables seen in this bind expr. */
1361 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1363 if (VAR_P (t))
1365 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1367 /* Mark variable as local. */
1368 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
1370 if (! DECL_SEEN_IN_BIND_EXPR_P (t)
1371 || splay_tree_lookup (ctx->variables,
1372 (splay_tree_key) t) == NULL)
1374 int flag = GOVD_LOCAL;
1375 if (ctx->region_type == ORT_SIMD
1376 && TREE_ADDRESSABLE (t)
1377 && !TREE_STATIC (t))
1379 if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
1380 ctx->add_safelen1 = true;
1381 else
1382 flag = GOVD_PRIVATE;
1384 omp_add_variable (ctx, t, flag | GOVD_SEEN);
1386 /* Static locals inside a target construct or offloaded
1387 routine need to be "omp declare target". */
1388 if (TREE_STATIC (t))
1389 for (; ctx; ctx = ctx->outer_context)
1390 if ((ctx->region_type & ORT_TARGET) != 0)
1392 if (!lookup_attribute ("omp declare target",
1393 DECL_ATTRIBUTES (t)))
1395 tree id = get_identifier ("omp declare target");
1396 DECL_ATTRIBUTES (t)
1397 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
1398 varpool_node *node = varpool_node::get (t);
1399 if (node)
1401 node->offloadable = 1;
1402 if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
1404 g->have_offload = true;
1405 if (!in_lto_p)
1406 vec_safe_push (offload_vars, t);
1410 break;
1414 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1416 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1417 cfun->has_local_explicit_reg_vars = true;
1421 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1422 BIND_EXPR_BLOCK (bind_expr));
1423 gimple_push_bind_expr (bind_stmt);
1425 gimplify_ctxp->keep_stack = false;
1426 gimplify_ctxp->save_stack = false;
1428 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1429 body = NULL;
1430 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1431 gimple_bind_set_body (bind_stmt, body);
1433 /* Source location wise, the cleanup code (stack_restore and clobbers)
1434 belongs to the end of the block, so propagate what we have. The
1435 stack_save operation belongs to the beginning of block, which we can
1436 infer from the bind_expr directly if the block has no explicit
1437 assignment. */
1438 if (BIND_EXPR_BLOCK (bind_expr))
1440 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1441 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1443 if (start_locus == 0)
1444 start_locus = EXPR_LOCATION (bind_expr);
1446 cleanup = NULL;
1447 stack_save = NULL;
1449 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1450 the stack space allocated to the VLAs. */
1451 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1453 gcall *stack_restore;
1455 /* Save stack on entry and restore it on exit. Add a try_finally
1456 block to achieve this. */
1457 build_stack_save_restore (&stack_save, &stack_restore);
1459 gimple_set_location (stack_save, start_locus);
1460 gimple_set_location (stack_restore, end_locus);
1462 gimplify_seq_add_stmt (&cleanup, stack_restore);
1465 /* Add clobbers for all variables that go out of scope. */
1466 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1468 if (VAR_P (t)
1469 && !is_global_var (t)
1470 && DECL_CONTEXT (t) == current_function_decl)
1472 if (!DECL_HARD_REGISTER (t)
1473 && !TREE_THIS_VOLATILE (t)
1474 && !DECL_HAS_VALUE_EXPR_P (t)
1475 /* Only care for variables that have to be in memory. Others
1476 will be rewritten into SSA names, hence moved to the
1477 top-level. */
1478 && !is_gimple_reg (t)
1479 && flag_stack_reuse != SR_NONE)
1481 tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
1482 gimple *clobber_stmt;
1483 clobber_stmt = gimple_build_assign (t, clobber);
1484 gimple_set_location (clobber_stmt, end_locus);
1485 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1488 if (flag_openacc && oacc_declare_returns != NULL)
1490 tree key = t;
1491 if (DECL_HAS_VALUE_EXPR_P (key))
1493 key = DECL_VALUE_EXPR (key);
1494 if (INDIRECT_REF_P (key))
1495 key = TREE_OPERAND (key, 0);
1497 tree *c = oacc_declare_returns->get (key);
1498 if (c != NULL)
1500 if (ret_clauses)
1501 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1503 ret_clauses = unshare_expr (*c);
1505 oacc_declare_returns->remove (key);
1507 if (oacc_declare_returns->is_empty ())
1509 delete oacc_declare_returns;
1510 oacc_declare_returns = NULL;
1516 if (asan_poisoned_variables != NULL
1517 && asan_poisoned_variables->contains (t))
1519 asan_poisoned_variables->remove (t);
1520 asan_poison_variable (t, true, &cleanup);
1523 if (gimplify_ctxp->live_switch_vars != NULL
1524 && gimplify_ctxp->live_switch_vars->contains (t))
1525 gimplify_ctxp->live_switch_vars->remove (t);
1528 if (ret_clauses)
1530 gomp_target *stmt;
1531 gimple_stmt_iterator si = gsi_start (cleanup);
1533 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1534 ret_clauses);
1535 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1538 if (cleanup)
1540 gtry *gs;
1541 gimple_seq new_body;
1543 new_body = NULL;
1544 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1545 GIMPLE_TRY_FINALLY);
1547 if (stack_save)
1548 gimplify_seq_add_stmt (&new_body, stack_save);
1549 gimplify_seq_add_stmt (&new_body, gs);
1550 gimple_bind_set_body (bind_stmt, new_body);
1553 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1554 if (!gimplify_ctxp->keep_stack)
1555 gimplify_ctxp->keep_stack = old_keep_stack;
1556 gimplify_ctxp->save_stack = old_save_stack;
1558 gimple_pop_bind_expr ();
1560 gimplify_seq_add_stmt (pre_p, bind_stmt);
1562 if (temp)
1564 *expr_p = temp;
1565 return GS_OK;
1568 *expr_p = NULL_TREE;
1569 return GS_ALL_DONE;
1572 /* Maybe add early return predict statement to PRE_P sequence. */
1574 static void
1575 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1577 /* If we are in a conditional context, predict the early return not taken. */
1578 if (gimple_conditional_context ())
1580 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1581 NOT_TAKEN);
1582 gimplify_seq_add_stmt (pre_p, predict);
1586 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1587 GIMPLE value, it is assigned to a new temporary and the statement is
1588 re-written to return the temporary.
1590 PRE_P points to the sequence where side effects that must happen before
1591 STMT should be stored. */
1593 static enum gimplify_status
1594 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1596 greturn *ret;
1597 tree ret_expr = TREE_OPERAND (stmt, 0);
1598 tree result_decl, result;
1600 if (ret_expr == error_mark_node)
1601 return GS_ERROR;
1603 if (!ret_expr
1604 || TREE_CODE (ret_expr) == RESULT_DECL)
1606 maybe_add_early_return_predict_stmt (pre_p);
1607 greturn *ret = gimple_build_return (ret_expr);
1608 copy_warning (ret, stmt);
1609 gimplify_seq_add_stmt (pre_p, ret);
1610 return GS_ALL_DONE;
1613 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1614 result_decl = NULL_TREE;
1615 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1617 /* Used in C++ for handling EH cleanup of the return value if a local
1618 cleanup throws. Assume the front-end knows what it's doing. */
1619 result_decl = DECL_RESULT (current_function_decl);
1620 /* But crash if we end up trying to modify ret_expr below. */
1621 ret_expr = NULL_TREE;
1623 else
1625 result_decl = TREE_OPERAND (ret_expr, 0);
1627 /* See through a return by reference. */
1628 if (INDIRECT_REF_P (result_decl))
1629 result_decl = TREE_OPERAND (result_decl, 0);
1631 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1632 || TREE_CODE (ret_expr) == INIT_EXPR)
1633 && TREE_CODE (result_decl) == RESULT_DECL);
1636 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1637 Recall that aggregate_value_p is FALSE for any aggregate type that is
1638 returned in registers. If we're returning values in registers, then
1639 we don't want to extend the lifetime of the RESULT_DECL, particularly
1640 across another call. In addition, for those aggregates for which
1641 hard_function_value generates a PARALLEL, we'll die during normal
1642 expansion of structure assignments; there's special code in expand_return
1643 to handle this case that does not exist in expand_expr. */
1644 if (!result_decl)
1645 result = NULL_TREE;
1646 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1648 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1650 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1651 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1652 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1653 should be effectively allocated by the caller, i.e. all calls to
1654 this function must be subject to the Return Slot Optimization. */
1655 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1656 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1658 result = result_decl;
1660 else if (gimplify_ctxp->return_temp)
1661 result = gimplify_ctxp->return_temp;
1662 else
1664 result = create_tmp_reg (TREE_TYPE (result_decl));
1666 /* ??? With complex control flow (usually involving abnormal edges),
1667 we can wind up warning about an uninitialized value for this. Due
1668 to how this variable is constructed and initialized, this is never
1669 true. Give up and never warn. */
1670 suppress_warning (result, OPT_Wuninitialized);
1672 gimplify_ctxp->return_temp = result;
1675 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1676 Then gimplify the whole thing. */
1677 if (result != result_decl)
1678 TREE_OPERAND (ret_expr, 0) = result;
1680 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1682 maybe_add_early_return_predict_stmt (pre_p);
1683 ret = gimple_build_return (result);
1684 copy_warning (ret, stmt);
1685 gimplify_seq_add_stmt (pre_p, ret);
1687 return GS_ALL_DONE;
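/* For instance (illustrative), "return a + b;" in a function returning
   int is rewritten so that the value is computed into the return
   temporary first:

     D.1975 = a + b;
     return D.1975;  */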
1690 /* Gimplify a variable-length array DECL. */
1692 static void
1693 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1695 /* This is a variable-sized decl. Simplify its size and mark it
1696 for deferred expansion. */
1697 tree t, addr, ptr_type;
1699 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1700 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1702 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1703 if (DECL_HAS_VALUE_EXPR_P (decl))
1704 return;
1706 /* All occurrences of this decl in final gimplified code will be
1707 replaced by indirection. Setting DECL_VALUE_EXPR does two
1708 things: First, it lets the rest of the gimplifier know what
1709 replacement to use. Second, it lets the debug info know
1710 where to find the value. */
1711 ptr_type = build_pointer_type (TREE_TYPE (decl));
1712 addr = create_tmp_var (ptr_type, get_name (decl));
1713 DECL_IGNORED_P (addr) = 0;
1714 t = build_fold_indirect_ref (addr);
1715 TREE_THIS_NOTRAP (t) = 1;
1716 SET_DECL_VALUE_EXPR (decl, t);
1717 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1719 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1720 max_int_size_in_bytes (TREE_TYPE (decl)));
1721 /* The call has been built for a variable-sized object. */
1722 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1723 t = fold_convert (ptr_type, t);
1724 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1726 gimplify_and_add (t, seq_p);
1728 /* Record the dynamic allocation associated with DECL if requested. */
1729 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1730 record_dynamic_alloc (decl);
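/* For instance (illustrative), for "int a[n];" this emits roughly

     D.1234 = (sizetype) n * 4;
     a.1 = __builtin_alloca_with_align (D.1234, 32);

   with CALL_ALLOCA_FOR_VAR_P set on the call, and installs
   DECL_VALUE_EXPR (a) = *a.1 so that later references to "a" go
   through the pointer.  */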
1733 /* A helper function to be called via walk_tree. Mark all labels under *TP
1734 as being forced. To be called for DECL_INITIAL of static variables. */
1736 static tree
1737 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1739 if (TYPE_P (*tp))
1740 *walk_subtrees = 0;
1741 if (TREE_CODE (*tp) == LABEL_DECL)
1743 FORCED_LABEL (*tp) = 1;
1744 cfun->has_forced_label_in_static = 1;
1747 return NULL_TREE;
1750 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1751 Build a call to internal const function DEFERRED_INIT:
1752 1st argument: SIZE of the DECL;
1753 2nd argument: INIT_TYPE;
1754 3rd argument: NAME of the DECL;
1756 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1758 static void
1759 gimple_add_init_for_auto_var (tree decl,
1760 enum auto_init_type init_type,
1761 gimple_seq *seq_p)
1763 gcc_assert (auto_var_p (decl));
1764 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1765 location_t loc = EXPR_LOCATION (decl);
1766 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1768 tree init_type_node
1769 = build_int_cst (integer_type_node, (int) init_type);
1771 tree decl_name = NULL_TREE;
1772 if (DECL_NAME (decl))
1774 decl_name = build_string_literal (DECL_NAME (decl));
1776 else
1778 char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
1779 sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
1780 decl_name = build_string_literal (decl_name_anonymous);
1783 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
1784 TREE_TYPE (decl), 3,
1785 decl_size, init_type_node,
1786 decl_name);
1788 gimplify_assign (decl, call, seq_p);
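/* For instance (illustrative), with -ftrivial-auto-var-init=zero an
   uninitialized "int x;" receives

     x = .DEFERRED_INIT (4, 2, &"x"[0]);

   where 4 is the size in bytes, 2 encodes AUTO_INIT_ZERO and "x" is
   the decl name; the call is expanded into a real initialization only
   later, so uninitialized-use warnings still work.  */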
1791 /* Generate padding initialization for automatic variable DECL.
1792 C guarantees that brace-init with fewer initializers than members
1793 of an aggregate will initialize the rest of the aggregate as-if it
1794 were static initialization. In turn, static initialization guarantees
1795 that padding is initialized to zero. So, we always initialize paddings
1796 to zeroes regardless of INIT_TYPE.
1797 To do the padding initialization, we insert a call to
1798 __builtin_clear_padding (&decl, 0, for_auto_init = true).
1799 Note, we add an additional dummy argument for __builtin_clear_padding,
1800 'for_auto_init' to distinguish whether this call is for automatic
1801 variable initialization or not.
1803 static void
1804 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
1805 gimple_seq *seq_p)
1807 tree addr_of_decl = NULL_TREE;
1808 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
1810 if (is_vla)
1812 /* The temporary address variable for this vla should be
1813 created in gimplify_vla_decl. */
1814 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
1815 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
1816 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
1818 else
1820 mark_addressable (decl);
1821 addr_of_decl = build_fold_addr_expr (decl);
1824 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
1825 build_one_cst (TREE_TYPE (addr_of_decl)));
1826 gimplify_seq_add_stmt (seq_p, call);
1829 /* Return true if DECL needs to be automatically initialized by the
1830 compiler. */
1831 static bool
1832 is_var_need_auto_init (tree decl)
1834 if (auto_var_p (decl)
1835 && (TREE_CODE (decl) != VAR_DECL
1836 || !DECL_HARD_REGISTER (decl))
1837 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
1838 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
1839 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
1840 && !is_empty_type (TREE_TYPE (decl)))
1841 return true;
1842 return false;
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1851 tree stmt = *stmt_p;
1852 tree decl = DECL_EXPR_DECL (stmt);
1854 *stmt_p = NULL_TREE;
1856 if (TREE_TYPE (decl) == error_mark_node)
1857 return GS_ERROR;
1859 if ((TREE_CODE (decl) == TYPE_DECL
1860 || VAR_P (decl))
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1863 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1864 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1879 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1881 tree init = DECL_INITIAL (decl);
1882 bool is_vla = false;
1883 /* Check whether the decl has a FE-created VALUE_EXPR here, BEFORE
1884 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
1885 If the decl has a VALUE_EXPR that was created by the FE (usually
1886 the C++ FE), it's a proxy variable whose VALUE_EXPR the FE has
1887 already initialized, so we should not initialize it again. */
1888 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1890 poly_uint64 size;
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1892 || (!TREE_STATIC (decl)
1893 && flag_stack_check == GENERIC_STACK_CHECK
1894 && maybe_gt (size,
1895 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1897 gimplify_vla_decl (decl, seq_p);
1898 is_vla = true;
1901 if (asan_poisoned_variables
1902 && !is_vla
1903 && TREE_ADDRESSABLE (decl)
1904 && !TREE_STATIC (decl)
1905 && !DECL_HAS_VALUE_EXPR_P (decl)
1906 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1914 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1916 asan_poisoned_variables->add (decl);
1917 asan_poison_variable (decl, false, seq_p);
1918 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1919 gimplify_ctxp->live_switch_vars->add (decl);
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1927 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1928 gimple_add_tmp_var (decl);
1930 if (init && init != error_mark_node)
1932 if (!TREE_STATIC (decl))
1934 DECL_INITIAL (decl) = NULL_TREE;
1935 init = build2 (INIT_EXPR, void_type_node, decl, init);
1936 gimplify_and_add (init, seq_p);
1937 ggc_free (init);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl)
1940 && !omp_privatize_by_reference (decl))
1941 TREE_READONLY (decl) = 0;
1943 else
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init, force_labels_r, NULL, NULL);
1948 /* When there is no explicit initializer, insert an artificial
1949 initializer for this automatic variable if the user requested
1950 one.  */
1951 else if (is_var_need_auto_init (decl)
1952 && !decl_had_value_expr_p)
1954 gimple_add_init_for_auto_var (decl,
1955 flag_auto_var_init,
1956 seq_p);
1957 /* Expanding the call to .DEFERRED_INIT added above applies block
1958 initialization to the whole space covered by this variable.  As a
1959 result, all the padding will be initialized to zeroes for zero
1960 initialization and to the 0xFE byte-repeatable pattern for
1961 pattern initialization.
1962 To make the padding zero for pattern init as well, we add a
1963 call to __builtin_clear_padding to clear the padding to zero,
1964 for compatibility with Clang.
1965 We cannot insert this call if the variable is a gimple register,
1966 since __builtin_clear_padding takes the address of the variable.
1967 As a result, if a long double/_Complex long double variable is
1968 later spilled onto the stack, its padding is 0xFE.  */
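/* A sketch of the net effect for pattern init (the operands of
   .DEFERRED_INIT and __builtin_clear_padding below are illustrative
   only):
     long double ld;
   is gimplified to approximately
     ld = .DEFERRED_INIT (16, 2, &"ld"[0]);
     __builtin_clear_padding (&ld, 1);
   so the value bits get the 0xFE pattern while the padding bits are
   cleared to zero.  */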
1969 if (flag_auto_var_init == AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1972 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1976 return GS_ALL_DONE;
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1986 tree saved_label = gimplify_ctxp->exit_label;
1987 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1989 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1991 gimplify_ctxp->exit_label = NULL_TREE;
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1995 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1997 if (gimplify_ctxp->exit_label)
1998 gimplify_seq_add_stmt (pre_p,
1999 gimple_build_label (gimplify_ctxp->exit_label));
2001 gimplify_ctxp->exit_label = saved_label;
2003 *expr_p = NULL;
2004 return GS_ALL_DONE;
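/* For example (a sketch in GENERIC/GIMPLE dump style), a LOOP_EXPR whose
   body ends in an EXIT_EXPR:

     LOOP_EXPR { body; EXIT_EXPR <cond>; }

   is lowered to roughly

     <start>:
     body;
     if (cond) goto <exit>; else goto <fallthru>;
     <fallthru>:
     goto <start>;
     <exit>:

   where <exit> is the label created on demand via
   gimplify_ctxp->exit_label (see gimplify_exit_expr below).  */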
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2010 static enum gimplify_status
2011 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2013 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2015 tree_stmt_iterator i = tsi_start (*expr_p);
2017 while (!tsi_end_p (i))
2019 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2020 tsi_delink (&i);
2023 if (temp)
2025 *expr_p = temp;
2026 return GS_OK;
2029 return GS_ALL_DONE;
2033 /* Emit a warning for the unreachable statement STMT if needed.
2034 Return the statement itself when the warning is emitted, otherwise
2035 return NULL.  */
2036 static gimple *
2037 emit_warn_switch_unreachable (gimple *stmt)
2039 if (gimple_code (stmt) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2044 return NULL;
2045 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2046 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2047 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2049 || (is_gimple_assign (stmt)
2050 && gimple_assign_single_p (stmt)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2054 IFN_DEFERRED_INIT))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2057 There are 3 cases:
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding whose 2nd argument is
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2064 i1 = _1. */
2065 return NULL;
2066 else
2067 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2068 "statement will never be executed");
2069 return stmt;
2072 /* Callback for walk_gimple_seq. */
2074 static tree
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2076 bool *handled_ops_p,
2077 struct walk_stmt_info *wi)
2079 gimple *stmt = gsi_stmt (*gsi_p);
2080 bool unreachable_issued = wi->info != NULL;
2082 *handled_ops_p = true;
2083 switch (gimple_code (stmt))
2085 case GIMPLE_TRY:
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt) == NULL)
2091 if (warn_switch_unreachable && !unreachable_issued)
2092 wi->info = emit_warn_switch_unreachable (stmt);
2094 /* Stop when the auto var init warning is not on.  */
2095 if (!warn_trivial_auto_var_init)
2096 return integer_zero_node;
2098 /* Fall through. */
2099 case GIMPLE_BIND:
2100 case GIMPLE_CATCH:
2101 case GIMPLE_EH_FILTER:
2102 case GIMPLE_TRANSACTION:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p = false;
2105 break;
2107 case GIMPLE_DEBUG:
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2111 break;
2113 case GIMPLE_LABEL:
2114 /* Stop at the first label.  */
2115 return integer_zero_node;
2116 case GIMPLE_CALL:
2117 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2119 *handled_ops_p = false;
2120 break;
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2126 /* Get the variable name from the 3rd argument of the call.  */
2127 tree var_name = gimple_call_arg (stmt, 2);
2128 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2129 const char *var_name_str = TREE_STRING_POINTER (var_name);
2131 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2132 "%qs cannot be initialized with"
2133 "%<-ftrivial-auto-var_init%>",
2134 var_name_str);
2135 break;
2138 /* Fall through. */
2139 default:
2140 /* Check the first "real" statement (not a decl, lexical scope, ...) and
2141 issue a warning if needed.  */
2142 if (warn_switch_unreachable && !unreachable_issued)
2143 wi->info = emit_warn_switch_unreachable (stmt);
2144 /* Stop when the auto var init warning is not on.  */
2145 if (!warn_trivial_auto_var_init)
2146 return integer_zero_node;
2147 break;
2149 return NULL_TREE;
2153 /* Possibly warn about unreachable statements between switch's controlling
2154 expression and the first case.  Also warn when -ftrivial-auto-var-init
2155 cannot initialize the auto variable in such a situation.
2156 SEQ is the body of a switch expression.  */
2158 static void
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2161 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2162 /* This warning doesn't play well with Fortran when optimizations
2163 are on. */
2164 || lang_GNU_Fortran ()
2165 || seq == NULL)
2166 return;
2168 struct walk_stmt_info wi;
2170 memset (&wi, 0, sizeof (wi));
2171 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
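/* For instance (a sketch), with -Wswitch-unreachable:

     switch (x)
       {
         int i = g ();   // warning: statement will never be executed
       case 0:
         use (i);
       }

   and, with -ftrivial-auto-var-init enabled, I cannot be given its
   artificial initializer either, which -Wtrivial-auto-var-init
   reports.  */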
2175 /* A label entry that pairs label and a location. */
2176 struct label_entry
2178 tree label;
2179 location_t loc;
2182 /* Find LABEL in vector of label entries VEC. */
2184 static struct label_entry *
2185 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2187 unsigned int i;
2188 struct label_entry *l;
2190 FOR_EACH_VEC_ELT (*vec, i, l)
2191 if (l->label == label)
2192 return l;
2193 return NULL;
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2199 static bool
2200 case_label_p (const vec<tree> *cases, tree label)
2202 unsigned int i;
2203 tree l;
2205 FOR_EACH_VEC_ELT (*cases, i, l)
2206 if (CASE_LABEL (l) == label)
2207 return true;
2208 return false;
2211 /* Find the last nondebug statement in a scope STMT. */
2213 static gimple *
2214 last_stmt_in_scope (gimple *stmt)
2216 if (!stmt)
2217 return NULL;
2219 switch (gimple_code (stmt))
2221 case GIMPLE_BIND:
2223 gbind *bind = as_a <gbind *> (stmt);
2224 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2225 return last_stmt_in_scope (stmt);
2228 case GIMPLE_TRY:
2230 gtry *try_stmt = as_a <gtry *> (stmt);
2231 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2232 gimple *last_eval = last_stmt_in_scope (stmt);
2233 if (gimple_stmt_may_fallthru (last_eval)
2234 && (last_eval == NULL
2235 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2236 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2238 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2239 return last_stmt_in_scope (stmt);
2241 else
2242 return last_eval;
2245 case GIMPLE_DEBUG:
2246 gcc_unreachable ();
2248 default:
2249 return stmt;
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2258 static gimple *
2259 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2260 auto_vec <struct label_entry> *labels,
2261 location_t *prevloc)
2263 gimple *prev = NULL;
2265 *prevloc = UNKNOWN_LOCATION;
2268 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2274 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2275 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2276 if (last
2277 && gimple_code (first) == GIMPLE_SWITCH
2278 && gimple_code (last) == GIMPLE_LABEL)
2280 tree label = gimple_label_label (as_a <glabel *> (last));
2281 if (SWITCH_BREAK_LABEL_P (label))
2283 prev = bind;
2284 gsi_next (gsi_p);
2285 continue;
2289 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2295 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2296 if (last)
2298 prev = last;
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev))
2302 *prevloc = bind_loc;
2304 gsi_next (gsi_p);
2305 continue;
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2311 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2312 tree false_lab = gimple_cond_false_label (cond_stmt);
2313 location_t if_loc = gimple_location (cond_stmt);
2315 /* If we have e.g.
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab))
2319 break;
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2324 gimple *stmt = gsi_stmt (*gsi_p);
2325 if (gimple_code (stmt) == GIMPLE_LABEL
2326 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2327 break;
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p))
2332 break;
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab))
2337 struct label_entry l = { false_lab, if_loc };
2338 labels->safe_push (l);
2341 /* Go to the last statement of the then branch. */
2342 gsi_prev (gsi_p);
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2345 <D.1759>:
2346 <stmt>;
2347 goto <D.1761>;
2348 <D.1760>:
2350 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p)))
2353 /* Look at the statement before, it might be
2354 attribute fallthrough, in which case don't warn. */
2355 gsi_prev (gsi_p);
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2358 gsi_next (gsi_p);
2359 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2360 if (!fallthru_before_dest)
2362 struct label_entry l = { goto_dest, if_loc };
2363 labels->safe_push (l);
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2368 <D.2022>:
2369 n = n + 1; // #1
2370 <D.2023>: // #2
2371 <D.1988>: // #3
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab))
2375 prev = gsi_stmt (*gsi_p);
2377 /* And move back. */
2378 gsi_next (gsi_p);
2381 /* Remember the last statement. Skip labels that are of no interest
2382 to us. */
2383 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2385 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2386 if (find_label_entry (labels, label))
2387 prev = gsi_stmt (*gsi_p);
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2391 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2394 prev = gsi_stmt (*gsi_p);
2395 gsi_next (gsi_p);
2397 while (!gsi_end_p (*gsi_p)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p))));
2402 if (prev && gimple_has_location (prev))
2403 *prevloc = gimple_location (prev);
2404 return prev;
2407 /* Return true if the switch fallthrough warning should occur.  LABEL is
2408 the label statement that we're falling through to.  */
2410 static bool
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2413 gimple_stmt_iterator gsi = *gsi_p;
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label))
2417 return false;
2419 /* Don't warn for non-case labels followed by a statement:
2420 case 0:
2421 foo ();
2422 label:
2423 bar ();
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2427 tree l;
2428 while (!gsi_end_p (gsi)
2429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2430 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2431 && !case_label_p (&gimplify_ctxp->case_labels, l))
2432 gsi_next_nondebug (&gsi);
2433 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2434 return false;
2437 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2438 immediately breaks. */
2439 gsi = *gsi_p;
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi)
2443 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2445 gsi_next_nondebug (&gsi);
2447 /* { ... something; default:; } */
2448 if (gsi_end_p (gsi)
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2454 return false;
2456 return true;
2459 /* Callback for walk_gimple_seq. */
2461 static tree
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2463 struct walk_stmt_info *)
2465 gimple *stmt = gsi_stmt (*gsi_p);
2467 *handled_ops_p = true;
2468 switch (gimple_code (stmt))
2470 case GIMPLE_TRY:
2471 case GIMPLE_BIND:
2472 case GIMPLE_CATCH:
2473 case GIMPLE_EH_FILTER:
2474 case GIMPLE_TRANSACTION:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p = false;
2477 break;
2479 /* Find a sequence of form:
2481 GIMPLE_LABEL
2482 [...]
2483 <may fallthru stmt>
2484 GIMPLE_LABEL
2486 and possibly warn. */
2487 case GIMPLE_LABEL:
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p)
2491 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2492 gsi_next_nondebug (gsi_p);
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p))
2496 return integer_zero_node;
2498 /* Vector of labels that fall through. */
2499 auto_vec <struct label_entry> labels;
2500 location_t prevloc;
2501 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p))
2505 return integer_zero_node;
2507 gimple *next = gsi_stmt (*gsi_p);
2508 tree label;
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next) == GIMPLE_LABEL
2511 && gimple_has_location (next)
2512 && (label = gimple_label_label (as_a <glabel *> (next)))
2513 && prev != NULL)
2515 struct label_entry *l;
2516 bool warned_p = false;
2517 auto_diagnostic_group d;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2519 /* Quiet. */;
2520 else if (gimple_code (prev) == GIMPLE_LABEL
2521 && (label = gimple_label_label (as_a <glabel *> (prev)))
2522 && (l = find_label_entry (&labels, label)))
2523 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev)
2529 && prevloc != UNKNOWN_LOCATION)
2530 warned_p = warning_at (prevloc,
2531 OPT_Wimplicit_fallthrough_,
2532 "this statement may fall through");
2533 if (warned_p)
2534 inform (gimple_location (next), "here");
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label) = true;
2540 /* So that next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2542 gsi_prev (gsi_p);
2545 break;
2546 default:
2547 break;
2549 return NULL_TREE;
2552 /* Warn when a switch case falls through. */
2554 static void
2555 maybe_warn_implicit_fallthrough (gimple_seq seq)
2557 if (!warn_implicit_fallthrough)
2558 return;
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2561 if (!(lang_GNU_C ()
2562 || lang_GNU_CXX ()
2563 || lang_GNU_OBJC ()))
2564 return;
2566 struct walk_stmt_info wi;
2567 memset (&wi, 0, sizeof (wi));
2568 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
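/* A sketch of what -Wimplicit-fallthrough diagnoses:

     switch (x)
       {
       case 1:
         f ();           // warning: this statement may fall through
       case 2:           // note: here
         g ();
         break;
       }

   A "falls through" comment on the label (FALLTHROUGH_LABEL_P) or an
   explicit attribute fallthrough (IFN_FALLTHROUGH) silences the
   warning.  */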
2571 /* Callback for walk_gimple_seq. */
2573 static tree
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2575 struct walk_stmt_info *wi)
2577 gimple *stmt = gsi_stmt (*gsi_p);
2579 *handled_ops_p = true;
2580 switch (gimple_code (stmt))
2582 case GIMPLE_TRY:
2583 case GIMPLE_BIND:
2584 case GIMPLE_CATCH:
2585 case GIMPLE_EH_FILTER:
2586 case GIMPLE_TRANSACTION:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p = false;
2589 break;
2590 case GIMPLE_CALL:
2591 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2593 gsi_remove (gsi_p, true);
2594 if (gsi_end_p (*gsi_p))
2596 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2597 return integer_zero_node;
2600 bool found = false;
2601 location_t loc = gimple_location (stmt);
2603 gimple_stmt_iterator gsi2 = *gsi_p;
2604 stmt = gsi_stmt (gsi2);
2605 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2607 /* Go on until the artificial label. */
2608 tree goto_dest = gimple_goto_dest (stmt);
2609 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2611 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2613 == goto_dest)
2614 break;
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2))
2619 break;
2621 /* Look one past it. */
2622 gsi_next (&gsi2);
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2))
2628 stmt = gsi_stmt (gsi2);
2629 if (gimple_code (stmt) == GIMPLE_LABEL)
2631 tree label = gimple_label_label (as_a <glabel *> (stmt));
2632 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2634 found = true;
2635 break;
2638 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2640 else if (!is_gimple_debug (stmt))
2641 /* Anything else is not expected. */
2642 break;
2643 gsi_next (&gsi2);
2645 if (!found)
2646 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2649 break;
2650 default:
2651 break;
2653 return NULL_TREE;
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2658 static void
2659 expand_FALLTHROUGH (gimple_seq *seq_p)
2661 struct walk_stmt_info wi;
2662 location_t loc;
2663 memset (&wi, 0, sizeof (wi));
2664 wi.info = (void *) &loc;
2665 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2666 if (wi.callback_result == integer_zero_node)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2675 branch to. */
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2680 tree switch_expr = *expr_p;
2681 gimple_seq switch_body_seq = NULL;
2682 enum gimplify_status ret;
2683 tree index_type = TREE_TYPE (switch_expr);
2684 if (index_type == NULL_TREE)
2685 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2687 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2688 fb_rvalue);
2689 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2690 return ret;
2692 if (SWITCH_BODY (switch_expr))
2694 vec<tree> labels;
2695 vec<tree> saved_labels;
2696 hash_set<tree> *saved_live_switch_vars = NULL;
2697 tree default_case = NULL_TREE;
2698 gswitch *switch_stmt;
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels = gimplify_ctxp->case_labels;
2703 gimplify_ctxp->case_labels.create (8);
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2707 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2708 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2709 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2710 else
2711 gimplify_ctxp->live_switch_vars = NULL;
2713 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2714 gimplify_ctxp->in_switch_expr = true;
2716 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2718 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2720 maybe_warn_implicit_fallthrough (switch_body_seq);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp->in_switch_expr)
2723 expand_FALLTHROUGH (&switch_body_seq);
2725 labels = gimplify_ctxp->case_labels;
2726 gimplify_ctxp->case_labels = saved_labels;
2728 if (gimplify_ctxp->live_switch_vars)
2730 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2731 delete gimplify_ctxp->live_switch_vars;
2733 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2735 preprocess_case_label_vec_for_gimple (labels, index_type,
2736 &default_case);
2738 bool add_bind = false;
2739 if (!default_case)
2741 glabel *new_default;
2743 default_case
2744 = build_case_label (NULL_TREE, NULL_TREE,
2745 create_artificial_label (UNKNOWN_LOCATION));
2746 if (old_in_switch_expr)
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2749 add_bind = true;
2751 new_default = gimple_build_label (CASE_LABEL (default_case));
2752 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2754 else if (old_in_switch_expr)
2756 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2757 if (last && gimple_code (last) == GIMPLE_LABEL)
2759 tree label = gimple_label_label (as_a <glabel *> (last));
2760 if (SWITCH_BREAK_LABEL_P (label))
2761 add_bind = true;
2765 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2766 default_case, labels);
2767 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2768 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2769 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2770 so that we can easily find the start and end of the switch
2771 statement. */
2772 if (add_bind)
2774 gimple_seq bind_body = NULL;
2775 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2776 gimple_seq_add_seq (&bind_body, switch_body_seq);
2777 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2778 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2779 gimplify_seq_add_stmt (pre_p, bind);
2781 else
2783 gimplify_seq_add_stmt (pre_p, switch_stmt);
2784 gimplify_seq_add_seq (pre_p, switch_body_seq);
2786 labels.release ();
2788 else
2789 gcc_unreachable ();
2791 return GS_ALL_DONE;
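/* A sketch of the overall lowering, in GIMPLE dump style:

     switch (x) { case 1: f (); break; }

   becomes approximately

     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>:
     f ();
     goto <D.3>;
     <D.3>:

   with <D.3> being the SWITCH_BREAK_LABEL_P default label appended
   above, and the whole range wrapped in a GIMPLE_BIND when ADD_BIND
   is set (label names are illustrative).  */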
2794 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2796 static enum gimplify_status
2797 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2799 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2800 == current_function_decl);
2802 tree label = LABEL_EXPR_LABEL (*expr_p);
2803 glabel *label_stmt = gimple_build_label (label);
2804 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2805 gimplify_seq_add_stmt (pre_p, label_stmt);
2807 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2808 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2809 NOT_TAKEN));
2810 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2811 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2812 TAKEN));
2814 return GS_ALL_DONE;
2817 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2819 static enum gimplify_status
2820 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2822 struct gimplify_ctx *ctxp;
2823 glabel *label_stmt;
2825 /* Invalid programs can play Duff's Device type games with, for example,
2826 #pragma omp parallel. At least in the C front end, we don't
2827 detect such invalid branches until after gimplification, in the
2828 diagnose_omp_blocks pass. */
2829 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2830 if (ctxp->case_labels.exists ())
2831 break;
2833 tree label = CASE_LABEL (*expr_p);
2834 label_stmt = gimple_build_label (label);
2835 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2836 ctxp->case_labels.safe_push (*expr_p);
2837 gimplify_seq_add_stmt (pre_p, label_stmt);
2839 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2840 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2841 NOT_TAKEN));
2842 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2843 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2844 TAKEN));
2846 return GS_ALL_DONE;
2849 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2850 if necessary. */
2852 tree
2853 build_and_jump (tree *label_p)
2855 if (label_p == NULL)
2856 /* If there's nowhere to jump, just fall through. */
2857 return NULL_TREE;
2859 if (*label_p == NULL_TREE)
2861 tree label = create_artificial_label (UNKNOWN_LOCATION);
2862 *label_p = label;
2865 return build1 (GOTO_EXPR, void_type_node, *label_p);
2868 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2869 This also involves building a label to jump to and communicating it to
2870 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2872 static enum gimplify_status
2873 gimplify_exit_expr (tree *expr_p)
2875 tree cond = TREE_OPERAND (*expr_p, 0);
2876 tree expr;
2878 expr = build_and_jump (&gimplify_ctxp->exit_label);
2879 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2880 *expr_p = expr;
2882 return GS_OK;
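/* e.g. EXIT_EXPR <cond> becomes the equivalent of
     if (cond) goto <exit_label>;
   with <exit_label> emitted by gimplify_loop_expr above.  */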
2885 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2886 different from its canonical type, wrap the whole thing inside a
2887 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2888 type.
2890 The canonical type of a COMPONENT_REF is the type of the field being
2891 referenced--unless the field is a bit-field which can be read directly
2892 in a smaller mode, in which case the canonical type is the
2893 sign-appropriate type corresponding to that mode. */
2895 static void
2896 canonicalize_component_ref (tree *expr_p)
2898 tree expr = *expr_p;
2899 tree type;
2901 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2903 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2904 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2905 else
2906 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2908 /* One could argue that all the stuff below is not necessary for
2909 the non-bitfield case and declare it a FE error if type
2910 adjustment would be needed. */
2911 if (TREE_TYPE (expr) != type)
2913 #ifdef ENABLE_TYPES_CHECKING
2914 tree old_type = TREE_TYPE (expr);
2915 #endif
2916 int type_quals;
2918 /* We need to preserve qualifiers and propagate them from
2919 operand 0. */
2920 type_quals = TYPE_QUALS (type)
2921 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2922 if (TYPE_QUALS (type) != type_quals)
2923 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2925 /* Set the type of the COMPONENT_REF to the underlying type. */
2926 TREE_TYPE (expr) = type;
2928 #ifdef ENABLE_TYPES_CHECKING
2929 /* It is now a FE error if the conversion from the canonical
2930 type to the original expression type is not useless. */
2931 gcc_assert (useless_type_conversion_p (old_type, type));
2932 #endif
2936 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2937 to foo, embed that change in the ADDR_EXPR by converting
2938 T array[U];
2939 (T *)&array
2941 &array[L]
2942 where L is the lower bound. For simplicity, only do this for constant
2943 lower bound.
2944 The constraint is that the type of &array[L] is trivially convertible
2945 to T *. */
2947 static void
2948 canonicalize_addr_expr (tree *expr_p)
2950 tree expr = *expr_p;
2951 tree addr_expr = TREE_OPERAND (expr, 0);
2952 tree datype, ddatype, pddatype;
2954 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2955 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2956 || TREE_CODE (addr_expr) != ADDR_EXPR)
2957 return;
2959 /* The addr_expr type should be a pointer to an array. */
2960 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2961 if (TREE_CODE (datype) != ARRAY_TYPE)
2962 return;
2964 /* The pointer to element type shall be trivially convertible to
2965 the expression pointer type. */
2966 ddatype = TREE_TYPE (datype);
2967 pddatype = build_pointer_type (ddatype);
2968 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2969 pddatype))
2970 return;
2972 /* The lower bound and element sizes must be constant. */
2973 if (!TYPE_SIZE_UNIT (ddatype)
2974 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2975 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2976 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2977 return;
2979 /* All checks succeeded. Build a new node to merge the cast. */
2980 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2982 NULL_TREE, NULL_TREE);
2983 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2985 /* We can have stripped a required restrict qualifier above. */
2986 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2987 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2990 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2991 underneath as appropriate. */
2993 static enum gimplify_status
2994 gimplify_conversion (tree *expr_p)
2996 location_t loc = EXPR_LOCATION (*expr_p);
2997 gcc_assert (CONVERT_EXPR_P (*expr_p));
2999 /* Then strip away all but the outermost conversion. */
3000 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3002 /* And remove the outermost conversion if it's useless. */
3003 if (tree_ssa_useless_type_conversion (*expr_p))
3004 *expr_p = TREE_OPERAND (*expr_p, 0);
3006 /* If we still have a conversion at the toplevel,
3007 then canonicalize some constructs. */
3008 if (CONVERT_EXPR_P (*expr_p))
3010 tree sub = TREE_OPERAND (*expr_p, 0);
3012 /* If a NOP conversion is changing the type of a COMPONENT_REF
3013 expression, then canonicalize its type now in order to expose more
3014 redundant conversions. */
3015 if (TREE_CODE (sub) == COMPONENT_REF)
3016 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3018 /* If a NOP conversion is changing a pointer to array of foo
3019 to a pointer to foo, embed that change in the ADDR_EXPR. */
3020 else if (TREE_CODE (sub) == ADDR_EXPR)
3021 canonicalize_addr_expr (expr_p);
3024 /* If we have a conversion to a non-register type force the
3025 use of a VIEW_CONVERT_EXPR instead. */
3026 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3027 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3028 TREE_OPERAND (*expr_p, 0));
3030 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3031 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3032 TREE_SET_CODE (*expr_p, NOP_EXPR);
3034 return GS_OK;
3037 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3038 DECL_VALUE_EXPR, and it's worth re-examining things. */
3040 static enum gimplify_status
3041 gimplify_var_or_parm_decl (tree *expr_p)
3043 tree decl = *expr_p;
3045 /* ??? If this is a local variable, and it has not been seen in any
3046 outer BIND_EXPR, then it's probably the result of a duplicate
3047 declaration, for which we've already issued an error. It would
3048 be really nice if the front end wouldn't leak these at all.
3049 Currently the only known culprit is C++ destructors, as seen
3050 in g++.old-deja/g++.jason/binding.C.
3051 Another possible culprit are size expressions for variably modified
3052 types which are lost in the FE or not gimplified correctly.  */
3053 if (VAR_P (decl)
3054 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3055 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3056 && decl_function_context (decl) == current_function_decl)
3058 gcc_assert (seen_error ());
3059 return GS_ERROR;
3062 /* When within an OMP context, notice uses of variables. */
3063 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3064 return GS_ALL_DONE;
3066 /* If the decl is an alias for another expression, substitute it now. */
3067 if (DECL_HAS_VALUE_EXPR_P (decl))
3069 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3070 return GS_OK;
3073 return GS_ALL_DONE;
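/* A hypothetical illustration of the DECL_VALUE_EXPR substitution:
   for a VLA "int a[n]", gimplify_vla_decl gives A a value expression
   that dereferences the pointer to its allocated storage, so a later
   use of A is rewritten here into that dereference and then gimplified
   further.  */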
3076 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3078 static void
3079 recalculate_side_effects (tree t)
3081 enum tree_code code = TREE_CODE (t);
3082 int len = TREE_OPERAND_LENGTH (t);
3083 int i;
3085 switch (TREE_CODE_CLASS (code))
3087 case tcc_expression:
3088 switch (code)
3090 case INIT_EXPR:
3091 case MODIFY_EXPR:
3092 case VA_ARG_EXPR:
3093 case PREDECREMENT_EXPR:
3094 case PREINCREMENT_EXPR:
3095 case POSTDECREMENT_EXPR:
3096 case POSTINCREMENT_EXPR:
3097 /* All of these have side-effects, no matter what their
3098 operands are. */
3099 return;
3101 default:
3102 break;
3104 /* Fall through. */
3106 case tcc_comparison: /* a comparison expression */
3107 case tcc_unary: /* a unary arithmetic expression */
3108 case tcc_binary: /* a binary arithmetic expression */
3109 case tcc_reference: /* a reference */
3110 case tcc_vl_exp: /* a function call */
3111 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3112 for (i = 0; i < len; ++i)
3114 tree op = TREE_OPERAND (t, i);
3115 if (op && TREE_SIDE_EFFECTS (op))
3116 TREE_SIDE_EFFECTS (t) = 1;
3118 break;
3120 case tcc_constant:
3121 /* No side-effects. */
3122 return;
3124 default:
3125 gcc_unreachable ();
3129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3130 node *EXPR_P.
3132 compound_lval
3133 : min_lval '[' val ']'
3134 | min_lval '.' ID
3135 | compound_lval '[' val ']'
3136 | compound_lval '.' ID
3138 This is not part of the original SIMPLE definition, which separates
3139 array and member references, but it seems reasonable to handle them
3140 together. Also, this way we don't run into problems with union
3141 aliasing; gcc requires that for accesses through a union to alias, the
3142 union reference must be explicit, which was not always the case when we
3143 were splitting up array and member refs.
3145 PRE_P points to the sequence where side effects that must happen before
3146 *EXPR_P should be stored.
3148 POST_P points to the sequence where side effects that must happen after
3149 *EXPR_P should be stored. */
3151 static enum gimplify_status
3152 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3153 fallback_t fallback)
3155 tree *p;
3156 enum gimplify_status ret = GS_ALL_DONE, tret;
3157 int i;
3158 location_t loc = EXPR_LOCATION (*expr_p);
3159 tree expr = *expr_p;
3161 /* Create a stack of the subexpressions so later we can walk them in
3162 order from inner to outer. */
3163 auto_vec<tree, 10> expr_stack;
3165 /* We can handle anything that get_inner_reference can deal with. */
3166 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3168 restart:
3169 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3170 if (TREE_CODE (*p) == INDIRECT_REF)
3171 *p = fold_indirect_ref_loc (loc, *p);
3173 if (handled_component_p (*p))
3175 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3176 additional COMPONENT_REFs. */
3177 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3178 && gimplify_var_or_parm_decl (p) == GS_OK)
3179 goto restart;
3180 else
3181 break;
3183 expr_stack.safe_push (*p);
3186 gcc_assert (expr_stack.length ());
3188 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3189 walked through and P points to the innermost expression.
3191 Java requires that we elaborate nodes in source order.  That
3192 means we must gimplify the inner expression followed by each of
3193 the indices, in order. But we can't gimplify the inner
3194 expression until we deal with any variable bounds, sizes, or
3195 positions in order to deal with PLACEHOLDER_EXPRs.
3197 The base expression may contain a statement expression that
3198 has declarations used in size expressions, so has to be
3199 gimplified before gimplifying the size expressions.
3201 So we do this in three steps. First we deal with variable
3202 bounds, sizes, and positions, then we gimplify the base and
3203 ensure it is memory if needed, then we deal with the annotations
3204 for any variables in the components and any indices, from left
3205 to right. */
3207 bool need_non_reg = false;
3208 for (i = expr_stack.length () - 1; i >= 0; i--)
3210 tree t = expr_stack[i];
3212 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3214 /* Deal with the low bound and element type size and put them into
3215 the ARRAY_REF. If these values are set, they have already been
3216 gimplified. */
3217 if (TREE_OPERAND (t, 2) == NULL_TREE)
3219 tree low = unshare_expr (array_ref_low_bound (t));
3220 if (!is_gimple_min_invariant (low))
3222 TREE_OPERAND (t, 2) = low;
3226 if (TREE_OPERAND (t, 3) == NULL_TREE)
3228 tree elmt_size = array_ref_element_size (t);
3229 if (!is_gimple_min_invariant (elmt_size))
3231 elmt_size = unshare_expr (elmt_size);
3232 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3233 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3235 /* Divide the element size by the alignment of the element
3236 type (above). */
3237 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3238 elmt_size, factor);
3240 TREE_OPERAND (t, 3) = elmt_size;
3243 need_non_reg = true;
3245 else if (TREE_CODE (t) == COMPONENT_REF)
3247 /* Set the field offset into T and gimplify it. */
3248 if (TREE_OPERAND (t, 2) == NULL_TREE)
3250 tree offset = component_ref_field_offset (t);
3251 if (!is_gimple_min_invariant (offset))
3253 offset = unshare_expr (offset);
3254 tree field = TREE_OPERAND (t, 1);
3255 tree factor
3256 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3258 /* Divide the offset by its alignment. */
3259 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3260 offset, factor);
3262 TREE_OPERAND (t, 2) = offset;
3265 need_non_reg = true;
3267 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3268 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3269 is a non-register type then require the base object to be a
3270 non-register as well. */
3271 need_non_reg = true;
3274 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3275 so as to match the min_lval predicate. Failure to do so may result
3276 in the creation of large aggregate temporaries. */
3277 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3278 fallback | fb_lvalue);
3279 ret = MIN (ret, tret);
3280 if (ret == GS_ERROR)
3281 return GS_ERROR;
3283 /* Step 2a: if we have component references we do not support on
3284 registers then make sure the base isn't a register. Of course
3285 we can only do so if an rvalue is OK. */
3286 if (need_non_reg && (fallback & fb_rvalue))
3287 prepare_gimple_addressable (p, pre_p);
3290 /* Step 3: gimplify size expressions and the indices and operands of
3291 ARRAY_REF. During this loop we also remove any useless conversions.
3292 If we operate on a register also make sure to properly gimplify
3293 to individual operations. */
3295 bool reg_operations = is_gimple_reg (*p);
3296 for (; expr_stack.length () > 0; )
3298 tree t = expr_stack.pop ();
3300 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3302 gcc_assert (!reg_operations);
3304 /* Gimplify the low bound and element type size. */
3305 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3306 is_gimple_reg, fb_rvalue);
3307 ret = MIN (ret, tret);
3309 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3310 is_gimple_reg, fb_rvalue);
3311 ret = MIN (ret, tret);
3313 /* Gimplify the dimension. */
3314 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3315 is_gimple_val, fb_rvalue);
3316 ret = MIN (ret, tret);
3318 else if (TREE_CODE (t) == COMPONENT_REF)
3320 gcc_assert (!reg_operations);
3322 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3323 is_gimple_reg, fb_rvalue);
3324 ret = MIN (ret, tret);
3326 else if (reg_operations)
3328 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3329 is_gimple_val, fb_rvalue);
3330 ret = MIN (ret, tret);
3333 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3335 /* The innermost expression P may have originally had
3336 TREE_SIDE_EFFECTS set which would have caused all the outer
3337 expressions in *EXPR_P leading to P to also have had
3338 TREE_SIDE_EFFECTS set. */
3339 recalculate_side_effects (t);
3342 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3343 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3345 canonicalize_component_ref (expr_p);
3348 expr_stack.release ();
3350 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3352 return ret;
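/* A sketch of the three steps on "a[i].f" (names hypothetical):

     step 1: annotate the ARRAY_REF and COMPONENT_REF with any variable
             low bound, element size and field offset;
     step 2: gimplify the base A, forcing it into memory if some
             component is not supported on registers;
     step 3: gimplify the annotations and the index, e.g.
               i.0 = i;
             so the final reference is a[i.0].f.  */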
3355 /* Gimplify the self modifying expression pointed to by EXPR_P
3356 (++, --, +=, -=).
3358 PRE_P points to the list where side effects that must happen before
3359 *EXPR_P should be stored.
3361 POST_P points to the list where side effects that must happen after
3362 *EXPR_P should be stored.
3364 WANT_VALUE is nonzero iff we want to use the value of this expression
3365 in another expression.
3367 ARITH_TYPE is the type the computation should be performed in. */
3369 enum gimplify_status
3370 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3371 bool want_value, tree arith_type)
3373 enum tree_code code;
3374 tree lhs, lvalue, rhs, t1;
3375 gimple_seq post = NULL, *orig_post_p = post_p;
3376 bool postfix;
3377 enum tree_code arith_code;
3378 enum gimplify_status ret;
3379 location_t loc = EXPR_LOCATION (*expr_p);
3381 code = TREE_CODE (*expr_p);
3383 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3384 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3386 /* Prefix or postfix? */
3387 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3388 /* Faster to treat as prefix if result is not used. */
3389 postfix = want_value;
3390 else
3391 postfix = false;
3393 /* For postfix, make sure the inner expression's post side effects
3394 are executed after side effects from this expression. */
3395 if (postfix)
3396 post_p = &post;
3398 /* Add or subtract? */
3399 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3400 arith_code = PLUS_EXPR;
3401 else
3402 arith_code = MINUS_EXPR;
3404 /* Gimplify the LHS into a GIMPLE lvalue. */
3405 lvalue = TREE_OPERAND (*expr_p, 0);
3406 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3407 if (ret == GS_ERROR)
3408 return ret;
3410 /* Extract the operands to the arithmetic operation. */
3411 lhs = lvalue;
3412 rhs = TREE_OPERAND (*expr_p, 1);
3414 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3415 that both as the result value and in the postqueue operation.  */
3416 if (postfix)
3418 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3419 if (ret == GS_ERROR)
3420 return ret;
3422 lhs = get_initialized_tmp_var (lhs, pre_p);
3425 /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
3426 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3428 rhs = convert_to_ptrofftype_loc (loc, rhs);
3429 if (arith_code == MINUS_EXPR)
3430 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3431 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3433 else
3434 t1 = fold_convert (TREE_TYPE (*expr_p),
3435 fold_build2 (arith_code, arith_type,
3436 fold_convert (arith_type, lhs),
3437 fold_convert (arith_type, rhs)));
3439 if (postfix)
3441 gimplify_assign (lvalue, t1, pre_p);
3442 gimplify_seq_add_seq (orig_post_p, post);
3443 *expr_p = lhs;
3444 return GS_ALL_DONE;
3446 else
3448 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3449 return GS_OK;
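/* For example (a sketch), "x = i++" with int I gimplifies to

     i.0 = i;
     i = i.0 + 1;
     x = i.0;

   while prefix "++i" simply becomes "i = i + 1" followed by the use
   of I.  For a pointer P, "p++" instead uses POINTER_PLUS_EXPR with
   the offset converted by convert_to_ptrofftype_loc.  */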
3453 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3455 static void
3456 maybe_with_size_expr (tree *expr_p)
3458 tree expr = *expr_p;
3459 tree type = TREE_TYPE (expr);
3460 tree size;
3462 /* If we've already wrapped this or the type is error_mark_node, we can't do
3463 anything. */
3464 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3465 || type == error_mark_node)
3466 return;
3468 /* If the size isn't known or is a constant, we have nothing to do. */
3469 size = TYPE_SIZE_UNIT (type);
3470 if (!size || poly_int_tree_p (size))
3471 return;
3473 /* Otherwise, make a WITH_SIZE_EXPR. */
3474 size = unshare_expr (size);
3475 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3476 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
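/* For instance, for an object of variably modified type (here using
   the GNU extension of a VLA struct member; a sketch)
     struct S { int a[n]; } s;
   the expression is wrapped as
     WITH_SIZE_EXPR <s, SAVE_EXPR <n> * 4>
   so that callers such as gimplify_arg retain the runtime size.  */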
3479 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
3480 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3481 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3482 gimplified to an SSA name. */
3484 enum gimplify_status
3485 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3486 bool allow_ssa)
3488 bool (*test) (tree);
3489 fallback_t fb;
3491 /* In general, we allow lvalues for function arguments to avoid
3492 extra overhead of copying large aggregates out of even larger
3493 aggregates into temporaries only to copy the temporaries to
3494 the argument list. Make optimizers happy by pulling out to
3495 temporaries those types that fit in registers. */
3496 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3497 test = is_gimple_val, fb = fb_rvalue;
3498 else
3500 test = is_gimple_lvalue, fb = fb_either;
3501 /* Also strip a TARGET_EXPR that would force an extra copy. */
3502 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3504 tree init = TARGET_EXPR_INITIAL (*arg_p);
3505 if (init
3506 && !VOID_TYPE_P (TREE_TYPE (init)))
3507 *arg_p = init;
3511 /* If this is a variable sized type, we must remember the size. */
3512 maybe_with_size_expr (arg_p);
3514 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3515 /* Make sure arguments have the same location as the function call
3516 itself. */
3517 protected_set_expr_location (*arg_p, call_location);
3519 /* There is a sequence point before a function call. Side effects in
3520 the argument list must occur before the actual call. So, when
3521 gimplifying arguments, force gimplify_expr to use an internal
3522 post queue which is then appended to the end of PRE_P. */
3523 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3526 /* Don't fold inside offloading or taskreg regions: it can break code by
3527 adding decl references that weren't in the source.  We'll do it during
3528 the omplower pass instead.  */
3530 static bool
3531 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3533 struct gimplify_omp_ctx *ctx;
3534 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3535 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3536 return false;
3537 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3538 return false;
3539 /* Delay folding of builtins until the IL is in consistent state
3540 so the diagnostic machinery can do a better job. */
3541 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3542 return false;
3543 return fold_stmt (gsi);
3546 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3547 WANT_VALUE is true if the result of the call is desired. */
3549 static enum gimplify_status
3550 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3552 tree fndecl, parms, p, fnptrtype;
3553 enum gimplify_status ret;
3554 int i, nargs;
3555 gcall *call;
3556 bool builtin_va_start_p = false;
3557 location_t loc = EXPR_LOCATION (*expr_p);
3559 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3561 /* For reliable diagnostics during inlining, it is necessary that
3562 every call_expr be annotated with file and line. */
3563 if (! EXPR_HAS_LOCATION (*expr_p))
3564 SET_EXPR_LOCATION (*expr_p, input_location);
3566 /* Gimplify internal functions created in the FEs. */
3567 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3569 if (want_value)
3570 return GS_ALL_DONE;
3572 nargs = call_expr_nargs (*expr_p);
3573 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3574 auto_vec<tree> vargs (nargs);
3576 if (ifn == IFN_ASSUME)
3578 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3580 /* If the [[assume (cond)]]; condition is simple
3581 enough and can be evaluated unconditionally
3582 without side-effects, expand it as
3583 if (!cond) __builtin_unreachable (); */
3584 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3585 *expr_p = build3 (COND_EXPR, void_type_node,
3586 CALL_EXPR_ARG (*expr_p, 0), void_node,
3587 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3588 fndecl, 0));
3589 return GS_OK;
3591 /* If not optimizing, ignore the assumptions. */
3592 if (!optimize || seen_error ())
3594 *expr_p = NULL_TREE;
3595 return GS_ALL_DONE;
3597 /* Temporarily, until gimple lowering, transform
3598 .ASSUME (cond);
3599 into:
3600 [[assume (guard)]]
3602 guard = cond;
3604 such that gimple lowering can outline the condition into
3605 a separate function easily. */
3606 tree guard = create_tmp_var (boolean_type_node);
3607 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3608 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3609 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3610 push_gimplify_context ();
3611 gimple_seq body = NULL;
3612 gimple *g = gimplify_and_return_first (*expr_p, &body);
3613 pop_gimplify_context (g);
3614 g = gimple_build_assume (guard, body);
3615 gimple_set_location (g, loc);
3616 gimplify_seq_add_stmt (pre_p, g);
3617 *expr_p = NULL_TREE;
3618 return GS_ALL_DONE;
3621 for (i = 0; i < nargs; i++)
3623 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3624 EXPR_LOCATION (*expr_p));
3625 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3628 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3629 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3630 gimplify_seq_add_stmt (pre_p, call);
3631 return GS_ALL_DONE;
3634 /* This may be a call to a builtin function.
3636 Builtin function calls may be transformed into different
3637 (and more efficient) builtin function calls under certain
3638 circumstances. Unfortunately, gimplification can muck things
3639 up enough that the builtin expanders are not aware that certain
3640 transformations are still valid.
3642 So we attempt transformation/gimplification of the call before
3643 we gimplify the CALL_EXPR. At this time we do not manage to
3644 transform all calls in the same manner as the expanders do, but
3645 we do transform most of them. */
3646 fndecl = get_callee_fndecl (*expr_p);
3647 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3648 switch (DECL_FUNCTION_CODE (fndecl))
3650 CASE_BUILT_IN_ALLOCA:
3651 /* If the call has been built for a variable-sized object, then we
3652 want to restore the stack level when the enclosing BIND_EXPR is
3653 exited to reclaim the allocated space; otherwise, we precisely
3654 need to do the opposite and preserve the latest stack level. */
3655 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3656 gimplify_ctxp->save_stack = true;
3657 else
3658 gimplify_ctxp->keep_stack = true;
3659 break;
3661 case BUILT_IN_VA_START:
3663 builtin_va_start_p = TRUE;
3664 if (call_expr_nargs (*expr_p) < 2)
3666 error ("too few arguments to function %<va_start%>");
3667 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3668 return GS_OK;
3671 if (fold_builtin_next_arg (*expr_p, true))
3673 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3674 return GS_OK;
3676 break;
3679 case BUILT_IN_EH_RETURN:
3680 cfun->calls_eh_return = true;
3681 break;
3683 case BUILT_IN_CLEAR_PADDING:
3684 if (call_expr_nargs (*expr_p) == 1)
3686 /* Remember the original type of the argument in an internal
3687 dummy second argument, as in GIMPLE, pointer conversions are
3688 useless. Also mark this call as not for automatic
3689 initialization in the internal dummy third argument. */
3690 p = CALL_EXPR_ARG (*expr_p, 0);
3691 *expr_p
3692 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3693 build_zero_cst (TREE_TYPE (p)));
3694 return GS_OK;
3696 break;
3698 default:
3701 if (fndecl && fndecl_built_in_p (fndecl))
3703 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3704 if (new_tree && new_tree != *expr_p)
3706 /* There was a transformation of this call which computes the
3707 same value, but in a more efficient way. Return and try
3708 again. */
3709 *expr_p = new_tree;
3710 return GS_OK;
3714 /* Remember the original function pointer type. */
3715 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3717 if (flag_openmp
3718 && fndecl
3719 && cfun
3720 && (cfun->curr_properties & PROP_gimple_any) == 0)
3722 tree variant = omp_resolve_declare_variant (fndecl);
3723 if (variant != fndecl)
3724 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3727 /* There is a sequence point before the call, so any side effects in
3728 the calling expression must occur before the actual call. Force
3729 gimplify_expr to use an internal post queue. */
3730 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3731 is_gimple_call_addr, fb_rvalue);
3733 if (ret == GS_ERROR)
3734 return GS_ERROR;
3736 nargs = call_expr_nargs (*expr_p);
3738 /* Get argument types for verification. */
3739 fndecl = get_callee_fndecl (*expr_p);
3740 parms = NULL_TREE;
3741 if (fndecl)
3742 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3743 else
3744 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3746 if (fndecl && DECL_ARGUMENTS (fndecl))
3747 p = DECL_ARGUMENTS (fndecl);
3748 else if (parms)
3749 p = parms;
3750 else
3751 p = NULL_TREE;
3752 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3755 /* If the last argument is __builtin_va_arg_pack () and it is not
3756 passed as a named argument, decrease the number of CALL_EXPR
3757 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3758 if (!p
3759 && i < nargs
3760 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3762 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3763 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3765 if (last_arg_fndecl
3766 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3768 tree call = *expr_p;
3770 --nargs;
3771 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3772 CALL_EXPR_FN (call),
3773 nargs, CALL_EXPR_ARGP (call));
3775 /* Copy all CALL_EXPR flags, location and block, except
3776 CALL_EXPR_VA_ARG_PACK flag. */
3777 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3778 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3779 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3780 = CALL_EXPR_RETURN_SLOT_OPT (call);
3781 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3782 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3784 /* Set CALL_EXPR_VA_ARG_PACK. */
3785 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
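/* Illustrative sketch of the construct this supports, an always_inline
   variadic forwarder:

     extern int vlog (int level, const char *fmt, ...);

     static inline __attribute__ ((always_inline)) int
     warn (const char *fmt, ...)
     {
       return vlog (1, fmt, __builtin_va_arg_pack ());
     }

   The trailing __builtin_va_arg_pack () argument is removed and
   CALL_EXPR_VA_ARG_PACK is set on the call, so the caller's actual
   variadic arguments are substituted when warn is inlined.  */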
3789 /* If the call returns twice then after building the CFG the call
3790 argument computations will no longer dominate the call because
3791 we add an abnormal incoming edge to the call. So do not use SSA
3792 vars there. */
3793 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
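/* setjmp is the canonical example: a second, abnormal return via
   longjmp means an SSA temporary computed for an argument would not
   dominate that return, hence plain temporaries are forced below.  */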
3795 /* Gimplify the function arguments. */
3796 if (nargs > 0)
3798 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3799 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3800 PUSH_ARGS_REVERSED ? i-- : i++)
3802 enum gimplify_status t;
3804 /* Avoid gimplifying the second argument to va_start, which needs to
3805 be the plain PARM_DECL. */
3806 if ((i != 1) || !builtin_va_start_p)
3808 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3809 EXPR_LOCATION (*expr_p), ! returns_twice);
3811 if (t == GS_ERROR)
3812 ret = GS_ERROR;
3817 /* Gimplify the static chain. */
3818 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3820 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3821 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3822 else
3824 enum gimplify_status t;
3825 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3826 EXPR_LOCATION (*expr_p), ! returns_twice);
3827 if (t == GS_ERROR)
3828 ret = GS_ERROR;
3832 /* Verify the function result. */
3833 if (want_value && fndecl
3834 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3836 error_at (loc, "using result of function returning %<void%>");
3837 ret = GS_ERROR;
3840 /* Try this again in case gimplification exposed something. */
3841 if (ret != GS_ERROR)
3843 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3845 if (new_tree && new_tree != *expr_p)
3847 /* There was a transformation of this call which computes the
3848 same value, but in a more efficient way. Return and try
3849 again. */
3850 *expr_p = new_tree;
3851 return GS_OK;
3854 else
3856 *expr_p = error_mark_node;
3857 return GS_ERROR;
3860 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3861 decl. This allows us to eliminate redundant or useless
3862 calls to "const" functions. */
3863 if (TREE_CODE (*expr_p) == CALL_EXPR)
3865 int flags = call_expr_flags (*expr_p);
3866 if (flags & (ECF_CONST | ECF_PURE)
3867 /* An infinite loop is considered a side effect. */
3868 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3869 TREE_SIDE_EFFECTS (*expr_p) = 0;
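/* A sketch with a hypothetical pure function:

     extern int checksum (const int *a, int n) __attribute__ ((pure));

     void f (int *a, int n)
     {
       checksum (a, n);   // result unused
     }

   With TREE_SIDE_EFFECTS cleared, the unused call can be removed
   outright; a looping-const-or-pure call keeps its side effects since
   possible non-termination is observable.  */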
3872 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3873 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3874 form and delegate the creation of a GIMPLE_CALL to
3875 gimplify_modify_expr. This is always possible because when
3876 WANT_VALUE is true, the caller wants the result of this call into
3877 a temporary, which means that we will emit an INIT_EXPR in
3878 internal_get_tmp_var which will then be handled by
3879 gimplify_modify_expr. */
3880 if (!want_value)
3882 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3883 have to do is replicate it as a GIMPLE_CALL tuple. */
3884 gimple_stmt_iterator gsi;
3885 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3886 notice_special_calls (call);
3887 gimplify_seq_add_stmt (pre_p, call);
3888 gsi = gsi_last (*pre_p);
3889 maybe_fold_stmt (&gsi);
3890 *expr_p = NULL_TREE;
3892 else
3893 /* Remember the original function type. */
3894 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3895 CALL_EXPR_FN (*expr_p));
3897 return ret;
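/* Summarizing the two exits above: for a statement `foo (x);'
   (want_value false) a GIMPLE_CALL is emitted into *PRE_P right here
   and *EXPR_P becomes NULL, whereas for `y = foo (x);' (want_value
   true) the gimplified CALL_EXPR stays in *EXPR_P, with the original
   function pointer type preserved in a NOP_EXPR, and the GIMPLE_CALL
   is built later by gimplify_modify_expr.  */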
3900 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3901 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3903 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3904 condition is true or false, respectively. If null, we should generate
3905 our own to skip over the evaluation of this specific expression.
3907 LOCUS is the source location of the COND_EXPR.
3909 This function is the tree equivalent of do_jump.
3911 shortcut_cond_r should only be called by shortcut_cond_expr. */
3913 static tree
3914 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3915 location_t locus)
3917 tree local_label = NULL_TREE;
3918 tree t, expr = NULL;
3920 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3921 retain the shortcut semantics. Just insert the gotos here;
3922 shortcut_cond_expr will append the real blocks later. */
3923 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3925 location_t new_locus;
3927 /* Turn if (a && b) into
3929 if (a); else goto no;
3930 if (b) goto yes; else goto no;
3931 (no:) */
3933 if (false_label_p == NULL)
3934 false_label_p = &local_label;
3936 /* Keep the original source location on the first 'if'. */
3937 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3938 append_to_statement_list (t, &expr);
3940 /* Set the source location of the && on the second 'if'. */
3941 new_locus = rexpr_location (pred, locus);
3942 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3943 new_locus);
3944 append_to_statement_list (t, &expr);
3946 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3948 location_t new_locus;
3950 /* Turn if (a || b) into
3952 if (a) goto yes;
3953 if (b) goto yes; else goto no;
3954 (yes:) */
3956 if (true_label_p == NULL)
3957 true_label_p = &local_label;
3959 /* Keep the original source location on the first 'if'. */
3960 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3961 append_to_statement_list (t, &expr);
3963 /* Set the source location of the || on the second 'if'. */
3964 new_locus = rexpr_location (pred, locus);
3965 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3966 new_locus);
3967 append_to_statement_list (t, &expr);
3969 else if (TREE_CODE (pred) == COND_EXPR
3970 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3971 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3973 location_t new_locus;
3975 /* As long as we're messing with gotos, turn if (a ? b : c) into
3976 if (a)
3977 if (b) goto yes; else goto no;
3978 else
3979 if (c) goto yes; else goto no;
3981 Don't do this if one of the arms has void type, which can happen
3982 in C++ when the arm is throw. */
3984 /* Keep the original source location on the first 'if'. Set the source
3985 location of the ? on the second 'if'. */
3986 new_locus = rexpr_location (pred, locus);
3987 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3988 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3989 false_label_p, locus),
3990 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3991 false_label_p, new_locus));
3993 else
3995 expr = build3 (COND_EXPR, void_type_node, pred,
3996 build_and_jump (true_label_p),
3997 build_and_jump (false_label_p));
3998 SET_EXPR_LOCATION (expr, locus);
4001 if (local_label)
4003 t = build1 (LABEL_EXPR, void_type_node, local_label);
4004 append_to_statement_list (t, &expr);
4007 return expr;
4010 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4011 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4012 statement, if it is the last one. Otherwise, return NULL. */
4014 static tree
4015 find_goto (tree expr)
4017 if (!expr)
4018 return NULL_TREE;
4020 if (TREE_CODE (expr) == GOTO_EXPR)
4021 return expr;
4023 if (TREE_CODE (expr) != STATEMENT_LIST)
4024 return NULL_TREE;
4026 tree_stmt_iterator i = tsi_start (expr);
4028 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4029 tsi_next (&i);
4031 if (!tsi_one_before_end_p (i))
4032 return NULL_TREE;
4034 return find_goto (tsi_stmt (i));
4037 /* Same as find_goto, except that it returns NULL if the destination
4038 is not a LABEL_DECL. */
4040 static inline tree
4041 find_goto_label (tree expr)
4043 tree dest = find_goto (expr);
4044 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4045 return dest;
4046 return NULL_TREE;
4049 /* Given a conditional expression EXPR with short-circuit boolean
4050 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4051 predicate apart into the equivalent sequence of conditionals. */
4053 static tree
4054 shortcut_cond_expr (tree expr)
4056 tree pred = TREE_OPERAND (expr, 0);
4057 tree then_ = TREE_OPERAND (expr, 1);
4058 tree else_ = TREE_OPERAND (expr, 2);
4059 tree true_label, false_label, end_label, t;
4060 tree *true_label_p;
4061 tree *false_label_p;
4062 bool emit_end, emit_false, jump_over_else;
4063 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4064 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4066 /* First do simple transformations. */
4067 if (!else_se)
4069 /* If there is no 'else', turn
4070 if (a && b) then c
4071 into
4072 if (a) if (b) then c. */
4073 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4075 /* Keep the original source location on the first 'if'. */
4076 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4077 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4078 /* Set the source location of the && on the second 'if'. */
4079 if (rexpr_has_location (pred))
4080 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4081 then_ = shortcut_cond_expr (expr);
4082 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4083 pred = TREE_OPERAND (pred, 0);
4084 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4085 SET_EXPR_LOCATION (expr, locus);
4089 if (!then_se)
4091 /* If there is no 'then', turn
4092 if (a || b); else d
4093 into
4094 if (a); else if (b); else d. */
4095 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4097 /* Keep the original source location on the first 'if'. */
4098 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4099 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4100 /* Set the source location of the || on the second 'if'. */
4101 if (rexpr_has_location (pred))
4102 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4103 else_ = shortcut_cond_expr (expr);
4104 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4105 pred = TREE_OPERAND (pred, 0);
4106 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4107 SET_EXPR_LOCATION (expr, locus);
4111 /* If we're done, great. */
4112 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4113 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4114 return expr;
4116 /* Otherwise we need to mess with gotos. Change
4117 if (a) c; else d;
4118 to
4119 if (a); else goto no;
4120 c; goto end;
4121 no: d; end:
4122 and recursively gimplify the condition. */
4124 true_label = false_label = end_label = NULL_TREE;
4126 /* If our arms just jump somewhere, hijack those labels so we don't
4127 generate jumps to jumps. */
4129 if (tree then_goto = find_goto_label (then_))
4131 true_label = GOTO_DESTINATION (then_goto);
4132 then_ = NULL;
4133 then_se = false;
4136 if (tree else_goto = find_goto_label (else_))
4138 false_label = GOTO_DESTINATION (else_goto);
4139 else_ = NULL;
4140 else_se = false;
4143 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4144 if (true_label)
4145 true_label_p = &true_label;
4146 else
4147 true_label_p = NULL;
4149 /* The 'else' branch also needs a label if it contains interesting code. */
4150 if (false_label || else_se)
4151 false_label_p = &false_label;
4152 else
4153 false_label_p = NULL;
4155 /* If there was nothing else in our arms, just forward the label(s). */
4156 if (!then_se && !else_se)
4157 return shortcut_cond_r (pred, true_label_p, false_label_p,
4158 EXPR_LOC_OR_LOC (expr, input_location));
4160 /* If our last subexpression already has a terminal label, reuse it. */
4161 if (else_se)
4162 t = expr_last (else_);
4163 else if (then_se)
4164 t = expr_last (then_);
4165 else
4166 t = NULL;
4167 if (t && TREE_CODE (t) == LABEL_EXPR)
4168 end_label = LABEL_EXPR_LABEL (t);
4170 /* If we don't care about jumping to the 'else' branch, jump to the end
4171 if the condition is false. */
4172 if (!false_label_p)
4173 false_label_p = &end_label;
4175 /* We only want to emit these labels if we aren't hijacking them. */
4176 emit_end = (end_label == NULL_TREE);
4177 emit_false = (false_label == NULL_TREE);
4179 /* We only emit the jump over the else clause if we have to--if the
4180 then clause may fall through. Otherwise we can wind up with a
4181 useless jump and a useless label at the end of gimplified code,
4182 which will cause us to think that this conditional as a whole
4183 falls through even if it doesn't. If we then inline a function
4184 which ends with such a condition, that can cause us to issue an
4185 inappropriate warning about control reaching the end of a
4186 non-void function. */
4187 jump_over_else = block_may_fallthru (then_);
4189 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4190 EXPR_LOC_OR_LOC (expr, input_location));
4192 expr = NULL;
4193 append_to_statement_list (pred, &expr);
4195 append_to_statement_list (then_, &expr);
4196 if (else_se)
4198 if (jump_over_else)
4200 tree last = expr_last (expr);
4201 t = build_and_jump (&end_label);
4202 if (rexpr_has_location (last))
4203 SET_EXPR_LOCATION (t, rexpr_location (last));
4204 append_to_statement_list (t, &expr);
4206 if (emit_false)
4208 t = build1 (LABEL_EXPR, void_type_node, false_label);
4209 append_to_statement_list (t, &expr);
4211 append_to_statement_list (else_, &expr);
4213 if (emit_end && end_label)
4215 t = build1 (LABEL_EXPR, void_type_node, end_label);
4216 append_to_statement_list (t, &expr);
4219 return expr;
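/* A worked example of the whole transformation, for illustration:

     if (a && b) c (); else d ();

   becomes, roughly,

     if (a) ; else goto no;
     if (b) ; else goto no;
     c ();
     goto end;
     no: d ();
     end:

   with the labels being hijacked from existing GOTO_EXPRs in the arms
   whenever possible, so we do not emit jumps to jumps.  */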
4222 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4224 tree
4225 gimple_boolify (tree expr)
4227 tree type = TREE_TYPE (expr);
4228 location_t loc = EXPR_LOCATION (expr);
4230 if (TREE_CODE (expr) == NE_EXPR
4231 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4232 && integer_zerop (TREE_OPERAND (expr, 1)))
4234 tree call = TREE_OPERAND (expr, 0);
4235 tree fn = get_callee_fndecl (call);
4237 /* For __builtin_expect ((long) (x), y) recurse into x as well
4238 if x is truth_value_p. */
4239 if (fn
4240 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4241 && call_expr_nargs (call) == 2)
4243 tree arg = CALL_EXPR_ARG (call, 0);
4244 if (arg)
4246 if (TREE_CODE (arg) == NOP_EXPR
4247 && TREE_TYPE (arg) == TREE_TYPE (call))
4248 arg = TREE_OPERAND (arg, 0);
4249 if (truth_value_p (TREE_CODE (arg)))
4251 arg = gimple_boolify (arg);
4252 CALL_EXPR_ARG (call, 0)
4253 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4259 switch (TREE_CODE (expr))
4261 case TRUTH_AND_EXPR:
4262 case TRUTH_OR_EXPR:
4263 case TRUTH_XOR_EXPR:
4264 case TRUTH_ANDIF_EXPR:
4265 case TRUTH_ORIF_EXPR:
4266 /* Also boolify the arguments of truth exprs. */
4267 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4268 /* FALLTHRU */
4270 case TRUTH_NOT_EXPR:
4271 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4273 /* These expressions always produce boolean results. */
4274 if (TREE_CODE (type) != BOOLEAN_TYPE)
4275 TREE_TYPE (expr) = boolean_type_node;
4276 return expr;
4278 case ANNOTATE_EXPR:
4279 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4281 case annot_expr_ivdep_kind:
4282 case annot_expr_unroll_kind:
4283 case annot_expr_no_vector_kind:
4284 case annot_expr_vector_kind:
4285 case annot_expr_parallel_kind:
4286 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4287 if (TREE_CODE (type) != BOOLEAN_TYPE)
4288 TREE_TYPE (expr) = boolean_type_node;
4289 return expr;
4290 default:
4291 gcc_unreachable ();
4294 default:
4295 if (COMPARISON_CLASS_P (expr))
4297 /* These expressions always produce boolean results. */
4298 if (TREE_CODE (type) != BOOLEAN_TYPE)
4299 TREE_TYPE (expr) = boolean_type_node;
4300 return expr;
4302 /* Other expressions that get here must have boolean values, but
4303 might need to be converted to the appropriate mode. */
4304 if (TREE_CODE (type) == BOOLEAN_TYPE)
4305 return expr;
4306 return fold_convert_loc (loc, boolean_type_node, expr);
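/* For instance, the C controlling expression in `if (x & 4)' has
   integer type; gimple_boolify wraps it in a conversion to
   boolean_type_node, which later gimplifies into an explicit
   comparison along the lines of

     _1 = x & 4;
     _2 = _1 != 0;

   while for __builtin_expect ((long) (a && b), 1) the truth expression
   behind the cast is boolified in place by the special case above.  */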
4310 /* Given a conditional expression *EXPR_P without side effects, gimplify
4311 its operands. New statements are inserted to PRE_P. */
4313 static enum gimplify_status
4314 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4316 tree expr = *expr_p, cond;
4317 enum gimplify_status ret, tret;
4318 enum tree_code code;
4320 cond = gimple_boolify (COND_EXPR_COND (expr));
4322 /* We need to handle && and || specially, as their gimplification
4323 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4324 code = TREE_CODE (cond);
4325 if (code == TRUTH_ANDIF_EXPR)
4326 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4327 else if (code == TRUTH_ORIF_EXPR)
4328 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4329 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4330 COND_EXPR_COND (*expr_p) = cond;
4332 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4333 is_gimple_val, fb_rvalue);
4334 ret = MIN (ret, tret);
4335 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4336 is_gimple_val, fb_rvalue);
4338 return MIN (ret, tret);
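/* Sketch: when allow_rhs_cond_expr is set, an expression such as

     x = p ? a : b;

   whose arms have no side effects and cannot trap is kept as a flat
   COND_EXPR over gimple values instead of being expanded into control
   flow, and a short-circuit condition like `p && q' is first demoted
   to the eager TRUTH_AND_EXPR so that gimplifying it cannot recreate a
   pure COND_EXPR and cycle forever.  */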
4341 /* Return true if evaluating EXPR could trap.
4342 EXPR is GENERIC, while tree_could_trap_p can be called
4343 only on GIMPLE. */
4345 bool
4346 generic_expr_could_trap_p (tree expr)
4348 unsigned i, n;
4350 if (!expr || is_gimple_val (expr))
4351 return false;
4353 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4354 return true;
4356 n = TREE_OPERAND_LENGTH (expr);
4357 for (i = 0; i < n; i++)
4358 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4359 return true;
4361 return false;
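/* E.g. the GENERIC expression a[i] / b is reported as potentially
   trapping: the array reference may be out of bounds and the division
   may be by zero; a bare constant or register value never traps and
   stops the recursion immediately.  */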
4364 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4365 into
4367 if (p)                  if (p)
4368   t1 = a;                 a;
4369 else          or        else
4370   t1 = b;                 b;
4371 t1;
4373 The second form is used when *EXPR_P is of type void.
4375 PRE_P points to the list where side effects that must happen before
4376 *EXPR_P should be stored. */
4378 static enum gimplify_status
4379 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4381 tree expr = *expr_p;
4382 tree type = TREE_TYPE (expr);
4383 location_t loc = EXPR_LOCATION (expr);
4384 tree tmp, arm1, arm2;
4385 enum gimplify_status ret;
4386 tree label_true, label_false, label_cont;
4387 bool have_then_clause_p, have_else_clause_p;
4388 gcond *cond_stmt;
4389 enum tree_code pred_code;
4390 gimple_seq seq = NULL;
4392 /* If this COND_EXPR has a value, copy the values into a temporary within
4393 the arms. */
4394 if (!VOID_TYPE_P (type))
4396 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4397 tree result;
4399 /* If either an rvalue is ok or we do not require an lvalue, create the
4400 temporary. But we cannot do that if the type is addressable. */
4401 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4402 && !TREE_ADDRESSABLE (type))
4404 if (gimplify_ctxp->allow_rhs_cond_expr
4405 /* If either branch has side effects or could trap, it can't be
4406 evaluated unconditionally. */
4407 && !TREE_SIDE_EFFECTS (then_)
4408 && !generic_expr_could_trap_p (then_)
4409 && !TREE_SIDE_EFFECTS (else_)
4410 && !generic_expr_could_trap_p (else_))
4411 return gimplify_pure_cond_expr (expr_p, pre_p);
4413 tmp = create_tmp_var (type, "iftmp");
4414 result = tmp;
4417 /* Otherwise, only create and copy references to the values. */
4418 else
4420 type = build_pointer_type (type);
4422 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4423 then_ = build_fold_addr_expr_loc (loc, then_);
4425 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4426 else_ = build_fold_addr_expr_loc (loc, else_);
4428 expr
4429 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4431 tmp = create_tmp_var (type, "iftmp");
4432 result = build_simple_mem_ref_loc (loc, tmp);
4435 /* Build the new then clause, `tmp = then_;'. But don't build the
4436 assignment if the value is void; in C++ it can be void when the arm is a throw. */
4437 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4438 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4440 /* Similarly, build the new else clause, `tmp = else_;'. */
4441 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4442 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4444 TREE_TYPE (expr) = void_type_node;
4445 recalculate_side_effects (expr);
4447 /* Move the COND_EXPR to the prequeue. */
4448 gimplify_stmt (&expr, pre_p);
4450 *expr_p = result;
4451 return GS_ALL_DONE;
4454 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4455 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4456 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4457 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4459 /* Make sure the condition has BOOLEAN_TYPE. */
4460 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4462 /* Break apart && and || conditions. */
4463 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4464 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4466 expr = shortcut_cond_expr (expr);
4468 if (expr != *expr_p)
4470 *expr_p = expr;
4472 /* We can't rely on gimplify_expr to re-gimplify the expanded
4473 form properly, as cleanups might cause the target labels to be
4474 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4475 set up a conditional context. */
4476 gimple_push_condition ();
4477 gimplify_stmt (expr_p, &seq);
4478 gimple_pop_condition (pre_p);
4479 gimple_seq_add_seq (pre_p, seq);
4481 return GS_ALL_DONE;
4485 /* Now do the normal gimplification. */
4487 /* Gimplify condition. */
4488 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4489 is_gimple_condexpr_for_cond, fb_rvalue);
4490 if (ret == GS_ERROR)
4491 return GS_ERROR;
4492 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4494 gimple_push_condition ();
4496 have_then_clause_p = have_else_clause_p = false;
4497 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4498 if (label_true
4499 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4500 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4501 have different locations, otherwise we end up with incorrect
4502 location information on the branches. */
4503 && (optimize
4504 || !EXPR_HAS_LOCATION (expr)
4505 || !rexpr_has_location (label_true)
4506 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4508 have_then_clause_p = true;
4509 label_true = GOTO_DESTINATION (label_true);
4511 else
4512 label_true = create_artificial_label (UNKNOWN_LOCATION);
4513 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4514 if (label_false
4515 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4516 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4517 have different locations, otherwise we end up with incorrect
4518 location information on the branches. */
4519 && (optimize
4520 || !EXPR_HAS_LOCATION (expr)
4521 || !rexpr_has_location (label_false)
4522 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4524 have_else_clause_p = true;
4525 label_false = GOTO_DESTINATION (label_false);
4527 else
4528 label_false = create_artificial_label (UNKNOWN_LOCATION);
4530 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4531 &arm2);
4532 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4533 label_false);
4534 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4535 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4536 gimplify_seq_add_stmt (&seq, cond_stmt);
4537 gimple_stmt_iterator gsi = gsi_last (seq);
4538 maybe_fold_stmt (&gsi);
4540 label_cont = NULL_TREE;
4541 if (!have_then_clause_p)
4543 /* For if (...) {} else { code; } put label_true after
4544 the else block. */
4545 if (TREE_OPERAND (expr, 1) == NULL_TREE
4546 && !have_else_clause_p
4547 && TREE_OPERAND (expr, 2) != NULL_TREE)
4549 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4550 handling that label_cont == label_true can only be reached
4551 through fallthrough from { code; }. */
4552 if (integer_zerop (COND_EXPR_COND (expr)))
4553 UNUSED_LABEL_P (label_true) = 1;
4554 label_cont = label_true;
4556 else
4558 bool then_side_effects
4559 = (TREE_OPERAND (expr, 1)
4560 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4561 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4562 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4563 /* For if (...) { code; } else {} or
4564 if (...) { code; } else goto label; or
4565 if (...) { code; return; } else { ... }
4566 label_cont isn't needed. */
4567 if (!have_else_clause_p
4568 && TREE_OPERAND (expr, 2) != NULL_TREE
4569 && gimple_seq_may_fallthru (seq))
4571 gimple *g;
4572 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4574 /* For if (0) { non-side-effect-code } else { code }
4575 tell -Wimplicit-fallthrough handling that label_cont can
4576 only be reached through fallthrough from { code }. */
4577 if (integer_zerop (COND_EXPR_COND (expr)))
4579 UNUSED_LABEL_P (label_true) = 1;
4580 if (!then_side_effects)
4581 UNUSED_LABEL_P (label_cont) = 1;
4584 g = gimple_build_goto (label_cont);
4586 /* GIMPLE_COND's are very low level; they have embedded
4587 gotos. This particular embedded goto should not be marked
4588 with the location of the original COND_EXPR, as it would
4589 correspond to the COND_EXPR's condition, not the ELSE or the
4590 THEN arms. To avoid marking it with the wrong location, flag
4591 it as "no location". */
4592 gimple_set_do_not_emit_location (g);
4594 gimplify_seq_add_stmt (&seq, g);
4598 if (!have_else_clause_p)
4600 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4601 tell -Wimplicit-fallthrough handling that label_false can only be
4602 reached through fallthrough from { code }. */
4603 if (integer_nonzerop (COND_EXPR_COND (expr))
4604 && (TREE_OPERAND (expr, 2) == NULL_TREE
4605 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4606 UNUSED_LABEL_P (label_false) = 1;
4607 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4608 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4610 if (label_cont)
4611 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4613 gimple_pop_condition (pre_p);
4614 gimple_seq_add_seq (pre_p, seq);
4616 if (ret == GS_ERROR)
4617 ; /* Do nothing. */
4618 else if (have_then_clause_p || have_else_clause_p)
4619 ret = GS_ALL_DONE;
4620 else
4622 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4623 expr = TREE_OPERAND (expr, 0);
4624 gimplify_stmt (&expr, pre_p);
4627 *expr_p = NULL;
4628 return ret;
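/* End-to-end sketch: after shortcut_cond_expr has split the
   short-circuit operators, a statement like

     if (p && q) f (); else g ();

   is lowered to GIMPLE along the lines of

     if (p != 0) goto <D.1>; else goto <D.3>;
     <D.1>:
     if (q != 0) goto <D.2>; else goto <D.3>;
     <D.2>:
     f ();
     goto <D.4>;
     <D.3>:
     g ();
     <D.4>:

   with the artificial labels produced by create_artificial_label.  */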
4631 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4632 to be marked addressable.
4634 We cannot rely on such an expression being directly markable if a temporary
4635 has been created by the gimplification. In this case, we create another
4636 temporary and initialize it with a copy, which will become a store after we
4637 mark it addressable. This can happen if the front-end passed us something
4638 that it could not mark addressable yet, such as the conversion
4639 (int) floatvar passed as a Fortran pass-by-reference parameter. */
4641 static void
4642 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4644 while (handled_component_p (*expr_p))
4645 expr_p = &TREE_OPERAND (*expr_p, 0);
4647 /* Do not allow an SSA name as the temporary. */
4648 if (is_gimple_reg (*expr_p))
4649 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4652 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4653 a call to __builtin_memcpy. */
4655 static enum gimplify_status
4656 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4657 gimple_seq *seq_p)
4659 tree t, to, to_ptr, from, from_ptr;
4660 gcall *gs;
4661 location_t loc = EXPR_LOCATION (*expr_p);
4663 to = TREE_OPERAND (*expr_p, 0);
4664 from = TREE_OPERAND (*expr_p, 1);
4666 /* Mark the RHS addressable. Beware that it may not be possible to do so
4667 directly if a temporary has been created by the gimplification. */
4668 prepare_gimple_addressable (&from, seq_p);
4670 mark_addressable (from);
4671 from_ptr = build_fold_addr_expr_loc (loc, from);
4672 gimplify_arg (&from_ptr, seq_p, loc);
4674 mark_addressable (to);
4675 to_ptr = build_fold_addr_expr_loc (loc, to);
4676 gimplify_arg (&to_ptr, seq_p, loc);
4678 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4680 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4681 gimple_call_set_alloca_for_var (gs, true);
4683 if (want_value)
4685 /* tmp = memcpy() */
4686 t = create_tmp_var (TREE_TYPE (to_ptr));
4687 gimple_call_set_lhs (gs, t);
4688 gimplify_seq_add_stmt (seq_p, gs);
4690 *expr_p = build_simple_mem_ref (t);
4691 return GS_ALL_DONE;
4694 gimplify_seq_add_stmt (seq_p, gs);
4695 *expr_p = NULL;
4696 return GS_ALL_DONE;
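/* Sketch: the object copied here has a size SIZE known only at run
   time (think of an Ada or Fortran array with dynamic bounds), so the
   assignment is emitted roughly as

     _1 = &from;
     _2 = &to;
     __builtin_memcpy (_2, _1, size);

   and when want_value is set, the memcpy result (which equals the
   destination address) is stored in a temporary whose dereference
   replaces the original expression.  */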
4699 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4700 a call to __builtin_memset. In this case we know that the RHS is
4701 a CONSTRUCTOR with an empty element list. */
4703 static enum gimplify_status
4704 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4705 gimple_seq *seq_p)
4707 tree t, from, to, to_ptr;
4708 gcall *gs;
4709 location_t loc = EXPR_LOCATION (*expr_p);
4711 /* Assert our assumptions, to abort instead of producing wrong code
4712 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4713 not be immediately exposed. */
4714 from = TREE_OPERAND (*expr_p, 1);
4715 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4716 from = TREE_OPERAND (from, 0);
4718 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4719 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4721 /* Now proceed. */
4722 to = TREE_OPERAND (*expr_p, 0);
4724 to_ptr = build_fold_addr_expr_loc (loc, to);
4725 gimplify_arg (&to_ptr, seq_p, loc);
4726 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4728 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4730 if (want_value)
4732 /* tmp = memset() */
4733 t = create_tmp_var (TREE_TYPE (to_ptr));
4734 gimple_call_set_lhs (gs, t);
4735 gimplify_seq_add_stmt (seq_p, gs);
4737 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4738 return GS_ALL_DONE;
4741 gimplify_seq_add_stmt (seq_p, gs);
4742 *expr_p = NULL;
4743 return GS_ALL_DONE;
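/* Sketch: the RHS here is an empty CONSTRUCTOR, i.e. plain
   zero-initialization, so an assignment clearing object TO of SIZE
   bytes becomes roughly

     __builtin_memset (&to, 0, size);

   and, as in the memcpy case, a requested value is recovered by
   dereferencing the returned pointer captured in a temporary.  */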
4746 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4747 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4748 assignment. Return non-null if we detect a potential overlap. */
4750 struct gimplify_init_ctor_preeval_data
4752 /* The base decl of the lhs object. May be NULL, in which case we
4753 have to assume the lhs is indirect. */
4754 tree lhs_base_decl;
4756 /* The alias set of the lhs object. */
4757 alias_set_type lhs_alias_set;
4760 static tree
4761 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4763 struct gimplify_init_ctor_preeval_data *data
4764 = (struct gimplify_init_ctor_preeval_data *) xdata;
4765 tree t = *tp;
4767 /* If we find the base object, obviously we have overlap. */
4768 if (data->lhs_base_decl == t)
4769 return t;
4771 /* If the constructor component is indirect, determine if we have a
4772 potential overlap with the lhs. The only bits of information we
4773 have to go on at this point are addressability and alias sets. */
4774 if ((INDIRECT_REF_P (t)
4775 || TREE_CODE (t) == MEM_REF)
4776 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4777 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4778 return t;
4780 /* If the constructor component is a call, determine if it can hide a
4781 potential overlap with the lhs through an INDIRECT_REF like above.
4782 ??? Ugh - this is completely broken. In fact this whole analysis
4783 doesn't look conservative. */
4784 if (TREE_CODE (t) == CALL_EXPR)
4786 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4788 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4789 if (POINTER_TYPE_P (TREE_VALUE (type))
4790 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4791 && alias_sets_conflict_p (data->lhs_alias_set,
4792 get_alias_set
4793 (TREE_TYPE (TREE_VALUE (type)))))
4794 return t;
4797 if (IS_TYPE_OR_DECL_P (t))
4798 *walk_subtrees = 0;
4799 return NULL;
4802 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4803 force values that overlap with the lhs (as described by *DATA)
4804 into temporaries. */
4806 static void
4807 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4808 struct gimplify_init_ctor_preeval_data *data)
4810 enum gimplify_status one;
4812 /* If the value is constant, then there's nothing to pre-evaluate. */
4813 if (TREE_CONSTANT (*expr_p))
4815 /* Ensure it does not have side effects, it might contain a reference to
4816 the object we're initializing. */
4817 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4818 return;
4821 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4822 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4823 return;
4825 /* Recurse for nested constructors. */
4826 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4828 unsigned HOST_WIDE_INT ix;
4829 constructor_elt *ce;
4830 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4832 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4833 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4835 return;
4838 /* If this is a variable sized type, we must remember the size. */
4839 maybe_with_size_expr (expr_p);
4841 /* Gimplify the constructor element to something appropriate for the rhs
4842 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4843 the gimplifier will consider this a store to memory. Doing this
4844 gimplification now means that we won't have to deal with complicated
4845 language-specific trees, nor trees like SAVE_EXPR that can induce
4846 exponential search behavior. */
4847 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4848 if (one == GS_ERROR)
4850 *expr_p = NULL;
4851 return;
4854 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4855 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4856 always be true for all scalars, since is_gimple_mem_rhs insists on a
4857 temporary variable for them. */
4858 if (DECL_P (*expr_p))
4859 return;
4861 /* If this is of variable size, we have no choice but to assume it doesn't
4862 overlap since we can't make a temporary for it. */
4863 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4864 return;
4866 /* Otherwise, we must search for overlap ... */
4867 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4868 return;
4870 /* ... and if found, force the value into a temporary. */
4871 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
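/* Sketch of why this matters: in

     struct S { int x, y; } a = { 1, 2 };
     ...
     a = (struct S) { a.y, a.x };

   the reads of a.y and a.x overlap the stores into a that the
   element-wise expansion will emit, so the walk above detects the
   overlap and the values are captured first:

     t1 = a.y;
     t2 = a.x;
     a.x = t1;
     a.y = t2;  */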
4874 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4875 a RANGE_EXPR in a CONSTRUCTOR for an array.
4877 var = lower;
4878 loop_entry:
4879 object[var] = value;
4880 if (var == upper)
4881 goto loop_exit;
4882 var = var + 1;
4883 goto loop_entry;
4884 loop_exit:
4886 We increment var _after_ the loop exit check because we might otherwise
4887 fail if upper == TYPE_MAX_VALUE (the maximum value of upper's type).
4889 Note that we never have to deal with SAVE_EXPRs here, because this has
4890 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4892 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4893 gimple_seq *, bool);
4895 static void
4896 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4897 tree value, tree array_elt_type,
4898 gimple_seq *pre_p, bool cleared)
4900 tree loop_entry_label, loop_exit_label, fall_thru_label;
4901 tree var, var_type, cref, tmp;
4903 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4904 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4905 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4907 /* Create and initialize the index variable. */
4908 var_type = TREE_TYPE (upper);
4909 var = create_tmp_var (var_type);
4910 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4912 /* Add the loop entry label. */
4913 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4915 /* Build the reference. */
4916 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4917 var, NULL_TREE, NULL_TREE);
4919 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4920 the store. Otherwise just assign value to the reference. */
4922 if (TREE_CODE (value) == CONSTRUCTOR)
4923 /* NB we might have to call ourself recursively through
4924 gimplify_init_ctor_eval if the value is a constructor. */
4925 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4926 pre_p, cleared);
4927 else
4929 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4930 != GS_ERROR)
4931 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4934 /* We exit the loop when the index var is equal to the upper bound. */
4935 gimplify_seq_add_stmt (pre_p,
4936 gimple_build_cond (EQ_EXPR, var, upper,
4937 loop_exit_label, fall_thru_label));
4939 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4941 /* Otherwise, increment the index var... */
4942 tmp = build2 (PLUS_EXPR, var_type, var,
4943 fold_convert (var_type, integer_one_node));
4944 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4946 /* ...and jump back to the loop entry. */
4947 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4949 /* Add the loop exit label. */
4950 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
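/* Sketch: this is the path for GNU C designated range initializers
   such as

     int a[10] = { [2 ... 7] = v };

   which (when not representable as a constant) emits the loop
   documented above with lower == 2 and upper == 7, storing v into
   a[var] on each iteration and testing var == upper before the
   increment so that an upper bound equal to the index type's maximum
   value cannot overflow.  */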
4953 /* A subroutine of gimplify_init_constructor. Generate individual
4954 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4955 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4956 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4957 zeroed first. */
4959 static void
4960 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4961 gimple_seq *pre_p, bool cleared)
4963 tree array_elt_type = NULL;
4964 unsigned HOST_WIDE_INT ix;
4965 tree purpose, value;
4967 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4968 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4970 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4972 tree cref;
4974 /* NULL values are created above for gimplification errors. */
4975 if (value == NULL)
4976 continue;
4978 if (cleared && initializer_zerop (value))
4979 continue;
4981 /* ??? Here's to hoping the front end fills in all of the indices,
4982 so we don't have to figure out what's missing ourselves. */
4983 gcc_assert (purpose);
4985 /* Skip zero-sized fields, unless value has side-effects. This can
4986 happen with calls to functions returning an empty type, which
4987 we shouldn't discard. As a number of downstream passes don't
4988 expect sets of empty type fields, we rely on the gimplification of
4989 the MODIFY_EXPR we make below to drop the assignment statement. */
4990 if (!TREE_SIDE_EFFECTS (value)
4991 && TREE_CODE (purpose) == FIELD_DECL
4992 && is_empty_type (TREE_TYPE (purpose)))
4993 continue;
4995 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4996 whole range. */
4997 if (TREE_CODE (purpose) == RANGE_EXPR)
4999 tree lower = TREE_OPERAND (purpose, 0);
5000 tree upper = TREE_OPERAND (purpose, 1);
5002 /* If the lower bound is equal to upper, just treat it as if
5003 upper was the index. */
5004 if (simple_cst_equal (lower, upper))
5005 purpose = upper;
5006 else
5008 gimplify_init_ctor_eval_range (object, lower, upper, value,
5009 array_elt_type, pre_p, cleared);
5010 continue;
5014 if (array_elt_type)
5016 /* Do not use bitsizetype for ARRAY_REF indices. */
5017 if (TYPE_DOMAIN (TREE_TYPE (object)))
5018 purpose
5019 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5020 purpose);
5021 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5022 purpose, NULL_TREE, NULL_TREE);
5024 else
5026 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5027 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5028 unshare_expr (object), purpose, NULL_TREE);
5031 if (TREE_CODE (value) == CONSTRUCTOR
5032 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5033 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5034 pre_p, cleared);
5035 else
5037 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5038 gimplify_and_add (init, pre_p);
5039 ggc_free (init);
5044 /* Return the appropriate RHS predicate for this LHS. */
5046 gimple_predicate
5047 rhs_predicate_for (tree lhs)
5049 if (is_gimple_reg (lhs))
5050 return is_gimple_reg_rhs_or_call;
5051 else
5052 return is_gimple_mem_rhs_or_call;
5055 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5056 before the LHS has been gimplified. */
5058 static gimple_predicate
5059 initial_rhs_predicate_for (tree lhs)
5061 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5062 return is_gimple_reg_rhs_or_call;
5063 else
5064 return is_gimple_mem_rhs_or_call;
5067 /* Gimplify a C99 compound literal expression. This just means adding
5068 the DECL_EXPR before the current statement and using its anonymous
5069 decl instead. */
5071 static enum gimplify_status
5072 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5073 bool (*gimple_test_f) (tree),
5074 fallback_t fallback)
5076 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5077 tree decl = DECL_EXPR_DECL (decl_s);
5078 tree init = DECL_INITIAL (decl);
5079 /* Mark the decl as addressable if the compound literal
5080 expression is addressable now, otherwise it is marked too late
5081 after we gimplify the initialization expression. */
5082 if (TREE_ADDRESSABLE (*expr_p))
5083 TREE_ADDRESSABLE (decl) = 1;
5084 /* Otherwise, if we don't need an lvalue and have a literal, directly
5085 substitute it. Check if it matches the gimple predicate, as
5086 otherwise we'd generate a new temporary, and we can as well just
5087 use the decl we already have. */
5088 else if (!TREE_ADDRESSABLE (decl)
5089 && !TREE_THIS_VOLATILE (decl)
5090 && init
5091 && (fallback & fb_lvalue) == 0
5092 && gimple_test_f (init))
5094 *expr_p = init;
5095 return GS_OK;
5098 /* If the decl is not addressable, then it is being used in some
5099 expression or on the right hand side of a statement, and it can
5100 be put into a readonly data section. */
5101 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5102 TREE_READONLY (decl) = 1;
5104 /* This decl isn't mentioned in the enclosing block, so add it to the
5105 list of temps. FIXME it seems a bit of a kludge to say that
5106 anonymous artificial vars aren't pushed, but everything else is. */
5107 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5108 gimple_add_tmp_var (decl);
5110 gimplify_and_add (decl_s, pre_p);
5111 *expr_p = decl;
5112 return GS_OK;
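/* Sketch: for a C99 compound literal used as a plain rvalue,

     struct S { int a, b; };
     struct S s = (struct S) { 1, 2 };

   the anonymous decl's DECL_EXPR is emitted ahead of the statement and
   *EXPR_P becomes the decl itself; when the initializer already
   satisfies the caller's predicate, as a constant constructor may, it
   is substituted directly and no temporary decl survives.  */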
5115 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5116 return a new CONSTRUCTOR if something changed. */
5118 static tree
5119 optimize_compound_literals_in_ctor (tree orig_ctor)
5121 tree ctor = orig_ctor;
5122 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5123 unsigned int idx, num = vec_safe_length (elts);
5125 for (idx = 0; idx < num; idx++)
5127 tree value = (*elts)[idx].value;
5128 tree newval = value;
5129 if (TREE_CODE (value) == CONSTRUCTOR)
5130 newval = optimize_compound_literals_in_ctor (value);
5131 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5133 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5134 tree decl = DECL_EXPR_DECL (decl_s);
5135 tree init = DECL_INITIAL (decl);
5137 if (!TREE_ADDRESSABLE (value)
5138 && !TREE_ADDRESSABLE (decl)
5139 && init
5140 && TREE_CODE (init) == CONSTRUCTOR)
5141 newval = optimize_compound_literals_in_ctor (init);
5143 if (newval == value)
5144 continue;
5146 if (ctor == orig_ctor)
5148 ctor = copy_node (orig_ctor);
5149 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5150 elts = CONSTRUCTOR_ELTS (ctor);
5152 (*elts)[idx].value = newval;
5154 return ctor;
5157 /* A subroutine of gimplify_modify_expr. Break out elements of a
5158 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5160 Note that we still need to clear any elements that don't have explicit
5161 initializers, so if not all elements are initialized we keep the
5162 original MODIFY_EXPR, we just remove all of the constructor elements.
5164 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5165 GS_ERROR if we would have to create a temporary when gimplifying
5166 this constructor. Otherwise, return GS_OK.
5168 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5170 static enum gimplify_status
5171 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5172 bool want_value, bool notify_temp_creation)
5174 tree object, ctor, type;
5175 enum gimplify_status ret;
5176 vec<constructor_elt, va_gc> *elts;
5177 bool cleared = false;
5178 bool is_empty_ctor = false;
5179 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5181 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5183 if (!notify_temp_creation)
5185 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5186 is_gimple_lvalue, fb_lvalue);
5187 if (ret == GS_ERROR)
5188 return ret;
5191 object = TREE_OPERAND (*expr_p, 0);
5192 ctor = TREE_OPERAND (*expr_p, 1)
5193 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5194 type = TREE_TYPE (ctor);
5195 elts = CONSTRUCTOR_ELTS (ctor);
5196 ret = GS_ALL_DONE;
5198 switch (TREE_CODE (type))
5200 case RECORD_TYPE:
5201 case UNION_TYPE:
5202 case QUAL_UNION_TYPE:
5203 case ARRAY_TYPE:
5205 /* Use readonly data for initializers of this or smaller size
5206 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5207 ratio. */
5208 const HOST_WIDE_INT min_unique_size = 64;
5209 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5210 is smaller than this, use readonly data. */
5211 const int unique_nonzero_ratio = 8;
5212 /* True if a single access of the object must be ensured. This is the
5213 case if the target is volatile, the type is non-addressable and more
5214 than one field needs to be assigned. */
5215 const bool ensure_single_access
5216 = TREE_THIS_VOLATILE (object)
5217 && !TREE_ADDRESSABLE (type)
5218 && vec_safe_length (elts) > 1;
5219 struct gimplify_init_ctor_preeval_data preeval_data;
5220 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5221 HOST_WIDE_INT num_unique_nonzero_elements;
5222 bool complete_p, valid_const_initializer;
5224 /* Aggregate types must lower constructors to initialization of
5225 individual elements. The exception is that a CONSTRUCTOR node
5226 with no elements indicates zero-initialization of the whole. */
5227 if (vec_safe_is_empty (elts))
5229 if (notify_temp_creation)
5230 return GS_OK;
5232 /* The var will be initialized and so appear on lhs of
5233 assignment, it can't be TREE_READONLY anymore. */
5234 if (VAR_P (object))
5235 TREE_READONLY (object) = 0;
5237 is_empty_ctor = true;
5238 break;
5241 /* Fetch information about the constructor to direct later processing.
5242 We might want to make static versions of it in various cases, and
5243 can only do so if it is known to be a valid constant initializer. */
5244 valid_const_initializer
5245 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5246 &num_unique_nonzero_elements,
5247 &num_ctor_elements, &complete_p);
5249 /* If a const aggregate variable is being initialized, then it
5250 should never be a loss to promote the variable to be static. */
5251 if (valid_const_initializer
5252 && num_nonzero_elements > 1
5253 && TREE_READONLY (object)
5254 && VAR_P (object)
5255 && !DECL_REGISTER (object)
5256 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
5257 /* For ctors that have many repeated nonzero elements
5258 represented through RANGE_EXPRs, prefer initializing
5259 those through runtime loops over copies of large amounts
5260 of data from readonly data section. */
5261 && (num_unique_nonzero_elements
5262 > num_nonzero_elements / unique_nonzero_ratio
5263 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5264 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5266 if (notify_temp_creation)
5267 return GS_ERROR;
5269 DECL_INITIAL (object) = ctor;
5270 TREE_STATIC (object) = 1;
5271 if (!DECL_NAME (object))
5272 DECL_NAME (object) = create_tmp_var_name ("C");
5273 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5275 /* ??? C++ doesn't automatically append a .<number> to the
5276 assembler name, and even when it does, it looks at FE private
5277 data structures to figure out what that number should be,
5278 which are not set for this variable. I suppose this is
5279 important for local statics for inline functions, which aren't
5280 "local" in the object file sense. So in order to get a unique
5281 TU-local symbol, we must invoke the lhd version now. */
5282 lhd_set_decl_assembler_name (object);
5284 *expr_p = NULL_TREE;
5285 break;
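/* Sketch: a local

     const int tbl[4] = { 1, 2, 3, 4 };

   lands here: the constructor becomes DECL_INITIAL of tbl, the decl is
   promoted to TREE_STATIC, and the runtime assignment vanishes, so the
   data is emitted once in a readonly section instead of being copied
   into the frame on every call.  */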
5288 /* The var will be initialized and so appear on lhs of
5289 assignment, it can't be TREE_READONLY anymore. */
5290 if (VAR_P (object) && !notify_temp_creation)
5291 TREE_READONLY (object) = 0;
5293 /* If there are "lots" of initialized elements, even discounting
5294 those that are not address constants (and thus *must* be
5295 computed at runtime), then partition the constructor into
5296 constant and non-constant parts. Block copy the constant
5297 parts in, then generate code for the non-constant parts. */
5298 /* TODO. There's code in cp/typeck.cc to do this. */
5300 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5301 /* store_constructor will ignore the clearing of variable-sized
5302 objects. Initializers for such objects must explicitly set
5303 every field that needs to be set. */
5304 cleared = false;
5305 else if (!complete_p)
5306 /* If the constructor isn't complete, clear the whole object
5307 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5309 ??? This ought not to be needed. For any element not present
5310 in the initializer, we should simply set them to zero. Except
5311 we'd need to *find* the elements that are not present, and that
5312 requires trickery to avoid quadratic compile-time behavior in
5313 large cases or excessive memory use in small cases. */
5314 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5315 else if (num_ctor_elements - num_nonzero_elements
5316 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5317 && num_nonzero_elements < num_ctor_elements / 4)
5318 /* If there are "lots" of zeros, it's more efficient to clear
5319 the memory and then set the nonzero elements. */
5320 cleared = true;
5321 else if (ensure_single_access && num_nonzero_elements == 0)
5322 /* If a single access to the target must be ensured and all elements
5323 are zero, then it's optimal to clear whatever their number. */
5324 cleared = true;
5325 else
5326 cleared = false;
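/* A worked instance of the heuristic: an initializer that is complete
   but mostly zero, say 100 elements of which 3 are nonzero, is cheaper
   to emit as one block clear followed by 3 stores, so cleared is set;
   an incomplete initializer is block-cleared simply to implement the
   implicit zeroing of the omitted elements.  */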
5328 /* If there are "lots" of initialized elements, and all of them
5329 are valid address constants, then the entire initializer can
5330 be dropped to memory, and then memcpy'd out. Don't do this
5331 for sparse arrays, though, as it's more efficient to follow
5332 the standard CONSTRUCTOR behavior of memset followed by
5333 individual element initialization. Also don't do this for small
5334 all-zero initializers (which aren't big enough to merit
5335 clearing), and don't try to make bitwise copies of
5336 TREE_ADDRESSABLE types. */
5337 if (valid_const_initializer
5338 && complete_p
5339 && !(cleared || num_nonzero_elements == 0)
5340 && !TREE_ADDRESSABLE (type))
5342 HOST_WIDE_INT size = int_size_in_bytes (type);
5343 unsigned int align;
5345 /* ??? We can still get unbounded array types, at least
5346 from the C++ front end. This seems wrong, but attempt
5347 to work around it for now. */
5348 if (size < 0)
5350 size = int_size_in_bytes (TREE_TYPE (object));
5351 if (size >= 0)
5352 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5355 /* Find the maximum alignment we can assume for the object. */
5356 /* ??? Make use of DECL_OFFSET_ALIGN. */
5357 if (DECL_P (object))
5358 align = DECL_ALIGN (object);
5359 else
5360 align = TYPE_ALIGN (type);
5362 /* Do a block move either if the size is so small as to make
5363 each individual move a sub-unit move on average, or if it
5364 is so large as to make individual moves inefficient. */
5365 if (size > 0
5366 && num_nonzero_elements > 1
5367 /* For ctors that have many repeated nonzero elements
5368 represented through RANGE_EXPRs, prefer initializing
5369 those through runtime loops over copies of large amounts
5370 of data from readonly data section. */
5371 && (num_unique_nonzero_elements
5372 > num_nonzero_elements / unique_nonzero_ratio
5373 || size <= min_unique_size)
5374 && (size < num_nonzero_elements
5375 || !can_move_by_pieces (size, align)))
5377 if (notify_temp_creation)
5378 return GS_ERROR;
5380 walk_tree (&ctor, force_labels_r, NULL, NULL);
5381 ctor = tree_output_constant_def (ctor);
5382 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5383 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5384 TREE_OPERAND (*expr_p, 1) = ctor;
5386 /* This is no longer an assignment of a CONSTRUCTOR, but
5387 we still may have processing to do on the LHS. So
5388 pretend we didn't do anything here to let that happen. */
5389 return GS_UNHANDLED;
5393 /* If a single access to the target must be ensured and there are
5394 nonzero elements or the zero elements are not assigned en masse,
5395 initialize the target from a temporary. */
5396 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5398 if (notify_temp_creation)
5399 return GS_ERROR;
5401 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5402 TREE_OPERAND (*expr_p, 0) = temp;
5403 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5404 *expr_p,
5405 build2 (MODIFY_EXPR, void_type_node,
5406 object, temp));
5407 return GS_OK;
5410 if (notify_temp_creation)
5411 return GS_OK;
5413 /* If there are nonzero elements and if needed, pre-evaluate to capture
5414 elements overlapping with the lhs into temporaries. We must do this
5415 before clearing to fetch the values before they are zeroed-out. */
5416 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5418 preeval_data.lhs_base_decl = get_base_address (object);
5419 if (!DECL_P (preeval_data.lhs_base_decl))
5420 preeval_data.lhs_base_decl = NULL;
5421 preeval_data.lhs_alias_set = get_alias_set (object);
5423 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5424 pre_p, post_p, &preeval_data);
5427 bool ctor_has_side_effects_p
5428 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5430 if (cleared)
5432 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5433 Note that we still have to gimplify, in order to handle the
5434 case of variable sized types. Avoid shared tree structures. */
5435 CONSTRUCTOR_ELTS (ctor) = NULL;
5436 TREE_SIDE_EFFECTS (ctor) = 0;
5437 object = unshare_expr (object);
5438 gimplify_stmt (expr_p, pre_p);
5441 /* If we have not block cleared the object, or if there are nonzero
5442 elements in the constructor, or if the constructor has side effects,
5443 add assignments to the individual scalar fields of the object. */
5444 if (!cleared
5445 || num_nonzero_elements > 0
5446 || ctor_has_side_effects_p)
5447 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5449 *expr_p = NULL_TREE;
5451 break;
5453 case COMPLEX_TYPE:
5455 tree r, i;
5457 if (notify_temp_creation)
5458 return GS_OK;
5460 /* Extract the real and imaginary parts out of the ctor. */
5461 gcc_assert (elts->length () == 2);
5462 r = (*elts)[0].value;
5463 i = (*elts)[1].value;
5464 if (r == NULL || i == NULL)
5466 tree zero = build_zero_cst (TREE_TYPE (type));
5467 if (r == NULL)
5468 r = zero;
5469 if (i == NULL)
5470 i = zero;
5473 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5474 represent creation of a complex value. */
5475 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5477 ctor = build_complex (type, r, i);
5478 TREE_OPERAND (*expr_p, 1) = ctor;
5480 else
5482 ctor = build2 (COMPLEX_EXPR, type, r, i);
5483 TREE_OPERAND (*expr_p, 1) = ctor;
5484 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5485 pre_p,
5486 post_p,
5487 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5488 fb_rvalue);
5491 break;
5493 case VECTOR_TYPE:
5495 unsigned HOST_WIDE_INT ix;
5496 constructor_elt *ce;
5498 if (notify_temp_creation)
5499 return GS_OK;
5501 /* Vector types use CONSTRUCTOR all the way through gimple
5502 compilation as a general initializer. */
5503 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5505 enum gimplify_status tret;
5506 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5507 fb_rvalue);
5508 if (tret == GS_ERROR)
5509 ret = GS_ERROR;
5510 else if (TREE_STATIC (ctor)
5511 && !initializer_constant_valid_p (ce->value,
5512 TREE_TYPE (ce->value)))
5513 TREE_STATIC (ctor) = 0;
5515 recompute_constructor_flags (ctor);
5517 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5518 if (TREE_CONSTANT (ctor))
5520 bool constant_p = true;
5521 tree value;
5523 /* Even when ctor is constant, it might contain non-*_CST
5524 elements, such as addresses or trapping values like
5525 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5526 in VECTOR_CST nodes. */
5527 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5528 if (!CONSTANT_CLASS_P (value))
5530 constant_p = false;
5531 break;
5534 if (constant_p)
5536 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5537 break;
5541 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5542 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5544 break;
5546 default:
5547 /* So how did we get a CONSTRUCTOR for a scalar type? */
5548 gcc_unreachable ();
5551 if (ret == GS_ERROR)
5552 return GS_ERROR;
5553 /* If we have gimplified both sides of the initializer but have
5554 not emitted an assignment, do so now. */
5555 if (*expr_p
5556 /* If the type is an empty type, we don't need to emit the
5557 assignment. */
5558 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5560 tree lhs = TREE_OPERAND (*expr_p, 0);
5561 tree rhs = TREE_OPERAND (*expr_p, 1);
5562 if (want_value && object == lhs)
5563 lhs = unshare_expr (lhs);
5564 gassign *init = gimple_build_assign (lhs, rhs);
5565 gimplify_seq_add_stmt (pre_p, init);
5567 if (want_value)
5569 *expr_p = object;
5570 ret = GS_OK;
5572 else
5574 *expr_p = NULL;
5575 ret = GS_ALL_DONE;
5578 /* If the user requests to initialize automatic variables, we
5579 should initialize the padding inside the variable. Add a call to
5580 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5581 always initialize the padding of OBJECT to zero regardless of
5582 INIT_TYPE. Note, we will not insert this call if the aggregate
5583 variable has already been completely cleared or if it's initialized
5584 with an empty constructor. We cannot insert this call if the
5585 variable is a gimple register, since __builtin_clear_padding takes
5586 the address of the variable. As a result, if a long double/_Complex long
5587 double variable is spilled to the stack later, its padding cannot
5588 be cleared with __builtin_clear_padding; we should clear its padding
5589 when it is spilled into memory. */
5590 if (is_init_expr
5591 && !is_gimple_reg (object)
5592 && clear_padding_type_may_have_padding_p (type)
5593 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5594 || !AGGREGATE_TYPE_P (type))
5595 && is_var_need_auto_init (object))
5596 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5598 return ret;
5601 /* Given a pointer value OP0, return a simplified version of an
5602 indirection through OP0, or NULL_TREE if no simplification is
5603 possible. This may only be applied to the RHS of an expression.
5604 Note that the resulting type may differ from the pointed-to type,
5605 but it will still be compatible with it from the langhooks
5606 point of view. */
5608 static tree
5609 gimple_fold_indirect_ref_rhs (tree t)
5611 return gimple_fold_indirect_ref (t);
5614 /* Subroutine of gimplify_modify_expr to do simplifications of
5615 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5616 something changes. */
5618 static enum gimplify_status
5619 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5620 gimple_seq *pre_p, gimple_seq *post_p,
5621 bool want_value)
5623 enum gimplify_status ret = GS_UNHANDLED;
5624 bool changed;
5626 do
5628 changed = false;
5629 switch (TREE_CODE (*from_p))
5631 case VAR_DECL:
5632 /* If we're assigning from a read-only variable initialized with
5633 a constructor and not volatile, do the direct assignment from
5634 the constructor, but only if the target is not volatile either
5635 since this latter assignment might end up being done on a per
5636 field basis. However, if the target is volatile and the type
5637 is aggregate and non-addressable, gimplify_init_constructor
5638 knows that it needs to ensure a single access to the target
5639 and it will return GS_OK only in this case. */
5640 if (TREE_READONLY (*from_p)
5641 && DECL_INITIAL (*from_p)
5642 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5643 && !TREE_THIS_VOLATILE (*from_p)
5644 && (!TREE_THIS_VOLATILE (*to_p)
5645 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5646 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5648 tree old_from = *from_p;
5649 enum gimplify_status subret;
5651 /* Move the constructor into the RHS. */
5652 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5654 /* Let's see if gimplify_init_constructor will need to put
5655 it in memory. */
5656 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5657 false, true);
5658 if (subret == GS_ERROR)
5660 /* If so, revert the change. */
5661 *from_p = old_from;
5663 else
5665 ret = GS_OK;
5666 changed = true;
5669 break;
5670 case INDIRECT_REF:
5671 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5672 /* If we have code like
5674 *(const A*)(A*)&x
5676 where the type of "x" is a (possibly cv-qualified variant
5677 of "A"), treat the entire expression as identical to "x".
5678 This kind of code arises in C++ when an object is bound
5679 to a const reference, and if "x" is a TARGET_EXPR we want
5680 to take advantage of the optimization below. But not if
5681 the type is TREE_ADDRESSABLE; then C++17 says that the
5682 TARGET_EXPR needs to be a temporary. */
5683 if (tree t
5684 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5686 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5687 if (TREE_THIS_VOLATILE (t) != volatile_p)
5689 if (DECL_P (t))
5690 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5691 build_fold_addr_expr (t));
5692 if (REFERENCE_CLASS_P (t))
5693 TREE_THIS_VOLATILE (t) = volatile_p;
5695 *from_p = t;
5696 ret = GS_OK;
5697 changed = true;
5699 break;
5701 case TARGET_EXPR:
5703 /* If we are initializing something from a TARGET_EXPR, strip the
5704 TARGET_EXPR and initialize it directly, if possible. This can't
5705 be done if the initializer is void, since that implies that the
5706 temporary is set in some non-trivial way.
5708 ??? What about code that pulls out the temp and uses it
5709 elsewhere? I think that such code never uses the TARGET_EXPR as
5710 an initializer. If I'm wrong, we'll die because the temp won't
5711 have any RTL. In that case, I guess we'll need to replace
5712 references somehow. */
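/* E.g. "x = TARGET_EXPR <D.1234, f ()>" is rewritten to "x = f ()",
   eliding the temporary D.1234, when the initializer is not void.  */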
5713 tree init = TARGET_EXPR_INITIAL (*from_p);
5715 if (init
5716 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5717 || !TARGET_EXPR_NO_ELIDE (*from_p))
5718 && !VOID_TYPE_P (TREE_TYPE (init)))
5720 *from_p = init;
5721 ret = GS_OK;
5722 changed = true;
5725 break;
5727 case COMPOUND_EXPR:
5728 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5729 caught. */
5730 gimplify_compound_expr (from_p, pre_p, true);
5731 ret = GS_OK;
5732 changed = true;
5733 break;
5735 case CONSTRUCTOR:
5736 /* If we already made some changes, let the front end have a
5737 crack at this before we break it down. */
5738 if (ret != GS_UNHANDLED)
5739 break;
5741 /* If we're initializing from a CONSTRUCTOR, break this into
5742 individual MODIFY_EXPRs. */
5743 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5744 false);
5745 return ret;
5747 case COND_EXPR:
5748 /* If we're assigning to a non-register type, push the assignment
5749 down into the branches. This is mandatory for ADDRESSABLE types,
5750 since we cannot generate temporaries for such, but it saves a
5751 copy in other cases as well. */
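/* E.g. "x = p ? a : b" with non-register x becomes, roughly,
   "p ? (x = a) : (x = b)", a void COND_EXPR evaluated for its
   side effects only.  */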
5752 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5754 /* This code should mirror the code in gimplify_cond_expr. */
5755 enum tree_code code = TREE_CODE (*expr_p);
5756 tree cond = *from_p;
5757 tree result = *to_p;
5759 ret = gimplify_expr (&result, pre_p, post_p,
5760 is_gimple_lvalue, fb_lvalue);
5761 if (ret != GS_ERROR)
5762 ret = GS_OK;
5764 /* If we are going to write RESULT more than once, clear
5765 TREE_READONLY flag, otherwise we might incorrectly promote
5766 the variable to static const and initialize it at compile
5767 time in one of the branches. */
5768 if (VAR_P (result)
5769 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5770 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5771 TREE_READONLY (result) = 0;
5772 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5773 TREE_OPERAND (cond, 1)
5774 = build2 (code, void_type_node, result,
5775 TREE_OPERAND (cond, 1));
5776 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5777 TREE_OPERAND (cond, 2)
5778 = build2 (code, void_type_node, unshare_expr (result),
5779 TREE_OPERAND (cond, 2));
5781 TREE_TYPE (cond) = void_type_node;
5782 recalculate_side_effects (cond);
5784 if (want_value)
5786 gimplify_and_add (cond, pre_p);
5787 *expr_p = unshare_expr (result);
5789 else
5790 *expr_p = cond;
5791 return ret;
5793 break;
5795 case CALL_EXPR:
5796 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5797 return slot so that we don't generate a temporary. */
5798 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5799 && aggregate_value_p (*from_p, *from_p))
5801 bool use_target;
5803 if (!(rhs_predicate_for (*to_p))(*from_p))
5804 /* If we need a temporary, *to_p isn't accurate. */
5805 use_target = false;
5806 /* It's OK to use the return slot directly unless it's an NRV. */
5807 else if (TREE_CODE (*to_p) == RESULT_DECL
5808 && DECL_NAME (*to_p) == NULL_TREE
5809 && needs_to_live_in_memory (*to_p))
5810 use_target = true;
5811 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5812 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5813 /* Don't force regs into memory. */
5814 use_target = false;
5815 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5816 /* It's OK to use the target directly if it's being
5817 initialized. */
5818 use_target = true;
5819 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5820 != INTEGER_CST)
5821 /* Always use the target and thus RSO for variable-sized types.
5822 GIMPLE cannot deal with a variable-sized assignment
5823 embedded in a call statement. */
5824 use_target = true;
5825 else if (TREE_CODE (*to_p) != SSA_NAME
5826 && (!is_gimple_variable (*to_p)
5827 || needs_to_live_in_memory (*to_p)))
5828 /* Don't use the original target if it's already addressable;
5829 if its address escapes, and the called function uses the
5830 NRV optimization, a conforming program could see *to_p
5831 change before the called function returns; see c++/19317.
5832 When optimizing, the return_slot pass marks more functions
5833 as safe after we have escape info. */
5834 use_target = false;
5835 else
5836 use_target = true;
5838 if (use_target)
5840 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5841 mark_addressable (*to_p);
5844 break;
5846 case WITH_SIZE_EXPR:
5847 /* Likewise for calls that return an aggregate of non-constant size,
5848 since we would not be able to generate a temporary at all. */
5849 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5851 *from_p = TREE_OPERAND (*from_p, 0);
5852 /* We don't change ret in this case because the
5853 WITH_SIZE_EXPR might have been added in
5854 gimplify_modify_expr, so returning GS_OK would lead to an
5855 infinite loop. */
5856 changed = true;
5858 break;
5860 /* If we're initializing from a container, push the initialization
5861 inside it. */
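/* E.g. "x = ({ s1; s2; val; })" becomes, roughly,
   "({ s1; s2; x = val; })".  */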
5862 case CLEANUP_POINT_EXPR:
5863 case BIND_EXPR:
5864 case STATEMENT_LIST:
5866 tree wrap = *from_p;
5867 tree t;
5869 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5870 fb_lvalue);
5871 if (ret != GS_ERROR)
5872 ret = GS_OK;
5874 t = voidify_wrapper_expr (wrap, *expr_p);
5875 gcc_assert (t == *expr_p);
5877 if (want_value)
5879 gimplify_and_add (wrap, pre_p);
5880 *expr_p = unshare_expr (*to_p);
5882 else
5883 *expr_p = wrap;
5884 return GS_OK;
5887 case NOP_EXPR:
5888 /* Pull out compound literal expressions from a NOP_EXPR.
5889 Those are created in the C FE to drop qualifiers during
5890 lvalue conversion. */
5891 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5892 && tree_ssa_useless_type_conversion (*from_p))
5894 *from_p = TREE_OPERAND (*from_p, 0);
5895 ret = GS_OK;
5896 changed = true;
5898 break;
5900 case COMPOUND_LITERAL_EXPR:
5902 tree complit = TREE_OPERAND (*expr_p, 1);
5903 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5904 tree decl = DECL_EXPR_DECL (decl_s);
5905 tree init = DECL_INITIAL (decl);
5907 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5908 into struct T x = { 0, 1, 2 } if the address of the
5909 compound literal has never been taken. */
5910 if (!TREE_ADDRESSABLE (complit)
5911 && !TREE_ADDRESSABLE (decl)
5912 && init)
5914 *expr_p = copy_node (*expr_p);
5915 TREE_OPERAND (*expr_p, 1) = init;
5916 return GS_OK;
5920 default:
5921 break;
5924 while (changed);
5926 return ret;
5930 /* Return true if T looks like a valid GIMPLE statement. */
5932 static bool
5933 is_gimple_stmt (tree t)
5935 const enum tree_code code = TREE_CODE (t);
5937 switch (code)
5939 case NOP_EXPR:
5940 /* The only valid NOP_EXPR is the empty statement. */
5941 return IS_EMPTY_STMT (t);
5943 case BIND_EXPR:
5944 case COND_EXPR:
5945 /* These are only valid if they're void. */
5946 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5948 case SWITCH_EXPR:
5949 case GOTO_EXPR:
5950 case RETURN_EXPR:
5951 case LABEL_EXPR:
5952 case CASE_LABEL_EXPR:
5953 case TRY_CATCH_EXPR:
5954 case TRY_FINALLY_EXPR:
5955 case EH_FILTER_EXPR:
5956 case CATCH_EXPR:
5957 case ASM_EXPR:
5958 case STATEMENT_LIST:
5959 case OACC_PARALLEL:
5960 case OACC_KERNELS:
5961 case OACC_SERIAL:
5962 case OACC_DATA:
5963 case OACC_HOST_DATA:
5964 case OACC_DECLARE:
5965 case OACC_UPDATE:
5966 case OACC_ENTER_DATA:
5967 case OACC_EXIT_DATA:
5968 case OACC_CACHE:
5969 case OMP_PARALLEL:
5970 case OMP_FOR:
5971 case OMP_SIMD:
5972 case OMP_DISTRIBUTE:
5973 case OMP_LOOP:
5974 case OACC_LOOP:
5975 case OMP_SCAN:
5976 case OMP_SCOPE:
5977 case OMP_SECTIONS:
5978 case OMP_SECTION:
5979 case OMP_SINGLE:
5980 case OMP_MASTER:
5981 case OMP_MASKED:
5982 case OMP_TASKGROUP:
5983 case OMP_ORDERED:
5984 case OMP_CRITICAL:
5985 case OMP_TASK:
5986 case OMP_TARGET:
5987 case OMP_TARGET_DATA:
5988 case OMP_TARGET_UPDATE:
5989 case OMP_TARGET_ENTER_DATA:
5990 case OMP_TARGET_EXIT_DATA:
5991 case OMP_TASKLOOP:
5992 case OMP_TEAMS:
5993 /* These are always void. */
5994 return true;
5996 case CALL_EXPR:
5997 case MODIFY_EXPR:
5998 case PREDICT_EXPR:
5999 /* These are valid regardless of their type. */
6000 return true;
6002 default:
6003 return false;
6008 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6009 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6011 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6012 other, unmodified part of the complex object just before the total store.
6013 As a consequence, if the object is still uninitialized, an undefined value
6014 will be loaded into a register, which may result in a spurious exception
6015 if the register is floating-point and the value happens to be a signaling
6016 NaN for example. Then the fully-fledged complex operations lowering pass
6017 followed by a DCE pass are necessary in order to fix things up. */
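/* For instance, "__real z = r;" with z a complex gimple register becomes,
   roughly:
     D.1 = __imag z;
     z = COMPLEX_EXPR <r, D.1>;
   where D.1 stands for a temporary holding the unmodified part.  */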
6019 static enum gimplify_status
6020 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6021 bool want_value)
6023 enum tree_code code, ocode;
6024 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6026 lhs = TREE_OPERAND (*expr_p, 0);
6027 rhs = TREE_OPERAND (*expr_p, 1);
6028 code = TREE_CODE (lhs);
6029 lhs = TREE_OPERAND (lhs, 0);
6031 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6032 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6033 suppress_warning (other);
6034 other = get_formal_tmp_var (other, pre_p);
6036 realpart = code == REALPART_EXPR ? rhs : other;
6037 imagpart = code == REALPART_EXPR ? other : rhs;
6039 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6040 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6041 else
6042 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6044 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6045 *expr_p = (want_value) ? rhs : NULL_TREE;
6047 return GS_ALL_DONE;
6050 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6052 modify_expr
6053 : varname '=' rhs
6054 | '*' ID '=' rhs
6056 PRE_P points to the list where side effects that must happen before
6057 *EXPR_P should be stored.
6059 POST_P points to the list where side effects that must happen after
6060 *EXPR_P should be stored.
6062 WANT_VALUE is nonzero iff we want to use the value of this expression
6063 in another expression. */
6065 static enum gimplify_status
6066 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6067 bool want_value)
6069 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6070 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6071 enum gimplify_status ret = GS_UNHANDLED;
6072 gimple *assign;
6073 location_t loc = EXPR_LOCATION (*expr_p);
6074 gimple_stmt_iterator gsi;
6076 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6077 return GS_ERROR;
6079 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6080 || TREE_CODE (*expr_p) == INIT_EXPR);
6082 /* Trying to simplify a clobber using normal logic doesn't work,
6083 so handle it here. */
6084 if (TREE_CLOBBER_P (*from_p))
6086 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6087 if (ret == GS_ERROR)
6088 return ret;
6089 gcc_assert (!want_value);
6090 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6092 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6093 pre_p, post_p);
6094 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6096 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6097 *expr_p = NULL;
6098 return GS_ALL_DONE;
6101 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6102 memset. */
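/* E.g. initializing a variable-sized object from an empty constructor
   becomes, roughly, __builtin_memset (&object, 0, size).  */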
6103 if (TREE_TYPE (*from_p) != error_mark_node
6104 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6105 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6106 && TREE_CODE (*from_p) == CONSTRUCTOR
6107 && CONSTRUCTOR_NELTS (*from_p) == 0)
6109 maybe_with_size_expr (from_p);
6110 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6111 return gimplify_modify_expr_to_memset (expr_p,
6112 TREE_OPERAND (*from_p, 1),
6113 want_value, pre_p);
6116 /* Insert pointer conversions required by the middle-end that are not
6117 required by the frontend. This fixes middle-end type checking
6118 for e.g. gcc.dg/redecl-6.c. */
6119 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6121 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6122 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6123 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6126 /* See if any simplifications can be done based on what the RHS is. */
6127 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6128 want_value);
6129 if (ret != GS_UNHANDLED)
6130 return ret;
6132 /* For empty types only gimplify the left hand side and right hand
6133 side as statements and throw away the assignment. Do this after
6134 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6135 types properly. */
6136 if (is_empty_type (TREE_TYPE (*from_p))
6137 && !want_value
6138 /* Don't do this for calls that return addressable types, expand_call
6139 relies on those having a lhs. */
6140 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6141 && TREE_CODE (*from_p) == CALL_EXPR))
6143 gimplify_stmt (from_p, pre_p);
6144 gimplify_stmt (to_p, pre_p);
6145 *expr_p = NULL_TREE;
6146 return GS_ALL_DONE;
6149 /* If the value being copied is of variable width, compute the length
6150 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6151 before gimplifying any of the operands so that we can resolve any
6152 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6153 the size of the expression to be copied, not of the destination, so
6154 that is what we must do here. */
6155 maybe_with_size_expr (from_p);
6157 /* As a special case, we have to temporarily allow for assignments
6158 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6159 a toplevel statement, when gimplifying the GENERIC expression
6160 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6161 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6163 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6164 prevent gimplify_expr from trying to create a new temporary for
6165 foo's LHS, we tell it that it should only gimplify until it
6166 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6167 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6168 and all we need to do here is set 'a' to be its LHS. */
6170 /* Gimplify the RHS first for C++17 and bug 71104. */
6171 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6172 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6173 if (ret == GS_ERROR)
6174 return ret;
6176 /* Then gimplify the LHS. */
6177 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6178 twice we have to make sure to gimplify into non-SSA as otherwise
6179 the abnormal edge added later will make those defs not dominate
6180 their uses.
6181 ??? Technically this applies only to the registers used in the
6182 resulting non-register *TO_P. */
6183 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6184 if (saved_into_ssa
6185 && TREE_CODE (*from_p) == CALL_EXPR
6186 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6187 gimplify_ctxp->into_ssa = false;
6188 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6189 gimplify_ctxp->into_ssa = saved_into_ssa;
6190 if (ret == GS_ERROR)
6191 return ret;
6193 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6194 guess for the predicate was wrong. */
6195 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6196 if (final_pred != initial_pred)
6198 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6199 if (ret == GS_ERROR)
6200 return ret;
6203 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6204 size as argument to the call. */
6205 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6207 tree call = TREE_OPERAND (*from_p, 0);
6208 tree vlasize = TREE_OPERAND (*from_p, 1);
6210 if (TREE_CODE (call) == CALL_EXPR
6211 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6213 int nargs = call_expr_nargs (call);
6214 tree type = TREE_TYPE (call);
6215 tree ap = CALL_EXPR_ARG (call, 0);
6216 tree tag = CALL_EXPR_ARG (call, 1);
6217 tree aptag = CALL_EXPR_ARG (call, 2);
6218 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6219 IFN_VA_ARG, type,
6220 nargs + 1, ap, tag,
6221 aptag, vlasize);
6222 TREE_OPERAND (*from_p, 0) = newcall;
6226 /* Now see if the above changed *from_p to something we handle specially. */
6227 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6228 want_value);
6229 if (ret != GS_UNHANDLED)
6230 return ret;
6232 /* If we've got a variable sized assignment between two lvalues (i.e. one
6233 that does not involve a call), then we can make things a bit more
6234 straightforward by converting the assignment to memcpy or memset. */
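/* E.g. "b = a" for a variable-sized type becomes, roughly,
   __builtin_memcpy (&b, &a, size), with size taken from the
   WITH_SIZE_EXPR.  */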
6235 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6237 tree from = TREE_OPERAND (*from_p, 0);
6238 tree size = TREE_OPERAND (*from_p, 1);
6240 if (TREE_CODE (from) == CONSTRUCTOR)
6241 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6243 if (is_gimple_addressable (from))
6245 *from_p = from;
6246 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6247 pre_p);
6251 /* Transform partial stores to non-addressable complex variables into
6252 total stores. This allows us to use real instead of virtual operands
6253 for these variables, which improves optimization. */
6254 if ((TREE_CODE (*to_p) == REALPART_EXPR
6255 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6256 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6257 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6259 /* Try to alleviate the effects of the gimplification creating artificial
6260 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6261 make sure not to create DECL_DEBUG_EXPR links across functions. */
6262 if (!gimplify_ctxp->into_ssa
6263 && VAR_P (*from_p)
6264 && DECL_IGNORED_P (*from_p)
6265 && DECL_P (*to_p)
6266 && !DECL_IGNORED_P (*to_p)
6267 && decl_function_context (*to_p) == current_function_decl
6268 && decl_function_context (*from_p) == current_function_decl)
6270 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6271 DECL_NAME (*from_p)
6272 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6273 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6274 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6277 if (want_value && TREE_THIS_VOLATILE (*to_p))
6278 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6280 if (TREE_CODE (*from_p) == CALL_EXPR)
6282 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6283 instead of a GIMPLE_ASSIGN. */
6284 gcall *call_stmt;
6285 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6287 /* Gimplify internal functions created in the FEs. */
6288 int nargs = call_expr_nargs (*from_p), i;
6289 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6290 auto_vec<tree> vargs (nargs);
6292 for (i = 0; i < nargs; i++)
6294 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6295 EXPR_LOCATION (*from_p));
6296 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6298 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6299 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6300 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6302 else
6304 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6305 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6306 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6307 tree fndecl = get_callee_fndecl (*from_p);
6308 if (fndecl
6309 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6310 && call_expr_nargs (*from_p) == 3)
6311 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6312 CALL_EXPR_ARG (*from_p, 0),
6313 CALL_EXPR_ARG (*from_p, 1),
6314 CALL_EXPR_ARG (*from_p, 2));
6315 else
6317 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6320 notice_special_calls (call_stmt);
6321 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6322 gimple_call_set_lhs (call_stmt, *to_p);
6323 else if (TREE_CODE (*to_p) == SSA_NAME)
6324 /* The above is somewhat premature, avoid ICEing later for an
6325 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6326 ??? This doesn't make it a default-def. */
6327 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6329 assign = call_stmt;
6331 else
6333 assign = gimple_build_assign (*to_p, *from_p);
6334 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6335 if (COMPARISON_CLASS_P (*from_p))
6336 copy_warning (assign, *from_p);
6339 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6341 /* We should have got an SSA name from the start. */
6342 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6343 || ! gimple_in_ssa_p (cfun));
6346 gimplify_seq_add_stmt (pre_p, assign);
6347 gsi = gsi_last (*pre_p);
6348 maybe_fold_stmt (&gsi);
6350 if (want_value)
6352 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6353 return GS_OK;
6355 else
6356 *expr_p = NULL;
6358 return GS_ALL_DONE;
6361 /* Gimplify a comparison between two variable-sized objects. Do this
6362 with a call to BUILT_IN_MEMCMP. */
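/* E.g. "x == y" for two variable-sized objects becomes, roughly,
   __builtin_memcmp (&x, &y, size) == 0, and similarly for "!=".  */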
6364 static enum gimplify_status
6365 gimplify_variable_sized_compare (tree *expr_p)
6367 location_t loc = EXPR_LOCATION (*expr_p);
6368 tree op0 = TREE_OPERAND (*expr_p, 0);
6369 tree op1 = TREE_OPERAND (*expr_p, 1);
6370 tree t, arg, dest, src, expr;
6372 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6373 arg = unshare_expr (arg);
6374 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6375 src = build_fold_addr_expr_loc (loc, op1);
6376 dest = build_fold_addr_expr_loc (loc, op0);
6377 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6378 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6380 expr
6381 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6382 SET_EXPR_LOCATION (expr, loc);
6383 *expr_p = expr;
6385 return GS_OK;
6388 /* Gimplify a comparison between two aggregate objects of integral scalar
6389 mode as a comparison between the bitwise equivalent scalar values. */
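/* E.g. two 4-byte aggregates of SImode are compared as, roughly,
   VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y).  */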
6391 static enum gimplify_status
6392 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6394 location_t loc = EXPR_LOCATION (*expr_p);
6395 tree op0 = TREE_OPERAND (*expr_p, 0);
6396 tree op1 = TREE_OPERAND (*expr_p, 1);
6398 tree type = TREE_TYPE (op0);
6399 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6401 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6402 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6404 *expr_p
6405 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6407 return GS_OK;
6410 /* Gimplify an expression sequence. This function gimplifies each
6411 expression and rewrites the original expression with the last
6412 expression of the sequence in GIMPLE form.
6414 PRE_P points to the list where the side effects for all the
6415 expressions in the sequence will be emitted.
6417 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
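/* E.g. for "(a, b, c)" the side effects of a and b are emitted as
   statements and c becomes the value of the whole expression.  */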
6419 static enum gimplify_status
6420 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6422 tree t = *expr_p;
6426 tree *sub_p = &TREE_OPERAND (t, 0);
6428 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6429 gimplify_compound_expr (sub_p, pre_p, false);
6430 else
6431 gimplify_stmt (sub_p, pre_p);
6433 t = TREE_OPERAND (t, 1);
6435 while (TREE_CODE (t) == COMPOUND_EXPR);
6437 *expr_p = t;
6438 if (want_value)
6439 return GS_OK;
6440 else
6442 gimplify_stmt (expr_p, pre_p);
6443 return GS_ALL_DONE;
6447 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6448 gimplify. After gimplification, EXPR_P will point to a new temporary
6449 that holds the original value of the SAVE_EXPR node.
6451 PRE_P points to the list where side effects that must happen before
6452 *EXPR_P should be stored. */
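/* E.g. SAVE_EXPR <i + 1> is evaluated once into a temporary, and all
   later references to the SAVE_EXPR reuse that temporary.  */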
6454 static enum gimplify_status
6455 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6457 enum gimplify_status ret = GS_ALL_DONE;
6458 tree val;
6460 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6461 val = TREE_OPERAND (*expr_p, 0);
6463 if (val && TREE_TYPE (val) == error_mark_node)
6464 return GS_ERROR;
6466 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6467 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6469 /* The operand may be a void-valued expression. It is
6470 being executed only for its side-effects. */
6471 if (TREE_TYPE (val) == void_type_node)
6473 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6474 is_gimple_stmt, fb_none);
6475 val = NULL;
6477 else
6478 /* The temporary may not be an SSA name as later abnormal and EH
6479 control flow may invalidate use/def domination. When in SSA
6480 form then assume there are no such issues and SAVE_EXPRs only
6481 appear via GENERIC foldings. */
6482 val = get_initialized_tmp_var (val, pre_p, post_p,
6483 gimple_in_ssa_p (cfun));
6485 TREE_OPERAND (*expr_p, 0) = val;
6486 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6489 *expr_p = val;
6491 return ret;
6494 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6496 unary_expr
6497 : ...
6498 | '&' varname
6501 PRE_P points to the list where side effects that must happen before
6502 *EXPR_P should be stored.
6504 POST_P points to the list where side effects that must happen after
6505 *EXPR_P should be stored. */
6507 static enum gimplify_status
6508 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6510 tree expr = *expr_p;
6511 tree op0 = TREE_OPERAND (expr, 0);
6512 enum gimplify_status ret;
6513 location_t loc = EXPR_LOCATION (*expr_p);
6515 switch (TREE_CODE (op0))
6517 case INDIRECT_REF:
6518 do_indirect_ref:
6519 /* Check if we are dealing with an expression of the form '&*ptr'.
6520 While the front end folds away '&*ptr' into 'ptr', these
6521 expressions may be generated internally by the compiler (e.g.,
6522 builtins like __builtin_va_end). */
6523 /* Caution: the silent array decomposition semantics we allow for
6524 ADDR_EXPR mean we can't always discard the pair. */
6525 /* Gimplification of the ADDR_EXPR operand may drop
6526 cv-qualification conversions, so make sure we add them if
6527 needed. */
6529 tree op00 = TREE_OPERAND (op0, 0);
6530 tree t_expr = TREE_TYPE (expr);
6531 tree t_op00 = TREE_TYPE (op00);
6533 if (!useless_type_conversion_p (t_expr, t_op00))
6534 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6535 *expr_p = op00;
6536 ret = GS_OK;
6538 break;
6540 case VIEW_CONVERT_EXPR:
6541 /* Take the address of our operand and then convert it to the type of
6542 this ADDR_EXPR.
6544 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6545 all clear. The impact of this transformation is even less clear. */
6547 /* If the operand is a useless conversion, look through it. Doing so
6548 guarantees that the ADDR_EXPR and its operand will remain of the
6549 same type. */
6550 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6551 op0 = TREE_OPERAND (op0, 0);
6553 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6554 build_fold_addr_expr_loc (loc,
6555 TREE_OPERAND (op0, 0)));
6556 ret = GS_OK;
6557 break;
6559 case MEM_REF:
6560 if (integer_zerop (TREE_OPERAND (op0, 1)))
6561 goto do_indirect_ref;
6563 /* fall through */
6565 default:
6566 /* If we see a call to a declared builtin or see its address
6567 being taken (we can unify those cases here) then we can mark
6568 the builtin for implicit generation by GCC. */
6569 if (TREE_CODE (op0) == FUNCTION_DECL
6570 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6571 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6572 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6574 /* We use fb_either here because the C frontend sometimes takes
6575 the address of a call that returns a struct; see
6576 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6577 the implied temporary explicit. */
6579 /* Make the operand addressable. */
6580 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6581 is_gimple_addressable, fb_either);
6582 if (ret == GS_ERROR)
6583 break;
6585 /* Then mark it. Beware that it may not be possible to do so directly
6586 if a temporary has been created by the gimplification. */
6587 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6589 op0 = TREE_OPERAND (expr, 0);
6591 /* For various reasons, the gimplification of the expression
6592 may have made a new INDIRECT_REF. */
6593 if (INDIRECT_REF_P (op0)
6594 || (TREE_CODE (op0) == MEM_REF
6595 && integer_zerop (TREE_OPERAND (op0, 1))))
6596 goto do_indirect_ref;
6598 mark_addressable (TREE_OPERAND (expr, 0));
6600 /* The FEs may end up building ADDR_EXPRs early on a decl with
6601 an incomplete type. Re-build ADDR_EXPRs in canonical form
6602 here. */
6603 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6604 *expr_p = build_fold_addr_expr (op0);
6606 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6607 recompute_tree_invariant_for_addr_expr (*expr_p);
6609 /* If we re-built the ADDR_EXPR add a conversion to the original type
6610 if required. */
6611 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6612 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6614 break;
6617 return ret;
6620 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6621 value; output operands should be a gimple lvalue. */
6623 static enum gimplify_status
6624 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6626 tree expr;
6627 int noutputs;
6628 const char **oconstraints;
6629 int i;
6630 tree link;
6631 const char *constraint;
6632 bool allows_mem, allows_reg, is_inout;
6633 enum gimplify_status ret, tret;
6634 gasm *stmt;
6635 vec<tree, va_gc> *inputs;
6636 vec<tree, va_gc> *outputs;
6637 vec<tree, va_gc> *clobbers;
6638 vec<tree, va_gc> *labels;
6639 tree link_next;
6641 expr = *expr_p;
6642 noutputs = list_length (ASM_OUTPUTS (expr));
6643 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6645 inputs = NULL;
6646 outputs = NULL;
6647 clobbers = NULL;
6648 labels = NULL;
6650 ret = GS_ALL_DONE;
6651 link_next = NULL_TREE;
6652 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6654 bool ok;
6655 size_t constraint_len;
6657 link_next = TREE_CHAIN (link);
6659 oconstraints[i]
6660 = constraint
6661 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6662 constraint_len = strlen (constraint);
6663 if (constraint_len == 0)
6664 continue;
6666 ok = parse_output_constraint (&constraint, i, 0, 0,
6667 &allows_mem, &allows_reg, &is_inout);
6668 if (!ok)
6670 ret = GS_ERROR;
6671 is_inout = false;
6674 /* If we can't make copies, we can only accept memory.
6675 Similarly for VLAs. */
6676 tree outtype = TREE_TYPE (TREE_VALUE (link));
6677 if (outtype != error_mark_node
6678 && (TREE_ADDRESSABLE (outtype)
6679 || !COMPLETE_TYPE_P (outtype)
6680 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6682 if (allows_mem)
6683 allows_reg = 0;
6684 else
6686 error ("impossible constraint in %<asm%>");
6687 error ("non-memory output %d must stay in memory", i);
6688 return GS_ERROR;
6692 if (!allows_reg && allows_mem)
6693 mark_addressable (TREE_VALUE (link));
6695 tree orig = TREE_VALUE (link);
6696 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6697 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6698 fb_lvalue | fb_mayfail);
6699 if (tret == GS_ERROR)
6701 if (orig != error_mark_node)
6702 error ("invalid lvalue in %<asm%> output %d", i);
6703 ret = tret;
6706 /* If the constraint does not allow memory, make sure we gimplify
6707 it to a register if it is not one already but its base is. This
6708 happens for complex and vector components. */
6709 if (!allows_mem)
6711 tree op = TREE_VALUE (link);
6712 if (! is_gimple_val (op)
6713 && is_gimple_reg_type (TREE_TYPE (op))
6714 && is_gimple_reg (get_base_address (op)))
6716 tree tem = create_tmp_reg (TREE_TYPE (op));
6717 tree ass;
6718 if (is_inout)
6720 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6721 tem, unshare_expr (op));
6722 gimplify_and_add (ass, pre_p);
6724 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6725 gimplify_and_add (ass, post_p);
6727 TREE_VALUE (link) = tem;
6728 tret = GS_OK;
6732 vec_safe_push (outputs, link);
6733 TREE_CHAIN (link) = NULL_TREE;
6735 if (is_inout)
6737 /* An input/output operand. To give the optimizers more
6738 flexibility, split it into separate input and output
6739 operands. */
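/* E.g. the constraint "+r" (x) becomes the output "=r" (x) plus a
   matching numeric input, roughly "0" (x), where 0 is the output's
   operand number.  */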
6740 tree input;
6741 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6742 char buf[11];
6744 /* Turn the in/out constraint into an output constraint. */
6745 char *p = xstrdup (constraint);
6746 p[0] = '=';
6747 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6749 /* And add a matching input constraint. */
6750 if (allows_reg)
6752 sprintf (buf, "%u", i);
6754 /* If there are multiple alternatives in the constraint,
6755 handle each of them individually. Those that allow a register
6756 will be replaced with the operand number; the others will stay
6757 unchanged. */
6758 if (strchr (p, ',') != NULL)
6760 size_t len = 0, buflen = strlen (buf);
6761 char *beg, *end, *str, *dst;
6763 for (beg = p + 1;;)
6765 end = strchr (beg, ',');
6766 if (end == NULL)
6767 end = strchr (beg, '\0');
6768 if ((size_t) (end - beg) < buflen)
6769 len += buflen + 1;
6770 else
6771 len += end - beg + 1;
6772 if (*end)
6773 beg = end + 1;
6774 else
6775 break;
6778 str = (char *) alloca (len);
6779 for (beg = p + 1, dst = str;;)
6781 const char *tem;
6782 bool mem_p, reg_p, inout_p;
6784 end = strchr (beg, ',');
6785 if (end)
6786 *end = '\0';
6787 beg[-1] = '=';
6788 tem = beg - 1;
6789 parse_output_constraint (&tem, i, 0, 0,
6790 &mem_p, &reg_p, &inout_p);
6791 if (dst != str)
6792 *dst++ = ',';
6793 if (reg_p)
6795 memcpy (dst, buf, buflen);
6796 dst += buflen;
6798 else
6800 if (end)
6801 len = end - beg;
6802 else
6803 len = strlen (beg);
6804 memcpy (dst, beg, len);
6805 dst += len;
6807 if (end)
6808 beg = end + 1;
6809 else
6810 break;
6812 *dst = '\0';
6813 input = build_string (dst - str, str);
6815 else
6816 input = build_string (strlen (buf), buf);
6818 else
6819 input = build_string (constraint_len - 1, constraint + 1);
6821 free (p);
6823 input = build_tree_list (build_tree_list (NULL_TREE, input),
6824 unshare_expr (TREE_VALUE (link)));
6825 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6829 link_next = NULL_TREE;
6830 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6832 link_next = TREE_CHAIN (link);
6833 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6834 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6835 oconstraints, &allows_mem, &allows_reg);
6837 /* If we can't make copies, we can only accept memory. */
6838 tree intype = TREE_TYPE (TREE_VALUE (link));
6839 if (intype != error_mark_node
6840 && (TREE_ADDRESSABLE (intype)
6841 || !COMPLETE_TYPE_P (intype)
6842 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6844 if (allows_mem)
6845 allows_reg = 0;
6846 else
6848 error ("impossible constraint in %<asm%>");
6849 error ("non-memory input %d must stay in memory", i);
6850 return GS_ERROR;
6854 /* If the operand is a memory input, it should be an lvalue. */
6855 if (!allows_reg && allows_mem)
6857 tree inputv = TREE_VALUE (link);
6858 STRIP_NOPS (inputv);
6859 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6860 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6861 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6862 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6863 || TREE_CODE (inputv) == MODIFY_EXPR)
6864 TREE_VALUE (link) = error_mark_node;
6865 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6866 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6867 if (tret != GS_ERROR)
6869 /* Unlike output operands, memory inputs are not guaranteed
6870 to be lvalues by the FE, and while the expressions are
6871 marked addressable there, if it is e.g. a statement
6872 expression, temporaries in it might not end up being
6873 addressable. They might be already used in the IL and thus
6874 it is too late to make them addressable now though. */
6875 tree x = TREE_VALUE (link);
6876 while (handled_component_p (x))
6877 x = TREE_OPERAND (x, 0);
6878 if (TREE_CODE (x) == MEM_REF
6879 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6880 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6881 if ((VAR_P (x)
6882 || TREE_CODE (x) == PARM_DECL
6883 || TREE_CODE (x) == RESULT_DECL)
6884 && !TREE_ADDRESSABLE (x)
6885 && is_gimple_reg (x))
6887 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6888 input_location), 0,
6889 "memory input %d is not directly addressable",
6891 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6894 mark_addressable (TREE_VALUE (link));
6895 if (tret == GS_ERROR)
6897 if (inputv != error_mark_node)
6898 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6899 "memory input %d is not directly addressable", i);
6900 ret = tret;
6903 else
6905 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6906 is_gimple_asm_val, fb_rvalue);
6907 if (tret == GS_ERROR)
6908 ret = tret;
6911 TREE_CHAIN (link) = NULL_TREE;
6912 vec_safe_push (inputs, link);
6915 link_next = NULL_TREE;
6916 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6918 link_next = TREE_CHAIN (link);
6919 TREE_CHAIN (link) = NULL_TREE;
6920 vec_safe_push (clobbers, link);
6923 link_next = NULL_TREE;
6924 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6926 link_next = TREE_CHAIN (link);
6927 TREE_CHAIN (link) = NULL_TREE;
6928 vec_safe_push (labels, link);
6931 /* Do not add ASMs with errors to the gimple IL stream. */
6932 if (ret != GS_ERROR)
6934 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6935 inputs, outputs, clobbers, labels);
6937 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6938 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6939 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6941 gimplify_seq_add_stmt (pre_p, stmt);
6944 return ret;
6947 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6948 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6949 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6950 return to this function.
6952 FIXME should we complexify the prequeue handling instead? Or use flags
6953 for all the cleanups and let the optimizer tighten them up? The current
6954 code seems pretty fragile; it will break on a cleanup within any
6955 non-conditional nesting. But any such nesting would be broken, anyway;
6956 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6957 and continues out of it. We can do that at the RTL level, though, so
6958 having an optimizer to tighten up try/finally regions would be a Good
6959 Thing. */
6961 static enum gimplify_status
6962 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6964 gimple_stmt_iterator iter;
6965 gimple_seq body_sequence = NULL;
6967 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6969 /* We only care about the number of conditions between the innermost
6970 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6971 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6972 int old_conds = gimplify_ctxp->conditions;
6973 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6974 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6975 gimplify_ctxp->conditions = 0;
6976 gimplify_ctxp->conditional_cleanups = NULL;
6977 gimplify_ctxp->in_cleanup_point_expr = true;
6979 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6981 gimplify_ctxp->conditions = old_conds;
6982 gimplify_ctxp->conditional_cleanups = old_cleanups;
6983 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6985 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6987 gimple *wce = gsi_stmt (iter);
6989 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6991 if (gsi_one_before_end_p (iter))
6993 /* Note that gsi_insert_seq_before and gsi_remove do not
6994 scan operands, unlike some other sequence mutators. */
6995 if (!gimple_wce_cleanup_eh_only (wce))
6996 gsi_insert_seq_before_without_update (&iter,
6997 gimple_wce_cleanup (wce),
6998 GSI_SAME_STMT);
6999 gsi_remove (&iter, true);
7000 break;
7002 else
7004 gtry *gtry;
7005 gimple_seq seq;
7006 enum gimple_try_flags kind;
7008 if (gimple_wce_cleanup_eh_only (wce))
7009 kind = GIMPLE_TRY_CATCH;
7010 else
7011 kind = GIMPLE_TRY_FINALLY;
7012 seq = gsi_split_seq_after (iter);
7014 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7015 /* Do not use gsi_replace here, as it may scan operands.
7016 We want to do a simple structural modification only. */
7017 gsi_set_stmt (&iter, gtry);
7018 iter = gsi_start (gtry->eval);
7021 else
7022 gsi_next (&iter);
7025 gimplify_seq_add_seq (pre_p, body_sequence);
7026 if (temp)
7028 *expr_p = temp;
7029 return GS_OK;
7031 else
7033 *expr_p = NULL;
7034 return GS_ALL_DONE;
7038 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7039 is the cleanup action required. EH_ONLY is true if the cleanup should
7040 only be executed if an exception is thrown, not on normal exit.
7041 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7042 only valid for clobbers. */
7044 static void
7045 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7046 bool force_uncond = false)
7048 gimple *wce;
7049 gimple_seq cleanup_stmts = NULL;
7051 /* Errors can result in improperly nested cleanups, which results in
7052 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7053 if (seen_error ())
7054 return;
7056 if (gimple_conditional_context ())
7058 /* If we're in a conditional context, this is more complex. We only
7059 want to run the cleanup if we actually ran the initialization that
7060 necessitates it, but we want to run it after the end of the
7061 conditional context. So we wrap the try/finally around the
7062 condition and use a flag to determine whether or not to actually
7063 run the destructor. Thus
7065 test ? f(A()) : 0
7067 becomes (approximately)
7069 flag = 0;
7070 try {
7071 if (test) { A::A(temp); flag = 1; val = f(temp); }
7072 else { val = 0; }
7073 } finally {
7074 if (flag) A::~A(temp);
7078 if (force_uncond)
7080 gimplify_stmt (&cleanup, &cleanup_stmts);
7081 wce = gimple_build_wce (cleanup_stmts);
7082 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7084 else
7086 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7087 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7088 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7090 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7091 gimplify_stmt (&cleanup, &cleanup_stmts);
7092 wce = gimple_build_wce (cleanup_stmts);
7093 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7095 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7096 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7097 gimplify_seq_add_stmt (pre_p, ftrue);
7099 /* Because of this manipulation, and the EH edges that jump
7100 threading cannot redirect, the temporary (VAR) will appear
7101 to be used uninitialized. Don't warn. */
7102 suppress_warning (var, OPT_Wuninitialized);
7105 else
7107 gimplify_stmt (&cleanup, &cleanup_stmts);
7108 wce = gimple_build_wce (cleanup_stmts);
7109 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7110 gimplify_seq_add_stmt (pre_p, wce);
7114 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7116 static enum gimplify_status
7117 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7119 tree targ = *expr_p;
7120 tree temp = TARGET_EXPR_SLOT (targ);
7121 tree init = TARGET_EXPR_INITIAL (targ);
7122 enum gimplify_status ret;
7124 bool unpoison_empty_seq = false;
7125 gimple_stmt_iterator unpoison_it;
7127 if (init)
7129 gimple_seq init_pre_p = NULL;
7131 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp
7132 to the temps list. Also handle variable length TARGET_EXPRs. */
7133 if (!poly_int_tree_p (DECL_SIZE (temp)))
7135 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7136 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7137 /* FIXME: this is correct only when the size of the type does
7138 not depend on expressions evaluated in init. */
7139 gimplify_vla_decl (temp, &init_pre_p);
7141 else
7143 /* Save the location where we need to place unpoisoning. It's possible
7144 that the variable will later be converted to needs_to_live_in_memory. */
7145 unpoison_it = gsi_last (*pre_p);
7146 unpoison_empty_seq = gsi_end_p (unpoison_it);
7148 gimple_add_tmp_var (temp);
7151 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7152 expression is supposed to initialize the slot. */
7153 if (VOID_TYPE_P (TREE_TYPE (init)))
7154 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7155 fb_none);
7156 else
7158 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7159 init = init_expr;
7160 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7161 fb_none);
7162 init = NULL;
7163 ggc_free (init_expr);
7165 if (ret == GS_ERROR)
7167 /* PR c++/28266 Make sure this is expanded only once. */
7168 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7169 return GS_ERROR;
7172 if (init)
7173 gimplify_and_add (init, &init_pre_p);
7175 /* Add a clobber for the temporary going out of scope, like
7176 gimplify_bind_expr. */
7177 if (gimplify_ctxp->in_cleanup_point_expr
7178 && needs_to_live_in_memory (temp))
7180 if (flag_stack_reuse == SR_ALL)
7182 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7183 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7184 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7186 if (asan_poisoned_variables
7187 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7188 && !TREE_STATIC (temp)
7189 && dbg_cnt (asan_use_after_scope)
7190 && !gimplify_omp_ctxp)
7192 tree asan_cleanup = build_asan_poison_call_expr (temp);
7193 if (asan_cleanup)
7195 if (unpoison_empty_seq)
7196 unpoison_it = gsi_start (*pre_p);
7198 asan_poison_variable (temp, false, &unpoison_it,
7199 unpoison_empty_seq);
7200 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7205 gimple_seq_add_seq (pre_p, init_pre_p);
7207 /* If needed, push the cleanup for the temp. */
7208 if (TARGET_EXPR_CLEANUP (targ))
7209 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7210 CLEANUP_EH_ONLY (targ), pre_p);
7212 /* Only expand this once. */
7213 TREE_OPERAND (targ, 3) = init;
7214 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7216 else
7217 /* We should have expanded this before. */
7218 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7220 *expr_p = temp;
7221 return GS_OK;
7224 /* Gimplification of expression trees. */
7226 /* Gimplify an expression which appears at statement context. The
7227 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7228 NULL, a new sequence is allocated.
7230 Return true if we actually added a statement to the queue. */
7232 bool
7233 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7235 gimple_seq_node last;
7237 last = gimple_seq_last (*seq_p);
7238 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7239 return last != gimple_seq_last (*seq_p);
7242 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
7243 to CTX. If entries already exist, force them to be some flavor of private.
7244 If there is no enclosing parallel, do nothing. */
7246 void
7247 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7249 splay_tree_node n;
7251 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7252 return;
7256 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7257 if (n != NULL)
7259 if (n->value & GOVD_SHARED)
7260 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7261 else if (n->value & GOVD_MAP)
7262 n->value |= GOVD_MAP_TO_ONLY;
7263 else
7264 return;
7266 else if ((ctx->region_type & ORT_TARGET) != 0)
7268 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7269 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7270 else
7271 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7273 else if (ctx->region_type != ORT_WORKSHARE
7274 && ctx->region_type != ORT_TASKGROUP
7275 && ctx->region_type != ORT_SIMD
7276 && ctx->region_type != ORT_ACC
7277 && !(ctx->region_type & ORT_TARGET_DATA))
7278 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7280 ctx = ctx->outer_context;
7282 while (ctx);
7285 /* Similarly for each of the type sizes of TYPE. */
7287 static void
7288 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7290 if (type == NULL || type == error_mark_node)
7291 return;
7292 type = TYPE_MAIN_VARIANT (type);
7294 if (ctx->privatized_types->add (type))
7295 return;
7297 switch (TREE_CODE (type))
7299 case INTEGER_TYPE:
7300 case ENUMERAL_TYPE:
7301 case BOOLEAN_TYPE:
7302 case REAL_TYPE:
7303 case FIXED_POINT_TYPE:
7304 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7305 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7306 break;
7308 case ARRAY_TYPE:
7309 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7310 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7311 break;
7313 case RECORD_TYPE:
7314 case UNION_TYPE:
7315 case QUAL_UNION_TYPE:
7317 tree field;
7318 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7319 if (TREE_CODE (field) == FIELD_DECL)
7321 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7322 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7325 break;
7327 case POINTER_TYPE:
7328 case REFERENCE_TYPE:
7329 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7330 break;
7332 default:
7333 break;
7336 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7337 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7338 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7341 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
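/* Annotation (not in the original source): a sketch of typical entries
   recorded in CTX->variables by this function, e.g. for

     #pragma omp parallel private(x) shared(y)

   x is entered as GOVD_PRIVATE | GOVD_EXPLICIT and y as
   GOVD_SHARED | GOVD_EXPLICIT; implicitly determined variables get their
   flags from omp_default_clause, and GOVD_SEEN from omp_notice_variable
   once a real use is seen.  */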
7343 static void
7344 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7346 splay_tree_node n;
7347 unsigned int nflags;
7348 tree t;
7350 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7351 return;
7353 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7354 there are constructors involved somewhere. The exception is a shared
7355 clause: nothing is privatized in that case. */
7356 if ((flags & GOVD_SHARED) == 0
7357 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7358 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7359 flags |= GOVD_SEEN;
7361 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7362 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7364 /* We shouldn't be re-adding the decl with the same data
7365 sharing class. */
7366 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7367 nflags = n->value | flags;
7368 /* The only combination of data sharing classes we should see is
7369 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7370 reduction variables to be used in data sharing clauses. */
7371 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7372 || ((nflags & GOVD_DATA_SHARE_CLASS)
7373 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7374 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7375 n->value = nflags;
7376 return;
7379 /* When adding a variable-sized variable, we have to handle all sorts
7380 of additional bits of data: the pointer replacement variable, and
7381 the parameters of the type. */
7382 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7384 /* Add the pointer replacement variable as PRIVATE if the variable
7385 replacement is private, else FIRSTPRIVATE since we'll need the
7386 address of the original variable either for SHARED, or for the
7387 copy into or out of the context. */
7388 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7390 if (flags & GOVD_MAP)
7391 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7392 else if (flags & GOVD_PRIVATE)
7393 nflags = GOVD_PRIVATE;
7394 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7395 && (flags & GOVD_FIRSTPRIVATE))
7396 || (ctx->region_type == ORT_TARGET_DATA
7397 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7398 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7399 else
7400 nflags = GOVD_FIRSTPRIVATE;
7401 nflags |= flags & GOVD_SEEN;
7402 t = DECL_VALUE_EXPR (decl);
7403 gcc_assert (INDIRECT_REF_P (t));
7404 t = TREE_OPERAND (t, 0);
7405 gcc_assert (DECL_P (t));
7406 omp_add_variable (ctx, t, nflags);
7409 /* Add all of the variable and type parameters (which should have
7410 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7411 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7412 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7413 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7415 /* The variable-sized variable itself is never SHARED, only some form
7416 of PRIVATE. The sharing would take place via the pointer variable
7417 which we remapped above. */
7418 if (flags & GOVD_SHARED)
7419 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7420 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7422 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7423 alloca statement we generate for the variable, so make sure it
7424 is available. This isn't automatically needed for the SHARED
7425 case, since we won't be allocating local storage then.
7426 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7427 in which case omp_notice_variable will be called later
7428 on when it is gimplified. */
7429 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7430 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7431 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7433 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7434 && omp_privatize_by_reference (decl))
7436 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7438 /* Similar to the direct variable sized case above, we'll need the
7439 size of references being privatized. */
7440 if ((flags & GOVD_SHARED) == 0)
7442 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7443 if (t && DECL_P (t))
7444 omp_notice_variable (ctx, t, true);
7448 if (n != NULL)
7449 n->value |= flags;
7450 else
7451 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7453 /* For reduction clauses in OpenACC loop directives, by default create a
7454 copy clause on the enclosing parallel construct for carrying back the
7455 results. */
7456 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7458 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7459 while (outer_ctx)
7461 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7462 if (n != NULL)
7464 /* Ignore local variables and explicitly declared clauses. */
7465 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7466 break;
7467 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7469 /* According to the OpenACC spec, such a reduction variable
7470 should already have a copy map on a kernels construct;
7471 verify that here. */
7472 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7473 && (n->value & GOVD_MAP));
7475 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7477 /* Remove firstprivate and make it a copy map. */
7478 n->value &= ~GOVD_FIRSTPRIVATE;
7479 n->value |= GOVD_MAP;
7482 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7484 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7485 GOVD_MAP | GOVD_SEEN);
7486 break;
7488 outer_ctx = outer_ctx->outer_context;
7493 /* Notice a threadprivate variable DECL used in OMP context CTX.
7494 This just prints out diagnostics about threadprivate variable uses
7495 in target regions, order(concurrent) regions and untied tasks. If
7496 DECL2 is non-NULL, prevent this warning on that variable. */
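/* Annotation (not in the original source): for example

     int t;
     #pragma omp threadprivate (t)
     #pragma omp task untied
     t++;

   is diagnosed below with: threadprivate variable 't' used in untied
   task.  */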
7498 static bool
7499 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7500 tree decl2)
7502 splay_tree_node n;
7503 struct gimplify_omp_ctx *octx;
7505 for (octx = ctx; octx; octx = octx->outer_context)
7506 if ((octx->region_type & ORT_TARGET) != 0
7507 || octx->order_concurrent)
7509 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7510 if (n == NULL)
7512 if (octx->order_concurrent)
7514 error ("threadprivate variable %qE used in a region with"
7515 " %<order(concurrent)%> clause", DECL_NAME (decl));
7516 inform (octx->location, "enclosing region");
7518 else
7520 error ("threadprivate variable %qE used in target region",
7521 DECL_NAME (decl));
7522 inform (octx->location, "enclosing target region");
7524 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7526 if (decl2)
7527 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7530 if (ctx->region_type != ORT_UNTIED_TASK)
7531 return false;
7532 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7533 if (n == NULL)
7535 error ("threadprivate variable %qE used in untied task",
7536 DECL_NAME (decl));
7537 inform (ctx->location, "enclosing task");
7538 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7540 if (decl2)
7541 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7542 return false;
7545 /* Return true if global var DECL is device resident. */
7547 static bool
7548 device_resident_p (tree decl)
7550 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7552 if (!attr)
7553 return false;
7555 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7557 tree c = TREE_VALUE (t);
7558 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7559 return true;
7562 return false;
7565 /* Return true if DECL has an ACC DECLARE attribute. */
7567 static bool
7568 is_oacc_declared (tree decl)
7570 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7571 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7572 return declared != NULL_TREE;
7575 /* Determine outer default flags for DECL mentioned in an OMP region
7576 but not declared in an enclosing clause.
7578 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7579 remapped firstprivate instead of shared. To some extent this is
7580 addressed in omp_firstprivatize_type_sizes, but not
7581 effectively. */
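/* Annotation (not in the original source): with default(none), any
   implicitly referenced variable is diagnosed below, e.g.

     int x = 0;
     #pragma omp parallel default(none)
     x++;

   yields: error: 'x' not specified in enclosing 'parallel'.  */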
7583 static unsigned
7584 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7585 bool in_code, unsigned flags)
7587 enum omp_clause_default_kind default_kind = ctx->default_kind;
7588 enum omp_clause_default_kind kind;
7590 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7591 if (ctx->region_type & ORT_TASK)
7593 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7595 /* The event-handle specified by a detach clause should always be firstprivate,
7596 regardless of the current default. */
7597 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7598 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7600 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7601 default_kind = kind;
7602 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7603 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7604 /* For C/C++ default({,first}private), variables with static storage duration
7605 declared in a namespace or global scope and referenced in the construct
7606 must be explicitly specified, i.e. this acts as default(none). */
7607 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7608 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7609 && VAR_P (decl)
7610 && is_global_var (decl)
7611 && (DECL_FILE_SCOPE_P (decl)
7612 || (DECL_CONTEXT (decl)
7613 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7614 && !lang_GNU_Fortran ())
7615 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7617 switch (default_kind)
7619 case OMP_CLAUSE_DEFAULT_NONE:
7621 const char *rtype;
7623 if (ctx->region_type & ORT_PARALLEL)
7624 rtype = "parallel";
7625 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7626 rtype = "taskloop";
7627 else if (ctx->region_type & ORT_TASK)
7628 rtype = "task";
7629 else if (ctx->region_type & ORT_TEAMS)
7630 rtype = "teams";
7631 else
7632 gcc_unreachable ();
7634 error ("%qE not specified in enclosing %qs",
7635 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7636 inform (ctx->location, "enclosing %qs", rtype);
7638 /* FALLTHRU */
7639 case OMP_CLAUSE_DEFAULT_SHARED:
7640 flags |= GOVD_SHARED;
7641 break;
7642 case OMP_CLAUSE_DEFAULT_PRIVATE:
7643 flags |= GOVD_PRIVATE;
7644 break;
7645 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7646 flags |= GOVD_FIRSTPRIVATE;
7647 break;
7648 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7649 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7650 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7651 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7653 omp_notice_variable (octx, decl, in_code);
7654 for (; octx; octx = octx->outer_context)
7656 splay_tree_node n2;
7658 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7659 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7660 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7661 continue;
7662 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7664 flags |= GOVD_FIRSTPRIVATE;
7665 goto found_outer;
7667 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7669 flags |= GOVD_SHARED;
7670 goto found_outer;
7675 if (TREE_CODE (decl) == PARM_DECL
7676 || (!is_global_var (decl)
7677 && DECL_CONTEXT (decl) == current_function_decl))
7678 flags |= GOVD_FIRSTPRIVATE;
7679 else
7680 flags |= GOVD_SHARED;
7681 found_outer:
7682 break;
7684 default:
7685 gcc_unreachable ();
7688 return flags;
7692 /* Determine outer default flags for DECL mentioned in an OACC region
7693 but not declared in an enclosing clause. */
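/* Annotation (not in the original source): e.g. on

     #pragma acc parallel
     { s = a[i]; }

   a scalar like s defaults to firstprivate (GOVD_FIRSTPRIVATE) while the
   aggregate a defaults to an implicit present_or_copy map (GOVD_MAP), as
   implemented in the switch below.  */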
7695 static unsigned
7696 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7698 const char *rkind;
7699 bool on_device = false;
7700 bool is_private = false;
7701 bool declared = is_oacc_declared (decl);
7702 tree type = TREE_TYPE (decl);
7704 if (omp_privatize_by_reference (decl))
7705 type = TREE_TYPE (type);
7707 /* For Fortran COMMON blocks, only the used variables in those blocks are
7708 transferred and remapped. The block itself will have a private clause to
7709 avoid transferring the data twice.
7710 The hook evaluates to false by default. For a variable in a Fortran COMMON
7711 or EQUIVALENCE block, it returns 'true' (as we have shared=false) because
7712 only the variables in such a COMMON/EQUIVALENCE block shall be privatized,
7713 not the whole block. For C++ and Fortran, it can also be true under
7714 certain other conditions, if DECL_HAS_VALUE_EXPR. */
7715 if (RECORD_OR_UNION_TYPE_P (type))
7716 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7718 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7719 && is_global_var (decl)
7720 && device_resident_p (decl)
7721 && !is_private)
7723 on_device = true;
7724 flags |= GOVD_MAP_TO_ONLY;
7727 switch (ctx->region_type)
7729 case ORT_ACC_KERNELS:
7730 rkind = "kernels";
7732 if (is_private)
7733 flags |= GOVD_FIRSTPRIVATE;
7734 else if (AGGREGATE_TYPE_P (type))
7736 /* Aggregates default to 'present_or_copy', or 'present'. */
7737 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7738 flags |= GOVD_MAP;
7739 else
7740 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7742 else
7743 /* Scalars default to 'copy'. */
7744 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7746 break;
7748 case ORT_ACC_PARALLEL:
7749 case ORT_ACC_SERIAL:
7750 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7752 if (is_private)
7753 flags |= GOVD_FIRSTPRIVATE;
7754 else if (on_device || declared)
7755 flags |= GOVD_MAP;
7756 else if (AGGREGATE_TYPE_P (type))
7758 /* Aggregates default to 'present_or_copy', or 'present'. */
7759 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7760 flags |= GOVD_MAP;
7761 else
7762 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7764 else
7765 /* Scalars default to 'firstprivate'. */
7766 flags |= GOVD_FIRSTPRIVATE;
7768 break;
7770 default:
7771 gcc_unreachable ();
7774 if (DECL_ARTIFICIAL (decl))
7775 ; /* We can get compiler-generated decls, and should not complain
7776 about them. */
7777 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7779 error ("%qE not specified in enclosing OpenACC %qs construct",
7780 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7781 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7783 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7784 ; /* Handled above. */
7785 else
7786 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7788 return flags;
7791 /* Record the fact that DECL was used within the OMP context CTX.
7792 IN_CODE is true when real code uses DECL, and false when we should
7793 merely emit default(none) errors. Return true if DECL is going to
7794 be remapped and thus DECL shouldn't be gimplified into its
7795 DECL_VALUE_EXPR (if any). */
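/* Annotation (not in the original source): this is the workhorse for
   implicitly classifying variables referenced inside a construct, e.g.

     #pragma omp target
     y = x;

   with neither x nor y mentioned in any clause: scalars are typically
   made firstprivate and aggregates mapped tofrom, according to
   ctx->defaultmap[] as computed below.  */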
7797 static bool
7798 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7800 splay_tree_node n;
7801 unsigned flags = in_code ? GOVD_SEEN : 0;
7802 bool ret = false, shared;
7804 if (error_operand_p (decl))
7805 return false;
7807 if (ctx->region_type == ORT_NONE)
7808 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7810 if (is_global_var (decl))
7812 /* Threadprivate variables are predetermined. */
7813 if (DECL_THREAD_LOCAL_P (decl))
7814 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7816 if (DECL_HAS_VALUE_EXPR_P (decl))
7818 if (ctx->region_type & ORT_ACC)
7819 /* For OpenACC, defer expansion of value to avoid transferring
7820 privatized common-block data instead of the implicitly or explicitly
7821 transferred variables which are in those common blocks. */
7822 ;
7823 else
7825 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7827 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7828 return omp_notice_threadprivate_variable (ctx, decl, value);
7832 if (gimplify_omp_ctxp->outer_context == NULL
7833 && VAR_P (decl)
7834 && oacc_get_fn_attrib (current_function_decl))
7836 location_t loc = DECL_SOURCE_LOCATION (decl);
7838 if (lookup_attribute ("omp declare target link",
7839 DECL_ATTRIBUTES (decl)))
7841 error_at (loc,
7842 "%qE with %<link%> clause used in %<routine%> function",
7843 DECL_NAME (decl));
7844 return false;
7846 else if (!lookup_attribute ("omp declare target",
7847 DECL_ATTRIBUTES (decl)))
7849 error_at (loc,
7850 "%qE requires a %<declare%> directive for use "
7851 "in a %<routine%> function", DECL_NAME (decl));
7852 return false;
7857 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7858 if ((ctx->region_type & ORT_TARGET) != 0)
7860 if (ctx->region_type & ORT_ACC)
7861 /* For OpenACC, as remarked above, defer expansion. */
7862 shared = false;
7863 else
7864 shared = true;
7866 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7867 if (n == NULL)
7869 unsigned nflags = flags;
7870 if ((ctx->region_type & ORT_ACC) == 0)
7872 bool is_declare_target = false;
7873 if (is_global_var (decl)
7874 && varpool_node::get_create (decl)->offloadable)
7876 struct gimplify_omp_ctx *octx;
7877 for (octx = ctx->outer_context;
7878 octx; octx = octx->outer_context)
7880 n = splay_tree_lookup (octx->variables,
7881 (splay_tree_key)decl);
7882 if (n
7883 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7884 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7885 break;
7887 is_declare_target = octx == NULL;
7889 if (!is_declare_target)
7891 int gdmk;
7892 enum omp_clause_defaultmap_kind kind;
7893 if (lang_hooks.decls.omp_allocatable_p (decl))
7894 gdmk = GDMK_ALLOCATABLE;
7895 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7896 gdmk = GDMK_SCALAR_TARGET;
7897 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7898 gdmk = GDMK_SCALAR;
7899 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7900 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7901 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7902 == POINTER_TYPE)))
7903 gdmk = GDMK_POINTER;
7904 else
7905 gdmk = GDMK_AGGREGATE;
7906 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7907 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7909 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7910 nflags |= GOVD_FIRSTPRIVATE;
7911 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7912 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7913 else
7914 gcc_unreachable ();
7916 else if (ctx->defaultmap[gdmk] == 0)
7918 tree d = lang_hooks.decls.omp_report_decl (decl);
7919 error ("%qE not specified in enclosing %<target%>",
7920 DECL_NAME (d));
7921 inform (ctx->location, "enclosing %<target%>");
7923 else if (ctx->defaultmap[gdmk]
7924 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7925 nflags |= ctx->defaultmap[gdmk];
7926 else
7928 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7929 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7934 struct gimplify_omp_ctx *octx = ctx->outer_context;
7935 if ((ctx->region_type & ORT_ACC) && octx)
7937 /* Look in outer OpenACC contexts, to see if there's a
7938 data attribute for this variable. */
7939 omp_notice_variable (octx, decl, in_code);
7941 for (; octx; octx = octx->outer_context)
7943 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7944 break;
7945 splay_tree_node n2
7946 = splay_tree_lookup (octx->variables,
7947 (splay_tree_key) decl);
7948 if (n2)
7950 if (octx->region_type == ORT_ACC_HOST_DATA)
7951 error ("variable %qE declared in enclosing "
7952 "%<host_data%> region", DECL_NAME (decl));
7953 nflags |= GOVD_MAP;
7954 if (octx->region_type == ORT_ACC_DATA
7955 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7956 nflags |= GOVD_MAP_0LEN_ARRAY;
7957 goto found_outer;
7962 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7963 | GOVD_MAP_ALLOC_ONLY)) == flags)
7965 tree type = TREE_TYPE (decl);
7967 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7968 && omp_privatize_by_reference (decl))
7969 type = TREE_TYPE (type);
7970 if (!omp_mappable_type (type))
7972 error ("%qD referenced in target region does not have "
7973 "a mappable type", decl);
7974 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7976 else
7978 if ((ctx->region_type & ORT_ACC) != 0)
7979 nflags = oacc_default_clause (ctx, decl, flags);
7980 else
7981 nflags |= GOVD_MAP;
7984 found_outer:
7985 omp_add_variable (ctx, decl, nflags);
7987 else
7989 /* If nothing changed, there's nothing left to do. */
7990 if ((n->value & flags) == flags)
7991 return ret;
7992 flags |= n->value;
7993 n->value = flags;
7995 goto do_outer;
7998 if (n == NULL)
8000 if (ctx->region_type == ORT_WORKSHARE
8001 || ctx->region_type == ORT_TASKGROUP
8002 || ctx->region_type == ORT_SIMD
8003 || ctx->region_type == ORT_ACC
8004 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8005 goto do_outer;
8007 flags = omp_default_clause (ctx, decl, in_code, flags);
8009 if ((flags & GOVD_PRIVATE)
8010 && lang_hooks.decls.omp_private_outer_ref (decl))
8011 flags |= GOVD_PRIVATE_OUTER_REF;
8013 omp_add_variable (ctx, decl, flags);
8015 shared = (flags & GOVD_SHARED) != 0;
8016 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8017 goto do_outer;
8020 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8021 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
8022 if (ctx->region_type == ORT_SIMD
8023 && ctx->in_for_exprs
8024 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8025 == GOVD_PRIVATE))
8026 flags &= ~GOVD_SEEN;
8028 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8029 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8030 && DECL_SIZE (decl))
8032 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8034 splay_tree_node n2;
8035 tree t = DECL_VALUE_EXPR (decl);
8036 gcc_assert (INDIRECT_REF_P (t));
8037 t = TREE_OPERAND (t, 0);
8038 gcc_assert (DECL_P (t));
8039 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8040 n2->value |= GOVD_SEEN;
8042 else if (omp_privatize_by_reference (decl)
8043 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8044 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8045 != INTEGER_CST))
8047 splay_tree_node n2;
8048 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8049 gcc_assert (DECL_P (t));
8050 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8051 if (n2)
8052 omp_notice_variable (ctx, t, true);
8056 if (ctx->region_type & ORT_ACC)
8057 /* For OpenACC, as remarked above, defer expansion. */
8058 shared = false;
8059 else
8060 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8061 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8063 /* If nothing changed, there's nothing left to do. */
8064 if ((n->value & flags) == flags)
8065 return ret;
8066 flags |= n->value;
8067 n->value = flags;
8069 do_outer:
8070 /* If the variable is private in the current context, then we don't
8071 need to propagate anything to an outer context. */
8072 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8073 return ret;
8074 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8075 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8076 return ret;
8077 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8078 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8079 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8080 return ret;
8081 if (ctx->outer_context
8082 && omp_notice_variable (ctx->outer_context, decl, in_code))
8083 return true;
8084 return ret;
8087 /* Verify that DECL is private within CTX. If there's specific information
8088 to the contrary in the innermost scope, generate an error. */
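/* Annotation (not in the original source): this enforces the rule that
   loop iteration variables are predetermined private (or linear for
   simd).  E.g. a worksharing loop whose iteration variable has been made
   shared on the binding parallel is diagnosed below with
   "iteration variable ... should be private".  */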
8090 static bool
8091 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8093 splay_tree_node n;
8095 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8096 if (n != NULL)
8098 if (n->value & GOVD_SHARED)
8100 if (ctx == gimplify_omp_ctxp)
8102 if (simd)
8103 error ("iteration variable %qE is predetermined linear",
8104 DECL_NAME (decl));
8105 else
8106 error ("iteration variable %qE should be private",
8107 DECL_NAME (decl));
8108 n->value = GOVD_PRIVATE;
8109 return true;
8111 else
8112 return false;
8114 else if ((n->value & GOVD_EXPLICIT) != 0
8115 && (ctx == gimplify_omp_ctxp
8116 || (ctx->region_type == ORT_COMBINED_PARALLEL
8117 && gimplify_omp_ctxp->outer_context == ctx)))
8119 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8120 error ("iteration variable %qE should not be firstprivate",
8121 DECL_NAME (decl));
8122 else if ((n->value & GOVD_REDUCTION) != 0)
8123 error ("iteration variable %qE should not be reduction",
8124 DECL_NAME (decl));
8125 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8126 error ("iteration variable %qE should not be linear",
8127 DECL_NAME (decl));
8129 return (ctx == gimplify_omp_ctxp
8130 || (ctx->region_type == ORT_COMBINED_PARALLEL
8131 && gimplify_omp_ctxp->outer_context == ctx));
8134 if (ctx->region_type != ORT_WORKSHARE
8135 && ctx->region_type != ORT_TASKGROUP
8136 && ctx->region_type != ORT_SIMD
8137 && ctx->region_type != ORT_ACC)
8138 return false;
8139 else if (ctx->outer_context)
8140 return omp_is_private (ctx->outer_context, decl, simd);
8141 return false;
8144 /* Return true if DECL is private within a parallel region
8145 that binds to the current construct's context, or appears in that
8146 parallel region's REDUCTION clause. */
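/* Annotation (not in the original source): used e.g. for checking
   copyprivate on single, which requires the variable to be private in
   the enclosing context; the loop below walks outward through nested
   contexts until one actually classifies DECL.  */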
8148 static bool
8149 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8151 splay_tree_node n;
8153 do
8155 ctx = ctx->outer_context;
8156 if (ctx == NULL)
8158 if (is_global_var (decl))
8159 return false;
8161 /* References might be private, but might be shared too.
8162 When checking for copyprivate, assume they might be
8163 private; otherwise assume they might be shared. */
8164 if (copyprivate)
8165 return true;
8167 if (omp_privatize_by_reference (decl))
8168 return false;
8170 /* Treat C++ privatized non-static data members outside
8171 of the privatization the same. */
8172 if (omp_member_access_dummy_var (decl))
8173 return false;
8175 return true;
8178 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8180 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8181 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8183 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8184 || n == NULL
8185 || (n->value & GOVD_MAP) == 0)
8186 continue;
8187 return false;
8190 if (n != NULL)
8192 if ((n->value & GOVD_LOCAL) != 0
8193 && omp_member_access_dummy_var (decl))
8194 return false;
8195 return (n->value & GOVD_SHARED) == 0;
8198 if (ctx->region_type == ORT_WORKSHARE
8199 || ctx->region_type == ORT_TASKGROUP
8200 || ctx->region_type == ORT_SIMD
8201 || ctx->region_type == ORT_ACC)
8202 continue;
8204 break;
8206 while (1);
8207 return false;
8210 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8212 static tree
8213 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8215 tree t = *tp;
8217 /* If this is the DECL_EXPR for the decl we are looking for, return it. */
8218 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8219 return t;
8221 if (IS_TYPE_OR_DECL_P (t))
8222 *walk_subtrees = 0;
8223 return NULL_TREE;
8227 /* Gimplify the affinity clause but effectively ignore it.
8228 Generate:
8229 var = begin;
8230 if ((step > 0) ? var <= end : var > end)
8231 locator_var_expr; */
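/* Annotation (not in the original source): e.g. for

     #pragma omp task affinity(iterator(i=0:n): a[i])

   the loop below gimplifies the iterator bounds and evaluates each a[i]
   locator for its side effects, but no affinity information is actually
   passed to the runtime.  */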
8233 static void
8234 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8236 tree last_iter = NULL_TREE;
8237 tree last_bind = NULL_TREE;
8238 tree label = NULL_TREE;
8239 tree *last_body = NULL;
8240 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8243 tree t = OMP_CLAUSE_DECL (c);
8244 if (TREE_CODE (t) == TREE_LIST
8245 && TREE_PURPOSE (t)
8246 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8248 if (TREE_VALUE (t) == null_pointer_node)
8249 continue;
8250 if (TREE_PURPOSE (t) != last_iter)
8252 if (last_bind)
8254 append_to_statement_list (label, last_body);
8255 gimplify_and_add (last_bind, pre_p);
8256 last_bind = NULL_TREE;
8258 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8260 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8261 is_gimple_val, fb_rvalue) == GS_ERROR
8262 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8263 is_gimple_val, fb_rvalue) == GS_ERROR
8264 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8265 is_gimple_val, fb_rvalue) == GS_ERROR
8266 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8267 is_gimple_val, fb_rvalue)
8268 == GS_ERROR))
8269 return;
8271 last_iter = TREE_PURPOSE (t);
8272 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8273 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8274 NULL, block);
8275 last_body = &BIND_EXPR_BODY (last_bind);
8276 tree cond = NULL_TREE;
8277 location_t loc = OMP_CLAUSE_LOCATION (c);
8278 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8280 tree var = TREE_VEC_ELT (it, 0);
8281 tree begin = TREE_VEC_ELT (it, 1);
8282 tree end = TREE_VEC_ELT (it, 2);
8283 tree step = TREE_VEC_ELT (it, 3);
8284 loc = DECL_SOURCE_LOCATION (var);
8285 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8286 var, begin);
8287 append_to_statement_list_force (tem, last_body);
8289 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8290 step, build_zero_cst (TREE_TYPE (step)));
8291 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8292 var, end);
8293 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8294 var, end);
8295 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8296 cond1, cond2, cond3);
8297 if (cond)
8298 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8299 boolean_type_node, cond, cond1);
8300 else
8301 cond = cond1;
8303 tree cont_label = create_artificial_label (loc);
8304 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8305 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8306 void_node,
8307 build_and_jump (&cont_label));
8308 append_to_statement_list_force (tem, last_body);
8310 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8312 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8313 last_body);
8314 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8316 if (error_operand_p (TREE_VALUE (t)))
8317 return;
8318 append_to_statement_list_force (TREE_VALUE (t), last_body);
8319 TREE_VALUE (t) = null_pointer_node;
8321 else
8323 if (last_bind)
8325 append_to_statement_list (label, last_body);
8326 gimplify_and_add (last_bind, pre_p);
8327 last_bind = NULL_TREE;
8329 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8331 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8332 NULL, is_gimple_val, fb_rvalue);
8333 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8335 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8336 return;
8337 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8338 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8339 return;
8340 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8343 if (last_bind)
8345 append_to_statement_list (label, last_body);
8346 gimplify_and_add (last_bind, pre_p);
8348 return;
8351 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8352 lower all the depend clauses by populating the corresponding depend
8353 array. Returns 0 if there are no such depend clauses, or
8354 2 if all depend clauses should be removed, 1 otherwise. */
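/* Annotation (not in the original source): sketch of the array built
   below.  For the legacy layout (only in/out/inout clauses):

     array[0] = total number of depend addresses
     array[1] = number of out/inout addresses
     array[2...] = the addresses themselves

   and for the extended layout (mutexinoutset/depobj/inoutset present):

     array[0] = 0
     array[1] = total number of addresses
     array[2] = number of out/inout, array[3] = mutexinoutset,
     array[4] = in; addresses follow from array[5] onwards.  */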
8356 static int
8357 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8359 tree c;
8360 gimple *g;
8361 size_t n[5] = { 0, 0, 0, 0, 0 };
8362 bool unused[5];
8363 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8364 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8365 size_t i, j;
8366 location_t first_loc = UNKNOWN_LOCATION;
8368 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8369 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8371 switch (OMP_CLAUSE_DEPEND_KIND (c))
8373 case OMP_CLAUSE_DEPEND_IN:
8374 i = 2;
8375 break;
8376 case OMP_CLAUSE_DEPEND_OUT:
8377 case OMP_CLAUSE_DEPEND_INOUT:
8378 i = 0;
8379 break;
8380 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8381 i = 1;
8382 break;
8383 case OMP_CLAUSE_DEPEND_DEPOBJ:
8384 i = 3;
8385 break;
8386 case OMP_CLAUSE_DEPEND_INOUTSET:
8387 i = 4;
8388 break;
8389 default:
8390 gcc_unreachable ();
8392 tree t = OMP_CLAUSE_DECL (c);
8393 if (first_loc == UNKNOWN_LOCATION)
8394 first_loc = OMP_CLAUSE_LOCATION (c);
8395 if (TREE_CODE (t) == TREE_LIST
8396 && TREE_PURPOSE (t)
8397 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8399 if (TREE_PURPOSE (t) != last_iter)
8401 tree tcnt = size_one_node;
8402 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8404 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8405 is_gimple_val, fb_rvalue) == GS_ERROR
8406 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8407 is_gimple_val, fb_rvalue) == GS_ERROR
8408 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8409 is_gimple_val, fb_rvalue) == GS_ERROR
8410 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8411 is_gimple_val, fb_rvalue)
8412 == GS_ERROR))
8413 return 2;
8414 tree var = TREE_VEC_ELT (it, 0);
8415 tree begin = TREE_VEC_ELT (it, 1);
8416 tree end = TREE_VEC_ELT (it, 2);
8417 tree step = TREE_VEC_ELT (it, 3);
8418 tree orig_step = TREE_VEC_ELT (it, 4);
8419 tree type = TREE_TYPE (var);
8420 tree stype = TREE_TYPE (step);
8421 location_t loc = DECL_SOURCE_LOCATION (var);
8422 tree endmbegin;
8423 /* Compute count for this iterator as
8424 orig_step > 0
8425 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8426 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8427 and compute product of those for the entire depend
8428 clause. */
8429 if (POINTER_TYPE_P (type))
8430 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8431 stype, end, begin);
8432 else
8433 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8434 end, begin);
8435 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8436 step,
8437 build_int_cst (stype, 1));
8438 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8439 build_int_cst (stype, 1));
8440 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8441 unshare_expr (endmbegin),
8442 stepm1);
8443 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8444 pos, step);
8445 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8446 endmbegin, stepp1);
8447 if (TYPE_UNSIGNED (stype))
8449 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8450 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8452 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8453 neg, step);
8454 step = NULL_TREE;
8455 tree cond = fold_build2_loc (loc, LT_EXPR,
8456 boolean_type_node,
8457 begin, end);
8458 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8459 build_int_cst (stype, 0));
8460 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8461 end, begin);
8462 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8463 build_int_cst (stype, 0));
8464 tree osteptype = TREE_TYPE (orig_step);
8465 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8466 orig_step,
8467 build_int_cst (osteptype, 0));
8468 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8469 cond, pos, neg);
8470 cnt = fold_convert_loc (loc, sizetype, cnt);
8471 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8472 fb_rvalue) == GS_ERROR)
8473 return 2;
8474 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8476 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8477 fb_rvalue) == GS_ERROR)
8478 return 2;
8479 last_iter = TREE_PURPOSE (t);
8480 last_count = tcnt;
8482 if (counts[i] == NULL_TREE)
8483 counts[i] = last_count;
8484 else
8485 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8486 PLUS_EXPR, counts[i], last_count);
8488 else
8489 n[i]++;
8491 for (i = 0; i < 5; i++)
8492 if (counts[i])
8493 break;
8494 if (i == 5)
8495 return 0;
8497 tree total = size_zero_node;
8498 for (i = 0; i < 5; i++)
8500 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8501 if (counts[i] == NULL_TREE)
8502 counts[i] = size_zero_node;
8503 if (n[i])
8504 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8505 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8506 fb_rvalue) == GS_ERROR)
8507 return 2;
8508 total = size_binop (PLUS_EXPR, total, counts[i]);
8511 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8512 == GS_ERROR)
8513 return 2;
8514 bool is_old = unused[1] && unused[3] && unused[4];
8515 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8516 size_int (is_old ? 1 : 4));
8517 if (!unused[4])
8518 totalpx = size_binop (PLUS_EXPR, totalpx,
8519 size_binop (MULT_EXPR, counts[4], size_int (2)));
8520 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8521 tree array = create_tmp_var_raw (type);
8522 TREE_ADDRESSABLE (array) = 1;
8523 if (!poly_int_tree_p (totalpx))
8525 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8526 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8527 if (gimplify_omp_ctxp)
8529 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8530 while (ctx
8531 && (ctx->region_type == ORT_WORKSHARE
8532 || ctx->region_type == ORT_TASKGROUP
8533 || ctx->region_type == ORT_SIMD
8534 || ctx->region_type == ORT_ACC))
8535 ctx = ctx->outer_context;
8536 if (ctx)
8537 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8539 gimplify_vla_decl (array, pre_p);
8541 else
8542 gimple_add_tmp_var (array);
8543 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8544 NULL_TREE);
8545 tree tem;
8546 if (!is_old)
8548 tem = build2 (MODIFY_EXPR, void_type_node, r,
8549 build_int_cst (ptr_type_node, 0));
8550 gimplify_and_add (tem, pre_p);
8551 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8552 NULL_TREE);
8554 tem = build2 (MODIFY_EXPR, void_type_node, r,
8555 fold_convert (ptr_type_node, total));
8556 gimplify_and_add (tem, pre_p);
8557 for (i = 1; i < (is_old ? 2 : 4); i++)
8559 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8560 NULL_TREE, NULL_TREE);
8561 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8562 gimplify_and_add (tem, pre_p);
8565 tree cnts[6];
8566 for (j = 5; j; j--)
8567 if (!unused[j - 1])
8568 break;
8569 for (i = 0; i < 5; i++)
8571 if (i && (i >= j || unused[i - 1]))
8573 cnts[i] = cnts[i - 1];
8574 continue;
8576 cnts[i] = create_tmp_var (sizetype);
8577 if (i == 0)
8578 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8579 else
8581 tree t;
8582 if (is_old)
8583 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8584 else
8585 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8586 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8587 == GS_ERROR)
8588 return 2;
8589 g = gimple_build_assign (cnts[i], t);
8591 gimple_seq_add_stmt (pre_p, g);
8593 if (unused[4])
8594 cnts[5] = NULL_TREE;
8595 else
8597 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8598 cnts[5] = create_tmp_var (sizetype);
8599 g = gimple_build_assign (cnts[5], t);
8600 gimple_seq_add_stmt (pre_p, g);
8603 last_iter = NULL_TREE;
8604 tree last_bind = NULL_TREE;
8605 tree *last_body = NULL;
8606 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8607 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8609 switch (OMP_CLAUSE_DEPEND_KIND (c))
8611 case OMP_CLAUSE_DEPEND_IN:
8612 i = 2;
8613 break;
8614 case OMP_CLAUSE_DEPEND_OUT:
8615 case OMP_CLAUSE_DEPEND_INOUT:
8616 i = 0;
8617 break;
8618 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8619 i = 1;
8620 break;
8621 case OMP_CLAUSE_DEPEND_DEPOBJ:
8622 i = 3;
8623 break;
8624 case OMP_CLAUSE_DEPEND_INOUTSET:
8625 i = 4;
8626 break;
8627 default:
8628 gcc_unreachable ();
8630 tree t = OMP_CLAUSE_DECL (c);
8631 if (TREE_CODE (t) == TREE_LIST
8632 && TREE_PURPOSE (t)
8633 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8635 if (TREE_PURPOSE (t) != last_iter)
8637 if (last_bind)
8638 gimplify_and_add (last_bind, pre_p);
8639 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8640 last_bind = build3 (BIND_EXPR, void_type_node,
8641 BLOCK_VARS (block), NULL, block);
8642 TREE_SIDE_EFFECTS (last_bind) = 1;
8643 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8644 tree *p = &BIND_EXPR_BODY (last_bind);
8645 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8647 tree var = TREE_VEC_ELT (it, 0);
8648 tree begin = TREE_VEC_ELT (it, 1);
8649 tree end = TREE_VEC_ELT (it, 2);
8650 tree step = TREE_VEC_ELT (it, 3);
8651 tree orig_step = TREE_VEC_ELT (it, 4);
8652 tree type = TREE_TYPE (var);
8653 location_t loc = DECL_SOURCE_LOCATION (var);
8654 /* Emit:
8655 var = begin;
8656 goto cond_label;
8657 beg_label:
8659 var = var + step;
8660 cond_label:
8661 if (orig_step > 0) {
8662 if (var < end) goto beg_label;
8663 } else {
8664 if (var > end) goto beg_label;
8666 for each iterator, with inner iterators added to
8667 the ... above. */
8668 tree beg_label = create_artificial_label (loc);
8669 tree cond_label = NULL_TREE;
8670 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8671 var, begin);
8672 append_to_statement_list_force (tem, p);
8673 tem = build_and_jump (&cond_label);
8674 append_to_statement_list_force (tem, p);
8675 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8676 append_to_statement_list (tem, p);
8677 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8678 NULL_TREE, NULL_TREE);
8679 TREE_SIDE_EFFECTS (bind) = 1;
8680 SET_EXPR_LOCATION (bind, loc);
8681 append_to_statement_list_force (bind, p);
8682 if (POINTER_TYPE_P (type))
8683 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8684 var, fold_convert_loc (loc, sizetype,
8685 step));
8686 else
8687 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8688 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8689 var, tem);
8690 append_to_statement_list_force (tem, p);
8691 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8692 append_to_statement_list (tem, p);
8693 tree cond = fold_build2_loc (loc, LT_EXPR,
8694 boolean_type_node,
8695 var, end);
8696 tree pos
8697 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8698 cond, build_and_jump (&beg_label),
8699 void_node);
8700 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8701 var, end);
8702 tree neg
8703 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8704 cond, build_and_jump (&beg_label),
8705 void_node);
8706 tree osteptype = TREE_TYPE (orig_step);
8707 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8708 orig_step,
8709 build_int_cst (osteptype, 0));
8710 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8711 cond, pos, neg);
8712 append_to_statement_list_force (tem, p);
8713 p = &BIND_EXPR_BODY (bind);
8715 last_body = p;
8717 last_iter = TREE_PURPOSE (t);
8718 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8720 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8721 0), last_body);
8722 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8724 if (error_operand_p (TREE_VALUE (t)))
8725 return 2;
8726 if (TREE_VALUE (t) != null_pointer_node)
8727 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8728 if (i == 4)
8730 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8731 NULL_TREE, NULL_TREE);
8732 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8733 NULL_TREE, NULL_TREE);
8734 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8735 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8736 void_type_node, r, r2);
8737 append_to_statement_list_force (tem, last_body);
8738 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8739 void_type_node, cnts[i],
8740 size_binop (PLUS_EXPR, cnts[i],
8741 size_int (1)));
8742 append_to_statement_list_force (tem, last_body);
8743 i = 5;
8745 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8746 NULL_TREE, NULL_TREE);
8747 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8748 void_type_node, r, TREE_VALUE (t));
8749 append_to_statement_list_force (tem, last_body);
8750 if (i == 5)
8752 r = build4 (ARRAY_REF, ptr_type_node, array,
8753 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8754 NULL_TREE, NULL_TREE);
8755 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8756 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8757 void_type_node, r, tem);
8758 append_to_statement_list_force (tem, last_body);
8760 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8761 void_type_node, cnts[i],
8762 size_binop (PLUS_EXPR, cnts[i],
8763 size_int (1 + (i == 5))));
8764 append_to_statement_list_force (tem, last_body);
8765 TREE_VALUE (t) = null_pointer_node;
8767 else
8769 if (last_bind)
8771 gimplify_and_add (last_bind, pre_p);
8772 last_bind = NULL_TREE;
8774 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8776 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8777 NULL, is_gimple_val, fb_rvalue);
8778 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8780 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8781 return 2;
8782 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8783 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8784 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8785 is_gimple_val, fb_rvalue) == GS_ERROR)
8786 return 2;
8787 if (i == 4)
8789 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8790 NULL_TREE, NULL_TREE);
8791 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8792 NULL_TREE, NULL_TREE);
8793 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8794 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8795 gimplify_and_add (tem, pre_p);
8796 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8797 cnts[i],
8798 size_int (1)));
8799 gimple_seq_add_stmt (pre_p, g);
8800 i = 5;
8802 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8803 NULL_TREE, NULL_TREE);
8804 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8805 gimplify_and_add (tem, pre_p);
8806 if (i == 5)
8808 r = build4 (ARRAY_REF, ptr_type_node, array,
8809 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8810 NULL_TREE, NULL_TREE);
8811 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8812 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8814 gimplify_and_add (tem, pre_p);
8816 g = gimple_build_assign (cnts[i],
8817 size_binop (PLUS_EXPR, cnts[i],
8818 size_int (1 + (i == 5))));
8819 gimple_seq_add_stmt (pre_p, g);
8822 if (last_bind)
8823 gimplify_and_add (last_bind, pre_p);
8824 tree cond = boolean_false_node;
8825 if (is_old)
8827 if (!unused[0])
8828 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8829 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8830 size_int (2)));
8831 if (!unused[2])
8832 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8833 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8834 cnts[2],
8835 size_binop_loc (first_loc, PLUS_EXPR,
8836 totalpx,
8837 size_int (1))));
8839 else
8841 tree prev = size_int (5);
8842 for (i = 0; i < 5; i++)
8844 if (unused[i])
8845 continue;
8846 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8847 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8848 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8849 cnts[i], unshare_expr (prev)));
8852 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8853 build_call_expr_loc (first_loc,
8854 builtin_decl_explicit (BUILT_IN_TRAP),
8855 0), void_node);
8856 gimplify_and_add (tem, pre_p);
8857 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8858 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8859 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8860 OMP_CLAUSE_CHAIN (c) = *list_p;
8861 *list_p = c;
8862 return 1;
8865 /* For a set of mappings describing an array section pointed to by a struct
8866 (or derived type, etc.) component, create an "alloc" or "release" node to
8867 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8868 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8869 be created that is inserted into the list of mapping nodes attached to the
8870 directive being processed -- not part of the sorted list of nodes after
8871 GOMP_MAP_STRUCT.
8873 CODE is the code of the directive being processed. GRP_START and GRP_END
8874 are the first and last of two or three nodes representing this array section
8875 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8876 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8877 filled with the additional node described above, if needed.
8879 This function does not add the new nodes to any lists itself. It is the
8880 responsibility of the caller to do that. */
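/* Annotation (not in the original source): for a group such as

     GOMP_MAP_TO <section>; GOMP_MAP_TO_PSET <descriptor>;
     GOMP_MAP_ATTACH_DETACH <pointer>

   the returned node allocs (or releases, on exit data) the decl of the
   group's final clause, using the TO_PSET's size when one is present;
   *EXTRA_NODE gets a second alloc/release node when the middle node is
   a GOMP_MAP_ALWAYS_POINTER or GOMP_MAP_ATTACH_DETACH mapping.  */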
8882 static tree
8883 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8884 tree *extra_node)
8886 enum gomp_map_kind mkind
8887 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8888 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8890 gcc_assert (grp_start != grp_end);
8892 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8893 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8894 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8895 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8896 tree grp_mid = NULL_TREE;
8897 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8898 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8900 if (grp_mid
8901 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8902 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8903 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8904 else
8905 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8907 if (grp_mid
8908 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8909 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
8910 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
8912 tree c3
8913 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8914 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8915 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
8916 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8917 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
8919 *extra_node = c3;
8921 else
8922 *extra_node = NULL_TREE;
8924 return c2;
8927 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
8928 set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the
8929 access, and return the containing object. */
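/* Annotation (not in the original source): e.g. for BASE `s.a.b[4]'
   where member b sits at byte offset 16 in s and has 8-byte elements,
   get_inner_reference returns s with bit position (16 + 4*8) * 8 = 384,
   so *BITPOSP is set to 384 and *POFFSETP to the 48-byte offset of the
   access.  */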
8934 static tree
8935 extract_base_bit_offset (tree base, poly_int64 *bitposp,
8936 poly_offset_int *poffsetp)
8938 tree offset;
8939 poly_int64 bitsize, bitpos;
8940 machine_mode mode;
8941 int unsignedp, reversep, volatilep = 0;
8942 poly_offset_int poffset;
8944 STRIP_NOPS (base);
8946 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8947 &unsignedp, &reversep, &volatilep);
8949 STRIP_NOPS (base);
8951 if (offset && poly_int_tree_p (offset))
8953 poffset = wi::to_poly_offset (offset);
8954 offset = NULL_TREE;
8956 else
8957 poffset = 0;
8959 if (maybe_ne (bitpos, 0))
8960 poffset += bits_to_bytes_round_down (bitpos);
8962 *bitposp = bitpos;
8963 *poffsetp = poffset;
8965 return base;
8968 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8969 started processing the group yet. The TEMPORARY mark is used when we first
8970 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8971 when we have processed all the group's children (i.e. all the base pointers
8972 referred to by the group's mapping nodes, recursively). */
8974 enum omp_tsort_mark {
8975 UNVISITED,
8976 TEMPORARY,
8977 PERMANENT
8980 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
8981 but ignores side effects in the equality comparisons. */
8983 struct tree_operand_hash_no_se : tree_operand_hash
8985 static inline bool equal (const value_type &,
8986 const compare_type &);
8989 inline bool
8990 tree_operand_hash_no_se::equal (const value_type &t1,
8991 const compare_type &t2)
8993 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
8996 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
8997 clause. */
8999 struct omp_mapping_group {
9000 tree *grp_start;
9001 tree grp_end;
9002 omp_tsort_mark mark;
9003 /* If we've removed the group but need to reindex, mark the group as
9004 deleted. */
9005 bool deleted;
9006 struct omp_mapping_group *sibling;
9007 struct omp_mapping_group *next;
9010 DEBUG_FUNCTION void
9011 debug_mapping_group (omp_mapping_group *grp)
9013 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9014 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9015 debug_generic_expr (*grp->grp_start);
9016 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9019 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9020 isn't one. */
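/* Annotation (not in the original source): e.g. for EXPR `p->arr[i].x'
   the loops below strip the COMPONENT_REF and ARRAY_REF, see the
   INDIRECT_REF and return `p'; for `s.arr[i]' with no indirection there
   is no base pointer and NULL_TREE is returned.  */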
9022 static tree
9023 omp_get_base_pointer (tree expr)
9025 while (TREE_CODE (expr) == ARRAY_REF
9026 || TREE_CODE (expr) == COMPONENT_REF)
9027 expr = TREE_OPERAND (expr, 0);
9029 if (INDIRECT_REF_P (expr)
9030 || (TREE_CODE (expr) == MEM_REF
9031 && integer_zerop (TREE_OPERAND (expr, 1))))
9033 expr = TREE_OPERAND (expr, 0);
9034 while (TREE_CODE (expr) == COMPOUND_EXPR)
9035 expr = TREE_OPERAND (expr, 1);
9036 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9037 expr = TREE_OPERAND (expr, 0);
9038 if (TREE_CODE (expr) == SAVE_EXPR)
9039 expr = TREE_OPERAND (expr, 0);
9040 STRIP_NOPS (expr);
9041 return expr;
9044 return NULL_TREE;
9047 /* Remove COMPONENT_REFS and indirections from EXPR. */
9049 static tree
9050 omp_strip_components_and_deref (tree expr)
9052 while (TREE_CODE (expr) == COMPONENT_REF
9053 || INDIRECT_REF_P (expr)
9054 || (TREE_CODE (expr) == MEM_REF
9055 && integer_zerop (TREE_OPERAND (expr, 1)))
9056 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9057 || TREE_CODE (expr) == COMPOUND_EXPR)
9058 if (TREE_CODE (expr) == COMPOUND_EXPR)
9059 expr = TREE_OPERAND (expr, 1);
9060 else
9061 expr = TREE_OPERAND (expr, 0);
9063 STRIP_NOPS (expr);
9065 return expr;
9068 static tree
9069 omp_strip_indirections (tree expr)
9071 while (INDIRECT_REF_P (expr)
9072 || (TREE_CODE (expr) == MEM_REF
9073 && integer_zerop (TREE_OPERAND (expr, 1))))
9074 expr = TREE_OPERAND (expr, 0);
9076 return expr;
9079 /* An attach or detach operation depends directly on the address being
9080 attached/detached. Return that address, or none if there are no
9081 attachments/detachments. */
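/* Annotation (not in the original source): e.g. for a group

     GOMP_MAP_TO <*p (section)>; GOMP_MAP_ATTACH_DETACH <p>

   this returns the address `p' being attached/detached, and NULL_TREE
   for groups that map whole objects with no pointer attachment.  */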
9083 static tree
9084 omp_get_attachment (omp_mapping_group *grp)
9086 tree node = *grp->grp_start;
9088 switch (OMP_CLAUSE_MAP_KIND (node))
9090 case GOMP_MAP_TO:
9091 case GOMP_MAP_FROM:
9092 case GOMP_MAP_TOFROM:
9093 case GOMP_MAP_ALWAYS_FROM:
9094 case GOMP_MAP_ALWAYS_TO:
9095 case GOMP_MAP_ALWAYS_TOFROM:
9096 case GOMP_MAP_FORCE_FROM:
9097 case GOMP_MAP_FORCE_TO:
9098 case GOMP_MAP_FORCE_TOFROM:
9099 case GOMP_MAP_FORCE_PRESENT:
9100 case GOMP_MAP_ALLOC:
9101 case GOMP_MAP_RELEASE:
9102 case GOMP_MAP_DELETE:
9103 case GOMP_MAP_FORCE_ALLOC:
9104 if (node == grp->grp_end)
9105 return NULL_TREE;
9107 node = OMP_CLAUSE_CHAIN (node);
9108 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9110 gcc_assert (node != grp->grp_end);
9111 node = OMP_CLAUSE_CHAIN (node);
9113 if (node)
9114 switch (OMP_CLAUSE_MAP_KIND (node))
9116 case GOMP_MAP_POINTER:
9117 case GOMP_MAP_ALWAYS_POINTER:
9118 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9119 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9120 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9121 return NULL_TREE;
9123 case GOMP_MAP_ATTACH_DETACH:
9124 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9125 return OMP_CLAUSE_DECL (node);
9127 default:
9128 internal_error ("unexpected mapping node");
9130 return error_mark_node;
9132 case GOMP_MAP_TO_PSET:
9133 gcc_assert (node != grp->grp_end);
9134 node = OMP_CLAUSE_CHAIN (node);
9135 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9136 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9137 return OMP_CLAUSE_DECL (node);
9138 else
9139 internal_error ("unexpected mapping node");
9140 return error_mark_node;
9142 case GOMP_MAP_ATTACH:
9143 case GOMP_MAP_DETACH:
9144 node = OMP_CLAUSE_CHAIN (node);
9145 if (!node || *grp->grp_start == grp->grp_end)
9146 return OMP_CLAUSE_DECL (*grp->grp_start);
9147 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9148 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9149 return OMP_CLAUSE_DECL (*grp->grp_start);
9150 else
9151 internal_error ("unexpected mapping node");
9152 return error_mark_node;
9154 case GOMP_MAP_STRUCT:
9155 case GOMP_MAP_FORCE_DEVICEPTR:
9156 case GOMP_MAP_DEVICE_RESIDENT:
9157 case GOMP_MAP_LINK:
9158 case GOMP_MAP_IF_PRESENT:
9159 case GOMP_MAP_FIRSTPRIVATE:
9160 case GOMP_MAP_FIRSTPRIVATE_INT:
9161 case GOMP_MAP_USE_DEVICE_PTR:
9162 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9163 return NULL_TREE;
9165 default:
9166 internal_error ("unexpected mapping node");
9169 return error_mark_node;
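/* Editorial note (illustrative example, not from the original file): for a
   pointer member of a struct, e.g.

     struct S { int *ptr; } s;
     #pragma omp target enter data map(to: s.ptr[0:n])

   the front ends produce a group along the lines of

     GOMP_MAP_TO (s.ptr[0:n])  GOMP_MAP_ATTACH_DETACH (s.ptr)

   for which omp_get_attachment returns the address being attached, i.e.
   the decl of the ATTACH_DETACH node ("s.ptr").  For a group whose
   trailing node is instead e.g. a GOMP_MAP_FIRSTPRIVATE_POINTER, there is
   no attachment and NULL_TREE is returned.  */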
9172 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9173 mappings, return the chain pointer to the end of that group in the list. */
9175 static tree *
9176 omp_group_last (tree *start_p)
9178 tree c = *start_p, nc, *grp_last_p = start_p;
9180 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9182 nc = OMP_CLAUSE_CHAIN (c);
9184 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9185 return grp_last_p;
9187 switch (OMP_CLAUSE_MAP_KIND (c))
9189 default:
9190 while (nc
9191 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9192 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9193 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9194 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9195 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9196 || (OMP_CLAUSE_MAP_KIND (nc)
9197 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9198 || (OMP_CLAUSE_MAP_KIND (nc)
9199 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9200 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9201 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9203 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9204 c = nc;
9205 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9206 if (nc2
9207 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9208 && (OMP_CLAUSE_MAP_KIND (nc)
9209 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9210 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9212 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9213 c = nc2;
9214 nc2 = OMP_CLAUSE_CHAIN (nc2);
9216 nc = nc2;
9218 break;
9220 case GOMP_MAP_ATTACH:
9221 case GOMP_MAP_DETACH:
9222 /* This is a weird artifact of how directives are parsed: bare attach or
9223 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9224 FIRSTPRIVATE_REFERENCE node. FIXME. */
9225 if (nc
9226 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9227 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9228 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9229 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9230 break;
9232 case GOMP_MAP_TO_PSET:
9233 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9234 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9235 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9236 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9237 break;
9239 case GOMP_MAP_STRUCT:
9241 unsigned HOST_WIDE_INT num_mappings
9242 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9243 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9244 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9245 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9246 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9247 for (unsigned i = 0; i < num_mappings; i++)
9248 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9250 break;
9253 return grp_last_p;
9256 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9257 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9258 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9259 if we find at least one such group, else return NULL. */
9261 static void
9262 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9263 tree gather_sentinel)
9265 for (tree *cp = list_p;
9266 *cp && *cp != gather_sentinel;
9267 cp = &OMP_CLAUSE_CHAIN (*cp))
9269 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9270 continue;
9272 tree *grp_last_p = omp_group_last (cp);
9273 omp_mapping_group grp;
9275 grp.grp_start = cp;
9276 grp.grp_end = *grp_last_p;
9277 grp.mark = UNVISITED;
9278 grp.sibling = NULL;
9279 grp.deleted = false;
9280 grp.next = NULL;
9281 groups->safe_push (grp);
9283 cp = grp_last_p;
9287 static vec<omp_mapping_group> *
9288 omp_gather_mapping_groups (tree *list_p)
9290 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9292 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9294 if (groups->length () > 0)
9295 return groups;
9296 else
9298 delete groups;
9299 return NULL;
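/* Editorial sketch (not part of the original file): the gathering walk
   above, reduced to a generic singly-linked list.  Node, MAP_KIND,
   POINTER_KIND and OTHER_KIND are hypothetical stand-ins for OMP clauses
   and their map kinds; the real grouping rules live in omp_group_last.

     #include <vector>

     enum Kind { MAP_KIND, POINTER_KIND, OTHER_KIND };
     struct Node { Kind kind; Node *chain; };
     struct Group { Node **start; Node *end; };

     // Return the address of the chain slot holding the last node of the
     // group starting at *START_P: a map node plus any immediately
     // following pointer nodes.
     static Node **
     group_last (Node **start_p)
     {
       Node **last_p = start_p;
       for (Node *c = *start_p; c->chain && c->chain->kind == POINTER_KIND;
            c = c->chain)
         last_p = &c->chain;
       return last_p;
     }

     static std::vector<Group>
     gather_groups (Node **list_p)
     {
       std::vector<Group> groups;
       for (Node **cp = list_p; *cp; cp = &(*cp)->chain)
         {
           if ((*cp)->kind != MAP_KIND)
             continue;
           Node **last_p = group_last (cp);
           groups.push_back (Group { cp, *last_p });
           cp = last_p;  // The loop increment then steps past the group.
         }
       return groups;
     }

   As in the real code, each group records a pointer to the chain slot of
   its first node, so the group can later be re-linked in place.  */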
9303 /* A pointer mapping group GRP may define a block of memory starting at some
9304 base address, and maybe also define a firstprivate pointer or firstprivate
9305 reference that points to that block. The return value is a node containing
9306 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9307 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9308 return the number of consecutive chained nodes in CHAINED. */
9310 static tree
9311 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9312 tree *firstprivate)
9314 tree node = *grp->grp_start;
9316 *firstprivate = NULL_TREE;
9317 *chained = 1;
9319 switch (OMP_CLAUSE_MAP_KIND (node))
9321 case GOMP_MAP_TO:
9322 case GOMP_MAP_FROM:
9323 case GOMP_MAP_TOFROM:
9324 case GOMP_MAP_ALWAYS_FROM:
9325 case GOMP_MAP_ALWAYS_TO:
9326 case GOMP_MAP_ALWAYS_TOFROM:
9327 case GOMP_MAP_FORCE_FROM:
9328 case GOMP_MAP_FORCE_TO:
9329 case GOMP_MAP_FORCE_TOFROM:
9330 case GOMP_MAP_FORCE_PRESENT:
9331 case GOMP_MAP_ALLOC:
9332 case GOMP_MAP_RELEASE:
9333 case GOMP_MAP_DELETE:
9334 case GOMP_MAP_FORCE_ALLOC:
9335 case GOMP_MAP_IF_PRESENT:
9336 if (node == grp->grp_end)
9337 return node;
9339 node = OMP_CLAUSE_CHAIN (node);
9340 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9342 if (node == grp->grp_end)
9343 return *grp->grp_start;
9344 node = OMP_CLAUSE_CHAIN (node);
9346 if (node)
9347 switch (OMP_CLAUSE_MAP_KIND (node))
9349 case GOMP_MAP_POINTER:
9350 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9351 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9352 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9353 *firstprivate = OMP_CLAUSE_DECL (node);
9354 return *grp->grp_start;
9356 case GOMP_MAP_ALWAYS_POINTER:
9357 case GOMP_MAP_ATTACH_DETACH:
9358 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9359 return *grp->grp_start;
9361 default:
9362 internal_error ("unexpected mapping node");
9364 else
9365 internal_error ("unexpected mapping node");
9366 return error_mark_node;
9368 case GOMP_MAP_TO_PSET:
9369 gcc_assert (node != grp->grp_end);
9370 node = OMP_CLAUSE_CHAIN (node);
9371 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9372 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9373 return NULL_TREE;
9374 else
9375 internal_error ("unexpected mapping node");
9376 return error_mark_node;
9378 case GOMP_MAP_ATTACH:
9379 case GOMP_MAP_DETACH:
9380 node = OMP_CLAUSE_CHAIN (node);
9381 if (!node || *grp->grp_start == grp->grp_end)
9382 return NULL_TREE;
9383 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9384 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9386 /* We're mapping the base pointer itself in a bare attach or detach
9387 node. This is a side effect of how parsing works, and the mapping
9388 will be removed anyway (at least for enter/exit data directives).
9389 We should ignore the mapping here. FIXME. */
9390 return NULL_TREE;
9392 else
9393 internal_error ("unexpected mapping node");
9394 return error_mark_node;
9396 case GOMP_MAP_STRUCT:
9398 unsigned HOST_WIDE_INT num_mappings
9399 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9400 node = OMP_CLAUSE_CHAIN (node);
9401 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9402 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9404 *firstprivate = OMP_CLAUSE_DECL (node);
9405 node = OMP_CLAUSE_CHAIN (node);
9407 *chained = num_mappings;
9408 return node;
9411 case GOMP_MAP_FORCE_DEVICEPTR:
9412 case GOMP_MAP_DEVICE_RESIDENT:
9413 case GOMP_MAP_LINK:
9414 case GOMP_MAP_FIRSTPRIVATE:
9415 case GOMP_MAP_FIRSTPRIVATE_INT:
9416 case GOMP_MAP_USE_DEVICE_PTR:
9417 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9418 return NULL_TREE;
9420 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9421 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9422 case GOMP_MAP_POINTER:
9423 case GOMP_MAP_ALWAYS_POINTER:
9424 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9425 /* These shouldn't appear by themselves. */
9426 if (!seen_error ())
9427 internal_error ("unexpected pointer mapping node");
9428 return error_mark_node;
9430 default:
9431 gcc_unreachable ();
9434 return error_mark_node;
9437 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9438 nodes by tree_operand_hash_no_se. */
9440 static void
9441 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9442 omp_mapping_group *> *grpmap,
9443 vec<omp_mapping_group> *groups,
9444 tree reindex_sentinel)
9446 omp_mapping_group *grp;
9447 unsigned int i;
9448 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9450 FOR_EACH_VEC_ELT (*groups, i, grp)
9452 if (reindexing && *grp->grp_start == reindex_sentinel)
9453 above_hwm = true;
9455 if (reindexing && !above_hwm)
9456 continue;
9458 tree fpp;
9459 unsigned int chained;
9460 tree node = omp_group_base (grp, &chained, &fpp);
9462 if (node == error_mark_node || (!node && !fpp))
9463 continue;
9465 for (unsigned j = 0;
9466 node && j < chained;
9467 node = OMP_CLAUSE_CHAIN (node), j++)
9469 tree decl = OMP_CLAUSE_DECL (node);
9470 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9471 meaning node-hash lookups don't work. This is a workaround for
9472 that, but ideally we should just create the INDIRECT_REF at
9473 source instead. FIXME. */
9474 if (TREE_CODE (decl) == MEM_REF
9475 && integer_zerop (TREE_OPERAND (decl, 1)))
9476 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9478 omp_mapping_group **prev = grpmap->get (decl);
9480 if (prev && *prev == grp)
9481 /* Empty. */;
9482 else if (prev)
9484 /* Mapping the same thing twice is normally diagnosed as an error,
9485 but can happen under some circumstances, e.g. in pr99928-16.c,
9486 the directive:
9488 #pragma omp target simd reduction(+:a[:3]) \
9489 map(always, tofrom: a[:6])
9492 will result in two "a[0]" mappings (of different sizes). */
9494 grp->sibling = (*prev)->sibling;
9495 (*prev)->sibling = grp;
9497 else
9498 grpmap->put (decl, grp);
9501 if (!fpp)
9502 continue;
9504 omp_mapping_group **prev = grpmap->get (fpp);
9505 if (prev && *prev != grp)
9507 grp->sibling = (*prev)->sibling;
9508 (*prev)->sibling = grp;
9510 else
9511 grpmap->put (fpp, grp);
9515 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9516 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9518 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9519 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9521 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9523 return grpmap;
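/* Editorial sketch (not part of the original file): the sibling-chaining
   scheme above, using std::unordered_map with string keys as a
   hypothetical stand-in for the tree-operand hash map.

     #include <string>
     #include <unordered_map>

     struct Group { Group *sibling = nullptr; };

     static void
     index_group (std::unordered_map<std::string, Group *> &grpmap,
                  const std::string &key, Group *grp)
     {
       auto it = grpmap.find (key);
       if (it == grpmap.end ())
         grpmap.emplace (key, grp);       // First group for this decl.
       else if (it->second != grp)
         {
           // Duplicate mapping of the same decl: keep both groups,
           // threading the newcomer onto the first-seen group's sibling
           // chain instead of overwriting the map entry.
           grp->sibling = it->second->sibling;
           it->second->sibling = grp;
         }
     }

   Lookups then return the head group, and consumers walk the sibling
   chain, as omp_tsort_mapping_groups_1 does below.  */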
9526 /* Rebuild group map from partially-processed clause list (during
9527 omp_build_struct_sibling_lists). We have already processed nodes up until
9528 a high-water mark (HWM). This is a bit tricky because the list is being
9529 reordered as it is scanned, but we know:
9531 1. The list after HWM has not been touched yet, so we can reindex it safely.
9533 2. The list before and including HWM has been altered, but remains
9534 well-formed throughout the sibling-list building operation.
9536 So we can do the reindex operation in two parts: first on the processed
9537 part of the list, then on the unprocessed part. */
9539 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9540 omp_reindex_mapping_groups (tree *list_p,
9541 vec<omp_mapping_group> *groups,
9542 vec<omp_mapping_group> *processed_groups,
9543 tree sentinel)
9545 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9546 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9548 processed_groups->truncate (0);
9550 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9551 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9552 if (sentinel)
9553 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9555 return grpmap;
9558 /* Find the immediately-containing struct for a component ref (etc.)
9559 expression EXPR. */
9561 static tree
9562 omp_containing_struct (tree expr)
9564 tree expr0 = expr;
9566 STRIP_NOPS (expr);
9568 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9569 component ref. */
9570 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9571 return expr0;
9573 while (TREE_CODE (expr) == ARRAY_REF)
9574 expr = TREE_OPERAND (expr, 0);
9576 if (TREE_CODE (expr) == COMPONENT_REF)
9577 expr = TREE_OPERAND (expr, 0);
9579 return expr;
9582 /* Return TRUE if DECL describes a component that is part of a whole structure
9583 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9584 that maps that structure, if present. */
9586 static bool
9587 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9588 omp_mapping_group *> *grpmap,
9589 tree decl,
9590 omp_mapping_group **mapped_by_group)
9592 tree wsdecl = NULL_TREE;
9594 *mapped_by_group = NULL;
9596 while (true)
9598 wsdecl = omp_containing_struct (decl);
9599 if (wsdecl == decl)
9600 break;
9601 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9602 if (!wholestruct
9603 && TREE_CODE (wsdecl) == MEM_REF
9604 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9606 tree deref = TREE_OPERAND (wsdecl, 0);
9607 deref = build_fold_indirect_ref (deref);
9608 wholestruct = grpmap->get (deref);
9610 if (wholestruct)
9612 *mapped_by_group = *wholestruct;
9613 return true;
9615 decl = wsdecl;
9618 return false;
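/* Editorial note (illustrative example, not from the original file): with
   "map(tofrom: s)" indexed in GRPMAP and DECL = "s.a.b", the loop above
   first tries the containing ref "s.a", then "s"; the latter hits the
   whole-struct group, which is returned through *MAPPED_BY_GROUP.  */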
9621 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9622 FALSE on error. */
9624 static bool
9625 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9626 vec<omp_mapping_group> *groups,
9627 hash_map<tree_operand_hash_no_se,
9628 omp_mapping_group *> *grpmap,
9629 omp_mapping_group *grp)
9631 if (grp->mark == PERMANENT)
9632 return true;
9633 if (grp->mark == TEMPORARY)
9635 fprintf (stderr, "when processing group:\n");
9636 debug_mapping_group (grp);
9637 internal_error ("base pointer cycle detected");
9638 return false;
9640 grp->mark = TEMPORARY;
9642 tree attaches_to = omp_get_attachment (grp);
9644 if (attaches_to)
9646 omp_mapping_group **basep = grpmap->get (attaches_to);
9648 if (basep && *basep != grp)
9650 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9651 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9652 return false;
9656 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9658 while (decl)
9660 tree base = omp_get_base_pointer (decl);
9662 if (!base)
9663 break;
9665 omp_mapping_group **innerp = grpmap->get (base);
9666 omp_mapping_group *wholestruct;
9668 /* We should treat whole-structure mappings as if all (pointer, in this
9669 case) members are mapped as individual list items. Check if we have
9670 such a whole-structure mapping, if we don't have an explicit reference
9671 to the pointer member itself. */
9672 if (!innerp
9673 && TREE_CODE (base) == COMPONENT_REF
9674 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9675 innerp = &wholestruct;
9677 if (innerp && *innerp != grp)
9679 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9680 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9681 return false;
9682 break;
9685 decl = base;
9688 grp->mark = PERMANENT;
9690 /* Emit GRP to the output list. */
9692 **outlist = grp;
9693 *outlist = &grp->next;
9695 return true;
9698 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9699 before mappings that use those pointers. This is an implementation of the
9700 depth-first search algorithm, described e.g. at:
9702 https://en.wikipedia.org/wiki/Topological_sorting
9705 static omp_mapping_group *
9706 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9707 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
9708 *grpmap)
9710 omp_mapping_group *grp, *outlist = NULL, **cursor;
9711 unsigned int i;
9713 cursor = &outlist;
9715 FOR_EACH_VEC_ELT (*groups, i, grp)
9717 if (grp->mark != PERMANENT)
9718 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9719 return NULL;
9722 return outlist;
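/* Editorial sketch (not part of the original file): the same three-colour
   depth-first topological sort in miniature, with integer node ids and an
   explicit predecessor list standing in for the base-pointer lookups
   above.

     #include <vector>

     enum SketchMark { S_UNVISITED, S_TEMPORARY, S_PERMANENT };
     struct SketchNode
     {
       std::vector<int> preds;
       SketchMark mark = S_UNVISITED;
     };

     // Returns false if a cycle is found.  OUT receives every node after
     // all of its predecessors, matching the "emit GRP to the output
     // list" step above.
     static bool
     sketch_visit (std::vector<SketchNode> &g, int i, std::vector<int> &out)
     {
       if (g[i].mark == S_PERMANENT)
         return true;
       if (g[i].mark == S_TEMPORARY)
         return false;                    // Base-pointer cycle.
       g[i].mark = S_TEMPORARY;
       for (int p : g[i].preds)
         if (!sketch_visit (g, p, out))
           return false;
       g[i].mark = S_PERMANENT;
       out.push_back (i);                 // Emit after all predecessors.
       return true;
     }

     static bool
     sketch_tsort (std::vector<SketchNode> &g, std::vector<int> &out)
     {
       for (unsigned i = 0; i < g.size (); i++)
         if (g[i].mark != S_PERMANENT && !sketch_visit (g, (int) i, out))
           return false;
       return true;
     }
   */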
9725 /* Split INLIST into two parts, moving groups corresponding to
9726 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9727 The former list is then appended to the latter. Each sub-list retains the
9728 order of the original list.
9729 Note that ATTACH nodes are later moved to the end of the list in
9730 gimplify_adjust_omp_clauses, for target regions. */
9732 static omp_mapping_group *
9733 omp_segregate_mapping_groups (omp_mapping_group *inlist)
9735 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
9736 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
9738 for (omp_mapping_group *w = inlist; w;)
9740 tree c = *w->grp_start;
9741 omp_mapping_group *next = w->next;
9743 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9745 switch (OMP_CLAUSE_MAP_KIND (c))
9747 case GOMP_MAP_ALLOC:
9748 case GOMP_MAP_RELEASE:
9749 case GOMP_MAP_DELETE:
9750 *ard_tail = w;
9751 w->next = NULL;
9752 ard_tail = &w->next;
9753 break;
9755 default:
9756 *tf_tail = w;
9757 w->next = NULL;
9758 tf_tail = &w->next;
9761 w = next;
9764 /* Now splice the lists together... */
9765 *tf_tail = ard_groups;
9767 return tf_groups;
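/* Editorial sketch (not part of the original file): the splice above is a
   stable partition of a singly-linked list, using one tail
   pointer-to-pointer per sublist.  G and is_alloc_release_delete are
   hypothetical stand-ins for omp_mapping_group and the map-kind test.

     struct G { int map_kind; G *next; };

     static bool
     is_alloc_release_delete (const G *g)
     {
       return g->map_kind == 0 || g->map_kind == 1 || g->map_kind == 2;
     }

     static G *
     segregate (G *inlist)
     {
       G *ard = nullptr, *tf = nullptr;
       G **ard_tail = &ard, **tf_tail = &tf;
       for (G *w = inlist; w; )
         {
           G *next = w->next;
           if (is_alloc_release_delete (w))
             {
               *ard_tail = w;
               w->next = nullptr;
               ard_tail = &w->next;
             }
           else
             {
               *tf_tail = w;
               w->next = nullptr;
               tf_tail = &w->next;
             }
           w = next;
         }
       *tf_tail = ard;   // Append the alloc/release/delete groups last.
       return tf;
     }
   */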
9770 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9771 those groups based on the output list of omp_tsort_mapping_groups --
9772 singly-linked, threaded through each element's NEXT pointer starting at
9773 HEAD. Each list element appears exactly once in that linked list.
9775 Each element of GROUPS may correspond to one or several mapping nodes.
9776 Node groups are kept together; in the reordered list, the reordered
9777 groups occupy the positions that the original groups had.
9778 Hence if we have e.g.
9780 {to ptr ptr} firstprivate {tofrom ptr} ...
9781 ^ ^ ^
9782 first group non-"map" second group
9784 and say the second group contains a base pointer for the first so must be
9785 moved before it, the resulting list will contain:
9787 {tofrom ptr} firstprivate {to ptr ptr} ...
9788 ^ prev. second group ^ prev. first group
9791 static tree *
9792 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
9793 omp_mapping_group *head,
9794 tree *list_p)
9796 omp_mapping_group *grp;
9797 unsigned int i;
9798 unsigned numgroups = groups->length ();
9799 auto_vec<tree> old_heads (numgroups);
9800 auto_vec<tree *> old_headps (numgroups);
9801 auto_vec<tree> new_heads (numgroups);
9802 auto_vec<tree> old_succs (numgroups);
9803 bool map_at_start = (list_p == (*groups)[0].grp_start);
9805 tree *new_grp_tail = NULL;
9807 /* Stash the start & end nodes of each mapping group before we start
9808 modifying the list. */
9809 FOR_EACH_VEC_ELT (*groups, i, grp)
9811 old_headps.quick_push (grp->grp_start);
9812 old_heads.quick_push (*grp->grp_start);
9813 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
9816 /* And similarly, the heads of the groups in the order we want to rearrange
9817 the list to. */
9818 for (omp_mapping_group *w = head; w; w = w->next)
9819 new_heads.quick_push (*w->grp_start);
9821 FOR_EACH_VEC_ELT (*groups, i, grp)
9823 gcc_assert (head);
9825 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
9827 /* a {b c d} {e f g} h i j (original)
9829 a {k l m} {e f g} h i j (inserted new group on last iter)
9831 a {k l m} {n o p} h i j (this time, chain last group to new one)
9832 ^new_grp_tail
9834 *new_grp_tail = new_heads[i];
9836 else if (new_grp_tail)
9838 /* a {b c d} e {f g h} i j k (original)
9840 a {l m n} e {f g h} i j k (gap after last iter's group)
9842 a {l m n} e {o p q} h i j (chain last group to old successor)
9843 ^new_grp_tail
9845 *new_grp_tail = old_succs[i - 1];
9846 *old_headps[i] = new_heads[i];
9848 else
9850 /* The first inserted group -- point to new group, and leave end
9851 open.
9852 a {b c d} e f
9854 a {g h i...
9856 *grp->grp_start = new_heads[i];
9859 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
9861 head = head->next;
9864 if (new_grp_tail)
9865 *new_grp_tail = old_succs[numgroups - 1];
9867 gcc_assert (!head);
9869 return map_at_start ? (*groups)[0].grp_start : list_p;
9872 /* DECL is supposed to have lastprivate semantics in the outer contexts
9873 of combined/composite constructs, starting with OCTX.
9874 Add the needed lastprivate, shared or map clause if no data-sharing or
9875 mapping clause is present. IMPLICIT_P is true if it is an implicit
9876 clause (the IV on a simd), in which case the lastprivate clause is not
9877 copied to some constructs. */
9879 static void
9880 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9881 tree decl, bool implicit_p)
9883 struct gimplify_omp_ctx *orig_octx = octx;
9884 for (; octx; octx = octx->outer_context)
9886 if ((octx->region_type == ORT_COMBINED_PARALLEL
9887 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9888 && splay_tree_lookup (octx->variables,
9889 (splay_tree_key) decl) == NULL)
9891 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9892 continue;
9894 if ((octx->region_type & ORT_TASK) != 0
9895 && octx->combined_loop
9896 && splay_tree_lookup (octx->variables,
9897 (splay_tree_key) decl) == NULL)
9899 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9900 continue;
9902 if (implicit_p
9903 && octx->region_type == ORT_WORKSHARE
9904 && octx->combined_loop
9905 && splay_tree_lookup (octx->variables,
9906 (splay_tree_key) decl) == NULL
9907 && octx->outer_context
9908 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9909 && splay_tree_lookup (octx->outer_context->variables,
9910 (splay_tree_key) decl) == NULL)
9912 octx = octx->outer_context;
9913 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9914 continue;
9916 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9917 && octx->combined_loop
9918 && splay_tree_lookup (octx->variables,
9919 (splay_tree_key) decl) == NULL
9920 && !omp_check_private (octx, decl, false))
9922 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9923 continue;
9925 if (octx->region_type == ORT_COMBINED_TARGET)
9927 splay_tree_node n = splay_tree_lookup (octx->variables,
9928 (splay_tree_key) decl);
9929 if (n == NULL)
9931 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9932 octx = octx->outer_context;
9934 else if (!implicit_p
9935 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9937 n->value &= ~(GOVD_FIRSTPRIVATE
9938 | GOVD_FIRSTPRIVATE_IMPLICIT
9939 | GOVD_EXPLICIT);
9940 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9941 octx = octx->outer_context;
9944 break;
9946 if (octx && (implicit_p || octx != orig_octx))
9947 omp_notice_variable (octx, decl, true);
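/* Editorial note (illustrative example, not from the original file): for
   the combined construct

     #pragma omp target parallel for lastprivate(x)

   the walk above, starting from the context of the "for", adds "x" as
   GOVD_SHARED on the combined parallel and as GOVD_MAP on the combined
   target, so the value assigned in the last iteration is visible through
   every enclosing construct.  */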
9950 /* If we have mappings INNER and OUTER, where INNER is a component access and
9951 OUTER is a mapping of the whole containing struct, check that the mappings
9952 are compatible. We'll be deleting the inner mapping, so we need to make
9953 sure the outer mapping does (at least) the same transfers to/from the device
9954 as the inner mapping. */
9956 bool
9957 omp_check_mapping_compatibility (location_t loc,
9958 omp_mapping_group *outer,
9959 omp_mapping_group *inner)
9961 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
9963 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
9964 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
9966 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
9967 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
9969 if (outer_kind == inner_kind)
9970 return true;
9972 switch (outer_kind)
9974 case GOMP_MAP_ALWAYS_TO:
9975 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9976 || inner_kind == GOMP_MAP_ALLOC
9977 || inner_kind == GOMP_MAP_TO)
9978 return true;
9979 break;
9981 case GOMP_MAP_ALWAYS_FROM:
9982 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9983 || inner_kind == GOMP_MAP_ALLOC
9984 || inner_kind == GOMP_MAP_FROM)
9985 return true;
9986 break;
9988 case GOMP_MAP_TO:
9989 case GOMP_MAP_FROM:
9990 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9991 || inner_kind == GOMP_MAP_ALLOC)
9992 return true;
9993 break;
9995 case GOMP_MAP_ALWAYS_TOFROM:
9996 case GOMP_MAP_TOFROM:
9997 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9998 || inner_kind == GOMP_MAP_ALLOC
9999 || inner_kind == GOMP_MAP_TO
10000 || inner_kind == GOMP_MAP_FROM
10001 || inner_kind == GOMP_MAP_TOFROM)
10002 return true;
10003 break;
10005 default:
10009 error_at (loc, "data movement for component %qE is not compatible with "
10010 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10011 OMP_CLAUSE_DECL (first_outer));
10013 return false;
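/* Editorial example (illustrative, not from the original file): for

     struct S { int x; int y; } s;
     #pragma acc parallel copy(s) copyin(s.y)

   the inner "copyin(s.y)" (GOMP_MAP_TO) is subsumed by the outer
   "copy(s)" (GOMP_MAP_TOFROM) and the check succeeds, whereas an outer
   "copyin(s)" with an inner "copyout(s.y)" (GOMP_MAP_FROM) fails, since a
   TO mapping of the whole struct does not perform the FROM transfer the
   deleted inner mapping asked for.  */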
10016 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10017 clause dependencies we handle for now are struct element mappings and
10018 whole-struct mappings on the same directive, and duplicate clause
10019 detection. */
10021 void
10022 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10023 hash_map<tree_operand_hash_no_se,
10024 omp_mapping_group *> *grpmap)
10026 int i;
10027 omp_mapping_group *grp;
10028 hash_set<tree_operand_hash> *seen_components = NULL;
10029 hash_set<tree_operand_hash> *shown_error = NULL;
10031 FOR_EACH_VEC_ELT (*groups, i, grp)
10033 tree grp_end = grp->grp_end;
10034 tree decl = OMP_CLAUSE_DECL (grp_end);
10036 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10038 if (DECL_P (grp_end))
10039 continue;
10041 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10042 while (TREE_CODE (c) == ARRAY_REF)
10043 c = TREE_OPERAND (c, 0);
10044 if (TREE_CODE (c) != COMPONENT_REF)
10045 continue;
10046 if (!seen_components)
10047 seen_components = new hash_set<tree_operand_hash> ();
10048 if (!shown_error)
10049 shown_error = new hash_set<tree_operand_hash> ();
10050 if (seen_components->contains (c)
10051 && !shown_error->contains (c))
10053 error_at (OMP_CLAUSE_LOCATION (grp_end),
10054 "%qE appears more than once in map clauses",
10055 OMP_CLAUSE_DECL (grp_end));
10056 shown_error->add (c);
10058 else
10059 seen_components->add (c);
10061 omp_mapping_group *struct_group;
10062 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10063 && *grp->grp_start == grp_end)
10065 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10066 struct_group, grp);
10067 /* Remove the whole of this mapping -- redundant. */
10068 grp->deleted = true;
10072 if (seen_components)
10073 delete seen_components;
10074 if (shown_error)
10075 delete shown_error;
10078 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10079 is linked to the node previously pointed to by INSERT_AT. */
10081 static tree *
10082 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10084 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10085 *insert_at = newnode;
10086 return &OMP_CLAUSE_CHAIN (newnode);
10089 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10090 pointed to by chain MOVE_AFTER instead. */
10092 static void
10093 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10095 gcc_assert (node == *old_pos);
10096 *old_pos = OMP_CLAUSE_CHAIN (node);
10097 OMP_CLAUSE_CHAIN (node) = *move_after;
10098 *move_after = node;
10101 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
10102 through LAST_NODE to after the MOVE_AFTER chain. Similar to the function
10103 below, but no new nodes are prepended to the list before splicing it into
10104 the new position. Return the position we should continue scanning the
10105 list at, or NULL to stay where we were. */
10107 static tree *
10108 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10109 tree *move_after)
10111 if (first_ptr == move_after)
10112 return NULL;
10114 tree tmp = *first_ptr;
10115 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10116 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10117 *move_after = tmp;
10119 return first_ptr;
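/* Editorial sketch (not part of the original file): the insert/move helpers
   above reduced to a minimal list type, plus a usage check.  N is a
   hypothetical stand-in for an OMP clause; the gcc_assert sanity check is
   omitted.

     #include <cassert>

     struct N { N *chain; };

     static N **
     insert_node_after (N *newnode, N **insert_at)
     {
       newnode->chain = *insert_at;
       *insert_at = newnode;
       return &newnode->chain;
     }

     static void
     move_node_after (N *node, N **old_pos, N **move_after)
     {
       *old_pos = node->chain;      // Unlink NODE from its old position.
       node->chain = *move_after;   // Re-link it after MOVE_AFTER.
       *move_after = node;
     }

     int
     main ()
     {
       N c { nullptr }, b { &c }, a { &b };
       N *head = &a;                        // head -> a -> b -> c
       move_node_after (&c, &b.chain, &head);
       assert (head == &c && c.chain == &a && b.chain == nullptr);
       return 0;
     }

   Because every link is manipulated through a pointer to the chain slot,
   the same code works whether that slot is the list head or the chain
   field of another node.  */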
10122 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10123 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10124 pointer MOVE_AFTER.
10126 The latter list was previously part of the OMP clause list, and the former
10127 (prepended) part is comprised of new nodes.
10129 We start with a list of nodes starting with a struct mapping node. We
10130 rearrange the list so that the new nodes, starting from FIRST_NEW and with
10131 LAST_NEW_TAIL as the last node's chain, come directly after MOVE_AFTER,
10132 followed by the group of mapping nodes we are currently processing (from
10133 the chain FIRST_PTR to LAST_NODE). The return value is the pointer to the
10134 next chain we should continue processing from, or NULL to stay where we were.
10136 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10137 different) is worked through below. Here we are processing LAST_NODE, and
10138 FIRST_PTR points at the preceding mapping clause:
10140 #. mapping node chain
10141 ---------------------------------------------------
10142 A. struct_node [->B]
10143 B. comp_1 [->C]
10144 C. comp_2 [->D (move_after)]
10145 D. map_to_3 [->E]
10146 E. attach_3 [->F (first_ptr)]
10147 F. map_to_4 [->G (continue_at)]
10148 G. attach_4 (last_node) [->H]
10149 H. ...
10151 *last_new_tail = *first_ptr;
10153 I. new_node (first_new) [->F (last_new_tail)]
10155 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10157 #. mapping node chain
10158 ----------------------------------------------------
10159 A. struct_node [->B]
10160 B. comp_1 [->C]
10161 C. comp_2 [->D (move_after)]
10162 D. map_to_3 [->E]
10163 E. attach_3 [->H (first_ptr)]
10164 F. map_to_4 [->G (continue_at)]
10165 G. attach_4 (last_node) [->H]
10166 H. ...
10168 I. new_node (first_new) [->F (last_new_tail)]
10170 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10172 #. mapping node chain
10173 ---------------------------------------------------
10174 A. struct_node [->B]
10175 B. comp_1 [->C]
10176 C. comp_2 [->D (move_after)]
10177 D. map_to_3 [->E]
10178 E. attach_3 [->H (continue_at)]
10179 F. map_to_4 [->G]
10180 G. attach_4 (last_node) [->D]
10181 H. ...
10183 I. new_node (first_new) [->F (last_new_tail)]
10185 *move_after = first_new;
10187 #. mapping node chain
10188 ---------------------------------------------------
10189 A. struct_node [->B]
10190 B. comp_1 [->C]
10191 C. comp_2 [->I (move_after)]
10192 D. map_to_3 [->E]
10193 E. attach_3 [->H (continue_at)]
10194 F. map_to_4 [->G]
10195 G. attach_4 (last_node) [->D]
10196 H. ...
10197 I. new_node (first_new) [->F (last_new_tail)]
10199 or, in order:
10201 #. mapping node chain
10202 ---------------------------------------------------
10203 A. struct_node [->B]
10204 B. comp_1 [->C]
10205 C. comp_2 [->I (move_after)]
10206 I. new_node (first_new) [->F (last_new_tail)]
10207 F. map_to_4 [->G]
10208 G. attach_4 (last_node) [->D]
10209 D. map_to_3 [->E]
10210 E. attach_3 [->H (continue_at)]
10211 H. ...
10214 static tree *
10215 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10216 tree *first_ptr, tree last_node,
10217 tree *move_after)
10219 tree *continue_at = NULL;
10220 *last_new_tail = *first_ptr;
10221 if (first_ptr == move_after)
10222 *move_after = first_new;
10223 else
10225 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10226 continue_at = first_ptr;
10227 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10228 *move_after = first_new;
10230 return continue_at;
10233 /* Mapping struct members causes an additional set of nodes to be created,
10234 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10235 number of members being mapped, in ascending order of position (byte
10236 offset, then bit position).
10238 We scan through the list of mapping clauses, calling this function for each
10239 struct member mapping we find, and build up the list of mappings after the
10240 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10241 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10242 moved into place in the sorted list.
10244 struct {
10245 int *a;
10246 int *b;
10247 int c;
10248 int *d;
10251 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10252 struct.d[0:n])
10254 GOMP_MAP_STRUCT (4)
10255 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10256 GOMP_MAP_ALLOC (struct.a)
10257 GOMP_MAP_ALLOC (struct.b)
10258 GOMP_MAP_TO (struct.c)
10259 GOMP_MAP_ALLOC (struct.d)
10262 In the case where we are mapping references to pointers, or in Fortran if
10263 we are mapping an array with a descriptor, additional nodes may also be
10264 created after the struct node list.
10266 The return value is either a pointer to the next node to process (if the
10267 list has been rearranged), or NULL to continue with the next node in the
10268 original list. */
10270 static tree *
10271 omp_accumulate_sibling_list (enum omp_region_type region_type,
10272 enum tree_code code,
10273 hash_map<tree_operand_hash, tree>
10274 *&struct_map_to_clause, tree *grp_start_p,
10275 tree grp_end, tree *inner)
10277 poly_offset_int coffset;
10278 poly_int64 cbitpos;
10279 tree ocd = OMP_CLAUSE_DECL (grp_end);
10280 bool openmp = !(region_type & ORT_ACC);
10281 tree *continue_at = NULL;
10283 while (TREE_CODE (ocd) == ARRAY_REF)
10284 ocd = TREE_OPERAND (ocd, 0);
10286 if (INDIRECT_REF_P (ocd))
10287 ocd = TREE_OPERAND (ocd, 0);
10289 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10291 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10292 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10293 == GOMP_MAP_ATTACH_DETACH)
10294 || (OMP_CLAUSE_MAP_KIND (grp_end)
10295 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10296 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10297 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10299 /* FIXME: If we're not mapping the base pointer in some other clause on this
10300 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10301 early-exit. */
10302 if (openmp && attach_detach)
10303 return NULL;
10305 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10307 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10308 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10310 OMP_CLAUSE_SET_MAP_KIND (l, k);
10312 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10314 OMP_CLAUSE_SIZE (l)
10315 = (!attach ? size_int (1)
10316 : (DECL_P (OMP_CLAUSE_DECL (l))
10317 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10318 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10319 if (struct_map_to_clause == NULL)
10320 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10321 struct_map_to_clause->put (base, l);
10323 if (ptr || attach_detach)
10325 tree extra_node;
10326 tree alloc_node
10327 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10328 &extra_node);
10329 OMP_CLAUSE_CHAIN (l) = alloc_node;
10331 tree *insert_node_pos = grp_start_p;
10333 if (extra_node)
10335 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10336 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10338 else
10339 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10341 *insert_node_pos = l;
10343 else
10345 gcc_assert (*grp_start_p == grp_end);
10346 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10349 tree noind = omp_strip_indirections (base);
10351 if (!openmp
10352 && (region_type & ORT_TARGET)
10353 && TREE_CODE (noind) == COMPONENT_REF)
10355 /* The base for this component access is a struct component access
10356 itself. Insert a node to be processed on the next iteration of
10357 our caller's loop, which will subsequently be turned into a new,
10358 inner GOMP_MAP_STRUCT mapping.
10360 We need to do this, else the non-DECL_P base won't be
10361 rewritten correctly in the offloaded region. */
10362 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10363 OMP_CLAUSE_MAP);
10364 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10365 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10366 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10367 *inner = c2;
10368 return NULL;
10371 tree sdecl = omp_strip_components_and_deref (base);
10373 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10375 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10376 OMP_CLAUSE_MAP);
10377 bool base_ref
10378 = (INDIRECT_REF_P (base)
10379 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10380 == REFERENCE_TYPE)
10381 || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
10382 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10383 (TREE_OPERAND (base, 0), 0)))
10384 == REFERENCE_TYPE))));
10385 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10386 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10387 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10388 OMP_CLAUSE_DECL (c2) = sdecl;
10389 tree baddr = build_fold_addr_expr (base);
10390 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10391 ptrdiff_type_node, baddr);
10392 /* This isn't going to be good enough when we add support for more
10393 complicated lvalue expressions. FIXME. */
10394 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10396 sdecl = build_simple_mem_ref (sdecl);
10397 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10398 ptrdiff_type_node, sdecl);
10399 OMP_CLAUSE_SIZE (c2)
10400 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10401 ptrdiff_type_node, baddr, decladdr);
10402 /* Insert after struct node. */
10403 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10404 OMP_CLAUSE_CHAIN (l) = c2;
10407 return NULL;
10409 else if (struct_map_to_clause)
10411 tree *osc = struct_map_to_clause->get (base);
10412 tree *sc = NULL, *scp = NULL;
10413 sc = &OMP_CLAUSE_CHAIN (*osc);
10414 /* The struct mapping might be immediately followed by a
10415 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10416 indirect access or a reference, or both. (This added node is removed
10417 in omp-low.c after it has been processed there.) */
10418 if (*sc != grp_end
10419 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10420 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10421 sc = &OMP_CLAUSE_CHAIN (*sc);
10422 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10423 if ((ptr || attach_detach) && sc == grp_start_p)
10424 break;
10425 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10426 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10427 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10428 break;
10429 else
10431 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10432 poly_offset_int offset;
10433 poly_int64 bitpos;
10435 if (TREE_CODE (sc_decl) == ARRAY_REF)
10437 while (TREE_CODE (sc_decl) == ARRAY_REF)
10438 sc_decl = TREE_OPERAND (sc_decl, 0);
10439 if (TREE_CODE (sc_decl) != COMPONENT_REF
10440 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10441 break;
10443 else if (INDIRECT_REF_P (sc_decl)
10444 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10445 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10446 == REFERENCE_TYPE))
10447 sc_decl = TREE_OPERAND (sc_decl, 0);
10449 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10450 if (!base2 || !operand_equal_p (base2, base, 0))
10451 break;
10452 if (scp)
10453 continue;
10454 if (maybe_lt (coffset, offset)
10455 || (known_eq (coffset, offset)
10456 && maybe_lt (cbitpos, bitpos)))
10458 if (ptr || attach_detach)
10459 scp = sc;
10460 else
10461 break;
10465 if (!attach)
10466 OMP_CLAUSE_SIZE (*osc)
10467 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10468 if (ptr || attach_detach)
10470 tree cl = NULL_TREE, extra_node;
10471 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10472 grp_end, &extra_node);
10473 tree *tail_chain = NULL;
10475 /* Here, we have:
10477 grp_end : the last (or only) node in this group.
10478 grp_start_p : pointer to the first node in a pointer mapping group
10479 up to and including GRP_END.
10480 sc : pointer to the chain for the end of the struct component
10481 list.
10482 scp : pointer to the chain for the sorted position at which we
10483 should insert in the middle of the struct component list
10484 (else NULL to insert at end).
10485 alloc_node : the "alloc" node for the structure (pointer-type)
10486 component. We insert at SCP (if present), else SC
10487 (the end of the struct component list).
10488 extra_node : a newly-synthesized node for an additional indirect
10489 pointer mapping or a Fortran pointer set, if needed.
10490 cl : first node to prepend before grp_start_p.
10491 tail_chain : pointer to chain of last prepended node.
10493 The general idea is we move the nodes for this struct mapping
10494 together: the alloc node goes into the sorted list directly after
10495 the struct mapping, and any extra nodes (together with the nodes
10496 mapping arrays pointed to by struct components) get moved after
10497 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10498 the end of the struct component mapping list. It's important that
10499 the alloc_node comes first in that case because it's part of the
10500 sorted component mapping list (but subsequent nodes are not!). */
10502 if (scp)
10503 omp_siblist_insert_node_after (alloc_node, scp);
10505 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10506 already inserted it) and the extra_node (if it is present). The
10507 list can be empty if we added alloc_node above and there is no
10508 extra node. */
10509 if (scp && extra_node)
10511 cl = extra_node;
10512 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10514 else if (extra_node)
10516 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10517 cl = alloc_node;
10518 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10520 else if (!scp)
10522 cl = alloc_node;
10523 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10526 continue_at
10527 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10528 grp_start_p, grp_end,
10530 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10532 else if (*sc != grp_end)
10534 gcc_assert (*grp_start_p == grp_end);
10536 /* We are moving the current node back to a previous struct node:
10537 the node that used to point to the current node will now point to
10538 the next node. */
10539 continue_at = grp_start_p;
10540 /* In the non-pointer case, the mapping clause itself is moved into
10541 the correct position in the struct component list, which in this
10542 case is just SC. */
10543 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10546 return continue_at;
10549 /* Scan through GROUPS, and create sorted structure sibling lists without
10550 gimplifying. */
10552 static bool
10553 omp_build_struct_sibling_lists (enum tree_code code,
10554 enum omp_region_type region_type,
10555 vec<omp_mapping_group> *groups,
10556 hash_map<tree_operand_hash_no_se,
10557 omp_mapping_group *> **grpmap,
10558 tree *list_p)
10560 unsigned i;
10561 omp_mapping_group *grp;
10562 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10563 bool success = true;
10564 tree *new_next = NULL;
10565 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10566 auto_vec<omp_mapping_group> pre_hwm_groups;
10568 FOR_EACH_VEC_ELT (*groups, i, grp)
10570 tree c = grp->grp_end;
10571 tree decl = OMP_CLAUSE_DECL (c);
10572 tree grp_end = grp->grp_end;
10573 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10575 if (new_next)
10576 grp->grp_start = new_next;
10578 new_next = NULL;
10580 tree *grp_start_p = grp->grp_start;
10582 if (DECL_P (decl))
10583 continue;
10585 /* Skip groups we marked for deletion in
10586 oacc_resolve_clause_dependencies. */
10587 if (grp->deleted)
10588 continue;
10590 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10591 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10593 /* Don't process an array descriptor that isn't inside a derived type
10594 as a struct (the GOMP_MAP_POINTER following will have the form
10595 "var.data", but such mappings are handled specially). */
10596 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10597 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10598 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10599 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10600 continue;
10603 tree d = decl;
10604 if (TREE_CODE (d) == ARRAY_REF)
10606 while (TREE_CODE (d) == ARRAY_REF)
10607 d = TREE_OPERAND (d, 0);
10608 if (TREE_CODE (d) == COMPONENT_REF
10609 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10610 decl = d;
10612 if (d == decl
10613 && INDIRECT_REF_P (decl)
10614 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10615 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10616 == REFERENCE_TYPE)
10617 && (OMP_CLAUSE_MAP_KIND (c)
10618 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10619 decl = TREE_OPERAND (decl, 0);
10621 STRIP_NOPS (decl);
10623 if (TREE_CODE (decl) != COMPONENT_REF)
10624 continue;
10626 /* If we're mapping the whole struct in another node, skip adding this
10627 node to a sibling list. */
10628 omp_mapping_group *wholestruct;
10629 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10630 &wholestruct))
10632 if (!(region_type & ORT_ACC)
10633 && *grp_start_p == grp_end)
10634 /* Remove the whole of this mapping -- redundant. */
10635 grp->deleted = true;
10637 continue;
10640 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10641 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10642 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10643 && code != OACC_UPDATE
10644 && code != OMP_TARGET_UPDATE)
10646 if (error_operand_p (decl))
10648 success = false;
10649 goto error_out;
10652 tree stype = TREE_TYPE (decl);
10653 if (TREE_CODE (stype) == REFERENCE_TYPE)
10654 stype = TREE_TYPE (stype);
10655 if (TYPE_SIZE_UNIT (stype) == NULL
10656 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10658 error_at (OMP_CLAUSE_LOCATION (c),
10659 "mapping field %qE of variable length "
10660 "structure", OMP_CLAUSE_DECL (c));
10661 success = false;
10662 goto error_out;
10665 tree inner = NULL_TREE;
10667 new_next
10668 = omp_accumulate_sibling_list (region_type, code,
10669 struct_map_to_clause, grp_start_p,
10670 grp_end, &inner);
10672 if (inner)
10674 if (new_next && *new_next == NULL_TREE)
10675 *new_next = inner;
10676 else
10677 *tail = inner;
10679 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10680 omp_mapping_group newgrp;
10681 newgrp.grp_start = new_next ? new_next : tail;
10682 newgrp.grp_end = inner;
10683 newgrp.mark = UNVISITED;
10684 newgrp.sibling = NULL;
10685 newgrp.deleted = false;
10686 newgrp.next = NULL;
10687 groups->safe_push (newgrp);
10689 /* !!! Growing GROUPS might invalidate the pointers in the group
10690 map. Rebuild it here. This is a bit inefficient, but
10691 shouldn't happen very often. */
10692 delete (*grpmap);
10693 *grpmap
10694 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10695 sentinel);
10697 tail = &OMP_CLAUSE_CHAIN (inner);
10702 /* Delete groups marked for deletion above. At this point the order of the
10703 groups may no longer correspond to the order of the underlying list,
10704 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10705 deleted nodes... */
10707 FOR_EACH_VEC_ELT (*groups, i, grp)
10708 if (grp->deleted)
10709 for (tree d = *grp->grp_start;
10710 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10711 d = OMP_CLAUSE_CHAIN (d))
10712 OMP_CLAUSE_DECL (d) = NULL_TREE;
10714 /* ...then sweep through the list removing the now-empty nodes. */
10716 tail = list_p;
10717 while (*tail)
10719 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10720 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
10721 *tail = OMP_CLAUSE_CHAIN (*tail);
10722 else
10723 tail = &OMP_CLAUSE_CHAIN (*tail);
10726 error_out:
10727 if (struct_map_to_clause)
10728 delete struct_map_to_clause;
10730 return success;
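/* Editorial sketch (not part of the original file): the clear-then-sweep
   removal used above, on a minimal list type.  Clearing a field first lets
   groups be flagged in any order; the sweep then unlinks flagged nodes
   with a single pointer-to-chain cursor and no "previous node"
   bookkeeping.  N and its "dead" flag are hypothetical stand-ins for OMP
   clauses with a cleared OMP_CLAUSE_DECL.

     struct N { bool dead; N *chain; };

     static void
     sweep_dead_nodes (N **list_p)
     {
       N **tail = list_p;
       while (*tail)
         {
           if ((*tail)->dead)
             *tail = (*tail)->chain;   // Unlink; the cursor stays put.
           else
             tail = &(*tail)->chain;
         }
     }
   */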
10733 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10734 omp context and into previous omp contexts as needed. */
10736 static void
10737 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10738 enum omp_region_type region_type,
10739 enum tree_code code)
10741 struct gimplify_omp_ctx *ctx, *outer_ctx;
10742 tree c;
10743 tree *orig_list_p = list_p;
10744 int handled_depend_iterators = -1;
10745 int nowait = -1;
10747 ctx = new_omp_context (region_type);
10748 ctx->code = code;
10749 outer_ctx = ctx->outer_context;
10750 if (code == OMP_TARGET)
10752 if (!lang_GNU_Fortran ())
10753 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10754 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10755 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10756 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10758 if (!lang_GNU_Fortran ())
10759 switch (code)
10761 case OMP_TARGET:
10762 case OMP_TARGET_DATA:
10763 case OMP_TARGET_ENTER_DATA:
10764 case OMP_TARGET_EXIT_DATA:
10765 case OACC_DECLARE:
10766 case OACC_HOST_DATA:
10767 case OACC_PARALLEL:
10768 case OACC_KERNELS:
10769 ctx->target_firstprivatize_array_bases = true;
10770 default:
10771 break;
10774 if (code == OMP_TARGET
10775 || code == OMP_TARGET_DATA
10776 || code == OMP_TARGET_ENTER_DATA
10777 || code == OMP_TARGET_EXIT_DATA)
10779 vec<omp_mapping_group> *groups;
10780 groups = omp_gather_mapping_groups (list_p);
10781 if (groups)
10783 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10784 grpmap = omp_index_mapping_groups (groups);
10786 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10787 list_p);
10789 omp_mapping_group *outlist = NULL;
10791 /* Topological sorting may fail if we have duplicate nodes, which
10792 we should have detected and shown an error for already. Skip
10793 sorting in that case. */
10794 if (seen_error ())
10795 goto failure;
10797 delete grpmap;
10798 delete groups;
10800 /* Rebuild now that we have struct sibling lists. */
10801 groups = omp_gather_mapping_groups (list_p);
10802 grpmap = omp_index_mapping_groups (groups);
10804 outlist = omp_tsort_mapping_groups (groups, grpmap);
10805 outlist = omp_segregate_mapping_groups (outlist);
10806 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10808 failure:
10809 delete grpmap;
10810 delete groups;
10813 else if (region_type & ORT_ACC)
10815 vec<omp_mapping_group> *groups;
10816 groups = omp_gather_mapping_groups (list_p);
10817 if (groups)
10819 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10820 grpmap = omp_index_mapping_groups (groups);
10822 oacc_resolve_clause_dependencies (groups, grpmap);
10823 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10824 list_p);
10826 delete groups;
10827 delete grpmap;
10831 while ((c = *list_p) != NULL)
10833 bool remove = false;
10834 bool notice_outer = true;
10835 const char *check_non_private = NULL;
10836 unsigned int flags;
10837 tree decl;
10839 switch (OMP_CLAUSE_CODE (c))
10841 case OMP_CLAUSE_PRIVATE:
10842 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10843 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10845 flags |= GOVD_PRIVATE_OUTER_REF;
10846 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
10848 else
10849 notice_outer = false;
10850 goto do_add;
10851 case OMP_CLAUSE_SHARED:
10852 flags = GOVD_SHARED | GOVD_EXPLICIT;
10853 goto do_add;
10854 case OMP_CLAUSE_FIRSTPRIVATE:
10855 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10856 check_non_private = "firstprivate";
10857 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10859 gcc_assert (code == OMP_TARGET);
10860 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
10862 goto do_add;
10863 case OMP_CLAUSE_LASTPRIVATE:
10864 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10865 switch (code)
10867 case OMP_DISTRIBUTE:
10868 error_at (OMP_CLAUSE_LOCATION (c),
10869 "conditional %<lastprivate%> clause on "
10870 "%qs construct", "distribute");
10871 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10872 break;
10873 case OMP_TASKLOOP:
10874 error_at (OMP_CLAUSE_LOCATION (c),
10875 "conditional %<lastprivate%> clause on "
10876 "%qs construct", "taskloop");
10877 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10878 break;
10879 default:
10880 break;
10882 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
10883 if (code != OMP_LOOP)
10884 check_non_private = "lastprivate";
10885 decl = OMP_CLAUSE_DECL (c);
10886 if (error_operand_p (decl))
10887 goto do_add;
10888 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
10889 && !lang_hooks.decls.omp_scalar_p (decl, true))
10891 error_at (OMP_CLAUSE_LOCATION (c),
10892 "non-scalar variable %qD in conditional "
10893 "%<lastprivate%> clause", decl);
10894 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10896 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10897 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
10898 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
10899 false);
10900 goto do_add;
10901 case OMP_CLAUSE_REDUCTION:
10902 if (OMP_CLAUSE_REDUCTION_TASK (c))
10904 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10906 if (nowait == -1)
10907 nowait = omp_find_clause (*list_p,
10908 OMP_CLAUSE_NOWAIT) != NULL_TREE;
10909 if (nowait
10910 && (outer_ctx == NULL
10911 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
10913 error_at (OMP_CLAUSE_LOCATION (c),
10914 "%<task%> reduction modifier on a construct "
10915 "with a %<nowait%> clause");
10916 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10919 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
10921 error_at (OMP_CLAUSE_LOCATION (c),
10922 "invalid %<task%> reduction modifier on construct "
10923 "other than %<parallel%>, %qs, %<sections%> or "
10924 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10925 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10928 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10929 switch (code)
10931 case OMP_SECTIONS:
10932 error_at (OMP_CLAUSE_LOCATION (c),
10933 "%<inscan%> %<reduction%> clause on "
10934 "%qs construct", "sections");
10935 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10936 break;
10937 case OMP_PARALLEL:
10938 error_at (OMP_CLAUSE_LOCATION (c),
10939 "%<inscan%> %<reduction%> clause on "
10940 "%qs construct", "parallel");
10941 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10942 break;
10943 case OMP_TEAMS:
10944 error_at (OMP_CLAUSE_LOCATION (c),
10945 "%<inscan%> %<reduction%> clause on "
10946 "%qs construct", "teams");
10947 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10948 break;
10949 case OMP_TASKLOOP:
10950 error_at (OMP_CLAUSE_LOCATION (c),
10951 "%<inscan%> %<reduction%> clause on "
10952 "%qs construct", "taskloop");
10953 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10954 break;
10955 case OMP_SCOPE:
10956 error_at (OMP_CLAUSE_LOCATION (c),
10957 "%<inscan%> %<reduction%> clause on "
10958 "%qs construct", "scope");
10959 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10960 break;
10961 default:
10962 break;
10964 /* FALLTHRU */
10965 case OMP_CLAUSE_IN_REDUCTION:
10966 case OMP_CLAUSE_TASK_REDUCTION:
10967 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
10968 /* OpenACC permits reductions on private variables. */
10969 if (!(region_type & ORT_ACC)
10970 /* taskgroup is actually not a worksharing region. */
10971 && code != OMP_TASKGROUP)
10972 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
10973 decl = OMP_CLAUSE_DECL (c);
10974 if (TREE_CODE (decl) == MEM_REF)
10976 tree type = TREE_TYPE (decl);
10977 bool saved_into_ssa = gimplify_ctxp->into_ssa;
10978 gimplify_ctxp->into_ssa = false;
10979 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
10980 NULL, is_gimple_val, fb_rvalue, false)
10981 == GS_ERROR)
10983 gimplify_ctxp->into_ssa = saved_into_ssa;
10984 remove = true;
10985 break;
10987 gimplify_ctxp->into_ssa = saved_into_ssa;
10988 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
10989 if (DECL_P (v))
10991 omp_firstprivatize_variable (ctx, v);
10992 omp_notice_variable (ctx, v, true);
10994 decl = TREE_OPERAND (decl, 0);
10995 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10997 gimplify_ctxp->into_ssa = false;
10998 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
10999 NULL, is_gimple_val, fb_rvalue, false)
11000 == GS_ERROR)
11002 gimplify_ctxp->into_ssa = saved_into_ssa;
11003 remove = true;
11004 break;
11006 gimplify_ctxp->into_ssa = saved_into_ssa;
11007 v = TREE_OPERAND (decl, 1);
11008 if (DECL_P (v))
11010 omp_firstprivatize_variable (ctx, v);
11011 omp_notice_variable (ctx, v, true);
11013 decl = TREE_OPERAND (decl, 0);
11015 if (TREE_CODE (decl) == ADDR_EXPR
11016 || TREE_CODE (decl) == INDIRECT_REF)
11017 decl = TREE_OPERAND (decl, 0);
11019 goto do_add_decl;
11020 case OMP_CLAUSE_LINEAR:
11021 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
11022 is_gimple_val, fb_rvalue) == GS_ERROR)
11024 remove = true;
11025 break;
11027 else
11029 if (code == OMP_SIMD
11030 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11032 struct gimplify_omp_ctx *octx = outer_ctx;
11033 if (octx
11034 && octx->region_type == ORT_WORKSHARE
11035 && octx->combined_loop
11036 && !octx->distribute)
11038 if (octx->outer_context
11039 && (octx->outer_context->region_type
11040 == ORT_COMBINED_PARALLEL))
11041 octx = octx->outer_context->outer_context;
11042 else
11043 octx = octx->outer_context;
11045 if (octx
11046 && octx->region_type == ORT_WORKSHARE
11047 && octx->combined_loop
11048 && octx->distribute)
11050 error_at (OMP_CLAUSE_LOCATION (c),
11051 "%<linear%> clause for variable other than "
11052 "loop iterator specified on construct "
11053 "combined with %<distribute%>");
11054 remove = true;
11055 break;
11058 /* For combined #pragma omp parallel for simd, we need to put
11059 lastprivate and perhaps firstprivate too on the
11060 parallel. Similarly for #pragma omp for simd. */
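/* Illustrative (hypothetical user code): given

     #pragma omp parallel for simd linear (j)
     for (i = 0; i < n; i++) ...

   the linear variable "j" may need firstprivate/lastprivate (or shared)
   entries on the enclosing parallel, which the walk below adds.  */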
11061 struct gimplify_omp_ctx *octx = outer_ctx;
11062 bool taskloop_seen = false;
11063 decl = NULL_TREE;
11066 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11067 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11068 break;
11069 decl = OMP_CLAUSE_DECL (c);
11070 if (error_operand_p (decl))
11072 decl = NULL_TREE;
11073 break;
11075 flags = GOVD_SEEN;
11076 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11077 flags |= GOVD_FIRSTPRIVATE;
11078 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11079 flags |= GOVD_LASTPRIVATE;
11080 if (octx
11081 && octx->region_type == ORT_WORKSHARE
11082 && octx->combined_loop)
11084 if (octx->outer_context
11085 && (octx->outer_context->region_type
11086 == ORT_COMBINED_PARALLEL))
11087 octx = octx->outer_context;
11088 else if (omp_check_private (octx, decl, false))
11089 break;
11091 else if (octx
11092 && (octx->region_type & ORT_TASK) != 0
11093 && octx->combined_loop)
11094 taskloop_seen = true;
11095 else if (octx
11096 && octx->region_type == ORT_COMBINED_PARALLEL
11097 && ((ctx->region_type == ORT_WORKSHARE
11098 && octx == outer_ctx)
11099 || taskloop_seen))
11100 flags = GOVD_SEEN | GOVD_SHARED;
11101 else if (octx
11102 && ((octx->region_type & ORT_COMBINED_TEAMS)
11103 == ORT_COMBINED_TEAMS))
11104 flags = GOVD_SEEN | GOVD_SHARED;
11105 else if (octx
11106 && octx->region_type == ORT_COMBINED_TARGET)
11108 if (flags & GOVD_LASTPRIVATE)
11109 flags = GOVD_SEEN | GOVD_MAP;
11111 else
11112 break;
11113 splay_tree_node on
11114 = splay_tree_lookup (octx->variables,
11115 (splay_tree_key) decl);
11116 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11118 octx = NULL;
11119 break;
11121 omp_add_variable (octx, decl, flags);
11122 if (octx->outer_context == NULL)
11123 break;
11124 octx = octx->outer_context;
11126 while (1);
11127 if (octx
11128 && decl
11129 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11130 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11131 omp_notice_variable (octx, decl, true);
11133 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11134 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11135 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11137 notice_outer = false;
11138 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11140 goto do_add;
11142 case OMP_CLAUSE_MAP:
11143 decl = OMP_CLAUSE_DECL (c);
11144 if (error_operand_p (decl))
11145 remove = true;
11146 switch (code)
11148 case OMP_TARGET:
11149 break;
11150 case OACC_DATA:
11151 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11152 break;
11153 /* FALLTHRU */
11154 case OMP_TARGET_DATA:
11155 case OMP_TARGET_ENTER_DATA:
11156 case OMP_TARGET_EXIT_DATA:
11157 case OACC_ENTER_DATA:
11158 case OACC_EXIT_DATA:
11159 case OACC_HOST_DATA:
11160 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11161 || (OMP_CLAUSE_MAP_KIND (c)
11162 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11163 /* For target {,enter ,exit }data only the array slice is
11164 mapped, but not the pointer to it. */
11165 remove = true;
11166 break;
11167 default:
11168 break;
11170 if (remove)
11171 break;
11172 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11174 struct gimplify_omp_ctx *octx;
11175 for (octx = outer_ctx; octx; octx = octx->outer_context)
11177 if (octx->region_type != ORT_ACC_HOST_DATA)
11178 break;
11179 splay_tree_node n2
11180 = splay_tree_lookup (octx->variables,
11181 (splay_tree_key) decl);
11182 if (n2)
11183 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11184 "declared in enclosing %<host_data%> region",
11185 DECL_NAME (decl));
11188 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11189 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11190 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11191 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11192 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11194 remove = true;
11195 break;
11197 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11198 || (OMP_CLAUSE_MAP_KIND (c)
11199 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11200 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11201 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11203 OMP_CLAUSE_SIZE (c)
11204 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11205 false);
11206 if ((region_type & ORT_TARGET) != 0)
11207 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11208 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11211 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11213 tree base = omp_strip_components_and_deref (decl);
11214 if (DECL_P (base))
11216 decl = base;
11217 splay_tree_node n
11218 = splay_tree_lookup (ctx->variables,
11219 (splay_tree_key) decl);
11220 if (seen_error ()
11221 && n
11222 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11224 remove = true;
11225 break;
11227 flags = GOVD_MAP | GOVD_EXPLICIT;
11229 goto do_add_decl;
11233 if (TREE_CODE (decl) == TARGET_EXPR)
11235 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11236 is_gimple_lvalue, fb_lvalue)
11237 == GS_ERROR)
11238 remove = true;
11240 else if (!DECL_P (decl))
11242 tree d = decl, *pd;
11243 if (TREE_CODE (d) == ARRAY_REF)
11245 while (TREE_CODE (d) == ARRAY_REF)
11246 d = TREE_OPERAND (d, 0);
11247 if (TREE_CODE (d) == COMPONENT_REF
11248 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11249 decl = d;
11251 pd = &OMP_CLAUSE_DECL (c);
11252 if (d == decl
11253 && TREE_CODE (decl) == INDIRECT_REF
11254 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11255 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11256 == REFERENCE_TYPE)
11257 && (OMP_CLAUSE_MAP_KIND (c)
11258 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11260 pd = &TREE_OPERAND (decl, 0);
11261 decl = TREE_OPERAND (decl, 0);
11263 /* An "attach/detach" operation on an update directive should
11264 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11265 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11266 depends on the previous mapping. */
11267 if (code == OACC_UPDATE
11268 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11269 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
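/* Illustrative sketch (hypothetical user code): an update such as

     #pragma acc update device(s.ptr[0:n])

   arrives here as a GOMP_MAP_ATTACH_DETACH node; rewriting it above to
   GOMP_MAP_ALWAYS_POINTER refreshes the previously mapped device
   pointer rather than performing a fresh attach.  */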
11271 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11273 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11274 == ARRAY_TYPE)
11275 remove = true;
11276 else
11278 gomp_map_kind k = ((code == OACC_EXIT_DATA
11279 || code == OMP_TARGET_EXIT_DATA)
11280 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11281 OMP_CLAUSE_SET_MAP_KIND (c, k);
11285 tree cref = decl;
11287 while (TREE_CODE (cref) == ARRAY_REF)
11288 cref = TREE_OPERAND (cref, 0);
11290 if (TREE_CODE (cref) == INDIRECT_REF)
11291 cref = TREE_OPERAND (cref, 0);
11293 if (TREE_CODE (cref) == COMPONENT_REF)
11295 tree base = cref;
11296 while (base && !DECL_P (base))
11298 tree innerbase = omp_get_base_pointer (base);
11299 if (!innerbase)
11300 break;
11301 base = innerbase;
11303 if (base
11304 && DECL_P (base)
11305 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11306 && POINTER_TYPE_P (TREE_TYPE (base)))
11308 splay_tree_node n
11309 = splay_tree_lookup (ctx->variables,
11310 (splay_tree_key) base);
11311 n->value |= GOVD_SEEN;
11315 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11317 /* Don't gimplify *pd fully at this point, as the base
11318 will need to be adjusted during omp lowering. */
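/* Shape handled below (illustrative): for something like
   map(tofrom: a->b[i].c[0:n]) in a reduction on "omp target", *pd is
   the ARRAY_REF/COMPONENT_REF chain; only its bounds, element sizes,
   field offsets and indices are gimplified here, while the base decl
   is left for omp lowering to adjust.  */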
11319 auto_vec<tree, 10> expr_stack;
11320 tree *p = pd;
11321 while (handled_component_p (*p)
11322 || TREE_CODE (*p) == INDIRECT_REF
11323 || TREE_CODE (*p) == ADDR_EXPR
11324 || TREE_CODE (*p) == MEM_REF
11325 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11327 expr_stack.safe_push (*p);
11328 p = &TREE_OPERAND (*p, 0);
11330 for (int i = expr_stack.length () - 1; i >= 0; i--)
11332 tree t = expr_stack[i];
11333 if (TREE_CODE (t) == ARRAY_REF
11334 || TREE_CODE (t) == ARRAY_RANGE_REF)
11336 if (TREE_OPERAND (t, 2) == NULL_TREE)
11338 tree low = unshare_expr (array_ref_low_bound (t));
11339 if (!is_gimple_min_invariant (low))
11341 TREE_OPERAND (t, 2) = low;
11342 if (gimplify_expr (&TREE_OPERAND (t, 2),
11343 pre_p, NULL,
11344 is_gimple_reg,
11345 fb_rvalue) == GS_ERROR)
11346 remove = true;
11349 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11350 NULL, is_gimple_reg,
11351 fb_rvalue) == GS_ERROR)
11352 remove = true;
11353 if (TREE_OPERAND (t, 3) == NULL_TREE)
11355 tree elmt_size = array_ref_element_size (t);
11356 if (!is_gimple_min_invariant (elmt_size))
11358 elmt_size = unshare_expr (elmt_size);
11359 tree elmt_type
11360 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11361 0)));
11362 tree factor
11363 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11364 elmt_size
11365 = size_binop (EXACT_DIV_EXPR, elmt_size,
11366 factor);
11367 TREE_OPERAND (t, 3) = elmt_size;
11368 if (gimplify_expr (&TREE_OPERAND (t, 3),
11369 pre_p, NULL,
11370 is_gimple_reg,
11371 fb_rvalue) == GS_ERROR)
11372 remove = true;
11375 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11376 NULL, is_gimple_reg,
11377 fb_rvalue) == GS_ERROR)
11378 remove = true;
11380 else if (TREE_CODE (t) == COMPONENT_REF)
11382 if (TREE_OPERAND (t, 2) == NULL_TREE)
11384 tree offset = component_ref_field_offset (t);
11385 if (!is_gimple_min_invariant (offset))
11387 offset = unshare_expr (offset);
11388 tree field = TREE_OPERAND (t, 1);
11389 tree factor
11390 = size_int (DECL_OFFSET_ALIGN (field)
11391 / BITS_PER_UNIT);
11392 offset = size_binop (EXACT_DIV_EXPR, offset,
11393 factor);
11394 TREE_OPERAND (t, 2) = offset;
11395 if (gimplify_expr (&TREE_OPERAND (t, 2),
11396 pre_p, NULL,
11397 is_gimple_reg,
11398 fb_rvalue) == GS_ERROR)
11399 remove = true;
11402 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11403 NULL, is_gimple_reg,
11404 fb_rvalue) == GS_ERROR)
11405 remove = true;
11408 for (; expr_stack.length () > 0; )
11410 tree t = expr_stack.pop ();
11412 if (TREE_CODE (t) == ARRAY_REF
11413 || TREE_CODE (t) == ARRAY_RANGE_REF)
11415 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11416 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11417 NULL, is_gimple_val,
11418 fb_rvalue) == GS_ERROR)
11419 remove = true;
11423 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11424 fb_lvalue) == GS_ERROR)
11426 remove = true;
11427 break;
11429 break;
11431 flags = GOVD_MAP | GOVD_EXPLICIT;
11432 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11433 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11434 flags |= GOVD_MAP_ALWAYS_TO;
11436 if ((code == OMP_TARGET
11437 || code == OMP_TARGET_DATA
11438 || code == OMP_TARGET_ENTER_DATA
11439 || code == OMP_TARGET_EXIT_DATA)
11440 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11442 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11443 octx = octx->outer_context)
11445 splay_tree_node n
11446 = splay_tree_lookup (octx->variables,
11447 (splay_tree_key) OMP_CLAUSE_DECL (c));
11448 /* If this is contained in an outer OpenMP region as a
11449 firstprivate value, remove the attach/detach. */
11450 if (n && (n->value & GOVD_FIRSTPRIVATE))
11452 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11453 goto do_add;
11457 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11458 ? GOMP_MAP_DETACH
11459 : GOMP_MAP_ATTACH);
11460 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11463 goto do_add;
11465 case OMP_CLAUSE_AFFINITY:
11466 gimplify_omp_affinity (list_p, pre_p);
11467 remove = true;
11468 break;
11469 case OMP_CLAUSE_DOACROSS:
11470 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11472 tree deps = OMP_CLAUSE_DECL (c);
11473 while (deps && TREE_CODE (deps) == TREE_LIST)
11475 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11476 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11477 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11478 pre_p, NULL, is_gimple_val, fb_rvalue);
11479 deps = TREE_CHAIN (deps);
11482 else
11483 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11484 == OMP_CLAUSE_DOACROSS_SOURCE);
11485 break;
11486 case OMP_CLAUSE_DEPEND:
11487 if (handled_depend_iterators == -1)
11488 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11489 if (handled_depend_iterators)
11491 if (handled_depend_iterators == 2)
11492 remove = true;
11493 break;
11495 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11497 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11498 NULL, is_gimple_val, fb_rvalue);
11499 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11501 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11503 remove = true;
11504 break;
11506 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11508 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11509 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11510 is_gimple_val, fb_rvalue) == GS_ERROR)
11512 remove = true;
11513 break;
11516 if (code == OMP_TASK)
11517 ctx->has_depend = true;
11518 break;
11520 case OMP_CLAUSE_TO:
11521 case OMP_CLAUSE_FROM:
11522 case OMP_CLAUSE__CACHE_:
11523 decl = OMP_CLAUSE_DECL (c);
11524 if (error_operand_p (decl))
11526 remove = true;
11527 break;
11529 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11530 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11531 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11532 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11533 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11535 remove = true;
11536 break;
11538 if (!DECL_P (decl))
11540 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11541 NULL, is_gimple_lvalue, fb_lvalue)
11542 == GS_ERROR)
11544 remove = true;
11545 break;
11547 break;
11549 goto do_notice;
11551 case OMP_CLAUSE_USE_DEVICE_PTR:
11552 case OMP_CLAUSE_USE_DEVICE_ADDR:
11553 flags = GOVD_EXPLICIT;
11554 goto do_add;
11556 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11557 decl = OMP_CLAUSE_DECL (c);
11558 while (TREE_CODE (decl) == INDIRECT_REF
11559 || TREE_CODE (decl) == ARRAY_REF)
11560 decl = TREE_OPERAND (decl, 0);
11561 flags = GOVD_EXPLICIT;
11562 goto do_add_decl;
11564 case OMP_CLAUSE_IS_DEVICE_PTR:
11565 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11566 goto do_add;
11568 do_add:
11569 decl = OMP_CLAUSE_DECL (c);
11570 do_add_decl:
11571 if (error_operand_p (decl))
11573 remove = true;
11574 break;
11576 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11578 tree t = omp_member_access_dummy_var (decl);
11579 if (t)
11581 tree v = DECL_VALUE_EXPR (decl);
11582 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11583 if (outer_ctx)
11584 omp_notice_variable (outer_ctx, t, true);
11587 if (code == OACC_DATA
11588 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11589 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11590 flags |= GOVD_MAP_0LEN_ARRAY;
11591 omp_add_variable (ctx, decl, flags);
11592 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11593 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11594 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11595 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11597 struct gimplify_omp_ctx *pctx
11598 = code == OMP_TARGET ? outer_ctx : ctx;
11599 if (pctx)
11600 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11601 GOVD_LOCAL | GOVD_SEEN);
11602 if (pctx
11603 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11604 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11605 find_decl_expr,
11606 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11607 NULL) == NULL_TREE)
11608 omp_add_variable (pctx,
11609 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11610 GOVD_LOCAL | GOVD_SEEN);
11611 gimplify_omp_ctxp = pctx;
11612 push_gimplify_context ();
11614 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11615 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11617 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11618 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11619 pop_gimplify_context
11620 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11621 push_gimplify_context ();
11622 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11623 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11624 pop_gimplify_context
11625 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11626 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11627 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11629 gimplify_omp_ctxp = outer_ctx;
11631 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11632 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11634 gimplify_omp_ctxp = ctx;
11635 push_gimplify_context ();
11636 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11638 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11639 NULL, NULL);
11640 TREE_SIDE_EFFECTS (bind) = 1;
11641 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11642 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11644 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11645 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11646 pop_gimplify_context
11647 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11648 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11650 gimplify_omp_ctxp = outer_ctx;
11652 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11653 && OMP_CLAUSE_LINEAR_STMT (c))
11655 gimplify_omp_ctxp = ctx;
11656 push_gimplify_context ();
11657 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11659 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11660 NULL, NULL);
11661 TREE_SIDE_EFFECTS (bind) = 1;
11662 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11663 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11665 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11666 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11667 pop_gimplify_context
11668 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11669 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11671 gimplify_omp_ctxp = outer_ctx;
11673 if (notice_outer)
11674 goto do_notice;
11675 break;
11677 case OMP_CLAUSE_COPYIN:
11678 case OMP_CLAUSE_COPYPRIVATE:
11679 decl = OMP_CLAUSE_DECL (c);
11680 if (error_operand_p (decl))
11682 remove = true;
11683 break;
11685 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11686 && !remove
11687 && !omp_check_private (ctx, decl, true))
11689 remove = true;
11690 if (is_global_var (decl))
11692 if (DECL_THREAD_LOCAL_P (decl))
11693 remove = false;
11694 else if (DECL_HAS_VALUE_EXPR_P (decl))
11696 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11698 if (value
11699 && DECL_P (value)
11700 && DECL_THREAD_LOCAL_P (value))
11701 remove = false;
11704 if (remove)
11705 error_at (OMP_CLAUSE_LOCATION (c),
11706 "copyprivate variable %qE is not threadprivate"
11707 " or private in outer context", DECL_NAME (decl));
11709 do_notice:
11710 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11711 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11712 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11713 && outer_ctx
11714 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11715 || (region_type == ORT_WORKSHARE
11716 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11717 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11718 || code == OMP_LOOP)))
11719 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11720 || (code == OMP_LOOP
11721 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11722 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11723 == ORT_COMBINED_TEAMS))))
11725 splay_tree_node on
11726 = splay_tree_lookup (outer_ctx->variables,
11727 (splay_tree_key)decl);
11728 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11730 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11731 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11732 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11733 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11734 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11735 == POINTER_TYPE))))
11736 omp_firstprivatize_variable (outer_ctx, decl);
11737 else
11739 omp_add_variable (outer_ctx, decl,
11740 GOVD_SEEN | GOVD_SHARED);
11741 if (outer_ctx->outer_context)
11742 omp_notice_variable (outer_ctx->outer_context, decl,
11743 true);
11747 if (outer_ctx)
11748 omp_notice_variable (outer_ctx, decl, true);
11749 if (check_non_private
11750 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11751 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11752 || decl == OMP_CLAUSE_DECL (c)
11753 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11754 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11755 == ADDR_EXPR
11756 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11757 == POINTER_PLUS_EXPR
11758 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11759 (OMP_CLAUSE_DECL (c), 0), 0))
11760 == ADDR_EXPR)))))
11761 && omp_check_private (ctx, decl, false))
11763 error ("%s variable %qE is private in outer context",
11764 check_non_private, DECL_NAME (decl));
11765 remove = true;
11767 break;
11769 case OMP_CLAUSE_DETACH:
11770 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11771 goto do_add;
11773 case OMP_CLAUSE_IF:
11774 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11775 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11777 const char *p[2];
11778 for (int i = 0; i < 2; i++)
11779 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11781 case VOID_CST: p[i] = "cancel"; break;
11782 case OMP_PARALLEL: p[i] = "parallel"; break;
11783 case OMP_SIMD: p[i] = "simd"; break;
11784 case OMP_TASK: p[i] = "task"; break;
11785 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11786 case OMP_TARGET_DATA: p[i] = "target data"; break;
11787 case OMP_TARGET: p[i] = "target"; break;
11788 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11789 case OMP_TARGET_ENTER_DATA:
11790 p[i] = "target enter data"; break;
11791 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11792 default: gcc_unreachable ();
11794 error_at (OMP_CLAUSE_LOCATION (c),
11795 "expected %qs %<if%> clause modifier rather than %qs",
11796 p[0], p[1]);
11797 remove = true;
11799 /* Fall through. */
11801 case OMP_CLAUSE_FINAL:
11802 OMP_CLAUSE_OPERAND (c, 0)
11803 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11804 /* Fall through. */
11806 case OMP_CLAUSE_NUM_TEAMS:
11807 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11808 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11809 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11811 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11813 remove = true;
11814 break;
11816 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11817 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11818 pre_p, NULL, true);
11820 /* Fall through. */
11822 case OMP_CLAUSE_SCHEDULE:
11823 case OMP_CLAUSE_NUM_THREADS:
11824 case OMP_CLAUSE_THREAD_LIMIT:
11825 case OMP_CLAUSE_DIST_SCHEDULE:
11826 case OMP_CLAUSE_DEVICE:
11827 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11828 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11830 if (code != OMP_TARGET)
11832 error_at (OMP_CLAUSE_LOCATION (c),
11833 "%<device%> clause with %<ancestor%> is only "
11834 "allowed on %<target%> construct");
11835 remove = true;
11836 break;
11839 tree clauses = *orig_list_p;
11840 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11841 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11842 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11843 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11844 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11845 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
11848 error_at (OMP_CLAUSE_LOCATION (c),
11849 "with %<ancestor%>, only the %<device%>, "
11850 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11851 "and %<map%> clauses may appear on the "
11852 "construct");
11853 remove = true;
11854 break;
11857 /* Fall through. */
11859 case OMP_CLAUSE_PRIORITY:
11860 case OMP_CLAUSE_GRAINSIZE:
11861 case OMP_CLAUSE_NUM_TASKS:
11862 case OMP_CLAUSE_FILTER:
11863 case OMP_CLAUSE_HINT:
11864 case OMP_CLAUSE_ASYNC:
11865 case OMP_CLAUSE_WAIT:
11866 case OMP_CLAUSE_NUM_GANGS:
11867 case OMP_CLAUSE_NUM_WORKERS:
11868 case OMP_CLAUSE_VECTOR_LENGTH:
11869 case OMP_CLAUSE_WORKER:
11870 case OMP_CLAUSE_VECTOR:
11871 if (OMP_CLAUSE_OPERAND (c, 0)
11872 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
11874 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
11876 remove = true;
11877 break;
11879 /* All these clauses care about the value, not a particular decl,
11880 so try to force it into an SSA_NAME or a fresh temporary. */
11881 OMP_CLAUSE_OPERAND (c, 0)
11882 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
11883 pre_p, NULL, true);
11885 break;
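/* Illustrative: for "#pragma omp parallel num_threads (n * 4)" the
   expression "n * 4" is forced above into an SSA name or a fresh
   temporary, since only its value matters to the runtime.  */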
11887 case OMP_CLAUSE_GANG:
11888 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
11889 is_gimple_val, fb_rvalue) == GS_ERROR)
11890 remove = true;
11891 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
11892 is_gimple_val, fb_rvalue) == GS_ERROR)
11893 remove = true;
11894 break;
11896 case OMP_CLAUSE_NOWAIT:
11897 nowait = 1;
11898 break;
11900 case OMP_CLAUSE_ORDERED:
11901 case OMP_CLAUSE_UNTIED:
11902 case OMP_CLAUSE_COLLAPSE:
11903 case OMP_CLAUSE_TILE:
11904 case OMP_CLAUSE_AUTO:
11905 case OMP_CLAUSE_SEQ:
11906 case OMP_CLAUSE_INDEPENDENT:
11907 case OMP_CLAUSE_MERGEABLE:
11908 case OMP_CLAUSE_PROC_BIND:
11909 case OMP_CLAUSE_SAFELEN:
11910 case OMP_CLAUSE_SIMDLEN:
11911 case OMP_CLAUSE_NOGROUP:
11912 case OMP_CLAUSE_THREADS:
11913 case OMP_CLAUSE_SIMD:
11914 case OMP_CLAUSE_BIND:
11915 case OMP_CLAUSE_IF_PRESENT:
11916 case OMP_CLAUSE_FINALIZE:
11917 break;
11919 case OMP_CLAUSE_ORDER:
11920 ctx->order_concurrent = true;
11921 break;
11923 case OMP_CLAUSE_DEFAULTMAP:
11924 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
11925 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
11927 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
11928 gdmkmin = GDMK_SCALAR;
11929 gdmkmax = GDMK_POINTER;
11930 break;
11931 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
11932 gdmkmin = GDMK_SCALAR;
11933 gdmkmax = GDMK_SCALAR_TARGET;
11934 break;
11935 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
11936 gdmkmin = gdmkmax = GDMK_AGGREGATE;
11937 break;
11938 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
11939 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
11940 break;
11941 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
11942 gdmkmin = gdmkmax = GDMK_POINTER;
11943 break;
11944 default:
11945 gcc_unreachable ();
11947 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
11948 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
11950 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
11951 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
11952 break;
11953 case OMP_CLAUSE_DEFAULTMAP_TO:
11954 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
11955 break;
11956 case OMP_CLAUSE_DEFAULTMAP_FROM:
11957 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
11958 break;
11959 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
11960 ctx->defaultmap[gdmk] = GOVD_MAP;
11961 break;
11962 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
11963 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11964 break;
11965 case OMP_CLAUSE_DEFAULTMAP_NONE:
11966 ctx->defaultmap[gdmk] = 0;
11967 break;
11968 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
11969 switch (gdmk)
11971 case GDMK_SCALAR:
11972 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11973 break;
11974 case GDMK_SCALAR_TARGET:
11975 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
11976 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11977 break;
11978 case GDMK_AGGREGATE:
11979 case GDMK_ALLOCATABLE:
11980 ctx->defaultmap[gdmk] = GOVD_MAP;
11981 break;
11982 case GDMK_POINTER:
11983 ctx->defaultmap[gdmk] = GOVD_MAP;
11984 if (!lang_GNU_Fortran ())
11985 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
11986 break;
11987 default:
11988 gcc_unreachable ();
11990 break;
11991 default:
11992 gcc_unreachable ();
11994 break;
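/* Illustrative: "#pragma omp target defaultmap(firstprivate: scalar)"
   arrives with category SCALAR and behavior FIRSTPRIVATE, so both
   ctx->defaultmap[GDMK_SCALAR] and ctx->defaultmap[GDMK_SCALAR_TARGET]
   are set above to GOVD_FIRSTPRIVATE for implicitly used variables.  */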
11996 case OMP_CLAUSE_ALIGNED:
11997 decl = OMP_CLAUSE_DECL (c);
11998 if (error_operand_p (decl))
12000 remove = true;
12001 break;
12003 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12004 is_gimple_val, fb_rvalue) == GS_ERROR)
12006 remove = true;
12007 break;
12009 if (!is_global_var (decl)
12010 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12011 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12012 break;
12014 case OMP_CLAUSE_NONTEMPORAL:
12015 decl = OMP_CLAUSE_DECL (c);
12016 if (error_operand_p (decl))
12018 remove = true;
12019 break;
12021 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12022 break;
12024 case OMP_CLAUSE_ALLOCATE:
12025 decl = OMP_CLAUSE_DECL (c);
12026 if (error_operand_p (decl))
12028 remove = true;
12029 break;
12031 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12032 is_gimple_val, fb_rvalue) == GS_ERROR)
12034 remove = true;
12035 break;
12037 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12038 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12039 == INTEGER_CST))
12041 else if (code == OMP_TASKLOOP
12042 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12043 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12044 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12045 pre_p, NULL, false);
12046 break;
12048 case OMP_CLAUSE_DEFAULT:
12049 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12050 break;
12052 case OMP_CLAUSE_INCLUSIVE:
12053 case OMP_CLAUSE_EXCLUSIVE:
12054 decl = OMP_CLAUSE_DECL (c);
12056 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12057 (splay_tree_key) decl);
12058 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12060 error_at (OMP_CLAUSE_LOCATION (c),
12061 "%qD specified in %qs clause but not in %<inscan%> "
12062 "%<reduction%> clause on the containing construct",
12063 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12064 remove = true;
12066 else
12068 n->value |= GOVD_REDUCTION_INSCAN;
12069 if (outer_ctx->region_type == ORT_SIMD
12070 && outer_ctx->outer_context
12071 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12073 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12074 (splay_tree_key) decl);
12075 if (n && (n->value & GOVD_REDUCTION) != 0)
12076 n->value |= GOVD_REDUCTION_INSCAN;
12080 break;
12082 case OMP_CLAUSE_NOHOST:
12083 default:
12084 gcc_unreachable ();
12087 if (code == OACC_DATA
12088 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12089 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12090 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12091 remove = true;
12092 if (remove)
12093 *list_p = OMP_CLAUSE_CHAIN (c);
12094 else
12095 list_p = &OMP_CLAUSE_CHAIN (c);
12098 ctx->clauses = *orig_list_p;
12099 gimplify_omp_ctxp = ctx;
12102 /* Return true if DECL is a candidate for the shared to firstprivate
12103 optimization. We only consider non-addressable scalars that are
12104 not too big and not references. */
12106 static bool
12107 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12109 if (TREE_ADDRESSABLE (decl))
12110 return false;
12111 tree type = TREE_TYPE (decl);
12112 if (!is_gimple_reg_type (type)
12113 || TREE_CODE (type) == REFERENCE_TYPE
12114 || TREE_ADDRESSABLE (type))
12115 return false;
12116 /* Don't optimize overly large decls, as each thread/task will have
12117 its own copy. */
12118 HOST_WIDE_INT len = int_size_in_bytes (type);
12119 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12120 return false;
12121 if (omp_privatize_by_reference (decl))
12122 return false;
12123 return true;
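/* Worked example of the size check above (illustrative): with 64-bit
   pointers the cutoff is 4 * 64 / 8 == 32 bytes, so a 16-byte double
   _Complex passes, while anything wider than 32 bytes is skipped;
   addressable or by-reference decls are rejected regardless of size.  */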
12126 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12127 For an omp_shared_to_firstprivate_optimizable_decl_p DECL, mark it as
12128 GOVD_WRITTEN in outer contexts. */
12130 static void
12131 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12133 for (; ctx; ctx = ctx->outer_context)
12135 splay_tree_node n = splay_tree_lookup (ctx->variables,
12136 (splay_tree_key) decl);
12137 if (n == NULL)
12138 continue;
12139 else if (n->value & GOVD_SHARED)
12141 n->value |= GOVD_WRITTEN;
12142 return;
12144 else if (n->value & GOVD_DATA_SHARE_CLASS)
12145 return;
12149 /* Helper callback for walk_gimple_seq to discover possible stores
12150 to omp_shared_to_firstprivate_optimizable_decl_p decls, and to set
12151 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12152 context. */
12154 static tree
12155 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12157 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12159 *walk_subtrees = 0;
12160 if (!wi->is_lhs)
12161 return NULL_TREE;
12163 tree op = *tp;
12166 if (handled_component_p (op))
12167 op = TREE_OPERAND (op, 0);
12168 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12169 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12170 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12171 else
12172 break;
12174 while (1);
12175 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12176 return NULL_TREE;
12178 omp_mark_stores (gimplify_omp_ctxp, op);
12179 return NULL_TREE;
12182 /* Helper callback for walk_gimple_seq to discover possible stores
12183 to omp_shared_to_firstprivate_optimizable_decl_p decls, and to set
12184 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12185 context. */
12187 static tree
12188 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12189 bool *handled_ops_p,
12190 struct walk_stmt_info *wi)
12192 gimple *stmt = gsi_stmt (*gsi_p);
12193 switch (gimple_code (stmt))
12195 /* Don't recurse on OpenMP constructs for which
12196 gimplify_adjust_omp_clauses already handled the bodies,
12197 except that gimple_omp_for_pre_body still needs handling. */
12198 case GIMPLE_OMP_FOR:
12199 *handled_ops_p = true;
12200 if (gimple_omp_for_pre_body (stmt))
12201 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12202 omp_find_stores_stmt, omp_find_stores_op, wi);
12203 break;
12204 case GIMPLE_OMP_PARALLEL:
12205 case GIMPLE_OMP_TASK:
12206 case GIMPLE_OMP_SECTIONS:
12207 case GIMPLE_OMP_SINGLE:
12208 case GIMPLE_OMP_SCOPE:
12209 case GIMPLE_OMP_TARGET:
12210 case GIMPLE_OMP_TEAMS:
12211 case GIMPLE_OMP_CRITICAL:
12212 *handled_ops_p = true;
12213 break;
12214 default:
12215 break;
12217 return NULL_TREE;
12220 struct gimplify_adjust_omp_clauses_data
12222 tree *list_p;
12223 gimple_seq *pre_p;
12226 /* For all variables that were not actually used within the context,
12227 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12229 static int
12230 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12232 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12233 gimple_seq *pre_p
12234 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12235 tree decl = (tree) n->key;
12236 unsigned flags = n->value;
12237 enum omp_clause_code code;
12238 tree clause;
12239 bool private_debug;
12241 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12242 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12243 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12244 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12245 return 0;
12246 if ((flags & GOVD_SEEN) == 0)
12247 return 0;
12248 if (flags & GOVD_DEBUG_PRIVATE)
12250 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12251 private_debug = true;
12253 else if (flags & GOVD_MAP)
12254 private_debug = false;
12255 else
12256 private_debug
12257 = lang_hooks.decls.omp_private_debug_clause (decl,
12258 !!(flags & GOVD_SHARED));
12259 if (private_debug)
12260 code = OMP_CLAUSE_PRIVATE;
12261 else if (flags & GOVD_MAP)
12263 code = OMP_CLAUSE_MAP;
12264 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12265 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12267 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12268 return 0;
12270 if (VAR_P (decl)
12271 && DECL_IN_CONSTANT_POOL (decl)
12272 && !lookup_attribute ("omp declare target",
12273 DECL_ATTRIBUTES (decl)))
12275 tree id = get_identifier ("omp declare target");
12276 DECL_ATTRIBUTES (decl)
12277 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12278 varpool_node *node = varpool_node::get (decl);
12279 if (node)
12281 node->offloadable = 1;
12282 if (ENABLE_OFFLOADING)
12283 g->have_offload = true;
12287 else if (flags & GOVD_SHARED)
12289 if (is_global_var (decl))
12291 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12292 while (ctx != NULL)
12294 splay_tree_node on
12295 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12296 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12297 | GOVD_PRIVATE | GOVD_REDUCTION
12298 | GOVD_LINEAR | GOVD_MAP)) != 0)
12299 break;
12300 ctx = ctx->outer_context;
12302 if (ctx == NULL)
12303 return 0;
12305 code = OMP_CLAUSE_SHARED;
12306 /* Don't optimize shared into firstprivate for read-only vars
12307 on tasks with a depend clause; we shouldn't try to copy them
12308 until the dependencies are satisfied. */
12309 if (gimplify_omp_ctxp->has_depend)
12310 flags |= GOVD_WRITTEN;
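/* Illustrative: for a task such as

     #pragma omp task depend(in: y)

   a shared "x" that is only read in the task body still keeps
   GOVD_WRITTEN here, so it is not turned into a read-only
   firstprivate-style copy before the dependence on "y" is satisfied.  */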
12312 else if (flags & GOVD_PRIVATE)
12313 code = OMP_CLAUSE_PRIVATE;
12314 else if (flags & GOVD_FIRSTPRIVATE)
12316 code = OMP_CLAUSE_FIRSTPRIVATE;
12317 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12318 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12319 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12321 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12322 "%<target%> construct", decl);
12323 return 0;
12326 else if (flags & GOVD_LASTPRIVATE)
12327 code = OMP_CLAUSE_LASTPRIVATE;
12328 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12329 return 0;
12330 else if (flags & GOVD_CONDTEMP)
12332 code = OMP_CLAUSE__CONDTEMP_;
12333 gimple_add_tmp_var (decl);
12335 else
12336 gcc_unreachable ();
12338 if (((flags & GOVD_LASTPRIVATE)
12339 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12340 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12341 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12343 tree chain = *list_p;
12344 clause = build_omp_clause (input_location, code);
12345 OMP_CLAUSE_DECL (clause) = decl;
12346 OMP_CLAUSE_CHAIN (clause) = chain;
12347 if (private_debug)
12348 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12349 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12350 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12351 else if (code == OMP_CLAUSE_SHARED
12352 && (flags & GOVD_WRITTEN) == 0
12353 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12354 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12355 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12356 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12357 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12359 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12360 OMP_CLAUSE_DECL (nc) = decl;
12361 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12362 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12363 OMP_CLAUSE_DECL (clause)
12364 = build_simple_mem_ref_loc (input_location, decl);
12365 OMP_CLAUSE_DECL (clause)
12366 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12367 build_int_cst (build_pointer_type (char_type_node), 0));
12368 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12369 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12370 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12371 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12372 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12373 OMP_CLAUSE_CHAIN (nc) = chain;
12374 OMP_CLAUSE_CHAIN (clause) = nc;
12375 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12376 gimplify_omp_ctxp = ctx->outer_context;
12377 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12378 pre_p, NULL, is_gimple_val, fb_rvalue);
12379 gimplify_omp_ctxp = ctx;
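/* Illustrative result of the above: for a pointer "p" recorded as
   GOVD_MAP_0LEN_ARRAY, the clause pair is roughly a maybe-zero-length
   GOMP_MAP_ALLOC of the pointed-to storage with size zero, followed by
   a GOMP_MAP_FIRSTPRIVATE_POINTER for "p" itself.  */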
12381 else if (code == OMP_CLAUSE_MAP)
12383 int kind;
12384 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12385 switch (flags & (GOVD_MAP_TO_ONLY
12386 | GOVD_MAP_FORCE
12387 | GOVD_MAP_FORCE_PRESENT
12388 | GOVD_MAP_ALLOC_ONLY
12389 | GOVD_MAP_FROM_ONLY))
12391 case 0:
12392 kind = GOMP_MAP_TOFROM;
12393 break;
12394 case GOVD_MAP_FORCE:
12395 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12396 break;
12397 case GOVD_MAP_TO_ONLY:
12398 kind = GOMP_MAP_TO;
12399 break;
12400 case GOVD_MAP_FROM_ONLY:
12401 kind = GOMP_MAP_FROM;
12402 break;
12403 case GOVD_MAP_ALLOC_ONLY:
12404 kind = GOMP_MAP_ALLOC;
12405 break;
12406 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12407 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12408 break;
12409 case GOVD_MAP_FORCE_PRESENT:
12410 kind = GOMP_MAP_FORCE_PRESENT;
12411 break;
12412 default:
12413 gcc_unreachable ();
12415 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12416 /* Setting of the implicit flag for the runtime is currently disabled for
12417 OpenACC. */
12418 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12419 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12420 if (DECL_SIZE (decl)
12421 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12423 tree decl2 = DECL_VALUE_EXPR (decl);
12424 gcc_assert (INDIRECT_REF_P (decl2));
12425 decl2 = TREE_OPERAND (decl2, 0);
12426 gcc_assert (DECL_P (decl2));
12427 tree mem = build_simple_mem_ref (decl2);
12428 OMP_CLAUSE_DECL (clause) = mem;
12429 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12430 if (gimplify_omp_ctxp->outer_context)
12432 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12433 omp_notice_variable (ctx, decl2, true);
12434 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12436 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12437 OMP_CLAUSE_MAP);
12438 OMP_CLAUSE_DECL (nc) = decl;
12439 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12440 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12441 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12442 else
12443 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12444 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12445 OMP_CLAUSE_CHAIN (clause) = nc;
12447 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12448 && omp_privatize_by_reference (decl))
12450 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12451 OMP_CLAUSE_SIZE (clause)
12452 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12453 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12454 gimplify_omp_ctxp = ctx->outer_context;
12455 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12456 pre_p, NULL, is_gimple_val, fb_rvalue);
12457 gimplify_omp_ctxp = ctx;
12458 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12459 OMP_CLAUSE_MAP);
12460 OMP_CLAUSE_DECL (nc) = decl;
12461 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12462 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12463 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12464 OMP_CLAUSE_CHAIN (clause) = nc;
12466 else
12467 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12469 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12471 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12472 OMP_CLAUSE_DECL (nc) = decl;
12473 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12474 OMP_CLAUSE_CHAIN (nc) = chain;
12475 OMP_CLAUSE_CHAIN (clause) = nc;
12476 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12477 gimplify_omp_ctxp = ctx->outer_context;
12478 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12479 (ctx->region_type & ORT_ACC) != 0);
12480 gimplify_omp_ctxp = ctx;
12482 *list_p = clause;
12483 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12484 gimplify_omp_ctxp = ctx->outer_context;
12485 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12486 in simd. Those are only added for the local vars inside the simd body,
12487 and they don't need to be e.g. default constructible. */
12488 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12489 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12490 (ctx->region_type & ORT_ACC) != 0);
12491 if (gimplify_omp_ctxp)
12492 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12493 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12494 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12495 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12496 true);
12497 gimplify_omp_ctxp = ctx;
12498 return 0;
12501 static void
12502 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12503 enum tree_code code)
12505 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12506 tree *orig_list_p = list_p;
12507 tree c, decl;
12508 bool has_inscan_reductions = false;
12510 if (body)
12512 struct gimplify_omp_ctx *octx;
12513 for (octx = ctx; octx; octx = octx->outer_context)
12514 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12515 break;
12516 if (octx)
12518 struct walk_stmt_info wi;
12519 memset (&wi, 0, sizeof (wi));
12520 walk_gimple_seq (body, omp_find_stores_stmt,
12521 omp_find_stores_op, &wi);
12525 if (ctx->add_safelen1)
12527 /* If there are VLAs in the body of a simd loop, prevent
12528 vectorization. */
12529 gcc_assert (ctx->region_type == ORT_SIMD);
12530 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12531 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12532 OMP_CLAUSE_CHAIN (c) = *list_p;
12533 *list_p = c;
12534 list_p = &OMP_CLAUSE_CHAIN (c);
12537 if (ctx->region_type == ORT_WORKSHARE
12538 && ctx->outer_context
12539 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12541 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12542 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12543 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12545 decl = OMP_CLAUSE_DECL (c);
12546 splay_tree_node n
12547 = splay_tree_lookup (ctx->outer_context->variables,
12548 (splay_tree_key) decl);
12549 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12550 (splay_tree_key) decl));
12551 omp_add_variable (ctx, decl, n->value);
12552 tree c2 = copy_node (c);
12553 OMP_CLAUSE_CHAIN (c2) = *list_p;
12554 *list_p = c2;
12555 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12556 continue;
12557 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12558 OMP_CLAUSE_FIRSTPRIVATE);
12559 OMP_CLAUSE_DECL (c2) = decl;
12560 OMP_CLAUSE_CHAIN (c2) = *list_p;
12561 *list_p = c2;
12565 tree attach_list = NULL_TREE;
12566 tree *attach_tail = &attach_list;
12568 while ((c = *list_p) != NULL)
12570 splay_tree_node n;
12571 bool remove = false;
12572 bool move_attach = false;
12574 switch (OMP_CLAUSE_CODE (c))
12576 case OMP_CLAUSE_FIRSTPRIVATE:
12577 if ((ctx->region_type & ORT_TARGET)
12578 && (ctx->region_type & ORT_ACC) == 0
12579 && TYPE_ATOMIC (strip_array_types
12580 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12582 error_at (OMP_CLAUSE_LOCATION (c),
12583 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12584 "%<target%> construct", OMP_CLAUSE_DECL (c));
12585 remove = true;
12586 break;
12588 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12590 decl = OMP_CLAUSE_DECL (c);
12591 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12592 if ((n->value & GOVD_MAP) != 0)
12594 remove = true;
12595 break;
12597 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12598 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12600 /* FALLTHRU */
12601 case OMP_CLAUSE_PRIVATE:
12602 case OMP_CLAUSE_SHARED:
12603 case OMP_CLAUSE_LINEAR:
12604 decl = OMP_CLAUSE_DECL (c);
12605 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12606 remove = !(n->value & GOVD_SEEN);
12607 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12608 && code == OMP_PARALLEL
12609 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12610 remove = true;
12611 if (! remove)
12613 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12614 if ((n->value & GOVD_DEBUG_PRIVATE)
12615 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12617 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12618 || ((n->value & GOVD_DATA_SHARE_CLASS)
12619 == GOVD_SHARED));
12620 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12621 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12623 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12624 && ctx->has_depend
12625 && DECL_P (decl))
12626 n->value |= GOVD_WRITTEN;
12627 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12628 && (n->value & GOVD_WRITTEN) == 0
12629 && DECL_P (decl)
12630 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12631 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12632 else if (DECL_P (decl)
12633 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12634 && (n->value & GOVD_WRITTEN) != 0)
12635 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12636 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12637 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12638 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12640 else
12641 n->value &= ~GOVD_EXPLICIT;
12642 break;
12644 case OMP_CLAUSE_LASTPRIVATE:
12645 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12646 accurately reflect the presence of a FIRSTPRIVATE clause. */
12647 decl = OMP_CLAUSE_DECL (c);
12648 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12649 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12650 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12651 if (code == OMP_DISTRIBUTE
12652 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12654 remove = true;
12655 error_at (OMP_CLAUSE_LOCATION (c),
12656 "same variable used in %<firstprivate%> and "
12657 "%<lastprivate%> clauses on %<distribute%> "
12658 "construct");
12660 if (!remove
12661 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12662 && DECL_P (decl)
12663 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12664 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12665 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12666 remove = true;
12667 break;
12669 case OMP_CLAUSE_ALIGNED:
12670 decl = OMP_CLAUSE_DECL (c);
12671 if (!is_global_var (decl))
12673 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12674 remove = n == NULL || !(n->value & GOVD_SEEN);
12675 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12677 struct gimplify_omp_ctx *octx;
12678 if (n != NULL
12679 && (n->value & (GOVD_DATA_SHARE_CLASS
12680 & ~GOVD_FIRSTPRIVATE)))
12681 remove = true;
12682 else
12683 for (octx = ctx->outer_context; octx;
12684 octx = octx->outer_context)
12686 n = splay_tree_lookup (octx->variables,
12687 (splay_tree_key) decl);
12688 if (n == NULL)
12689 continue;
12690 if (n->value & GOVD_LOCAL)
12691 break;
12692 /* We have to avoid assigning a shared variable
12693 to itself when trying to add
12694 __builtin_assume_aligned. */
12695 if (n->value & GOVD_SHARED)
12697 remove = true;
12698 break;
12703 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12705 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12706 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12707 remove = true;
12709 break;
12711 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12712 decl = OMP_CLAUSE_DECL (c);
12713 while (INDIRECT_REF_P (decl)
12714 || TREE_CODE (decl) == ARRAY_REF)
12715 decl = TREE_OPERAND (decl, 0);
12716 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12717 remove = n == NULL || !(n->value & GOVD_SEEN);
12718 break;
12720 case OMP_CLAUSE_IS_DEVICE_PTR:
12721 case OMP_CLAUSE_NONTEMPORAL:
12722 decl = OMP_CLAUSE_DECL (c);
12723 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12724 remove = n == NULL || !(n->value & GOVD_SEEN);
12725 break;
12727 case OMP_CLAUSE_MAP:
12728 if (code == OMP_TARGET_EXIT_DATA
12729 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12731 remove = true;
12732 break;
12734 /* If we have a target region, we can push all the attaches to the
12735 end of the list (we may have standalone "attach" operations
12736 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12737 the attachment point AND the pointed-to block have been mapped).
12738 If we have something else, e.g. "enter data", we need to keep
12739 "attach" nodes together with the previous node they attach to so
12740 that separate "exit data" operations work properly (see
12741 libgomp/target.c). */
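/* Illustrative: map(to: s.p[0:n]) on "omp target" can expand into a
   GOMP_MAP_STRUCT node plus a trailing attach; on a target region that
   attach may be flagged below (move_attach) and moved to the end of
   the clause list, while on "enter data" it must stay adjacent to the
   node it attaches to.  */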
12742 if ((ctx->region_type & ORT_TARGET) != 0
12743 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12744 || (OMP_CLAUSE_MAP_KIND (c)
12745 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12746 move_attach = true;
12747 decl = OMP_CLAUSE_DECL (c);
12748 /* Data clauses associated with reductions must be
12749 compatible with present_or_copy. Warn and adjust the clause
12750 if that is not the case. */
12751 if (ctx->region_type == ORT_ACC_PARALLEL
12752 || ctx->region_type == ORT_ACC_SERIAL)
12754 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12755 n = NULL;
12757 if (DECL_P (t))
12758 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12760 if (n && (n->value & GOVD_REDUCTION))
12762 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12764 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12765 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12766 && kind != GOMP_MAP_FORCE_PRESENT
12767 && kind != GOMP_MAP_POINTER)
12769 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12770 "incompatible data clause with reduction "
12771 "on %qE; promoting to %<present_or_copy%>",
12772 DECL_NAME (t));
12773 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
12777 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12778 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12780 remove = true;
12781 break;
12783 if (!DECL_P (decl))
12785 if ((ctx->region_type & ORT_TARGET) != 0
12786 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12788 if (INDIRECT_REF_P (decl)
12789 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12790 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12791 == REFERENCE_TYPE))
12792 decl = TREE_OPERAND (decl, 0);
12793 if (TREE_CODE (decl) == COMPONENT_REF)
12795 while (TREE_CODE (decl) == COMPONENT_REF)
12796 decl = TREE_OPERAND (decl, 0);
12797 if (DECL_P (decl))
12799 n = splay_tree_lookup (ctx->variables,
12800 (splay_tree_key) decl);
12801 if (!(n->value & GOVD_SEEN))
12802 remove = true;
12806 break;
12808 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12809 if ((ctx->region_type & ORT_TARGET) != 0
12810 && !(n->value & GOVD_SEEN)
12811 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12812 && (!is_global_var (decl)
12813 || !lookup_attribute ("omp declare target link",
12814 DECL_ATTRIBUTES (decl))))
12816 remove = true;
12817 /* For struct element mapping, if the struct is never referenced
12818 in the target block and none of the mappings has an always
12819 modifier, remove all the struct element mappings, which
12820 immediately follow the GOMP_MAP_STRUCT map clause. */
12821 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12823 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12824 while (cnt--)
12825 OMP_CLAUSE_CHAIN (c)
12826 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
12829 else if (DECL_SIZE (decl)
12830 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
12831 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
12832 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
12833 && (OMP_CLAUSE_MAP_KIND (c)
12834 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12836 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12837 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12838 INTEGER_CST. */
12839 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
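/* Illustrative: for a VLA "int a[n]" mapped on "omp target",
   DECL_VALUE_EXPR (a) is an indirection through an artificial pointer
   (call it "a.ptr"; the name is illustrative), so the clause is
   rewritten below to map the pointed-to storage with the runtime size,
   plus a pointer or firstprivate-pointer node for the base.  */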
12841 tree decl2 = DECL_VALUE_EXPR (decl);
12842 gcc_assert (INDIRECT_REF_P (decl2));
12843 decl2 = TREE_OPERAND (decl2, 0);
12844 gcc_assert (DECL_P (decl2));
12845 tree mem = build_simple_mem_ref (decl2);
12846 OMP_CLAUSE_DECL (c) = mem;
12847 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12848 if (ctx->outer_context)
12850 omp_notice_variable (ctx->outer_context, decl2, true);
12851 omp_notice_variable (ctx->outer_context,
12852 OMP_CLAUSE_SIZE (c), true);
12854 if (((ctx->region_type & ORT_TARGET) != 0
12855 || !ctx->target_firstprivatize_array_bases)
12856 && ((n->value & GOVD_SEEN) == 0
12857 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
12859 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12860 OMP_CLAUSE_MAP);
12861 OMP_CLAUSE_DECL (nc) = decl;
12862 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12863 if (ctx->target_firstprivatize_array_bases)
12864 OMP_CLAUSE_SET_MAP_KIND (nc,
12865 GOMP_MAP_FIRSTPRIVATE_POINTER);
12866 else
12867 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12868 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
12869 OMP_CLAUSE_CHAIN (c) = nc;
12870 c = nc;
12873 else
12875 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12876 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12877 gcc_assert ((n->value & GOVD_SEEN) == 0
12878 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12879 == 0));
12881 break;
12883 case OMP_CLAUSE_TO:
12884 case OMP_CLAUSE_FROM:
12885 case OMP_CLAUSE__CACHE_:
12886 decl = OMP_CLAUSE_DECL (c);
12887 if (!DECL_P (decl))
12888 break;
12889 if (DECL_SIZE (decl)
12890 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12892 tree decl2 = DECL_VALUE_EXPR (decl);
12893 gcc_assert (INDIRECT_REF_P (decl2));
12894 decl2 = TREE_OPERAND (decl2, 0);
12895 gcc_assert (DECL_P (decl2));
12896 tree mem = build_simple_mem_ref (decl2);
12897 OMP_CLAUSE_DECL (c) = mem;
12898 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12899 if (ctx->outer_context)
12901 omp_notice_variable (ctx->outer_context, decl2, true);
12902 omp_notice_variable (ctx->outer_context,
12903 OMP_CLAUSE_SIZE (c), true);
12906 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12907 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12908 break;
12910 case OMP_CLAUSE_REDUCTION:
12911 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12913 decl = OMP_CLAUSE_DECL (c);
12914 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12915 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
12917 remove = true;
12918 error_at (OMP_CLAUSE_LOCATION (c),
12919 "%qD specified in %<inscan%> %<reduction%> clause "
12920 "but not in %<scan%> directive clause", decl);
12921 break;
12923 has_inscan_reductions = true;
12925 /* FALLTHRU */
12926 case OMP_CLAUSE_IN_REDUCTION:
12927 case OMP_CLAUSE_TASK_REDUCTION:
12928 decl = OMP_CLAUSE_DECL (c);
12929 /* OpenACC reductions need a present_or_copy data clause.
12930 	     Add one if necessary.  Emit an error when the reduction is private.  */
12931 if (ctx->region_type == ORT_ACC_PARALLEL
12932 || ctx->region_type == ORT_ACC_SERIAL)
12934 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12935 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12937 remove = true;
12938 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
12939 "reduction on %qE", DECL_NAME (decl));
12941 else if ((n->value & GOVD_MAP) == 0)
12943 tree next = OMP_CLAUSE_CHAIN (c);
12944 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
12945 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
12946 OMP_CLAUSE_DECL (nc) = decl;
12947 OMP_CLAUSE_CHAIN (c) = nc;
12948 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12949 (ctx->region_type
12950 & ORT_ACC) != 0);
12951 while (1)
12953 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
12954 if (OMP_CLAUSE_CHAIN (nc) == NULL)
12955 break;
12956 nc = OMP_CLAUSE_CHAIN (nc);
12958 OMP_CLAUSE_CHAIN (nc) = next;
12959 n->value |= GOVD_MAP;
12962 if (DECL_P (decl)
12963 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12964 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12965 break;
12967 case OMP_CLAUSE_ALLOCATE:
12968 decl = OMP_CLAUSE_DECL (c);
12969 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12970 if (n != NULL && !(n->value & GOVD_SEEN))
12972 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
12973 != 0
12974 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
12975 remove = true;
12977 if (!remove
12978 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12979 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
12980 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
12981 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
12982 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
12984 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12985 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
12986 if (n == NULL)
12988 enum omp_clause_default_kind default_kind
12989 = ctx->default_kind;
12990 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
12991 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12992 true);
12993 ctx->default_kind = default_kind;
12995 else
12996 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12997 true);
12999 break;
13001 case OMP_CLAUSE_COPYIN:
13002 case OMP_CLAUSE_COPYPRIVATE:
13003 case OMP_CLAUSE_IF:
13004 case OMP_CLAUSE_NUM_THREADS:
13005 case OMP_CLAUSE_NUM_TEAMS:
13006 case OMP_CLAUSE_THREAD_LIMIT:
13007 case OMP_CLAUSE_DIST_SCHEDULE:
13008 case OMP_CLAUSE_DEVICE:
13009 case OMP_CLAUSE_SCHEDULE:
13010 case OMP_CLAUSE_NOWAIT:
13011 case OMP_CLAUSE_ORDERED:
13012 case OMP_CLAUSE_DEFAULT:
13013 case OMP_CLAUSE_UNTIED:
13014 case OMP_CLAUSE_COLLAPSE:
13015 case OMP_CLAUSE_FINAL:
13016 case OMP_CLAUSE_MERGEABLE:
13017 case OMP_CLAUSE_PROC_BIND:
13018 case OMP_CLAUSE_SAFELEN:
13019 case OMP_CLAUSE_SIMDLEN:
13020 case OMP_CLAUSE_DEPEND:
13021 case OMP_CLAUSE_DOACROSS:
13022 case OMP_CLAUSE_PRIORITY:
13023 case OMP_CLAUSE_GRAINSIZE:
13024 case OMP_CLAUSE_NUM_TASKS:
13025 case OMP_CLAUSE_NOGROUP:
13026 case OMP_CLAUSE_THREADS:
13027 case OMP_CLAUSE_SIMD:
13028 case OMP_CLAUSE_FILTER:
13029 case OMP_CLAUSE_HINT:
13030 case OMP_CLAUSE_DEFAULTMAP:
13031 case OMP_CLAUSE_ORDER:
13032 case OMP_CLAUSE_BIND:
13033 case OMP_CLAUSE_DETACH:
13034 case OMP_CLAUSE_USE_DEVICE_PTR:
13035 case OMP_CLAUSE_USE_DEVICE_ADDR:
13036 case OMP_CLAUSE_ASYNC:
13037 case OMP_CLAUSE_WAIT:
13038 case OMP_CLAUSE_INDEPENDENT:
13039 case OMP_CLAUSE_NUM_GANGS:
13040 case OMP_CLAUSE_NUM_WORKERS:
13041 case OMP_CLAUSE_VECTOR_LENGTH:
13042 case OMP_CLAUSE_GANG:
13043 case OMP_CLAUSE_WORKER:
13044 case OMP_CLAUSE_VECTOR:
13045 case OMP_CLAUSE_AUTO:
13046 case OMP_CLAUSE_SEQ:
13047 case OMP_CLAUSE_TILE:
13048 case OMP_CLAUSE_IF_PRESENT:
13049 case OMP_CLAUSE_FINALIZE:
13050 case OMP_CLAUSE_INCLUSIVE:
13051 case OMP_CLAUSE_EXCLUSIVE:
13052 break;
13054 case OMP_CLAUSE_NOHOST:
13055 default:
13056 gcc_unreachable ();
13059 if (remove)
13060 *list_p = OMP_CLAUSE_CHAIN (c);
13061 else if (move_attach)
13063 	  /* Remove the attach node from here and separate it out into its own list.  */
13064 *attach_tail = c;
13065 *list_p = OMP_CLAUSE_CHAIN (c);
13066 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13067 attach_tail = &OMP_CLAUSE_CHAIN (c);
13069 else
13070 list_p = &OMP_CLAUSE_CHAIN (c);
13073 /* Splice attach nodes at the end of the list. */
13074 if (attach_list)
13076 *list_p = attach_list;
13077 list_p = attach_tail;
13080 /* Add in any implicit data sharing. */
13081 struct gimplify_adjust_omp_clauses_data data;
13082 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13084       /* OpenMP.  Implicit clauses are added at the start of the clause list,
13085 	 immediately before the first map clause (i.e. after any leading non-map clauses).  */
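      /* E.g. given "num_threads (4) map (a) map (b)", implicit clauses are
	 inserted between num_threads (4) and map (a) (an illustration, not
	 from the source).  */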
13086 tree *implicit_add_list_p = orig_list_p;
13087 while (*implicit_add_list_p
13088 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13089 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13090 data.list_p = implicit_add_list_p;
13092 else
13093 /* OpenACC. */
13094 data.list_p = list_p;
13095 data.pre_p = pre_p;
13096 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13098 if (has_inscan_reductions)
13099 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13100 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13101 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13103 error_at (OMP_CLAUSE_LOCATION (c),
13104 "%<inscan%> %<reduction%> clause used together with "
13105 "%<linear%> clause for a variable other than loop "
13106 "iterator");
13107 break;
13110 gimplify_omp_ctxp = ctx->outer_context;
13111 delete_omp_context (ctx);
13114 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
13115    -1 if it is not known yet (simd is involved; it won't be known until
13116    vectorization) and 1 if they do match.  If SCORES is non-NULL, it should
13117    point to an array of at least 2*NCONSTRUCTS+2 ints and will be filled
13118    with the positions of the CONSTRUCTS (position -1 if one will never
13119    match) followed by the number of constructs in the OpenMP context
13120    construct trait.  If the score depends on whether it will be in a
13121    declare simd clone or not, the function returns 2 and there will be
13122    two sets of scores, the first for the case that it is not in a declare
13123    simd clone and the other for the case that it is.  */
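/* Illustration (not from the source): with gimplification nested inside
     #pragma omp target
     #pragma omp parallel
     #pragma omp for
   a query for the construct selector (target, parallel, for) matches and,
   with SCORES being NULL, 1 is returned; had OMP_SIMD been among the
   matched constructs, the result would be -1 instead, because whether a
   simd region survives is only known after vectorization.  */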
13126 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13127 int *scores)
13129 int matched = 0, cnt = 0;
13130 bool simd_seen = false;
13131 bool target_seen = false;
13132 int declare_simd_cnt = -1;
13133 auto_vec<enum tree_code, 16> codes;
13134 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13136 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13137 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13138 == ORT_TARGET && ctx->code == OMP_TARGET)
13139 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13140 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13141 || (ctx->region_type == ORT_SIMD
13142 && ctx->code == OMP_SIMD
13143 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13145 ++cnt;
13146 if (scores)
13147 codes.safe_push (ctx->code);
13148 else if (matched < nconstructs && ctx->code == constructs[matched])
13150 if (ctx->code == OMP_SIMD)
13152 if (matched)
13153 return 0;
13154 simd_seen = true;
13156 ++matched;
13158 if (ctx->code == OMP_TARGET)
13160 if (scores == NULL)
13161 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13162 target_seen = true;
13163 break;
13166 else if (ctx->region_type == ORT_WORKSHARE
13167 && ctx->code == OMP_LOOP
13168 && ctx->outer_context
13169 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13170 && ctx->outer_context->outer_context
13171 && ctx->outer_context->outer_context->code == OMP_LOOP
13172 && ctx->outer_context->outer_context->distribute)
13173 ctx = ctx->outer_context->outer_context;
13174 ctx = ctx->outer_context;
13176 if (!target_seen
13177 && lookup_attribute ("omp declare simd",
13178 DECL_ATTRIBUTES (current_function_decl)))
13180       /* Declare simd is a "maybe" case: it is supposed to be added only to
13181 	 the clones created by omp-simd-clone.cc and not to the base function.  */
13182 declare_simd_cnt = cnt++;
13183 if (scores)
13184 codes.safe_push (OMP_SIMD);
13185 else if (cnt == 0
13186 && constructs[0] == OMP_SIMD)
13188 gcc_assert (matched == 0);
13189 simd_seen = true;
13190 if (++matched == nconstructs)
13191 return -1;
13194 if (tree attr = lookup_attribute ("omp declare variant variant",
13195 DECL_ATTRIBUTES (current_function_decl)))
13197 enum tree_code variant_constructs[5];
13198 int variant_nconstructs = 0;
13199 if (!target_seen)
13200 variant_nconstructs
13201 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13202 variant_constructs);
13203 for (int i = 0; i < variant_nconstructs; i++)
13205 ++cnt;
13206 if (scores)
13207 codes.safe_push (variant_constructs[i]);
13208 else if (matched < nconstructs
13209 && variant_constructs[i] == constructs[matched])
13211 if (variant_constructs[i] == OMP_SIMD)
13213 if (matched)
13214 return 0;
13215 simd_seen = true;
13217 ++matched;
13221 if (!target_seen
13222 && lookup_attribute ("omp declare target block",
13223 DECL_ATTRIBUTES (current_function_decl)))
13225 if (scores)
13226 codes.safe_push (OMP_TARGET);
13227 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13228 ++matched;
13230 if (scores)
13232 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13234 int j = codes.length () - 1;
13235 for (int i = nconstructs - 1; i >= 0; i--)
13237 while (j >= 0
13238 && (pass != 0 || declare_simd_cnt != j)
13239 && constructs[i] != codes[j])
13240 --j;
13241 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13242 *scores++ = j - 1;
13243 else
13244 *scores++ = j;
13246 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13247 ? codes.length () - 1 : codes.length ());
13249 return declare_simd_cnt == -1 ? 1 : 2;
13251 if (matched == nconstructs)
13252 return simd_seen ? -1 : 1;
13253 return 0;
13256 /* Gimplify OACC_CACHE. */
13258 static void
13259 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13261 tree expr = *expr_p;
13263 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13264 OACC_CACHE);
13265 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13266 OACC_CACHE);
13268 /* TODO: Do something sensible with this information. */
13270 *expr_p = NULL_TREE;
13273 /* Helper function of gimplify_oacc_declare.  If required, it translates
13274    the 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind.  The
13275    entry kind replaces the one in CLAUSE, while the exit kind is used in
13276    a new omp_clause that is returned to the caller.  */
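/* For example (a sketch, not an exhaustive table): GOMP_MAP_TOFROM in
   CLAUSE becomes GOMP_MAP_TO for the entry and a new clause with
   GOMP_MAP_FROM is returned for the exit, while for GOMP_MAP_ALLOC the
   entry kind is kept and a GOMP_MAP_RELEASE clause is returned.  */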
13278 static tree
13279 gimplify_oacc_declare_1 (tree clause)
13281 HOST_WIDE_INT kind, new_op;
13282 bool ret = false;
13283 tree c = NULL;
13285 kind = OMP_CLAUSE_MAP_KIND (clause);
13287 switch (kind)
13289 case GOMP_MAP_ALLOC:
13290 new_op = GOMP_MAP_RELEASE;
13291 ret = true;
13292 break;
13294 case GOMP_MAP_FROM:
13295 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13296 new_op = GOMP_MAP_FROM;
13297 ret = true;
13298 break;
13300 case GOMP_MAP_TOFROM:
13301 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13302 new_op = GOMP_MAP_FROM;
13303 ret = true;
13304 break;
13306 case GOMP_MAP_DEVICE_RESIDENT:
13307 case GOMP_MAP_FORCE_DEVICEPTR:
13308 case GOMP_MAP_FORCE_PRESENT:
13309 case GOMP_MAP_LINK:
13310 case GOMP_MAP_POINTER:
13311 case GOMP_MAP_TO:
13312 break;
13314 default:
13315 gcc_unreachable ();
13316 break;
13319 if (ret)
13321 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13322 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13323 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13326 return c;
13329 /* Gimplify OACC_DECLARE. */
13331 static void
13332 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13334 tree expr = *expr_p;
13335 gomp_target *stmt;
13336 tree clauses, t, decl;
13338 clauses = OACC_DECLARE_CLAUSES (expr);
13340 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13341 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13343 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13345 decl = OMP_CLAUSE_DECL (t);
13347 if (TREE_CODE (decl) == MEM_REF)
13348 decl = TREE_OPERAND (decl, 0);
13350 if (VAR_P (decl) && !is_oacc_declared (decl))
13352 tree attr = get_identifier ("oacc declare target");
13353 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13354 DECL_ATTRIBUTES (decl));
13357 if (VAR_P (decl)
13358 && !is_global_var (decl)
13359 && DECL_CONTEXT (decl) == current_function_decl)
13361 tree c = gimplify_oacc_declare_1 (t);
13362 if (c)
13364 if (oacc_declare_returns == NULL)
13365 oacc_declare_returns = new hash_map<tree, tree>;
13367 oacc_declare_returns->put (decl, c);
13371 if (gimplify_omp_ctxp)
13372 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13375 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13376 clauses);
13378 gimplify_seq_add_stmt (pre_p, stmt);
13380 *expr_p = NULL_TREE;
13383 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13384 gimplification of the body, as well as scanning the body for used
13385 variables. We need to do this scan now, because variable-sized
13386 decls will be decomposed during gimplification. */
13388 static void
13389 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13391 tree expr = *expr_p;
13392 gimple *g;
13393 gimple_seq body = NULL;
13395 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13396 OMP_PARALLEL_COMBINED (expr)
13397 ? ORT_COMBINED_PARALLEL
13398 : ORT_PARALLEL, OMP_PARALLEL);
13400 push_gimplify_context ();
13402 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13403 if (gimple_code (g) == GIMPLE_BIND)
13404 pop_gimplify_context (g);
13405 else
13406 pop_gimplify_context (NULL);
13408 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13409 OMP_PARALLEL);
13411 g = gimple_build_omp_parallel (body,
13412 OMP_PARALLEL_CLAUSES (expr),
13413 NULL_TREE, NULL_TREE);
13414 if (OMP_PARALLEL_COMBINED (expr))
13415 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13416 gimplify_seq_add_stmt (pre_p, g);
13417 *expr_p = NULL_TREE;
13420 /* Gimplify the contents of an OMP_TASK statement. This involves
13421 gimplification of the body, as well as scanning the body for used
13422 variables. We need to do this scan now, because variable-sized
13423 decls will be decomposed during gimplification. */
13425 static void
13426 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13428 tree expr = *expr_p;
13429 gimple *g;
13430 gimple_seq body = NULL;
13431 bool nowait = false;
13432 bool has_depend = false;
13434 if (OMP_TASK_BODY (expr) == NULL_TREE)
13436 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13437 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13439 has_depend = true;
13440 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13442 error_at (OMP_CLAUSE_LOCATION (c),
13443 "%<mutexinoutset%> kind in %<depend%> clause on a "
13444 "%<taskwait%> construct");
13445 break;
13448 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13449 nowait = true;
13450 if (nowait && !has_depend)
13452 error_at (EXPR_LOCATION (expr),
13453 "%<taskwait%> construct with %<nowait%> clause but no "
13454 "%<depend%> clauses");
13455 *expr_p = NULL_TREE;
13456 return;
13460 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13461 omp_find_clause (OMP_TASK_CLAUSES (expr),
13462 OMP_CLAUSE_UNTIED)
13463 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13465 if (OMP_TASK_BODY (expr))
13467 push_gimplify_context ();
13469 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13470 if (gimple_code (g) == GIMPLE_BIND)
13471 pop_gimplify_context (g);
13472 else
13473 pop_gimplify_context (NULL);
13476 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13477 OMP_TASK);
13479 g = gimple_build_omp_task (body,
13480 OMP_TASK_CLAUSES (expr),
13481 NULL_TREE, NULL_TREE,
13482 NULL_TREE, NULL_TREE, NULL_TREE);
13483 if (OMP_TASK_BODY (expr) == NULL_TREE)
13484 gimple_omp_task_set_taskwait_p (g, true);
13485 gimplify_seq_add_stmt (pre_p, g);
13486 *expr_p = NULL_TREE;
13489 /* Helper function for gimplify_omp_for.  If *TP is not a gimple constant,
13490    force it into a temporary initialized in PRE_P and add a firstprivate
13491    clause for it to ORIG_FOR_STMT.  */
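/* E.g. for a taskloop whose end expression is N * 2, the multiplication
   is evaluated into a temporary ahead of the construct and that temporary
   is made firstprivate on the taskloop (an illustration, not from the
   source).  */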
13493 static void
13494 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13495 tree orig_for_stmt)
13497 if (*tp == NULL || is_gimple_constant (*tp))
13498 return;
13500 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13501   /* A reference-to-pointer conversion is considered useless,
13502      but it is significant for the firstprivate clause.  Force
13503      it here.  */
13504 if (type
13505 && TREE_CODE (type) == POINTER_TYPE
13506 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13508 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13509 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13510 gimplify_and_add (m, pre_p);
13511 *tp = v;
13514 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13515 OMP_CLAUSE_DECL (c) = *tp;
13516 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13517 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13520 /* Helper function of gimplify_omp_for: find an OMP_ORDERED with a
13521    null OMP_ORDERED_BODY inside of OMP_FOR's body.  */
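/* Reading aid (not from the source): a stand-alone
   "#pragma omp ordered depend (...)" or "... doacross (...)" directive has
   a null OMP_ORDERED_BODY, so a match tells the caller that the enclosing
   ordered loop is a doacross loop; simd, parallel and target bodies are
   deliberately not walked into.  */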
13523 static tree
13524 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13526 switch (TREE_CODE (*tp))
13528 case OMP_ORDERED:
13529 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13530 return *tp;
13531 break;
13532 case OMP_SIMD:
13533 case OMP_PARALLEL:
13534 case OMP_TARGET:
13535 *walk_subtrees = 0;
13536 break;
13537 default:
13538 break;
13540 return NULL_TREE;
13543 /* Gimplify the gross structure of an OMP_FOR statement. */
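/* Reading aid (not from the source): this covers OMP_FOR, OMP_SIMD,
   OMP_DISTRIBUTE, OMP_TASKLOOP and OACC_LOOP; a NULL_TREE OMP_FOR_INIT
   marks the outer directive of a combined construct (e.g. distribute
   parallel for), whose actual loop is found inside its body.  */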
13545 static enum gimplify_status
13546 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13548 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13549 enum gimplify_status ret = GS_ALL_DONE;
13550 enum gimplify_status tret;
13551 gomp_for *gfor;
13552 gimple_seq for_body, for_pre_body;
13553 int i;
13554 bitmap has_decl_expr = NULL;
13555 enum omp_region_type ort = ORT_WORKSHARE;
13556 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13558 orig_for_stmt = for_stmt = *expr_p;
13560 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13561 != NULL_TREE);
13562 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13564 tree *data[4] = { NULL, NULL, NULL, NULL };
13565 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13566 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13567 find_combined_omp_for, data, NULL);
13568 if (inner_for_stmt == NULL_TREE)
13570 gcc_assert (seen_error ());
13571 *expr_p = NULL_TREE;
13572 return GS_ERROR;
13574 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13576 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13577 &OMP_FOR_PRE_BODY (for_stmt));
13578 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13580 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13582 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13583 &OMP_FOR_PRE_BODY (for_stmt));
13584 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13587 if (data[0])
13589 /* We have some statements or variable declarations in between
13590 the composite construct directives. Move them around the
13591 inner_for_stmt. */
13592 data[0] = expr_p;
13593 for (i = 0; i < 3; i++)
13594 if (data[i])
13596 tree t = *data[i];
13597 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13598 data[i + 1] = data[i];
13599 *data[i] = OMP_BODY (t);
13600 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13601 NULL_TREE, make_node (BLOCK));
13602 OMP_BODY (t) = body;
13603 append_to_statement_list_force (inner_for_stmt,
13604 &BIND_EXPR_BODY (body));
13605 *data[3] = t;
13606 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13607 gcc_assert (*data[3] == inner_for_stmt);
13609 return GS_OK;
13612 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13613 if (!loop_p
13614 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13615 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13616 i)) == TREE_LIST
13617 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13618 i)))
13620 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13621 /* Class iterators aren't allowed on OMP_SIMD, so the only
13622 case we need to solve is distribute parallel for. They are
13623 allowed on the loop construct, but that is already handled
13624 in gimplify_omp_loop. */
13625 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13626 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13627 && data[1]);
13628 tree orig_decl = TREE_PURPOSE (orig);
13629 tree last = TREE_VALUE (orig);
13630 tree *pc;
13631 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13632 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13633 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13634 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13635 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13636 break;
13637 if (*pc == NULL_TREE)
13639 tree *spc;
13640 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13641 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13642 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13643 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13644 break;
13645 if (*spc)
13647 tree c = *spc;
13648 *spc = OMP_CLAUSE_CHAIN (c);
13649 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13650 *pc = c;
13653 if (*pc == NULL_TREE)
13655 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13657 	      /* A private clause will appear only on inner_for_stmt.
13658 		 Change it into firstprivate, and add a private clause
13659 		 on for_stmt.  */
13660 tree c = copy_node (*pc);
13661 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13662 OMP_FOR_CLAUSES (for_stmt) = c;
13663 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13664 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13666 else
13668 	      /* A lastprivate clause will appear on both inner_for_stmt
13669 		 and for_stmt.  Add a firstprivate clause to
13670 		 inner_for_stmt.  */
13671 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13672 OMP_CLAUSE_FIRSTPRIVATE);
13673 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13674 OMP_CLAUSE_CHAIN (c) = *pc;
13675 *pc = c;
13676 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13678 tree c = build_omp_clause (UNKNOWN_LOCATION,
13679 OMP_CLAUSE_FIRSTPRIVATE);
13680 OMP_CLAUSE_DECL (c) = last;
13681 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13682 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13683 c = build_omp_clause (UNKNOWN_LOCATION,
13684 *pc ? OMP_CLAUSE_SHARED
13685 : OMP_CLAUSE_FIRSTPRIVATE);
13686 OMP_CLAUSE_DECL (c) = orig_decl;
13687 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13688 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13690       /* Similarly, take care of the C++ range-for temporaries; those should
13691 	 be firstprivate on the OMP_PARALLEL if there is one.  */
13692 if (data[1])
13693 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13694 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13695 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13696 i)) == TREE_LIST
13697 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13698 i)))
13700 tree orig
13701 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13702 tree v = TREE_CHAIN (orig);
13703 tree c = build_omp_clause (UNKNOWN_LOCATION,
13704 OMP_CLAUSE_FIRSTPRIVATE);
13705 	    /* First add a firstprivate clause for the __for_end artificial
13706 	       decl.  */
13707 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13708 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13709 == REFERENCE_TYPE)
13710 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13711 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13712 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13713 if (TREE_VEC_ELT (v, 0))
13715 		/* And now the same for the __for_range artificial decl if it
13716 		   exists.  */
13717 c = build_omp_clause (UNKNOWN_LOCATION,
13718 OMP_CLAUSE_FIRSTPRIVATE);
13719 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13720 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13721 == REFERENCE_TYPE)
13722 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13723 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13724 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13729 switch (TREE_CODE (for_stmt))
13731 case OMP_FOR:
13732 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13734 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13735 OMP_CLAUSE_SCHEDULE))
13736 error_at (EXPR_LOCATION (for_stmt),
13737 "%qs clause may not appear on non-rectangular %qs",
13738 "schedule", lang_GNU_Fortran () ? "do" : "for");
13739 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13740 error_at (EXPR_LOCATION (for_stmt),
13741 "%qs clause may not appear on non-rectangular %qs",
13742 "ordered", lang_GNU_Fortran () ? "do" : "for");
13744 break;
13745 case OMP_DISTRIBUTE:
13746 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13747 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13748 OMP_CLAUSE_DIST_SCHEDULE))
13749 error_at (EXPR_LOCATION (for_stmt),
13750 "%qs clause may not appear on non-rectangular %qs",
13751 "dist_schedule", "distribute");
13752 break;
13753 case OACC_LOOP:
13754 ort = ORT_ACC;
13755 break;
13756 case OMP_TASKLOOP:
13757 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13759 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13760 OMP_CLAUSE_GRAINSIZE))
13761 error_at (EXPR_LOCATION (for_stmt),
13762 "%qs clause may not appear on non-rectangular %qs",
13763 "grainsize", "taskloop");
13764 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13765 OMP_CLAUSE_NUM_TASKS))
13766 error_at (EXPR_LOCATION (for_stmt),
13767 "%qs clause may not appear on non-rectangular %qs",
13768 "num_tasks", "taskloop");
13770 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13771 ort = ORT_UNTIED_TASKLOOP;
13772 else
13773 ort = ORT_TASKLOOP;
13774 break;
13775 case OMP_SIMD:
13776 ort = ORT_SIMD;
13777 break;
13778 default:
13779 gcc_unreachable ();
13782   /* Set the OMP_CLAUSE_LINEAR_NO_COPYIN flag on an explicit linear
13783      clause for the IV.  */
13784 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13786 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13787 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13788 decl = TREE_OPERAND (t, 0);
13789 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13790 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13791 && OMP_CLAUSE_DECL (c) == decl)
13793 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13794 break;
13798 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13799 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13800 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13801 ? OMP_LOOP : TREE_CODE (for_stmt));
13803 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13804 gimplify_omp_ctxp->distribute = true;
13806 /* Handle OMP_FOR_INIT. */
13807 for_pre_body = NULL;
13808 if ((ort == ORT_SIMD
13809 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13810 && OMP_FOR_PRE_BODY (for_stmt))
13812 has_decl_expr = BITMAP_ALLOC (NULL);
13813 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13814 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
13816 t = OMP_FOR_PRE_BODY (for_stmt);
13817 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13819 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13821 tree_stmt_iterator si;
13822 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13823 tsi_next (&si))
13825 t = tsi_stmt (si);
13826 if (TREE_CODE (t) == DECL_EXPR
13827 && VAR_P (DECL_EXPR_DECL (t)))
13828 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13832 if (OMP_FOR_PRE_BODY (for_stmt))
13834 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
13835 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13836 else
13838 struct gimplify_omp_ctx ctx;
13839 memset (&ctx, 0, sizeof (ctx));
13840 ctx.region_type = ORT_NONE;
13841 gimplify_omp_ctxp = &ctx;
13842 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13843 gimplify_omp_ctxp = NULL;
13846 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
13848 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13849 for_stmt = inner_for_stmt;
13851   /* For taskloop, we need to gimplify the start, end and step expressions
13852      before the taskloop, outside of the taskloop omp context.  */
13853 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13855 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13857 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13858 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
13859 ? pre_p : &for_pre_body);
13860 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
13861 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13863 tree v = TREE_OPERAND (t, 1);
13864 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13865 for_pre_p, orig_for_stmt);
13866 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13867 for_pre_p, orig_for_stmt);
13869 else
13870 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13871 orig_for_stmt);
13873 /* Handle OMP_FOR_COND. */
13874 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13875 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13877 tree v = TREE_OPERAND (t, 1);
13878 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13879 for_pre_p, orig_for_stmt);
13880 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13881 for_pre_p, orig_for_stmt);
13883 else
13884 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13885 orig_for_stmt);
13887 /* Handle OMP_FOR_INCR. */
13888 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13889 if (TREE_CODE (t) == MODIFY_EXPR)
13891 decl = TREE_OPERAND (t, 0);
13892 t = TREE_OPERAND (t, 1);
13893 tree *tp = &TREE_OPERAND (t, 1);
13894 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
13895 tp = &TREE_OPERAND (t, 0);
13897 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
13898 orig_for_stmt);
13902 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
13903 OMP_TASKLOOP);
13906 if (orig_for_stmt != for_stmt)
13907 gimplify_omp_ctxp->combined_loop = true;
13909 for_body = NULL;
13910 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13911 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
13912 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13913 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
13915 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
13916 bool is_doacross = false;
13917 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
13918 find_standalone_omp_ordered, NULL))
13920 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
13921 is_doacross = true;
13922 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
13923 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
13924 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
13925 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
13927 error_at (OMP_CLAUSE_LOCATION (*pc),
13928 "%<linear%> clause may not be specified together "
13929 "with %<ordered%> clause if stand-alone %<ordered%> "
13930 "construct is nested in it");
13931 *pc = OMP_CLAUSE_CHAIN (*pc);
13933 else
13934 pc = &OMP_CLAUSE_CHAIN (*pc);
13936 int collapse = 1, tile = 0;
13937 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
13938 if (c)
13939 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
13940 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
13941 if (c)
13942 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
13943 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
13944 hash_set<tree> *allocate_uids = NULL;
13945 if (c)
13947 allocate_uids = new hash_set<tree>;
13948 for (; c; c = OMP_CLAUSE_CHAIN (c))
13949 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
13950 allocate_uids->add (OMP_CLAUSE_DECL (c));
13952 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13954 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13955 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13956 decl = TREE_OPERAND (t, 0);
13957 gcc_assert (DECL_P (decl));
13958 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
13959 || POINTER_TYPE_P (TREE_TYPE (decl)));
13960 if (is_doacross)
13962 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
13964 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13965 if (TREE_CODE (orig_decl) == TREE_LIST)
13967 orig_decl = TREE_PURPOSE (orig_decl);
13968 if (!orig_decl)
13969 orig_decl = decl;
13971 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
13973 else
13974 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13975 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13978 if (for_stmt == orig_for_stmt)
13980 tree orig_decl = decl;
13981 if (OMP_FOR_ORIG_DECLS (for_stmt))
13983 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13984 if (TREE_CODE (orig_decl) == TREE_LIST)
13986 orig_decl = TREE_PURPOSE (orig_decl);
13987 if (!orig_decl)
13988 orig_decl = decl;
13991 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
13992 error_at (EXPR_LOCATION (for_stmt),
13993 "threadprivate iteration variable %qD", orig_decl);
13996 /* Make sure the iteration variable is private. */
13997 tree c = NULL_TREE;
13998 tree c2 = NULL_TREE;
13999 if (orig_for_stmt != for_stmt)
14001 /* Preserve this information until we gimplify the inner simd. */
14002 if (has_decl_expr
14003 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14004 TREE_PRIVATE (t) = 1;
14006 else if (ort == ORT_SIMD)
14008 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14009 (splay_tree_key) decl);
14010 omp_is_private (gimplify_omp_ctxp, decl,
14011 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14012 != 1));
14013 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14015 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14016 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14017 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14018 OMP_CLAUSE_LASTPRIVATE);
14019 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14020 OMP_CLAUSE_LASTPRIVATE))
14021 if (OMP_CLAUSE_DECL (c3) == decl)
14023 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14024 "conditional %<lastprivate%> on loop "
14025 "iterator %qD ignored", decl);
14026 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14027 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14030 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14032 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14033 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14034 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14035 if ((has_decl_expr
14036 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14037 || TREE_PRIVATE (t))
14039 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14040 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14042 struct gimplify_omp_ctx *outer
14043 = gimplify_omp_ctxp->outer_context;
14044 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14046 if (outer->region_type == ORT_WORKSHARE
14047 && outer->combined_loop)
14049 n = splay_tree_lookup (outer->variables,
14050 (splay_tree_key)decl);
14051 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14053 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14054 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14056 else
14058 struct gimplify_omp_ctx *octx = outer->outer_context;
14059 if (octx
14060 && octx->region_type == ORT_COMBINED_PARALLEL
14061 && octx->outer_context
14062 && (octx->outer_context->region_type
14063 == ORT_WORKSHARE)
14064 && octx->outer_context->combined_loop)
14066 octx = octx->outer_context;
14067 n = splay_tree_lookup (octx->variables,
14068 (splay_tree_key)decl);
14069 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14071 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14072 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14079 OMP_CLAUSE_DECL (c) = decl;
14080 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14081 OMP_FOR_CLAUSES (for_stmt) = c;
14082 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14083 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14084 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14085 true);
14087 else
14089 bool lastprivate
14090 = (!has_decl_expr
14091 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14092 if (TREE_PRIVATE (t))
14093 lastprivate = false;
14094 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14096 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14097 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14098 lastprivate = false;
14101 struct gimplify_omp_ctx *outer
14102 = gimplify_omp_ctxp->outer_context;
14103 if (outer && lastprivate)
14104 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14105 true);
14107 c = build_omp_clause (input_location,
14108 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14109 : OMP_CLAUSE_PRIVATE);
14110 OMP_CLAUSE_DECL (c) = decl;
14111 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14112 OMP_FOR_CLAUSES (for_stmt) = c;
14113 omp_add_variable (gimplify_omp_ctxp, decl,
14114 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14115 | GOVD_EXPLICIT | GOVD_SEEN);
14116 c = NULL_TREE;
14119 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14121 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14122 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14123 (splay_tree_key) decl);
14124 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14125 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14126 OMP_CLAUSE_LASTPRIVATE);
14127 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14128 OMP_CLAUSE_LASTPRIVATE))
14129 if (OMP_CLAUSE_DECL (c3) == decl)
14131 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14132 "conditional %<lastprivate%> on loop "
14133 "iterator %qD ignored", decl);
14134 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14135 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14138 else
14139 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14141       /* If DECL is not a gimple register, create a temporary variable to act
14142 	 as an iteration counter.  This is valid, since DECL cannot be
14143 	 modified in the body of the loop.  Do the same for any iteration
14144 	 variables in a simd with collapse > 1, where the iterator variables
14145 	 must be lastprivate, and for variables mentioned in allocate clauses.  */
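      /* Concretely (an illustration): the construct then iterates on the
	 fresh temporary VAR, and "decl = var" is emitted at the start of
	 the loop body so user code still sees DECL.  */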
14146 if (orig_for_stmt != for_stmt)
14147 var = decl;
14148 else if (!is_gimple_reg (decl)
14149 || (ort == ORT_SIMD
14150 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14151 || (allocate_uids && allocate_uids->contains (decl)))
14153 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14154 /* Make sure omp_add_variable is not called on it prematurely.
14155 We call it ourselves a few lines later. */
14156 gimplify_omp_ctxp = NULL;
14157 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14158 gimplify_omp_ctxp = ctx;
14159 TREE_OPERAND (t, 0) = var;
14161 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14163 if (ort == ORT_SIMD
14164 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14166 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14167 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14168 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14169 OMP_CLAUSE_DECL (c2) = var;
14170 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14171 OMP_FOR_CLAUSES (for_stmt) = c2;
14172 omp_add_variable (gimplify_omp_ctxp, var,
14173 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14174 if (c == NULL_TREE)
14176 c = c2;
14177 c2 = NULL_TREE;
14180 else
14181 omp_add_variable (gimplify_omp_ctxp, var,
14182 GOVD_PRIVATE | GOVD_SEEN);
14184 else
14185 var = decl;
14187 gimplify_omp_ctxp->in_for_exprs = true;
14188 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14190 tree lb = TREE_OPERAND (t, 1);
14191 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14192 is_gimple_val, fb_rvalue, false);
14193 ret = MIN (ret, tret);
14194 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14195 is_gimple_val, fb_rvalue, false);
14197 else
14198 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14199 is_gimple_val, fb_rvalue, false);
14200 gimplify_omp_ctxp->in_for_exprs = false;
14201 ret = MIN (ret, tret);
14202 if (ret == GS_ERROR)
14203 return ret;
14205 /* Handle OMP_FOR_COND. */
14206 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14207 gcc_assert (COMPARISON_CLASS_P (t));
14208 gcc_assert (TREE_OPERAND (t, 0) == decl);
14210 gimplify_omp_ctxp->in_for_exprs = true;
14211 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14213 tree ub = TREE_OPERAND (t, 1);
14214 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14215 is_gimple_val, fb_rvalue, false);
14216 ret = MIN (ret, tret);
14217 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14218 is_gimple_val, fb_rvalue, false);
14220 else
14221 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14222 is_gimple_val, fb_rvalue, false);
14223 gimplify_omp_ctxp->in_for_exprs = false;
14224 ret = MIN (ret, tret);
14226 /* Handle OMP_FOR_INCR. */
14227 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14228 switch (TREE_CODE (t))
14230 case PREINCREMENT_EXPR:
14231 case POSTINCREMENT_EXPR:
14233 tree decl = TREE_OPERAND (t, 0);
14234 /* c_omp_for_incr_canonicalize_ptr() should have been
14235 called to massage things appropriately. */
14236 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14238 if (orig_for_stmt != for_stmt)
14239 break;
14240 t = build_int_cst (TREE_TYPE (decl), 1);
14241 if (c)
14242 OMP_CLAUSE_LINEAR_STEP (c) = t;
14243 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14244 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14245 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14246 break;
14249 case PREDECREMENT_EXPR:
14250 case POSTDECREMENT_EXPR:
14251 /* c_omp_for_incr_canonicalize_ptr() should have been
14252 called to massage things appropriately. */
14253 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14254 if (orig_for_stmt != for_stmt)
14255 break;
14256 t = build_int_cst (TREE_TYPE (decl), -1);
14257 if (c)
14258 OMP_CLAUSE_LINEAR_STEP (c) = t;
14259 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14260 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14261 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14262 break;
14264 case MODIFY_EXPR:
14265 gcc_assert (TREE_OPERAND (t, 0) == decl);
14266 TREE_OPERAND (t, 0) = var;
14268 t = TREE_OPERAND (t, 1);
14269 switch (TREE_CODE (t))
14271 case PLUS_EXPR:
14272 if (TREE_OPERAND (t, 1) == decl)
14274 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14275 TREE_OPERAND (t, 0) = var;
14276 break;
14279 /* Fallthru. */
14280 case MINUS_EXPR:
14281 case POINTER_PLUS_EXPR:
14282 gcc_assert (TREE_OPERAND (t, 0) == decl);
14283 TREE_OPERAND (t, 0) = var;
14284 break;
14285 default:
14286 gcc_unreachable ();
14289 gimplify_omp_ctxp->in_for_exprs = true;
14290 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14291 is_gimple_val, fb_rvalue, false);
14292 ret = MIN (ret, tret);
14293 if (c)
14295 tree step = TREE_OPERAND (t, 1);
14296 tree stept = TREE_TYPE (decl);
14297 if (POINTER_TYPE_P (stept))
14298 stept = sizetype;
14299 step = fold_convert (stept, step);
14300 if (TREE_CODE (t) == MINUS_EXPR)
14301 step = fold_build1 (NEGATE_EXPR, stept, step);
14302 OMP_CLAUSE_LINEAR_STEP (c) = step;
14303 if (step != TREE_OPERAND (t, 1))
14305 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14306 &for_pre_body, NULL,
14307 is_gimple_val, fb_rvalue, false);
14308 ret = MIN (ret, tret);
14311 gimplify_omp_ctxp->in_for_exprs = false;
14312 break;
14314 default:
14315 gcc_unreachable ();
14318 if (c2)
14320 gcc_assert (c);
14321 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14324 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14326 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14327 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14328 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14329 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14330 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14331 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14332 && OMP_CLAUSE_DECL (c) == decl)
14334 if (is_doacross && (collapse == 1 || i >= collapse))
14335 t = var;
14336 else
14338 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14339 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14340 gcc_assert (TREE_OPERAND (t, 0) == var);
14341 t = TREE_OPERAND (t, 1);
14342 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14343 || TREE_CODE (t) == MINUS_EXPR
14344 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14345 gcc_assert (TREE_OPERAND (t, 0) == var);
14346 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14347 is_doacross ? var : decl,
14348 TREE_OPERAND (t, 1));
14350 gimple_seq *seq;
14351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14352 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14353 else
14354 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14355 push_gimplify_context ();
14356 gimplify_assign (decl, t, seq);
14357 gimple *bind = NULL;
14358 if (gimplify_ctxp->temps)
14360 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14361 *seq = NULL;
14362 gimplify_seq_add_stmt (seq, bind);
14364 pop_gimplify_context (bind);
14367 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14368 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14370 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14371 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14372 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14373 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14374 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14375 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14376 gcc_assert (COMPARISON_CLASS_P (t));
14377 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14378 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14379 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14383 BITMAP_FREE (has_decl_expr);
14384 delete allocate_uids;
14386 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14387 || (loop_p && orig_for_stmt == for_stmt))
14389 push_gimplify_context ();
14390 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14392 OMP_FOR_BODY (orig_for_stmt)
14393 = build3 (BIND_EXPR, void_type_node, NULL,
14394 OMP_FOR_BODY (orig_for_stmt), NULL);
14395 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14399 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14400 &for_body);
14402 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14403 || (loop_p && orig_for_stmt == for_stmt))
14405 if (gimple_code (g) == GIMPLE_BIND)
14406 pop_gimplify_context (g);
14407 else
14408 pop_gimplify_context (NULL);
14411 if (orig_for_stmt != for_stmt)
14412 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14414 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14415 decl = TREE_OPERAND (t, 0);
14416 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14417 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14418 gimplify_omp_ctxp = ctx->outer_context;
14419 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14420 gimplify_omp_ctxp = ctx;
14421 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14422 TREE_OPERAND (t, 0) = var;
14423 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14424 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14425 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14426 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14427 for (int j = i + 1;
14428 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14430 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14431 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14432 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14433 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14435 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14436 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14438 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14439 gcc_assert (COMPARISON_CLASS_P (t));
14440 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14441 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14443 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14444 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14449 gimplify_adjust_omp_clauses (pre_p, for_body,
14450 &OMP_FOR_CLAUSES (orig_for_stmt),
14451 TREE_CODE (orig_for_stmt));
14453 int kind;
14454 switch (TREE_CODE (orig_for_stmt))
14456 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14457 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14458 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14459 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14460 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14461 default:
14462 gcc_unreachable ();
14464 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14466 gimplify_seq_add_seq (pre_p, for_pre_body);
14467 for_pre_body = NULL;
14469 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14470 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14471 for_pre_body);
14472 if (orig_for_stmt != for_stmt)
14473 gimple_omp_for_set_combined_p (gfor, true);
14474 if (gimplify_omp_ctxp
14475 && (gimplify_omp_ctxp->combined_loop
14476 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14477 && gimplify_omp_ctxp->outer_context
14478 && gimplify_omp_ctxp->outer_context->combined_loop)))
14480 gimple_omp_for_set_combined_into_p (gfor, true);
14481 if (gimplify_omp_ctxp->combined_loop)
14482 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14483 else
14484 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14487 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14489 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14490 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14491 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14492 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14493 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14494 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14495 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14496 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14499   /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14500      constructs with a GIMPLE_OMP_TASK sandwiched in between them.
14501      The outer taskloop computes the number of iterations and the
14502      counts for collapsed loops, and holds the taskloop-specific
14503      clauses.  The task construct stands for the effect of data sharing
14504      on the explicit task it creates, and the inner taskloop stands for
14505      the expansion of the static loop inside the explicit task construct.  */
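  /* Roughly, the resulting nesting is (an illustration, not literal dump
     output):
	GIMPLE_OMP_FOR (taskloop)	 <- counts, grainsize/num_tasks etc.
	  GIMPLE_OMP_TASK		 <- data sharing for the explicit task
	    GIMPLE_OMP_FOR (taskloop)	 <- expansion of the static loop  */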
14506 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14508 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14509 tree task_clauses = NULL_TREE;
14510 tree c = *gfor_clauses_ptr;
14511 tree *gtask_clauses_ptr = &task_clauses;
14512 tree outer_for_clauses = NULL_TREE;
14513 tree *gforo_clauses_ptr = &outer_for_clauses;
14514 bitmap lastprivate_uids = NULL;
14515 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14517 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14518 if (c)
14520 lastprivate_uids = BITMAP_ALLOC (NULL);
14521 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14522 OMP_CLAUSE_LASTPRIVATE))
14523 bitmap_set_bit (lastprivate_uids,
14524 DECL_UID (OMP_CLAUSE_DECL (c)));
14526 c = *gfor_clauses_ptr;
14528 for (; c; c = OMP_CLAUSE_CHAIN (c))
14529 switch (OMP_CLAUSE_CODE (c))
14531 /* These clauses are allowed on task, move them there. */
14532 case OMP_CLAUSE_SHARED:
14533 case OMP_CLAUSE_FIRSTPRIVATE:
14534 case OMP_CLAUSE_DEFAULT:
14535 case OMP_CLAUSE_IF:
14536 case OMP_CLAUSE_UNTIED:
14537 case OMP_CLAUSE_FINAL:
14538 case OMP_CLAUSE_MERGEABLE:
14539 case OMP_CLAUSE_PRIORITY:
14540 case OMP_CLAUSE_REDUCTION:
14541 case OMP_CLAUSE_IN_REDUCTION:
14542 *gtask_clauses_ptr = c;
14543 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14544 break;
14545 case OMP_CLAUSE_PRIVATE:
14546 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14548 	      /* We want private on the outer for and firstprivate
14549 		 on the task.  */
14550 *gtask_clauses_ptr
14551 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14552 OMP_CLAUSE_FIRSTPRIVATE);
14553 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14554 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14555 openacc);
14556 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14557 *gforo_clauses_ptr = c;
14558 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14560 else
14562 *gtask_clauses_ptr = c;
14563 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14565 break;
14566 	  /* These clauses go on the outer taskloop.  */
14567 case OMP_CLAUSE_GRAINSIZE:
14568 case OMP_CLAUSE_NUM_TASKS:
14569 case OMP_CLAUSE_NOGROUP:
14570 *gforo_clauses_ptr = c;
14571 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14572 break;
14573 	  /* The collapse clause is duplicated on both taskloops.  */
14574 case OMP_CLAUSE_COLLAPSE:
14575 *gfor_clauses_ptr = c;
14576 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14577 *gforo_clauses_ptr = copy_node (c);
14578 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14579 break;
14580 	  /* For lastprivate, keep the clause on the inner taskloop and add
14581 	     a shared clause on the task.  If the same decl is also firstprivate,
14582 	     also add a firstprivate clause on the inner taskloop.  */
14583 case OMP_CLAUSE_LASTPRIVATE:
14584 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14586 /* For taskloop C++ lastprivate IVs, we want:
14587 1) private on outer taskloop
14588 2) firstprivate and shared on task
14589 3) lastprivate on inner taskloop */
14590 *gtask_clauses_ptr
14591 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14592 OMP_CLAUSE_FIRSTPRIVATE);
14593 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14594 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14595 openacc);
14596 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14597 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14598 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14599 OMP_CLAUSE_PRIVATE);
14600 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14601 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14602 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14603 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14605 *gfor_clauses_ptr = c;
14606 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14607 *gtask_clauses_ptr
14608 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14609 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14610 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14611 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14612 gtask_clauses_ptr
14613 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14614 break;
14615 	  /* The allocate clause is duplicated on the task and the inner taskloop
14616 	     if the decl is lastprivate; otherwise it is just put on the task.  */
14617 case OMP_CLAUSE_ALLOCATE:
14618 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14619 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14621 	      /* Additionally, put a firstprivate clause on the task
14622 		 for the allocator if it is not constant.  */
14623 *gtask_clauses_ptr
14624 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14625 OMP_CLAUSE_FIRSTPRIVATE);
14626 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14627 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14628 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14630 if (lastprivate_uids
14631 && bitmap_bit_p (lastprivate_uids,
14632 DECL_UID (OMP_CLAUSE_DECL (c))))
14633 {
14634 *gfor_clauses_ptr = c;
14635 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14636 *gtask_clauses_ptr = copy_node (c);
14637 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14638 }
14639 else
14640 {
14641 *gtask_clauses_ptr = c;
14642 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14643 }
14644 break;
14645 default:
14646 gcc_unreachable ();
14648 *gfor_clauses_ptr = NULL_TREE;
14649 *gtask_clauses_ptr = NULL_TREE;
14650 *gforo_clauses_ptr = NULL_TREE;
14651 BITMAP_FREE (lastprivate_uids);
14652 gimple_set_location (gfor, input_location);
14653 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14654 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14655 NULL_TREE, NULL_TREE, NULL_TREE);
14656 gimple_set_location (g, input_location);
14657 gimple_omp_task_set_taskloop_p (g, true);
14658 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14659 gomp_for *gforo
14660 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14661 gimple_omp_for_collapse (gfor),
14662 gimple_omp_for_pre_body (gfor));
14663 gimple_omp_for_set_pre_body (gfor, NULL);
14664 gimple_omp_for_set_combined_p (gforo, true);
14665 gimple_omp_for_set_combined_into_p (gfor, true);
14666 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14668 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14669 tree v = create_tmp_var (type);
14670 gimple_omp_for_set_index (gforo, i, v);
14671 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14672 gimple_omp_for_set_initial (gforo, i, t);
14673 gimple_omp_for_set_cond (gforo, i,
14674 gimple_omp_for_cond (gfor, i));
14675 t = unshare_expr (gimple_omp_for_final (gfor, i));
14676 gimple_omp_for_set_final (gforo, i, t);
14677 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14678 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14679 TREE_OPERAND (t, 0) = v;
14680 gimple_omp_for_set_incr (gforo, i, t);
14681 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14682 OMP_CLAUSE_DECL (t) = v;
14683 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14684 gimple_omp_for_set_clauses (gforo, t);
14685 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14687 tree *p1 = NULL, *p2 = NULL;
14688 t = gimple_omp_for_initial (gforo, i);
14689 if (TREE_CODE (t) == TREE_VEC)
14690 p1 = &TREE_VEC_ELT (t, 0);
14691 t = gimple_omp_for_final (gforo, i);
14692 if (TREE_CODE (t) == TREE_VEC)
14694 if (p1)
14695 p2 = &TREE_VEC_ELT (t, 0);
14696 else
14697 p1 = &TREE_VEC_ELT (t, 0);
14699 if (p1)
14701 int j;
14702 for (j = 0; j < i; j++)
14703 if (*p1 == gimple_omp_for_index (gfor, j))
14705 *p1 = gimple_omp_for_index (gforo, j);
14706 if (p2)
14707 *p2 = *p1;
14708 break;
14710 gcc_assert (j < i);
14714 gimplify_seq_add_stmt (pre_p, gforo);
14716 else
14717 gimplify_seq_add_stmt (pre_p, gfor);
14719 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14721 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14722 unsigned lastprivate_conditional = 0;
14723 while (ctx
14724 && (ctx->region_type == ORT_TARGET_DATA
14725 || ctx->region_type == ORT_TASKGROUP))
14726 ctx = ctx->outer_context;
14727 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14728 for (tree c = gimple_omp_for_clauses (gfor);
14729 c; c = OMP_CLAUSE_CHAIN (c))
14730 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14731 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14732 ++lastprivate_conditional;
14733 if (lastprivate_conditional)
14735 struct omp_for_data fd;
14736 omp_extract_for_data (gfor, &fd, NULL);
14737 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14738 lastprivate_conditional);
14739 tree var = create_tmp_var_raw (type);
14740 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14741 OMP_CLAUSE_DECL (c) = var;
14742 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14743 gimple_omp_for_set_clauses (gfor, c);
14744 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14747 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14749 unsigned lastprivate_conditional = 0;
14750 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14751 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14752 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14753 ++lastprivate_conditional;
14754 if (lastprivate_conditional)
14756 struct omp_for_data fd;
14757 omp_extract_for_data (gfor, &fd, NULL);
14758 tree type = unsigned_type_for (fd.iter_type);
14759 while (lastprivate_conditional--)
14761 tree c = build_omp_clause (UNKNOWN_LOCATION,
14762 OMP_CLAUSE__CONDTEMP_);
14763 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14764 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14765 gimple_omp_for_set_clauses (gfor, c);
14770 if (ret != GS_ALL_DONE)
14771 return GS_ERROR;
14772 *expr_p = NULL_TREE;
14773 return GS_ALL_DONE;
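/* Editorial sketch (not from the original source): the taskloop handling
   above splits one user-level construct into three nested ones.  Given
   e.g.

     int i, last;
     #pragma omp taskloop grainsize(4) lastprivate(last)
     for (i = 0; i < 64; i++)
       last = i;

   the gimplifier conceptually produces

     outer GIMPLE_OMP_FOR (taskloop)      <- GRAINSIZE/NUM_TASKS/NOGROUP,
                                             private IVs, COLLAPSE copy
       GIMPLE_OMP_TASK                    <- firstprivate/shared copies
         inner GIMPLE_OMP_FOR (taskloop)  <- LASTPRIVATE, COLLAPSE

   matching the clause distribution coded in the switch above.  */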
14776 /* Helper for gimplify_omp_loop, called through walk_tree. */
14778 static tree
14779 note_no_context_vars (tree *tp, int *, void *data)
14781 if (VAR_P (*tp)
14782 && DECL_CONTEXT (*tp) == NULL_TREE
14783 && !is_global_var (*tp))
14785 vec<tree> *d = (vec<tree> *) data;
14786 d->safe_push (*tp);
14787 DECL_CONTEXT (*tp) = current_function_decl;
14789 return NULL_TREE;
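/* Editorial note: this callback is used below with
   walk_tree_without_duplicates to collect VAR_DECLs that have no
   DECL_CONTEXT yet and to parent them temporarily, so that
   copy_tree_body_r treats them as local, e.g.

     auto_vec<tree> vars;
     walk_tree_without_duplicates (&init, note_no_context_vars, &vars);
     ... copy the reduction init/merge trees ...
     for (tree d : vars)
       DECL_CONTEXT (d) = NULL_TREE;  (restored, as gimplify_omp_loop does)  */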
14792 /* Gimplify the gross structure of an OMP_LOOP statement. */
14794 static enum gimplify_status
14795 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14797 tree for_stmt = *expr_p;
14798 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14799 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14800 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14801 int i;
14803 /* If order is not present, the behavior is as if order(concurrent)
14804 appeared. */
14805 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14806 if (order == NULL_TREE)
14808 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14809 OMP_CLAUSE_CHAIN (order) = clauses;
14810 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14813 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14814 if (bind == NULL_TREE)
14816 if (!flag_openmp) /* flag_openmp_simd */
14817 ;
14818 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14819 kind = OMP_CLAUSE_BIND_TEAMS;
14820 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14821 kind = OMP_CLAUSE_BIND_PARALLEL;
14822 else
14824 for (; octx; octx = octx->outer_context)
14826 if ((octx->region_type & ORT_ACC) != 0
14827 || octx->region_type == ORT_NONE
14828 || octx->region_type == ORT_IMPLICIT_TARGET)
14829 continue;
14830 break;
14832 if (octx == NULL && !in_omp_construct)
14833 error_at (EXPR_LOCATION (for_stmt),
14834 "%<bind%> clause not specified on a %<loop%> "
14835 "construct not nested inside another OpenMP construct");
14837 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
14838 OMP_CLAUSE_CHAIN (bind) = clauses;
14839 OMP_CLAUSE_BIND_KIND (bind) = kind;
14840 OMP_FOR_CLAUSES (for_stmt) = bind;
14842 else
14843 switch (OMP_CLAUSE_BIND_KIND (bind))
14845 case OMP_CLAUSE_BIND_THREAD:
14846 break;
14847 case OMP_CLAUSE_BIND_PARALLEL:
14848 if (!flag_openmp) /* flag_openmp_simd */
14849 {
14850 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14851 break;
14852 }
14853 for (; octx; octx = octx->outer_context)
14854 if (octx->region_type == ORT_SIMD
14855 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
14857 error_at (EXPR_LOCATION (for_stmt),
14858 "%<bind(parallel)%> on a %<loop%> construct nested "
14859 "inside %<simd%> construct");
14860 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14861 break;
14863 kind = OMP_CLAUSE_BIND_PARALLEL;
14864 break;
14865 case OMP_CLAUSE_BIND_TEAMS:
14866 if (!flag_openmp) /* flag_openmp_simd */
14867 {
14868 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14869 break;
14870 }
14871 if ((octx
14872 && octx->region_type != ORT_IMPLICIT_TARGET
14873 && octx->region_type != ORT_NONE
14874 && (octx->region_type & ORT_TEAMS) == 0)
14875 || in_omp_construct)
14877 error_at (EXPR_LOCATION (for_stmt),
14878 "%<bind(teams)%> on a %<loop%> region not strictly "
14879 "nested inside of a %<teams%> region");
14880 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14881 break;
14883 kind = OMP_CLAUSE_BIND_TEAMS;
14884 break;
14885 default:
14886 gcc_unreachable ();
14889 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14890 switch (OMP_CLAUSE_CODE (*pc))
14892 case OMP_CLAUSE_REDUCTION:
14893 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
14895 error_at (OMP_CLAUSE_LOCATION (*pc),
14896 "%<inscan%> %<reduction%> clause on "
14897 "%qs construct", "loop");
14898 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
14900 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
14902 error_at (OMP_CLAUSE_LOCATION (*pc),
14903 "invalid %<task%> reduction modifier on construct "
14904 "other than %<parallel%>, %qs or %<sections%>",
14905 lang_GNU_Fortran () ? "do" : "for");
14906 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
14908 pc = &OMP_CLAUSE_CHAIN (*pc);
14909 break;
14910 case OMP_CLAUSE_LASTPRIVATE:
14911 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14913 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14914 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14915 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
14916 break;
14917 if (OMP_FOR_ORIG_DECLS (for_stmt)
14918 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14919 i)) == TREE_LIST
14920 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14921 i)))
14923 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14924 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
14925 break;
14928 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
14930 error_at (OMP_CLAUSE_LOCATION (*pc),
14931 "%<lastprivate%> clause on a %<loop%> construct refers "
14932 "to a variable %qD which is not the loop iterator",
14933 OMP_CLAUSE_DECL (*pc));
14934 *pc = OMP_CLAUSE_CHAIN (*pc);
14935 break;
14937 pc = &OMP_CLAUSE_CHAIN (*pc);
14938 break;
14939 default:
14940 pc = &OMP_CLAUSE_CHAIN (*pc);
14941 break;
14944 TREE_SET_CODE (for_stmt, OMP_SIMD);
14946 int last;
14947 switch (kind)
14949 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
14950 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
14951 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
14953 for (int pass = 1; pass <= last; pass++)
14955 if (pass == 2)
14957 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
14958 make_node (BLOCK));
14959 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
14960 *expr_p = make_node (OMP_PARALLEL);
14961 TREE_TYPE (*expr_p) = void_type_node;
14962 OMP_PARALLEL_BODY (*expr_p) = bind;
14963 OMP_PARALLEL_COMBINED (*expr_p) = 1;
14964 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
14965 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
14966 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14967 if (OMP_FOR_ORIG_DECLS (for_stmt)
14968 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
14969 == TREE_LIST))
14971 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14972 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
14974 *pc = build_omp_clause (UNKNOWN_LOCATION,
14975 OMP_CLAUSE_FIRSTPRIVATE);
14976 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
14977 pc = &OMP_CLAUSE_CHAIN (*pc);
14981 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
14982 tree *pc = &OMP_FOR_CLAUSES (t);
14983 TREE_TYPE (t) = void_type_node;
14984 OMP_FOR_BODY (t) = *expr_p;
14985 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
14986 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
14987 switch (OMP_CLAUSE_CODE (c))
14989 case OMP_CLAUSE_BIND:
14990 case OMP_CLAUSE_ORDER:
14991 case OMP_CLAUSE_COLLAPSE:
14992 *pc = copy_node (c);
14993 pc = &OMP_CLAUSE_CHAIN (*pc);
14994 break;
14995 case OMP_CLAUSE_PRIVATE:
14996 case OMP_CLAUSE_FIRSTPRIVATE:
14997 /* Only needed on innermost. */
14998 break;
14999 case OMP_CLAUSE_LASTPRIVATE:
15000 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15002 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15003 OMP_CLAUSE_FIRSTPRIVATE);
15004 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15005 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15006 pc = &OMP_CLAUSE_CHAIN (*pc);
15008 *pc = copy_node (c);
15009 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15010 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15011 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15013 if (pass != last)
15014 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15015 else
15016 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15017 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15019 pc = &OMP_CLAUSE_CHAIN (*pc);
15020 break;
15021 case OMP_CLAUSE_REDUCTION:
15022 *pc = copy_node (c);
15023 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15024 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15025 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15027 auto_vec<tree> no_context_vars;
15028 int walk_subtrees = 0;
15029 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15030 &walk_subtrees, &no_context_vars);
15031 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15032 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15033 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15034 note_no_context_vars,
15035 &no_context_vars);
15036 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15037 note_no_context_vars,
15038 &no_context_vars);
15040 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15041 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15042 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15043 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15044 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15046 hash_map<tree, tree> decl_map;
15047 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15048 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15049 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15050 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15051 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15052 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15054 copy_body_data id;
15055 memset (&id, 0, sizeof (id));
15056 id.src_fn = current_function_decl;
15057 id.dst_fn = current_function_decl;
15058 id.src_cfun = cfun;
15059 id.decl_map = &decl_map;
15060 id.copy_decl = copy_decl_no_change;
15061 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15062 id.transform_new_cfg = true;
15063 id.transform_return_to_modify = false;
15064 id.eh_lp_nr = 0;
15065 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15066 &id, NULL);
15067 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15068 &id, NULL);
15070 for (tree d : no_context_vars)
15072 DECL_CONTEXT (d) = NULL_TREE;
15073 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15074 }
15075 }
15076 else
15077 {
15078 OMP_CLAUSE_REDUCTION_INIT (*pc)
15079 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15080 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15081 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15082 }
15083 pc = &OMP_CLAUSE_CHAIN (*pc);
15084 break;
15085 default:
15086 gcc_unreachable ();
15088 *pc = NULL_TREE;
15089 *expr_p = t;
15091 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
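/* Editorial sketch: the function above lowers a "loop" construct by
   first rewriting the OMP_LOOP into an OMP_SIMD and then, depending on
   the bind kind, wrapping it in further constructs:

     #pragma omp loop bind(thread)    ->  simd
     #pragma omp loop bind(parallel)  ->  for simd
     #pragma omp loop bind(teams)     ->  distribute parallel for simd

   For example (assuming -fopenmp),

     #pragma omp parallel
     #pragma omp loop
     for (int i = 0; i < n; i++)
       a[i] = b[i];

   binds to the enclosing parallel and is gimplified as a "for simd"
   nest, with lastprivate/reduction clauses duplicated on each level as
   coded in the pass loop above.  */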
15095 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15096 of OMP_TARGET's body. */
15098 static tree
15099 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15101 *walk_subtrees = 0;
15102 switch (TREE_CODE (*tp))
15104 case OMP_TEAMS:
15105 return *tp;
15106 case BIND_EXPR:
15107 case STATEMENT_LIST:
15108 *walk_subtrees = 1;
15109 break;
15110 default:
15111 break;
15113 return NULL_TREE;
15116 /* Helper function of optimize_target_teams, determine if the expression
15117 can be computed safely on the host before the target construct. */
15119 static tree
15120 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15122 splay_tree_node n;
15124 if (TYPE_P (*tp))
15126 *walk_subtrees = 0;
15127 return NULL_TREE;
15129 switch (TREE_CODE (*tp))
15131 case VAR_DECL:
15132 case PARM_DECL:
15133 case RESULT_DECL:
15134 *walk_subtrees = 0;
15135 if (error_operand_p (*tp)
15136 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15137 || DECL_HAS_VALUE_EXPR_P (*tp)
15138 || DECL_THREAD_LOCAL_P (*tp)
15139 || TREE_SIDE_EFFECTS (*tp)
15140 || TREE_THIS_VOLATILE (*tp))
15141 return *tp;
15142 if (is_global_var (*tp)
15143 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15144 || lookup_attribute ("omp declare target link",
15145 DECL_ATTRIBUTES (*tp))))
15146 return *tp;
15147 if (VAR_P (*tp)
15148 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15149 && !is_global_var (*tp)
15150 && decl_function_context (*tp) == current_function_decl)
15151 return *tp;
15152 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15153 (splay_tree_key) *tp);
15154 if (n == NULL)
15156 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15157 return NULL_TREE;
15158 return *tp;
15160 else if (n->value & GOVD_LOCAL)
15161 return *tp;
15162 else if (n->value & GOVD_FIRSTPRIVATE)
15163 return NULL_TREE;
15164 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15165 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15166 return NULL_TREE;
15167 return *tp;
15168 case INTEGER_CST:
15169 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15170 return *tp;
15171 return NULL_TREE;
15172 case TARGET_EXPR:
15173 if (TARGET_EXPR_INITIAL (*tp)
15174 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15175 return *tp;
15176 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15177 walk_subtrees, NULL);
15178 /* Allow some reasonable subset of integral arithmetic. */
15179 case PLUS_EXPR:
15180 case MINUS_EXPR:
15181 case MULT_EXPR:
15182 case TRUNC_DIV_EXPR:
15183 case CEIL_DIV_EXPR:
15184 case FLOOR_DIV_EXPR:
15185 case ROUND_DIV_EXPR:
15186 case TRUNC_MOD_EXPR:
15187 case CEIL_MOD_EXPR:
15188 case FLOOR_MOD_EXPR:
15189 case ROUND_MOD_EXPR:
15190 case RDIV_EXPR:
15191 case EXACT_DIV_EXPR:
15192 case MIN_EXPR:
15193 case MAX_EXPR:
15194 case LSHIFT_EXPR:
15195 case RSHIFT_EXPR:
15196 case BIT_IOR_EXPR:
15197 case BIT_XOR_EXPR:
15198 case BIT_AND_EXPR:
15199 case NEGATE_EXPR:
15200 case ABS_EXPR:
15201 case BIT_NOT_EXPR:
15202 case NON_LVALUE_EXPR:
15203 CASE_CONVERT:
15204 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15205 return *tp;
15206 return NULL_TREE;
15207 /* And disallow anything else, except for comparisons. */
15208 default:
15209 if (COMPARISON_CLASS_P (*tp))
15210 return NULL_TREE;
15211 return *tp;
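/* Editorial examples for the predicate above (assumption: n is an int
   that is firstprivate on the target region):

     #pragma omp target teams num_teams(n * 2 + 1)  // computable on host
     #pragma omp target teams num_teams(foo ())     // call: not computable
     #pragma omp target teams num_teams(*p)         // deref: not computable

   Only INTEGER_CSTs, suitable integral decls, and simple integral
   arithmetic and comparisons on them pass the walk.  */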
15215 /* Try to determine whether the num_teams and/or thread_limit expressions
15216 can have their values determined already before entering the
15217 target construct.
15218 INTEGER_CSTs trivially can; so can integral decls that are
15219 firstprivate (explicitly or implicitly) or explicitly
15220 map(always, to:) or map(always, tofrom:) on the target region,
15221 and expressions involving simple arithmetic on those.
15222 Function calls are not OK, and neither is dereferencing something, etc.
15223 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15224 EXPR based on what we find:
15225 0 stands for a clause not specified at all; use the implementation default.
15226 -1 stands for a value that can't be determined easily before entering
15227 the target construct.
15228 -2 means that no explicit teams construct was specified.
15229 If no teams construct is present at all, use 1 for num_teams
15230 and 0 for thread_limit (only one team is involved, and the thread
15231 limit is implementation defined). */
15233 static void
15234 optimize_target_teams (tree target, gimple_seq *pre_p)
15236 tree body = OMP_BODY (target);
15237 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15238 tree num_teams_lower = NULL_TREE;
15239 tree num_teams_upper = integer_zero_node;
15240 tree thread_limit = integer_zero_node;
15241 location_t num_teams_loc = EXPR_LOCATION (target);
15242 location_t thread_limit_loc = EXPR_LOCATION (target);
15243 tree c, *p, expr;
15244 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15246 if (teams == NULL_TREE)
15247 num_teams_upper = build_int_cst (integer_type_node, -2);
15248 else
15249 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15251 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15253 p = &num_teams_upper;
15254 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15255 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15257 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15258 if (TREE_CODE (expr) == INTEGER_CST)
15259 num_teams_lower = expr;
15260 else if (walk_tree (&expr, computable_teams_clause,
15261 NULL, NULL))
15262 num_teams_lower = integer_minus_one_node;
15263 else
15265 num_teams_lower = expr;
15266 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15267 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15268 is_gimple_val, fb_rvalue, false)
15269 == GS_ERROR)
15271 gimplify_omp_ctxp = target_ctx;
15272 num_teams_lower = integer_minus_one_node;
15274 else
15276 gimplify_omp_ctxp = target_ctx;
15277 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15278 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15279 = num_teams_lower;
15284 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15286 p = &thread_limit;
15287 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15289 else
15290 continue;
15291 expr = OMP_CLAUSE_OPERAND (c, 0);
15292 if (TREE_CODE (expr) == INTEGER_CST)
15294 *p = expr;
15295 continue;
15297 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15299 *p = integer_minus_one_node;
15300 continue;
15302 *p = expr;
15303 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15304 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15305 == GS_ERROR)
15307 gimplify_omp_ctxp = target_ctx;
15308 *p = integer_minus_one_node;
15309 continue;
15311 gimplify_omp_ctxp = target_ctx;
15312 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15313 OMP_CLAUSE_OPERAND (c, 0) = *p;
15315 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15317 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15318 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15319 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15320 OMP_TARGET_CLAUSES (target) = c;
15322 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15323 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15324 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15325 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15326 OMP_TARGET_CLAUSES (target) = c;
15329 /* Gimplify the gross structure of several OMP constructs. */
15331 static void
15332 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15334 tree expr = *expr_p;
15335 gimple *stmt;
15336 gimple_seq body = NULL;
15337 enum omp_region_type ort;
15339 switch (TREE_CODE (expr))
15341 case OMP_SECTIONS:
15342 case OMP_SINGLE:
15343 ort = ORT_WORKSHARE;
15344 break;
15345 case OMP_SCOPE:
15346 ort = ORT_TASKGROUP;
15347 break;
15348 case OMP_TARGET:
15349 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15350 break;
15351 case OACC_KERNELS:
15352 ort = ORT_ACC_KERNELS;
15353 break;
15354 case OACC_PARALLEL:
15355 ort = ORT_ACC_PARALLEL;
15356 break;
15357 case OACC_SERIAL:
15358 ort = ORT_ACC_SERIAL;
15359 break;
15360 case OACC_DATA:
15361 ort = ORT_ACC_DATA;
15362 break;
15363 case OMP_TARGET_DATA:
15364 ort = ORT_TARGET_DATA;
15365 break;
15366 case OMP_TEAMS:
15367 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15368 if (gimplify_omp_ctxp == NULL
15369 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15370 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15371 break;
15372 case OACC_HOST_DATA:
15373 ort = ORT_ACC_HOST_DATA;
15374 break;
15375 default:
15376 gcc_unreachable ();
15379 bool save_in_omp_construct = in_omp_construct;
15380 if ((ort & ORT_ACC) == 0)
15381 in_omp_construct = false;
15382 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15383 TREE_CODE (expr));
15384 if (TREE_CODE (expr) == OMP_TARGET)
15385 optimize_target_teams (expr, pre_p);
15386 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15387 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15389 push_gimplify_context ();
15390 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15391 if (gimple_code (g) == GIMPLE_BIND)
15392 pop_gimplify_context (g);
15393 else
15394 pop_gimplify_context (NULL);
15395 if ((ort & ORT_TARGET_DATA) != 0)
15397 enum built_in_function end_ix;
15398 switch (TREE_CODE (expr))
15400 case OACC_DATA:
15401 case OACC_HOST_DATA:
15402 end_ix = BUILT_IN_GOACC_DATA_END;
15403 break;
15404 case OMP_TARGET_DATA:
15405 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15406 break;
15407 default:
15408 gcc_unreachable ();
15410 tree fn = builtin_decl_explicit (end_ix);
15411 g = gimple_build_call (fn, 0);
15412 gimple_seq cleanup = NULL;
15413 gimple_seq_add_stmt (&cleanup, g);
15414 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15415 body = NULL;
15416 gimple_seq_add_stmt (&body, g);
15419 else
15420 gimplify_and_add (OMP_BODY (expr), &body);
15421 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15422 TREE_CODE (expr));
15423 in_omp_construct = save_in_omp_construct;
15425 switch (TREE_CODE (expr))
15427 case OACC_DATA:
15428 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15429 OMP_CLAUSES (expr));
15430 break;
15431 case OACC_HOST_DATA:
15432 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15434 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15435 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15436 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15439 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15440 OMP_CLAUSES (expr));
15441 break;
15442 case OACC_KERNELS:
15443 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15444 OMP_CLAUSES (expr));
15445 break;
15446 case OACC_PARALLEL:
15447 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15448 OMP_CLAUSES (expr));
15449 break;
15450 case OACC_SERIAL:
15451 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15452 OMP_CLAUSES (expr));
15453 break;
15454 case OMP_SECTIONS:
15455 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15456 break;
15457 case OMP_SINGLE:
15458 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15459 break;
15460 case OMP_SCOPE:
15461 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15462 break;
15463 case OMP_TARGET:
15464 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15465 OMP_CLAUSES (expr));
15466 break;
15467 case OMP_TARGET_DATA:
15468 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15469 to be evaluated before the use_device_{ptr,addr} clauses if they
15470 refer to the same variables. */
15472 tree use_device_clauses;
15473 tree *pc, *uc = &use_device_clauses;
15474 for (pc = &OMP_CLAUSES (expr); *pc; )
15475 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15476 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15478 *uc = *pc;
15479 *pc = OMP_CLAUSE_CHAIN (*pc);
15480 uc = &OMP_CLAUSE_CHAIN (*uc);
15482 else
15483 pc = &OMP_CLAUSE_CHAIN (*pc);
15484 *uc = NULL_TREE;
15485 *pc = use_device_clauses;
15486 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15487 OMP_CLAUSES (expr));
15489 break;
15490 case OMP_TEAMS:
15491 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15492 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15493 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15494 break;
15495 default:
15496 gcc_unreachable ();
15499 gimplify_seq_add_stmt (pre_p, stmt);
15500 *expr_p = NULL_TREE;
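/* Editorial example of the OMP_TARGET_DATA clause reordering above:

     #pragma omp target data map(to: p[0:n]) use_device_ptr(p)

   keeps the map clause ahead of use_device_ptr in the clause chain, so
   the mapping is performed before the device-pointer translation of p
   looks it up.  */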
15503 /* Gimplify the gross structure of OpenACC enter/exit data, update, and
15504 OpenMP target update, target enter data and target exit data constructs. */
15506 static void
15507 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15509 tree expr = *expr_p;
15510 int kind;
15511 gomp_target *stmt;
15512 enum omp_region_type ort = ORT_WORKSHARE;
15514 switch (TREE_CODE (expr))
15516 case OACC_ENTER_DATA:
15517 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15518 ort = ORT_ACC;
15519 break;
15520 case OACC_EXIT_DATA:
15521 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15522 ort = ORT_ACC;
15523 break;
15524 case OACC_UPDATE:
15525 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15526 ort = ORT_ACC;
15527 break;
15528 case OMP_TARGET_UPDATE:
15529 kind = GF_OMP_TARGET_KIND_UPDATE;
15530 break;
15531 case OMP_TARGET_ENTER_DATA:
15532 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15533 break;
15534 case OMP_TARGET_EXIT_DATA:
15535 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15536 break;
15537 default:
15538 gcc_unreachable ();
15540 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15541 ort, TREE_CODE (expr));
15542 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15543 TREE_CODE (expr));
15544 if (TREE_CODE (expr) == OACC_UPDATE
15545 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15546 OMP_CLAUSE_IF_PRESENT))
15548 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15549 clause. */
15550 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15551 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15552 switch (OMP_CLAUSE_MAP_KIND (c))
15554 case GOMP_MAP_FORCE_TO:
15555 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15556 break;
15557 case GOMP_MAP_FORCE_FROM:
15558 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15559 break;
15560 default:
15561 break;
15564 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15565 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15566 OMP_CLAUSE_FINALIZE))
15568 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15569 semantics. */
15570 bool have_clause = false;
15571 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15572 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15573 switch (OMP_CLAUSE_MAP_KIND (c))
15575 case GOMP_MAP_FROM:
15576 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15577 have_clause = true;
15578 break;
15579 case GOMP_MAP_RELEASE:
15580 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15581 have_clause = true;
15582 break;
15583 case GOMP_MAP_TO_PSET:
15584 /* Fortran arrays with descriptors must map that descriptor when
15585 doing standalone "attach" operations (in OpenACC). In that
15586 case GOMP_MAP_TO_PSET appears by itself with no preceding
15587 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15588 break;
15589 case GOMP_MAP_POINTER:
15590 /* TODO PR92929: we may see these here, but they'll always follow
15591 one of the clauses above, and will be handled by libgomp as
15592 one group, so no handling required here. */
15593 gcc_assert (have_clause);
15594 break;
15595 case GOMP_MAP_DETACH:
15596 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15597 have_clause = false;
15598 break;
15599 case GOMP_MAP_STRUCT:
15600 have_clause = false;
15601 break;
15602 default:
15603 gcc_unreachable ();
15606 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15608 gimplify_seq_add_stmt (pre_p, stmt);
15609 *expr_p = NULL_TREE;
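/* Editorial example of the "finalize" rewriting above:

     #pragma acc exit data copyout(a[0:n]) delete(b[0:n]) finalize

   has its GOMP_MAP_FROM turned into GOMP_MAP_FORCE_FROM and its
   GOMP_MAP_RELEASE into GOMP_MAP_DELETE, so the runtime copies out and
   removes the data regardless of the dynamic reference counts.  */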
15612 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15613 stabilized the lhs of the atomic operation as *ADDR. Return true if
15614 EXPR is this stabilized form. */
15616 static bool
15617 goa_lhs_expr_p (tree expr, tree addr)
15619 /* Also include casts to other type variants. The C front end is fond
15620 of adding these for e.g. volatile variables. This is like
15621 STRIP_TYPE_NOPS but includes the main variant lookup. */
15622 STRIP_USELESS_TYPE_CONVERSION (expr);
15624 if (INDIRECT_REF_P (expr))
15626 expr = TREE_OPERAND (expr, 0);
15627 while (expr != addr
15628 && (CONVERT_EXPR_P (expr)
15629 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15630 && TREE_CODE (expr) == TREE_CODE (addr)
15631 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15633 expr = TREE_OPERAND (expr, 0);
15634 addr = TREE_OPERAND (addr, 0);
15636 if (expr == addr)
15637 return true;
15638 return (TREE_CODE (addr) == ADDR_EXPR
15639 && TREE_CODE (expr) == ADDR_EXPR
15640 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15642 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15643 return true;
15644 return false;
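/* Editorial sketch: for

     #pragma omp atomic
     x = x + 1;

   the front end stabilizes the lhs as ADDR = &x, and the rhs contains
   *(&x), possibly wrapped in casts to type variants; the predicate
   above strips those wrappers to recognize such occurrences of the
   lhs.  */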
15647 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15648 expression does not involve the lhs, evaluate it into a temporary.
15649 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15650 or -1 if an error was encountered. */
15652 static int
15653 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15654 tree lhs_var, tree &target_expr, bool rhs, int depth)
15656 tree expr = *expr_p;
15657 int saw_lhs = 0;
15659 if (goa_lhs_expr_p (expr, lhs_addr))
15661 if (pre_p)
15662 *expr_p = lhs_var;
15663 return 1;
15665 if (is_gimple_val (expr))
15666 return 0;
15668 /* Maximum depth of lhs in expression is for the
15669 __builtin_clear_padding (...), __builtin_clear_padding (...),
15670 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
15671 if (++depth > 7)
15672 goto finish;
15674 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15676 case tcc_binary:
15677 case tcc_comparison:
15678 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15679 lhs_var, target_expr, true, depth);
15680 /* FALLTHRU */
15681 case tcc_unary:
15682 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15683 lhs_var, target_expr, true, depth);
15684 break;
15685 case tcc_expression:
15686 switch (TREE_CODE (expr))
15688 case TRUTH_ANDIF_EXPR:
15689 case TRUTH_ORIF_EXPR:
15690 case TRUTH_AND_EXPR:
15691 case TRUTH_OR_EXPR:
15692 case TRUTH_XOR_EXPR:
15693 case BIT_INSERT_EXPR:
15694 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15695 lhs_addr, lhs_var, target_expr, true,
15696 depth);
15697 /* FALLTHRU */
15698 case TRUTH_NOT_EXPR:
15699 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15700 lhs_addr, lhs_var, target_expr, true,
15701 depth);
15702 break;
15703 case MODIFY_EXPR:
15704 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15705 target_expr, true, depth))
15706 break;
15707 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15708 lhs_addr, lhs_var, target_expr, true,
15709 depth);
15710 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15711 lhs_addr, lhs_var, target_expr, false,
15712 depth);
15713 break;
15715 case ADDR_EXPR:
15716 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15717 target_expr, true, depth))
15718 break;
15719 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15720 lhs_addr, lhs_var, target_expr, false,
15721 depth);
15722 break;
15723 case COMPOUND_EXPR:
15724 /* Break out any preevaluations from cp_build_modify_expr. */
15725 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15726 expr = TREE_OPERAND (expr, 1))
15728 /* Special-case __builtin_clear_padding call before
15729 __builtin_memcmp. */
15730 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15732 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15733 if (fndecl
15734 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15735 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15736 && (!pre_p
15737 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15738 lhs_addr, lhs_var,
15739 target_expr, true, depth)))
15741 if (pre_p)
15742 *expr_p = expr;
15743 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15744 pre_p, lhs_addr, lhs_var,
15745 target_expr, true, depth);
15746 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15747 pre_p, lhs_addr, lhs_var,
15748 target_expr, rhs, depth);
15749 return saw_lhs;
15753 if (pre_p)
15754 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15756 if (!pre_p)
15757 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15758 target_expr, rhs, depth);
15759 *expr_p = expr;
15760 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15761 target_expr, rhs, depth);
15762 case COND_EXPR:
15763 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15764 lhs_var, target_expr, true, depth))
15765 break;
15766 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15767 lhs_addr, lhs_var, target_expr, true,
15768 depth);
15769 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15770 lhs_addr, lhs_var, target_expr, true,
15771 depth);
15772 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15773 lhs_addr, lhs_var, target_expr, true,
15774 depth);
15775 break;
15776 case TARGET_EXPR:
15777 if (TARGET_EXPR_INITIAL (expr))
15779 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15780 lhs_var, target_expr, true,
15781 depth))
15782 break;
15783 if (expr == target_expr)
15784 saw_lhs = 1;
15785 else
15786 {
15787 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15788 pre_p, lhs_addr, lhs_var,
15789 target_expr, true, depth);
15790 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15791 target_expr = expr;
15792 }
15793 }
15794 break;
15795 default:
15796 break;
15798 break;
15799 case tcc_reference:
15800 if (TREE_CODE (expr) == BIT_FIELD_REF
15801 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15802 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15803 lhs_addr, lhs_var, target_expr, true,
15804 depth);
15805 break;
15806 case tcc_vl_exp:
15807 if (TREE_CODE (expr) == CALL_EXPR)
15809 if (tree fndecl = get_callee_fndecl (expr))
15810 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
15811 BUILT_IN_MEMCMP))
15813 int nargs = call_expr_nargs (expr);
15814 for (int i = 0; i < nargs; i++)
15815 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15816 pre_p, lhs_addr, lhs_var,
15817 target_expr, true, depth);
15820 break;
15821 default:
15822 break;
15825 finish:
15826 if (saw_lhs == 0 && pre_p)
15827 {
15828 enum gimplify_status gs;
15829 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
15830 {
15831 gimplify_stmt (&expr, pre_p);
15832 return saw_lhs;
15833 }
15834 else if (rhs)
15835 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
15836 else
15837 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
15838 if (gs != GS_ALL_DONE)
15839 saw_lhs = -1;
15840 }
15842 return saw_lhs;
15845 /* Gimplify an OMP_ATOMIC statement. */
15847 static enum gimplify_status
15848 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
15850 tree addr = TREE_OPERAND (*expr_p, 0);
15851 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
15852 ? NULL : TREE_OPERAND (*expr_p, 1);
15853 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
15854 tree tmp_load;
15855 gomp_atomic_load *loadstmt;
15856 gomp_atomic_store *storestmt;
15857 tree target_expr = NULL_TREE;
15859 tmp_load = create_tmp_reg (type);
15860 if (rhs
15861 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
15862 true, 0) < 0)
15863 return GS_ERROR;
15865 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
15866 != GS_ALL_DONE)
15867 return GS_ERROR;
15869 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
15870 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15871 gimplify_seq_add_stmt (pre_p, loadstmt);
15872 if (rhs)
15874 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15875 representatives. Use BIT_FIELD_REF on the lhs instead. */
15876 tree rhsarg = rhs;
15877 if (TREE_CODE (rhs) == COND_EXPR)
15878 rhsarg = TREE_OPERAND (rhs, 1);
15879 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
15880 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
15882 tree bitpos = TREE_OPERAND (rhsarg, 2);
15883 tree op1 = TREE_OPERAND (rhsarg, 1);
15884 tree bitsize;
15885 tree tmp_store = tmp_load;
15886 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
15887 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
15888 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
15889 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
15890 else
15891 bitsize = TYPE_SIZE (TREE_TYPE (op1));
15892 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
15893 tree t = build2_loc (EXPR_LOCATION (rhsarg),
15894 MODIFY_EXPR, void_type_node,
15895 build3_loc (EXPR_LOCATION (rhsarg),
15896 BIT_FIELD_REF, TREE_TYPE (op1),
15897 tmp_store, bitsize, bitpos), op1);
15898 if (TREE_CODE (rhs) == COND_EXPR)
15899 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
15900 TREE_OPERAND (rhs, 0), t, void_node);
15901 gimplify_and_add (t, pre_p);
15902 rhs = tmp_store;
15904 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
15905 if (TREE_CODE (rhs) == COND_EXPR)
15906 gimplify_ctxp->allow_rhs_cond_expr = true;
15907 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
15908 is_gimple_val, fb_rvalue);
15909 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
15910 if (gs != GS_ALL_DONE)
15911 return GS_ERROR;
15914 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
15915 rhs = tmp_load;
15916 storestmt
15917 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15918 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
15920 gimple_omp_atomic_set_weak (loadstmt);
15921 gimple_omp_atomic_set_weak (storestmt);
15923 gimplify_seq_add_stmt (pre_p, storestmt);
15924 switch (TREE_CODE (*expr_p))
15926 case OMP_ATOMIC_READ:
15927 case OMP_ATOMIC_CAPTURE_OLD:
15928 *expr_p = tmp_load;
15929 gimple_omp_atomic_set_need_value (loadstmt);
15930 break;
15931 case OMP_ATOMIC_CAPTURE_NEW:
15932 *expr_p = rhs;
15933 gimple_omp_atomic_set_need_value (storestmt);
15934 break;
15935 default:
15936 *expr_p = NULL;
15937 break;
15940 return GS_ALL_DONE;
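/* Editorial sketch of the GIMPLE generated above: for

     #pragma omp atomic capture
     v = x += expr;

   (OMP_ATOMIC_CAPTURE_NEW) the emitted sequence is roughly

     tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>
     tmp2 = tmp + expr
     GIMPLE_OMP_ATOMIC_STORE <tmp2>   (need_value set on the store)

   with *expr_p replaced by the stored value; OMP_ATOMIC_READ and
   OMP_ATOMIC_CAPTURE_OLD instead return the loaded temporary and set
   need_value on the load.  */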
15943 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
15944 body, and adding some EH bits. */
15946 static enum gimplify_status
15947 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
15949 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
15950 gimple *body_stmt;
15951 gtransaction *trans_stmt;
15952 gimple_seq body = NULL;
15953 int subcode = 0;
15955 /* Wrap the transaction body in a BIND_EXPR so we have a context
15956 in which to put decls for OMP. */
15957 if (TREE_CODE (tbody) != BIND_EXPR)
15959 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
15960 TREE_SIDE_EFFECTS (bind) = 1;
15961 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
15962 TRANSACTION_EXPR_BODY (expr) = bind;
15965 push_gimplify_context ();
15966 temp = voidify_wrapper_expr (*expr_p, NULL);
15968 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
15969 pop_gimplify_context (body_stmt);
15971 trans_stmt = gimple_build_transaction (body);
15972 if (TRANSACTION_EXPR_OUTER (expr))
15973 subcode = GTMA_IS_OUTER;
15974 else if (TRANSACTION_EXPR_RELAXED (expr))
15975 subcode = GTMA_IS_RELAXED;
15976 gimple_transaction_set_subcode (trans_stmt, subcode);
15978 gimplify_seq_add_stmt (pre_p, trans_stmt);
15980 if (temp)
15982 *expr_p = temp;
15983 return GS_OK;
15986 *expr_p = NULL_TREE;
15987 return GS_ALL_DONE;
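/* Editorial example (with -fgnu-tm):

     __transaction_atomic { x++; }

   arrives here as a TRANSACTION_EXPR; the body is wrapped in a
   BIND_EXPR if needed, gimplified in its own context, and emitted as a
   GIMPLE_TRANSACTION whose subcode is GTMA_IS_OUTER for
   __transaction_atomic [[outer]], GTMA_IS_RELAXED for
   __transaction_relaxed, and 0 otherwise.  */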
15990 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
15991 is the OMP_BODY of the original EXPR (which has already been
15992 gimplified so it's not present in the EXPR).
15994 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
15996 static gimple *
15997 gimplify_omp_ordered (tree expr, gimple_seq body)
15999 tree c, decls;
16000 int failures = 0;
16001 unsigned int i;
16002 tree source_c = NULL_TREE;
16003 tree sink_c = NULL_TREE;
16005 if (gimplify_omp_ctxp)
16007 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16008 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16009 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16011 error_at (OMP_CLAUSE_LOCATION (c),
16012 "%<ordered%> construct with %qs clause must be "
16013 "closely nested inside a loop with %<ordered%> clause",
16014 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16015 failures++;
16017 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16018 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16020 bool fail = false;
16021 sink_c = c;
16022 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16023 continue; /* omp_cur_iteration - 1 */
16024 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16025 decls && TREE_CODE (decls) == TREE_LIST;
16026 decls = TREE_CHAIN (decls), ++i)
16027 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16028 continue;
16029 else if (TREE_VALUE (decls)
16030 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16032 error_at (OMP_CLAUSE_LOCATION (c),
16033 "variable %qE is not an iteration "
16034 "of outermost loop %d, expected %qE",
16035 TREE_VALUE (decls), i + 1,
16036 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16037 fail = true;
16038 failures++;
16040 else
16041 TREE_VALUE (decls)
16042 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16043 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16045 error_at (OMP_CLAUSE_LOCATION (c),
16046 "number of variables in %qs clause with "
16047 "%<sink%> modifier does not match number of "
16048 "iteration variables",
16049 OMP_CLAUSE_DOACROSS_DEPEND (c)
16050 ? "depend" : "doacross");
16051 failures++;
16054 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16055 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16057 if (source_c)
16059 error_at (OMP_CLAUSE_LOCATION (c),
16060 "more than one %qs clause with %<source%> "
16061 "modifier on an %<ordered%> construct",
16062 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16063 ? "depend" : "doacross");
16064 failures++;
16066 else
16067 source_c = c;
16070 if (source_c && sink_c)
16072 error_at (OMP_CLAUSE_LOCATION (source_c),
16073 "%qs clause with %<source%> modifier specified "
16074 "together with %qs clauses with %<sink%> modifier "
16075 "on the same construct",
16076 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16077 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16078 failures++;
16081 if (failures)
16082 return gimple_build_nop ();
16083 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
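/* Editorial example of the doacross checking above: in

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
	 {
	   #pragma omp ordered depend(sink: i - 1, j)
	   ...
	   #pragma omp ordered depend(source)
	 }

   each sink variable must be the iteration variable of the matching
   loop level and the count must equal the ordered(2) depth; any
   violation is diagnosed above and yields a GIMPLE_NOP instead of a
   GIMPLE_OMP_ORDERED.  */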
16086 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16087 expression produces a value to be used as an operand inside a GIMPLE
16088 statement, the value will be stored back in *EXPR_P. This value will
16089 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16090 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16091 emitted in PRE_P and POST_P.
16093 Additionally, this process may overwrite parts of the input
16094 expression during gimplification. Ideally, it should be
16095 possible to do non-destructive gimplification.
16097 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16098 the expression needs to evaluate to a value to be used as
16099 an operand in a GIMPLE statement, this value will be stored in
16100 *EXPR_P on exit. This happens when the caller specifies one
16101 of fb_lvalue or fb_rvalue fallback flags.
16103 PRE_P will contain the sequence of GIMPLE statements corresponding
16104 to the evaluation of EXPR and all the side-effects that must
16105 be executed before the main expression. On exit, the last
16106 statement of PRE_P is the core statement being gimplified. For
16107 instance, when gimplifying 'if (++a)' the last statement in
16108 PRE_P will be 'if (t.1)' where t.1 is the result of
16109 pre-incrementing 'a'.
16111 POST_P will contain the sequence of GIMPLE statements corresponding
16112 to the evaluation of all the side-effects that must be executed
16113 after the main expression. If this is NULL, the post
16114 side-effects are stored at the end of PRE_P.
16116 The reason why the output is split in two is to handle post
16117 side-effects explicitly. In some cases, an expression may have
16118 inner and outer post side-effects which need to be emitted in
16119 an order different from the one given by the recursive
16120 traversal. For instance, for the expression (*p--)++ the post
16121 side-effects of '--' must actually occur *after* the post
16122 side-effects of '++'. However, gimplification will first visit
16123 the inner expression, so if a separate POST sequence was not
16124 used, the resulting sequence would be:
16126 1 t.1 = *p
16127 2 p = p - 1
16128 3 t.2 = t.1 + 1
16129 4 *p = t.2
16131 However, the post-decrement operation in line #2 must not be
16132 evaluated until after the store to *p at line #4, so the
16133 correct sequence should be:
16135 1 t.1 = *p
16136 2 t.2 = t.1 + 1
16137 3 *p = t.2
16138 4 p = p - 1
16140 So, by specifying a separate post queue, it is possible
16141 to emit the post side-effects in the correct order.
16142 If POST_P is NULL, an internal queue will be used. Before
16143 returning to the caller, the sequence POST_P is appended to
16144 the main output sequence PRE_P.
16146 GIMPLE_TEST_F points to a function that takes a tree T and
16147 returns nonzero if T is in the GIMPLE form requested by the
16148 caller. The GIMPLE predicates are in gimple.cc.
16150 FALLBACK tells the function what sort of a temporary we want if
16151 gimplification cannot produce an expression that complies with
16152 GIMPLE_TEST_F.
16154 fb_none means that no temporary should be generated
16155 fb_rvalue means that an rvalue is OK to generate
16156 fb_lvalue means that an lvalue is OK to generate
16157 fb_either means that either is OK, but an lvalue is preferable.
16158 fb_mayfail means that gimplification may fail (in which case
16159 GS_ERROR will be returned)
16161 The return value is either GS_ERROR or GS_ALL_DONE, since this
16162 function iterates until EXPR is completely gimplified or an error
16163 occurs. */
16165 enum gimplify_status
16166 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16167 bool (*gimple_test_f) (tree), fallback_t fallback)
16169 tree tmp;
16170 gimple_seq internal_pre = NULL;
16171 gimple_seq internal_post = NULL;
16172 tree save_expr;
16173 bool is_statement;
16174 location_t saved_location;
16175 enum gimplify_status ret;
16176 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16177 tree label;
16179 save_expr = *expr_p;
16180 if (save_expr == NULL_TREE)
16181 return GS_ALL_DONE;
16183 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16184 is_statement = gimple_test_f == is_gimple_stmt;
16185 if (is_statement)
16186 gcc_assert (pre_p);
16188 /* Consistency checks. */
16189 if (gimple_test_f == is_gimple_reg)
16190 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16191 else if (gimple_test_f == is_gimple_val
16192 || gimple_test_f == is_gimple_call_addr
16193 || gimple_test_f == is_gimple_condexpr_for_cond
16194 || gimple_test_f == is_gimple_mem_rhs
16195 || gimple_test_f == is_gimple_mem_rhs_or_call
16196 || gimple_test_f == is_gimple_reg_rhs
16197 || gimple_test_f == is_gimple_reg_rhs_or_call
16198 || gimple_test_f == is_gimple_asm_val
16199 || gimple_test_f == is_gimple_mem_ref_addr)
16200 gcc_assert (fallback & fb_rvalue);
16201 else if (gimple_test_f == is_gimple_min_lval
16202 || gimple_test_f == is_gimple_lvalue)
16203 gcc_assert (fallback & fb_lvalue);
16204 else if (gimple_test_f == is_gimple_addressable)
16205 gcc_assert (fallback & fb_either);
16206 else if (gimple_test_f == is_gimple_stmt)
16207 gcc_assert (fallback == fb_none);
16208 else
16210 /* We should have recognized the GIMPLE_TEST_F predicate to
16211 know what kind of fallback to use in case a temporary is
16212 needed to hold the value or address of *EXPR_P. */
16213 gcc_unreachable ();
16216 /* We used to check the predicate here and return immediately if it
16217 succeeds. This is wrong; the design is for gimplification to be
16218 idempotent, and for the predicates to only test for valid forms, not
16219 whether they are fully simplified. */
16220 if (pre_p == NULL)
16221 pre_p = &internal_pre;
16223 if (post_p == NULL)
16224 post_p = &internal_post;
16226 /* Remember the last statements added to PRE_P and POST_P. Every
16227 new statement added by the gimplification helpers needs to be
16228 annotated with location information. To centralize the
16229 responsibility, we remember the last statement that had been
16230 added to both queues before gimplifying *EXPR_P. If
16231 gimplification produces new statements in PRE_P and POST_P, those
16232 statements will be annotated with the same location information
16233 as *EXPR_P. */
16234 pre_last_gsi = gsi_last (*pre_p);
16235 post_last_gsi = gsi_last (*post_p);
16237 saved_location = input_location;
16238 if (save_expr != error_mark_node
16239 && EXPR_HAS_LOCATION (*expr_p))
16240 input_location = EXPR_LOCATION (*expr_p);
16242 /* Loop over the specific gimplifiers until the toplevel node
16243 remains the same. */
16246 /* Strip away as many useless type conversions as possible
16247 at the toplevel. */
16248 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16250 /* Remember the expr. */
16251 save_expr = *expr_p;
16253 /* Die, die, die, my darling. */
16254 if (error_operand_p (save_expr))
16256 ret = GS_ERROR;
16257 break;
16260 /* Do any language-specific gimplification. */
16261 ret = ((enum gimplify_status)
16262 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16263 if (ret == GS_OK)
16265 if (*expr_p == NULL_TREE)
16266 break;
16267 if (*expr_p != save_expr)
16268 continue;
16270 else if (ret != GS_UNHANDLED)
16271 break;
16273 /* Make sure that all the cases set 'ret' appropriately. */
16274 ret = GS_UNHANDLED;
16275 switch (TREE_CODE (*expr_p))
16277 /* First deal with the special cases. */
16279 case POSTINCREMENT_EXPR:
16280 case POSTDECREMENT_EXPR:
16281 case PREINCREMENT_EXPR:
16282 case PREDECREMENT_EXPR:
16283 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16284 fallback != fb_none,
16285 TREE_TYPE (*expr_p));
16286 break;
16288 case VIEW_CONVERT_EXPR:
16289 if ((fallback & fb_rvalue)
16290 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16291 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16293 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16294 post_p, is_gimple_val, fb_rvalue);
16295 recalculate_side_effects (*expr_p);
16296 break;
16298 /* Fallthru. */
16300 case ARRAY_REF:
16301 case ARRAY_RANGE_REF:
16302 case REALPART_EXPR:
16303 case IMAGPART_EXPR:
16304 case COMPONENT_REF:
16305 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16306 fallback ? fallback : fb_rvalue);
16307 break;
16309 case COND_EXPR:
16310 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16312 /* C99 code may assign to an array in a structure value of a
16313 conditional expression, and this has undefined behavior
16314 only on execution, so create a temporary if an lvalue is
16315 required. */
16316 if (fallback == fb_lvalue)
16318 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16319 mark_addressable (*expr_p);
16320 ret = GS_OK;
16322 break;
16324 case CALL_EXPR:
16325 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16327 /* C99 code may assign to an array in a structure returned
16328 from a function, and this has undefined behavior only on
16329 execution, so create a temporary if an lvalue is
16330 required. */
16331 if (fallback == fb_lvalue)
16333 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16334 mark_addressable (*expr_p);
16335 ret = GS_OK;
16337 break;
16339 case TREE_LIST:
16340 gcc_unreachable ();
16342 case COMPOUND_EXPR:
16343 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16344 break;
16346 case COMPOUND_LITERAL_EXPR:
16347 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16348 gimple_test_f, fallback);
16349 break;
16351 case MODIFY_EXPR:
16352 case INIT_EXPR:
16353 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16354 fallback != fb_none);
16355 break;
16357 case TRUTH_ANDIF_EXPR:
16358 case TRUTH_ORIF_EXPR:
16360 /* Preserve the original type of the expression and the
16361 source location of the outer expression. */
16362 tree org_type = TREE_TYPE (*expr_p);
16363 *expr_p = gimple_boolify (*expr_p);
16364 *expr_p = build3_loc (input_location, COND_EXPR,
16365 org_type, *expr_p,
16366 fold_convert_loc
16367 (input_location,
16368 org_type, boolean_true_node),
16369 fold_convert_loc
16370 (input_location,
16371 org_type, boolean_false_node));
16372 ret = GS_OK;
16373 break;
16376 case TRUTH_NOT_EXPR:
16378 tree type = TREE_TYPE (*expr_p);
16379 /* The parsers are careful to generate TRUTH_NOT_EXPR
16380 only with operands that are always zero or one.
16381 We do not fold here but handle the only interesting case
16382 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16383 *expr_p = gimple_boolify (*expr_p);
16384 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16385 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16386 TREE_TYPE (*expr_p),
16387 TREE_OPERAND (*expr_p, 0));
16388 else
16389 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16390 TREE_TYPE (*expr_p),
16391 TREE_OPERAND (*expr_p, 0),
16392 build_int_cst (TREE_TYPE (*expr_p), 1));
16393 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16394 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16395 ret = GS_OK;
16396 break;
16399 case ADDR_EXPR:
16400 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16401 break;
16403 case ANNOTATE_EXPR:
16405 tree cond = TREE_OPERAND (*expr_p, 0);
16406 tree kind = TREE_OPERAND (*expr_p, 1);
16407 tree data = TREE_OPERAND (*expr_p, 2);
16408 tree type = TREE_TYPE (cond);
16409 if (!INTEGRAL_TYPE_P (type))
16411 *expr_p = cond;
16412 ret = GS_OK;
16413 break;
16415 tree tmp = create_tmp_var (type);
16416 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16417 gcall *call
16418 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16419 gimple_call_set_lhs (call, tmp);
16420 gimplify_seq_add_stmt (pre_p, call);
16421 *expr_p = tmp;
16422 ret = GS_ALL_DONE;
16423 break;
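/* E.g. a loop condition C annotated via #pragma GCC ivdep arrives
   here as an ANNOTATE_EXPR and leaves as roughly
   "tmp = .ANNOTATE (C, kind, data);" with TMP replacing the
   expression; later loop passes consume the annotation.  */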
16426 case VA_ARG_EXPR:
16427 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16428 break;
16430 CASE_CONVERT:
16431 if (IS_EMPTY_STMT (*expr_p))
16433 ret = GS_ALL_DONE;
16434 break;
16437 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16438 || fallback == fb_none)
16440 /* Just strip a conversion to void (or in void context) and
16441 try again. */
16442 *expr_p = TREE_OPERAND (*expr_p, 0);
16443 ret = GS_OK;
16444 break;
16447 ret = gimplify_conversion (expr_p);
16448 if (ret == GS_ERROR)
16449 break;
16450 if (*expr_p != save_expr)
16451 break;
16452 /* FALLTHRU */
16454 case FIX_TRUNC_EXPR:
16455 /* unary_expr: ... | '(' cast ')' val | ... */
16456 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16457 is_gimple_val, fb_rvalue);
16458 recalculate_side_effects (*expr_p);
16459 break;
16461 case INDIRECT_REF:
16463 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16464 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16465 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16467 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16468 if (*expr_p != save_expr)
16470 ret = GS_OK;
16471 break;
16474 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16475 is_gimple_reg, fb_rvalue);
16476 if (ret == GS_ERROR)
16477 break;
16479 recalculate_side_effects (*expr_p);
16480 *expr_p = fold_build2_loc (input_location, MEM_REF,
16481 TREE_TYPE (*expr_p),
16482 TREE_OPERAND (*expr_p, 0),
16483 build_int_cst (saved_ptr_type, 0));
16484 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16485 TREE_THIS_NOTRAP (*expr_p) = notrap;
16486 ret = GS_OK;
16487 break;
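/* E.g. a surviving "*p" is rewritten as the equivalent
   "MEM_REF <p, 0>"; the zero offset is built with the original
   pointer type, which is what carries the alias information for
   the access.  */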
16490 /* We arrive here through the various re-gimplification paths. */
16491 case MEM_REF:
16492 /* First try re-folding the whole thing. */
16493 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16494 TREE_OPERAND (*expr_p, 0),
16495 TREE_OPERAND (*expr_p, 1));
16496 if (tmp)
16498 REF_REVERSE_STORAGE_ORDER (tmp)
16499 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16500 *expr_p = tmp;
16501 recalculate_side_effects (*expr_p);
16502 ret = GS_OK;
16503 break;
16505 /* Avoid re-gimplifying the address operand if it is already
16506 in suitable form. Re-gimplifying would mark the address
16507 operand addressable. Always gimplify when not in SSA form
16508 as we may still have to gimplify decls with value-exprs. */
16509 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16510 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16512 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16513 is_gimple_mem_ref_addr, fb_rvalue);
16514 if (ret == GS_ERROR)
16515 break;
16517 recalculate_side_effects (*expr_p);
16518 ret = GS_ALL_DONE;
16519 break;
16521 /* Constants need not be gimplified. */
16522 case INTEGER_CST:
16523 case REAL_CST:
16524 case FIXED_CST:
16525 case STRING_CST:
16526 case COMPLEX_CST:
16527 case VECTOR_CST:
16528 /* Drop the overflow flag on constants, we do not want
16529 that in the GIMPLE IL. */
16530 if (TREE_OVERFLOW_P (*expr_p))
16531 *expr_p = drop_tree_overflow (*expr_p);
16532 ret = GS_ALL_DONE;
16533 break;
16535 case CONST_DECL:
16536 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16537 CONST_DECL node. Otherwise the decl is replaceable by its
16538 value. */
16539 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16540 if (fallback & fb_lvalue)
16541 ret = GS_ALL_DONE;
16542 else
16544 *expr_p = DECL_INITIAL (*expr_p);
16545 ret = GS_OK;
16547 break;
16549 case DECL_EXPR:
16550 ret = gimplify_decl_expr (expr_p, pre_p);
16551 break;
16553 case BIND_EXPR:
16554 ret = gimplify_bind_expr (expr_p, pre_p);
16555 break;
16557 case LOOP_EXPR:
16558 ret = gimplify_loop_expr (expr_p, pre_p);
16559 break;
16561 case SWITCH_EXPR:
16562 ret = gimplify_switch_expr (expr_p, pre_p);
16563 break;
16565 case EXIT_EXPR:
16566 ret = gimplify_exit_expr (expr_p);
16567 break;
16569 case GOTO_EXPR:
16570 /* If the target is not a LABEL_DECL, then it is a computed jump
16571 and the target needs to be gimplified. */
16572 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16574 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16575 NULL, is_gimple_val, fb_rvalue);
16576 if (ret == GS_ERROR)
16577 break;
16579 gimplify_seq_add_stmt (pre_p,
16580 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16581 ret = GS_ALL_DONE;
16582 break;
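/* Only a computed "goto *p;" needs work here: P is forced into a
   GIMPLE value first.  A direct "goto lab;" keeps its LABEL_DECL
   destination untouched.  */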
16584 case PREDICT_EXPR:
16585 gimplify_seq_add_stmt (pre_p,
16586 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16587 PREDICT_EXPR_OUTCOME (*expr_p)));
16588 ret = GS_ALL_DONE;
16589 break;
16591 case LABEL_EXPR:
16592 ret = gimplify_label_expr (expr_p, pre_p);
16593 label = LABEL_EXPR_LABEL (*expr_p);
16594 gcc_assert (decl_function_context (label) == current_function_decl);
16596 /* If the label is used in a goto statement, or the address of the
16597 label is taken, we need to unpoison all variables that were seen
16598 so far. Doing so prevents us from reporting false positives. */
16599 if (asan_poisoned_variables
16600 && asan_used_labels != NULL
16601 && asan_used_labels->contains (label)
16602 && !gimplify_omp_ctxp)
16603 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16604 break;
16606 case CASE_LABEL_EXPR:
16607 ret = gimplify_case_label_expr (expr_p, pre_p);
16609 if (gimplify_ctxp->live_switch_vars)
16610 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16611 pre_p);
16612 break;
16614 case RETURN_EXPR:
16615 ret = gimplify_return_expr (*expr_p, pre_p);
16616 break;
16618 case CONSTRUCTOR:
16619 /* Don't reduce this in place; let gimplify_init_constructor work its
16620 magic. But if we're just elaborating this for side effects, just
16621 gimplify any element that has side-effects. */
16622 if (fallback == fb_none)
16624 unsigned HOST_WIDE_INT ix;
16625 tree val;
16626 tree temp = NULL_TREE;
16627 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16628 if (TREE_SIDE_EFFECTS (val))
16629 append_to_statement_list (val, &temp);
16631 *expr_p = temp;
16632 ret = temp ? GS_OK : GS_ALL_DONE;
16634 /* C99 code may assign to an array in a constructed
16635 structure or union, and this has undefined behavior only
16636 on execution, so create a temporary if an lvalue is
16637 required. */
16638 else if (fallback == fb_lvalue)
16640 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16641 mark_addressable (*expr_p);
16642 ret = GS_OK;
16644 else
16645 ret = GS_ALL_DONE;
16646 break;
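/* So e.g. the statement "(void) (struct S) { f (), g () };" keeps
   only the calls to f and g; the aggregate value itself is never
   built because nothing needs it.  */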
16648 /* The following are special cases that are not handled by the
16649 original GIMPLE grammar. */
16651 /* Each SAVE_EXPR node is converted into a GIMPLE identifier and
16652 eliminated. */
16653 case SAVE_EXPR:
16654 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16655 break;
16657 case BIT_FIELD_REF:
16658 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16659 post_p, is_gimple_lvalue, fb_either);
16660 recalculate_side_effects (*expr_p);
16661 break;
16663 case TARGET_MEM_REF:
16665 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16667 if (TMR_BASE (*expr_p))
16668 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16669 post_p, is_gimple_mem_ref_addr, fb_either);
16670 if (TMR_INDEX (*expr_p))
16671 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16672 post_p, is_gimple_val, fb_rvalue);
16673 if (TMR_INDEX2 (*expr_p))
16674 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16675 post_p, is_gimple_val, fb_rvalue);
16676 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16677 ret = MIN (r0, r1);
16679 break;
16681 case NON_LVALUE_EXPR:
16682 /* This should have been stripped above. */
16683 gcc_unreachable ();
16685 case ASM_EXPR:
16686 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16687 break;
16689 case TRY_FINALLY_EXPR:
16690 case TRY_CATCH_EXPR:
16692 gimple_seq eval, cleanup;
16693 gtry *try_;
16695 /* Calls to destructors are generated automatically in FINALLY/CATCH
16696 blocks. Their location should be UNKNOWN_LOCATION. However,
16697 gimplify_call_expr resets such call stmts to input_location
16698 if it finds that their location is unknown. To prevent that for
16699 destructors, we set input_location to unknown here.
16700 Note that this only affects the destructor calls in the FINALLY/CATCH
16701 block; input_location is automatically restored to its original
16702 value by the end of gimplify_expr. */
16703 input_location = UNKNOWN_LOCATION;
16704 eval = cleanup = NULL;
16705 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16706 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16707 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16709 gimple_seq n = NULL, e = NULL;
16710 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16711 0), &n);
16712 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16713 1), &e);
16714 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16716 geh_else *stmt = gimple_build_eh_else (n, e);
16717 gimple_seq_add_stmt (&cleanup, stmt);
16720 else
16721 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16722 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16723 if (gimple_seq_empty_p (cleanup))
16725 gimple_seq_add_seq (pre_p, eval);
16726 ret = GS_ALL_DONE;
16727 break;
16729 try_ = gimple_build_try (eval, cleanup,
16730 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16731 ? GIMPLE_TRY_FINALLY
16732 : GIMPLE_TRY_CATCH);
16733 if (EXPR_HAS_LOCATION (save_expr))
16734 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16735 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16736 gimple_set_location (try_, saved_location);
16737 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16738 gimple_try_set_catch_is_cleanup (try_,
16739 TRY_CATCH_IS_CLEANUP (*expr_p));
16740 gimplify_seq_add_stmt (pre_p, try_);
16741 ret = GS_ALL_DONE;
16742 break;
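/* The result is a GIMPLE_TRY tuple; e.g. a C++ block with a local
   whose destructor must run gimplifies to roughly
   "try { body } finally { S::~S (&s); }".  An EH_ELSE_EXPR cleanup
   instead splits the finally part into separate normal-path and
   exception-path sequences.  */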
16745 case CLEANUP_POINT_EXPR:
16746 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16747 break;
16749 case TARGET_EXPR:
16750 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16751 break;
16753 case CATCH_EXPR:
16755 gimple *c;
16756 gimple_seq handler = NULL;
16757 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16758 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16759 gimplify_seq_add_stmt (pre_p, c);
16760 ret = GS_ALL_DONE;
16761 break;
16764 case EH_FILTER_EXPR:
16766 gimple *ehf;
16767 gimple_seq failure = NULL;
16769 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16770 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16771 copy_warning (ehf, *expr_p);
16772 gimplify_seq_add_stmt (pre_p, ehf);
16773 ret = GS_ALL_DONE;
16774 break;
16777 case OBJ_TYPE_REF:
16779 enum gimplify_status r0, r1;
16780 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16781 post_p, is_gimple_val, fb_rvalue);
16782 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16783 post_p, is_gimple_val, fb_rvalue);
16784 TREE_SIDE_EFFECTS (*expr_p) = 0;
16785 ret = MIN (r0, r1);
16787 break;
16789 case LABEL_DECL:
16790 /* We get here when taking the address of a label. We mark
16791 the label as "forced", meaning it can never be removed and
16792 is a potential target for any computed goto. */
16793 FORCED_LABEL (*expr_p) = 1;
16794 ret = GS_ALL_DONE;
16795 break;
16797 case STATEMENT_LIST:
16798 ret = gimplify_statement_list (expr_p, pre_p);
16799 break;
16801 case WITH_SIZE_EXPR:
16803 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16804 post_p == &internal_post ? NULL : post_p,
16805 gimple_test_f, fallback);
16806 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16807 is_gimple_val, fb_rvalue);
16808 ret = GS_ALL_DONE;
16810 break;
16812 case VAR_DECL:
16813 case PARM_DECL:
16814 ret = gimplify_var_or_parm_decl (expr_p);
16815 break;
16817 case RESULT_DECL:
16818 /* When within an OMP context, notice uses of variables. */
16819 if (gimplify_omp_ctxp)
16820 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16821 ret = GS_ALL_DONE;
16822 break;
16824 case DEBUG_EXPR_DECL:
16825 gcc_unreachable ();
16827 case DEBUG_BEGIN_STMT:
16828 gimplify_seq_add_stmt (pre_p,
16829 gimple_build_debug_begin_stmt
16830 (TREE_BLOCK (*expr_p),
16831 EXPR_LOCATION (*expr_p)));
16832 ret = GS_ALL_DONE;
16833 *expr_p = NULL;
16834 break;
16836 case SSA_NAME:
16837 /* Allow callbacks into the gimplifier during optimization. */
16838 ret = GS_ALL_DONE;
16839 break;
16841 case OMP_PARALLEL:
16842 gimplify_omp_parallel (expr_p, pre_p);
16843 ret = GS_ALL_DONE;
16844 break;
16846 case OMP_TASK:
16847 gimplify_omp_task (expr_p, pre_p);
16848 ret = GS_ALL_DONE;
16849 break;
16851 case OMP_SIMD:
16853 /* Temporarily disable into_ssa, as scan_omp_simd
16854 which calls copy_gimple_seq_and_replace_locals can't deal
16855 with SSA_NAMEs defined outside of the body properly. */
16856 bool saved_into_ssa = gimplify_ctxp->into_ssa;
16857 gimplify_ctxp->into_ssa = false;
16858 ret = gimplify_omp_for (expr_p, pre_p);
16859 gimplify_ctxp->into_ssa = saved_into_ssa;
16860 break;
16863 case OMP_FOR:
16864 case OMP_DISTRIBUTE:
16865 case OMP_TASKLOOP:
16866 case OACC_LOOP:
16867 ret = gimplify_omp_for (expr_p, pre_p);
16868 break;
16870 case OMP_LOOP:
16871 ret = gimplify_omp_loop (expr_p, pre_p);
16872 break;
16874 case OACC_CACHE:
16875 gimplify_oacc_cache (expr_p, pre_p);
16876 ret = GS_ALL_DONE;
16877 break;
16879 case OACC_DECLARE:
16880 gimplify_oacc_declare (expr_p, pre_p);
16881 ret = GS_ALL_DONE;
16882 break;
16884 case OACC_HOST_DATA:
16885 case OACC_DATA:
16886 case OACC_KERNELS:
16887 case OACC_PARALLEL:
16888 case OACC_SERIAL:
16889 case OMP_SCOPE:
16890 case OMP_SECTIONS:
16891 case OMP_SINGLE:
16892 case OMP_TARGET:
16893 case OMP_TARGET_DATA:
16894 case OMP_TEAMS:
16895 gimplify_omp_workshare (expr_p, pre_p);
16896 ret = GS_ALL_DONE;
16897 break;
16899 case OACC_ENTER_DATA:
16900 case OACC_EXIT_DATA:
16901 case OACC_UPDATE:
16902 case OMP_TARGET_UPDATE:
16903 case OMP_TARGET_ENTER_DATA:
16904 case OMP_TARGET_EXIT_DATA:
16905 gimplify_omp_target_update (expr_p, pre_p);
16906 ret = GS_ALL_DONE;
16907 break;
16909 case OMP_SECTION:
16910 case OMP_MASTER:
16911 case OMP_MASKED:
16912 case OMP_ORDERED:
16913 case OMP_CRITICAL:
16914 case OMP_SCAN:
16916 gimple_seq body = NULL;
16917 gimple *g;
16918 bool saved_in_omp_construct = in_omp_construct;
16920 in_omp_construct = true;
16921 gimplify_and_add (OMP_BODY (*expr_p), &body);
16922 in_omp_construct = saved_in_omp_construct;
16923 switch (TREE_CODE (*expr_p))
16925 case OMP_SECTION:
16926 g = gimple_build_omp_section (body);
16927 break;
16928 case OMP_MASTER:
16929 g = gimple_build_omp_master (body);
16930 break;
16931 case OMP_ORDERED:
16932 g = gimplify_omp_ordered (*expr_p, body);
16933 if (OMP_BODY (*expr_p) == NULL_TREE
16934 && gimple_code (g) == GIMPLE_OMP_ORDERED)
16935 gimple_omp_ordered_standalone (g);
16936 break;
16937 case OMP_MASKED:
16938 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
16939 pre_p, ORT_WORKSHARE, OMP_MASKED);
16940 gimplify_adjust_omp_clauses (pre_p, body,
16941 &OMP_MASKED_CLAUSES (*expr_p),
16942 OMP_MASKED);
16943 g = gimple_build_omp_masked (body,
16944 OMP_MASKED_CLAUSES (*expr_p));
16945 break;
16946 case OMP_CRITICAL:
16947 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
16948 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
16949 gimplify_adjust_omp_clauses (pre_p, body,
16950 &OMP_CRITICAL_CLAUSES (*expr_p),
16951 OMP_CRITICAL);
16952 g = gimple_build_omp_critical (body,
16953 OMP_CRITICAL_NAME (*expr_p),
16954 OMP_CRITICAL_CLAUSES (*expr_p));
16955 break;
16956 case OMP_SCAN:
16957 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
16958 pre_p, ORT_WORKSHARE, OMP_SCAN);
16959 gimplify_adjust_omp_clauses (pre_p, body,
16960 &OMP_SCAN_CLAUSES (*expr_p),
16961 OMP_SCAN);
16962 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
16963 break;
16964 default:
16965 gcc_unreachable ();
16967 gimplify_seq_add_stmt (pre_p, g);
16968 ret = GS_ALL_DONE;
16969 break;
16972 case OMP_TASKGROUP:
16974 gimple_seq body = NULL;
16976 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
16977 bool saved_in_omp_construct = in_omp_construct;
16978 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
16979 OMP_TASKGROUP);
16980 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
16982 in_omp_construct = true;
16983 gimplify_and_add (OMP_BODY (*expr_p), &body);
16984 in_omp_construct = saved_in_omp_construct;
16985 gimple_seq cleanup = NULL;
16986 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
16987 gimple *g = gimple_build_call (fn, 0);
16988 gimple_seq_add_stmt (&cleanup, g);
16989 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
16990 body = NULL;
16991 gimple_seq_add_stmt (&body, g);
16992 g = gimple_build_omp_taskgroup (body, *pclauses);
16993 gimplify_seq_add_stmt (pre_p, g);
16994 ret = GS_ALL_DONE;
16995 break;
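/* The body is wrapped so that the runtime end hook always runs,
   roughly:
     GIMPLE_OMP_TASKGROUP
       try { body } finally { GOMP_taskgroup_end (); }  */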
16998 case OMP_ATOMIC:
16999 case OMP_ATOMIC_READ:
17000 case OMP_ATOMIC_CAPTURE_OLD:
17001 case OMP_ATOMIC_CAPTURE_NEW:
17002 ret = gimplify_omp_atomic (expr_p, pre_p);
17003 break;
17005 case TRANSACTION_EXPR:
17006 ret = gimplify_transaction (expr_p, pre_p);
17007 break;
17009 case TRUTH_AND_EXPR:
17010 case TRUTH_OR_EXPR:
17011 case TRUTH_XOR_EXPR:
17013 tree orig_type = TREE_TYPE (*expr_p);
17014 tree new_type, xop0, xop1;
17015 *expr_p = gimple_boolify (*expr_p);
17016 new_type = TREE_TYPE (*expr_p);
17017 if (!useless_type_conversion_p (orig_type, new_type))
17019 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17020 ret = GS_OK;
17021 break;
17024 /* Boolified binary truth expressions are semantically equivalent
17025 to bitwise binary expressions. Canonicalize them to the
17026 bitwise variant. */
17027 switch (TREE_CODE (*expr_p))
17029 case TRUTH_AND_EXPR:
17030 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17031 break;
17032 case TRUTH_OR_EXPR:
17033 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17034 break;
17035 case TRUTH_XOR_EXPR:
17036 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17037 break;
17038 default:
17039 break;
17041 /* Now make sure that operands have compatible type to
17042 expression's new_type. */
17043 xop0 = TREE_OPERAND (*expr_p, 0);
17044 xop1 = TREE_OPERAND (*expr_p, 1);
17045 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17046 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17047 new_type,
17048 xop0);
17049 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17050 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17051 new_type,
17052 xop1);
17053 /* Continue classified as tcc_binary. */
17054 goto expr_2;
17057 case VEC_COND_EXPR:
17058 goto expr_3;
17060 case VEC_PERM_EXPR:
17061 /* Classified as tcc_expression. */
17062 goto expr_3;
17064 case BIT_INSERT_EXPR:
17065 /* Argument 3 is a constant. */
17066 goto expr_2;
17068 case POINTER_PLUS_EXPR:
17070 enum gimplify_status r0, r1;
17071 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17072 post_p, is_gimple_val, fb_rvalue);
17073 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17074 post_p, is_gimple_val, fb_rvalue);
17075 recalculate_side_effects (*expr_p);
17076 ret = MIN (r0, r1);
17077 break;
17080 default:
17081 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17083 case tcc_comparison:
17084 /* Handle comparisons of aggregate objects of non-scalar mode
17085 with a call to memcmp. It would be nice to only have to do
17086 this for variable-sized objects, but then we'd have to allow
17087 the same nest of reference nodes we allow for MODIFY_EXPR and
17088 that's too complex.
17090 Compare scalar mode aggregates as scalar mode values. Using
17091 memcmp for them would be very inefficient at best, and is
17092 plain wrong if bitfields are involved. */
17093 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17094 ret = GS_ERROR;
17095 else
17097 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17099 /* Vector comparisons need no boolification. */
17100 if (TREE_CODE (type) == VECTOR_TYPE)
17101 goto expr_2;
17102 else if (!AGGREGATE_TYPE_P (type))
17104 tree org_type = TREE_TYPE (*expr_p);
17105 *expr_p = gimple_boolify (*expr_p);
17106 if (!useless_type_conversion_p (org_type,
17107 TREE_TYPE (*expr_p)))
17109 *expr_p = fold_convert_loc (input_location,
17110 org_type, *expr_p);
17111 ret = GS_OK;
17113 else
17114 goto expr_2;
17116 else if (TYPE_MODE (type) != BLKmode)
17117 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17118 else
17119 ret = gimplify_variable_sized_compare (expr_p);
17121 break;
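/* So e.g. equality of two word-sized structs is tested as a single
   scalar comparison of their bit representations, while BLKmode or
   variable-sized aggregates end up as an emitted call to memcmp.  */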
17123 /* If *EXPR_P does not need to be special-cased, handle it
17124 according to its class. */
17125 case tcc_unary:
17126 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17127 post_p, is_gimple_val, fb_rvalue);
17128 break;
17130 case tcc_binary:
17131 expr_2:
17133 enum gimplify_status r0, r1;
17135 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17136 post_p, is_gimple_val, fb_rvalue);
17137 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17138 post_p, is_gimple_val, fb_rvalue);
17140 ret = MIN (r0, r1);
17141 break;
17144 expr_3:
17146 enum gimplify_status r0, r1, r2;
17148 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17149 post_p, is_gimple_val, fb_rvalue);
17150 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17151 post_p, is_gimple_val, fb_rvalue);
17152 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17153 post_p, is_gimple_val, fb_rvalue);
17155 ret = MIN (MIN (r0, r1), r2);
17156 break;
17159 case tcc_declaration:
17160 case tcc_constant:
17161 ret = GS_ALL_DONE;
17162 goto dont_recalculate;
17164 default:
17165 gcc_unreachable ();
17168 recalculate_side_effects (*expr_p);
17170 dont_recalculate:
17171 break;
17174 gcc_assert (*expr_p || ret != GS_OK);
17176 while (ret == GS_OK);
17178 /* If we encountered an error_mark somewhere nested inside, either
17179 stub out the statement or propagate the error back out. */
17180 if (ret == GS_ERROR)
17182 if (is_statement)
17183 *expr_p = NULL;
17184 goto out;
17187 /* This was only valid as a return value from the langhook, which
17188 we handled. Make sure it doesn't escape from any other context. */
17189 gcc_assert (ret != GS_UNHANDLED);
17191 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17193 /* We aren't looking for a value, and we don't have a valid
17194 statement. If it doesn't have side-effects, throw it away.
17195 We can also get here with code such as "*&&L;", where L is
17196 a LABEL_DECL that is marked as FORCED_LABEL. */
17197 if (TREE_CODE (*expr_p) == LABEL_DECL
17198 || !TREE_SIDE_EFFECTS (*expr_p))
17199 *expr_p = NULL;
17200 else if (!TREE_THIS_VOLATILE (*expr_p))
17202 /* This is probably a _REF that contains something nested that
17203 has side effects. Recurse through the operands to find it. */
17204 enum tree_code code = TREE_CODE (*expr_p);
17206 switch (code)
17208 case COMPONENT_REF:
17209 case REALPART_EXPR:
17210 case IMAGPART_EXPR:
17211 case VIEW_CONVERT_EXPR:
17212 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17213 gimple_test_f, fallback);
17214 break;
17216 case ARRAY_REF:
17217 case ARRAY_RANGE_REF:
17218 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17219 gimple_test_f, fallback);
17220 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17221 gimple_test_f, fallback);
17222 break;
17224 default:
17225 /* Anything else with side-effects must be converted to
17226 a valid statement before we get here. */
17227 gcc_unreachable ();
17230 *expr_p = NULL;
17232 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17233 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17234 && !is_empty_type (TREE_TYPE (*expr_p)))
17236 /* Historically, the compiler has treated a bare reference
17237 to a non-BLKmode volatile lvalue as forcing a load. */
17238 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17240 /* Normally, we do not want to create a temporary for a
17241 TREE_ADDRESSABLE type because such a type should not be
17242 copied by bitwise-assignment. However, we make an
17243 exception here, as all we are doing here is ensuring that
17244 we read the bytes that make up the type. We use
17245 create_tmp_var_raw because create_tmp_var will abort when
17246 given a TREE_ADDRESSABLE type. */
17247 tree tmp = create_tmp_var_raw (type, "vol");
17248 gimple_add_tmp_var (tmp);
17249 gimplify_assign (tmp, *expr_p, pre_p);
17250 *expr_p = NULL;
17252 else
17253 /* We can't do anything useful with a volatile reference to
17254 an incomplete type, so just throw it away. Likewise for
17255 a BLKmode type, since any implicit inner load should
17256 already have been turned into an explicit one by the
17257 gimplification process. */
17258 *expr_p = NULL;
17261 /* If we are gimplifying at the statement level, we're done. Tack
17262 everything together and return. */
17263 if (fallback == fb_none || is_statement)
17265 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17266 it out for GC to reclaim it. */
17267 *expr_p = NULL_TREE;
17269 if (!gimple_seq_empty_p (internal_pre)
17270 || !gimple_seq_empty_p (internal_post))
17272 gimplify_seq_add_seq (&internal_pre, internal_post);
17273 gimplify_seq_add_seq (pre_p, internal_pre);
17276 /* The result of gimplifying *EXPR_P is going to be the last few
17277 statements in *PRE_P and *POST_P. Add location information
17278 to all the statements that were added by the gimplification
17279 helpers. */
17280 if (!gimple_seq_empty_p (*pre_p))
17281 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17283 if (!gimple_seq_empty_p (*post_p))
17284 annotate_all_with_location_after (*post_p, post_last_gsi,
17285 input_location);
17287 goto out;
17290 #ifdef ENABLE_GIMPLE_CHECKING
17291 if (*expr_p)
17293 enum tree_code code = TREE_CODE (*expr_p);
17294 /* These expressions should already be in gimple IR form. */
17295 gcc_assert (code != MODIFY_EXPR
17296 && code != ASM_EXPR
17297 && code != BIND_EXPR
17298 && code != CATCH_EXPR
17299 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17300 && code != EH_FILTER_EXPR
17301 && code != GOTO_EXPR
17302 && code != LABEL_EXPR
17303 && code != LOOP_EXPR
17304 && code != SWITCH_EXPR
17305 && code != TRY_FINALLY_EXPR
17306 && code != EH_ELSE_EXPR
17307 && code != OACC_PARALLEL
17308 && code != OACC_KERNELS
17309 && code != OACC_SERIAL
17310 && code != OACC_DATA
17311 && code != OACC_HOST_DATA
17312 && code != OACC_DECLARE
17313 && code != OACC_UPDATE
17314 && code != OACC_ENTER_DATA
17315 && code != OACC_EXIT_DATA
17316 && code != OACC_CACHE
17317 && code != OMP_CRITICAL
17318 && code != OMP_FOR
17319 && code != OACC_LOOP
17320 && code != OMP_MASTER
17321 && code != OMP_MASKED
17322 && code != OMP_TASKGROUP
17323 && code != OMP_ORDERED
17324 && code != OMP_PARALLEL
17325 && code != OMP_SCAN
17326 && code != OMP_SECTIONS
17327 && code != OMP_SECTION
17328 && code != OMP_SINGLE
17329 && code != OMP_SCOPE);
17331 #endif
17333 /* Otherwise we're gimplifying a subexpression, so the resulting
17334 value is interesting. If it's a valid operand that matches
17335 GIMPLE_TEST_F, we're done. Unless we are handling some
17336 post-effects internally; if that's the case, we need to copy into
17337 a temporary before adding the post-effects to POST_P. */
17338 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17339 goto out;
17341 /* Otherwise, we need to create a new temporary for the gimplified
17342 expression. */
17344 /* We can't return an lvalue if we have an internal postqueue. The
17345 object the lvalue refers to would (probably) be modified by the
17346 postqueue; we need to copy the value out first, which means an
17347 rvalue. */
17348 if ((fallback & fb_lvalue)
17349 && gimple_seq_empty_p (internal_post)
17350 && is_gimple_addressable (*expr_p))
17352 /* An lvalue will do. Take the address of the expression, store it
17353 in a temporary, and replace the expression with a MEM_REF of
17354 that temporary. */
17355 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17356 unsigned int ref_align = get_object_alignment (*expr_p);
17357 tree ref_type = TREE_TYPE (*expr_p);
17358 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17359 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17360 if (TYPE_ALIGN (ref_type) != ref_align)
17361 ref_type = build_aligned_type (ref_type, ref_align);
17362 *expr_p = build2 (MEM_REF, ref_type,
17363 tmp, build_zero_cst (ref_alias_type));
17365 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17367 /* An rvalue will do. Assign the gimplified expression into a
17368 new temporary TMP and replace the original expression with
17369 TMP. First, make sure that the expression has a type so that
17370 it can be assigned into a temporary. */
17371 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17372 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17374 else
17376 #ifdef ENABLE_GIMPLE_CHECKING
17377 if (!(fallback & fb_mayfail))
17379 fprintf (stderr, "gimplification failed:\n");
17380 print_generic_expr (stderr, *expr_p);
17381 debug_tree (*expr_p);
17382 internal_error ("gimplification failed");
17384 #endif
17385 gcc_assert (fallback & fb_mayfail);
17387 /* If this is an asm statement, and the user asked for the
17388 impossible, don't die. Fail and let gimplify_asm_expr
17389 issue an error. */
17390 ret = GS_ERROR;
17391 goto out;
17394 /* Make sure the temporary matches our predicate. */
17395 gcc_assert ((*gimple_test_f) (*expr_p));
17397 if (!gimple_seq_empty_p (internal_post))
17399 annotate_all_with_location (internal_post, input_location);
17400 gimplify_seq_add_seq (pre_p, internal_post);
17403 out:
17404 input_location = saved_location;
17405 return ret;
17408 /* Like gimplify_expr but make sure the gimplified result is not itself
17409 an SSA name (if it would be one, it is copied into a temporary decl).
17410 Temporaries required by evaluating *EXPR_P may still be SSA names. */
17412 static enum gimplify_status
17413 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17414 bool (*gimple_test_f) (tree), fallback_t fallback,
17415 bool allow_ssa)
17417 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17418 gimple_test_f, fallback);
17419 if (! allow_ssa
17420 && TREE_CODE (*expr_p) == SSA_NAME)
17421 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17422 return ret;
17425 /* Look through TYPE for variable-sized objects and gimplify each such
17426 size that we find. Add to LIST_P any statements generated. */
17428 void
17429 gimplify_type_sizes (tree type, gimple_seq *list_p)
17431 if (type == NULL || type == error_mark_node)
17432 return;
17434 const bool ignored_p
17435 = TYPE_NAME (type)
17436 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17437 && DECL_IGNORED_P (TYPE_NAME (type));
17438 tree t;
17440 /* We first do the main variant, then copy into any other variants. */
17441 type = TYPE_MAIN_VARIANT (type);
17443 /* Avoid infinite recursion. */
17444 if (TYPE_SIZES_GIMPLIFIED (type))
17445 return;
17447 TYPE_SIZES_GIMPLIFIED (type) = 1;
17449 switch (TREE_CODE (type))
17451 case INTEGER_TYPE:
17452 case ENUMERAL_TYPE:
17453 case BOOLEAN_TYPE:
17454 case REAL_TYPE:
17455 case FIXED_POINT_TYPE:
17456 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17457 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17459 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17461 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17462 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17464 break;
17466 case ARRAY_TYPE:
17467 /* These types may not have declarations, so handle them here. */
17468 gimplify_type_sizes (TREE_TYPE (type), list_p);
17469 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17470 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
17471 with assigned stack slots, for -O1+ -g they should be tracked
17472 by VTA. */
17473 if (!ignored_p
17474 && TYPE_DOMAIN (type)
17475 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17477 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17478 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17479 DECL_IGNORED_P (t) = 0;
17480 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17481 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17482 DECL_IGNORED_P (t) = 0;
17484 break;
17486 case RECORD_TYPE:
17487 case UNION_TYPE:
17488 case QUAL_UNION_TYPE:
17489 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17490 if (TREE_CODE (field) == FIELD_DECL)
17492 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17493 /* Likewise, ensure variable offsets aren't removed. */
17494 if (!ignored_p
17495 && (t = DECL_FIELD_OFFSET (field))
17496 && VAR_P (t)
17497 && DECL_ARTIFICIAL (t))
17498 DECL_IGNORED_P (t) = 0;
17499 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17500 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17501 gimplify_type_sizes (TREE_TYPE (field), list_p);
17503 break;
17505 case POINTER_TYPE:
17506 case REFERENCE_TYPE:
17507 /* We used to recurse on the pointed-to type here, which turned out to
17508 be incorrect because its definition might refer to variables not
17509 yet initialized at this point if a forward declaration is involved.
17511 It was actually useful for anonymous pointed-to types to ensure
17512 that the sizes evaluation dominates every possible later use of the
17513 values. Restricting to such types here would be safe since there
17514 is no possible forward declaration around, but would introduce an
17515 undesirable middle-end semantic to anonymity. We then defer to
17516 front-ends the responsibility of ensuring that the sizes are
17517 evaluated both early and late enough, e.g. by attaching artificial
17518 type declarations to the tree. */
17519 break;
17521 default:
17522 break;
17525 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17526 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17528 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17530 TYPE_SIZE (t) = TYPE_SIZE (type);
17531 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17532 TYPE_SIZES_GIMPLIFIED (t) = 1;
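/* E.g. for a C99 VLA "int a[n][m]" this evaluates the SAVE_EXPRs
   hidden in TYPE_SIZE, TYPE_SIZE_UNIT and the domain bounds into
   temporaries once, roughly "D.1 = n; D.2 = m; ...", so that every
   later use of the type sees stable, already-computed sizes.  */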
17536 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17537 a size or position, has had all of its SAVE_EXPRs evaluated.
17538 We add any required statements to *STMT_P. */
17540 void
17541 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17543 tree expr = *expr_p;
17545 /* We don't do anything if the value isn't there, is constant, or contains
17546 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17547 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17548 will want to replace it with a new variable, but that will cause problems
17549 if this type is from outside the function. It's OK to have that here. */
17550 if (expr == NULL_TREE
17551 || is_gimple_constant (expr)
17552 || VAR_P (expr)
17553 || CONTAINS_PLACEHOLDER_P (expr))
17554 return;
17556 *expr_p = unshare_expr (expr);
17558 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17559 if the def vanishes. */
17560 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17562 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17563 FE, ensure that it is a VAR_DECL; otherwise we might treat some decls
17564 as VLAs via gimplify_vla_decl even when all their sizes are INTEGER_CSTs. */
17565 if (is_gimple_constant (*expr_p))
17566 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17569 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17570 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17571 is true, also gimplify the parameters. */
17573 gbind *
17574 gimplify_body (tree fndecl, bool do_parms)
17576 location_t saved_location = input_location;
17577 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17578 gimple *outer_stmt;
17579 gbind *outer_bind;
17581 timevar_push (TV_TREE_GIMPLIFY);
17583 init_tree_ssa (cfun);
17585 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17586 gimplification. */
17587 default_rtl_profile ();
17589 gcc_assert (gimplify_ctxp == NULL);
17590 push_gimplify_context (true);
17592 if (flag_openacc || flag_openmp)
17594 gcc_assert (gimplify_omp_ctxp == NULL);
17595 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17596 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17599 /* Unshare most shared trees in the body and in that of any nested functions.
17600 It would seem we don't have to do this for nested functions because
17601 they are supposed to be output and then the outer function gimplified
17602 first, but the g++ front end doesn't always do it that way. */
17603 unshare_body (fndecl);
17604 unvisit_body (fndecl);
17606 /* Make sure input_location isn't set to something weird. */
17607 input_location = DECL_SOURCE_LOCATION (fndecl);
17609 /* Resolve callee-copies. This has to be done before processing
17610 the body so that DECL_VALUE_EXPR gets processed correctly. */
17611 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17613 /* Gimplify the function's body. */
17614 seq = NULL;
17615 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17616 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17617 if (!outer_stmt)
17619 outer_stmt = gimple_build_nop ();
17620 gimplify_seq_add_stmt (&seq, outer_stmt);
17623 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17624 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17625 if (gimple_code (outer_stmt) == GIMPLE_BIND
17626 && (gimple_seq_first_nondebug_stmt (seq)
17627 == gimple_seq_last_nondebug_stmt (seq)))
17629 outer_bind = as_a <gbind *> (outer_stmt);
17630 if (gimple_seq_first_stmt (seq) != outer_stmt
17631 || gimple_seq_last_stmt (seq) != outer_stmt)
17633 /* If there are debug stmts before or after outer_stmt, move them
17634 inside of outer_bind body. */
17635 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17636 gimple_seq second_seq = NULL;
17637 if (gimple_seq_first_stmt (seq) != outer_stmt
17638 && gimple_seq_last_stmt (seq) != outer_stmt)
17640 second_seq = gsi_split_seq_after (gsi);
17641 gsi_remove (&gsi, false);
17643 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17644 gsi_remove (&gsi, false);
17645 else
17647 gsi_remove (&gsi, false);
17648 second_seq = seq;
17649 seq = NULL;
17651 gimple_seq_add_seq_without_update (&seq,
17652 gimple_bind_body (outer_bind));
17653 gimple_seq_add_seq_without_update (&seq, second_seq);
17654 gimple_bind_set_body (outer_bind, seq);
17657 else
17658 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17660 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17662 /* If we had callee-copies statements, insert them at the beginning
17663 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17664 if (!gimple_seq_empty_p (parm_stmts))
17666 tree parm;
17668 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17669 if (parm_cleanup)
17671 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17672 GIMPLE_TRY_FINALLY);
17673 parm_stmts = NULL;
17674 gimple_seq_add_stmt (&parm_stmts, g);
17676 gimple_bind_set_body (outer_bind, parm_stmts);
17678 for (parm = DECL_ARGUMENTS (current_function_decl);
17679 parm; parm = DECL_CHAIN (parm))
17680 if (DECL_HAS_VALUE_EXPR_P (parm))
17682 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17683 DECL_IGNORED_P (parm) = 0;
17687 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17688 && gimplify_omp_ctxp)
17690 delete_omp_context (gimplify_omp_ctxp);
17691 gimplify_omp_ctxp = NULL;
17694 pop_gimplify_context (outer_bind);
17695 gcc_assert (gimplify_ctxp == NULL);
17697 if (flag_checking && !seen_error ())
17698 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17700 timevar_pop (TV_TREE_GIMPLIFY);
17701 input_location = saved_location;
17703 return outer_bind;
17706 typedef char *char_p; /* For DEF_VEC_P. */
17708 /* Return whether we should exclude FNDECL from instrumentation. */
17710 static bool
17711 flag_instrument_functions_exclude_p (tree fndecl)
17713 vec<char_p> *v;
17715 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17716 if (v && v->length () > 0)
17718 const char *name;
17719 int i;
17720 char *s;
17722 name = lang_hooks.decl_printable_name (fndecl, 1);
17723 FOR_EACH_VEC_ELT (*v, i, s)
17724 if (strstr (name, s) != NULL)
17725 return true;
17728 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17729 if (v && v->length () > 0)
17731 const char *name;
17732 int i;
17733 char *s;
17735 name = DECL_SOURCE_FILE (fndecl);
17736 FOR_EACH_VEC_ELT (*v, i, s)
17737 if (strstr (name, s) != NULL)
17738 return true;
17741 return false;
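/* E.g. -finstrument-functions-exclude-function-list=foo,bar skips
   every function whose printable name contains "foo" or "bar"; the
   strstr calls make this a substring match, not an exact one.  */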
17744 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17745 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17746 the instrumentation function. If STMT is not NULL, it is a statement
17747 to be executed just before the call to the instrumentation function. */
17749 static void
17750 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17751 tree cond_var, gimple *stmt)
17753 /* The instrumentation hooks aren't going to call the instrumented
17754 function and the address they receive is expected to be matchable
17755 against symbol addresses. Make sure we don't create a trampoline,
17756 in case the current function is nested. */
17757 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17758 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17760 tree label_true, label_false;
17761 if (cond_var)
17763 label_true = create_artificial_label (UNKNOWN_LOCATION);
17764 label_false = create_artificial_label (UNKNOWN_LOCATION);
17765 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17766 label_true, label_false);
17767 gimplify_seq_add_stmt (seq, cond);
17768 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17769 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17770 NOT_TAKEN));
17773 if (stmt)
17774 gimplify_seq_add_stmt (seq, stmt);
17776 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17777 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17778 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17779 gimple_call_set_lhs (call, tmp_var);
17780 gimplify_seq_add_stmt (seq, call);
17781 x = builtin_decl_implicit (fncode);
17782 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17783 gimplify_seq_add_stmt (seq, call);
17785 if (cond_var)
17786 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
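/* The sequence emitted above is roughly:
     if (cond_var == 0) goto lab_true; else goto lab_false;
     lab_true:
       [STMT]
       return_addr = __builtin_return_address (0);
       FNCODE (this_fn_addr, return_addr);
     lab_false:
   with the condition and labels omitted when COND_VAR is null.  */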
17789 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17790 node for the function we want to gimplify.
17792 Return the sequence of GIMPLE statements corresponding to the body
17793 of FNDECL. */
17795 void
17796 gimplify_function_tree (tree fndecl)
17798 gimple_seq seq;
17799 gbind *bind;
17801 gcc_assert (!gimple_body (fndecl));
17803 if (DECL_STRUCT_FUNCTION (fndecl))
17804 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17805 else
17806 push_struct_function (fndecl);
17808 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17809 if necessary. */
17810 cfun->curr_properties |= PROP_gimple_lva;
17812 if (asan_sanitize_use_after_scope ())
17813 asan_poisoned_variables = new hash_set<tree> ();
17814 bind = gimplify_body (fndecl, true);
17815 if (asan_poisoned_variables)
17817 delete asan_poisoned_variables;
17818 asan_poisoned_variables = NULL;
17821 /* The tree body of the function is no longer needed, replace it
17822 with the new GIMPLE body. */
17823 seq = NULL;
17824 gimple_seq_add_stmt (&seq, bind);
17825 gimple_set_body (fndecl, seq);
17827 /* If we're instrumenting function entry/exit, then prepend the call to
17828 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17829 catch the exit hook. */
17830 /* ??? Add some way to ignore exceptions for this TFE. */
17831 if (flag_instrument_function_entry_exit
17832 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
17833 /* Do not instrument extern inline functions. */
17834 && !(DECL_DECLARED_INLINE_P (fndecl)
17835 && DECL_EXTERNAL (fndecl)
17836 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
17837 && !flag_instrument_functions_exclude_p (fndecl))
17839 gimple_seq body = NULL, cleanup = NULL;
17840 gassign *assign;
17841 tree cond_var;
17843 /* If -finstrument-functions-once is specified, generate:
17845 static volatile bool C.0 = false;
17846 bool tmp_called;
17848 tmp_called = C.0;
17849 if (!tmp_called)
17851 C.0 = true;
17852 [call profiling enter function]
17855 without specific protection for data races. */
17856 if (flag_instrument_function_entry_exit > 1)
17858 tree first_var
17859 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
17860 VAR_DECL,
17861 create_tmp_var_name ("C"),
17862 boolean_type_node);
17863 DECL_ARTIFICIAL (first_var) = 1;
17864 DECL_IGNORED_P (first_var) = 1;
17865 TREE_STATIC (first_var) = 1;
17866 TREE_THIS_VOLATILE (first_var) = 1;
17867 TREE_USED (first_var) = 1;
17868 DECL_INITIAL (first_var) = boolean_false_node;
17869 varpool_node::add (first_var);
17871 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
17872 assign = gimple_build_assign (cond_var, first_var);
17873 gimplify_seq_add_stmt (&body, assign);
17875 assign = gimple_build_assign (first_var, boolean_true_node);
17878 else
17880 cond_var = NULL_TREE;
17881 assign = NULL;
17884 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
17885 cond_var, assign);
17887 /* If -finstrument-functions-once is specified, generate:
17889 if (!tmp_called)
17890 [call profiling exit function]
17892 without specific protection for data races. */
17893 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
17894 cond_var, NULL);
17896 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
17897 gimplify_seq_add_stmt (&body, tf);
17898 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
17900 /* Replace the current function body with the body
17901 wrapped in the try/finally TF. */
17902 seq = NULL;
17903 gimple_seq_add_stmt (&seq, new_bind);
17904 gimple_set_body (fndecl, seq);
17905 bind = new_bind;
17908 if (sanitize_flags_p (SANITIZE_THREAD)
17909 && param_tsan_instrument_func_entry_exit)
17911 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
17912 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
17913 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
17914 /* Replace the current function body with the body
17915 wrapped in the try/finally TF. */
17916 seq = NULL;
17917 gimple_seq_add_stmt (&seq, new_bind);
17918 gimple_set_body (fndecl, seq);
17921 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17922 cfun->curr_properties |= PROP_gimple_any;
17924 pop_cfun ();
17926 dump_function (TDI_gimple, fndecl);
17929 /* Return a dummy expression of type TYPE in order to keep going after an
17930 error. */
17932 static tree
17933 dummy_object (tree type)
17935 tree t = build_int_cst (build_pointer_type (type), 0);
17936 return build2 (MEM_REF, type, t, t);
17939 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
17940 builtin function, but a very special sort of operator. */
17942 enum gimplify_status
17943 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
17944 gimple_seq *post_p ATTRIBUTE_UNUSED)
17946 tree promoted_type, have_va_type;
17947 tree valist = TREE_OPERAND (*expr_p, 0);
17948 tree type = TREE_TYPE (*expr_p);
17949 tree t, tag, aptag;
17950 location_t loc = EXPR_LOCATION (*expr_p);
17952 /* Verify that valist is of the proper type. */
17953 have_va_type = TREE_TYPE (valist);
17954 if (have_va_type == error_mark_node)
17955 return GS_ERROR;
17956 have_va_type = targetm.canonical_va_list_type (have_va_type);
17957 if (have_va_type == NULL_TREE
17958 && POINTER_TYPE_P (TREE_TYPE (valist)))
17959 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
17960 have_va_type
17961 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
17962 gcc_assert (have_va_type != NULL_TREE);
17964 /* Generate a diagnostic for requesting data of a type that cannot
17965 be passed through `...' due to type promotion at the call site. */
17966 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
17967 != type)
17969 static bool gave_help;
17970 bool warned;
17971 /* Use the expansion point to handle cases such as passing bool (defined
17972 in a system header) through `...'. */
17973 location_t xloc
17974 = expansion_point_location_if_in_system_header (loc);
17976 /* Unfortunately, this is merely undefined, rather than a constraint
17977 violation, so we cannot make this an error. If this call is never
17978 executed, the program is still strictly conforming. */
17979 auto_diagnostic_group d;
17980 warned = warning_at (xloc, 0,
17981 "%qT is promoted to %qT when passed through %<...%>",
17982 type, promoted_type);
17983 if (!gave_help && warned)
17985 gave_help = true;
17986 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
17987 promoted_type, type);
17990 /* We can, however, treat "undefined" any way we please.
17991 Call abort to encourage the user to fix the program. */
17992 if (warned)
17993 inform (xloc, "if this code is reached, the program will abort");
17994 /* Before the abort, allow the evaluation of the va_list
17995 expression to exit or longjmp. */
17996 gimplify_and_add (valist, pre_p);
17997 t = build_call_expr_loc (loc,
17998 builtin_decl_implicit (BUILT_IN_TRAP), 0);
17999 gimplify_and_add (t, pre_p);
18001 /* This is dead code, but go ahead and finish so that the
18002 mode of the result comes out right. */
18003 *expr_p = dummy_object (type);
18004 return GS_ALL_DONE;
18007 tag = build_int_cst (build_pointer_type (type), 0);
18008 aptag = build_int_cst (TREE_TYPE (valist), 0);
18010 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
18011 valist, tag, aptag);
18013 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18014 needs to be expanded. */
18015 cfun->curr_properties &= ~PROP_gimple_lva;
18017 return GS_OK;
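/* E.g. "va_arg (ap, int)" leaves here as roughly
   ".VA_ARG (valist, (int *) 0, 0)"; the null "tag" operands exist
   only to carry the requested type and the va_list type, and the
   call is expanded for the target later because PROP_gimple_lva
   was cleared above.  */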
18020 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18022 DST/SRC are the destination and source respectively. You can pass
18023 ungimplified trees in DST or SRC, in which case they will be
18024 converted to a gimple operand if necessary.
18026 This function returns the newly created GIMPLE_ASSIGN tuple. */
18028 gimple *
18029 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18031 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18032 gimplify_and_add (t, seq_p);
18033 ggc_free (t);
18034 return gimple_seq_last_stmt (*seq_p);
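/* Typical use from other gimplification helpers is e.g.
     gimplify_assign (tmp, expr, pre_p);
   which appends "tmp = expr" to PRE_P, gimplifying EXPR first if
   necessary, and hands back the resulting GIMPLE_ASSIGN.  */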
18037 inline hashval_t
18038 gimplify_hasher::hash (const elt_t *p)
18040 tree t = p->val;
18041 return iterative_hash_expr (t, 0);
18044 inline bool
18045 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18047 tree t1 = p1->val;
18048 tree t2 = p2->val;
18049 enum tree_code code = TREE_CODE (t1);
18051 if (TREE_CODE (t2) != code
18052 || TREE_TYPE (t1) != TREE_TYPE (t2))
18053 return false;
18055 if (!operand_equal_p (t1, t2, 0))
18056 return false;
18058 /* Only allow them to compare equal if they also hash equal; otherwise
18059 results are nondeterministic, and we fail bootstrap comparison. */
18060 gcc_checking_assert (hash (p1) == hash (p2));
18062 return true;