/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an 'always, to' or 'always, tofrom'
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

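/* Usage sketch (illustrative, not part of the original file): a caller
   that gimplifies some tree T into a fresh sequence typically brackets
   the work with a push/pop pair, roughly

     push_gimplify_context ();
     gimple_seq seq = NULL;
     gimplify_and_add (t, &seq);
     gbind *bind = gimple_build_bind (NULL, seq, NULL);
     pop_gimplify_context (bind);

   so that the temporaries created while gimplifying T are declared in
   BIND (or recorded in local_decls when NULL is passed).  */
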
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

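/* Usage sketch (illustrative, not part of the original file): to make
   sure an expression EXPR with side effects is evaluated exactly once,
   a caller can write

     tree val = get_initialized_tmp_var (expr, pre_p);

   which emits "tmp = expr;" onto *PRE_P and returns the temporary (an
   SSA name when gimplifying into SSA); later uses of VAL then read the
   saved value instead of re-evaluating EXPR.  */
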
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
/* Push the temporary variable TMP into the bindings of function FN.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are two
   strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions,
   it is probably easier for a front-end to unshare the expressions manually.
   On the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */

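/* Rough sketch of the two walks described above (illustrative only):

     walk 1, copy_if_shared_r:   the first visit of a node N sets
				 TREE_VISITED (N); a second visit finds
				 the mark and replaces the reference
				 with a copy via mostly_copy_tree_r.
     walk 2, unmark_visited_r:   clears TREE_VISITED again so that
				 later passes see clean flags.  */
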
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

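/* For illustration (not part of the original file): given a GNU
   statement expression used as an initializer,

     int x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper has type int.  voidify_wrapper_expr gives every
   wrapper void type, rewrites the last value-producing statement into
   "retval = i + 1", and returns the "retval" temporary, which then
   stands for the whole expression.  */
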
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

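/* For illustration (not part of the original file): the pair built
   above corresponds to GIMPLE along the lines of

     saved_stack = __builtin_stack_save ();
     ...				<- block body using VLAs/alloca
     __builtin_stack_restore (saved_stack);

   which gimplify_bind_expr below wires into a GIMPLE_TRY_FINALLY so
   the restore runs on every exit from the block.  */
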
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
   on the POISON flag, the shadow memory of the variable DECL.  The call
   is inserted at the location identified by the iterator IT; the BEFORE
   flag selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES,
   depending on the POISON flag.  The created statements are appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in
	 order to prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early return as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}

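/* For illustration (not part of the original file): for "char buf[n];"
   this emits, roughly,

     buf.1 = __builtin_alloca_with_align (n, ...);

   and sets DECL_VALUE_EXPR (buf) = *buf.1, so every later use of BUF is
   rewritten into an indirection through the temporary pointer.  */
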
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
      sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}

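/* For illustration (not part of the original file): with
   -ftrivial-auto-var-init=zero, an uninitialized "int x;" is gimplified
   roughly as

     x = .DEFERRED_INIT (4, 2, &"x"[0]);

   where 4 is the size in bytes, 2 is AUTO_INIT_ZERO, and the string
   names the decl for diagnostics; the call is only expanded into a real
   initialization after the uninitialized-use warnings have run.  */
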
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init with fewer initializers than members of
   the aggregate will initialize the rest of the aggregate as if it were
   static initialization.  In turn static initialization guarantees
   that padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init', to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}

/* Return true if DECL needs to be automatically initialized by the
   compiler.  */
static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1851 tree stmt = *stmt_p;
1852 tree decl = DECL_EXPR_DECL (stmt);
1854 *stmt_p = NULL_TREE;
1856 if (TREE_TYPE (decl) == error_mark_node)
1857 return GS_ERROR;
1859 if ((TREE_CODE (decl) == TYPE_DECL
1860 || VAR_P (decl))
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1863 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1864 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1879 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1881 tree init = DECL_INITIAL (decl);
1882 bool is_vla = false;
1883 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1884 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1885 If the decl has VALUE_EXPR that was created by FE (usually
1886 C++FE), it's a proxy varaible, and FE already initialized
1887 the VALUE_EXPR of it, we should not initialize it anymore. */
1888 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1890 poly_uint64 size;
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1892 || (!TREE_STATIC (decl)
1893 && flag_stack_check == GENERIC_STACK_CHECK
1894 && maybe_gt (size,
1895 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1897 gimplify_vla_decl (decl, seq_p);
1898 is_vla = true;
1901 if (asan_poisoned_variables
1902 && !is_vla
1903 && TREE_ADDRESSABLE (decl)
1904 && !TREE_STATIC (decl)
1905 && !DECL_HAS_VALUE_EXPR_P (decl)
1906 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1914 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1916 asan_poisoned_variables->add (decl);
1917 asan_poison_variable (decl, false, seq_p);
1918 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1919 gimplify_ctxp->live_switch_vars->add (decl);
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1927 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1928 gimple_add_tmp_var (decl);
1930 if (init && init != error_mark_node)
1932 if (!TREE_STATIC (decl))
1934 DECL_INITIAL (decl) = NULL_TREE;
1935 init = build2 (INIT_EXPR, void_type_node, decl, init);
1936 gimplify_and_add (init, seq_p);
1937 ggc_free (init);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl)
1940 && !omp_privatize_by_reference (decl))
1941 TREE_READONLY (decl) = 0;
1943 else
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init, force_labels_r, NULL, NULL);
1948 /* When there is no explicit initializer and the user requested it,
1949 we should insert an artificial initializer for this automatic
1950 variable. */
1951 else if (is_var_need_auto_init (decl)
1952 && !decl_had_value_expr_p)
1954 gimple_add_init_for_auto_var (decl,
1955 flag_auto_var_init,
1956 seq_p);
1957 /* Expanding the call to .DEFERRED_INIT added above applies
1958 block initialization to the whole space covered by this variable.
1959 As a result, all the padding will be initialized to zeroes
1960 for zero initialization and to the 0xFE byte-repeatable pattern
1961 for pattern initialization.
1962 In order to make the padding zeroes for pattern init, we
1963 should add a call to __builtin_clear_padding to clear the
1964 padding to zero, for compatibility with Clang.
1965 We cannot insert this call if the variable is a gimple register,
1966 since __builtin_clear_padding takes the address of the
1967 variable. As a result, if a long double/_Complex long double
1968 variable is spilled onto the stack later, its padding is 0xFE. */
1969 if (flag_auto_var_init == AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1972 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1976 return GS_ALL_DONE;
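/* Illustrative effect of the padding handling above (sketch): given
   -ftrivial-auto-var-init=pattern and

     struct { char c; long long x; } s;

   the .DEFERRED_INIT expansion fills S with the 0xFE pattern, and the
   added __builtin_clear_padding call then zeroes the padding bytes
   between C and X again, for compatibility with Clang.  */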
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with a goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1986 tree saved_label = gimplify_ctxp->exit_label;
1987 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1989 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1991 gimplify_ctxp->exit_label = NULL_TREE;
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1995 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1997 if (gimplify_ctxp->exit_label)
1998 gimplify_seq_add_stmt (pre_p,
1999 gimple_build_label (gimplify_ctxp->exit_label));
2001 gimplify_ctxp->exit_label = saved_label;
2003 *expr_p = NULL;
2004 return GS_ALL_DONE;
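/* Sketch of the lowering performed above:

     LOOP_EXPR <body>         start_label:
                        ==>     body
                                goto start_label;
                              exit_label:

   where exit_label is appended only if an EXIT_EXPR in the body
   requested it through gimplify_ctxp->exit_label.  */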
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2010 static enum gimplify_status
2011 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2013 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2015 tree_stmt_iterator i = tsi_start (*expr_p);
2017 while (!tsi_end_p (i))
2019 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2020 tsi_delink (&i);
2023 if (temp)
2025 *expr_p = temp;
2026 return GS_OK;
2029 return GS_ALL_DONE;
2033 /* Emit a warning for the unreachable statement STMT if needed.
2034 Return the gimple itself when the warning is emitted, otherwise
2035 return NULL. */
2036 static gimple *
2037 emit_warn_switch_unreachable (gimple *stmt)
2039 if (gimple_code (stmt) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2044 return NULL;
2045 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2046 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2047 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2049 || (is_gimple_assign (stmt)
2050 && gimple_assign_single_p (stmt)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2054 IFN_DEFERRED_INIT))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2057 There are 3 cases:
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding whose 2nd argument is
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2064 i1 = _1. */
2065 return NULL;
2066 else
2067 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2068 "statement will never be executed");
2069 return stmt;
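/* For instance (sketch), -Wswitch-unreachable diagnoses the call in

     switch (i)
       {
         foo ();    <-- "statement will never be executed"
       case 1:
         break;
       }

   while compiler-generated gotos and the -ftrivial-auto-var-init
   artifacts enumerated above are deliberately skipped.  */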
2072 /* Callback for walk_gimple_seq. */
2074 static tree
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2076 bool *handled_ops_p,
2077 struct walk_stmt_info *wi)
2079 gimple *stmt = gsi_stmt (*gsi_p);
2080 bool unreachable_issued = wi->info != NULL;
2082 *handled_ops_p = true;
2083 switch (gimple_code (stmt))
2085 case GIMPLE_TRY:
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt) == NULL)
2091 if (warn_switch_unreachable && !unreachable_issued)
2092 wi->info = emit_warn_switch_unreachable (stmt);
2094 /* Stop when auto var init warning is not on. */
2095 if (!warn_trivial_auto_var_init)
2096 return integer_zero_node;
2098 /* Fall through. */
2099 case GIMPLE_BIND:
2100 case GIMPLE_CATCH:
2101 case GIMPLE_EH_FILTER:
2102 case GIMPLE_TRANSACTION:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p = false;
2105 break;
2107 case GIMPLE_DEBUG:
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2111 break;
2113 case GIMPLE_LABEL:
2114 /* Stop at the first label. */
2115 return integer_zero_node;
2116 case GIMPLE_CALL:
2117 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2119 *handled_ops_p = false;
2120 break;
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2126 /* Get the variable name from the 3rd argument of the call. */
2127 tree var_name = gimple_call_arg (stmt, 2);
2128 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2129 const char *var_name_str = TREE_STRING_POINTER (var_name);
2131 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2132 "%qs cannot be initialized with "
2133 "%<-ftrivial-auto-var-init%>",
2134 var_name_str);
2135 break;
2138 /* Fall through. */
2139 default:
2140 /* Check the first "real" statement (not a decl/lexical scope/...) and
2141 issue a warning if needed. */
2142 if (warn_switch_unreachable && !unreachable_issued)
2143 wi->info = emit_warn_switch_unreachable (stmt);
2144 /* Stop when auto var init warning is not on. */
2145 if (!warn_trivial_auto_var_init)
2146 return integer_zero_node;
2147 break;
2149 return NULL_TREE;
2153 /* Possibly warn about unreachable statements between switch's controlling
2154 expression and the first case. Also warn when -ftrivial-auto-var-init
2155 cannot initialize an auto variable in such a situation.
2156 SEQ is the body of a switch expression. */
2158 static void
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2161 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2162 /* This warning doesn't play well with Fortran when optimizations
2163 are on. */
2164 || lang_GNU_Fortran ()
2165 || seq == NULL)
2166 return;
2168 struct walk_stmt_info wi;
2170 memset (&wi, 0, sizeof (wi));
2171 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2175 /* A label entry that pairs label and a location. */
2176 struct label_entry
2178 tree label;
2179 location_t loc;
2182 /* Find LABEL in vector of label entries VEC. */
2184 static struct label_entry *
2185 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2187 unsigned int i;
2188 struct label_entry *l;
2190 FOR_EACH_VEC_ELT (*vec, i, l)
2191 if (l->label == label)
2192 return l;
2193 return NULL;
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2199 static bool
2200 case_label_p (const vec<tree> *cases, tree label)
2202 unsigned int i;
2203 tree l;
2205 FOR_EACH_VEC_ELT (*cases, i, l)
2206 if (CASE_LABEL (l) == label)
2207 return true;
2208 return false;
2211 /* Find the last nondebug statement in a scope STMT. */
2213 static gimple *
2214 last_stmt_in_scope (gimple *stmt)
2216 if (!stmt)
2217 return NULL;
2219 switch (gimple_code (stmt))
2221 case GIMPLE_BIND:
2223 gbind *bind = as_a <gbind *> (stmt);
2224 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2225 return last_stmt_in_scope (stmt);
2228 case GIMPLE_TRY:
2230 gtry *try_stmt = as_a <gtry *> (stmt);
2231 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2232 gimple *last_eval = last_stmt_in_scope (stmt);
2233 if (gimple_stmt_may_fallthru (last_eval)
2234 && (last_eval == NULL
2235 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2236 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2238 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2239 return last_stmt_in_scope (stmt);
2241 else
2242 return last_eval;
2245 case GIMPLE_DEBUG:
2246 gcc_unreachable ();
2248 default:
2249 return stmt;
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2258 static gimple *
2259 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2260 auto_vec <struct label_entry> *labels,
2261 location_t *prevloc)
2263 gimple *prev = NULL;
2265 *prevloc = UNKNOWN_LOCATION;
2268 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2274 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2275 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2276 if (last
2277 && gimple_code (first) == GIMPLE_SWITCH
2278 && gimple_code (last) == GIMPLE_LABEL)
2280 tree label = gimple_label_label (as_a <glabel *> (last));
2281 if (SWITCH_BREAK_LABEL_P (label))
2283 prev = bind;
2284 gsi_next (gsi_p);
2285 continue;
2289 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2295 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2296 if (last)
2298 prev = last;
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev))
2302 *prevloc = bind_loc;
2304 gsi_next (gsi_p);
2305 continue;
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2311 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2312 tree false_lab = gimple_cond_false_label (cond_stmt);
2313 location_t if_loc = gimple_location (cond_stmt);
2315 /* If we have e.g.
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab))
2319 break;
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2324 gimple *stmt = gsi_stmt (*gsi_p);
2325 if (gimple_code (stmt) == GIMPLE_LABEL
2326 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2327 break;
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p))
2332 break;
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab))
2337 struct label_entry l = { false_lab, if_loc };
2338 labels->safe_push (l);
2341 /* Go to the last statement of the then branch. */
2342 gsi_prev (gsi_p);
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2345 <D.1759>:
2346 <stmt>;
2347 goto <D.1761>;
2348 <D.1760>:
2350 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p)))
2353 /* Look at the statement before, it might be
2354 attribute fallthrough, in which case don't warn. */
2355 gsi_prev (gsi_p);
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2358 gsi_next (gsi_p);
2359 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2360 if (!fallthru_before_dest)
2362 struct label_entry l = { goto_dest, if_loc };
2363 labels->safe_push (l);
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2368 <D.2022>:
2369 n = n + 1; // #1
2370 <D.2023>: // #2
2371 <D.1988>: // #3
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab))
2375 prev = gsi_stmt (*gsi_p);
2377 /* And move back. */
2378 gsi_next (gsi_p);
2381 /* Remember the last statement. Skip labels that are of no interest
2382 to us. */
2383 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2385 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2386 if (find_label_entry (labels, label))
2387 prev = gsi_stmt (*gsi_p);
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2391 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2394 prev = gsi_stmt (*gsi_p);
2395 gsi_next (gsi_p);
2397 while (!gsi_end_p (*gsi_p)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p))));
2402 if (prev && gimple_has_location (prev))
2403 *prevloc = gimple_location (prev);
2404 return prev;
2407 /* Return true if the switch fallthrough warning should occur. LABEL is
2408 the label statement that we're falling through to. */
2410 static bool
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2413 gimple_stmt_iterator gsi = *gsi_p;
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label))
2417 return false;
2419 /* Don't warn for non-case labels followed by a statement:
2420 case 0:
2421 foo ();
2422 label:
2423 bar ();
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2427 tree l;
2428 while (!gsi_end_p (gsi)
2429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2430 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2431 && !case_label_p (&gimplify_ctxp->case_labels, l))
2432 gsi_next_nondebug (&gsi);
2433 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2434 return false;
2437 /* Don't warn for terminated branches, i.e. when the subsequent case label
2438 immediately breaks. */
2439 gsi = *gsi_p;
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi)
2443 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2445 gsi_next_nondebug (&gsi);
2447 /* { ... something; default:; } */
2448 if (gsi_end_p (gsi)
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2454 return false;
2456 return true;
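/* Illustrative cases for the checks above (sketch):

     case 0:
       foo ();    <-- warns: may fall through into case 1's body
     case 1:
       bar ();
       break;

     case 2:
       foo ();
     lab:         <-- quiet: non-case label followed by a statement
       bar ();

     case 3:
       foo ();
     case 4:      <-- quiet: the branch fallen into terminates
       break;         immediately  */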
2459 /* Callback for walk_gimple_seq. */
2461 static tree
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2463 struct walk_stmt_info *)
2465 gimple *stmt = gsi_stmt (*gsi_p);
2467 *handled_ops_p = true;
2468 switch (gimple_code (stmt))
2470 case GIMPLE_TRY:
2471 case GIMPLE_BIND:
2472 case GIMPLE_CATCH:
2473 case GIMPLE_EH_FILTER:
2474 case GIMPLE_TRANSACTION:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p = false;
2477 break;
2479 /* Find a sequence of form:
2481 GIMPLE_LABEL
2482 [...]
2483 <may fallthru stmt>
2484 GIMPLE_LABEL
2486 and possibly warn. */
2487 case GIMPLE_LABEL:
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p)
2491 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2492 gsi_next_nondebug (gsi_p);
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p))
2496 return integer_zero_node;
2498 /* Vector of labels that fall through. */
2499 auto_vec <struct label_entry> labels;
2500 location_t prevloc;
2501 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p))
2505 return integer_zero_node;
2507 gimple *next = gsi_stmt (*gsi_p);
2508 tree label;
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next) == GIMPLE_LABEL
2511 && gimple_has_location (next)
2512 && (label = gimple_label_label (as_a <glabel *> (next)))
2513 && prev != NULL)
2515 struct label_entry *l;
2516 bool warned_p = false;
2517 auto_diagnostic_group d;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2519 /* Quiet. */;
2520 else if (gimple_code (prev) == GIMPLE_LABEL
2521 && (label = gimple_label_label (as_a <glabel *> (prev)))
2522 && (l = find_label_entry (&labels, label)))
2523 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev)
2529 && prevloc != UNKNOWN_LOCATION)
2530 warned_p = warning_at (prevloc,
2531 OPT_Wimplicit_fallthrough_,
2532 "this statement may fall through");
2533 if (warned_p)
2534 inform (gimple_location (next), "here");
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label) = true;
2540 /* So that next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2542 gsi_prev (gsi_p);
2545 break;
2546 default:
2547 break;
2549 return NULL_TREE;
2552 /* Warn when a switch case falls through. */
2554 static void
2555 maybe_warn_implicit_fallthrough (gimple_seq seq)
2557 if (!warn_implicit_fallthrough)
2558 return;
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2561 if (!(lang_GNU_C ()
2562 || lang_GNU_CXX ()
2563 || lang_GNU_OBJC ()))
2564 return;
2566 struct walk_stmt_info wi;
2567 memset (&wi, 0, sizeof (wi));
2568 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2571 /* Callback for walk_gimple_seq. */
2573 static tree
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2575 struct walk_stmt_info *wi)
2577 gimple *stmt = gsi_stmt (*gsi_p);
2579 *handled_ops_p = true;
2580 switch (gimple_code (stmt))
2582 case GIMPLE_TRY:
2583 case GIMPLE_BIND:
2584 case GIMPLE_CATCH:
2585 case GIMPLE_EH_FILTER:
2586 case GIMPLE_TRANSACTION:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p = false;
2589 break;
2590 case GIMPLE_CALL:
2591 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2593 gsi_remove (gsi_p, true);
2594 if (gsi_end_p (*gsi_p))
2596 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2597 return integer_zero_node;
2600 bool found = false;
2601 location_t loc = gimple_location (stmt);
2603 gimple_stmt_iterator gsi2 = *gsi_p;
2604 stmt = gsi_stmt (gsi2);
2605 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2607 /* Go on until the artificial label. */
2608 tree goto_dest = gimple_goto_dest (stmt);
2609 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2611 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2613 == goto_dest)
2614 break;
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2))
2619 break;
2621 /* Look one past it. */
2622 gsi_next (&gsi2);
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2))
2628 stmt = gsi_stmt (gsi2);
2629 if (gimple_code (stmt) == GIMPLE_LABEL)
2631 tree label = gimple_label_label (as_a <glabel *> (stmt));
2632 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2634 found = true;
2635 break;
2638 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2640 else if (!is_gimple_debug (stmt))
2641 /* Anything else is not expected. */
2642 break;
2643 gsi_next (&gsi2);
2645 if (!found)
2646 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2649 break;
2650 default:
2651 break;
2653 return NULL_TREE;
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2658 static void
2659 expand_FALLTHROUGH (gimple_seq *seq_p)
2661 struct walk_stmt_info wi;
2662 location_t loc;
2663 memset (&wi, 0, sizeof (wi));
2664 wi.info = (void *) &loc;
2665 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2666 if (wi.callback_result == integer_zero_node)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2675 branch to. */
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2680 tree switch_expr = *expr_p;
2681 gimple_seq switch_body_seq = NULL;
2682 enum gimplify_status ret;
2683 tree index_type = TREE_TYPE (switch_expr);
2684 if (index_type == NULL_TREE)
2685 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2687 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2688 fb_rvalue);
2689 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2690 return ret;
2692 if (SWITCH_BODY (switch_expr))
2694 vec<tree> labels;
2695 vec<tree> saved_labels;
2696 hash_set<tree> *saved_live_switch_vars = NULL;
2697 tree default_case = NULL_TREE;
2698 gswitch *switch_stmt;
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels = gimplify_ctxp->case_labels;
2703 gimplify_ctxp->case_labels.create (8);
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2707 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2708 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2709 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2710 else
2711 gimplify_ctxp->live_switch_vars = NULL;
2713 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2714 gimplify_ctxp->in_switch_expr = true;
2716 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2718 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2720 maybe_warn_implicit_fallthrough (switch_body_seq);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp->in_switch_expr)
2723 expand_FALLTHROUGH (&switch_body_seq);
2725 labels = gimplify_ctxp->case_labels;
2726 gimplify_ctxp->case_labels = saved_labels;
2728 if (gimplify_ctxp->live_switch_vars)
2730 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2731 delete gimplify_ctxp->live_switch_vars;
2733 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2735 preprocess_case_label_vec_for_gimple (labels, index_type,
2736 &default_case);
2738 bool add_bind = false;
2739 if (!default_case)
2741 glabel *new_default;
2743 default_case
2744 = build_case_label (NULL_TREE, NULL_TREE,
2745 create_artificial_label (UNKNOWN_LOCATION));
2746 if (old_in_switch_expr)
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2749 add_bind = true;
2751 new_default = gimple_build_label (CASE_LABEL (default_case));
2752 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2754 else if (old_in_switch_expr)
2756 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2757 if (last && gimple_code (last) == GIMPLE_LABEL)
2759 tree label = gimple_label_label (as_a <glabel *> (last));
2760 if (SWITCH_BREAK_LABEL_P (label))
2761 add_bind = true;
2765 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2766 default_case, labels);
2767 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2768 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2769 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2770 so that we can easily find the start and end of the switch
2771 statement. */
2772 if (add_bind)
2774 gimple_seq bind_body = NULL;
2775 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2776 gimple_seq_add_seq (&bind_body, switch_body_seq);
2777 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2778 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2779 gimplify_seq_add_stmt (pre_p, bind);
2781 else
2783 gimplify_seq_add_stmt (pre_p, switch_stmt);
2784 gimplify_seq_add_seq (pre_p, switch_body_seq);
2786 labels.release ();
2788 else
2789 gcc_unreachable ();
2791 return GS_ALL_DONE;
2794 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2796 static enum gimplify_status
2797 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2799 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2800 == current_function_decl);
2802 tree label = LABEL_EXPR_LABEL (*expr_p);
2803 glabel *label_stmt = gimple_build_label (label);
2804 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2805 gimplify_seq_add_stmt (pre_p, label_stmt);
2807 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2808 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2809 NOT_TAKEN));
2810 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2811 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2812 TAKEN));
2814 return GS_ALL_DONE;
2817 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2819 static enum gimplify_status
2820 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2822 struct gimplify_ctx *ctxp;
2823 glabel *label_stmt;
2825 /* Invalid programs can play Duff's Device type games with, for example,
2826 #pragma omp parallel. At least in the C front end, we don't
2827 detect such invalid branches until after gimplification, in the
2828 diagnose_omp_blocks pass. */
2829 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2830 if (ctxp->case_labels.exists ())
2831 break;
2833 tree label = CASE_LABEL (*expr_p);
2834 label_stmt = gimple_build_label (label);
2835 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2836 ctxp->case_labels.safe_push (*expr_p);
2837 gimplify_seq_add_stmt (pre_p, label_stmt);
2839 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2840 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2841 NOT_TAKEN));
2842 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2843 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2844 TAKEN));
2846 return GS_ALL_DONE;
2849 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2850 if necessary. */
2852 tree
2853 build_and_jump (tree *label_p)
2855 if (label_p == NULL)
2856 /* If there's nowhere to jump, just fall through. */
2857 return NULL_TREE;
2859 if (*label_p == NULL_TREE)
2861 tree label = create_artificial_label (UNKNOWN_LOCATION);
2862 *label_p = label;
2865 return build1 (GOTO_EXPR, void_type_node, *label_p);
2868 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2869 This also involves building a label to jump to and communicating it to
2870 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2872 static enum gimplify_status
2873 gimplify_exit_expr (tree *expr_p)
2875 tree cond = TREE_OPERAND (*expr_p, 0);
2876 tree expr;
2878 expr = build_and_jump (&gimplify_ctxp->exit_label);
2879 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2880 *expr_p = expr;
2882 return GS_OK;
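/* That is (sketch), EXIT_EXPR <cond> becomes

     if (cond) goto exit_label;

   with exit_label created on demand and picked up by the enclosing
   gimplify_loop_expr via gimplify_ctxp->exit_label.  */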
2885 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2886 different from its canonical type, wrap the whole thing inside a
2887 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2888 type.
2890 The canonical type of a COMPONENT_REF is the type of the field being
2891 referenced--unless the field is a bit-field which can be read directly
2892 in a smaller mode, in which case the canonical type is the
2893 sign-appropriate type corresponding to that mode. */
2895 static void
2896 canonicalize_component_ref (tree *expr_p)
2898 tree expr = *expr_p;
2899 tree type;
2901 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2903 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2904 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2905 else
2906 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2908 /* One could argue that all the stuff below is not necessary for
2909 the non-bitfield case and declare it a FE error if type
2910 adjustment would be needed. */
2911 if (TREE_TYPE (expr) != type)
2913 #ifdef ENABLE_TYPES_CHECKING
2914 tree old_type = TREE_TYPE (expr);
2915 #endif
2916 int type_quals;
2918 /* We need to preserve qualifiers and propagate them from
2919 operand 0. */
2920 type_quals = TYPE_QUALS (type)
2921 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2922 if (TYPE_QUALS (type) != type_quals)
2923 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2925 /* Set the type of the COMPONENT_REF to the underlying type. */
2926 TREE_TYPE (expr) = type;
2928 #ifdef ENABLE_TYPES_CHECKING
2929 /* It is now a FE error, if the conversion from the canonical
2930 type to the original expression type is not useless. */
2931 gcc_assert (useless_type_conversion_p (old_type, type));
2932 #endif
2936 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2937 to foo, embed that change in the ADDR_EXPR by converting
2938 T array[U];
2939 (T *)&array
2940 ==>
2941 &array[L]
2942 where L is the lower bound. For simplicity, only do this for constant
2943 lower bound.
2944 The constraint is that the type of &array[L] is trivially convertible
2945 to T *. */
2947 static void
2948 canonicalize_addr_expr (tree *expr_p)
2950 tree expr = *expr_p;
2951 tree addr_expr = TREE_OPERAND (expr, 0);
2952 tree datype, ddatype, pddatype;
2954 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2955 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2956 || TREE_CODE (addr_expr) != ADDR_EXPR)
2957 return;
2959 /* The addr_expr type should be a pointer to an array. */
2960 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2961 if (TREE_CODE (datype) != ARRAY_TYPE)
2962 return;
2964 /* The pointer to element type shall be trivially convertible to
2965 the expression pointer type. */
2966 ddatype = TREE_TYPE (datype);
2967 pddatype = build_pointer_type (ddatype);
2968 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2969 pddatype))
2970 return;
2972 /* The lower bound and element sizes must be constant. */
2973 if (!TYPE_SIZE_UNIT (ddatype)
2974 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2975 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2976 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2977 return;
2979 /* All checks succeeded. Build a new node to merge the cast. */
2980 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2982 NULL_TREE, NULL_TREE);
2983 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2985 /* We can have stripped a required restrict qualifier above. */
2986 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2987 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2990 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2991 underneath as appropriate. */
2993 static enum gimplify_status
2994 gimplify_conversion (tree *expr_p)
2996 location_t loc = EXPR_LOCATION (*expr_p);
2997 gcc_assert (CONVERT_EXPR_P (*expr_p));
2999 /* Then strip away all but the outermost conversion. */
3000 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3002 /* And remove the outermost conversion if it's useless. */
3003 if (tree_ssa_useless_type_conversion (*expr_p))
3004 *expr_p = TREE_OPERAND (*expr_p, 0);
3006 /* If we still have a conversion at the toplevel,
3007 then canonicalize some constructs. */
3008 if (CONVERT_EXPR_P (*expr_p))
3010 tree sub = TREE_OPERAND (*expr_p, 0);
3012 /* If a NOP conversion is changing the type of a COMPONENT_REF
3013 expression, then canonicalize its type now in order to expose more
3014 redundant conversions. */
3015 if (TREE_CODE (sub) == COMPONENT_REF)
3016 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3018 /* If a NOP conversion is changing a pointer to array of foo
3019 to a pointer to foo, embed that change in the ADDR_EXPR. */
3020 else if (TREE_CODE (sub) == ADDR_EXPR)
3021 canonicalize_addr_expr (expr_p);
3024 /* If we have a conversion to a non-register type force the
3025 use of a VIEW_CONVERT_EXPR instead. */
3026 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3027 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3028 TREE_OPERAND (*expr_p, 0));
3030 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3031 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3032 TREE_SET_CODE (*expr_p, NOP_EXPR);
3034 return GS_OK;
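/* For instance (sketch), a useless conversion such as an (int) cast
   around an int-valued operand is stripped entirely, while a
   conversion to a non-register aggregate type is rewritten above as
   a VIEW_CONVERT_EXPR so that the result can live in memory.  */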
3037 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3038 DECL_VALUE_EXPR, and it's worth re-examining things. */
3040 static enum gimplify_status
3041 gimplify_var_or_parm_decl (tree *expr_p)
3043 tree decl = *expr_p;
3045 /* ??? If this is a local variable, and it has not been seen in any
3046 outer BIND_EXPR, then it's probably the result of a duplicate
3047 declaration, for which we've already issued an error. It would
3048 be really nice if the front end wouldn't leak these at all.
3049 Currently the only known culprit is C++ destructors, as seen
3050 in g++.old-deja/g++.jason/binding.C.
3051 Another possible culprit is size expressions for variably modified
3052 types which are lost in the FE or not gimplified correctly. */
3053 if (VAR_P (decl)
3054 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3055 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3056 && decl_function_context (decl) == current_function_decl)
3058 gcc_assert (seen_error ());
3059 return GS_ERROR;
3062 /* When within an OMP context, notice uses of variables. */
3063 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3064 return GS_ALL_DONE;
3066 /* If the decl is an alias for another expression, substitute it now. */
3067 if (DECL_HAS_VALUE_EXPR_P (decl))
3069 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3070 return GS_OK;
3073 return GS_ALL_DONE;
3076 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3078 static void
3079 recalculate_side_effects (tree t)
3081 enum tree_code code = TREE_CODE (t);
3082 int len = TREE_OPERAND_LENGTH (t);
3083 int i;
3085 switch (TREE_CODE_CLASS (code))
3087 case tcc_expression:
3088 switch (code)
3090 case INIT_EXPR:
3091 case MODIFY_EXPR:
3092 case VA_ARG_EXPR:
3093 case PREDECREMENT_EXPR:
3094 case PREINCREMENT_EXPR:
3095 case POSTDECREMENT_EXPR:
3096 case POSTINCREMENT_EXPR:
3097 /* All of these have side-effects, no matter what their
3098 operands are. */
3099 return;
3101 default:
3102 break;
3104 /* Fall through. */
3106 case tcc_comparison: /* a comparison expression */
3107 case tcc_unary: /* a unary arithmetic expression */
3108 case tcc_binary: /* a binary arithmetic expression */
3109 case tcc_reference: /* a reference */
3110 case tcc_vl_exp: /* a function call */
3111 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3112 for (i = 0; i < len; ++i)
3114 tree op = TREE_OPERAND (t, i);
3115 if (op && TREE_SIDE_EFFECTS (op))
3116 TREE_SIDE_EFFECTS (t) = 1;
3118 break;
3120 case tcc_constant:
3121 /* No side-effects. */
3122 return;
3124 default:
3125 gcc_unreachable ();
3129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3130 node *EXPR_P.
3132 compound_lval
3133 : min_lval '[' val ']'
3134 | min_lval '.' ID
3135 | compound_lval '[' val ']'
3136 | compound_lval '.' ID
3138 This is not part of the original SIMPLE definition, which separates
3139 array and member references, but it seems reasonable to handle them
3140 together. Also, this way we don't run into problems with union
3141 aliasing; gcc requires that for accesses through a union to alias, the
3142 union reference must be explicit, which was not always the case when we
3143 were splitting up array and member refs.
3145 PRE_P points to the sequence where side effects that must happen before
3146 *EXPR_P should be stored.
3148 POST_P points to the sequence where side effects that must happen after
3149 *EXPR_P should be stored. */
3151 static enum gimplify_status
3152 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3153 fallback_t fallback)
3155 tree *p;
3156 enum gimplify_status ret = GS_ALL_DONE, tret;
3157 int i;
3158 location_t loc = EXPR_LOCATION (*expr_p);
3159 tree expr = *expr_p;
3161 /* Create a stack of the subexpressions so later we can walk them in
3162 order from inner to outer. */
3163 auto_vec<tree, 10> expr_stack;
3165 /* We can handle anything that get_inner_reference can deal with. */
3166 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3168 restart:
3169 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3170 if (TREE_CODE (*p) == INDIRECT_REF)
3171 *p = fold_indirect_ref_loc (loc, *p);
3173 if (handled_component_p (*p))
3175 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3176 additional COMPONENT_REFs. */
3177 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3178 && gimplify_var_or_parm_decl (p) == GS_OK)
3179 goto restart;
3180 else
3181 break;
3183 expr_stack.safe_push (*p);
3186 gcc_assert (expr_stack.length ());
3188 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3189 walked through and P points to the innermost expression.
3191 Java requires that we elaborate nodes in source order. That
3192 means we must gimplify the inner expression followed by each of
3193 the indices, in order. But we can't gimplify the inner
3194 expression until we deal with any variable bounds, sizes, or
3195 positions in order to deal with PLACEHOLDER_EXPRs.
3197 The base expression may contain a statement expression that
3198 has declarations used in size expressions, so has to be
3199 gimplified before gimplifying the size expressions.
3201 So we do this in three steps. First we deal with variable
3202 bounds, sizes, and positions, then we gimplify the base and
3203 ensure it is memory if needed, then we deal with the annotations
3204 for any variables in the components and any indices, from left
3205 to right. */
3207 bool need_non_reg = false;
3208 for (i = expr_stack.length () - 1; i >= 0; i--)
3210 tree t = expr_stack[i];
3212 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3214 /* Deal with the low bound and element type size and put them into
3215 the ARRAY_REF. If these values are set, they have already been
3216 gimplified. */
3217 if (TREE_OPERAND (t, 2) == NULL_TREE)
3219 tree low = unshare_expr (array_ref_low_bound (t));
3220 if (!is_gimple_min_invariant (low))
3222 TREE_OPERAND (t, 2) = low;
3226 if (TREE_OPERAND (t, 3) == NULL_TREE)
3228 tree elmt_size = array_ref_element_size (t);
3229 if (!is_gimple_min_invariant (elmt_size))
3231 elmt_size = unshare_expr (elmt_size);
3232 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3233 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3235 /* Divide the element size by the alignment of the element
3236 type (above). */
3237 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3238 elmt_size, factor);
3240 TREE_OPERAND (t, 3) = elmt_size;
3243 need_non_reg = true;
3245 else if (TREE_CODE (t) == COMPONENT_REF)
3247 /* Set the field offset into T and gimplify it. */
3248 if (TREE_OPERAND (t, 2) == NULL_TREE)
3250 tree offset = component_ref_field_offset (t);
3251 if (!is_gimple_min_invariant (offset))
3253 offset = unshare_expr (offset);
3254 tree field = TREE_OPERAND (t, 1);
3255 tree factor
3256 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3258 /* Divide the offset by its alignment. */
3259 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3260 offset, factor);
3262 TREE_OPERAND (t, 2) = offset;
3265 need_non_reg = true;
3267 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3268 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3269 is a non-register type then require the base object to be a
3270 non-register as well. */
3271 need_non_reg = true;
3274 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3275 so as to match the min_lval predicate. Failure to do so may result
3276 in the creation of large aggregate temporaries. */
3277 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3278 fallback | fb_lvalue);
3279 ret = MIN (ret, tret);
3280 if (ret == GS_ERROR)
3281 return GS_ERROR;
3283 /* Step 2a: if we have component references we do not support on
3284 registers then make sure the base isn't a register. Of course
3285 we can only do so if an rvalue is OK. */
3286 if (need_non_reg && (fallback & fb_rvalue))
3287 prepare_gimple_addressable (p, pre_p);
3290 /* Step 3: gimplify size expressions and the indices and operands of
3291 ARRAY_REF. During this loop we also remove any useless conversions.
3292 If we operate on a register also make sure to properly gimplify
3293 to individual operations. */
3295 bool reg_operations = is_gimple_reg (*p);
3296 for (; expr_stack.length () > 0; )
3298 tree t = expr_stack.pop ();
3300 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3302 gcc_assert (!reg_operations);
3304 /* Gimplify the low bound and element type size. */
3305 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3306 is_gimple_reg, fb_rvalue);
3307 ret = MIN (ret, tret);
3309 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3310 is_gimple_reg, fb_rvalue);
3311 ret = MIN (ret, tret);
3313 /* Gimplify the dimension. */
3314 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3315 is_gimple_val, fb_rvalue);
3316 ret = MIN (ret, tret);
3318 else if (TREE_CODE (t) == COMPONENT_REF)
3320 gcc_assert (!reg_operations);
3322 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3323 is_gimple_reg, fb_rvalue);
3324 ret = MIN (ret, tret);
3326 else if (reg_operations)
3328 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3329 is_gimple_val, fb_rvalue);
3330 ret = MIN (ret, tret);
3333 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3335 /* The innermost expression P may have originally had
3336 TREE_SIDE_EFFECTS set which would have caused all the outer
3337 expressions in *EXPR_P leading to P to also have had
3338 TREE_SIDE_EFFECTS set. */
3339 recalculate_side_effects (t);
3342 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3343 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3345 canonicalize_component_ref (expr_p);
3348 expr_stack.release ();
3350 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3352 return ret;
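/* Illustrative walk (sketch): for "x.a[i].b", EXPR_STACK holds
   { .b, [i], .a } from outermost to innermost.  Step 1 gimplifies
   any variable bounds, sizes and offsets, step 2 gimplifies the
   base "x" (forcing it into memory when needed), and step 3
   gimplifies the index "i" and the recorded size/offset operands
   from the innermost reference outwards.  */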
3355 /* Gimplify the self modifying expression pointed to by EXPR_P
3356 (++, --, +=, -=).
3358 PRE_P points to the list where side effects that must happen before
3359 *EXPR_P should be stored.
3361 POST_P points to the list where side effects that must happen after
3362 *EXPR_P should be stored.
3364 WANT_VALUE is nonzero iff we want to use the value of this expression
3365 in another expression.
3367 ARITH_TYPE is the type the computation should be performed in. */
3369 enum gimplify_status
3370 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3371 bool want_value, tree arith_type)
3373 enum tree_code code;
3374 tree lhs, lvalue, rhs, t1;
3375 gimple_seq post = NULL, *orig_post_p = post_p;
3376 bool postfix;
3377 enum tree_code arith_code;
3378 enum gimplify_status ret;
3379 location_t loc = EXPR_LOCATION (*expr_p);
3381 code = TREE_CODE (*expr_p);
3383 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3384 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3386 /* Prefix or postfix? */
3387 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3388 /* Faster to treat as prefix if result is not used. */
3389 postfix = want_value;
3390 else
3391 postfix = false;
3393 /* For postfix, make sure the inner expression's post side effects
3394 are executed after side effects from this expression. */
3395 if (postfix)
3396 post_p = &post;
3398 /* Add or subtract? */
3399 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3400 arith_code = PLUS_EXPR;
3401 else
3402 arith_code = MINUS_EXPR;
3404 /* Gimplify the LHS into a GIMPLE lvalue. */
3405 lvalue = TREE_OPERAND (*expr_p, 0);
3406 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3407 if (ret == GS_ERROR)
3408 return ret;
3410 /* Extract the operands to the arithmetic operation. */
3411 lhs = lvalue;
3412 rhs = TREE_OPERAND (*expr_p, 1);
3414 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3415 that as the result value and in the postqueue operation. */
3416 if (postfix)
3418 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3419 if (ret == GS_ERROR)
3420 return ret;
3422 lhs = get_initialized_tmp_var (lhs, pre_p);
3425 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3426 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3428 rhs = convert_to_ptrofftype_loc (loc, rhs);
3429 if (arith_code == MINUS_EXPR)
3430 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3431 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3433 else
3434 t1 = fold_convert (TREE_TYPE (*expr_p),
3435 fold_build2 (arith_code, arith_type,
3436 fold_convert (arith_type, lhs),
3437 fold_convert (arith_type, rhs)));
3439 if (postfix)
3441 gimplify_assign (lvalue, t1, pre_p);
3442 gimplify_seq_add_seq (orig_post_p, post);
3443 *expr_p = lhs;
3444 return GS_ALL_DONE;
3446 else
3448 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3449 return GS_OK;
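/* Sketch of the two shapes produced above for an int variable I:

     ++i, or i++ with unused value:   i = i + 1;
     i++ with its value used:         tmp = i;
                                      i = tmp + 1;   (result is tmp)

   For a pointer, the addition is a POINTER_PLUS_EXPR with a
   sizetype offset instead.  */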
3453 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3455 static void
3456 maybe_with_size_expr (tree *expr_p)
3458 tree expr = *expr_p;
3459 tree type = TREE_TYPE (expr);
3460 tree size;
3462 /* If we've already wrapped this or the type is error_mark_node, we can't do
3463 anything. */
3464 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3465 || type == error_mark_node)
3466 return;
3468 /* If the size isn't known or is a constant, we have nothing to do. */
3469 size = TYPE_SIZE_UNIT (type);
3470 if (!size || poly_int_tree_p (size))
3471 return;
3473 /* Otherwise, make a WITH_SIZE_EXPR. */
3474 size = unshare_expr (size);
3475 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3476 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
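/* For example (sketch): if EXPR has a variably modified type whose
   TYPE_SIZE_UNIT is "n * 4", the result is
   WITH_SIZE_EXPR <expr, n * 4>, so that consumers such as
   gimplify_modify_expr and the call-argument code still know the
   object's size in bytes.  */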
3479 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3480 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3481 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3482 gimplified to an SSA name. */
3484 enum gimplify_status
3485 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3486 bool allow_ssa)
3488 bool (*test) (tree);
3489 fallback_t fb;
3491 /* In general, we allow lvalues for function arguments to avoid
3492 extra overhead of copying large aggregates out of even larger
3493 aggregates into temporaries only to copy the temporaries to
3494 the argument list. Make optimizers happy by pulling out to
3495 temporaries those types that fit in registers. */
3496 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3497 test = is_gimple_val, fb = fb_rvalue;
3498 else
3500 test = is_gimple_lvalue, fb = fb_either;
3501 /* Also strip a TARGET_EXPR that would force an extra copy. */
3502 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3504 tree init = TARGET_EXPR_INITIAL (*arg_p);
3505 if (init
3506 && !VOID_TYPE_P (TREE_TYPE (init)))
3507 *arg_p = init;
3511 /* If this is a variable sized type, we must remember the size. */
3512 maybe_with_size_expr (arg_p);
3514 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3515 /* Make sure arguments have the same location as the function call
3516 itself. */
3517 protected_set_expr_location (*arg_p, call_location);
3519 /* There is a sequence point before a function call. Side effects in
3520 the argument list must occur before the actual call. So, when
3521 gimplifying arguments, force gimplify_expr to use an internal
3522 post queue which is then appended to the end of PRE_P. */
3523 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
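/* For example (sketch): in "use_struct (big)" where BIG is a large
   aggregate, BIG may be passed as an lvalue instead of being copied
   into a temporary first, whereas a scalar argument like "i + 1" is
   reduced to a register value.  */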
3526 /* Don't fold inside offloading or taskreg regions: it can break code by
3527 adding decl references that weren't in the source. We'll do it during
3528 omplower pass instead. */
3530 static bool
3531 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3533 struct gimplify_omp_ctx *ctx;
3534 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3535 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3536 return false;
3537 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3538 return false;
3539 /* Delay folding of builtins until the IL is in consistent state
3540 so the diagnostic machinery can do a better job. */
3541 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3542 return false;
3543 return fold_stmt (gsi);
3546 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3547 WANT_VALUE is true if the result of the call is desired. */
3549 static enum gimplify_status
3550 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3552 tree fndecl, parms, p, fnptrtype;
3553 enum gimplify_status ret;
3554 int i, nargs;
3555 gcall *call;
3556 bool builtin_va_start_p = false;
3557 location_t loc = EXPR_LOCATION (*expr_p);
3559 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3561 /* For reliable diagnostics during inlining, it is necessary that
3562 every call_expr be annotated with file and line. */
3563 if (! EXPR_HAS_LOCATION (*expr_p))
3564 SET_EXPR_LOCATION (*expr_p, input_location);
3566 /* Gimplify internal functions created in the FEs. */
3567 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3569 if (want_value)
3570 return GS_ALL_DONE;
3572 nargs = call_expr_nargs (*expr_p);
3573 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3574 auto_vec<tree> vargs (nargs);
3576 if (ifn == IFN_ASSUME)
3578 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3580 /* If the [[assume (cond)]]; condition is simple
3581 enough and can be evaluated unconditionally
3582 without side-effects, expand it as
3583 if (!cond) __builtin_unreachable (); */
3584 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3585 *expr_p = build3 (COND_EXPR, void_type_node,
3586 CALL_EXPR_ARG (*expr_p, 0), void_node,
3587 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3588 fndecl, 0));
3589 return GS_OK;
3591 /* If not optimizing, ignore the assumptions. */
3592 if (!optimize || seen_error ())
3594 *expr_p = NULL_TREE;
3595 return GS_ALL_DONE;
3597 /* Temporarily, until gimple lowering, transform
3598 .ASSUME (cond);
3599 into:
3600 [[assume (guard)]]
3601 {
3602 guard = cond;
3603 }
3604 such that gimple lowering can outline the condition into
3605 a separate function easily. */
3606 tree guard = create_tmp_var (boolean_type_node);
3607 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3608 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3609 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3610 push_gimplify_context ();
3611 gimple_seq body = NULL;
3612 gimple *g = gimplify_and_return_first (*expr_p, &body);
3613 pop_gimplify_context (g);
3614 g = gimple_build_assume (guard, body);
3615 gimple_set_location (g, loc);
3616 gimplify_seq_add_stmt (pre_p, g);
3617 *expr_p = NULL_TREE;
3618 return GS_ALL_DONE;
3621 for (i = 0; i < nargs; i++)
3623 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3624 EXPR_LOCATION (*expr_p));
3625 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3628 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3629 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3630 gimplify_seq_add_stmt (pre_p, call);
3631 return GS_ALL_DONE;
3634 /* This may be a call to a builtin function.
3636 Builtin function calls may be transformed into different
3637 (and more efficient) builtin function calls under certain
3638 circumstances. Unfortunately, gimplification can muck things
3639 up enough that the builtin expanders are not aware that certain
3640 transformations are still valid.
3642 So we attempt transformation/gimplification of the call before
3643 we gimplify the CALL_EXPR. At this time we do not manage to
3644 transform all calls in the same manner as the expanders do, but
3645 we do transform most of them. */
3646 fndecl = get_callee_fndecl (*expr_p);
3647 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3648 switch (DECL_FUNCTION_CODE (fndecl))
3650 CASE_BUILT_IN_ALLOCA:
3651 /* If the call has been built for a variable-sized object, then we
3652 want to restore the stack level when the enclosing BIND_EXPR is
3653 exited to reclaim the allocated space; otherwise, we precisely
3654 need to do the opposite and preserve the latest stack level. */
3655 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3656 gimplify_ctxp->save_stack = true;
3657 else
3658 gimplify_ctxp->keep_stack = true;
3659 break;
3661 case BUILT_IN_VA_START:
3663 builtin_va_start_p = TRUE;
3664 if (call_expr_nargs (*expr_p) < 2)
3666 error ("too few arguments to function %<va_start%>");
3667 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3668 return GS_OK;
3671 if (fold_builtin_next_arg (*expr_p, true))
3673 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3674 return GS_OK;
3676 break;
3679 case BUILT_IN_EH_RETURN:
3680 cfun->calls_eh_return = true;
3681 break;
3683 case BUILT_IN_CLEAR_PADDING:
3684 if (call_expr_nargs (*expr_p) == 1)
3686 /* Remember the original type of the argument in an internal
3687 dummy second argument, since in GIMPLE pointer conversions are
3688 useless. Also mark this call as not being for automatic
3689 initialization, in the internal dummy third argument. */
3690 p = CALL_EXPR_ARG (*expr_p, 0);
3691 *expr_p
3692 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3693 build_zero_cst (TREE_TYPE (p)));
3694 return GS_OK;
3696 break;
3698 default:
3701 if (fndecl && fndecl_built_in_p (fndecl))
3703 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3704 if (new_tree && new_tree != *expr_p)
3706 /* There was a transformation of this call which computes the
3707 same value, but in a more efficient way. Return and try
3708 again. */
3709 *expr_p = new_tree;
3710 return GS_OK;
3714 /* Remember the original function pointer type. */
3715 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3717 if (flag_openmp
3718 && fndecl
3719 && cfun
3720 && (cfun->curr_properties & PROP_gimple_any) == 0)
3722 tree variant = omp_resolve_declare_variant (fndecl);
3723 if (variant != fndecl)
3724 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3727 /* There is a sequence point before the call, so any side effects in
3728 the calling expression must occur before the actual call. Force
3729 gimplify_expr to use an internal post queue. */
3730 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3731 is_gimple_call_addr, fb_rvalue);
3733 if (ret == GS_ERROR)
3734 return GS_ERROR;
3736 nargs = call_expr_nargs (*expr_p);
3738 /* Get argument types for verification. */
3739 fndecl = get_callee_fndecl (*expr_p);
3740 parms = NULL_TREE;
3741 if (fndecl)
3742 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3743 else
3744 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3746 if (fndecl && DECL_ARGUMENTS (fndecl))
3747 p = DECL_ARGUMENTS (fndecl);
3748 else if (parms)
3749 p = parms;
3750 else
3751 p = NULL_TREE;
3752 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3755 /* If the last argument is __builtin_va_arg_pack () and it is not
3756 passed as a named argument, decrease the number of CALL_EXPR
3757 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3758 if (!p
3759 && i < nargs
3760 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3762 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3763 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3765 if (last_arg_fndecl
3766 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3768 tree call = *expr_p;
3770 --nargs;
3771 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3772 CALL_EXPR_FN (call),
3773 nargs, CALL_EXPR_ARGP (call));
3775 /* Copy all CALL_EXPR flags, location and block, except
3776 CALL_EXPR_VA_ARG_PACK flag. */
3777 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3778 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3779 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3780 = CALL_EXPR_RETURN_SLOT_OPT (call);
3781 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3782 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3784 /* Set CALL_EXPR_VA_ARG_PACK. */
3785 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
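/* For example (a sketch): in an always_inline wrapper

     static inline __attribute__ ((always_inline)) int
     my_printf (const char *fmt, ...)
     { return printf (fmt, __builtin_va_arg_pack ()); }

   the trailing __builtin_va_arg_pack () argument is dropped and the
   printf CALL_EXPR is flagged with CALL_EXPR_VA_ARG_PACK, so the
   caller's variadic arguments are substituted when the wrapper is
   inlined.  */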
3789 /* If the call returns twice then after building the CFG the call
3790 argument computations will no longer dominate the call because
3791 we add an abnormal incoming edge to the call. So do not use SSA
3792 vars there. */
3793 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3795 /* Gimplify the function arguments. */
3796 if (nargs > 0)
3798 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3799 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3800 PUSH_ARGS_REVERSED ? i-- : i++)
3802 enum gimplify_status t;
3804 /* Avoid gimplifying the second argument to va_start, which needs to
3805 be the plain PARM_DECL. */
3806 if ((i != 1) || !builtin_va_start_p)
3808 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3809 EXPR_LOCATION (*expr_p), ! returns_twice);
3811 if (t == GS_ERROR)
3812 ret = GS_ERROR;
3817 /* Gimplify the static chain. */
3818 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3820 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3821 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3822 else
3824 enum gimplify_status t;
3825 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3826 EXPR_LOCATION (*expr_p), ! returns_twice);
3827 if (t == GS_ERROR)
3828 ret = GS_ERROR;
3832 /* Verify the function result. */
3833 if (want_value && fndecl
3834 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3836 error_at (loc, "using result of function returning %<void%>");
3837 ret = GS_ERROR;
3840 /* Try this again in case gimplification exposed something. */
3841 if (ret != GS_ERROR)
3843 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3845 if (new_tree && new_tree != *expr_p)
3847 /* There was a transformation of this call which computes the
3848 same value, but in a more efficient way. Return and try
3849 again. */
3850 *expr_p = new_tree;
3851 return GS_OK;
3854 else
3856 *expr_p = error_mark_node;
3857 return GS_ERROR;
3860 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3861 call itself. This allows us to eliminate redundant or useless
3862 calls to "const" functions. */
3863 if (TREE_CODE (*expr_p) == CALL_EXPR)
3865 int flags = call_expr_flags (*expr_p);
3866 if (flags & (ECF_CONST | ECF_PURE)
3867 /* An infinite loop is considered a side effect. */
3868 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3869 TREE_SIDE_EFFECTS (*expr_p) = 0;
3872 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3873 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3874 form and delegate the creation of a GIMPLE_CALL to
3875 gimplify_modify_expr. This is always possible because when
3876 WANT_VALUE is true, the caller wants the result of this call into
3877 a temporary, which means that we will emit an INIT_EXPR in
3878 internal_get_tmp_var which will then be handled by
3879 gimplify_modify_expr. */
3880 if (!want_value)
3882 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3883 have to do is replicate it as a GIMPLE_CALL tuple. */
3884 gimple_stmt_iterator gsi;
3885 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3886 notice_special_calls (call);
3887 gimplify_seq_add_stmt (pre_p, call);
3888 gsi = gsi_last (*pre_p);
3889 maybe_fold_stmt (&gsi);
3890 *expr_p = NULL_TREE;
3892 else
3893 /* Remember the original function type. */
3894 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3895 CALL_EXPR_FN (*expr_p));
3897 return ret;
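/* Putting the above together (a sketch): for x = foo (a + b) the
   argument is gimplified first, say t1 = a + b; since the value is
   wanted, the CALL_EXPR is kept for gimplify_modify_expr to emit
   x = foo (t1), whereas a bare foo (a + b) statement gets its
   GIMPLE_CALL built right here and *expr_p cleared.  */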
3900 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3901 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3903 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3904 condition is true or false, respectively. If null, we should generate
3905 our own to skip over the evaluation of this specific expression.
3907 LOCUS is the source location of the COND_EXPR.
3909 This function is the tree equivalent of do_jump.
3911 shortcut_cond_r should only be called by shortcut_cond_expr. */
3913 static tree
3914 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3915 location_t locus)
3917 tree local_label = NULL_TREE;
3918 tree t, expr = NULL;
3920 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3921 retain the shortcut semantics. Just insert the gotos here;
3922 shortcut_cond_expr will append the real blocks later. */
3923 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3925 location_t new_locus;
3927 /* Turn if (a && b) into
3929 if (a); else goto no;
3930 if (b) goto yes; else goto no;
3931 (no:) */
3933 if (false_label_p == NULL)
3934 false_label_p = &local_label;
3936 /* Keep the original source location on the first 'if'. */
3937 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3938 append_to_statement_list (t, &expr);
3940 /* Set the source location of the && on the second 'if'. */
3941 new_locus = rexpr_location (pred, locus);
3942 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3943 new_locus);
3944 append_to_statement_list (t, &expr);
3946 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3948 location_t new_locus;
3950 /* Turn if (a || b) into
3952 if (a) goto yes;
3953 if (b) goto yes; else goto no;
3954 (yes:) */
3956 if (true_label_p == NULL)
3957 true_label_p = &local_label;
3959 /* Keep the original source location on the first 'if'. */
3960 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3961 append_to_statement_list (t, &expr);
3963 /* Set the source location of the || on the second 'if'. */
3964 new_locus = rexpr_location (pred, locus);
3965 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3966 new_locus);
3967 append_to_statement_list (t, &expr);
3969 else if (TREE_CODE (pred) == COND_EXPR
3970 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3971 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3973 location_t new_locus;
3975 /* As long as we're messing with gotos, turn if (a ? b : c) into
3976 if (a)
3977 if (b) goto yes; else goto no;
3978 else
3979 if (c) goto yes; else goto no;
3981 Don't do this if one of the arms has void type, which can happen
3982 in C++ when the arm is throw. */
3984 /* Keep the original source location on the first 'if'. Set the source
3985 location of the ? on the second 'if'. */
3986 new_locus = rexpr_location (pred, locus);
3987 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3988 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3989 false_label_p, locus),
3990 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3991 false_label_p, new_locus));
3993 else
3995 expr = build3 (COND_EXPR, void_type_node, pred,
3996 build_and_jump (true_label_p),
3997 build_and_jump (false_label_p));
3998 SET_EXPR_LOCATION (expr, locus);
4001 if (local_label)
4003 t = build1 (LABEL_EXPR, void_type_node, local_label);
4004 append_to_statement_list (t, &expr);
4007 return expr;
4010 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4011 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4012 statement, if it is the last one. Otherwise, return NULL. */
4014 static tree
4015 find_goto (tree expr)
4017 if (!expr)
4018 return NULL_TREE;
4020 if (TREE_CODE (expr) == GOTO_EXPR)
4021 return expr;
4023 if (TREE_CODE (expr) != STATEMENT_LIST)
4024 return NULL_TREE;
4026 tree_stmt_iterator i = tsi_start (expr);
4028 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4029 tsi_next (&i);
4031 if (!tsi_one_before_end_p (i))
4032 return NULL_TREE;
4034 return find_goto (tsi_stmt (i));
4037 /* Same as find_goto, except that it returns NULL if the destination
4038 is not a LABEL_DECL. */
4040 static inline tree
4041 find_goto_label (tree expr)
4043 tree dest = find_goto (expr);
4044 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4045 return dest;
4046 return NULL_TREE;
4049 /* Given a conditional expression EXPR with short-circuit boolean
4050 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4051 predicate apart into the equivalent sequence of conditionals. */
4053 static tree
4054 shortcut_cond_expr (tree expr)
4056 tree pred = TREE_OPERAND (expr, 0);
4057 tree then_ = TREE_OPERAND (expr, 1);
4058 tree else_ = TREE_OPERAND (expr, 2);
4059 tree true_label, false_label, end_label, t;
4060 tree *true_label_p;
4061 tree *false_label_p;
4062 bool emit_end, emit_false, jump_over_else;
4063 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4064 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4066 /* First do simple transformations. */
4067 if (!else_se)
4069 /* If there is no 'else', turn
4070 if (a && b) then c
4071 into
4072 if (a) if (b) then c. */
4073 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4075 /* Keep the original source location on the first 'if'. */
4076 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4077 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4078 /* Set the source location of the && on the second 'if'. */
4079 if (rexpr_has_location (pred))
4080 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4081 then_ = shortcut_cond_expr (expr);
4082 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4083 pred = TREE_OPERAND (pred, 0);
4084 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4085 SET_EXPR_LOCATION (expr, locus);
4089 if (!then_se)
4091 /* If there is no 'then', turn
4092 if (a || b); else d
4093 into
4094 if (a); else if (b); else d. */
4095 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4097 /* Keep the original source location on the first 'if'. */
4098 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4099 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4100 /* Set the source location of the || on the second 'if'. */
4101 if (rexpr_has_location (pred))
4102 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4103 else_ = shortcut_cond_expr (expr);
4104 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4105 pred = TREE_OPERAND (pred, 0);
4106 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4107 SET_EXPR_LOCATION (expr, locus);
4111 /* If we're done, great. */
4112 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4113 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4114 return expr;
4116 /* Otherwise we need to mess with gotos. Change
4117 if (a) c; else d;
4118 to
4119 if (a); else goto no;
4120 c; goto end;
4121 no: d; end:
4122 and recursively gimplify the condition. */
4124 true_label = false_label = end_label = NULL_TREE;
4126 /* If our arms just jump somewhere, hijack those labels so we don't
4127 generate jumps to jumps. */
4129 if (tree then_goto = find_goto_label (then_))
4131 true_label = GOTO_DESTINATION (then_goto);
4132 then_ = NULL;
4133 then_se = false;
4136 if (tree else_goto = find_goto_label (else_))
4138 false_label = GOTO_DESTINATION (else_goto);
4139 else_ = NULL;
4140 else_se = false;
4143 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4144 if (true_label)
4145 true_label_p = &true_label;
4146 else
4147 true_label_p = NULL;
4149 /* The 'else' branch also needs a label if it contains interesting code. */
4150 if (false_label || else_se)
4151 false_label_p = &false_label;
4152 else
4153 false_label_p = NULL;
4155 /* If there was nothing else in our arms, just forward the label(s). */
4156 if (!then_se && !else_se)
4157 return shortcut_cond_r (pred, true_label_p, false_label_p,
4158 EXPR_LOC_OR_LOC (expr, input_location));
4160 /* If our last subexpression already has a terminal label, reuse it. */
4161 if (else_se)
4162 t = expr_last (else_);
4163 else if (then_se)
4164 t = expr_last (then_);
4165 else
4166 t = NULL;
4167 if (t && TREE_CODE (t) == LABEL_EXPR)
4168 end_label = LABEL_EXPR_LABEL (t);
4170 /* If we don't care about jumping to the 'else' branch, jump to the end
4171 if the condition is false. */
4172 if (!false_label_p)
4173 false_label_p = &end_label;
4175 /* We only want to emit these labels if we aren't hijacking them. */
4176 emit_end = (end_label == NULL_TREE);
4177 emit_false = (false_label == NULL_TREE);
4179 /* We only emit the jump over the else clause if we have to--if the
4180 then clause may fall through. Otherwise we can wind up with a
4181 useless jump and a useless label at the end of gimplified code,
4182 which will cause us to think that this conditional as a whole
4183 falls through even if it doesn't. If we then inline a function
4184 which ends with such a condition, that can cause us to issue an
4185 inappropriate warning about control reaching the end of a
4186 non-void function. */
4187 jump_over_else = block_may_fallthru (then_);
4189 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4190 EXPR_LOC_OR_LOC (expr, input_location));
4192 expr = NULL;
4193 append_to_statement_list (pred, &expr);
4195 append_to_statement_list (then_, &expr);
4196 if (else_se)
4198 if (jump_over_else)
4200 tree last = expr_last (expr);
4201 t = build_and_jump (&end_label);
4202 if (rexpr_has_location (last))
4203 SET_EXPR_LOCATION (t, rexpr_location (last));
4204 append_to_statement_list (t, &expr);
4206 if (emit_false)
4208 t = build1 (LABEL_EXPR, void_type_node, false_label);
4209 append_to_statement_list (t, &expr);
4211 append_to_statement_list (else_, &expr);
4213 if (emit_end && end_label)
4215 t = build1 (LABEL_EXPR, void_type_node, end_label);
4216 append_to_statement_list (t, &expr);
4219 return expr;
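/* End-to-end example (sketched): shortcut_cond_expr turns

     if (a && b) c; else d;

   into roughly

     if (a); else goto no;
     if (b); else goto no;
     c; goto end;
     no: d;
     end:

   with the gotos produced by shortcut_cond_r and the label emission
   governed by emit_false, emit_end and jump_over_else above.  */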
4222 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4224 tree
4225 gimple_boolify (tree expr)
4227 tree type = TREE_TYPE (expr);
4228 location_t loc = EXPR_LOCATION (expr);
4230 if (TREE_CODE (expr) == NE_EXPR
4231 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4232 && integer_zerop (TREE_OPERAND (expr, 1)))
4234 tree call = TREE_OPERAND (expr, 0);
4235 tree fn = get_callee_fndecl (call);
4237 /* For __builtin_expect ((long) (x), y) recurse into x as well
4238 if x is truth_value_p. */
4239 if (fn
4240 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4241 && call_expr_nargs (call) == 2)
4243 tree arg = CALL_EXPR_ARG (call, 0);
4244 if (arg)
4246 if (TREE_CODE (arg) == NOP_EXPR
4247 && TREE_TYPE (arg) == TREE_TYPE (call))
4248 arg = TREE_OPERAND (arg, 0);
4249 if (truth_value_p (TREE_CODE (arg)))
4251 arg = gimple_boolify (arg);
4252 CALL_EXPR_ARG (call, 0)
4253 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4259 switch (TREE_CODE (expr))
4261 case TRUTH_AND_EXPR:
4262 case TRUTH_OR_EXPR:
4263 case TRUTH_XOR_EXPR:
4264 case TRUTH_ANDIF_EXPR:
4265 case TRUTH_ORIF_EXPR:
4266 /* Also boolify the arguments of truth exprs. */
4267 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4268 /* FALLTHRU */
4270 case TRUTH_NOT_EXPR:
4271 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4273 /* These expressions always produce boolean results. */
4274 if (TREE_CODE (type) != BOOLEAN_TYPE)
4275 TREE_TYPE (expr) = boolean_type_node;
4276 return expr;
4278 case ANNOTATE_EXPR:
4279 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4281 case annot_expr_ivdep_kind:
4282 case annot_expr_unroll_kind:
4283 case annot_expr_no_vector_kind:
4284 case annot_expr_vector_kind:
4285 case annot_expr_parallel_kind:
4286 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4287 if (TREE_CODE (type) != BOOLEAN_TYPE)
4288 TREE_TYPE (expr) = boolean_type_node;
4289 return expr;
4290 default:
4291 gcc_unreachable ();
4294 default:
4295 if (COMPARISON_CLASS_P (expr))
4297 /* These expressions always produce boolean results. */
4298 if (TREE_CODE (type) != BOOLEAN_TYPE)
4299 TREE_TYPE (expr) = boolean_type_node;
4300 return expr;
4302 /* Other expressions that get here must have boolean values, but
4303 might need to be converted to the appropriate mode. */
4304 if (TREE_CODE (type) == BOOLEAN_TYPE)
4305 return expr;
4306 return fold_convert_loc (loc, boolean_type_node, expr);
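/* For instance (a sketch): an int-typed comparison a < b has its type
   switched to BOOLEAN_TYPE in place, a plain int value v is wrapped
   in a conversion to boolean_type_node, and a condition of the form
   __builtin_expect ((long) (a < b), 1) != 0 recurses into the
   comparison via the NE_EXPR special case above.  */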
4310 /* Given a conditional expression *EXPR_P without side effects, gimplify
4311 its operands. New statements are inserted to PRE_P. */
4313 static enum gimplify_status
4314 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4316 tree expr = *expr_p, cond;
4317 enum gimplify_status ret, tret;
4318 enum tree_code code;
4320 cond = gimple_boolify (COND_EXPR_COND (expr));
4322 /* We need to handle && and || specially, as gimplifying them
4323 creates a pure cond_expr again, which would otherwise lead to an infinite cycle. */
4324 code = TREE_CODE (cond);
4325 if (code == TRUTH_ANDIF_EXPR)
4326 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4327 else if (code == TRUTH_ORIF_EXPR)
4328 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4329 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4330 COND_EXPR_COND (*expr_p) = cond;
4332 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4333 is_gimple_val, fb_rvalue);
4334 ret = MIN (ret, tret);
4335 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4336 is_gimple_val, fb_rvalue);
4338 return MIN (ret, tret);
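/* E.g. (a sketch): for tmp = (a && b) ? x : y with side-effect-free,
   non-trapping arms, the condition's TRUTH_ANDIF_EXPR is demoted to
   TRUTH_AND_EXPR and gimplified to a single value t, leaving
   tmp = t ? x : y as a valid rhs instead of expanding control flow.  */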
4341 /* Return true if evaluating EXPR could trap.
4342 EXPR is GENERIC, while tree_could_trap_p can be called
4343 only on GIMPLE. */
4345 bool
4346 generic_expr_could_trap_p (tree expr)
4348 unsigned i, n;
4350 if (!expr || is_gimple_val (expr))
4351 return false;
4353 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4354 return true;
4356 n = TREE_OPERAND_LENGTH (expr);
4357 for (i = 0; i < n; i++)
4358 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4359 return true;
4361 return false;
4364 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4365 into
4367 if (p)              if (p)
4368 t1 = a;               a;
4369 else         or     else
4370 t1 = b;               b;
4371 t1;
4373 The second form is used when *EXPR_P is of type void.
4375 PRE_P points to the list where side effects that must happen before
4376 *EXPR_P should be stored. */
4378 static enum gimplify_status
4379 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4381 tree expr = *expr_p;
4382 tree type = TREE_TYPE (expr);
4383 location_t loc = EXPR_LOCATION (expr);
4384 tree tmp, arm1, arm2;
4385 enum gimplify_status ret;
4386 tree label_true, label_false, label_cont;
4387 bool have_then_clause_p, have_else_clause_p;
4388 gcond *cond_stmt;
4389 enum tree_code pred_code;
4390 gimple_seq seq = NULL;
4392 /* If this COND_EXPR has a value, copy the values into a temporary within
4393 the arms. */
4394 if (!VOID_TYPE_P (type))
4396 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4397 tree result;
4399 /* If either an rvalue is ok or we do not require an lvalue, create the
4400 temporary. But we cannot do that if the type is addressable. */
4401 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4402 && !TREE_ADDRESSABLE (type))
4404 if (gimplify_ctxp->allow_rhs_cond_expr
4405 /* If either branch has side effects or could trap, it can't be
4406 evaluated unconditionally. */
4407 && !TREE_SIDE_EFFECTS (then_)
4408 && !generic_expr_could_trap_p (then_)
4409 && !TREE_SIDE_EFFECTS (else_)
4410 && !generic_expr_could_trap_p (else_))
4411 return gimplify_pure_cond_expr (expr_p, pre_p);
4413 tmp = create_tmp_var (type, "iftmp");
4414 result = tmp;
4417 /* Otherwise, only create and copy references to the values. */
4418 else
4420 type = build_pointer_type (type);
4422 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4423 then_ = build_fold_addr_expr_loc (loc, then_);
4425 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4426 else_ = build_fold_addr_expr_loc (loc, else_);
4428 expr
4429 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4431 tmp = create_tmp_var (type, "iftmp");
4432 result = build_simple_mem_ref_loc (loc, tmp);
4435 /* Build the new then clause, `tmp = then_;'. But don't build the
4436 assignment if the value is void; in C++ it can be if it's a throw. */
4437 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4438 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4440 /* Similarly, build the new else clause, `tmp = else_;'. */
4441 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4442 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4444 TREE_TYPE (expr) = void_type_node;
4445 recalculate_side_effects (expr);
4447 /* Move the COND_EXPR to the prequeue. */
4448 gimplify_stmt (&expr, pre_p);
4450 *expr_p = result;
4451 return GS_ALL_DONE;
4454 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4455 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4456 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4457 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4459 /* Make sure the condition has BOOLEAN_TYPE. */
4460 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4462 /* Break apart && and || conditions. */
4463 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4464 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4466 expr = shortcut_cond_expr (expr);
4468 if (expr != *expr_p)
4470 *expr_p = expr;
4472 /* We can't rely on gimplify_expr to re-gimplify the expanded
4473 form properly, as cleanups might cause the target labels to be
4474 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4475 set up a conditional context. */
4476 gimple_push_condition ();
4477 gimplify_stmt (expr_p, &seq);
4478 gimple_pop_condition (pre_p);
4479 gimple_seq_add_seq (pre_p, seq);
4481 return GS_ALL_DONE;
4485 /* Now do the normal gimplification. */
4487 /* Gimplify condition. */
4488 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4489 is_gimple_condexpr_for_cond, fb_rvalue);
4490 if (ret == GS_ERROR)
4491 return GS_ERROR;
4492 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4494 gimple_push_condition ();
4496 have_then_clause_p = have_else_clause_p = false;
4497 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4498 if (label_true
4499 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4500 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4501 have different locations, otherwise we end up with incorrect
4502 location information on the branches. */
4503 && (optimize
4504 || !EXPR_HAS_LOCATION (expr)
4505 || !rexpr_has_location (label_true)
4506 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4508 have_then_clause_p = true;
4509 label_true = GOTO_DESTINATION (label_true);
4511 else
4512 label_true = create_artificial_label (UNKNOWN_LOCATION);
4513 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4514 if (label_false
4515 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4516 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4517 have different locations, otherwise we end up with incorrect
4518 location information on the branches. */
4519 && (optimize
4520 || !EXPR_HAS_LOCATION (expr)
4521 || !rexpr_has_location (label_false)
4522 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4524 have_else_clause_p = true;
4525 label_false = GOTO_DESTINATION (label_false);
4527 else
4528 label_false = create_artificial_label (UNKNOWN_LOCATION);
4530 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4531 &arm2);
4532 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4533 label_false);
4534 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4535 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4536 gimplify_seq_add_stmt (&seq, cond_stmt);
4537 gimple_stmt_iterator gsi = gsi_last (seq);
4538 maybe_fold_stmt (&gsi);
4540 label_cont = NULL_TREE;
4541 if (!have_then_clause_p)
4543 /* For if (...) {} else { code; } put label_true after
4544 the else block. */
4545 if (TREE_OPERAND (expr, 1) == NULL_TREE
4546 && !have_else_clause_p
4547 && TREE_OPERAND (expr, 2) != NULL_TREE)
4549 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4550 handling that label_cont == label_true can be only reached
4551 through fallthrough from { code; }. */
4552 if (integer_zerop (COND_EXPR_COND (expr)))
4553 UNUSED_LABEL_P (label_true) = 1;
4554 label_cont = label_true;
4556 else
4558 bool then_side_effects
4559 = (TREE_OPERAND (expr, 1)
4560 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4561 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4562 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4563 /* For if (...) { code; } else {} or
4564 if (...) { code; } else goto label; or
4565 if (...) { code; return; } else { ... }
4566 label_cont isn't needed. */
4567 if (!have_else_clause_p
4568 && TREE_OPERAND (expr, 2) != NULL_TREE
4569 && gimple_seq_may_fallthru (seq))
4571 gimple *g;
4572 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4574 /* For if (0) { non-side-effect-code } else { code }
4575 tell -Wimplicit-fallthrough handling that label_cont can
4576 be only reached through fallthrough from { code }. */
4577 if (integer_zerop (COND_EXPR_COND (expr)))
4579 UNUSED_LABEL_P (label_true) = 1;
4580 if (!then_side_effects)
4581 UNUSED_LABEL_P (label_cont) = 1;
4584 g = gimple_build_goto (label_cont);
4586 /* GIMPLE_COND's are very low level; they have embedded
4587 gotos. This particular embedded goto should not be marked
4588 with the location of the original COND_EXPR, as it would
4589 correspond to the COND_EXPR's condition, not the ELSE or the
4590 THEN arms. To avoid marking it with the wrong location, flag
4591 it as "no location". */
4592 gimple_set_do_not_emit_location (g);
4594 gimplify_seq_add_stmt (&seq, g);
4598 if (!have_else_clause_p)
4600 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4601 tell -Wimplicit-fallthrough handling that label_false can be only
4602 reached through fallthrough from { code }. */
4603 if (integer_nonzerop (COND_EXPR_COND (expr))
4604 && (TREE_OPERAND (expr, 2) == NULL_TREE
4605 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4606 UNUSED_LABEL_P (label_false) = 1;
4607 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4608 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4610 if (label_cont)
4611 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4613 gimple_pop_condition (pre_p);
4614 gimple_seq_add_seq (pre_p, seq);
4616 if (ret == GS_ERROR)
4617 ; /* Do nothing. */
4618 else if (have_then_clause_p || have_else_clause_p)
4619 ret = GS_ALL_DONE;
4620 else
4622 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4623 expr = TREE_OPERAND (expr, 0);
4624 gimplify_stmt (&expr, pre_p);
4627 *expr_p = NULL;
4628 return ret;
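/* Overall example (a sketch): x = p ? a : b; with an rvalue fallback
   becomes, after the temporary is introduced and the void COND_EXPR
   is gimplified,

     if (p) goto L1; else goto L2;
     L1: iftmp = a; goto L3;
     L2: iftmp = b;
     L3: x = iftmp;

   while a void COND_EXPR keeps its arms as plain statement
   sequences.  */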
4631 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4632 to be marked addressable.
4634 We cannot rely on such an expression being directly markable if a temporary
4635 has been created by the gimplification. In this case, we create another
4636 temporary and initialize it with a copy, which will become a store after we
4637 mark it addressable. This can happen if the front-end passed us something
4638 that it could not mark addressable yet, like the conversion
4639 (int) floatvar passed to a Fortran pass-by-reference parameter. */
4641 static void
4642 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4644 while (handled_component_p (*expr_p))
4645 expr_p = &TREE_OPERAND (*expr_p, 0);
4647 /* Do not allow an SSA name as the temporary. */
4648 if (is_gimple_reg (*expr_p))
4649 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4652 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4653 a call to __builtin_memcpy. */
4655 static enum gimplify_status
4656 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4657 gimple_seq *seq_p)
4659 tree t, to, to_ptr, from, from_ptr;
4660 gcall *gs;
4661 location_t loc = EXPR_LOCATION (*expr_p);
4663 to = TREE_OPERAND (*expr_p, 0);
4664 from = TREE_OPERAND (*expr_p, 1);
4666 /* Mark the RHS addressable. Beware that it may not be possible to do so
4667 directly if a temporary has been created by the gimplification. */
4668 prepare_gimple_addressable (&from, seq_p);
4670 mark_addressable (from);
4671 from_ptr = build_fold_addr_expr_loc (loc, from);
4672 gimplify_arg (&from_ptr, seq_p, loc);
4674 mark_addressable (to);
4675 to_ptr = build_fold_addr_expr_loc (loc, to);
4676 gimplify_arg (&to_ptr, seq_p, loc);
4678 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4680 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4681 gimple_call_set_alloca_for_var (gs, true);
4683 if (want_value)
4685 /* tmp = memcpy() */
4686 t = create_tmp_var (TREE_TYPE (to_ptr));
4687 gimple_call_set_lhs (gs, t);
4688 gimplify_seq_add_stmt (seq_p, gs);
4690 *expr_p = build_simple_mem_ref (t);
4691 return GS_ALL_DONE;
4694 gimplify_seq_add_stmt (seq_p, gs);
4695 *expr_p = NULL;
4696 return GS_ALL_DONE;
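/* E.g. (a sketch): for an assignment of a variable-sized aggregate,
   to = from, where SIZE comes from a WITH_SIZE_EXPR, this emits

     __builtin_memcpy (&to, &from, size);

   and, when the value is wanted, hands back *tmp with tmp holding
   the call's result (the destination address).  */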
4699 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4700 a call to __builtin_memset. In this case we know that the RHS is
4701 a CONSTRUCTOR with an empty element list. */
4703 static enum gimplify_status
4704 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4705 gimple_seq *seq_p)
4707 tree t, from, to, to_ptr;
4708 gcall *gs;
4709 location_t loc = EXPR_LOCATION (*expr_p);
4711 /* Assert our assumptions, to abort instead of producing wrong code
4712 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4713 not be immediately exposed. */
4714 from = TREE_OPERAND (*expr_p, 1);
4715 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4716 from = TREE_OPERAND (from, 0);
4718 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4719 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4721 /* Now proceed. */
4722 to = TREE_OPERAND (*expr_p, 0);
4724 to_ptr = build_fold_addr_expr_loc (loc, to);
4725 gimplify_arg (&to_ptr, seq_p, loc);
4726 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4728 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4730 if (want_value)
4732 /* tmp = memset() */
4733 t = create_tmp_var (TREE_TYPE (to_ptr));
4734 gimple_call_set_lhs (gs, t);
4735 gimplify_seq_add_stmt (seq_p, gs);
4737 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4738 return GS_ALL_DONE;
4741 gimplify_seq_add_stmt (seq_p, gs);
4742 *expr_p = NULL;
4743 return GS_ALL_DONE;
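/* E.g. (a sketch): zero-initializing such a variable-sized object,
   to = {}, becomes

     __builtin_memset (&to, 0, size);

   relying on the assertion above that the RHS CONSTRUCTOR is empty.  */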
4746 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4747 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4748 assignment. Return non-null if we detect a potential overlap. */
4750 struct gimplify_init_ctor_preeval_data
4752 /* The base decl of the lhs object. May be NULL, in which case we
4753 have to assume the lhs is indirect. */
4754 tree lhs_base_decl;
4756 /* The alias set of the lhs object. */
4757 alias_set_type lhs_alias_set;
4760 static tree
4761 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4763 struct gimplify_init_ctor_preeval_data *data
4764 = (struct gimplify_init_ctor_preeval_data *) xdata;
4765 tree t = *tp;
4767 /* If we find the base object, obviously we have overlap. */
4768 if (data->lhs_base_decl == t)
4769 return t;
4771 /* If the constructor component is indirect, determine if we have a
4772 potential overlap with the lhs. The only bits of information we
4773 have to go on at this point are addressability and alias sets. */
4774 if ((INDIRECT_REF_P (t)
4775 || TREE_CODE (t) == MEM_REF)
4776 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4777 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4778 return t;
4780 /* If the constructor component is a call, determine if it can hide a
4781 potential overlap with the lhs through an INDIRECT_REF like above.
4782 ??? Ugh - this is completely broken. In fact this whole analysis
4783 doesn't look conservative. */
4784 if (TREE_CODE (t) == CALL_EXPR)
4786 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4788 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4789 if (POINTER_TYPE_P (TREE_VALUE (type))
4790 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4791 && alias_sets_conflict_p (data->lhs_alias_set,
4792 get_alias_set
4793 (TREE_TYPE (TREE_VALUE (type)))))
4794 return t;
4797 if (IS_TYPE_OR_DECL_P (t))
4798 *walk_subtrees = 0;
4799 return NULL;
4802 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4803 force values that overlap with the lhs (as described by *DATA)
4804 into temporaries. */
4806 static void
4807 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4808 struct gimplify_init_ctor_preeval_data *data)
4810 enum gimplify_status one;
4812 /* If the value is constant, then there's nothing to pre-evaluate. */
4813 if (TREE_CONSTANT (*expr_p))
4815 /* Ensure it does not have side effects; it might contain a reference to
4816 the object we're initializing. */
4817 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4818 return;
4821 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4822 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4823 return;
4825 /* Recurse for nested constructors. */
4826 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4828 unsigned HOST_WIDE_INT ix;
4829 constructor_elt *ce;
4830 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4832 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4833 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4835 return;
4838 /* If this is a variable sized type, we must remember the size. */
4839 maybe_with_size_expr (expr_p);
4841 /* Gimplify the constructor element to something appropriate for the rhs
4842 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4843 the gimplifier will consider this a store to memory. Doing this
4844 gimplification now means that we won't have to deal with complicated
4845 language-specific trees, nor trees like SAVE_EXPR that can induce
4846 exponential search behavior. */
4847 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4848 if (one == GS_ERROR)
4850 *expr_p = NULL;
4851 return;
4854 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4855 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4856 always be true for all scalars, since is_gimple_mem_rhs insists on a
4857 temporary variable for them. */
4858 if (DECL_P (*expr_p))
4859 return;
4861 /* If this is of variable size, we have no choice but to assume it doesn't
4862 overlap since we can't make a temporary for it. */
4863 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4864 return;
4866 /* Otherwise, we must search for overlap ... */
4867 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4868 return;
4870 /* ... and if found, force the value into a temporary. */
4871 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
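/* Example of the overlap being guarded against (a sketch): in

     *p = (struct T) { .a = q->a, ... };

   the aggregate element q->a survives gimplification as a memory
   reference; if its alias set conflicts with that of *p, it is
   forced into a temporary here so that clearing or storing the
   other elements cannot clobber it first.  */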
4874 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4875 a RANGE_EXPR in a CONSTRUCTOR for an array.
4877 var = lower;
4878 loop_entry:
4879 object[var] = value;
4880 if (var == upper)
4881 goto loop_exit;
4882 var = var + 1;
4883 goto loop_entry;
4884 loop_exit:
4886 We increment var _after_ the loop exit check because we might otherwise
4887 fail if upper == TYPE_MAX_VALUE (type for upper).
4889 Note that we never have to deal with SAVE_EXPRs here, because this has
4890 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4892 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4893 gimple_seq *, bool);
4895 static void
4896 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4897 tree value, tree array_elt_type,
4898 gimple_seq *pre_p, bool cleared)
4900 tree loop_entry_label, loop_exit_label, fall_thru_label;
4901 tree var, var_type, cref, tmp;
4903 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4904 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4905 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4907 /* Create and initialize the index variable. */
4908 var_type = TREE_TYPE (upper);
4909 var = create_tmp_var (var_type);
4910 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4912 /* Add the loop entry label. */
4913 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4915 /* Build the reference. */
4916 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4917 var, NULL_TREE, NULL_TREE);
4919 /* If the value is itself a CONSTRUCTOR, call gimplify_init_ctor_eval
4920 to do the store. Otherwise just assign value to the reference. */
4922 if (TREE_CODE (value) == CONSTRUCTOR)
4923 /* NB we might have to call ourself recursively through
4924 gimplify_init_ctor_eval if the value is a constructor. */
4925 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4926 pre_p, cleared);
4927 else
4929 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4930 != GS_ERROR)
4931 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4934 /* We exit the loop when the index var is equal to the upper bound. */
4935 gimplify_seq_add_stmt (pre_p,
4936 gimple_build_cond (EQ_EXPR, var, upper,
4937 loop_exit_label, fall_thru_label));
4939 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4941 /* Otherwise, increment the index var... */
4942 tmp = build2 (PLUS_EXPR, var_type, var,
4943 fold_convert (var_type, integer_one_node));
4944 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4946 /* ...and jump back to the loop entry. */
4947 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4949 /* Add the loop exit label. */
4950 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
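/* E.g. (a sketch): a GNU C range designator such as

     int a[16] = { [2 ... 5] = v };

   arrives as a RANGE_EXPR with lower 2 and upper 5 and is emitted as
   the loop above, storing v into a[var] until var == 5.  */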
4953 /* A subroutine of gimplify_init_constructor. Generate individual
4954 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4955 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4956 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4957 zeroed first. */
4959 static void
4960 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4961 gimple_seq *pre_p, bool cleared)
4963 tree array_elt_type = NULL;
4964 unsigned HOST_WIDE_INT ix;
4965 tree purpose, value;
4967 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4968 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4970 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4972 tree cref;
4974 /* NULL values are created above for gimplification errors. */
4975 if (value == NULL)
4976 continue;
4978 if (cleared && initializer_zerop (value))
4979 continue;
4981 /* ??? Here's to hoping the front end fills in all of the indices,
4982 so we don't have to figure out what's missing ourselves. */
4983 gcc_assert (purpose);
4985 /* Skip zero-sized fields, unless value has side-effects. This can
4986 happen with calls to functions returning an empty type, which
4987 we shouldn't discard. As a number of downstream passes don't
4988 expect sets of empty type fields, we rely on the gimplification of
4989 the MODIFY_EXPR we make below to drop the assignment statement. */
4990 if (!TREE_SIDE_EFFECTS (value)
4991 && TREE_CODE (purpose) == FIELD_DECL
4992 && is_empty_type (TREE_TYPE (purpose)))
4993 continue;
4995 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4996 whole range. */
4997 if (TREE_CODE (purpose) == RANGE_EXPR)
4999 tree lower = TREE_OPERAND (purpose, 0);
5000 tree upper = TREE_OPERAND (purpose, 1);
5002 /* If the lower bound is equal to upper, just treat it as if
5003 upper was the index. */
5004 if (simple_cst_equal (lower, upper))
5005 purpose = upper;
5006 else
5008 gimplify_init_ctor_eval_range (object, lower, upper, value,
5009 array_elt_type, pre_p, cleared);
5010 continue;
5014 if (array_elt_type)
5016 /* Do not use bitsizetype for ARRAY_REF indices. */
5017 if (TYPE_DOMAIN (TREE_TYPE (object)))
5018 purpose
5019 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5020 purpose);
5021 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5022 purpose, NULL_TREE, NULL_TREE);
5024 else
5026 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5027 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5028 unshare_expr (object), purpose, NULL_TREE);
5031 if (TREE_CODE (value) == CONSTRUCTOR
5032 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5033 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5034 pre_p, cleared);
5035 else
5037 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5038 gimplify_and_add (init, pre_p);
5039 ggc_free (init);
5044 /* Return the appropriate RHS predicate for this LHS. */
5046 gimple_predicate
5047 rhs_predicate_for (tree lhs)
5049 if (is_gimple_reg (lhs))
5050 return is_gimple_reg_rhs_or_call;
5051 else
5052 return is_gimple_mem_rhs_or_call;
5055 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5056 before the LHS has been gimplified. */
5058 static gimple_predicate
5059 initial_rhs_predicate_for (tree lhs)
5061 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5062 return is_gimple_reg_rhs_or_call;
5063 else
5064 return is_gimple_mem_rhs_or_call;
5067 /* Gimplify a C99 compound literal expression. This just means adding
5068 the DECL_EXPR before the current statement and using its anonymous
5069 decl instead. */
5071 static enum gimplify_status
5072 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5073 bool (*gimple_test_f) (tree),
5074 fallback_t fallback)
5076 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5077 tree decl = DECL_EXPR_DECL (decl_s);
5078 tree init = DECL_INITIAL (decl);
5079 /* Mark the decl as addressable if the compound literal
5080 expression is addressable now, otherwise it is marked too late
5081 after we gimplify the initialization expression. */
5082 if (TREE_ADDRESSABLE (*expr_p))
5083 TREE_ADDRESSABLE (decl) = 1;
5084 /* Otherwise, if we don't need an lvalue and have a literal,
5085 substitute it directly. Check if it matches the gimple predicate, as
5086 otherwise we'd generate a new temporary, and we can as well just
5087 use the decl we already have. */
5088 else if (!TREE_ADDRESSABLE (decl)
5089 && !TREE_THIS_VOLATILE (decl)
5090 && init
5091 && (fallback & fb_lvalue) == 0
5092 && gimple_test_f (init))
5094 *expr_p = init;
5095 return GS_OK;
5098 /* If the decl is not addressable, then it is being used in some
5099 expression or on the right hand side of a statement, and it can
5100 be put into a readonly data section. */
5101 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5102 TREE_READONLY (decl) = 1;
5104 /* This decl isn't mentioned in the enclosing block, so add it to the
5105 list of temps. FIXME it seems a bit of a kludge to say that
5106 anonymous artificial vars aren't pushed, but everything else is. */
5107 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5108 gimple_add_tmp_var (decl);
5110 gimplify_and_add (decl_s, pre_p);
5111 *expr_p = decl;
5112 return GS_OK;
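/* E.g. (a sketch): for int *p = (int[]) { 1, 2, 3 }; the DECL_EXPR of
   the anonymous array is gimplified into PRE_P and *expr_p becomes
   the decl itself; in a pure rvalue context the decl may further be
   marked TREE_READONLY and end up in a read-only data section.  */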
5115 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5116 return a new CONSTRUCTOR if something changed. */
5118 static tree
5119 optimize_compound_literals_in_ctor (tree orig_ctor)
5121 tree ctor = orig_ctor;
5122 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5123 unsigned int idx, num = vec_safe_length (elts);
5125 for (idx = 0; idx < num; idx++)
5127 tree value = (*elts)[idx].value;
5128 tree newval = value;
5129 if (TREE_CODE (value) == CONSTRUCTOR)
5130 newval = optimize_compound_literals_in_ctor (value);
5131 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5133 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5134 tree decl = DECL_EXPR_DECL (decl_s);
5135 tree init = DECL_INITIAL (decl);
5137 if (!TREE_ADDRESSABLE (value)
5138 && !TREE_ADDRESSABLE (decl)
5139 && init
5140 && TREE_CODE (init) == CONSTRUCTOR)
5141 newval = optimize_compound_literals_in_ctor (init);
5143 if (newval == value)
5144 continue;
5146 if (ctor == orig_ctor)
5148 ctor = copy_node (orig_ctor);
5149 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5150 elts = CONSTRUCTOR_ELTS (ctor);
5152 (*elts)[idx].value = newval;
5154 return ctor;
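/* E.g. (a sketch): the nested literal in

     struct T t = { .s = (struct S) { 1, 2 } };

   is replaced by its CONSTRUCTOR initializer, yielding
   { .s = { 1, 2 } }, so the outer initializer can be treated as one
   constant aggregate.  */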
5157 /* A subroutine of gimplify_modify_expr. Break out elements of a
5158 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5160 Note that we still need to clear any elements that don't have explicit
5161 initializers, so if not all elements are initialized we keep the
5162 original MODIFY_EXPR, we just remove all of the constructor elements.
5164 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5165 GS_ERROR if we would have to create a temporary when gimplifying
5166 this constructor. Otherwise, return GS_OK.
5168 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5170 static enum gimplify_status
5171 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5172 bool want_value, bool notify_temp_creation)
5174 tree object, ctor, type;
5175 enum gimplify_status ret;
5176 vec<constructor_elt, va_gc> *elts;
5177 bool cleared = false;
5178 bool is_empty_ctor = false;
5179 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5181 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5183 if (!notify_temp_creation)
5185 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5186 is_gimple_lvalue, fb_lvalue);
5187 if (ret == GS_ERROR)
5188 return ret;
5191 object = TREE_OPERAND (*expr_p, 0);
5192 ctor = TREE_OPERAND (*expr_p, 1)
5193 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5194 type = TREE_TYPE (ctor);
5195 elts = CONSTRUCTOR_ELTS (ctor);
5196 ret = GS_ALL_DONE;
5198 switch (TREE_CODE (type))
5200 case RECORD_TYPE:
5201 case UNION_TYPE:
5202 case QUAL_UNION_TYPE:
5203 case ARRAY_TYPE:
5205 /* Use readonly data for initializers of this or smaller size
5206 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5207 ratio. */
5208 const HOST_WIDE_INT min_unique_size = 64;
5209 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5210 is smaller than this, use readonly data. */
5211 const int unique_nonzero_ratio = 8;
5212 /* True if a single access of the object must be ensured. This is the
5213 case if the target is volatile, the type is non-addressable, and more
5214 than one field needs to be assigned. */
5215 const bool ensure_single_access
5216 = TREE_THIS_VOLATILE (object)
5217 && !TREE_ADDRESSABLE (type)
5218 && vec_safe_length (elts) > 1;
5219 struct gimplify_init_ctor_preeval_data preeval_data;
5220 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5221 HOST_WIDE_INT num_unique_nonzero_elements;
5222 bool complete_p, valid_const_initializer;
5224 /* Aggregate types must lower constructors to initialization of
5225 individual elements. The exception is that a CONSTRUCTOR node
5226 with no elements indicates zero-initialization of the whole. */
5227 if (vec_safe_is_empty (elts))
5229 if (notify_temp_creation)
5230 return GS_OK;
5232 /* The var will be initialized and so appear on the lhs of an
5233 assignment; it can't be TREE_READONLY anymore. */
5234 if (VAR_P (object))
5235 TREE_READONLY (object) = 0;
5237 is_empty_ctor = true;
5238 break;
5241 /* Fetch information about the constructor to direct later processing.
5242 We might want to make static versions of it in various cases, and
5243 can only do so if it known to be a valid constant initializer. */
5244 valid_const_initializer
5245 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5246 &num_unique_nonzero_elements,
5247 &num_ctor_elements, &complete_p);
5249 /* If a const aggregate variable is being initialized, then it
5250 should never be a loss to promote the variable to be static. */
5251 if (valid_const_initializer
5252 && num_nonzero_elements > 1
5253 && TREE_READONLY (object)
5254 && VAR_P (object)
5255 && !DECL_REGISTER (object)
5256 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5257 || DECL_MERGEABLE (object))
5258 /* For ctors that have many repeated nonzero elements
5259 represented through RANGE_EXPRs, prefer initializing
5260 those through runtime loops over copies of large amounts
5261 of data from readonly data section. */
5262 && (num_unique_nonzero_elements
5263 > num_nonzero_elements / unique_nonzero_ratio
5264 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5265 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5267 if (notify_temp_creation)
5268 return GS_ERROR;
5270 DECL_INITIAL (object) = ctor;
5271 TREE_STATIC (object) = 1;
5272 if (!DECL_NAME (object))
5273 DECL_NAME (object) = create_tmp_var_name ("C");
5274 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5276 /* ??? C++ doesn't automatically append a .<number> to the
5277 assembler name, and even when it does, it looks at FE private
5278 data structures to figure out what that number should be,
5279 which are not set for this variable. I suppose this is
5280 important for local statics for inline functions, which aren't
5281 "local" in the object file sense. So in order to get a unique
5282 TU-local symbol, we must invoke the lhd version now. */
5283 lhd_set_decl_assembler_name (object);
5285 *expr_p = NULL_TREE;
5286 break;
5289 /* The var will be initialized and so appear on the lhs of an
5290 assignment; it can't be TREE_READONLY anymore. */
5291 if (VAR_P (object) && !notify_temp_creation)
5292 TREE_READONLY (object) = 0;
5294 /* If there are "lots" of initialized elements, even discounting
5295 those that are not address constants (and thus *must* be
5296 computed at runtime), then partition the constructor into
5297 constant and non-constant parts. Block copy the constant
5298 parts in, then generate code for the non-constant parts. */
5299 /* TODO. There's code in cp/typeck.cc to do this. */
5301 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5302 /* store_constructor will ignore the clearing of variable-sized
5303 objects. Initializers for such objects must explicitly set
5304 every field that needs to be set. */
5305 cleared = false;
5306 else if (!complete_p)
5307 /* If the constructor isn't complete, clear the whole object
5308 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5310 ??? This ought not to be needed. For any element not present
5311 in the initializer, we should simply set them to zero. Except
5312 we'd need to *find* the elements that are not present, and that
5313 requires trickery to avoid quadratic compile-time behavior in
5314 large cases or excessive memory use in small cases. */
5315 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5316 else if (num_ctor_elements - num_nonzero_elements
5317 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5318 && num_nonzero_elements < num_ctor_elements / 4)
5319 /* If there are "lots" of zeros, it's more efficient to clear
5320 the memory and then set the nonzero elements. */
5321 cleared = true;
5322 else if (ensure_single_access && num_nonzero_elements == 0)
5323 /* If a single access to the target must be ensured and all elements
5324 are zero, then it's optimal to clear whatever their number. */
5325 cleared = true;
5326 else
5327 cleared = false;
5329 /* If there are "lots" of initialized elements, and all of them
5330 are valid address constants, then the entire initializer can
5331 be dropped to memory, and then memcpy'd out. Don't do this
5332 for sparse arrays, though, as it's more efficient to follow
5333 the standard CONSTRUCTOR behavior of memset followed by
5334 individual element initialization. Also don't do this for small
5335 all-zero initializers (which aren't big enough to merit
5336 clearing), and don't try to make bitwise copies of
5337 TREE_ADDRESSABLE types. */
5338 if (valid_const_initializer
5339 && complete_p
5340 && !(cleared || num_nonzero_elements == 0)
5341 && !TREE_ADDRESSABLE (type))
5343 HOST_WIDE_INT size = int_size_in_bytes (type);
5344 unsigned int align;
5346 /* ??? We can still get unbounded array types, at least
5347 from the C++ front end. This seems wrong, but attempt
5348 to work around it for now. */
5349 if (size < 0)
5351 size = int_size_in_bytes (TREE_TYPE (object));
5352 if (size >= 0)
5353 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5356 /* Find the maximum alignment we can assume for the object. */
5357 /* ??? Make use of DECL_OFFSET_ALIGN. */
5358 if (DECL_P (object))
5359 align = DECL_ALIGN (object);
5360 else
5361 align = TYPE_ALIGN (type);
5363 /* Do a block move either if the size is so small as to make
5364 each individual move a sub-unit move on average, or if it
5365 is so large as to make individual moves inefficient. */
5366 if (size > 0
5367 && num_nonzero_elements > 1
5368 /* For ctors that have many repeated nonzero elements
5369 represented through RANGE_EXPRs, prefer initializing
5370 those through runtime loops over copies of large amounts
5371 of data from readonly data section. */
5372 && (num_unique_nonzero_elements
5373 > num_nonzero_elements / unique_nonzero_ratio
5374 || size <= min_unique_size)
5375 && (size < num_nonzero_elements
5376 || !can_move_by_pieces (size, align)))
5378 if (notify_temp_creation)
5379 return GS_ERROR;
5381 walk_tree (&ctor, force_labels_r, NULL, NULL);
5382 ctor = tree_output_constant_def (ctor);
5383 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5384 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5385 TREE_OPERAND (*expr_p, 1) = ctor;
5387 /* This is no longer an assignment of a CONSTRUCTOR, but
5388 we still may have processing to do on the LHS. So
5389 pretend we didn't do anything here to let that happen. */
5390 return GS_UNHANDLED;
5394 /* If a single access to the target must be ensured and there are
5395 nonzero elements or the zero elements are not assigned en masse,
5396 initialize the target from a temporary. */
5397 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5399 if (notify_temp_creation)
5400 return GS_ERROR;
5402 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5403 TREE_OPERAND (*expr_p, 0) = temp;
5404 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5405 *expr_p,
5406 build2 (MODIFY_EXPR, void_type_node,
5407 object, temp));
5408 return GS_OK;
5411 if (notify_temp_creation)
5412 return GS_OK;
5414 /* If there are nonzero elements and if needed, pre-evaluate to capture
5415 elements overlapping with the lhs into temporaries. We must do this
5416 before clearing to fetch the values before they are zeroed-out. */
5417 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5419 preeval_data.lhs_base_decl = get_base_address (object);
5420 if (!DECL_P (preeval_data.lhs_base_decl))
5421 preeval_data.lhs_base_decl = NULL;
5422 preeval_data.lhs_alias_set = get_alias_set (object);
5424 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5425 pre_p, post_p, &preeval_data);
5428 bool ctor_has_side_effects_p
5429 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5431 if (cleared)
5433 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5434 Note that we still have to gimplify, in order to handle the
5435 case of variable sized types. Avoid shared tree structures. */
5436 CONSTRUCTOR_ELTS (ctor) = NULL;
5437 TREE_SIDE_EFFECTS (ctor) = 0;
5438 object = unshare_expr (object);
5439 gimplify_stmt (expr_p, pre_p);
5442 /* If we have not block cleared the object, or if there are nonzero
5443 elements in the constructor, or if the constructor has side effects,
5444 add assignments to the individual scalar fields of the object. */
5445 if (!cleared
5446 || num_nonzero_elements > 0
5447 || ctor_has_side_effects_p)
5448 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5450 *expr_p = NULL_TREE;
5452 break;
5454 case COMPLEX_TYPE:
5456 tree r, i;
5458 if (notify_temp_creation)
5459 return GS_OK;
5461 /* Extract the real and imaginary parts out of the ctor. */
5462 gcc_assert (elts->length () == 2);
5463 r = (*elts)[0].value;
5464 i = (*elts)[1].value;
5465 if (r == NULL || i == NULL)
5467 tree zero = build_zero_cst (TREE_TYPE (type));
5468 if (r == NULL)
5469 r = zero;
5470 if (i == NULL)
5471 i = zero;
5474 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5475 represent creation of a complex value. */
5476 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5478 ctor = build_complex (type, r, i);
5479 TREE_OPERAND (*expr_p, 1) = ctor;
5481 else
5483 ctor = build2 (COMPLEX_EXPR, type, r, i);
5484 TREE_OPERAND (*expr_p, 1) = ctor;
5485 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5486 pre_p,
5487 post_p,
5488 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5489 fb_rvalue);
5492 break;
5494 case VECTOR_TYPE:
5496 unsigned HOST_WIDE_INT ix;
5497 constructor_elt *ce;
5499 if (notify_temp_creation)
5500 return GS_OK;
5502 /* Vector types use CONSTRUCTOR all the way through gimple
5503 compilation as a general initializer. */
5504 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5506 enum gimplify_status tret;
5507 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5508 fb_rvalue);
5509 if (tret == GS_ERROR)
5510 ret = GS_ERROR;
5511 else if (TREE_STATIC (ctor)
5512 && !initializer_constant_valid_p (ce->value,
5513 TREE_TYPE (ce->value)))
5514 TREE_STATIC (ctor) = 0;
5516 recompute_constructor_flags (ctor);
5518 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5519 if (TREE_CONSTANT (ctor))
5521 bool constant_p = true;
5522 tree value;
5524 /* Even when ctor is constant, it might contain non-*_CST
5525 elements, such as addresses or trapping values like
5526 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5527 in VECTOR_CST nodes. */
5528 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5529 if (!CONSTANT_CLASS_P (value))
5531 constant_p = false;
5532 break;
5535 if (constant_p)
5537 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5538 break;
5542 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5543 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5545 break;
5547 default:
5548 /* So how did we get a CONSTRUCTOR for a scalar type? */
5549 gcc_unreachable ();
5552 if (ret == GS_ERROR)
5553 return GS_ERROR;
5554 /* If we have gimplified both sides of the initializer but have
5555 not emitted an assignment, do so now. */
5556 if (*expr_p
5557 /* If the type is an empty type, we don't need to emit the
5558 assignment. */
5559 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5561 tree lhs = TREE_OPERAND (*expr_p, 0);
5562 tree rhs = TREE_OPERAND (*expr_p, 1);
5563 if (want_value && object == lhs)
5564 lhs = unshare_expr (lhs);
5565 gassign *init = gimple_build_assign (lhs, rhs);
5566 gimplify_seq_add_stmt (pre_p, init);
5568 if (want_value)
5570 *expr_p = object;
5571 ret = GS_OK;
5573 else
5575 *expr_p = NULL;
5576 ret = GS_ALL_DONE;
5579 /* If the user requests initialization of automatic variables, we
5580 should also initialize any padding inside the variable. Add a call
5581 to __builtin_clear_padding (&object, 0, for_auto_init = true) to
5582 initialize the padding of the object to zero regardless of
5583 INIT_TYPE. Note, we will not insert this call if the aggregate
5584 variable has already been completely cleared or is initialized
5585 with an empty constructor. Nor can we insert it if the variable
5586 is a gimple register, since __builtin_clear_padding takes the
5587 address of the variable. As a result, if a long double/_Complex
5588 long double variable is spilled onto the stack later, its padding
5589 cannot be cleared with __builtin_clear_padding; it should instead
5590 be cleared when it is spilled into memory. */
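/* For illustration, with -ftrivial-auto-var-init a declaration roughly like
   struct { char c; long double ld; } s;
   is followed by
   __builtin_clear_padding (&s, 0, 1);
   (the last argument being for_auto_init) so that the bytes between C
   and LD are zeroed as well.  */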
5591 if (is_init_expr
5592 && !is_gimple_reg (object)
5593 && clear_padding_type_may_have_padding_p (type)
5594 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5595 || !AGGREGATE_TYPE_P (type))
5596 && is_var_need_auto_init (object))
5597 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5599 return ret;
5602 /* Given a pointer value OP0, return a simplified version of an
5603 indirection through OP0, or NULL_TREE if no simplification is
5604 possible. This may only be applied to the rhs of an expression.
5605 Note that the resulting type may differ from the pointed-to type,
5606 but only in ways that keep the two compatible from the langhooks
5607 point of view. */
5609 static tree
5610 gimple_fold_indirect_ref_rhs (tree t)
5612 return gimple_fold_indirect_ref (t);
5615 /* Subroutine of gimplify_modify_expr to do simplifications of
5616 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5617 something changes. */
5619 static enum gimplify_status
5620 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5621 gimple_seq *pre_p, gimple_seq *post_p,
5622 bool want_value)
5624 enum gimplify_status ret = GS_UNHANDLED;
5625 bool changed;
5629 changed = false;
5630 switch (TREE_CODE (*from_p))
5632 case VAR_DECL:
5633 /* If we're assigning from a read-only variable initialized with
5634 a constructor and not volatile, do the direct assignment from
5635 the constructor, but only if the target is not volatile either
5636 since this latter assignment might end up being done on a
5637 per-field basis. However, if the target is volatile and the type
5638 is aggregate and non-addressable, gimplify_init_constructor
5639 knows that it needs to ensure a single access to the target
5640 and it will return GS_OK only in this case. */
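/* For instance, given roughly
   static const struct S s = { 1, 2 };
   ...
   x = s;
   the assignment can be rewritten as x = { 1, 2 } and handed back to
   gimplify_init_constructor.  */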
5641 if (TREE_READONLY (*from_p)
5642 && DECL_INITIAL (*from_p)
5643 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5644 && !TREE_THIS_VOLATILE (*from_p)
5645 && (!TREE_THIS_VOLATILE (*to_p)
5646 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5647 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5649 tree old_from = *from_p;
5650 enum gimplify_status subret;
5652 /* Move the constructor into the RHS. */
5653 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5655 /* Let's see if gimplify_init_constructor will need to put
5656 it in memory. */
5657 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5658 false, true);
5659 if (subret == GS_ERROR)
5661 /* If so, revert the change. */
5662 *from_p = old_from;
5664 else
5666 ret = GS_OK;
5667 changed = true;
5670 break;
5671 case INDIRECT_REF:
5672 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5673 /* If we have code like
5675 *(const A*)(A*)&x
5677 where the type of "x" is a (possibly cv-qualified) variant
5678 of "A", treat the entire expression as identical to "x".
5679 This kind of code arises in C++ when an object is bound
5680 to a const reference, and if "x" is a TARGET_EXPR we want
5681 to take advantage of the optimization below. But not if
5682 the type is TREE_ADDRESSABLE; then C++17 says that the
5683 TARGET_EXPR needs to be a temporary. */
5684 if (tree t
5685 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5687 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5688 if (TREE_THIS_VOLATILE (t) != volatile_p)
5690 if (DECL_P (t))
5691 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5692 build_fold_addr_expr (t));
5693 if (REFERENCE_CLASS_P (t))
5694 TREE_THIS_VOLATILE (t) = volatile_p;
5696 *from_p = t;
5697 ret = GS_OK;
5698 changed = true;
5700 break;
5702 case TARGET_EXPR:
5704 /* If we are initializing something from a TARGET_EXPR, strip the
5705 TARGET_EXPR and initialize it directly, if possible. This can't
5706 be done if the initializer is void, since that implies that the
5707 temporary is set in some non-trivial way.
5709 ??? What about code that pulls out the temp and uses it
5710 elsewhere? I think that such code never uses the TARGET_EXPR as
5711 an initializer. If I'm wrong, we'll die because the temp won't
5712 have any RTL. In that case, I guess we'll need to replace
5713 references somehow. */
5714 tree init = TARGET_EXPR_INITIAL (*from_p);
5716 if (init
5717 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5718 || !TARGET_EXPR_NO_ELIDE (*from_p))
5719 && !VOID_TYPE_P (TREE_TYPE (init)))
5721 *from_p = init;
5722 ret = GS_OK;
5723 changed = true;
5726 break;
5728 case COMPOUND_EXPR:
5729 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5730 caught. */
5731 gimplify_compound_expr (from_p, pre_p, true);
5732 ret = GS_OK;
5733 changed = true;
5734 break;
5736 case CONSTRUCTOR:
5737 /* If we already made some changes, let the front end have a
5738 crack at this before we break it down. */
5739 if (ret != GS_UNHANDLED)
5740 break;
5742 /* If we're initializing from a CONSTRUCTOR, break this into
5743 individual MODIFY_EXPRs. */
5744 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5745 false);
5746 return ret;
5748 case COND_EXPR:
5749 /* If we're assigning to a non-register type, push the assignment
5750 down into the branches. This is mandatory for ADDRESSABLE types,
5751 since we cannot generate temporaries for such, but it saves a
5752 copy in other cases as well. */
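/* Schematically, for an aggregate X,
   x = b ? f1 () : f2 ();
   becomes
   if (b) x = f1 (); else x = f2 ();
   so no temporary of the aggregate type is needed.  */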
5753 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5755 /* This code should mirror the code in gimplify_cond_expr. */
5756 enum tree_code code = TREE_CODE (*expr_p);
5757 tree cond = *from_p;
5758 tree result = *to_p;
5760 ret = gimplify_expr (&result, pre_p, post_p,
5761 is_gimple_lvalue, fb_lvalue);
5762 if (ret != GS_ERROR)
5763 ret = GS_OK;
5765 /* If we are going to write RESULT more than once, clear
5766 its TREE_READONLY flag; otherwise we might incorrectly promote
5767 the variable to static const and initialize it at compile
5768 time in one of the branches. */
5769 if (VAR_P (result)
5770 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5771 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5772 TREE_READONLY (result) = 0;
5773 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5774 TREE_OPERAND (cond, 1)
5775 = build2 (code, void_type_node, result,
5776 TREE_OPERAND (cond, 1));
5777 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5778 TREE_OPERAND (cond, 2)
5779 = build2 (code, void_type_node, unshare_expr (result),
5780 TREE_OPERAND (cond, 2));
5782 TREE_TYPE (cond) = void_type_node;
5783 recalculate_side_effects (cond);
5785 if (want_value)
5787 gimplify_and_add (cond, pre_p);
5788 *expr_p = unshare_expr (result);
5790 else
5791 *expr_p = cond;
5792 return ret;
5794 break;
5796 case CALL_EXPR:
5797 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5798 return slot so that we don't generate a temporary. */
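/* E.g. given roughly
   struct S x = f ();
   where S is returned in memory, F constructs its return value
   directly in X rather than in a compiler-generated temporary that
   is then copied into X.  */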
5799 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5800 && aggregate_value_p (*from_p, *from_p))
5802 bool use_target;
5804 if (!(rhs_predicate_for (*to_p))(*from_p))
5805 /* If we need a temporary, *to_p isn't accurate. */
5806 use_target = false;
5807 /* It's OK to use the return slot directly unless it's an NRV. */
5808 else if (TREE_CODE (*to_p) == RESULT_DECL
5809 && DECL_NAME (*to_p) == NULL_TREE
5810 && needs_to_live_in_memory (*to_p))
5811 use_target = true;
5812 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5813 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5814 /* Don't force regs into memory. */
5815 use_target = false;
5816 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5817 /* It's OK to use the target directly if it's being
5818 initialized. */
5819 use_target = true;
5820 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5821 != INTEGER_CST)
5822 /* Always use the target and thus RSO for variable-sized types.
5823 GIMPLE cannot deal with a variable-sized assignment
5824 embedded in a call statement. */
5825 use_target = true;
5826 else if (TREE_CODE (*to_p) != SSA_NAME
5827 && (!is_gimple_variable (*to_p)
5828 || needs_to_live_in_memory (*to_p)))
5829 /* Don't use the original target if it's already addressable;
5830 if its address escapes, and the called function uses the
5831 NRV optimization, a conforming program could see *to_p
5832 change before the called function returns; see c++/19317.
5833 When optimizing, the return_slot pass marks more functions
5834 as safe after we have escape info. */
5835 use_target = false;
5836 else
5837 use_target = true;
5839 if (use_target)
5841 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5842 mark_addressable (*to_p);
5845 break;
5847 case WITH_SIZE_EXPR:
5848 /* Likewise for calls that return an aggregate of non-constant size,
5849 since we would not be able to generate a temporary at all. */
5850 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5852 *from_p = TREE_OPERAND (*from_p, 0);
5853 /* We don't change ret in this case because the
5854 WITH_SIZE_EXPR might have been added in
5855 gimplify_modify_expr, so returning GS_OK would lead to an
5856 infinite loop. */
5857 changed = true;
5859 break;
5861 /* If we're initializing from a container, push the initialization
5862 inside it. */
5863 case CLEANUP_POINT_EXPR:
5864 case BIND_EXPR:
5865 case STATEMENT_LIST:
5867 tree wrap = *from_p;
5868 tree t;
5870 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5871 fb_lvalue);
5872 if (ret != GS_ERROR)
5873 ret = GS_OK;
5875 t = voidify_wrapper_expr (wrap, *expr_p);
5876 gcc_assert (t == *expr_p);
5878 if (want_value)
5880 gimplify_and_add (wrap, pre_p);
5881 *expr_p = unshare_expr (*to_p);
5883 else
5884 *expr_p = wrap;
5885 return GS_OK;
5888 case NOP_EXPR:
5889 /* Pull out compound literal expressions from a NOP_EXPR.
5890 Those are created in the C FE to drop qualifiers during
5891 lvalue conversion. */
5892 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5893 && tree_ssa_useless_type_conversion (*from_p))
5895 *from_p = TREE_OPERAND (*from_p, 0);
5896 ret = GS_OK;
5897 changed = true;
5899 break;
5901 case COMPOUND_LITERAL_EXPR:
5903 tree complit = TREE_OPERAND (*expr_p, 1);
5904 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5905 tree decl = DECL_EXPR_DECL (decl_s);
5906 tree init = DECL_INITIAL (decl);
5908 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5909 into struct T x = { 0, 1, 2 } if the address of the
5910 compound literal has never been taken. */
5911 if (!TREE_ADDRESSABLE (complit)
5912 && !TREE_ADDRESSABLE (decl)
5913 && init)
5915 *expr_p = copy_node (*expr_p);
5916 TREE_OPERAND (*expr_p, 1) = init;
5917 return GS_OK;
5921 default:
5922 break;
5925 while (changed);
5927 return ret;
5931 /* Return true if T looks like a valid GIMPLE statement. */
5933 static bool
5934 is_gimple_stmt (tree t)
5936 const enum tree_code code = TREE_CODE (t);
5938 switch (code)
5940 case NOP_EXPR:
5941 /* The only valid NOP_EXPR is the empty statement. */
5942 return IS_EMPTY_STMT (t);
5944 case BIND_EXPR:
5945 case COND_EXPR:
5946 /* These are only valid if they're void. */
5947 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5949 case SWITCH_EXPR:
5950 case GOTO_EXPR:
5951 case RETURN_EXPR:
5952 case LABEL_EXPR:
5953 case CASE_LABEL_EXPR:
5954 case TRY_CATCH_EXPR:
5955 case TRY_FINALLY_EXPR:
5956 case EH_FILTER_EXPR:
5957 case CATCH_EXPR:
5958 case ASM_EXPR:
5959 case STATEMENT_LIST:
5960 case OACC_PARALLEL:
5961 case OACC_KERNELS:
5962 case OACC_SERIAL:
5963 case OACC_DATA:
5964 case OACC_HOST_DATA:
5965 case OACC_DECLARE:
5966 case OACC_UPDATE:
5967 case OACC_ENTER_DATA:
5968 case OACC_EXIT_DATA:
5969 case OACC_CACHE:
5970 case OMP_PARALLEL:
5971 case OMP_FOR:
5972 case OMP_SIMD:
5973 case OMP_DISTRIBUTE:
5974 case OMP_LOOP:
5975 case OACC_LOOP:
5976 case OMP_SCAN:
5977 case OMP_SCOPE:
5978 case OMP_SECTIONS:
5979 case OMP_SECTION:
5980 case OMP_SINGLE:
5981 case OMP_MASTER:
5982 case OMP_MASKED:
5983 case OMP_TASKGROUP:
5984 case OMP_ORDERED:
5985 case OMP_CRITICAL:
5986 case OMP_TASK:
5987 case OMP_TARGET:
5988 case OMP_TARGET_DATA:
5989 case OMP_TARGET_UPDATE:
5990 case OMP_TARGET_ENTER_DATA:
5991 case OMP_TARGET_EXIT_DATA:
5992 case OMP_TASKLOOP:
5993 case OMP_TEAMS:
5994 /* These are always void. */
5995 return true;
5997 case CALL_EXPR:
5998 case MODIFY_EXPR:
5999 case PREDICT_EXPR:
6000 /* These are valid regardless of their type. */
6001 return true;
6003 default:
6004 return false;
6009 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6010 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6012 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6013 other, unmodified part of the complex object just before the total store.
6014 As a consequence, if the object is still uninitialized, an undefined value
6015 will be loaded into a register, which may result in a spurious exception
6016 if the register is floating-point and the value happens to be a signaling
6017 NaN for example. Then the fully-fledged complex operations lowering pass
6018 followed by a DCE pass are necessary in order to fix things up. */
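/* Schematically, for a non-addressable complex variable X (D.1 being
   a formal temporary),
   __real x = r;
   becomes
   D.1 = __imag x;
   x = COMPLEX_EXPR <r, D.1>;
   so the whole of X is stored in one go.  */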
6020 static enum gimplify_status
6021 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6022 bool want_value)
6024 enum tree_code code, ocode;
6025 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6027 lhs = TREE_OPERAND (*expr_p, 0);
6028 rhs = TREE_OPERAND (*expr_p, 1);
6029 code = TREE_CODE (lhs);
6030 lhs = TREE_OPERAND (lhs, 0);
6032 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6033 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6034 suppress_warning (other);
6035 other = get_formal_tmp_var (other, pre_p);
6037 realpart = code == REALPART_EXPR ? rhs : other;
6038 imagpart = code == REALPART_EXPR ? other : rhs;
6040 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6041 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6042 else
6043 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6045 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6046 *expr_p = (want_value) ? rhs : NULL_TREE;
6048 return GS_ALL_DONE;
6051 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6053 modify_expr
6054 : varname '=' rhs
6055 | '*' ID '=' rhs
6057 PRE_P points to the list where side effects that must happen before
6058 *EXPR_P should be stored.
6060 POST_P points to the list where side effects that must happen after
6061 *EXPR_P should be stored.
6063 WANT_VALUE is nonzero iff we want to use the value of this expression
6064 in another expression. */
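/* For instance, when the value is wanted, roughly
   c = (a = b);
   gimplifies to
   a = b;
   c = a;
   with the inner assignment's value taken from A (or from B if A is
   volatile).  */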
6066 static enum gimplify_status
6067 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6068 bool want_value)
6070 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6071 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6072 enum gimplify_status ret = GS_UNHANDLED;
6073 gimple *assign;
6074 location_t loc = EXPR_LOCATION (*expr_p);
6075 gimple_stmt_iterator gsi;
6077 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6078 return GS_ERROR;
6080 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6081 || TREE_CODE (*expr_p) == INIT_EXPR);
6083 /* Trying to simplify a clobber using normal logic doesn't work,
6084 so handle it here. */
6085 if (TREE_CLOBBER_P (*from_p))
6087 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6088 if (ret == GS_ERROR)
6089 return ret;
6090 gcc_assert (!want_value);
6091 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6093 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6094 pre_p, post_p);
6095 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6097 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6098 *expr_p = NULL;
6099 return GS_ALL_DONE;
6102 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6103 memset. */
6104 if (TREE_TYPE (*from_p) != error_mark_node
6105 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6106 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6107 && TREE_CODE (*from_p) == CONSTRUCTOR
6108 && CONSTRUCTOR_NELTS (*from_p) == 0)
6110 maybe_with_size_expr (from_p);
6111 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6112 return gimplify_modify_expr_to_memset (expr_p,
6113 TREE_OPERAND (*from_p, 1),
6114 want_value, pre_p);
6117 /* Insert pointer conversions required by the middle-end that are not
6118 required by the frontend. This fixes middle-end type checking in
6119 cases such as gcc.dg/redecl-6.c. */
6120 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6122 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6123 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6124 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6127 /* See if any simplifications can be done based on what the RHS is. */
6128 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6129 want_value);
6130 if (ret != GS_UNHANDLED)
6131 return ret;
6133 /* For empty types, only gimplify the left hand side and right hand
6134 side as statements and throw away the assignment. Do this after
6135 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6136 types properly. */
6137 if (is_empty_type (TREE_TYPE (*from_p))
6138 && !want_value
6139 /* Don't do this for calls that return addressable types; expand_call
6140 relies on those having an lhs. */
6141 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6142 && TREE_CODE (*from_p) == CALL_EXPR))
6144 gimplify_stmt (from_p, pre_p);
6145 gimplify_stmt (to_p, pre_p);
6146 *expr_p = NULL_TREE;
6147 return GS_ALL_DONE;
6150 /* If the value being copied is of variable width, compute the length
6151 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6152 before gimplifying any of the operands so that we can resolve any
6153 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6154 the size of the expression to be copied, not of the destination, so
6155 that is what we must do here. */
6156 maybe_with_size_expr (from_p);
6158 /* As a special case, we have to temporarily allow for assignments
6159 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6160 a toplevel statement, when gimplifying the GENERIC expression
6161 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6162 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6164 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6165 prevent gimplify_expr from trying to create a new temporary for
6166 foo's LHS, we tell it that it should only gimplify until it
6167 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6168 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6169 and all we need to do here is set 'a' to be its LHS. */
6171 /* Gimplify the RHS first for C++17 and PR 71104. */
6172 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6173 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6174 if (ret == GS_ERROR)
6175 return ret;
6177 /* Then gimplify the LHS. */
6178 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6179 twice, we have to make sure to gimplify into non-SSA, as otherwise
6180 the abnormal edge added later will make those defs not dominate
6181 their uses.
6182 ??? Technically this applies only to the registers used in the
6183 resulting non-register *TO_P. */
6184 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6185 if (saved_into_ssa
6186 && TREE_CODE (*from_p) == CALL_EXPR
6187 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6188 gimplify_ctxp->into_ssa = false;
6189 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6190 gimplify_ctxp->into_ssa = saved_into_ssa;
6191 if (ret == GS_ERROR)
6192 return ret;
6194 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6195 guess for the predicate was wrong. */
6196 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6197 if (final_pred != initial_pred)
6199 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6200 if (ret == GS_ERROR)
6201 return ret;
6204 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the
6205 type size as an argument to the call. */
6206 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6208 tree call = TREE_OPERAND (*from_p, 0);
6209 tree vlasize = TREE_OPERAND (*from_p, 1);
6211 if (TREE_CODE (call) == CALL_EXPR
6212 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6214 int nargs = call_expr_nargs (call);
6215 tree type = TREE_TYPE (call);
6216 tree ap = CALL_EXPR_ARG (call, 0);
6217 tree tag = CALL_EXPR_ARG (call, 1);
6218 tree aptag = CALL_EXPR_ARG (call, 2);
6219 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6220 IFN_VA_ARG, type,
6221 nargs + 1, ap, tag,
6222 aptag, vlasize);
6223 TREE_OPERAND (*from_p, 0) = newcall;
6227 /* Now see if the above changed *from_p to something we handle specially. */
6228 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6229 want_value);
6230 if (ret != GS_UNHANDLED)
6231 return ret;
6233 /* If we've got a variable-sized assignment between two lvalues (i.e. one
6234 that does not involve a call), then we can make things a bit more
6235 straightforward by converting the assignment to memcpy or memset. */
6236 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6238 tree from = TREE_OPERAND (*from_p, 0);
6239 tree size = TREE_OPERAND (*from_p, 1);
6241 if (TREE_CODE (from) == CONSTRUCTOR)
6242 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6244 if (is_gimple_addressable (from))
6246 *from_p = from;
6247 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6248 pre_p);
6252 /* Transform partial stores to non-addressable complex variables into
6253 total stores. This allows us to use real instead of virtual operands
6254 for these variables, which improves optimization. */
6255 if ((TREE_CODE (*to_p) == REALPART_EXPR
6256 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6257 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6258 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6260 /* Try to alleviate the effects of the gimplification creating artificial
6261 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6262 make sure not to create DECL_DEBUG_EXPR links across functions. */
6263 if (!gimplify_ctxp->into_ssa
6264 && VAR_P (*from_p)
6265 && DECL_IGNORED_P (*from_p)
6266 && DECL_P (*to_p)
6267 && !DECL_IGNORED_P (*to_p)
6268 && decl_function_context (*to_p) == current_function_decl
6269 && decl_function_context (*from_p) == current_function_decl)
6271 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6272 DECL_NAME (*from_p)
6273 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6274 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6275 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6278 if (want_value && TREE_THIS_VOLATILE (*to_p))
6279 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6281 if (TREE_CODE (*from_p) == CALL_EXPR)
6283 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6284 instead of a GIMPLE_ASSIGN. */
6285 gcall *call_stmt;
6286 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6288 /* Gimplify internal functions created in the FEs. */
6289 int nargs = call_expr_nargs (*from_p), i;
6290 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6291 auto_vec<tree> vargs (nargs);
6293 for (i = 0; i < nargs; i++)
6295 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6296 EXPR_LOCATION (*from_p));
6297 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6299 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6300 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6301 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6303 else
6305 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6306 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6307 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6308 tree fndecl = get_callee_fndecl (*from_p);
6309 if (fndecl
6310 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6311 && call_expr_nargs (*from_p) == 3)
6312 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6313 CALL_EXPR_ARG (*from_p, 0),
6314 CALL_EXPR_ARG (*from_p, 1),
6315 CALL_EXPR_ARG (*from_p, 2));
6316 else
6318 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6321 notice_special_calls (call_stmt);
6322 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6323 gimple_call_set_lhs (call_stmt, *to_p);
6324 else if (TREE_CODE (*to_p) == SSA_NAME)
6325 /* The above is somewhat premature; avoid ICEing later for an
6326 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6327 ??? This doesn't make it a default-def. */
6328 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6330 assign = call_stmt;
6332 else
6334 assign = gimple_build_assign (*to_p, *from_p);
6335 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6336 if (COMPARISON_CLASS_P (*from_p))
6337 copy_warning (assign, *from_p);
6340 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6342 /* We should have got an SSA name from the start. */
6343 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6344 || ! gimple_in_ssa_p (cfun));
6347 gimplify_seq_add_stmt (pre_p, assign);
6348 gsi = gsi_last (*pre_p);
6349 maybe_fold_stmt (&gsi);
6351 if (want_value)
6353 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6354 return GS_OK;
6356 else
6357 *expr_p = NULL;
6359 return GS_ALL_DONE;
6362 /* Gimplify a comparison between two variable-sized objects. Do this
6363 with a call to BUILT_IN_MEMCMP. */
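/* E.g. for two objects of variable-length array type, roughly
   a == b
   becomes
   __builtin_memcmp (&a, &b, <size of A>) == 0
   with any PLACEHOLDER_EXPRs in the size substituted from A.  */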
6365 static enum gimplify_status
6366 gimplify_variable_sized_compare (tree *expr_p)
6368 location_t loc = EXPR_LOCATION (*expr_p);
6369 tree op0 = TREE_OPERAND (*expr_p, 0);
6370 tree op1 = TREE_OPERAND (*expr_p, 1);
6371 tree t, arg, dest, src, expr;
6373 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6374 arg = unshare_expr (arg);
6375 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6376 src = build_fold_addr_expr_loc (loc, op1);
6377 dest = build_fold_addr_expr_loc (loc, op0);
6378 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6379 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6381 expr
6382 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6383 SET_EXPR_LOCATION (expr, loc);
6384 *expr_p = expr;
6386 return GS_OK;
6389 /* Gimplify a comparison between two aggregate objects of integral scalar
6390 mode as a comparison between the bitwise equivalent scalar values. */
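/* E.g. for a 4-byte struct S with integral scalar mode, roughly
   a == b
   becomes
   VIEW_CONVERT_EXPR <unsigned int> (a) == VIEW_CONVERT_EXPR <unsigned int> (b)
   where the scalar type is whatever the langhook returns for S's mode.  */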
6392 static enum gimplify_status
6393 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6395 location_t loc = EXPR_LOCATION (*expr_p);
6396 tree op0 = TREE_OPERAND (*expr_p, 0);
6397 tree op1 = TREE_OPERAND (*expr_p, 1);
6399 tree type = TREE_TYPE (op0);
6400 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6402 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6403 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6405 *expr_p
6406 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6408 return GS_OK;
6411 /* Gimplify an expression sequence. This function gimplifies each
6412 expression and rewrites the original expression with the last
6413 expression of the sequence in GIMPLE form.
6415 PRE_P points to the list where the side effects for all the
6416 expressions in the sequence will be emitted.
6418 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
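/* E.g. gimplifying
   (a = f (), b = g (), c)
   emits a = f (); and b = g (); to *PRE_P and rewrites *EXPR_P to C.  */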
6420 static enum gimplify_status
6421 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6423 tree t = *expr_p;
6427 tree *sub_p = &TREE_OPERAND (t, 0);
6429 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6430 gimplify_compound_expr (sub_p, pre_p, false);
6431 else
6432 gimplify_stmt (sub_p, pre_p);
6434 t = TREE_OPERAND (t, 1);
6436 while (TREE_CODE (t) == COMPOUND_EXPR);
6438 *expr_p = t;
6439 if (want_value)
6440 return GS_OK;
6441 else
6443 gimplify_stmt (expr_p, pre_p);
6444 return GS_ALL_DONE;
6448 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6449 gimplify. After gimplification, EXPR_P will point to a new temporary
6450 that holds the original value of the SAVE_EXPR node.
6452 PRE_P points to the list where side effects that must happen before
6453 *EXPR_P should be stored. */
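/* E.g. the first evaluation of SAVE_EXPR <f ()> emits roughly
   D.1 = f ();
   and rewrites the node to D.1, so later occurrences of the same
   SAVE_EXPR reuse D.1 instead of calling F again.  */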
6455 static enum gimplify_status
6456 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6458 enum gimplify_status ret = GS_ALL_DONE;
6459 tree val;
6461 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6462 val = TREE_OPERAND (*expr_p, 0);
6464 if (val && TREE_TYPE (val) == error_mark_node)
6465 return GS_ERROR;
6467 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6468 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6470 /* The operand may be a void-valued expression. It is
6471 being executed only for its side effects. */
6472 if (TREE_TYPE (val) == void_type_node)
6474 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6475 is_gimple_stmt, fb_none);
6476 val = NULL;
6478 else
6479 /* The temporary may not be an SSA name, as later abnormal and EH
6480 control flow may invalidate use/def domination. When in SSA
6481 form, assume there are no such issues and that SAVE_EXPRs only
6482 appear via GENERIC foldings. */
6483 val = get_initialized_tmp_var (val, pre_p, post_p,
6484 gimple_in_ssa_p (cfun));
6486 TREE_OPERAND (*expr_p, 0) = val;
6487 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6490 *expr_p = val;
6492 return ret;
6495 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6497 unary_expr
6498 : ...
6499 | '&' varname
6502 PRE_P points to the list where side effects that must happen before
6503 *EXPR_P should be stored.
6505 POST_P points to the list where side effects that must happen after
6506 *EXPR_P should be stored. */
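/* E.g. the internally generated '&*p' simplifies back to plain P,
   possibly with a qualification conversion on top, and roughly
   '&VIEW_CONVERT_EXPR <T> (x)' becomes '&x' converted to the type of
   the original ADDR_EXPR.  */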
6508 static enum gimplify_status
6509 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6511 tree expr = *expr_p;
6512 tree op0 = TREE_OPERAND (expr, 0);
6513 enum gimplify_status ret;
6514 location_t loc = EXPR_LOCATION (*expr_p);
6516 switch (TREE_CODE (op0))
6518 case INDIRECT_REF:
6519 do_indirect_ref:
6520 /* Check if we are dealing with an expression of the form '&*ptr'.
6521 While the front end folds away '&*ptr' into 'ptr', these
6522 expressions may be generated internally by the compiler (e.g.,
6523 builtins like __builtin_va_end). */
6524 /* Caution: the silent array decomposition semantics we allow for
6525 ADDR_EXPR mean we can't always discard the pair. */
6526 /* Gimplification of the ADDR_EXPR operand may drop
6527 cv-qualification conversions, so make sure we add them if
6528 needed. */
6530 tree op00 = TREE_OPERAND (op0, 0);
6531 tree t_expr = TREE_TYPE (expr);
6532 tree t_op00 = TREE_TYPE (op00);
6534 if (!useless_type_conversion_p (t_expr, t_op00))
6535 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6536 *expr_p = op00;
6537 ret = GS_OK;
6539 break;
6541 case VIEW_CONVERT_EXPR:
6542 /* Take the address of our operand and then convert it to the type of
6543 this ADDR_EXPR.
6545 ??? The interaction of VIEW_CONVERT_EXPR and aliasing is not at
6546 all clear. The impact of this transformation is even less clear. */
6548 /* If the operand is a useless conversion, look through it. Doing so
6549 guarantees that the ADDR_EXPR and its operand will remain of the
6550 same type. */
6551 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6552 op0 = TREE_OPERAND (op0, 0);
6554 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6555 build_fold_addr_expr_loc (loc,
6556 TREE_OPERAND (op0, 0)));
6557 ret = GS_OK;
6558 break;
6560 case MEM_REF:
6561 if (integer_zerop (TREE_OPERAND (op0, 1)))
6562 goto do_indirect_ref;
6564 /* fall through */
6566 default:
6567 /* If we see a call to a declared builtin or see its address
6568 being taken (we can unify those cases here) then we can mark
6569 the builtin for implicit generation by GCC. */
6570 if (TREE_CODE (op0) == FUNCTION_DECL
6571 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6572 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6573 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6575 /* We use fb_either here because the C frontend sometimes takes
6576 the address of a call that returns a struct; see
6577 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6578 the implied temporary explicit. */
6580 /* Make the operand addressable. */
6581 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6582 is_gimple_addressable, fb_either);
6583 if (ret == GS_ERROR)
6584 break;
6586 /* Then mark it. Beware that it may not be possible to do so directly
6587 if a temporary has been created by the gimplification. */
6588 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6590 op0 = TREE_OPERAND (expr, 0);
6592 /* For various reasons, the gimplification of the expression
6593 may have made a new INDIRECT_REF. */
6594 if (INDIRECT_REF_P (op0)
6595 || (TREE_CODE (op0) == MEM_REF
6596 && integer_zerop (TREE_OPERAND (op0, 1))))
6597 goto do_indirect_ref;
6599 mark_addressable (TREE_OPERAND (expr, 0));
6601 /* The FEs may end up building ADDR_EXPRs early on a decl with
6602 an incomplete type. Re-build ADDR_EXPRs in canonical form
6603 here. */
6604 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6605 *expr_p = build_fold_addr_expr (op0);
6607 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6608 recompute_tree_invariant_for_addr_expr (*expr_p);
6610 /* If we re-built the ADDR_EXPR add a conversion to the original type
6611 if required. */
6612 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6613 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6615 break;
6618 return ret;
6621 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6622 value; output operands should be a gimple lvalue. */
6624 static enum gimplify_status
6625 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6627 tree expr;
6628 int noutputs;
6629 const char **oconstraints;
6630 int i;
6631 tree link;
6632 const char *constraint;
6633 bool allows_mem, allows_reg, is_inout;
6634 enum gimplify_status ret, tret;
6635 gasm *stmt;
6636 vec<tree, va_gc> *inputs;
6637 vec<tree, va_gc> *outputs;
6638 vec<tree, va_gc> *clobbers;
6639 vec<tree, va_gc> *labels;
6640 tree link_next;
6642 expr = *expr_p;
6643 noutputs = list_length (ASM_OUTPUTS (expr));
6644 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6646 inputs = NULL;
6647 outputs = NULL;
6648 clobbers = NULL;
6649 labels = NULL;
6651 ret = GS_ALL_DONE;
6652 link_next = NULL_TREE;
6653 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6655 bool ok;
6656 size_t constraint_len;
6658 link_next = TREE_CHAIN (link);
6660 oconstraints[i]
6661 = constraint
6662 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6663 constraint_len = strlen (constraint);
6664 if (constraint_len == 0)
6665 continue;
6667 ok = parse_output_constraint (&constraint, i, 0, 0,
6668 &allows_mem, &allows_reg, &is_inout);
6669 if (!ok)
6671 ret = GS_ERROR;
6672 is_inout = false;
6675 /* If we can't make copies, we can only accept memory.
6676 Similarly for VLAs. */
6677 tree outtype = TREE_TYPE (TREE_VALUE (link));
6678 if (outtype != error_mark_node
6679 && (TREE_ADDRESSABLE (outtype)
6680 || !COMPLETE_TYPE_P (outtype)
6681 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6683 if (allows_mem)
6684 allows_reg = 0;
6685 else
6687 error ("impossible constraint in %<asm%>");
6688 error ("non-memory output %d must stay in memory", i);
6689 return GS_ERROR;
6693 if (!allows_reg && allows_mem)
6694 mark_addressable (TREE_VALUE (link));
6696 tree orig = TREE_VALUE (link);
6697 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6698 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6699 fb_lvalue | fb_mayfail);
6700 if (tret == GS_ERROR)
6702 if (orig != error_mark_node)
6703 error ("invalid lvalue in %<asm%> output %d", i);
6704 ret = tret;
6707 /* If the constraint does not allow memory, make sure we gimplify
6708 the operand to a register if it is not one already but its base is.
6709 This happens for complex and vector components. */
6710 if (!allows_mem)
6712 tree op = TREE_VALUE (link);
6713 if (! is_gimple_val (op)
6714 && is_gimple_reg_type (TREE_TYPE (op))
6715 && is_gimple_reg (get_base_address (op)))
6717 tree tem = create_tmp_reg (TREE_TYPE (op));
6718 tree ass;
6719 if (is_inout)
6721 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6722 tem, unshare_expr (op));
6723 gimplify_and_add (ass, pre_p);
6725 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6726 gimplify_and_add (ass, post_p);
6728 TREE_VALUE (link) = tem;
6729 tret = GS_OK;
6733 vec_safe_push (outputs, link);
6734 TREE_CHAIN (link) = NULL_TREE;
6736 if (is_inout)
6738 /* An input/output operand. To give the optimizers more
6739 flexibility, split it into separate input and output
6740 operands. */
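/* E.g. an output "+r" (x) is rewritten as "=r" (x) plus a matching
   input "0" (x), assuming it is operand 0.  */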
6741 tree input;
6742 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6743 char buf[11];
6745 /* Turn the in/out constraint into an output constraint. */
6746 char *p = xstrdup (constraint);
6747 p[0] = '=';
6748 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6750 /* And add a matching input constraint. */
6751 if (allows_reg)
6753 sprintf (buf, "%u", i);
6755 /* If there are multiple alternatives in the constraint,
6756 handle each of them individually. Those that allow a register
6757 will be replaced with the operand number; the others stay
6758 unchanged. */
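/* E.g. for operand 3 with constraint "+r,m", the resulting matching
   input constraint is "3,m".  */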
6759 if (strchr (p, ',') != NULL)
6761 size_t len = 0, buflen = strlen (buf);
6762 char *beg, *end, *str, *dst;
6764 for (beg = p + 1;;)
6766 end = strchr (beg, ',');
6767 if (end == NULL)
6768 end = strchr (beg, '\0');
6769 if ((size_t) (end - beg) < buflen)
6770 len += buflen + 1;
6771 else
6772 len += end - beg + 1;
6773 if (*end)
6774 beg = end + 1;
6775 else
6776 break;
6779 str = (char *) alloca (len);
6780 for (beg = p + 1, dst = str;;)
6782 const char *tem;
6783 bool mem_p, reg_p, inout_p;
6785 end = strchr (beg, ',');
6786 if (end)
6787 *end = '\0';
6788 beg[-1] = '=';
6789 tem = beg - 1;
6790 parse_output_constraint (&tem, i, 0, 0,
6791 &mem_p, &reg_p, &inout_p);
6792 if (dst != str)
6793 *dst++ = ',';
6794 if (reg_p)
6796 memcpy (dst, buf, buflen);
6797 dst += buflen;
6799 else
6801 if (end)
6802 len = end - beg;
6803 else
6804 len = strlen (beg);
6805 memcpy (dst, beg, len);
6806 dst += len;
6808 if (end)
6809 beg = end + 1;
6810 else
6811 break;
6813 *dst = '\0';
6814 input = build_string (dst - str, str);
6816 else
6817 input = build_string (strlen (buf), buf);
6819 else
6820 input = build_string (constraint_len - 1, constraint + 1);
6822 free (p);
6824 input = build_tree_list (build_tree_list (NULL_TREE, input),
6825 unshare_expr (TREE_VALUE (link)));
6826 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6830 link_next = NULL_TREE;
6831 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6833 link_next = TREE_CHAIN (link);
6834 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6835 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6836 oconstraints, &allows_mem, &allows_reg);
6838 /* If we can't make copies, we can only accept memory. */
6839 tree intype = TREE_TYPE (TREE_VALUE (link));
6840 if (intype != error_mark_node
6841 && (TREE_ADDRESSABLE (intype)
6842 || !COMPLETE_TYPE_P (intype)
6843 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6845 if (allows_mem)
6846 allows_reg = 0;
6847 else
6849 error ("impossible constraint in %<asm%>");
6850 error ("non-memory input %d must stay in memory", i);
6851 return GS_ERROR;
6855 /* If the operand is a memory input, it should be an lvalue. */
6856 if (!allows_reg && allows_mem)
6858 tree inputv = TREE_VALUE (link);
6859 STRIP_NOPS (inputv);
6860 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6861 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6862 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6863 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6864 || TREE_CODE (inputv) == MODIFY_EXPR)
6865 TREE_VALUE (link) = error_mark_node;
6866 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6867 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6868 if (tret != GS_ERROR)
6870 /* Unlike output operands, memory inputs are not guaranteed
6871 to be lvalues by the FE, and while the expressions are
6872 marked addressable there, if the input is e.g. a statement
6873 expression, temporaries in it might not end up being
6874 addressable. They might already be used in the IL and thus
6875 it is too late to make them addressable now. */
6876 tree x = TREE_VALUE (link);
6877 while (handled_component_p (x))
6878 x = TREE_OPERAND (x, 0);
6879 if (TREE_CODE (x) == MEM_REF
6880 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6881 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6882 if ((VAR_P (x)
6883 || TREE_CODE (x) == PARM_DECL
6884 || TREE_CODE (x) == RESULT_DECL)
6885 && !TREE_ADDRESSABLE (x)
6886 && is_gimple_reg (x))
6888 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6889 input_location), 0,
6890 "memory input %d is not directly addressable",
6892 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6895 mark_addressable (TREE_VALUE (link));
6896 if (tret == GS_ERROR)
6898 if (inputv != error_mark_node)
6899 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6900 "memory input %d is not directly addressable", i);
6901 ret = tret;
6904 else
6906 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6907 is_gimple_asm_val, fb_rvalue);
6908 if (tret == GS_ERROR)
6909 ret = tret;
6912 TREE_CHAIN (link) = NULL_TREE;
6913 vec_safe_push (inputs, link);
6916 link_next = NULL_TREE;
6917 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6919 link_next = TREE_CHAIN (link);
6920 TREE_CHAIN (link) = NULL_TREE;
6921 vec_safe_push (clobbers, link);
6924 link_next = NULL_TREE;
6925 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6927 link_next = TREE_CHAIN (link);
6928 TREE_CHAIN (link) = NULL_TREE;
6929 vec_safe_push (labels, link);
6932 /* Do not add ASMs with errors to the gimple IL stream. */
6933 if (ret != GS_ERROR)
6935 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6936 inputs, outputs, clobbers, labels);
6938 /* asm is volatile if it was marked by the user as volatile or
6939 there are no outputs or this is an asm goto. */
6940 gimple_asm_set_volatile (stmt,
6941 ASM_VOLATILE_P (expr)
6942 || noutputs == 0
6943 || labels);
6944 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6945 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6947 gimplify_seq_add_stmt (pre_p, stmt);
6950 return ret;
6953 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6954 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6955 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6956 return to this function.
6958 FIXME should we complexify the prequeue handling instead? Or use flags
6959 for all the cleanups and let the optimizer tighten them up? The current
6960 code seems pretty fragile; it will break on a cleanup within any
6961 non-conditional nesting. But any such nesting would be broken, anyway;
6962 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6963 and continues out of it. We can do that at the RTL level, though, so
6964 having an optimizer to tighten up try/finally regions would be a Good
6965 Thing. */
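/* Schematically, a gimplified body of the form
   stmt1;
   GIMPLE_WITH_CLEANUP_EXPR <cleanup>;
   stmt2;
   stmt3;
   is rewritten here as roughly
   stmt1;
   try { stmt2; stmt3; } finally { cleanup; }
   while a trailing cleanup is simply spliced in place of its
   GIMPLE_WITH_CLEANUP_EXPR (unless it is EH-only).  */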
6967 static enum gimplify_status
6968 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6970 gimple_stmt_iterator iter;
6971 gimple_seq body_sequence = NULL;
6973 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6975 /* We only care about the number of conditions between the innermost
6976 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6977 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6978 int old_conds = gimplify_ctxp->conditions;
6979 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6980 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6981 gimplify_ctxp->conditions = 0;
6982 gimplify_ctxp->conditional_cleanups = NULL;
6983 gimplify_ctxp->in_cleanup_point_expr = true;
6985 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6987 gimplify_ctxp->conditions = old_conds;
6988 gimplify_ctxp->conditional_cleanups = old_cleanups;
6989 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6991 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6993 gimple *wce = gsi_stmt (iter);
6995 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6997 if (gsi_one_before_end_p (iter))
6999 /* Note that gsi_insert_seq_before and gsi_remove do not
7000 scan operands, unlike some other sequence mutators. */
7001 if (!gimple_wce_cleanup_eh_only (wce))
7002 gsi_insert_seq_before_without_update (&iter,
7003 gimple_wce_cleanup (wce),
7004 GSI_SAME_STMT);
7005 gsi_remove (&iter, true);
7006 break;
7008 else
7010 gtry *gtry;
7011 gimple_seq seq;
7012 enum gimple_try_flags kind;
7014 if (gimple_wce_cleanup_eh_only (wce))
7015 kind = GIMPLE_TRY_CATCH;
7016 else
7017 kind = GIMPLE_TRY_FINALLY;
7018 seq = gsi_split_seq_after (iter);
7020 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7021 /* Do not use gsi_replace here, as it may scan operands.
7022 We want to do a simple structural modification only. */
7023 gsi_set_stmt (&iter, gtry);
7024 iter = gsi_start (gtry->eval);
7027 else
7028 gsi_next (&iter);
7031 gimplify_seq_add_seq (pre_p, body_sequence);
7032 if (temp)
7034 *expr_p = temp;
7035 return GS_OK;
7037 else
7039 *expr_p = NULL;
7040 return GS_ALL_DONE;
7044 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7045 is the cleanup action required. EH_ONLY is true if the cleanup should
7046 only be executed if an exception is thrown, not on normal exit.
7047 If FORCE_UNCOND is true, perform the cleanup unconditionally; this is
7048 only valid for clobbers. */
7050 static void
7051 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7052 bool force_uncond = false)
7054 gimple *wce;
7055 gimple_seq cleanup_stmts = NULL;
7057 /* Errors can result in improperly nested cleanups, which in turn cause
7058 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7059 if (seen_error ())
7060 return;
7062 if (gimple_conditional_context ())
7064 /* If we're in a conditional context, this is more complex. We only
7065 want to run the cleanup if we actually ran the initialization that
7066 necessitates it, but we want to run it after the end of the
7067 conditional context. So we wrap the try/finally around the
7068 condition and use a flag to determine whether or not to actually
7069 run the destructor. Thus
7071 test ? f(A()) : 0
7073 becomes (approximately)
7075 flag = 0;
7076 try {
7077 if (test) { A::A(temp); flag = 1; val = f(temp); }
7078 else { val = 0; }
7079 } finally {
7080 if (flag) A::~A(temp);
7084 if (force_uncond)
7086 gimplify_stmt (&cleanup, &cleanup_stmts);
7087 wce = gimple_build_wce (cleanup_stmts);
7088 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7090 else
7092 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7093 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7094 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7096 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7097 gimplify_stmt (&cleanup, &cleanup_stmts);
7098 wce = gimple_build_wce (cleanup_stmts);
7099 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7101 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7102 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7103 gimplify_seq_add_stmt (pre_p, ftrue);
7105 /* Because of this manipulation, and the EH edges that jump
7106 threading cannot redirect, the temporary (VAR) will appear
7107 to be used uninitialized. Don't warn. */
7108 suppress_warning (var, OPT_Wuninitialized);
7111 else
7113 gimplify_stmt (&cleanup, &cleanup_stmts);
7114 wce = gimple_build_wce (cleanup_stmts);
7115 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7116 gimplify_seq_add_stmt (pre_p, wce);
7120 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
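/* E.g. for an argument use such as f (TARGET_EXPR <D.1, g ()>), this
   emits roughly D.1 = g (); to *PRE_P, registers any cleanup for D.1
   via gimple_push_cleanup, and rewrites *EXPR_P to the slot D.1.  */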
7122 static enum gimplify_status
7123 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7125 tree targ = *expr_p;
7126 tree temp = TARGET_EXPR_SLOT (targ);
7127 tree init = TARGET_EXPR_INITIAL (targ);
7128 enum gimplify_status ret;
7130 bool unpoison_empty_seq = false;
7131 gimple_stmt_iterator unpoison_it;
7133 if (init)
7135 gimple_seq init_pre_p = NULL;
7137 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
7138 temp to the temps list. Also handle variable-length TARGET_EXPRs. */
7139 if (!poly_int_tree_p (DECL_SIZE (temp)))
7141 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7142 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7143 /* FIXME: this is correct only when the size of the type does
7144 not depend on expressions evaluated in init. */
7145 gimplify_vla_decl (temp, &init_pre_p);
7147 else
7149 /* Save the location where we need to place the unpoisoning. It's
7150 possible that the variable will be converted to needs_to_live_in_memory. */
7151 unpoison_it = gsi_last (*pre_p);
7152 unpoison_empty_seq = gsi_end_p (unpoison_it);
7154 gimple_add_tmp_var (temp);
7157 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7158 expression is supposed to initialize the slot. */
7159 if (VOID_TYPE_P (TREE_TYPE (init)))
7160 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7161 fb_none);
7162 else
7164 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7165 init = init_expr;
7166 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7167 fb_none);
7168 init = NULL;
7169 ggc_free (init_expr);
7171 if (ret == GS_ERROR)
7173 /* PR c++/28266 Make sure this is expanded only once. */
7174 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7175 return GS_ERROR;
7178 if (init)
7179 gimplify_and_add (init, &init_pre_p);
7181 /* Add a clobber for the temporary going out of scope, like
7182 gimplify_bind_expr. But only if we did not promote the
7183 temporary to static storage. */
7184 if (gimplify_ctxp->in_cleanup_point_expr
7185 && !TREE_STATIC (temp)
7186 && needs_to_live_in_memory (temp))
7188 if (flag_stack_reuse == SR_ALL)
7190 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7191 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7192 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7194 if (asan_poisoned_variables
7195 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7196 && !TREE_STATIC (temp)
7197 && dbg_cnt (asan_use_after_scope)
7198 && !gimplify_omp_ctxp)
7200 tree asan_cleanup = build_asan_poison_call_expr (temp);
7201 if (asan_cleanup)
7203 if (unpoison_empty_seq)
7204 unpoison_it = gsi_start (*pre_p);
7206 asan_poison_variable (temp, false, &unpoison_it,
7207 unpoison_empty_seq);
7208 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7213 gimple_seq_add_seq (pre_p, init_pre_p);
7215 /* If needed, push the cleanup for the temp. */
7216 if (TARGET_EXPR_CLEANUP (targ))
7217 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7218 CLEANUP_EH_ONLY (targ), pre_p);
7220 /* Only expand this once. */
7221 TREE_OPERAND (targ, 3) = init;
7222 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7224 else
7225 /* We should have expanded this before. */
7226 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7228 *expr_p = temp;
7229 return GS_OK;
7232 /* Gimplification of expression trees. */
7234 /* Gimplify an expression which appears at statement context. The
7235 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7236 NULL, a new sequence is allocated.
7238 Return true if we actually added a statement to the queue. */
7240 bool
7241 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7243 gimple_seq_node last;
7245 last = gimple_seq_last (*seq_p);
7246 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7247 return last != gimple_seq_last (*seq_p);
7250 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
7251 parallels. If entries already exist, force them to be some flavor of
7252 private. If there is no enclosing parallel, do nothing. */
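/* E.g. for a VLA 'int a[n]' referenced inside '#pragma omp parallel',
   the gimplified size of A references N, so N (or its gimplified
   temporary) must become FIRSTPRIVATE on the enclosing parallel.  */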
7254 void
7255 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7257 splay_tree_node n;
7259 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7260 return;
7264 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7265 if (n != NULL)
7267 if (n->value & GOVD_SHARED)
7268 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7269 else if (n->value & GOVD_MAP)
7270 n->value |= GOVD_MAP_TO_ONLY;
7271 else
7272 return;
7274 else if ((ctx->region_type & ORT_TARGET) != 0)
7276 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7277 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7278 else
7279 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7281 else if (ctx->region_type != ORT_WORKSHARE
7282 && ctx->region_type != ORT_TASKGROUP
7283 && ctx->region_type != ORT_SIMD
7284 && ctx->region_type != ORT_ACC
7285 && !(ctx->region_type & ORT_TARGET_DATA))
7286 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7288 ctx = ctx->outer_context;
7290 while (ctx);
7293 /* Similarly for each of the type sizes of TYPE. */
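/* E.g. for a variably modified type such as int[n][m], this adds
   FIRSTPRIVATE entries for the gimplified bounds and sizes derived
   from N and M.  */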
7295 static void
7296 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7298 if (type == NULL || type == error_mark_node)
7299 return;
7300 type = TYPE_MAIN_VARIANT (type);
7302 if (ctx->privatized_types->add (type))
7303 return;
7305 switch (TREE_CODE (type))
7307 case INTEGER_TYPE:
7308 case ENUMERAL_TYPE:
7309 case BOOLEAN_TYPE:
7310 case REAL_TYPE:
7311 case FIXED_POINT_TYPE:
7312 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7313 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7314 break;
7316 case ARRAY_TYPE:
7317 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7318 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7319 break;
7321 case RECORD_TYPE:
7322 case UNION_TYPE:
7323 case QUAL_UNION_TYPE:
7325 tree field;
7326 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7327 if (TREE_CODE (field) == FIELD_DECL)
7329 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7330 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7333 break;
7335 case POINTER_TYPE:
7336 case REFERENCE_TYPE:
7337 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7338 break;
7340 default:
7341 break;
7344 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7345 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7346 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7349 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7351 static void
7352 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7354 splay_tree_node n;
7355 unsigned int nflags;
7356 tree t;
7358 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7359 return;
7361 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7362 there are constructors involved somewhere. Exception is a shared clause,
7363 there is nothing privatized in that case. */
7364 if ((flags & GOVD_SHARED) == 0
7365 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7366 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7367 flags |= GOVD_SEEN;
7369 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7370 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7372 /* We shouldn't be re-adding the decl with the same data
7373 sharing class. */
7374 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7375 nflags = n->value | flags;
7376 /* The only combination of data sharing classes we should see is
7377 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7378 reduction variables to be used in data sharing clauses. */
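/* (E.g. "#pragma omp parallel for firstprivate (x) lastprivate (x)" is
   the one conforming OpenMP way those two classes combine on a single
   decl.) */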
7379 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7380 || ((nflags & GOVD_DATA_SHARE_CLASS)
7381 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7382 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7383 n->value = nflags;
7384 return;
7387 /* When adding a variable-sized variable, we have to handle all sorts
7388 of additional bits of data: the pointer replacement variable, and
7389 the parameters of the type. */
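/* (A VLA "int a[n]" has a DECL_VALUE_EXPR of the form "*a.ptr" by this
   point; it is that pointer replacement a.ptr, plus the gimplified size
   temporaries, that are entered into the context below. The name a.ptr
   is illustrative only.) */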
7390 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7392 /* Add the pointer replacement variable as PRIVATE if the variable
7393 replacement is private, else FIRSTPRIVATE since we'll need the
7394 address of the original variable either for SHARED, or for the
7395 copy into or out of the context. */
7396 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7398 if (flags & GOVD_MAP)
7399 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7400 else if (flags & GOVD_PRIVATE)
7401 nflags = GOVD_PRIVATE;
7402 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7403 && (flags & GOVD_FIRSTPRIVATE))
7404 || (ctx->region_type == ORT_TARGET_DATA
7405 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7406 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7407 else
7408 nflags = GOVD_FIRSTPRIVATE;
7409 nflags |= flags & GOVD_SEEN;
7410 t = DECL_VALUE_EXPR (decl);
7411 gcc_assert (INDIRECT_REF_P (t));
7412 t = TREE_OPERAND (t, 0);
7413 gcc_assert (DECL_P (t));
7414 omp_add_variable (ctx, t, nflags);
7417 /* Add all of the variable and type parameters (which should have
7418 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7419 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7420 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7421 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7423 /* The variable-sized variable itself is never SHARED, only some form
7424 of PRIVATE. The sharing would take place via the pointer variable
7425 which we remapped above. */
7426 if (flags & GOVD_SHARED)
7427 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7428 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7430 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7431 alloca statement we generate for the variable, so make sure it
7432 is available. This isn't automatically needed for the SHARED
7433 case, since we won't be allocating local storage then.
7434 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7435 in this case omp_notice_variable will be called later
7436 on when it is gimplified. */
7437 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7438 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7439 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7441 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7442 && omp_privatize_by_reference (decl))
7444 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7446 /* Similar to the direct variable sized case above, we'll need the
7447 size of references being privatized. */
7448 if ((flags & GOVD_SHARED) == 0)
7450 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7451 if (t && DECL_P (t))
7452 omp_notice_variable (ctx, t, true);
7456 if (n != NULL)
7457 n->value |= flags;
7458 else
7459 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7461 /* For reductions clauses in OpenACC loop directives, by default create a
7462 copy clause on the enclosing parallel construct for carrying back the
7463 results. */
7464 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7466 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7467 while (outer_ctx)
7469 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7470 if (n != NULL)
7472 /* Ignore local variables and explicitly declared clauses. */
7473 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7474 break;
7475 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7477 /* According to the OpenACC spec, such a reduction variable
7478 should already have a copy map on a kernels construct,
7479 verify that here. */
7480 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7481 && (n->value & GOVD_MAP));
7483 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7485 /* Remove firstprivate and make it a copy map. */
7486 n->value &= ~GOVD_FIRSTPRIVATE;
7487 n->value |= GOVD_MAP;
7490 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7492 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7493 GOVD_MAP | GOVD_SEEN);
7494 break;
7496 outer_ctx = outer_ctx->outer_context;
7501 /* Notice a threadprivate variable DECL used in OMP context CTX.
7502 This just emits diagnostics about threadprivate variable uses in
7503 untied tasks, target regions and order(concurrent) regions. If DECL2
7504 is non-NULL, suppress the diagnostic on that variable as well. */
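/* For example (a sketch):

     int t;
     #pragma omp threadprivate (t)
     ...
     #pragma omp target map (tofrom: t)    <-- diagnosed below

   The splay-tree insertions merely record DECL so the same variable is
   not diagnosed again on later uses. */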
7506 static bool
7507 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7508 tree decl2)
7510 splay_tree_node n;
7511 struct gimplify_omp_ctx *octx;
7513 for (octx = ctx; octx; octx = octx->outer_context)
7514 if ((octx->region_type & ORT_TARGET) != 0
7515 || octx->order_concurrent)
7517 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7518 if (n == NULL)
7520 if (octx->order_concurrent)
7522 error ("threadprivate variable %qE used in a region with"
7523 " %<order(concurrent)%> clause", DECL_NAME (decl));
7524 inform (octx->location, "enclosing region");
7526 else
7528 error ("threadprivate variable %qE used in target region",
7529 DECL_NAME (decl));
7530 inform (octx->location, "enclosing target region");
7532 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7534 if (decl2)
7535 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7538 if (ctx->region_type != ORT_UNTIED_TASK)
7539 return false;
7540 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7541 if (n == NULL)
7543 error ("threadprivate variable %qE used in untied task",
7544 DECL_NAME (decl));
7545 inform (ctx->location, "enclosing task");
7546 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7548 if (decl2)
7549 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7550 return false;
7553 /* Return true if global var DECL is device resident. */
7555 static bool
7556 device_resident_p (tree decl)
7558 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7560 if (!attr)
7561 return false;
7563 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7565 tree c = TREE_VALUE (t);
7566 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7567 return true;
7570 return false;
7573 /* Return true if DECL has an ACC DECLARE attribute. */
7575 static bool
7576 is_oacc_declared (tree decl)
7578 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7579 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7580 return declared != NULL_TREE;
7583 /* Determine outer default flags for DECL mentioned in an OMP region
7584 but not declared in an enclosing clause.
7586 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7587 remapped firstprivate instead of shared. To some extent this is
7588 addressed in omp_firstprivatize_type_sizes, but not
7589 effectively. */
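/* E.g. (a sketch) with "#pragma omp parallel default(none)", a use of a
   variable X appearing in no clause reaches the OMP_CLAUSE_DEFAULT_NONE
   case below and yields "'x' not specified in enclosing 'parallel'". */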
7591 static unsigned
7592 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7593 bool in_code, unsigned flags)
7595 enum omp_clause_default_kind default_kind = ctx->default_kind;
7596 enum omp_clause_default_kind kind;
7598 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7599 if (ctx->region_type & ORT_TASK)
7601 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7603 /* The event-handle specified by a detach clause should always be firstprivate,
7604 regardless of the current default. */
7605 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7606 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7608 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7609 default_kind = kind;
7610 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7611 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7612 /* For C/C++ default({,first}private), variables with static storage duration
7613 declared in a namespace or at global scope and referenced in the construct
7614 must be explicitly specified, i.e. the default acts as default(none). */
7615 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7616 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7617 && VAR_P (decl)
7618 && is_global_var (decl)
7619 && (DECL_FILE_SCOPE_P (decl)
7620 || (DECL_CONTEXT (decl)
7621 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7622 && !lang_GNU_Fortran ())
7623 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7625 switch (default_kind)
7627 case OMP_CLAUSE_DEFAULT_NONE:
7629 const char *rtype;
7631 if (ctx->region_type & ORT_PARALLEL)
7632 rtype = "parallel";
7633 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7634 rtype = "taskloop";
7635 else if (ctx->region_type & ORT_TASK)
7636 rtype = "task";
7637 else if (ctx->region_type & ORT_TEAMS)
7638 rtype = "teams";
7639 else
7640 gcc_unreachable ();
7642 error ("%qE not specified in enclosing %qs",
7643 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7644 inform (ctx->location, "enclosing %qs", rtype);
7646 /* FALLTHRU */
7647 case OMP_CLAUSE_DEFAULT_SHARED:
7648 flags |= GOVD_SHARED;
7649 break;
7650 case OMP_CLAUSE_DEFAULT_PRIVATE:
7651 flags |= GOVD_PRIVATE;
7652 break;
7653 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7654 flags |= GOVD_FIRSTPRIVATE;
7655 break;
7656 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7657 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7658 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7659 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7661 omp_notice_variable (octx, decl, in_code);
7662 for (; octx; octx = octx->outer_context)
7664 splay_tree_node n2;
7666 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7667 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7668 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7669 continue;
7670 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7672 flags |= GOVD_FIRSTPRIVATE;
7673 goto found_outer;
7675 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7677 flags |= GOVD_SHARED;
7678 goto found_outer;
7683 if (TREE_CODE (decl) == PARM_DECL
7684 || (!is_global_var (decl)
7685 && DECL_CONTEXT (decl) == current_function_decl))
7686 flags |= GOVD_FIRSTPRIVATE;
7687 else
7688 flags |= GOVD_SHARED;
7689 found_outer:
7690 break;
7692 default:
7693 gcc_unreachable ();
7696 return flags;
7700 /* Determine outer default flags for DECL mentioned in an OACC region
7701 but not declared in an enclosing clause. */
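/* In outline (as implemented below, not a restatement of the spec): on
   "kernels", scalars default to 'copy' and aggregates to
   'present_or_copy' (or 'present' under default(present)); on "parallel"
   and "serial", scalars default to 'firstprivate' and aggregates behave
   as on kernels. default(none) produces a diagnostic instead. */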
7703 static unsigned
7704 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7706 const char *rkind;
7707 bool on_device = false;
7708 bool is_private = false;
7709 bool declared = is_oacc_declared (decl);
7710 tree type = TREE_TYPE (decl);
7712 if (omp_privatize_by_reference (decl))
7713 type = TREE_TYPE (type);
7715 /* For Fortran COMMON blocks, only used variables in those blocks are
7716 transferred and remapped. The block itself will have a private clause to
7717 avoid transferring the data twice.
7718 The hook evaluates to false by default. For a variable in Fortran's COMMON
7719 or EQUIVALENCE block, it returns 'true' (as we have shared=false), since only
7720 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7721 the whole block. For C++ and Fortran, it can also be true under certain
7722 other conditions, if DECL_HAS_VALUE_EXPR. */
7723 if (RECORD_OR_UNION_TYPE_P (type))
7724 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7726 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7727 && is_global_var (decl)
7728 && device_resident_p (decl)
7729 && !is_private)
7731 on_device = true;
7732 flags |= GOVD_MAP_TO_ONLY;
7735 switch (ctx->region_type)
7737 case ORT_ACC_KERNELS:
7738 rkind = "kernels";
7740 if (is_private)
7741 flags |= GOVD_FIRSTPRIVATE;
7742 else if (AGGREGATE_TYPE_P (type))
7744 /* Aggregates default to 'present_or_copy', or 'present'. */
7745 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7746 flags |= GOVD_MAP;
7747 else
7748 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7750 else
7751 /* Scalars default to 'copy'. */
7752 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7754 break;
7756 case ORT_ACC_PARALLEL:
7757 case ORT_ACC_SERIAL:
7758 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7760 if (is_private)
7761 flags |= GOVD_FIRSTPRIVATE;
7762 else if (on_device || declared)
7763 flags |= GOVD_MAP;
7764 else if (AGGREGATE_TYPE_P (type))
7766 /* Aggregates default to 'present_or_copy', or 'present'. */
7767 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7768 flags |= GOVD_MAP;
7769 else
7770 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7772 else
7773 /* Scalars default to 'firstprivate'. */
7774 flags |= GOVD_FIRSTPRIVATE;
7776 break;
7778 default:
7779 gcc_unreachable ();
7782 if (DECL_ARTIFICIAL (decl))
7783 ; /* We can get compiler-generated decls, and should not complain
7784 about them. */
7785 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7787 error ("%qE not specified in enclosing OpenACC %qs construct",
7788 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7789 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7791 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7792 ; /* Handled above. */
7793 else
7794 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7796 return flags;
7799 /* Record the fact that DECL was used within the OMP context CTX.
7800 IN_CODE is true when real code uses DECL, and false when we should
7801 merely emit default(none) errors. Return true if DECL is going to
7802 be remapped and thus DECL shouldn't be gimplified into its
7803 DECL_VALUE_EXPR (if any). */
7805 static bool
7806 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7808 splay_tree_node n;
7809 unsigned flags = in_code ? GOVD_SEEN : 0;
7810 bool ret = false, shared;
7812 if (error_operand_p (decl))
7813 return false;
7815 if (ctx->region_type == ORT_NONE)
7816 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7818 if (is_global_var (decl))
7820 /* Threadprivate variables are predetermined. */
7821 if (DECL_THREAD_LOCAL_P (decl))
7822 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7824 if (DECL_HAS_VALUE_EXPR_P (decl))
7826 if (ctx->region_type & ORT_ACC)
7827 /* For OpenACC, defer expansion of the value to avoid transferring
7828 privatized common block data instead of the im-/explicitly transferred
7829 variables that are in common blocks. */
7831 else
7833 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7835 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7836 return omp_notice_threadprivate_variable (ctx, decl, value);
7840 if (gimplify_omp_ctxp->outer_context == NULL
7841 && VAR_P (decl)
7842 && oacc_get_fn_attrib (current_function_decl))
7844 location_t loc = DECL_SOURCE_LOCATION (decl);
7846 if (lookup_attribute ("omp declare target link",
7847 DECL_ATTRIBUTES (decl)))
7849 error_at (loc,
7850 "%qE with %<link%> clause used in %<routine%> function",
7851 DECL_NAME (decl));
7852 return false;
7854 else if (!lookup_attribute ("omp declare target",
7855 DECL_ATTRIBUTES (decl)))
7857 error_at (loc,
7858 "%qE requires a %<declare%> directive for use "
7859 "in a %<routine%> function", DECL_NAME (decl));
7860 return false;
7865 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7866 if ((ctx->region_type & ORT_TARGET) != 0)
7868 if (ctx->region_type & ORT_ACC)
7869 /* For OpenACC, as remarked above, defer expansion. */
7870 shared = false;
7871 else
7872 shared = true;
7874 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7875 if (n == NULL)
7877 unsigned nflags = flags;
7878 if ((ctx->region_type & ORT_ACC) == 0)
7880 bool is_declare_target = false;
7881 if (is_global_var (decl)
7882 && varpool_node::get_create (decl)->offloadable)
7884 struct gimplify_omp_ctx *octx;
7885 for (octx = ctx->outer_context;
7886 octx; octx = octx->outer_context)
7888 n = splay_tree_lookup (octx->variables,
7889 (splay_tree_key)decl);
7890 if (n
7891 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7892 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7893 break;
7895 is_declare_target = octx == NULL;
7897 if (!is_declare_target)
7899 int gdmk;
7900 enum omp_clause_defaultmap_kind kind;
7901 if (lang_hooks.decls.omp_allocatable_p (decl))
7902 gdmk = GDMK_ALLOCATABLE;
7903 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7904 gdmk = GDMK_SCALAR_TARGET;
7905 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7906 gdmk = GDMK_SCALAR;
7907 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7908 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7909 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7910 == POINTER_TYPE)))
7911 gdmk = GDMK_POINTER;
7912 else
7913 gdmk = GDMK_AGGREGATE;
7914 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7915 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7917 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7918 nflags |= GOVD_FIRSTPRIVATE;
7919 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7920 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7921 else
7922 gcc_unreachable ();
7924 else if (ctx->defaultmap[gdmk] == 0)
7926 tree d = lang_hooks.decls.omp_report_decl (decl);
7927 error ("%qE not specified in enclosing %<target%>",
7928 DECL_NAME (d));
7929 inform (ctx->location, "enclosing %<target%>");
7931 else if (ctx->defaultmap[gdmk]
7932 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7933 nflags |= ctx->defaultmap[gdmk];
7934 else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
7936 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7937 nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
7939 else
7941 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7942 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7947 struct gimplify_omp_ctx *octx = ctx->outer_context;
7948 if ((ctx->region_type & ORT_ACC) && octx)
7950 /* Look in outer OpenACC contexts, to see if there's a
7951 data attribute for this variable. */
7952 omp_notice_variable (octx, decl, in_code);
7954 for (; octx; octx = octx->outer_context)
7956 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7957 break;
7958 splay_tree_node n2
7959 = splay_tree_lookup (octx->variables,
7960 (splay_tree_key) decl);
7961 if (n2)
7963 if (octx->region_type == ORT_ACC_HOST_DATA)
7964 error ("variable %qE declared in enclosing "
7965 "%<host_data%> region", DECL_NAME (decl));
7966 nflags |= GOVD_MAP;
7967 if (octx->region_type == ORT_ACC_DATA
7968 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7969 nflags |= GOVD_MAP_0LEN_ARRAY;
7970 goto found_outer;
7975 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7976 | GOVD_MAP_ALLOC_ONLY)) == flags)
7978 tree type = TREE_TYPE (decl);
7980 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7981 && omp_privatize_by_reference (decl))
7982 type = TREE_TYPE (type);
7983 if (!omp_mappable_type (type))
7985 error ("%qD referenced in target region does not have "
7986 "a mappable type", decl);
7987 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7989 else
7991 if ((ctx->region_type & ORT_ACC) != 0)
7992 nflags = oacc_default_clause (ctx, decl, flags);
7993 else
7994 nflags |= GOVD_MAP;
7997 found_outer:
7998 omp_add_variable (ctx, decl, nflags);
8000 else
8002 /* If nothing changed, there's nothing left to do. */
8003 if ((n->value & flags) == flags)
8004 return ret;
8005 flags |= n->value;
8006 n->value = flags;
8008 goto do_outer;
8011 if (n == NULL)
8013 if (ctx->region_type == ORT_WORKSHARE
8014 || ctx->region_type == ORT_TASKGROUP
8015 || ctx->region_type == ORT_SIMD
8016 || ctx->region_type == ORT_ACC
8017 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8018 goto do_outer;
8020 flags = omp_default_clause (ctx, decl, in_code, flags);
8022 if ((flags & GOVD_PRIVATE)
8023 && lang_hooks.decls.omp_private_outer_ref (decl))
8024 flags |= GOVD_PRIVATE_OUTER_REF;
8026 omp_add_variable (ctx, decl, flags);
8028 shared = (flags & GOVD_SHARED) != 0;
8029 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8030 goto do_outer;
8033 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8034 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
8035 if (ctx->region_type == ORT_SIMD
8036 && ctx->in_for_exprs
8037 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8038 == GOVD_PRIVATE))
8039 flags &= ~GOVD_SEEN;
8041 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8042 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8043 && DECL_SIZE (decl))
8045 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8047 splay_tree_node n2;
8048 tree t = DECL_VALUE_EXPR (decl);
8049 gcc_assert (INDIRECT_REF_P (t));
8050 t = TREE_OPERAND (t, 0);
8051 gcc_assert (DECL_P (t));
8052 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8053 n2->value |= GOVD_SEEN;
8055 else if (omp_privatize_by_reference (decl)
8056 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8057 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8058 != INTEGER_CST))
8060 splay_tree_node n2;
8061 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8062 gcc_assert (DECL_P (t));
8063 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8064 if (n2)
8065 omp_notice_variable (ctx, t, true);
8069 if (ctx->region_type & ORT_ACC)
8070 /* For OpenACC, as remarked above, defer expansion. */
8071 shared = false;
8072 else
8073 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8074 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8076 /* If nothing changed, there's nothing left to do. */
8077 if ((n->value & flags) == flags)
8078 return ret;
8079 flags |= n->value;
8080 n->value = flags;
8082 do_outer:
8083 /* If the variable is private in the current context, then we don't
8084 need to propagate anything to an outer context. */
8085 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8086 return ret;
8087 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8088 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8089 return ret;
8090 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8091 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8092 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8093 return ret;
8094 if (ctx->outer_context
8095 && omp_notice_variable (ctx->outer_context, decl, in_code))
8096 return true;
8097 return ret;
8100 /* Verify that DECL is private within CTX. If there's specific information
8101 to the contrary in the innermost scope, generate an error. */
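/* E.g. (a sketch) "#pragma omp parallel for shared (i)" with I the loop
   iterator reaches the GOVD_SHARED case below; after the diagnostic the
   entry is downgraded to GOVD_PRIVATE so compilation can continue. */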
8103 static bool
8104 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8106 splay_tree_node n;
8108 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8109 if (n != NULL)
8111 if (n->value & GOVD_SHARED)
8113 if (ctx == gimplify_omp_ctxp)
8115 if (simd)
8116 error ("iteration variable %qE is predetermined linear",
8117 DECL_NAME (decl));
8118 else
8119 error ("iteration variable %qE should be private",
8120 DECL_NAME (decl));
8121 n->value = GOVD_PRIVATE;
8122 return true;
8124 else
8125 return false;
8127 else if ((n->value & GOVD_EXPLICIT) != 0
8128 && (ctx == gimplify_omp_ctxp
8129 || (ctx->region_type == ORT_COMBINED_PARALLEL
8130 && gimplify_omp_ctxp->outer_context == ctx)))
8132 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8133 error ("iteration variable %qE should not be firstprivate",
8134 DECL_NAME (decl));
8135 else if ((n->value & GOVD_REDUCTION) != 0)
8136 error ("iteration variable %qE should not be reduction",
8137 DECL_NAME (decl));
8138 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8139 error ("iteration variable %qE should not be linear",
8140 DECL_NAME (decl));
8142 return (ctx == gimplify_omp_ctxp
8143 || (ctx->region_type == ORT_COMBINED_PARALLEL
8144 && gimplify_omp_ctxp->outer_context == ctx));
8147 if (ctx->region_type != ORT_WORKSHARE
8148 && ctx->region_type != ORT_TASKGROUP
8149 && ctx->region_type != ORT_SIMD
8150 && ctx->region_type != ORT_ACC)
8151 return false;
8152 else if (ctx->outer_context)
8153 return omp_is_private (ctx->outer_context, decl, simd);
8154 return false;
8157 /* Return true if DECL is private within a parallel region that binds
8158 to the current construct's context, or appears in such a parallel
8159 region's REDUCTION clause. */
8161 static bool
8162 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8164 splay_tree_node n;
8168 ctx = ctx->outer_context;
8169 if (ctx == NULL)
8171 if (is_global_var (decl))
8172 return false;
8174 /* References might be private, but they might be shared too.
8175 When checking for copyprivate, assume they might be private;
8176 otherwise assume they might be shared. */
8177 if (copyprivate)
8178 return true;
8180 if (omp_privatize_by_reference (decl))
8181 return false;
8183 /* Treat C++ privatized non-static data members outside
8184 of the privatization the same. */
8185 if (omp_member_access_dummy_var (decl))
8186 return false;
8188 return true;
8191 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8193 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8194 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8196 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8197 || n == NULL
8198 || (n->value & GOVD_MAP) == 0)
8199 continue;
8200 return false;
8203 if (n != NULL)
8205 if ((n->value & GOVD_LOCAL) != 0
8206 && omp_member_access_dummy_var (decl))
8207 return false;
8208 return (n->value & GOVD_SHARED) == 0;
8211 if (ctx->region_type == ORT_WORKSHARE
8212 || ctx->region_type == ORT_TASKGROUP
8213 || ctx->region_type == ORT_SIMD
8214 || ctx->region_type == ORT_ACC)
8215 continue;
8217 break;
8219 while (1);
8220 return false;
8223 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8225 static tree
8226 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8228 tree t = *tp;
8230 /* If this is the DECL_EXPR for the decl we are looking for, return it. */
8231 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8232 return t;
8234 if (IS_TYPE_OR_DECL_P (t))
8235 *walk_subtrees = 0;
8236 return NULL_TREE;
8240 /* Gimplify the affinity clause but effectively ignore it.
8241 Generate:
8242 var = begin;
8243 if ((step > 0) ? var <= end : var > end)
8244 locator_var_expr; */
8246 static void
8247 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8249 tree last_iter = NULL_TREE;
8250 tree last_bind = NULL_TREE;
8251 tree label = NULL_TREE;
8252 tree *last_body = NULL;
8253 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8254 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8256 tree t = OMP_CLAUSE_DECL (c);
8257 if (TREE_CODE (t) == TREE_LIST
8258 && TREE_PURPOSE (t)
8259 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8261 if (TREE_VALUE (t) == null_pointer_node)
8262 continue;
8263 if (TREE_PURPOSE (t) != last_iter)
8265 if (last_bind)
8267 append_to_statement_list (label, last_body);
8268 gimplify_and_add (last_bind, pre_p);
8269 last_bind = NULL_TREE;
8271 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8273 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8274 is_gimple_val, fb_rvalue) == GS_ERROR
8275 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8276 is_gimple_val, fb_rvalue) == GS_ERROR
8277 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8278 is_gimple_val, fb_rvalue) == GS_ERROR
8279 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8280 is_gimple_val, fb_rvalue)
8281 == GS_ERROR))
8282 return;
8284 last_iter = TREE_PURPOSE (t);
8285 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8286 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8287 NULL, block);
8288 last_body = &BIND_EXPR_BODY (last_bind);
8289 tree cond = NULL_TREE;
8290 location_t loc = OMP_CLAUSE_LOCATION (c);
8291 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8293 tree var = TREE_VEC_ELT (it, 0);
8294 tree begin = TREE_VEC_ELT (it, 1);
8295 tree end = TREE_VEC_ELT (it, 2);
8296 tree step = TREE_VEC_ELT (it, 3);
8297 loc = DECL_SOURCE_LOCATION (var);
8298 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8299 var, begin);
8300 append_to_statement_list_force (tem, last_body);
8302 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8303 step, build_zero_cst (TREE_TYPE (step)));
8304 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8305 var, end);
8306 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8307 var, end);
8308 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8309 cond1, cond2, cond3);
8310 if (cond)
8311 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8312 boolean_type_node, cond, cond1);
8313 else
8314 cond = cond1;
8316 tree cont_label = create_artificial_label (loc);
8317 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8318 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8319 void_node,
8320 build_and_jump (&cont_label));
8321 append_to_statement_list_force (tem, last_body);
8323 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8325 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8326 last_body);
8327 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8329 if (error_operand_p (TREE_VALUE (t)))
8330 return;
8331 append_to_statement_list_force (TREE_VALUE (t), last_body);
8332 TREE_VALUE (t) = null_pointer_node;
8334 else
8336 if (last_bind)
8338 append_to_statement_list (label, last_body);
8339 gimplify_and_add (last_bind, pre_p);
8340 last_bind = NULL_TREE;
8342 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8344 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8345 NULL, is_gimple_val, fb_rvalue);
8346 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8348 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8349 return;
8350 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8351 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8352 return;
8353 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8356 if (last_bind)
8358 append_to_statement_list (label, last_body);
8359 gimplify_and_add (last_bind, pre_p);
8361 return;
8364 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8365 lower all the depend clauses by populating the corresponding depend
8366 array. Returns 0 if there are no such depend clauses, or
8367 2 if all depend clauses should be removed, 1 otherwise. */
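/* Sketch of the array populated below. New layout:

     [0] 0   [1] total count   [2] out/inout count
     [3] mutexinoutset count   [4] in count
     [5..] addresses of the dependence objects (inoutset entries
           occupy two slots each)

   Old layout, used when only in/out/inout kinds appear:

     [0] total count   [1] out/inout count   [2..] addresses  */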
8369 static int
8370 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8372 tree c;
8373 gimple *g;
8374 size_t n[5] = { 0, 0, 0, 0, 0 };
8375 bool unused[5];
8376 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8377 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8378 size_t i, j;
8379 location_t first_loc = UNKNOWN_LOCATION;
8381 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8382 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8384 switch (OMP_CLAUSE_DEPEND_KIND (c))
8386 case OMP_CLAUSE_DEPEND_IN:
8387 i = 2;
8388 break;
8389 case OMP_CLAUSE_DEPEND_OUT:
8390 case OMP_CLAUSE_DEPEND_INOUT:
8391 i = 0;
8392 break;
8393 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8394 i = 1;
8395 break;
8396 case OMP_CLAUSE_DEPEND_DEPOBJ:
8397 i = 3;
8398 break;
8399 case OMP_CLAUSE_DEPEND_INOUTSET:
8400 i = 4;
8401 break;
8402 default:
8403 gcc_unreachable ();
8405 tree t = OMP_CLAUSE_DECL (c);
8406 if (first_loc == UNKNOWN_LOCATION)
8407 first_loc = OMP_CLAUSE_LOCATION (c);
8408 if (TREE_CODE (t) == TREE_LIST
8409 && TREE_PURPOSE (t)
8410 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8412 if (TREE_PURPOSE (t) != last_iter)
8414 tree tcnt = size_one_node;
8415 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8417 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8418 is_gimple_val, fb_rvalue) == GS_ERROR
8419 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8420 is_gimple_val, fb_rvalue) == GS_ERROR
8421 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8422 is_gimple_val, fb_rvalue) == GS_ERROR
8423 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8424 is_gimple_val, fb_rvalue)
8425 == GS_ERROR))
8426 return 2;
8427 tree var = TREE_VEC_ELT (it, 0);
8428 tree begin = TREE_VEC_ELT (it, 1);
8429 tree end = TREE_VEC_ELT (it, 2);
8430 tree step = TREE_VEC_ELT (it, 3);
8431 tree orig_step = TREE_VEC_ELT (it, 4);
8432 tree type = TREE_TYPE (var);
8433 tree stype = TREE_TYPE (step);
8434 location_t loc = DECL_SOURCE_LOCATION (var);
8435 tree endmbegin;
8436 /* Compute count for this iterator as
8437 orig_step > 0
8438 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8439 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8440 and compute product of those for the entire depend
8441 clause. */
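/* (E.g. begin 0, end 10, step 3 with orig_step > 0 gives
   (10 - 0 + (3 - 1)) / 3 = 4 iterations.) */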
8442 if (POINTER_TYPE_P (type))
8443 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8444 stype, end, begin);
8445 else
8446 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8447 end, begin);
8448 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8449 step,
8450 build_int_cst (stype, 1));
8451 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8452 build_int_cst (stype, 1));
8453 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8454 unshare_expr (endmbegin),
8455 stepm1);
8456 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8457 pos, step);
8458 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8459 endmbegin, stepp1);
8460 if (TYPE_UNSIGNED (stype))
8462 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8463 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8465 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8466 neg, step);
8467 step = NULL_TREE;
8468 tree cond = fold_build2_loc (loc, LT_EXPR,
8469 boolean_type_node,
8470 begin, end);
8471 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8472 build_int_cst (stype, 0));
8473 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8474 end, begin);
8475 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8476 build_int_cst (stype, 0));
8477 tree osteptype = TREE_TYPE (orig_step);
8478 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8479 orig_step,
8480 build_int_cst (osteptype, 0));
8481 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8482 cond, pos, neg);
8483 cnt = fold_convert_loc (loc, sizetype, cnt);
8484 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8485 fb_rvalue) == GS_ERROR)
8486 return 2;
8487 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8489 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8490 fb_rvalue) == GS_ERROR)
8491 return 2;
8492 last_iter = TREE_PURPOSE (t);
8493 last_count = tcnt;
8495 if (counts[i] == NULL_TREE)
8496 counts[i] = last_count;
8497 else
8498 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8499 PLUS_EXPR, counts[i], last_count);
8501 else
8502 n[i]++;
8504 for (i = 0; i < 5; i++)
8505 if (counts[i])
8506 break;
8507 if (i == 5)
8508 return 0;
8510 tree total = size_zero_node;
8511 for (i = 0; i < 5; i++)
8513 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8514 if (counts[i] == NULL_TREE)
8515 counts[i] = size_zero_node;
8516 if (n[i])
8517 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8518 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8519 fb_rvalue) == GS_ERROR)
8520 return 2;
8521 total = size_binop (PLUS_EXPR, total, counts[i]);
8524 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8525 == GS_ERROR)
8526 return 2;
8527 bool is_old = unused[1] && unused[3] && unused[4];
8528 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8529 size_int (is_old ? 1 : 4));
8530 if (!unused[4])
8531 totalpx = size_binop (PLUS_EXPR, totalpx,
8532 size_binop (MULT_EXPR, counts[4], size_int (2)));
8533 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8534 tree array = create_tmp_var_raw (type);
8535 TREE_ADDRESSABLE (array) = 1;
8536 if (!poly_int_tree_p (totalpx))
8538 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8539 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8540 if (gimplify_omp_ctxp)
8542 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8543 while (ctx
8544 && (ctx->region_type == ORT_WORKSHARE
8545 || ctx->region_type == ORT_TASKGROUP
8546 || ctx->region_type == ORT_SIMD
8547 || ctx->region_type == ORT_ACC))
8548 ctx = ctx->outer_context;
8549 if (ctx)
8550 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8552 gimplify_vla_decl (array, pre_p);
8554 else
8555 gimple_add_tmp_var (array);
8556 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8557 NULL_TREE);
8558 tree tem;
8559 if (!is_old)
8561 tem = build2 (MODIFY_EXPR, void_type_node, r,
8562 build_int_cst (ptr_type_node, 0));
8563 gimplify_and_add (tem, pre_p);
8564 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8565 NULL_TREE);
8567 tem = build2 (MODIFY_EXPR, void_type_node, r,
8568 fold_convert (ptr_type_node, total));
8569 gimplify_and_add (tem, pre_p);
8570 for (i = 1; i < (is_old ? 2 : 4); i++)
8572 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8573 NULL_TREE, NULL_TREE);
8574 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8575 gimplify_and_add (tem, pre_p);
8578 tree cnts[6];
8579 for (j = 5; j; j--)
8580 if (!unused[j - 1])
8581 break;
8582 for (i = 0; i < 5; i++)
8584 if (i && (i >= j || unused[i - 1]))
8586 cnts[i] = cnts[i - 1];
8587 continue;
8589 cnts[i] = create_tmp_var (sizetype);
8590 if (i == 0)
8591 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8592 else
8594 tree t;
8595 if (is_old)
8596 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8597 else
8598 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8599 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8600 == GS_ERROR)
8601 return 2;
8602 g = gimple_build_assign (cnts[i], t);
8604 gimple_seq_add_stmt (pre_p, g);
8606 if (unused[4])
8607 cnts[5] = NULL_TREE;
8608 else
8610 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8611 cnts[5] = create_tmp_var (sizetype);
8612 g = gimple_build_assign (cnts[5], t);
8613 gimple_seq_add_stmt (pre_p, g);
8616 last_iter = NULL_TREE;
8617 tree last_bind = NULL_TREE;
8618 tree *last_body = NULL;
8619 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8620 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8622 switch (OMP_CLAUSE_DEPEND_KIND (c))
8624 case OMP_CLAUSE_DEPEND_IN:
8625 i = 2;
8626 break;
8627 case OMP_CLAUSE_DEPEND_OUT:
8628 case OMP_CLAUSE_DEPEND_INOUT:
8629 i = 0;
8630 break;
8631 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8632 i = 1;
8633 break;
8634 case OMP_CLAUSE_DEPEND_DEPOBJ:
8635 i = 3;
8636 break;
8637 case OMP_CLAUSE_DEPEND_INOUTSET:
8638 i = 4;
8639 break;
8640 default:
8641 gcc_unreachable ();
8643 tree t = OMP_CLAUSE_DECL (c);
8644 if (TREE_CODE (t) == TREE_LIST
8645 && TREE_PURPOSE (t)
8646 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8648 if (TREE_PURPOSE (t) != last_iter)
8650 if (last_bind)
8651 gimplify_and_add (last_bind, pre_p);
8652 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8653 last_bind = build3 (BIND_EXPR, void_type_node,
8654 BLOCK_VARS (block), NULL, block);
8655 TREE_SIDE_EFFECTS (last_bind) = 1;
8656 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8657 tree *p = &BIND_EXPR_BODY (last_bind);
8658 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8660 tree var = TREE_VEC_ELT (it, 0);
8661 tree begin = TREE_VEC_ELT (it, 1);
8662 tree end = TREE_VEC_ELT (it, 2);
8663 tree step = TREE_VEC_ELT (it, 3);
8664 tree orig_step = TREE_VEC_ELT (it, 4);
8665 tree type = TREE_TYPE (var);
8666 location_t loc = DECL_SOURCE_LOCATION (var);
8667 /* Emit:
8668 var = begin;
8669 goto cond_label;
8670 beg_label:
8671 ...
8672 var = var + step;
8673 cond_label:
8674 if (orig_step > 0) {
8675 if (var < end) goto beg_label;
8676 } else {
8677 if (var > end) goto beg_label;
8678 }
8679 for each iterator, with inner iterators added to
8680 the ... above. */
8681 tree beg_label = create_artificial_label (loc);
8682 tree cond_label = NULL_TREE;
8683 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8684 var, begin);
8685 append_to_statement_list_force (tem, p);
8686 tem = build_and_jump (&cond_label);
8687 append_to_statement_list_force (tem, p);
8688 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8689 append_to_statement_list (tem, p);
8690 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8691 NULL_TREE, NULL_TREE);
8692 TREE_SIDE_EFFECTS (bind) = 1;
8693 SET_EXPR_LOCATION (bind, loc);
8694 append_to_statement_list_force (bind, p);
8695 if (POINTER_TYPE_P (type))
8696 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8697 var, fold_convert_loc (loc, sizetype,
8698 step));
8699 else
8700 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8701 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8702 var, tem);
8703 append_to_statement_list_force (tem, p);
8704 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8705 append_to_statement_list (tem, p);
8706 tree cond = fold_build2_loc (loc, LT_EXPR,
8707 boolean_type_node,
8708 var, end);
8709 tree pos
8710 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8711 cond, build_and_jump (&beg_label),
8712 void_node);
8713 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8714 var, end);
8715 tree neg
8716 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8717 cond, build_and_jump (&beg_label),
8718 void_node);
8719 tree osteptype = TREE_TYPE (orig_step);
8720 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8721 orig_step,
8722 build_int_cst (osteptype, 0));
8723 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8724 cond, pos, neg);
8725 append_to_statement_list_force (tem, p);
8726 p = &BIND_EXPR_BODY (bind);
8728 last_body = p;
8730 last_iter = TREE_PURPOSE (t);
8731 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8733 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8734 0), last_body);
8735 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8737 if (error_operand_p (TREE_VALUE (t)))
8738 return 2;
8739 if (TREE_VALUE (t) != null_pointer_node)
8740 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8741 if (i == 4)
8743 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8744 NULL_TREE, NULL_TREE);
8745 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8746 NULL_TREE, NULL_TREE);
8747 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8748 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8749 void_type_node, r, r2);
8750 append_to_statement_list_force (tem, last_body);
8751 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8752 void_type_node, cnts[i],
8753 size_binop (PLUS_EXPR, cnts[i],
8754 size_int (1)));
8755 append_to_statement_list_force (tem, last_body);
8756 i = 5;
8758 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8759 NULL_TREE, NULL_TREE);
8760 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8761 void_type_node, r, TREE_VALUE (t));
8762 append_to_statement_list_force (tem, last_body);
8763 if (i == 5)
8765 r = build4 (ARRAY_REF, ptr_type_node, array,
8766 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8767 NULL_TREE, NULL_TREE);
8768 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8769 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8770 void_type_node, r, tem);
8771 append_to_statement_list_force (tem, last_body);
8773 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8774 void_type_node, cnts[i],
8775 size_binop (PLUS_EXPR, cnts[i],
8776 size_int (1 + (i == 5))));
8777 append_to_statement_list_force (tem, last_body);
8778 TREE_VALUE (t) = null_pointer_node;
8780 else
8782 if (last_bind)
8784 gimplify_and_add (last_bind, pre_p);
8785 last_bind = NULL_TREE;
8787 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8789 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8790 NULL, is_gimple_val, fb_rvalue);
8791 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8793 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8794 return 2;
8795 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8796 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8797 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8798 is_gimple_val, fb_rvalue) == GS_ERROR)
8799 return 2;
8800 if (i == 4)
8802 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8803 NULL_TREE, NULL_TREE);
8804 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8805 NULL_TREE, NULL_TREE);
8806 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8807 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8808 gimplify_and_add (tem, pre_p);
8809 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8810 cnts[i],
8811 size_int (1)));
8812 gimple_seq_add_stmt (pre_p, g);
8813 i = 5;
8815 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8816 NULL_TREE, NULL_TREE);
8817 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8818 gimplify_and_add (tem, pre_p);
8819 if (i == 5)
8821 r = build4 (ARRAY_REF, ptr_type_node, array,
8822 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8823 NULL_TREE, NULL_TREE);
8824 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8825 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8827 gimplify_and_add (tem, pre_p);
8829 g = gimple_build_assign (cnts[i],
8830 size_binop (PLUS_EXPR, cnts[i],
8831 size_int (1 + (i == 5))));
8832 gimple_seq_add_stmt (pre_p, g);
8835 if (last_bind)
8836 gimplify_and_add (last_bind, pre_p);
8837 tree cond = boolean_false_node;
8838 if (is_old)
8840 if (!unused[0])
8841 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8842 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8843 size_int (2)));
8844 if (!unused[2])
8845 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8846 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8847 cnts[2],
8848 size_binop_loc (first_loc, PLUS_EXPR,
8849 totalpx,
8850 size_int (1))));
8852 else
8854 tree prev = size_int (5);
8855 for (i = 0; i < 5; i++)
8857 if (unused[i])
8858 continue;
8859 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8860 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8861 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8862 cnts[i], unshare_expr (prev)));
8865 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8866 build_call_expr_loc (first_loc,
8867 builtin_decl_explicit (BUILT_IN_TRAP),
8868 0), void_node);
8869 gimplify_and_add (tem, pre_p);
8870 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8871 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8872 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8873 OMP_CLAUSE_CHAIN (c) = *list_p;
8874 *list_p = c;
8875 return 1;
8878 /* For a set of mappings describing an array section pointed to by a struct
8879 (or derived type, etc.) component, create an "alloc" or "release" node to
8880 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8881 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8882 be created that is inserted into the list of mapping nodes attached to the
8883 directive being processed -- not part of the sorted list of nodes after
8884 GOMP_MAP_STRUCT.
8886 CODE is the code of the directive being processed. GRP_START and GRP_END
8887 are the first and last of two or three nodes representing this array section
8888 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8889 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8890 filled with the additional node described above, if needed.
8892 This function does not add the new nodes to any lists itself. It is the
8893 responsibility of the caller to do that. */
8895 static tree
8896 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8897 tree *extra_node)
8899 enum gomp_map_kind mkind
8900 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8901 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8903 gcc_assert (grp_start != grp_end);
8905 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8906 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8907 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8908 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8909 tree grp_mid = NULL_TREE;
8910 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8911 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8913 if (grp_mid
8914 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8915 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8916 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8917 else
8918 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8920 if (grp_mid
8921 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8922 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
8923 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
8925 tree c3
8926 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8927 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8928 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
8929 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8930 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
8932 *extra_node = c3;
8934 else
8935 *extra_node = NULL_TREE;
8937 return c2;
8940 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
8941 and set *BITPOSP and *POFFSETP to the bit and byte offsets of the access
8942 relative to that object. Return the containing object. */
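/* E.g. (a sketch, assuming 4-byte int) for BASE "s.a[3]" with A an int
   array at offset 0 in S, this returns S and sets *BITPOSP to 96 and
   *POFFSETP to 12. */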
8947 static tree
8948 extract_base_bit_offset (tree base, poly_int64 *bitposp,
8949 poly_offset_int *poffsetp)
8951 tree offset;
8952 poly_int64 bitsize, bitpos;
8953 machine_mode mode;
8954 int unsignedp, reversep, volatilep = 0;
8955 poly_offset_int poffset;
8957 STRIP_NOPS (base);
8959 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8960 &unsignedp, &reversep, &volatilep);
8962 STRIP_NOPS (base);
8964 if (offset && poly_int_tree_p (offset))
8966 poffset = wi::to_poly_offset (offset);
8967 offset = NULL_TREE;
8969 else
8970 poffset = 0;
8972 if (maybe_ne (bitpos, 0))
8973 poffset += bits_to_bytes_round_down (bitpos);
8975 *bitposp = bitpos;
8976 *poffsetp = poffset;
8978 return base;
8981 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8982 started processing the group yet. The TEMPORARY mark is used when we first
8983 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8984 when we have processed all the group's children (i.e. all the base pointers
8985 referred to by the group's mapping nodes, recursively). */
8987 enum omp_tsort_mark {
8988 UNVISITED,
8989 TEMPORARY,
8990 PERMANENT
8993 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
8994 but ignores side effects in the equality comparisons. */
8996 struct tree_operand_hash_no_se : tree_operand_hash
8998 static inline bool equal (const value_type &,
8999 const compare_type &);
9002 inline bool
9003 tree_operand_hash_no_se::equal (const value_type &t1,
9004 const compare_type &t2)
9006 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9009 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9010 clause. */
9012 struct omp_mapping_group {
9013 tree *grp_start;
9014 tree grp_end;
9015 omp_tsort_mark mark;
9016 /* If we've removed the group but need to reindex, mark the group as
9017 deleted. */
9018 bool deleted;
9019 struct omp_mapping_group *sibling;
9020 struct omp_mapping_group *next;
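/* Dump the mapping-clause nodes of GRP to stderr, temporarily cutting the
   clause chain after the group's last node so only the group itself is
   printed. */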
9023 DEBUG_FUNCTION void
9024 debug_mapping_group (omp_mapping_group *grp)
9026 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9027 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9028 debug_generic_expr (*grp->grp_start);
9029 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9032 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9033 isn't one. */
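/* E.g. for "p->arr[i]" (an ARRAY_REF over an indirection of P) this
   returns P; for a plain "var.field" there is no base pointer and
   NULL_TREE is returned. */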
9035 static tree
9036 omp_get_base_pointer (tree expr)
9038 while (TREE_CODE (expr) == ARRAY_REF
9039 || TREE_CODE (expr) == COMPONENT_REF)
9040 expr = TREE_OPERAND (expr, 0);
9042 if (INDIRECT_REF_P (expr)
9043 || (TREE_CODE (expr) == MEM_REF
9044 && integer_zerop (TREE_OPERAND (expr, 1))))
9046 expr = TREE_OPERAND (expr, 0);
9047 while (TREE_CODE (expr) == COMPOUND_EXPR)
9048 expr = TREE_OPERAND (expr, 1);
9049 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9050 expr = TREE_OPERAND (expr, 0);
9051 if (TREE_CODE (expr) == SAVE_EXPR)
9052 expr = TREE_OPERAND (expr, 0);
9053 STRIP_NOPS (expr);
9054 return expr;
9057 return NULL_TREE;
9060 /* Remove COMPONENT_REFS and indirections from EXPR. */
9062 static tree
9063 omp_strip_components_and_deref (tree expr)
9065 while (TREE_CODE (expr) == COMPONENT_REF
9066 || INDIRECT_REF_P (expr)
9067 || (TREE_CODE (expr) == MEM_REF
9068 && integer_zerop (TREE_OPERAND (expr, 1)))
9069 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9070 || TREE_CODE (expr) == COMPOUND_EXPR)
9071 if (TREE_CODE (expr) == COMPOUND_EXPR)
9072 expr = TREE_OPERAND (expr, 1);
9073 else
9074 expr = TREE_OPERAND (expr, 0);
9076 STRIP_NOPS (expr);
9078 return expr;
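/* Strip bare indirections (INDIRECT_REFs and zero-offset MEM_REFs) from
   EXPR, leaving component references intact. */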
9081 static tree
9082 omp_strip_indirections (tree expr)
9084 while (INDIRECT_REF_P (expr)
9085 || (TREE_CODE (expr) == MEM_REF
9086 && integer_zerop (TREE_OPERAND (expr, 1))))
9087 expr = TREE_OPERAND (expr, 0);
9089 return expr;
9092 /* An attach or detach operation depends directly on the address being
9093 attached/detached. Return that address, or none if there are no
9094 attachments/detachments. */
9096 static tree
9097 omp_get_attachment (omp_mapping_group *grp)
9099 tree node = *grp->grp_start;
9101 switch (OMP_CLAUSE_MAP_KIND (node))
9103 case GOMP_MAP_TO:
9104 case GOMP_MAP_FROM:
9105 case GOMP_MAP_TOFROM:
9106 case GOMP_MAP_ALWAYS_FROM:
9107 case GOMP_MAP_ALWAYS_TO:
9108 case GOMP_MAP_ALWAYS_TOFROM:
9109 case GOMP_MAP_FORCE_FROM:
9110 case GOMP_MAP_FORCE_TO:
9111 case GOMP_MAP_FORCE_TOFROM:
9112 case GOMP_MAP_FORCE_PRESENT:
9113 case GOMP_MAP_PRESENT_ALLOC:
9114 case GOMP_MAP_PRESENT_FROM:
9115 case GOMP_MAP_PRESENT_TO:
9116 case GOMP_MAP_PRESENT_TOFROM:
9117 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9118 case GOMP_MAP_ALWAYS_PRESENT_TO:
9119 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9120 case GOMP_MAP_ALLOC:
9121 case GOMP_MAP_RELEASE:
9122 case GOMP_MAP_DELETE:
9123 case GOMP_MAP_FORCE_ALLOC:
9124 if (node == grp->grp_end)
9125 return NULL_TREE;
9127 node = OMP_CLAUSE_CHAIN (node);
9128 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9130 gcc_assert (node != grp->grp_end);
9131 node = OMP_CLAUSE_CHAIN (node);
9133 if (node)
9134 switch (OMP_CLAUSE_MAP_KIND (node))
9136 case GOMP_MAP_POINTER:
9137 case GOMP_MAP_ALWAYS_POINTER:
9138 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9139 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9140 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9141 return NULL_TREE;
9143 case GOMP_MAP_ATTACH_DETACH:
9144 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9145 return OMP_CLAUSE_DECL (node);
9147 default:
9148 internal_error ("unexpected mapping node");
9150 return error_mark_node;
9152 case GOMP_MAP_TO_PSET:
9153 gcc_assert (node != grp->grp_end);
9154 node = OMP_CLAUSE_CHAIN (node);
9155 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9156 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9157 return OMP_CLAUSE_DECL (node);
9158 else
9159 internal_error ("unexpected mapping node");
9160 return error_mark_node;
9162 case GOMP_MAP_ATTACH:
9163 case GOMP_MAP_DETACH:
9164 node = OMP_CLAUSE_CHAIN (node);
9165 if (!node || *grp->grp_start == grp->grp_end)
9166 return OMP_CLAUSE_DECL (*grp->grp_start);
9167 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9168 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9169 return OMP_CLAUSE_DECL (*grp->grp_start);
9170 else
9171 internal_error ("unexpected mapping node");
9172 return error_mark_node;
9174 case GOMP_MAP_STRUCT:
9175 case GOMP_MAP_FORCE_DEVICEPTR:
9176 case GOMP_MAP_DEVICE_RESIDENT:
9177 case GOMP_MAP_LINK:
9178 case GOMP_MAP_IF_PRESENT:
9179 case GOMP_MAP_FIRSTPRIVATE:
9180 case GOMP_MAP_FIRSTPRIVATE_INT:
9181 case GOMP_MAP_USE_DEVICE_PTR:
9182 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9183 return NULL_TREE;
9185 default:
9186 internal_error ("unexpected mapping node");
9189 return error_mark_node;
9192 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9193 mappings, return the chain pointer to the end of that group in the list. */
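/* E.g. (hypothetical chain): for

     GOMP_MAP_TOFROM (s.ptr) -> GOMP_MAP_ATTACH_DETACH (s.ptr)
       -> GOMP_MAP_TO (x)

   the first two nodes form one group, and the value returned is the
   address of the TOFROM node's chain pointer, whose target (the
   ATTACH_DETACH node) is the end of the group.  */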
9195 static tree *
9196 omp_group_last (tree *start_p)
9198 tree c = *start_p, nc, *grp_last_p = start_p;
9200 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9202 nc = OMP_CLAUSE_CHAIN (c);
9204 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9205 return grp_last_p;
9207 switch (OMP_CLAUSE_MAP_KIND (c))
9209 default:
9210 while (nc
9211 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9212 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9213 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9214 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9215 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9216 || (OMP_CLAUSE_MAP_KIND (nc)
9217 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9218 || (OMP_CLAUSE_MAP_KIND (nc)
9219 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9220 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9221 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9223 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9224 c = nc;
9225 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9226 if (nc2
9227 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9228 && (OMP_CLAUSE_MAP_KIND (nc)
9229 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9230 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9232 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9233 c = nc2;
9234 nc2 = OMP_CLAUSE_CHAIN (nc2);
9236 nc = nc2;
9238 break;
9240 case GOMP_MAP_ATTACH:
9241 case GOMP_MAP_DETACH:
9242 /* This is a weird artifact of how directives are parsed: bare attach or
9243 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9244 FIRSTPRIVATE_REFERENCE node. FIXME. */
9245 if (nc
9246 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9247 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9248 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9249 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9250 break;
9252 case GOMP_MAP_TO_PSET:
9253 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9254 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9255 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9256 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9257 break;
9259 case GOMP_MAP_STRUCT:
9261 unsigned HOST_WIDE_INT num_mappings
9262 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9263 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9264 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9265 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9266 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9267 for (unsigned i = 0; i < num_mappings; i++)
9268 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9270 break;
9273 return grp_last_p;
9276 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9277 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9278 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9279 if we have at least one such group, else return NULL. */
9281 static void
9282 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9283 tree gather_sentinel)
9285 for (tree *cp = list_p;
9286 *cp && *cp != gather_sentinel;
9287 cp = &OMP_CLAUSE_CHAIN (*cp))
9289 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9290 continue;
9292 tree *grp_last_p = omp_group_last (cp);
9293 omp_mapping_group grp;
9295 grp.grp_start = cp;
9296 grp.grp_end = *grp_last_p;
9297 grp.mark = UNVISITED;
9298 grp.sibling = NULL;
9299 grp.deleted = false;
9300 grp.next = NULL;
9301 groups->safe_push (grp);
9303 cp = grp_last_p;
9307 static vec<omp_mapping_group> *
9308 omp_gather_mapping_groups (tree *list_p)
9310 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9312 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9314 if (groups->length () > 0)
9315 return groups;
9316 else
9318 delete groups;
9319 return NULL;
9323 /* A pointer mapping group GRP may define a block of memory starting at some
9324 base address, and maybe also define a firstprivate pointer or firstprivate
9325 reference that points to that block. The return value is a node containing
9326 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9327 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9328 return the number of consecutive chained nodes in CHAINED. */
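/* Sketch (node kinds illustrative): for a group

     GOMP_MAP_TO (*p [len: N])
     GOMP_MAP_FIRSTPRIVATE_POINTER (p)

   the GOMP_MAP_TO node is returned and *FIRSTPRIVATE is set to P; for
   GOMP_MAP_STRUCT (s) followed by N member mappings, the first member
   node is returned with *CHAINED set to N.  */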
9330 static tree
9331 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9332 tree *firstprivate)
9334 tree node = *grp->grp_start;
9336 *firstprivate = NULL_TREE;
9337 *chained = 1;
9339 switch (OMP_CLAUSE_MAP_KIND (node))
9341 case GOMP_MAP_TO:
9342 case GOMP_MAP_FROM:
9343 case GOMP_MAP_TOFROM:
9344 case GOMP_MAP_ALWAYS_FROM:
9345 case GOMP_MAP_ALWAYS_TO:
9346 case GOMP_MAP_ALWAYS_TOFROM:
9347 case GOMP_MAP_FORCE_FROM:
9348 case GOMP_MAP_FORCE_TO:
9349 case GOMP_MAP_FORCE_TOFROM:
9350 case GOMP_MAP_FORCE_PRESENT:
9351 case GOMP_MAP_PRESENT_ALLOC:
9352 case GOMP_MAP_PRESENT_FROM:
9353 case GOMP_MAP_PRESENT_TO:
9354 case GOMP_MAP_PRESENT_TOFROM:
9355 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9356 case GOMP_MAP_ALWAYS_PRESENT_TO:
9357 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9358 case GOMP_MAP_ALLOC:
9359 case GOMP_MAP_RELEASE:
9360 case GOMP_MAP_DELETE:
9361 case GOMP_MAP_FORCE_ALLOC:
9362 case GOMP_MAP_IF_PRESENT:
9363 if (node == grp->grp_end)
9364 return node;
9366 node = OMP_CLAUSE_CHAIN (node);
9367 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9369 if (node == grp->grp_end)
9370 return *grp->grp_start;
9371 node = OMP_CLAUSE_CHAIN (node);
9373 if (node)
9374 switch (OMP_CLAUSE_MAP_KIND (node))
9376 case GOMP_MAP_POINTER:
9377 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9378 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9379 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9380 *firstprivate = OMP_CLAUSE_DECL (node);
9381 return *grp->grp_start;
9383 case GOMP_MAP_ALWAYS_POINTER:
9384 case GOMP_MAP_ATTACH_DETACH:
9385 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9386 return *grp->grp_start;
9388 default:
9389 internal_error ("unexpected mapping node");
9391 else
9392 internal_error ("unexpected mapping node");
9393 return error_mark_node;
9395 case GOMP_MAP_TO_PSET:
9396 gcc_assert (node != grp->grp_end);
9397 node = OMP_CLAUSE_CHAIN (node);
9398 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9399 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9400 return NULL_TREE;
9401 else
9402 internal_error ("unexpected mapping node");
9403 return error_mark_node;
9405 case GOMP_MAP_ATTACH:
9406 case GOMP_MAP_DETACH:
9407 node = OMP_CLAUSE_CHAIN (node);
9408 if (!node || *grp->grp_start == grp->grp_end)
9409 return NULL_TREE;
9410 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9411 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9413 /* We're mapping the base pointer itself in a bare attach or detach
9414 node. This is a side effect of how parsing works, and the mapping
9415 will be removed anyway (at least for enter/exit data directives).
9416 We should ignore the mapping here. FIXME. */
9417 return NULL_TREE;
9419 else
9420 internal_error ("unexpected mapping node");
9421 return error_mark_node;
9423 case GOMP_MAP_STRUCT:
9425 unsigned HOST_WIDE_INT num_mappings
9426 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9427 node = OMP_CLAUSE_CHAIN (node);
9428 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9429 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9431 *firstprivate = OMP_CLAUSE_DECL (node);
9432 node = OMP_CLAUSE_CHAIN (node);
9434 *chained = num_mappings;
9435 return node;
9438 case GOMP_MAP_FORCE_DEVICEPTR:
9439 case GOMP_MAP_DEVICE_RESIDENT:
9440 case GOMP_MAP_LINK:
9441 case GOMP_MAP_FIRSTPRIVATE:
9442 case GOMP_MAP_FIRSTPRIVATE_INT:
9443 case GOMP_MAP_USE_DEVICE_PTR:
9444 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9445 return NULL_TREE;
9447 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9448 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9449 case GOMP_MAP_POINTER:
9450 case GOMP_MAP_ALWAYS_POINTER:
9451 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9452 /* These shouldn't appear by themselves. */
9453 if (!seen_error ())
9454 internal_error ("unexpected pointer mapping node");
9455 return error_mark_node;
9457 default:
9458 gcc_unreachable ();
9461 return error_mark_node;
9464 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9465 nodes by tree_operand_hash_no_se. */
9467 static void
9468 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9469 omp_mapping_group *> *grpmap,
9470 vec<omp_mapping_group> *groups,
9471 tree reindex_sentinel)
9473 omp_mapping_group *grp;
9474 unsigned int i;
9475 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9477 FOR_EACH_VEC_ELT (*groups, i, grp)
9479 if (reindexing && *grp->grp_start == reindex_sentinel)
9480 above_hwm = true;
9482 if (reindexing && !above_hwm)
9483 continue;
9485 tree fpp;
9486 unsigned int chained;
9487 tree node = omp_group_base (grp, &chained, &fpp);
9489 if (node == error_mark_node || (!node && !fpp))
9490 continue;
9492 for (unsigned j = 0;
9493 node && j < chained;
9494 node = OMP_CLAUSE_CHAIN (node), j++)
9496 tree decl = OMP_CLAUSE_DECL (node);
9497 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9498 meaning node-hash lookups don't work. This is a workaround for
9499 that, but ideally we should just create the INDIRECT_REF at
9500 source instead. FIXME. */
9501 if (TREE_CODE (decl) == MEM_REF
9502 && integer_zerop (TREE_OPERAND (decl, 1)))
9503 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9505 omp_mapping_group **prev = grpmap->get (decl);
9507 if (prev && *prev == grp)
9508 /* Empty. */;
9509 else if (prev)
9511 /* Mapping the same thing twice is normally diagnosed as an error,
9512 but can happen under some circumstances, e.g. in pr99928-16.c,
9513 the directive:
9515 #pragma omp target simd reduction(+:a[:3]) \
9516 map(always, tofrom: a[:6])
9519 will result in two "a[0]" mappings (of different sizes). */
9521 grp->sibling = (*prev)->sibling;
9522 (*prev)->sibling = grp;
9524 else
9525 grpmap->put (decl, grp);
9528 if (!fpp)
9529 continue;
9531 omp_mapping_group **prev = grpmap->get (fpp);
9532 if (prev && *prev != grp)
9534 grp->sibling = (*prev)->sibling;
9535 (*prev)->sibling = grp;
9537 else
9538 grpmap->put (fpp, grp);
9542 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9543 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9545 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9546 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9548 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9550 return grpmap;
9553 /* Rebuild group map from partially-processed clause list (during
9554 omp_build_struct_sibling_lists). We have already processed nodes up until
9555 a high-water mark (HWM). This is a bit tricky because the list is being
9556 reordered as it is scanned, but we know:
9558 1. The list after HWM has not been touched yet, so we can reindex it safely.
9560 2. The list before and including HWM has been altered, but remains
9561 well-formed throughout the sibling-list building operation.
9563 so, we can do the reindex operation in two parts, on the processed and
9564 then the unprocessed halves of the list. */
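/* E.g. (illustrative): if the clause list was  A B C <HWM> D E  and the
   sibling-list pass has already rewritten the part up to the high-water
   mark, we re-gather and re-index groups from the rewritten  A' B' C'
   portion first, then re-index the untouched  D E  tail from the existing
   GROUPS vector, starting at SENTINEL.  */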
9566 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9567 omp_reindex_mapping_groups (tree *list_p,
9568 vec<omp_mapping_group> *groups,
9569 vec<omp_mapping_group> *processed_groups,
9570 tree sentinel)
9572 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9573 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9575 processed_groups->truncate (0);
9577 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9578 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9579 if (sentinel)
9580 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9582 return grpmap;
9585 /* Find the immediately-containing struct for a component ref (etc.)
9586 expression EXPR. */
9588 static tree
9589 omp_containing_struct (tree expr)
9591 tree expr0 = expr;
9593 STRIP_NOPS (expr);
9595 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9596 component ref. */
9597 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9598 return expr0;
9600 while (TREE_CODE (expr) == ARRAY_REF)
9601 expr = TREE_OPERAND (expr, 0);
9603 if (TREE_CODE (expr) == COMPONENT_REF)
9604 expr = TREE_OPERAND (expr, 0);
9606 return expr;
9609 /* Return TRUE if DECL describes a component that is part of a whole structure
9610 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9611 that maps that structure, if present. */
9613 static bool
9614 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9615 omp_mapping_group *> *grpmap,
9616 tree decl,
9617 omp_mapping_group **mapped_by_group)
9619 tree wsdecl = NULL_TREE;
9621 *mapped_by_group = NULL;
9623 while (true)
9625 wsdecl = omp_containing_struct (decl);
9626 if (wsdecl == decl)
9627 break;
9628 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9629 if (!wholestruct
9630 && TREE_CODE (wsdecl) == MEM_REF
9631 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9633 tree deref = TREE_OPERAND (wsdecl, 0);
9634 deref = build_fold_indirect_ref (deref);
9635 wholestruct = grpmap->get (deref);
9637 if (wholestruct)
9639 *mapped_by_group = *wholestruct;
9640 return true;
9642 decl = wsdecl;
9645 return false;
9648 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9649 FALSE on error. */
9651 static bool
9652 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9653 vec<omp_mapping_group> *groups,
9654 hash_map<tree_operand_hash_no_se,
9655 omp_mapping_group *> *grpmap,
9656 omp_mapping_group *grp)
9658 if (grp->mark == PERMANENT)
9659 return true;
9660 if (grp->mark == TEMPORARY)
9662 fprintf (stderr, "when processing group:\n");
9663 debug_mapping_group (grp);
9664 internal_error ("base pointer cycle detected");
9665 return false;
9667 grp->mark = TEMPORARY;
9669 tree attaches_to = omp_get_attachment (grp);
9671 if (attaches_to)
9673 omp_mapping_group **basep = grpmap->get (attaches_to);
9675 if (basep && *basep != grp)
9677 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9678 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9679 return false;
9683 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9685 while (decl)
9687 tree base = omp_get_base_pointer (decl);
9689 if (!base)
9690 break;
9692 omp_mapping_group **innerp = grpmap->get (base);
9693 omp_mapping_group *wholestruct;
9695 /* We should treat whole-structure mappings as if all (pointer, in this
9696 case) members are mapped as individual list items. Check if we have
9697 such a whole-structure mapping, if we don't have an explicit reference
9698 to the pointer member itself. */
9699 if (!innerp
9700 && TREE_CODE (base) == COMPONENT_REF
9701 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9702 innerp = &wholestruct;
9704 if (innerp && *innerp != grp)
9706 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9707 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9708 return false;
9709 break;
9712 decl = base;
9715 grp->mark = PERMANENT;
9717 /* Emit grp to output list. */
9719 **outlist = grp;
9720 *outlist = &grp->next;
9722 return true;
9725 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9726 before mappings that use those pointers. This is an implementation of the
9727 depth-first search algorithm, described e.g. at:
9729 https://en.wikipedia.org/wiki/Topological_sorting
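   For instance (hypothetical clauses): given map(to: p->x) together with
   map(to: p), the group mapping P is an OpenMP-defined base pointer for
   the P->X group, so the depth-first search emits the P group first; a
   cycle of base pointers is caught via the TEMPORARY mark in the helper
   above and reported as an internal error.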
9732 static omp_mapping_group *
9733 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9734 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
9735 *grpmap)
9737 omp_mapping_group *grp, *outlist = NULL, **cursor;
9738 unsigned int i;
9740 cursor = &outlist;
9742 FOR_EACH_VEC_ELT (*groups, i, grp)
9744 if (grp->mark != PERMANENT)
9745 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9746 return NULL;
9749 return outlist;
9752 /* Split INLIST into two parts, moving groups corresponding to
9753 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9754 The former list is then appended to the latter. Each sub-list retains the
9755 order of the original list.
9756 Note that ATTACH nodes are later moved to the end of the list in
9757 gimplify_adjust_omp_clauses, for target regions. */
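/* Sketch (mapping kinds illustrative): an input list

     {tofrom: a} {alloc: b} {to: c} {release: d}

   is segregated into  {tofrom: a} {to: c} {alloc: b} {release: d},
   with each half keeping its original relative order.  */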
9759 static omp_mapping_group *
9760 omp_segregate_mapping_groups (omp_mapping_group *inlist)
9762 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
9763 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
9765 for (omp_mapping_group *w = inlist; w;)
9767 tree c = *w->grp_start;
9768 omp_mapping_group *next = w->next;
9770 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9772 switch (OMP_CLAUSE_MAP_KIND (c))
9774 case GOMP_MAP_ALLOC:
9775 case GOMP_MAP_RELEASE:
9776 case GOMP_MAP_DELETE:
9777 *ard_tail = w;
9778 w->next = NULL;
9779 ard_tail = &w->next;
9780 break;
9782 default:
9783 *tf_tail = w;
9784 w->next = NULL;
9785 tf_tail = &w->next;
9788 w = next;
9791 /* Now splice the lists together... */
9792 *tf_tail = ard_groups;
9794 return tf_groups;
9797 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9798 those groups based on the output list of omp_tsort_mapping_groups --
9799 singly-linked, threaded through each element's NEXT pointer starting at
9800 HEAD. Each list element appears exactly once in that linked list.
9802 Each element of GROUPS may correspond to one or several mapping nodes.
9803 Node groups are kept together, and in the reordered list, the positions of
9804 the original groups are reused for the positions of the reordered list.
9805 Hence if we have e.g.
9807 {to ptr ptr} firstprivate {tofrom ptr} ...
9808 ^ ^ ^
9809 first group non-"map" second group
9811 and say the second group contains a base pointer for the first so must be
9812 moved before it, the resulting list will contain:
9814 {tofrom ptr} firstprivate {to ptr ptr} ...
9815 ^ prev. second group ^ prev. first group
9818 static tree *
9819 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
9820 omp_mapping_group *head,
9821 tree *list_p)
9823 omp_mapping_group *grp;
9824 unsigned int i;
9825 unsigned numgroups = groups->length ();
9826 auto_vec<tree> old_heads (numgroups);
9827 auto_vec<tree *> old_headps (numgroups);
9828 auto_vec<tree> new_heads (numgroups);
9829 auto_vec<tree> old_succs (numgroups);
9830 bool map_at_start = (list_p == (*groups)[0].grp_start);
9832 tree *new_grp_tail = NULL;
9834 /* Stash the start & end nodes of each mapping group before we start
9835 modifying the list. */
9836 FOR_EACH_VEC_ELT (*groups, i, grp)
9838 old_headps.quick_push (grp->grp_start);
9839 old_heads.quick_push (*grp->grp_start);
9840 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
9843 /* And similarly, the heads of the groups in the order we want to rearrange
9844 the list to. */
9845 for (omp_mapping_group *w = head; w; w = w->next)
9846 new_heads.quick_push (*w->grp_start);
9848 FOR_EACH_VEC_ELT (*groups, i, grp)
9850 gcc_assert (head);
9852 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
9854 /* a {b c d} {e f g} h i j (original)
9856 a {k l m} {e f g} h i j (inserted new group on last iter)
9858 a {k l m} {n o p} h i j (this time, chain last group to new one)
9859 ^new_grp_tail
9861 *new_grp_tail = new_heads[i];
9863 else if (new_grp_tail)
9865 /* a {b c d} e {f g h} i j k (original)
9867 a {l m n} e {f g h} i j k (gap after last iter's group)
9869 a {l m n} e {o p q} h i j (chain last group to old successor)
9870 ^new_grp_tail
9872 *new_grp_tail = old_succs[i - 1];
9873 *old_headps[i] = new_heads[i];
9875 else
9877 /* The first inserted group -- point to new group, and leave end
9878 open.
9879 a {b c d} e f
9881 a {g h i...
9883 *grp->grp_start = new_heads[i];
9886 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
9888 head = head->next;
9891 if (new_grp_tail)
9892 *new_grp_tail = old_succs[numgroups - 1];
9894 gcc_assert (!head);
9896 return map_at_start ? (*groups)[0].grp_start : list_p;
9899 /* DECL is supposed to have lastprivate semantics in the outer contexts
9900 of combined/composite constructs, starting with OCTX.
9901 Add needed lastprivate, shared or map clause if no data sharing or
9902 mapping clause is present. IMPLICIT_P is true if it is an implicit
9903 clause (IV on simd), in which case the lastprivate will not be
9904 copied to some constructs. */
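/* E.g. (illustrative): given

     #pragma omp target parallel for lastprivate(x)

   the walk below marks X as shared on the combined parallel and as mapped
   on the combined target when nothing explicit is present there, so the
   final value of X survives the construct.  */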
9906 static void
9907 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9908 tree decl, bool implicit_p)
9910 struct gimplify_omp_ctx *orig_octx = octx;
9911 for (; octx; octx = octx->outer_context)
9913 if ((octx->region_type == ORT_COMBINED_PARALLEL
9914 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9915 && splay_tree_lookup (octx->variables,
9916 (splay_tree_key) decl) == NULL)
9918 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9919 continue;
9921 if ((octx->region_type & ORT_TASK) != 0
9922 && octx->combined_loop
9923 && splay_tree_lookup (octx->variables,
9924 (splay_tree_key) decl) == NULL)
9926 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9927 continue;
9929 if (implicit_p
9930 && octx->region_type == ORT_WORKSHARE
9931 && octx->combined_loop
9932 && splay_tree_lookup (octx->variables,
9933 (splay_tree_key) decl) == NULL
9934 && octx->outer_context
9935 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9936 && splay_tree_lookup (octx->outer_context->variables,
9937 (splay_tree_key) decl) == NULL)
9939 octx = octx->outer_context;
9940 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9941 continue;
9943 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9944 && octx->combined_loop
9945 && splay_tree_lookup (octx->variables,
9946 (splay_tree_key) decl) == NULL
9947 && !omp_check_private (octx, decl, false))
9949 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9950 continue;
9952 if (octx->region_type == ORT_COMBINED_TARGET)
9954 splay_tree_node n = splay_tree_lookup (octx->variables,
9955 (splay_tree_key) decl);
9956 if (n == NULL)
9958 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9959 octx = octx->outer_context;
9961 else if (!implicit_p
9962 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9964 n->value &= ~(GOVD_FIRSTPRIVATE
9965 | GOVD_FIRSTPRIVATE_IMPLICIT
9966 | GOVD_EXPLICIT);
9967 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9968 octx = octx->outer_context;
9971 break;
9973 if (octx && (implicit_p || octx != orig_octx))
9974 omp_notice_variable (octx, decl, true);
9977 /* If we have mappings INNER and OUTER, where INNER is a component access and
9978 OUTER is a mapping of the whole containing struct, check that the mappings
9979 are compatible. We'll be deleting the inner mapping, so we need to make
9980 sure the outer mapping does (at least) the same transfers to/from the device
9981 as the inner mapping. */
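/* E.g. (hypothetical clauses): an outer  map(tofrom: s)  subsumes an
   inner  map(to: s.a), per the TOFROM case below; an outer  map(to: s)
   with an inner  map(from: s.a)  does not, and is rejected with the error
   at the end of the function.  */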
9983 bool
9984 omp_check_mapping_compatibility (location_t loc,
9985 omp_mapping_group *outer,
9986 omp_mapping_group *inner)
9988 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
9990 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
9991 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
9993 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
9994 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
9996 if (outer_kind == inner_kind)
9997 return true;
9999 switch (outer_kind)
10001 case GOMP_MAP_ALWAYS_TO:
10002 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10003 || inner_kind == GOMP_MAP_ALLOC
10004 || inner_kind == GOMP_MAP_TO)
10005 return true;
10006 break;
10008 case GOMP_MAP_ALWAYS_FROM:
10009 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10010 || inner_kind == GOMP_MAP_ALLOC
10011 || inner_kind == GOMP_MAP_FROM)
10012 return true;
10013 break;
10015 case GOMP_MAP_TO:
10016 case GOMP_MAP_FROM:
10017 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10018 || inner_kind == GOMP_MAP_ALLOC)
10019 return true;
10020 break;
10022 case GOMP_MAP_ALWAYS_TOFROM:
10023 case GOMP_MAP_TOFROM:
10024 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10025 || inner_kind == GOMP_MAP_ALLOC
10026 || inner_kind == GOMP_MAP_TO
10027 || inner_kind == GOMP_MAP_FROM
10028 || inner_kind == GOMP_MAP_TOFROM)
10029 return true;
10030 break;
10032 default:
10036 error_at (loc, "data movement for component %qE is not compatible with "
10037 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10038 OMP_CLAUSE_DECL (first_outer));
10040 return false;
10043 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10044 clause dependencies we handle for now are struct element mappings and
10045 whole-struct mappings on the same directive, and duplicate clause
10046 detection. */
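/* Sketch (illustrative): on a single OpenACC directive,  copy(s.a, s.a)
   triggers the duplicate-component error below, while  copy(s, s.a)  has
   the S.A group checked against the whole-struct mapping of S and then
   deleted as redundant.  */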
10048 void
10049 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10050 hash_map<tree_operand_hash_no_se,
10051 omp_mapping_group *> *grpmap)
10053 int i;
10054 omp_mapping_group *grp;
10055 hash_set<tree_operand_hash> *seen_components = NULL;
10056 hash_set<tree_operand_hash> *shown_error = NULL;
10058 FOR_EACH_VEC_ELT (*groups, i, grp)
10060 tree grp_end = grp->grp_end;
10061 tree decl = OMP_CLAUSE_DECL (grp_end);
10063 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10065 if (DECL_P (decl))
10066 continue;
10068 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10069 while (TREE_CODE (c) == ARRAY_REF)
10070 c = TREE_OPERAND (c, 0);
10071 if (TREE_CODE (c) != COMPONENT_REF)
10072 continue;
10073 if (!seen_components)
10074 seen_components = new hash_set<tree_operand_hash> ();
10075 if (!shown_error)
10076 shown_error = new hash_set<tree_operand_hash> ();
10077 if (seen_components->contains (c)
10078 && !shown_error->contains (c))
10080 error_at (OMP_CLAUSE_LOCATION (grp_end),
10081 "%qE appears more than once in map clauses",
10082 OMP_CLAUSE_DECL (grp_end));
10083 shown_error->add (c);
10085 else
10086 seen_components->add (c);
10088 omp_mapping_group *struct_group;
10089 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10090 && *grp->grp_start == grp_end)
10092 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10093 struct_group, grp);
10094 /* Remove the whole of this mapping -- redundant. */
10095 grp->deleted = true;
10099 if (seen_components)
10100 delete seen_components;
10101 if (shown_error)
10102 delete shown_error;
10105 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10106 is linked to the previous node pointed to by INSERT_AT. */
10108 static tree *
10109 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10111 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10112 *insert_at = newnode;
10113 return &OMP_CLAUSE_CHAIN (newnode);
10116 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10117 pointed to by chain MOVE_AFTER instead. */
10119 static void
10120 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10122 gcc_assert (node == *old_pos);
10123 *old_pos = OMP_CLAUSE_CHAIN (node);
10124 OMP_CLAUSE_CHAIN (node) = *move_after;
10125 *move_after = node;
10128 /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
10129 LAST_NODE to after MOVE_AFTER chain. Similar to below function, but no
10130 new nodes are prepended to the list before splicing into the new position.
10131 Return the position we should continue scanning the list at, or NULL to
10132 stay where we were. */
10134 static tree *
10135 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10136 tree *move_after)
10138 if (first_ptr == move_after)
10139 return NULL;
10141 tree tmp = *first_ptr;
10142 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10143 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10144 *move_after = tmp;
10146 return first_ptr;
10149 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10150 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10151 pointer MOVE_AFTER.
10153 The latter list was previously part of the OMP clause list, and the former
10154 (prepended) part is comprised of new nodes.
10156 We start with a list of nodes starting with a struct mapping node. We
10157 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10158 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10159 the group of mapping nodes we are currently processing (from the chain
10160 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10161 we should continue processing from, or NULL to stay where we were.
10163 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10164 different) is worked through below. Here we are processing LAST_NODE, and
10165 FIRST_PTR points at the preceding mapping clause:
10167 #. mapping node chain
10168 ---------------------------------------------------
10169 A. struct_node [->B]
10170 B. comp_1 [->C]
10171 C. comp_2 [->D (move_after)]
10172 D. map_to_3 [->E]
10173 E. attach_3 [->F (first_ptr)]
10174 F. map_to_4 [->G (continue_at)]
10175 G. attach_4 (last_node) [->H]
10176 H. ...
10178 *last_new_tail = *first_ptr;
10180 I. new_node (first_new) [->F (last_new_tail)]
10182 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10184 #. mapping node chain
10185 ----------------------------------------------------
10186 A. struct_node [->B]
10187 B. comp_1 [->C]
10188 C. comp_2 [->D (move_after)]
10189 D. map_to_3 [->E]
10190 E. attach_3 [->H (first_ptr)]
10191 F. map_to_4 [->G (continue_at)]
10192 G. attach_4 (last_node) [->H]
10193 H. ...
10195 I. new_node (first_new) [->F (last_new_tail)]
10197 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10199 #. mapping node chain
10200 ---------------------------------------------------
10201 A. struct_node [->B]
10202 B. comp_1 [->C]
10203 C. comp_2 [->D (move_after)]
10204 D. map_to_3 [->E]
10205 E. attach_3 [->H (continue_at)]
10206 F. map_to_4 [->G]
10207 G. attach_4 (last_node) [->D]
10208 H. ...
10210 I. new_node (first_new) [->F (last_new_tail)]
10212 *move_after = first_new;
10214 #. mapping node chain
10215 ---------------------------------------------------
10216 A. struct_node [->B]
10217 B. comp_1 [->C]
10218 C. comp_2 [->I (move_after)]
10219 D. map_to_3 [->E]
10220 E. attach_3 [->H (continue_at)]
10221 F. map_to_4 [->G]
10222 G. attach_4 (last_node) [->D]
10223 H. ...
10224 I. new_node (first_new) [->F (last_new_tail)]
10226 or, in order:
10228 #. mapping node chain
10229 ---------------------------------------------------
10230 A. struct_node [->B]
10231 B. comp_1 [->C]
10232 C. comp_2 [->I (move_after)]
10233 I. new_node (first_new) [->F (last_new_tail)]
10234 F. map_to_4 [->G]
10235 G. attach_4 (last_node) [->D]
10236 D. map_to_3 [->E]
10237 E. attach_3 [->H (continue_at)]
10238 H. ...
10241 static tree *
10242 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10243 tree *first_ptr, tree last_node,
10244 tree *move_after)
10246 tree *continue_at = NULL;
10247 *last_new_tail = *first_ptr;
10248 if (first_ptr == move_after)
10249 *move_after = first_new;
10250 else
10252 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10253 continue_at = first_ptr;
10254 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10255 *move_after = first_new;
10257 return continue_at;
10260 /* Mapping struct members causes an additional set of nodes to be created,
10261 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10262 number of members being mapped, in order of ascending position (address or
10263 bitwise).
10265 We scan through the list of mapping clauses, calling this function for each
10266 struct member mapping we find, and build up the list of mappings after the
10267 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10268 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10269 moved into place in the sorted list.
10271 struct {
10272 int *a;
10273 int *b;
10274 int c;
10275 int *d;
10278 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10279 struct.d[0:n])
10281 GOMP_MAP_STRUCT (4)
10282 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10283 GOMP_MAP_ALLOC (struct.a)
10284 GOMP_MAP_ALLOC (struct.b)
10285 GOMP_MAP_TO (struct.c)
10286 GOMP_MAP_ALLOC (struct.d)
10289 In the case where we are mapping references to pointers, or in Fortran if
10290 we are mapping an array with a descriptor, additional nodes may be created
10291 after the struct node list also.
10293 The return code is either a pointer to the next node to process (if the
10294 list has been rearranged), else NULL to continue with the next node in the
10295 original list. */
10297 static tree *
10298 omp_accumulate_sibling_list (enum omp_region_type region_type,
10299 enum tree_code code,
10300 hash_map<tree_operand_hash, tree>
10301 *&struct_map_to_clause, tree *grp_start_p,
10302 tree grp_end, tree *inner)
10304 poly_offset_int coffset;
10305 poly_int64 cbitpos;
10306 tree ocd = OMP_CLAUSE_DECL (grp_end);
10307 bool openmp = !(region_type & ORT_ACC);
10308 tree *continue_at = NULL;
10310 while (TREE_CODE (ocd) == ARRAY_REF)
10311 ocd = TREE_OPERAND (ocd, 0);
10313 if (INDIRECT_REF_P (ocd))
10314 ocd = TREE_OPERAND (ocd, 0);
10316 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10318 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10319 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10320 == GOMP_MAP_ATTACH_DETACH)
10321 || (OMP_CLAUSE_MAP_KIND (grp_end)
10322 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10323 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10324 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10326 /* FIXME: If we're not mapping the base pointer in some other clause on this
10327 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10328 early-exit. */
10329 if (openmp && attach_detach)
10330 return NULL;
10332 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10334 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10335 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10337 OMP_CLAUSE_SET_MAP_KIND (l, k);
10339 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10341 OMP_CLAUSE_SIZE (l)
10342 = (!attach ? size_int (1)
10343 : (DECL_P (OMP_CLAUSE_DECL (l))
10344 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10345 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10346 if (struct_map_to_clause == NULL)
10347 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10348 struct_map_to_clause->put (base, l);
10350 if (ptr || attach_detach)
10352 tree extra_node;
10353 tree alloc_node
10354 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10355 &extra_node);
10356 OMP_CLAUSE_CHAIN (l) = alloc_node;
10358 tree *insert_node_pos = grp_start_p;
10360 if (extra_node)
10362 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10363 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10365 else
10366 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10368 *insert_node_pos = l;
10370 else
10372 gcc_assert (*grp_start_p == grp_end);
10373 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10376 tree noind = omp_strip_indirections (base);
10378 if (!openmp
10379 && (region_type & ORT_TARGET)
10380 && TREE_CODE (noind) == COMPONENT_REF)
10382 /* The base for this component access is a struct component access
10383 itself. Insert a node to be processed on the next iteration of
10384 our caller's loop, which will subsequently be turned into a new,
10385 inner GOMP_MAP_STRUCT mapping.
10387 We need to do this else the non-DECL_P base won't be
10388 rewritten correctly in the offloaded region. */
10389 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10390 OMP_CLAUSE_MAP);
10391 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10392 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10393 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10394 *inner = c2;
10395 return NULL;
10398 tree sdecl = omp_strip_components_and_deref (base);
10400 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10402 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10403 OMP_CLAUSE_MAP);
10404 bool base_ref
10405 = (INDIRECT_REF_P (base)
10406 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10407 == REFERENCE_TYPE)
10408 || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
10409 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10410 (TREE_OPERAND (base, 0), 0)))
10411 == REFERENCE_TYPE))));
10412 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10413 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10414 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10415 OMP_CLAUSE_DECL (c2) = sdecl;
10416 tree baddr = build_fold_addr_expr (base);
10417 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10418 ptrdiff_type_node, baddr);
10419 /* This isn't going to be good enough when we add support for more
10420 complicated lvalue expressions. FIXME. */
10421 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10422 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10423 sdecl = build_simple_mem_ref (sdecl);
10424 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10425 ptrdiff_type_node, sdecl);
10426 OMP_CLAUSE_SIZE (c2)
10427 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10428 ptrdiff_type_node, baddr, decladdr);
10429 /* Insert after struct node. */
10430 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10431 OMP_CLAUSE_CHAIN (l) = c2;
10434 return NULL;
10436 else if (struct_map_to_clause)
10438 tree *osc = struct_map_to_clause->get (base);
10439 tree *sc = NULL, *scp = NULL;
10440 sc = &OMP_CLAUSE_CHAIN (*osc);
10441 /* The struct mapping might be immediately followed by a
10442 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10443 indirect access or a reference, or both. (This added node is removed
10444 in omp-low.c after it has been processed there.) */
10445 if (*sc != grp_end
10446 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10447 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10448 sc = &OMP_CLAUSE_CHAIN (*sc);
10449 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10450 if ((ptr || attach_detach) && sc == grp_start_p)
10451 break;
10452 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10453 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10454 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10455 break;
10456 else
10458 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10459 poly_offset_int offset;
10460 poly_int64 bitpos;
10462 if (TREE_CODE (sc_decl) == ARRAY_REF)
10464 while (TREE_CODE (sc_decl) == ARRAY_REF)
10465 sc_decl = TREE_OPERAND (sc_decl, 0);
10466 if (TREE_CODE (sc_decl) != COMPONENT_REF
10467 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10468 break;
10470 else if (INDIRECT_REF_P (sc_decl)
10471 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10472 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10473 == REFERENCE_TYPE))
10474 sc_decl = TREE_OPERAND (sc_decl, 0);
10476 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10477 if (!base2 || !operand_equal_p (base2, base, 0))
10478 break;
10479 if (scp)
10480 continue;
10481 if (maybe_lt (coffset, offset)
10482 || (known_eq (coffset, offset)
10483 && maybe_lt (cbitpos, bitpos)))
10485 if (ptr || attach_detach)
10486 scp = sc;
10487 else
10488 break;
10492 if (!attach)
10493 OMP_CLAUSE_SIZE (*osc)
10494 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10495 if (ptr || attach_detach)
10497 tree cl = NULL_TREE, extra_node;
10498 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10499 grp_end, &extra_node);
10500 tree *tail_chain = NULL;
10502 /* Here, we have:
10504 grp_end : the last (or only) node in this group.
10505 grp_start_p : pointer to the first node in a pointer mapping group
10506 up to and including GRP_END.
10507 sc : pointer to the chain for the end of the struct component
10508 list.
10509 scp : pointer to the chain for the sorted position at which we
10510 should insert in the middle of the struct component list
10511 (else NULL to insert at end).
10512 alloc_node : the "alloc" node for the structure (pointer-type)
10513 component. We insert at SCP (if present), else SC
10514 (the end of the struct component list).
10515 extra_node : a newly-synthesized node for an additional indirect
10516 pointer mapping or a Fortran pointer set, if needed.
10517 cl : first node to prepend before grp_start_p.
10518 tail_chain : pointer to chain of last prepended node.
10520 The general idea is we move the nodes for this struct mapping
10521 together: the alloc node goes into the sorted list directly after
10522 the struct mapping, and any extra nodes (together with the nodes
10523 mapping arrays pointed to by struct components) get moved after
10524 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10525 the end of the struct component mapping list. It's important that
10526 the alloc_node comes first in that case because it's part of the
10527 sorted component mapping list (but subsequent nodes are not!). */
10529 if (scp)
10530 omp_siblist_insert_node_after (alloc_node, scp);
10532 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10533 already inserted it) and the extra_node (if it is present). The
10534 list can be empty if we added alloc_node above and there is no
10535 extra node. */
10536 if (scp && extra_node)
10538 cl = extra_node;
10539 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10541 else if (extra_node)
10543 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10544 cl = alloc_node;
10545 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10547 else if (!scp)
10549 cl = alloc_node;
10550 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10553 continue_at
10554 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10555 grp_start_p, grp_end,
10557 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10559 else if (*sc != grp_end)
10561 gcc_assert (*grp_start_p == grp_end);
10563 /* We are moving the current node back to a previous struct node:
10564 the node that used to point to the current node will now point to
10565 the next node. */
10566 continue_at = grp_start_p;
10567 /* In the non-pointer case, the mapping clause itself is moved into
10568 the correct position in the struct component list, which in this
10569 case is just SC. */
10570 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10573 return continue_at;
10576 /* Scan through GROUPS, and create sorted structure sibling lists without
10577 gimplifying. */
10579 static bool
10580 omp_build_struct_sibling_lists (enum tree_code code,
10581 enum omp_region_type region_type,
10582 vec<omp_mapping_group> *groups,
10583 hash_map<tree_operand_hash_no_se,
10584 omp_mapping_group *> **grpmap,
10585 tree *list_p)
10587 unsigned i;
10588 omp_mapping_group *grp;
10589 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10590 bool success = true;
10591 tree *new_next = NULL;
10592 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10593 auto_vec<omp_mapping_group> pre_hwm_groups;
10595 FOR_EACH_VEC_ELT (*groups, i, grp)
10597 tree c = grp->grp_end;
10598 tree decl = OMP_CLAUSE_DECL (c);
10599 tree grp_end = grp->grp_end;
10600 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10602 if (new_next)
10603 grp->grp_start = new_next;
10605 new_next = NULL;
10607 tree *grp_start_p = grp->grp_start;
10609 if (DECL_P (decl))
10610 continue;
10612 /* Skip groups we marked for deletion in
10613 oacc_resolve_clause_dependencies. */
10614 if (grp->deleted)
10615 continue;
10617 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10618 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10620 /* Don't process an array descriptor that isn't inside a derived type
10621 as a struct (the GOMP_MAP_POINTER following will have the form
10622 "var.data", but such mappings are handled specially). */
10623 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10624 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10625 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10626 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10627 continue;
10630 tree d = decl;
10631 if (TREE_CODE (d) == ARRAY_REF)
10633 while (TREE_CODE (d) == ARRAY_REF)
10634 d = TREE_OPERAND (d, 0);
10635 if (TREE_CODE (d) == COMPONENT_REF
10636 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10637 decl = d;
10639 if (d == decl
10640 && INDIRECT_REF_P (decl)
10641 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10642 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10643 == REFERENCE_TYPE)
10644 && (OMP_CLAUSE_MAP_KIND (c)
10645 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10646 decl = TREE_OPERAND (decl, 0);
10648 STRIP_NOPS (decl);
10650 if (TREE_CODE (decl) != COMPONENT_REF)
10651 continue;
10653 /* If we're mapping the whole struct in another node, skip adding this
10654 node to a sibling list. */
10655 omp_mapping_group *wholestruct;
10656 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10657 &wholestruct))
10659 if (!(region_type & ORT_ACC)
10660 && *grp_start_p == grp_end)
10661 /* Remove the whole of this mapping -- redundant. */
10662 grp->deleted = true;
10664 continue;
10667 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10668 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10669 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10670 && code != OACC_UPDATE
10671 && code != OMP_TARGET_UPDATE)
10673 if (error_operand_p (decl))
10675 success = false;
10676 goto error_out;
10679 tree stype = TREE_TYPE (decl);
10680 if (TREE_CODE (stype) == REFERENCE_TYPE)
10681 stype = TREE_TYPE (stype);
10682 if (TYPE_SIZE_UNIT (stype) == NULL
10683 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10685 error_at (OMP_CLAUSE_LOCATION (c),
10686 "mapping field %qE of variable length "
10687 "structure", OMP_CLAUSE_DECL (c));
10688 success = false;
10689 goto error_out;
10692 tree inner = NULL_TREE;
10694 new_next
10695 = omp_accumulate_sibling_list (region_type, code,
10696 struct_map_to_clause, grp_start_p,
10697 grp_end, &inner);
10699 if (inner)
10701 if (new_next && *new_next == NULL_TREE)
10702 *new_next = inner;
10703 else
10704 *tail = inner;
10706 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10707 omp_mapping_group newgrp;
10708 newgrp.grp_start = new_next ? new_next : tail;
10709 newgrp.grp_end = inner;
10710 newgrp.mark = UNVISITED;
10711 newgrp.sibling = NULL;
10712 newgrp.deleted = false;
10713 newgrp.next = NULL;
10714 groups->safe_push (newgrp);
10716 /* !!! Growing GROUPS might invalidate the pointers in the group
10717 map. Rebuild it here. This is a bit inefficient, but
10718 shouldn't happen very often. */
10719 delete (*grpmap);
10720 *grpmap
10721 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10722 sentinel);
10724 tail = &OMP_CLAUSE_CHAIN (inner);
10729 /* Delete groups marked for deletion above. At this point the order of the
10730 groups may no longer correspond to the order of the underlying list,
10731 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10732 deleted nodes... */
10734 FOR_EACH_VEC_ELT (*groups, i, grp)
10735 if (grp->deleted)
10736 for (tree d = *grp->grp_start;
10737 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10738 d = OMP_CLAUSE_CHAIN (d))
10739 OMP_CLAUSE_DECL (d) = NULL_TREE;
10741 /* ...then sweep through the list removing the now-empty nodes. */
10743 tail = list_p;
10744 while (*tail)
10746 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10747 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
10748 *tail = OMP_CLAUSE_CHAIN (*tail);
10749 else
10750 tail = &OMP_CLAUSE_CHAIN (*tail);
10753 error_out:
10754 if (struct_map_to_clause)
10755 delete struct_map_to_clause;
10757 return success;
10760 /* Scan the OMP clauses in *LIST_P, installing mappings into new
10761 and previous omp contexts. */
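/* For OMP_TARGET-family directives, the map clauses are preprocessed
   before the main scanning loop below: mapping groups are gathered and
   indexed, struct sibling lists are built, groups are topologically
   sorted so that base pointers precede the mappings that depend on them,
   ALLOC/RELEASE/DELETE groups are segregated to the end, and the clause
   list is reordered in place.  */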
10763 static void
10764 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10765 enum omp_region_type region_type,
10766 enum tree_code code)
10768 struct gimplify_omp_ctx *ctx, *outer_ctx;
10769 tree c;
10770 tree *orig_list_p = list_p;
10771 int handled_depend_iterators = -1;
10772 int nowait = -1;
10774 ctx = new_omp_context (region_type);
10775 ctx->code = code;
10776 outer_ctx = ctx->outer_context;
10777 if (code == OMP_TARGET)
10779 if (!lang_GNU_Fortran ())
10780 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10781 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10782 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10783 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10785 if (!lang_GNU_Fortran ())
10786 switch (code)
10788 case OMP_TARGET:
10789 case OMP_TARGET_DATA:
10790 case OMP_TARGET_ENTER_DATA:
10791 case OMP_TARGET_EXIT_DATA:
10792 case OACC_DECLARE:
10793 case OACC_HOST_DATA:
10794 case OACC_PARALLEL:
10795 case OACC_KERNELS:
10796 ctx->target_firstprivatize_array_bases = true;
10797 default:
10798 break;
10801 if (code == OMP_TARGET
10802 || code == OMP_TARGET_DATA
10803 || code == OMP_TARGET_ENTER_DATA
10804 || code == OMP_TARGET_EXIT_DATA)
10806 vec<omp_mapping_group> *groups;
10807 groups = omp_gather_mapping_groups (list_p);
10808 if (groups)
10810 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10811 grpmap = omp_index_mapping_groups (groups);
10813 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10814 list_p);
10816 omp_mapping_group *outlist = NULL;
10818 /* Topological sorting may fail if we have duplicate nodes, which
10819 we should have detected and shown an error for already. Skip
10820 sorting in that case. */
10821 if (seen_error ())
10822 goto failure;
10824 delete grpmap;
10825 delete groups;
10827 /* Rebuild now that we have struct sibling lists. */
10828 groups = omp_gather_mapping_groups (list_p);
10829 grpmap = omp_index_mapping_groups (groups);
10831 outlist = omp_tsort_mapping_groups (groups, grpmap);
10832 outlist = omp_segregate_mapping_groups (outlist);
10833 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10835 failure:
10836 delete grpmap;
10837 delete groups;
10840 /* OpenMP map clauses with 'present' need to go in front of those
10841 without. */
10842 tree present_map_head = NULL;
10843 tree *present_map_tail_p = &present_map_head;
10844 tree *first_map_clause_p = NULL;
10846 for (tree *c_p = list_p; *c_p; )
10848 tree c = *c_p;
10849 tree *next_c_p = &OMP_CLAUSE_CHAIN (c);
10851 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
10853 if (!first_map_clause_p)
10854 first_map_clause_p = c_p;
10855 switch (OMP_CLAUSE_MAP_KIND (c))
10857 case GOMP_MAP_PRESENT_ALLOC:
10858 case GOMP_MAP_PRESENT_FROM:
10859 case GOMP_MAP_PRESENT_TO:
10860 case GOMP_MAP_PRESENT_TOFROM:
10861 next_c_p = c_p;
10862 *c_p = OMP_CLAUSE_CHAIN (c);
10864 OMP_CLAUSE_CHAIN (c) = NULL;
10865 *present_map_tail_p = c;
10866 present_map_tail_p = &OMP_CLAUSE_CHAIN (c);
10868 break;
10870 default:
10871 break;
10875 c_p = next_c_p;
10877 if (first_map_clause_p && present_map_head)
10879 tree next = *first_map_clause_p;
10880 *first_map_clause_p = present_map_head;
10881 *present_map_tail_p = next;
10884 else if (region_type & ORT_ACC)
10886 vec<omp_mapping_group> *groups;
10887 groups = omp_gather_mapping_groups (list_p);
10888 if (groups)
10890 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10891 grpmap = omp_index_mapping_groups (groups);
10893 oacc_resolve_clause_dependencies (groups, grpmap);
10894 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10895 list_p);
10897 delete groups;
10898 delete grpmap;
10902 while ((c = *list_p) != NULL)
10904 bool remove = false;
10905 bool notice_outer = true;
10906 const char *check_non_private = NULL;
10907 unsigned int flags;
10908 tree decl;
10910 switch (OMP_CLAUSE_CODE (c))
10912 case OMP_CLAUSE_PRIVATE:
10913 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10914 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10916 flags |= GOVD_PRIVATE_OUTER_REF;
10917 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
10919 else
10920 notice_outer = false;
10921 goto do_add;
10922 case OMP_CLAUSE_SHARED:
10923 flags = GOVD_SHARED | GOVD_EXPLICIT;
10924 goto do_add;
10925 case OMP_CLAUSE_FIRSTPRIVATE:
10926 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10927 check_non_private = "firstprivate";
10928 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10930 gcc_assert (code == OMP_TARGET);
10931 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
10933 goto do_add;
10934 case OMP_CLAUSE_LASTPRIVATE:
10935 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10936 switch (code)
10938 case OMP_DISTRIBUTE:
10939 error_at (OMP_CLAUSE_LOCATION (c),
10940 "conditional %<lastprivate%> clause on "
10941 "%qs construct", "distribute");
10942 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10943 break;
10944 case OMP_TASKLOOP:
10945 error_at (OMP_CLAUSE_LOCATION (c),
10946 "conditional %<lastprivate%> clause on "
10947 "%qs construct", "taskloop");
10948 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10949 break;
10950 default:
10951 break;
10953 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
10954 if (code != OMP_LOOP)
10955 check_non_private = "lastprivate";
10956 decl = OMP_CLAUSE_DECL (c);
10957 if (error_operand_p (decl))
10958 goto do_add;
10959 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
10960 && !lang_hooks.decls.omp_scalar_p (decl, true))
10962 error_at (OMP_CLAUSE_LOCATION (c),
10963 "non-scalar variable %qD in conditional "
10964 "%<lastprivate%> clause", decl);
10965 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10967 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10968 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
10969 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
10970 false);
10971 goto do_add;
10972 case OMP_CLAUSE_REDUCTION:
10973 if (OMP_CLAUSE_REDUCTION_TASK (c))
10975 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10977 if (nowait == -1)
10978 nowait = omp_find_clause (*list_p,
10979 OMP_CLAUSE_NOWAIT) != NULL_TREE;
10980 if (nowait
10981 && (outer_ctx == NULL
10982 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
10984 error_at (OMP_CLAUSE_LOCATION (c),
10985 "%<task%> reduction modifier on a construct "
10986 "with a %<nowait%> clause");
10987 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10990 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
10992 error_at (OMP_CLAUSE_LOCATION (c),
10993 "invalid %<task%> reduction modifier on construct "
10994 "other than %<parallel%>, %qs, %<sections%> or "
10995 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10996 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10999 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11000 switch (code)
11002 case OMP_SECTIONS:
11003 error_at (OMP_CLAUSE_LOCATION (c),
11004 "%<inscan%> %<reduction%> clause on "
11005 "%qs construct", "sections");
11006 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11007 break;
11008 case OMP_PARALLEL:
11009 error_at (OMP_CLAUSE_LOCATION (c),
11010 "%<inscan%> %<reduction%> clause on "
11011 "%qs construct", "parallel");
11012 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11013 break;
11014 case OMP_TEAMS:
11015 error_at (OMP_CLAUSE_LOCATION (c),
11016 "%<inscan%> %<reduction%> clause on "
11017 "%qs construct", "teams");
11018 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11019 break;
11020 case OMP_TASKLOOP:
11021 error_at (OMP_CLAUSE_LOCATION (c),
11022 "%<inscan%> %<reduction%> clause on "
11023 "%qs construct", "taskloop");
11024 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11025 break;
11026 case OMP_SCOPE:
11027 error_at (OMP_CLAUSE_LOCATION (c),
11028 "%<inscan%> %<reduction%> clause on "
11029 "%qs construct", "scope");
11030 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11031 break;
11032 default:
11033 break;
11035 /* FALLTHRU */
11036 case OMP_CLAUSE_IN_REDUCTION:
11037 case OMP_CLAUSE_TASK_REDUCTION:
11038 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
11039 /* OpenACC permits reductions on private variables. */
11040 if (!(region_type & ORT_ACC)
11041 /* taskgroup is actually not a worksharing region. */
11042 && code != OMP_TASKGROUP)
11043 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
11044 decl = OMP_CLAUSE_DECL (c);
11045 if (TREE_CODE (decl) == MEM_REF)
11047 tree type = TREE_TYPE (decl);
11048 bool saved_into_ssa = gimplify_ctxp->into_ssa;
11049 gimplify_ctxp->into_ssa = false;
11050 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
11051 NULL, is_gimple_val, fb_rvalue, false)
11052 == GS_ERROR)
11054 gimplify_ctxp->into_ssa = saved_into_ssa;
11055 remove = true;
11056 break;
11058 gimplify_ctxp->into_ssa = saved_into_ssa;
11059 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11060 if (DECL_P (v))
11062 omp_firstprivatize_variable (ctx, v);
11063 omp_notice_variable (ctx, v, true);
11065 decl = TREE_OPERAND (decl, 0);
11066 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11068 gimplify_ctxp->into_ssa = false;
11069 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
11070 NULL, is_gimple_val, fb_rvalue, false)
11071 == GS_ERROR)
11073 gimplify_ctxp->into_ssa = saved_into_ssa;
11074 remove = true;
11075 break;
11077 gimplify_ctxp->into_ssa = saved_into_ssa;
11078 v = TREE_OPERAND (decl, 1);
11079 if (DECL_P (v))
11081 omp_firstprivatize_variable (ctx, v);
11082 omp_notice_variable (ctx, v, true);
11084 decl = TREE_OPERAND (decl, 0);
11086 if (TREE_CODE (decl) == ADDR_EXPR
11087 || TREE_CODE (decl) == INDIRECT_REF)
11088 decl = TREE_OPERAND (decl, 0);
11090 goto do_add_decl;
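/* A reduction over an array section, e.g. (hypothetical user code)

     #pragma omp parallel for reduction(+: a[2:n])

   arrives here as a MEM_REF; the code above gimplifies the section
   bound and offset and firstprivatizes any variables (such as N)
   appearing in them before stripping down to the base decl.  */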
11091 case OMP_CLAUSE_LINEAR:
11092 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
11093 is_gimple_val, fb_rvalue) == GS_ERROR)
11095 remove = true;
11096 break;
11098 else
11100 if (code == OMP_SIMD
11101 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11103 struct gimplify_omp_ctx *octx = outer_ctx;
11104 if (octx
11105 && octx->region_type == ORT_WORKSHARE
11106 && octx->combined_loop
11107 && !octx->distribute)
11109 if (octx->outer_context
11110 && (octx->outer_context->region_type
11111 == ORT_COMBINED_PARALLEL))
11112 octx = octx->outer_context->outer_context;
11113 else
11114 octx = octx->outer_context;
11116 if (octx
11117 && octx->region_type == ORT_WORKSHARE
11118 && octx->combined_loop
11119 && octx->distribute)
11121 error_at (OMP_CLAUSE_LOCATION (c),
11122 "%<linear%> clause for variable other than "
11123 "loop iterator specified on construct "
11124 "combined with %<distribute%>");
11125 remove = true;
11126 break;
11129 /* For a combined #pragma omp parallel for simd, we need to put
11130 lastprivate and perhaps firstprivate too on the
11131 parallel. Similarly for #pragma omp for simd. */
11132 struct gimplify_omp_ctx *octx = outer_ctx;
11133 bool taskloop_seen = false;
11134 decl = NULL_TREE;
11137 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11138 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11139 break;
11140 decl = OMP_CLAUSE_DECL (c);
11141 if (error_operand_p (decl))
11143 decl = NULL_TREE;
11144 break;
11146 flags = GOVD_SEEN;
11147 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11148 flags |= GOVD_FIRSTPRIVATE;
11149 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11150 flags |= GOVD_LASTPRIVATE;
11151 if (octx
11152 && octx->region_type == ORT_WORKSHARE
11153 && octx->combined_loop)
11155 if (octx->outer_context
11156 && (octx->outer_context->region_type
11157 == ORT_COMBINED_PARALLEL))
11158 octx = octx->outer_context;
11159 else if (omp_check_private (octx, decl, false))
11160 break;
11162 else if (octx
11163 && (octx->region_type & ORT_TASK) != 0
11164 && octx->combined_loop)
11165 taskloop_seen = true;
11166 else if (octx
11167 && octx->region_type == ORT_COMBINED_PARALLEL
11168 && ((ctx->region_type == ORT_WORKSHARE
11169 && octx == outer_ctx)
11170 || taskloop_seen))
11171 flags = GOVD_SEEN | GOVD_SHARED;
11172 else if (octx
11173 && ((octx->region_type & ORT_COMBINED_TEAMS)
11174 == ORT_COMBINED_TEAMS))
11175 flags = GOVD_SEEN | GOVD_SHARED;
11176 else if (octx
11177 && octx->region_type == ORT_COMBINED_TARGET)
11179 if (flags & GOVD_LASTPRIVATE)
11180 flags = GOVD_SEEN | GOVD_MAP;
11182 else
11183 break;
11184 splay_tree_node on
11185 = splay_tree_lookup (octx->variables,
11186 (splay_tree_key) decl);
11187 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11189 octx = NULL;
11190 break;
11192 omp_add_variable (octx, decl, flags);
11193 if (octx->outer_context == NULL)
11194 break;
11195 octx = octx->outer_context;
11197 while (1);
11198 if (octx
11199 && decl
11200 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11201 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11202 omp_notice_variable (octx, decl, true);
11204 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11205 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11206 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11208 notice_outer = false;
11209 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11211 goto do_add;
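/* For instance (hypothetical user code), in

     #pragma omp parallel for simd linear(j:1)

   the clause is processed on the simd, but the loop above also adds
   lastprivate (and firstprivate when copy-in is needed) for J on the
   enclosing parallel so its final value is visible afterwards.  */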
11213 case OMP_CLAUSE_MAP:
11214 decl = OMP_CLAUSE_DECL (c);
11215 if (error_operand_p (decl))
11216 remove = true;
11217 switch (code)
11219 case OMP_TARGET:
11220 break;
11221 case OACC_DATA:
11222 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11223 break;
11224 /* FALLTHRU */
11225 case OMP_TARGET_DATA:
11226 case OMP_TARGET_ENTER_DATA:
11227 case OMP_TARGET_EXIT_DATA:
11228 case OACC_ENTER_DATA:
11229 case OACC_EXIT_DATA:
11230 case OACC_HOST_DATA:
11231 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11232 || (OMP_CLAUSE_MAP_KIND (c)
11233 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11234 /* For target {,enter ,exit }data only the array slice is
11235 mapped, but not the pointer to it. */
11236 remove = true;
11237 break;
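/* E.g. for (hypothetical user code)

     int *p;
     #pragma omp target enter data map(to: p[0:n])

   only the N pointed-to elements are mapped; the
   GOMP_MAP_FIRSTPRIVATE_POINTER node for P itself is removed here
   because there is no target region to receive a private pointer.  */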
11238 default:
11239 break;
11241 if (remove)
11242 break;
11243 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11245 struct gimplify_omp_ctx *octx;
11246 for (octx = outer_ctx; octx; octx = octx->outer_context)
11248 if (octx->region_type != ORT_ACC_HOST_DATA)
11249 break;
11250 splay_tree_node n2
11251 = splay_tree_lookup (octx->variables,
11252 (splay_tree_key) decl);
11253 if (n2)
11254 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11255 "declared in enclosing %<host_data%> region",
11256 DECL_NAME (decl));
11259 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11260 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11261 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11262 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11263 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11265 remove = true;
11266 break;
11268 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11269 || (OMP_CLAUSE_MAP_KIND (c)
11270 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11271 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11272 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11274 OMP_CLAUSE_SIZE (c)
11275 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11276 false);
11277 if ((region_type & ORT_TARGET) != 0)
11278 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11279 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11282 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11284 tree base = omp_strip_components_and_deref (decl);
11285 if (DECL_P (base))
11287 decl = base;
11288 splay_tree_node n
11289 = splay_tree_lookup (ctx->variables,
11290 (splay_tree_key) decl);
11291 if (seen_error ()
11292 && n
11293 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11295 remove = true;
11296 break;
11298 flags = GOVD_MAP | GOVD_EXPLICIT;
11300 goto do_add_decl;
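/* A component mapping such as (hypothetical user code)

     #pragma omp target map(tofrom: s.a, s.b)

   is represented as a GOMP_MAP_STRUCT node for S followed by one node
   per member, so it is the containing variable S that gets recorded
   in the context here.  */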
11304 if (TREE_CODE (decl) == TARGET_EXPR)
11306 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11307 is_gimple_lvalue, fb_lvalue)
11308 == GS_ERROR)
11309 remove = true;
11311 else if (!DECL_P (decl))
11313 tree d = decl, *pd;
11314 if (TREE_CODE (d) == ARRAY_REF)
11316 while (TREE_CODE (d) == ARRAY_REF)
11317 d = TREE_OPERAND (d, 0);
11318 if (TREE_CODE (d) == COMPONENT_REF
11319 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11320 decl = d;
11322 pd = &OMP_CLAUSE_DECL (c);
11323 if (d == decl
11324 && TREE_CODE (decl) == INDIRECT_REF
11325 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11326 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11327 == REFERENCE_TYPE)
11328 && (OMP_CLAUSE_MAP_KIND (c)
11329 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11331 pd = &TREE_OPERAND (decl, 0);
11332 decl = TREE_OPERAND (decl, 0);
11334 /* An "attach/detach" operation on an update directive should
11335 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11336 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11337 depends on the previous mapping. */
11338 if (code == OACC_UPDATE
11339 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11340 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11342 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11344 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11345 == ARRAY_TYPE)
11346 remove = true;
11347 else
11349 gomp_map_kind k = ((code == OACC_EXIT_DATA
11350 || code == OMP_TARGET_EXIT_DATA)
11351 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11352 OMP_CLAUSE_SET_MAP_KIND (c, k);
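/* E.g. for (hypothetical user code)

     #pragma omp target exit data map(from: s.p[0:n])

   the pointer component S.P carries a GOMP_MAP_ATTACH_DETACH node,
   resolved here to GOMP_MAP_DETACH (and to GOMP_MAP_ATTACH on the
   corresponding enter data / target construct).  */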
11356 tree cref = decl;
11358 while (TREE_CODE (cref) == ARRAY_REF)
11359 cref = TREE_OPERAND (cref, 0);
11361 if (TREE_CODE (cref) == INDIRECT_REF)
11362 cref = TREE_OPERAND (cref, 0);
11364 if (TREE_CODE (cref) == COMPONENT_REF)
11366 tree base = cref;
11367 while (base && !DECL_P (base))
11369 tree innerbase = omp_get_base_pointer (base);
11370 if (!innerbase)
11371 break;
11372 base = innerbase;
11374 if (base
11375 && DECL_P (base)
11376 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11377 && POINTER_TYPE_P (TREE_TYPE (base)))
11379 splay_tree_node n
11380 = splay_tree_lookup (ctx->variables,
11381 (splay_tree_key) base);
11382 n->value |= GOVD_SEEN;
11386 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11388 /* Don't gimplify *pd fully at this point, as the base
11389 will need to be adjusted during omp lowering. */
11390 auto_vec<tree, 10> expr_stack;
11391 tree *p = pd;
11392 while (handled_component_p (*p)
11393 || TREE_CODE (*p) == INDIRECT_REF
11394 || TREE_CODE (*p) == ADDR_EXPR
11395 || TREE_CODE (*p) == MEM_REF
11396 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11398 expr_stack.safe_push (*p);
11399 p = &TREE_OPERAND (*p, 0);
11401 for (int i = expr_stack.length () - 1; i >= 0; i--)
11403 tree t = expr_stack[i];
11404 if (TREE_CODE (t) == ARRAY_REF
11405 || TREE_CODE (t) == ARRAY_RANGE_REF)
11407 if (TREE_OPERAND (t, 2) == NULL_TREE)
11409 tree low = unshare_expr (array_ref_low_bound (t));
11410 if (!is_gimple_min_invariant (low))
11412 TREE_OPERAND (t, 2) = low;
11413 if (gimplify_expr (&TREE_OPERAND (t, 2),
11414 pre_p, NULL,
11415 is_gimple_reg,
11416 fb_rvalue) == GS_ERROR)
11417 remove = true;
11420 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11421 NULL, is_gimple_reg,
11422 fb_rvalue) == GS_ERROR)
11423 remove = true;
11424 if (TREE_OPERAND (t, 3) == NULL_TREE)
11426 tree elmt_size = array_ref_element_size (t);
11427 if (!is_gimple_min_invariant (elmt_size))
11429 elmt_size = unshare_expr (elmt_size);
11430 tree elmt_type
11431 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11432 0)));
11433 tree factor
11434 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11435 elmt_size
11436 = size_binop (EXACT_DIV_EXPR, elmt_size,
11437 factor);
11438 TREE_OPERAND (t, 3) = elmt_size;
11439 if (gimplify_expr (&TREE_OPERAND (t, 3),
11440 pre_p, NULL,
11441 is_gimple_reg,
11442 fb_rvalue) == GS_ERROR)
11443 remove = true;
11446 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11447 NULL, is_gimple_reg,
11448 fb_rvalue) == GS_ERROR)
11449 remove = true;
11451 else if (TREE_CODE (t) == COMPONENT_REF)
11453 if (TREE_OPERAND (t, 2) == NULL_TREE)
11455 tree offset = component_ref_field_offset (t);
11456 if (!is_gimple_min_invariant (offset))
11458 offset = unshare_expr (offset);
11459 tree field = TREE_OPERAND (t, 1);
11460 tree factor
11461 = size_int (DECL_OFFSET_ALIGN (field)
11462 / BITS_PER_UNIT);
11463 offset = size_binop (EXACT_DIV_EXPR, offset,
11464 factor);
11465 TREE_OPERAND (t, 2) = offset;
11466 if (gimplify_expr (&TREE_OPERAND (t, 2),
11467 pre_p, NULL,
11468 is_gimple_reg,
11469 fb_rvalue) == GS_ERROR)
11470 remove = true;
11473 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11474 NULL, is_gimple_reg,
11475 fb_rvalue) == GS_ERROR)
11476 remove = true;
11479 for (; expr_stack.length () > 0; )
11481 tree t = expr_stack.pop ();
11483 if (TREE_CODE (t) == ARRAY_REF
11484 || TREE_CODE (t) == ARRAY_RANGE_REF)
11486 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11487 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11488 NULL, is_gimple_val,
11489 fb_rvalue) == GS_ERROR)
11490 remove = true;
11494 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11495 fb_lvalue) == GS_ERROR)
11497 remove = true;
11498 break;
11500 break;
11502 flags = GOVD_MAP | GOVD_EXPLICIT;
11503 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11504 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11505 flags |= GOVD_MAP_ALWAYS_TO;
11507 if ((code == OMP_TARGET
11508 || code == OMP_TARGET_DATA
11509 || code == OMP_TARGET_ENTER_DATA
11510 || code == OMP_TARGET_EXIT_DATA)
11511 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11513 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11514 octx = octx->outer_context)
11516 splay_tree_node n
11517 = splay_tree_lookup (octx->variables,
11518 (splay_tree_key) OMP_CLAUSE_DECL (c));
11519 /* If this is contained in an outer OpenMP region as a
11520 firstprivate value, remove the attach/detach. */
11521 if (n && (n->value & GOVD_FIRSTPRIVATE))
11523 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11524 goto do_add;
11528 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11529 ? GOMP_MAP_DETACH
11530 : GOMP_MAP_ATTACH);
11531 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11534 goto do_add;
11536 case OMP_CLAUSE_AFFINITY:
11537 gimplify_omp_affinity (list_p, pre_p);
11538 remove = true;
11539 break;
11540 case OMP_CLAUSE_DOACROSS:
11541 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11543 tree deps = OMP_CLAUSE_DECL (c);
11544 while (deps && TREE_CODE (deps) == TREE_LIST)
11546 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11547 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11548 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11549 pre_p, NULL, is_gimple_val, fb_rvalue);
11550 deps = TREE_CHAIN (deps);
11553 else
11554 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11555 == OMP_CLAUSE_DOACROSS_SOURCE);
11556 break;
11557 case OMP_CLAUSE_DEPEND:
11558 if (handled_depend_iterators == -1)
11559 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11560 if (handled_depend_iterators)
11562 if (handled_depend_iterators == 2)
11563 remove = true;
11564 break;
11566 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11568 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11569 NULL, is_gimple_val, fb_rvalue);
11570 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11572 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11574 remove = true;
11575 break;
11577 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11579 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11580 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11581 is_gimple_val, fb_rvalue) == GS_ERROR)
11583 remove = true;
11584 break;
11587 if (code == OMP_TASK)
11588 ctx->has_depend = true;
11589 break;
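/* A dependence such as (hypothetical user code)

     #pragma omp task depend(inout: a[i])

   has its address taken above, so OMP_CLAUSE_DECL becomes the
   gimplified &a[i]; the runtime keys dependences on that address.  */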
11591 case OMP_CLAUSE_TO:
11592 case OMP_CLAUSE_FROM:
11593 case OMP_CLAUSE__CACHE_:
11594 decl = OMP_CLAUSE_DECL (c);
11595 if (error_operand_p (decl))
11597 remove = true;
11598 break;
11600 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11601 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11602 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11603 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11604 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11606 remove = true;
11607 break;
11609 if (!DECL_P (decl))
11611 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11612 NULL, is_gimple_lvalue, fb_lvalue)
11613 == GS_ERROR)
11615 remove = true;
11616 break;
11618 break;
11620 goto do_notice;
11622 case OMP_CLAUSE_USE_DEVICE_PTR:
11623 case OMP_CLAUSE_USE_DEVICE_ADDR:
11624 flags = GOVD_EXPLICIT;
11625 goto do_add;
11627 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11628 decl = OMP_CLAUSE_DECL (c);
11629 while (TREE_CODE (decl) == INDIRECT_REF
11630 || TREE_CODE (decl) == ARRAY_REF)
11631 decl = TREE_OPERAND (decl, 0);
11632 flags = GOVD_EXPLICIT;
11633 goto do_add_decl;
11635 case OMP_CLAUSE_IS_DEVICE_PTR:
11636 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11637 goto do_add;
11639 do_add:
11640 decl = OMP_CLAUSE_DECL (c);
11641 do_add_decl:
11642 if (error_operand_p (decl))
11644 remove = true;
11645 break;
11647 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11649 tree t = omp_member_access_dummy_var (decl);
11650 if (t)
11652 tree v = DECL_VALUE_EXPR (decl);
11653 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11654 if (outer_ctx)
11655 omp_notice_variable (outer_ctx, t, true);
11658 if (code == OACC_DATA
11659 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11660 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11661 flags |= GOVD_MAP_0LEN_ARRAY;
11662 omp_add_variable (ctx, decl, flags);
11663 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11664 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11665 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11666 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11668 struct gimplify_omp_ctx *pctx
11669 = code == OMP_TARGET ? outer_ctx : ctx;
11670 if (pctx)
11671 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11672 GOVD_LOCAL | GOVD_SEEN);
11673 if (pctx
11674 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11675 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11676 find_decl_expr,
11677 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11678 NULL) == NULL_TREE)
11679 omp_add_variable (pctx,
11680 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11681 GOVD_LOCAL | GOVD_SEEN);
11682 gimplify_omp_ctxp = pctx;
11683 push_gimplify_context ();
11685 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11686 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11688 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11689 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11690 pop_gimplify_context
11691 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11692 push_gimplify_context ();
11693 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11694 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11695 pop_gimplify_context
11696 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11697 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11698 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11700 gimplify_omp_ctxp = outer_ctx;
11702 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11703 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11705 gimplify_omp_ctxp = ctx;
11706 push_gimplify_context ();
11707 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11709 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11710 NULL, NULL);
11711 TREE_SIDE_EFFECTS (bind) = 1;
11712 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11713 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11715 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11716 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11717 pop_gimplify_context
11718 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11719 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11721 gimplify_omp_ctxp = outer_ctx;
11723 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11724 && OMP_CLAUSE_LINEAR_STMT (c))
11726 gimplify_omp_ctxp = ctx;
11727 push_gimplify_context ();
11728 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11730 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11731 NULL, NULL);
11732 TREE_SIDE_EFFECTS (bind) = 1;
11733 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11734 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11736 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11737 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11738 pop_gimplify_context
11739 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11740 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11742 gimplify_omp_ctxp = outer_ctx;
11744 if (notice_outer)
11745 goto do_notice;
11746 break;
11748 case OMP_CLAUSE_COPYIN:
11749 case OMP_CLAUSE_COPYPRIVATE:
11750 decl = OMP_CLAUSE_DECL (c);
11751 if (error_operand_p (decl))
11753 remove = true;
11754 break;
11756 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11757 && !remove
11758 && !omp_check_private (ctx, decl, true))
11760 remove = true;
11761 if (is_global_var (decl))
11763 if (DECL_THREAD_LOCAL_P (decl))
11764 remove = false;
11765 else if (DECL_HAS_VALUE_EXPR_P (decl))
11767 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11769 if (value
11770 && DECL_P (value)
11771 && DECL_THREAD_LOCAL_P (value))
11772 remove = false;
11775 if (remove)
11776 error_at (OMP_CLAUSE_LOCATION (c),
11777 "copyprivate variable %qE is not threadprivate"
11778 " or private in outer context", DECL_NAME (decl));
11780 do_notice:
11781 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11782 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11783 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11784 && outer_ctx
11785 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11786 || (region_type == ORT_WORKSHARE
11787 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11788 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11789 || code == OMP_LOOP)))
11790 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11791 || (code == OMP_LOOP
11792 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11793 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11794 == ORT_COMBINED_TEAMS))))
11796 splay_tree_node on
11797 = splay_tree_lookup (outer_ctx->variables,
11798 (splay_tree_key)decl);
11799 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11802 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11803 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11804 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11805 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11806 == POINTER_TYPE))))
11807 omp_firstprivatize_variable (outer_ctx, decl);
11808 else
11810 omp_add_variable (outer_ctx, decl,
11811 GOVD_SEEN | GOVD_SHARED);
11812 if (outer_ctx->outer_context)
11813 omp_notice_variable (outer_ctx->outer_context, decl,
11814 true);
11818 if (outer_ctx)
11819 omp_notice_variable (outer_ctx, decl, true);
11820 if (check_non_private
11821 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11822 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11823 || decl == OMP_CLAUSE_DECL (c)
11824 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11825 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11826 == ADDR_EXPR
11827 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11828 == POINTER_PLUS_EXPR
11829 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11830 (OMP_CLAUSE_DECL (c), 0), 0))
11831 == ADDR_EXPR)))))
11832 && omp_check_private (ctx, decl, false))
11834 error ("%s variable %qE is private in outer context",
11835 check_non_private, DECL_NAME (decl));
11836 remove = true;
11838 break;
11840 case OMP_CLAUSE_DETACH:
11841 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11842 goto do_add;
11844 case OMP_CLAUSE_IF:
11845 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11846 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11848 const char *p[2];
11849 for (int i = 0; i < 2; i++)
11850 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11852 case VOID_CST: p[i] = "cancel"; break;
11853 case OMP_PARALLEL: p[i] = "parallel"; break;
11854 case OMP_SIMD: p[i] = "simd"; break;
11855 case OMP_TASK: p[i] = "task"; break;
11856 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11857 case OMP_TARGET_DATA: p[i] = "target data"; break;
11858 case OMP_TARGET: p[i] = "target"; break;
11859 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11860 case OMP_TARGET_ENTER_DATA:
11861 p[i] = "target enter data"; break;
11862 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11863 default: gcc_unreachable ();
11865 error_at (OMP_CLAUSE_LOCATION (c),
11866 "expected %qs %<if%> clause modifier rather than %qs",
11867 p[0], p[1]);
11868 remove = true;
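/* E.g. (hypothetical user code, assuming the front end defers this
   check to gimplification)

     #pragma omp task if (parallel: 1)

   is rejected above because the modifier must name the directive the
   clause ends up on, here task.  */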
11870 /* Fall through. */
11872 case OMP_CLAUSE_FINAL:
11873 OMP_CLAUSE_OPERAND (c, 0)
11874 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11875 /* Fall through. */
11877 case OMP_CLAUSE_NUM_TEAMS:
11878 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11879 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11880 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11882 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11884 remove = true;
11885 break;
11887 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11888 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11889 pre_p, NULL, true);
11891 /* Fall through. */
11893 case OMP_CLAUSE_SCHEDULE:
11894 case OMP_CLAUSE_NUM_THREADS:
11895 case OMP_CLAUSE_THREAD_LIMIT:
11896 case OMP_CLAUSE_DIST_SCHEDULE:
11897 case OMP_CLAUSE_DEVICE:
11898 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11899 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11901 if (code != OMP_TARGET)
11903 error_at (OMP_CLAUSE_LOCATION (c),
11904 "%<device%> clause with %<ancestor%> is only "
11905 "allowed on %<target%> construct");
11906 remove = true;
11907 break;
11910 tree clauses = *orig_list_p;
11911 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11912 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11913 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11914 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11915 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11916 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
11919 error_at (OMP_CLAUSE_LOCATION (c),
11920 "with %<ancestor%>, only the %<device%>, "
11921 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11922 "and %<map%> clauses may appear on the "
11923 "construct");
11924 remove = true;
11925 break;
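/* E.g. (hypothetical user code)

     #pragma omp target device(ancestor: 1) nowait

   trips the check above: with the ancestor modifier only device,
   firstprivate, private, defaultmap and map clauses are permitted on
   the construct.  */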
11928 /* Fall through. */
11930 case OMP_CLAUSE_PRIORITY:
11931 case OMP_CLAUSE_GRAINSIZE:
11932 case OMP_CLAUSE_NUM_TASKS:
11933 case OMP_CLAUSE_FILTER:
11934 case OMP_CLAUSE_HINT:
11935 case OMP_CLAUSE_ASYNC:
11936 case OMP_CLAUSE_WAIT:
11937 case OMP_CLAUSE_NUM_GANGS:
11938 case OMP_CLAUSE_NUM_WORKERS:
11939 case OMP_CLAUSE_VECTOR_LENGTH:
11940 case OMP_CLAUSE_WORKER:
11941 case OMP_CLAUSE_VECTOR:
11942 if (OMP_CLAUSE_OPERAND (c, 0)
11943 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
11945 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
11947 remove = true;
11948 break;
11950 /* All these clauses care about the value, not a particular decl,
11951 so try to force it into an SSA_NAME or a fresh temporary. */
11952 OMP_CLAUSE_OPERAND (c, 0)
11953 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
11954 pre_p, NULL, true);
11956 break;
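/* So for (hypothetical user code)

     #pragma omp parallel num_threads(f ())

   the call F () is evaluated once into a fresh temporary before the
   region, and the clause operand is replaced by that temporary.  */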
11958 case OMP_CLAUSE_GANG:
11959 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
11960 is_gimple_val, fb_rvalue) == GS_ERROR)
11961 remove = true;
11962 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
11963 is_gimple_val, fb_rvalue) == GS_ERROR)
11964 remove = true;
11965 break;
11967 case OMP_CLAUSE_NOWAIT:
11968 nowait = 1;
11969 break;
11971 case OMP_CLAUSE_ORDERED:
11972 case OMP_CLAUSE_UNTIED:
11973 case OMP_CLAUSE_COLLAPSE:
11974 case OMP_CLAUSE_TILE:
11975 case OMP_CLAUSE_AUTO:
11976 case OMP_CLAUSE_SEQ:
11977 case OMP_CLAUSE_INDEPENDENT:
11978 case OMP_CLAUSE_MERGEABLE:
11979 case OMP_CLAUSE_PROC_BIND:
11980 case OMP_CLAUSE_SAFELEN:
11981 case OMP_CLAUSE_SIMDLEN:
11982 case OMP_CLAUSE_NOGROUP:
11983 case OMP_CLAUSE_THREADS:
11984 case OMP_CLAUSE_SIMD:
11985 case OMP_CLAUSE_BIND:
11986 case OMP_CLAUSE_IF_PRESENT:
11987 case OMP_CLAUSE_FINALIZE:
11988 break;
11990 case OMP_CLAUSE_ORDER:
11991 ctx->order_concurrent = true;
11992 break;
11994 case OMP_CLAUSE_DEFAULTMAP:
11995 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
11996 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
11998 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
11999 gdmkmin = GDMK_SCALAR;
12000 gdmkmax = GDMK_POINTER;
12001 break;
12002 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
12003 gdmkmin = GDMK_SCALAR;
12004 gdmkmax = GDMK_SCALAR_TARGET;
12005 break;
12006 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
12007 gdmkmin = gdmkmax = GDMK_AGGREGATE;
12008 break;
12009 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
12010 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
12011 break;
12012 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
12013 gdmkmin = gdmkmax = GDMK_POINTER;
12014 break;
12015 default:
12016 gcc_unreachable ();
12018 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
12019 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
12021 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
12022 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
12023 break;
12024 case OMP_CLAUSE_DEFAULTMAP_TO:
12025 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
12026 break;
12027 case OMP_CLAUSE_DEFAULTMAP_FROM:
12028 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
12029 break;
12030 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
12031 ctx->defaultmap[gdmk] = GOVD_MAP;
12032 break;
12033 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
12034 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12035 break;
12036 case OMP_CLAUSE_DEFAULTMAP_NONE:
12037 ctx->defaultmap[gdmk] = 0;
12038 break;
12039 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
12040 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
12041 break;
12042 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
12043 switch (gdmk)
12045 case GDMK_SCALAR:
12046 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12047 break;
12048 case GDMK_SCALAR_TARGET:
12049 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
12050 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
12051 break;
12052 case GDMK_AGGREGATE:
12053 case GDMK_ALLOCATABLE:
12054 ctx->defaultmap[gdmk] = GOVD_MAP;
12055 break;
12056 case GDMK_POINTER:
12057 ctx->defaultmap[gdmk] = GOVD_MAP;
12058 if (!lang_GNU_Fortran ())
12059 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
12060 break;
12061 default:
12062 gcc_unreachable ();
12064 break;
12065 default:
12066 gcc_unreachable ();
12068 break;
12070 case OMP_CLAUSE_ALIGNED:
12071 decl = OMP_CLAUSE_DECL (c);
12072 if (error_operand_p (decl))
12074 remove = true;
12075 break;
12077 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12078 is_gimple_val, fb_rvalue) == GS_ERROR)
12080 remove = true;
12081 break;
12083 if (!is_global_var (decl)
12084 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12085 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12086 break;
12088 case OMP_CLAUSE_NONTEMPORAL:
12089 decl = OMP_CLAUSE_DECL (c);
12090 if (error_operand_p (decl))
12092 remove = true;
12093 break;
12095 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12096 break;
12098 case OMP_CLAUSE_ALLOCATE:
12099 decl = OMP_CLAUSE_DECL (c);
12100 if (error_operand_p (decl))
12102 remove = true;
12103 break;
12105 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12106 is_gimple_val, fb_rvalue) == GS_ERROR)
12108 remove = true;
12109 break;
12111 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12112 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12113 == INTEGER_CST))
12115 else if (code == OMP_TASKLOOP
12116 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12117 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12118 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12119 pre_p, NULL, false);
12120 break;
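/* E.g. with an allocator variable (hypothetical user code)

     omp_allocator_handle_t al
       = omp_init_allocator (omp_default_mem_space, 0, NULL);
     #pragma omp taskloop allocate(al: x) firstprivate(x)

   AL is captured into a temporary here, presumably so it is evaluated
   once by the encountering thread before the tasks are created.  */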
12122 case OMP_CLAUSE_DEFAULT:
12123 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12124 break;
12126 case OMP_CLAUSE_INCLUSIVE:
12127 case OMP_CLAUSE_EXCLUSIVE:
12128 decl = OMP_CLAUSE_DECL (c);
12130 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12131 (splay_tree_key) decl);
12132 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12134 error_at (OMP_CLAUSE_LOCATION (c),
12135 "%qD specified in %qs clause but not in %<inscan%> "
12136 "%<reduction%> clause on the containing construct",
12137 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12138 remove = true;
12140 else
12142 n->value |= GOVD_REDUCTION_INSCAN;
12143 if (outer_ctx->region_type == ORT_SIMD
12144 && outer_ctx->outer_context
12145 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12147 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12148 (splay_tree_key) decl);
12149 if (n && (n->value & GOVD_REDUCTION) != 0)
12150 n->value |= GOVD_REDUCTION_INSCAN;
12154 break;
12156 case OMP_CLAUSE_NOHOST:
12157 default:
12158 gcc_unreachable ();
12161 if (code == OACC_DATA
12162 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12163 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12164 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12165 remove = true;
12166 if (remove)
12167 *list_p = OMP_CLAUSE_CHAIN (c);
12168 else
12169 list_p = &OMP_CLAUSE_CHAIN (c);
12172 ctx->clauses = *orig_list_p;
12173 gimplify_omp_ctxp = ctx;
12176 /* Return true if DECL is a candidate for the shared-to-firstprivate
12177 optimization. We only consider non-addressable scalars that are
12178 not too big and are not references. */
12180 static bool
12181 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12183 if (TREE_ADDRESSABLE (decl))
12184 return false;
12185 tree type = TREE_TYPE (decl);
12186 if (!is_gimple_reg_type (type)
12187 || TREE_CODE (type) == REFERENCE_TYPE
12188 || TREE_ADDRESSABLE (type))
12189 return false;
12190 /* Don't optimize overly large decls, as each thread/task will have
12191 its own copy. */
12192 HOST_WIDE_INT len = int_size_in_bytes (type);
12193 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12194 return false;
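/* On a typical 64-bit target the limit above works out to
   4 * POINTER_SIZE / BITS_PER_UNIT = 4 * 64 / 8 = 32 bytes, so plain
   scalars qualify while larger aggregates do not (assuming the usual
   values of those target macros).  */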
12195 if (omp_privatize_by_reference (decl))
12196 return false;
12197 return true;
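/* The optimization this guards: in (hypothetical user code)

     int x = 42;
     #pragma omp parallel shared(x)
     use (x);

   a shared scalar that is only read can be marked
   OMP_CLAUSE_SHARED_READONLY and copied in like a firstprivate,
   avoiding repeated shared-memory accesses.  */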
12200 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12201 For a decl satisfying omp_shared_to_firstprivate_optimizable_decl_p,
12202 mark it as GOVD_WRITTEN in outer contexts. */
12204 static void
12205 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12207 for (; ctx; ctx = ctx->outer_context)
12209 splay_tree_node n = splay_tree_lookup (ctx->variables,
12210 (splay_tree_key) decl);
12211 if (n == NULL)
12212 continue;
12213 else if (n->value & GOVD_SHARED)
12215 n->value |= GOVD_WRITTEN;
12216 return;
12218 else if (n->value & GOVD_DATA_SHARE_CLASS)
12219 return;
12223 /* Helper callback for walk_gimple_seq to discover possible stores
12224 to omp_shared_to_firstprivate_optimizable_decl_p decls, and set
12225 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12226 context. */
12228 static tree
12229 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12231 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12233 *walk_subtrees = 0;
12234 if (!wi->is_lhs)
12235 return NULL_TREE;
12237 tree op = *tp;
12240 if (handled_component_p (op))
12241 op = TREE_OPERAND (op, 0);
12242 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12243 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12244 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12245 else
12246 break;
12248 while (1);
12249 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12250 return NULL_TREE;
12252 omp_mark_stores (gimplify_omp_ctxp, op);
12253 return NULL_TREE;
12256 /* Helper callback for walk_gimple_seq to discover possible stores
12257 to omp_shared_to_firstprivate_optimizable_decl_p decls, and set
12258 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12259 context. */
12261 static tree
12262 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12263 bool *handled_ops_p,
12264 struct walk_stmt_info *wi)
12266 gimple *stmt = gsi_stmt (*gsi_p);
12267 switch (gimple_code (stmt))
12269 /* Don't recurse on OpenMP constructs for which
12270 gimplify_adjust_omp_clauses already handled the bodies;
12271 only gimple_omp_for_pre_body still needs handling here. */
12272 case GIMPLE_OMP_FOR:
12273 *handled_ops_p = true;
12274 if (gimple_omp_for_pre_body (stmt))
12275 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12276 omp_find_stores_stmt, omp_find_stores_op, wi);
12277 break;
12278 case GIMPLE_OMP_PARALLEL:
12279 case GIMPLE_OMP_TASK:
12280 case GIMPLE_OMP_SECTIONS:
12281 case GIMPLE_OMP_SINGLE:
12282 case GIMPLE_OMP_SCOPE:
12283 case GIMPLE_OMP_TARGET:
12284 case GIMPLE_OMP_TEAMS:
12285 case GIMPLE_OMP_CRITICAL:
12286 *handled_ops_p = true;
12287 break;
12288 default:
12289 break;
12291 return NULL_TREE;
12294 struct gimplify_adjust_omp_clauses_data
12296 tree *list_p;
12297 gimple_seq *pre_p;
12300 /* For all variables that were not actually used within the context,
12301 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12303 static int
12304 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12306 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12307 gimple_seq *pre_p
12308 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12309 tree decl = (tree) n->key;
12310 unsigned flags = n->value;
12311 enum omp_clause_code code;
12312 tree clause;
12313 bool private_debug;
12315 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12316 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12317 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12318 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12319 return 0;
12320 if ((flags & GOVD_SEEN) == 0)
12321 return 0;
12322 if (flags & GOVD_DEBUG_PRIVATE)
12324 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12325 private_debug = true;
12327 else if (flags & GOVD_MAP)
12328 private_debug = false;
12329 else
12330 private_debug
12331 = lang_hooks.decls.omp_private_debug_clause (decl,
12332 !!(flags & GOVD_SHARED));
12333 if (private_debug)
12334 code = OMP_CLAUSE_PRIVATE;
12335 else if (flags & GOVD_MAP)
12337 code = OMP_CLAUSE_MAP;
12338 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12339 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12341 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12342 return 0;
12344 if (VAR_P (decl)
12345 && DECL_IN_CONSTANT_POOL (decl)
12346 && !lookup_attribute ("omp declare target",
12347 DECL_ATTRIBUTES (decl)))
12349 tree id = get_identifier ("omp declare target");
12350 DECL_ATTRIBUTES (decl)
12351 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12352 varpool_node *node = varpool_node::get (decl);
12353 if (node)
12355 node->offloadable = 1;
12356 if (ENABLE_OFFLOADING)
12357 g->have_offload = true;
12361 else if (flags & GOVD_SHARED)
12363 if (is_global_var (decl))
12365 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12366 while (ctx != NULL)
12368 splay_tree_node on
12369 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12370 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12371 | GOVD_PRIVATE | GOVD_REDUCTION
12372 | GOVD_LINEAR | GOVD_MAP)) != 0)
12373 break;
12374 ctx = ctx->outer_context;
12376 if (ctx == NULL)
12377 return 0;
12379 code = OMP_CLAUSE_SHARED;
12380 /* Don't optimize shared into firstprivate for read-only vars
12381 on tasks with a depend clause; we shouldn't try to copy them
12382 until the dependencies are satisfied. */
12383 if (gimplify_omp_ctxp->has_depend)
12384 flags |= GOVD_WRITTEN;
12386 else if (flags & GOVD_PRIVATE)
12387 code = OMP_CLAUSE_PRIVATE;
12388 else if (flags & GOVD_FIRSTPRIVATE)
12390 code = OMP_CLAUSE_FIRSTPRIVATE;
12391 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12392 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12393 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12395 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12396 "%<target%> construct", decl);
12397 return 0;
12400 else if (flags & GOVD_LASTPRIVATE)
12401 code = OMP_CLAUSE_LASTPRIVATE;
12402 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12403 return 0;
12404 else if (flags & GOVD_CONDTEMP)
12406 code = OMP_CLAUSE__CONDTEMP_;
12407 gimple_add_tmp_var (decl);
12409 else
12410 gcc_unreachable ();
12412 if (((flags & GOVD_LASTPRIVATE)
12413 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12414 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12415 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12417 tree chain = *list_p;
12418 clause = build_omp_clause (input_location, code);
12419 OMP_CLAUSE_DECL (clause) = decl;
12420 OMP_CLAUSE_CHAIN (clause) = chain;
12421 if (private_debug)
12422 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12423 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12424 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12425 else if (code == OMP_CLAUSE_SHARED
12426 && (flags & GOVD_WRITTEN) == 0
12427 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12428 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12429 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12430 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12431 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12433 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12434 OMP_CLAUSE_DECL (nc) = decl;
12435 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12436 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12437 OMP_CLAUSE_DECL (clause)
12438 = build_simple_mem_ref_loc (input_location, decl);
12439 OMP_CLAUSE_DECL (clause)
12440 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12441 build_int_cst (build_pointer_type (char_type_node), 0));
12442 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12443 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12444 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12445 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12446 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12447 OMP_CLAUSE_CHAIN (nc) = chain;
12448 OMP_CLAUSE_CHAIN (clause) = nc;
12449 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12450 gimplify_omp_ctxp = ctx->outer_context;
12451 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12452 pre_p, NULL, is_gimple_val, fb_rvalue);
12453 gimplify_omp_ctxp = ctx;
12455 else if (code == OMP_CLAUSE_MAP)
12457 int kind;
12458 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12459 switch (flags & (GOVD_MAP_TO_ONLY
12460 | GOVD_MAP_FORCE
12461 | GOVD_MAP_FORCE_PRESENT
12462 | GOVD_MAP_ALLOC_ONLY
12463 | GOVD_MAP_FROM_ONLY))
12465 case 0:
12466 kind = GOMP_MAP_TOFROM;
12467 break;
12468 case GOVD_MAP_FORCE:
12469 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12470 break;
12471 case GOVD_MAP_TO_ONLY:
12472 kind = GOMP_MAP_TO;
12473 break;
12474 case GOVD_MAP_FROM_ONLY:
12475 kind = GOMP_MAP_FROM;
12476 break;
12477 case GOVD_MAP_ALLOC_ONLY:
12478 kind = GOMP_MAP_ALLOC;
12479 break;
12480 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12481 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12482 break;
12483 case GOVD_MAP_FORCE_PRESENT:
12484 kind = GOMP_MAP_FORCE_PRESENT;
12485 break;
12486 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
12487 kind = GOMP_MAP_FORCE_PRESENT;
12488 break;
12489 default:
12490 gcc_unreachable ();
12492 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
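/* For example (hypothetical user code), under

     #pragma omp target defaultmap(to: scalar)

   an implicitly mapped scalar was recorded as
   GOVD_MAP | GOVD_MAP_TO_ONLY, which the switch above lowers to a
   map(to:...) clause.  */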
12493 /* Setting of the implicit flag for the runtime is currently disabled for
12494 OpenACC. */
12495 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12496 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12497 if (DECL_SIZE (decl)
12498 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12500 tree decl2 = DECL_VALUE_EXPR (decl);
12501 gcc_assert (INDIRECT_REF_P (decl2));
12502 decl2 = TREE_OPERAND (decl2, 0);
12503 gcc_assert (DECL_P (decl2));
12504 tree mem = build_simple_mem_ref (decl2);
12505 OMP_CLAUSE_DECL (clause) = mem;
12506 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12507 if (gimplify_omp_ctxp->outer_context)
12509 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12510 omp_notice_variable (ctx, decl2, true);
12511 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12513 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12514 OMP_CLAUSE_MAP);
12515 OMP_CLAUSE_DECL (nc) = decl;
12516 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12517 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12518 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12519 else
12520 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12521 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12522 OMP_CLAUSE_CHAIN (clause) = nc;
12524 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12525 && omp_privatize_by_reference (decl))
12527 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12528 OMP_CLAUSE_SIZE (clause)
12529 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12530 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12531 gimplify_omp_ctxp = ctx->outer_context;
12532 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12533 pre_p, NULL, is_gimple_val, fb_rvalue);
12534 gimplify_omp_ctxp = ctx;
12535 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12536 OMP_CLAUSE_MAP);
12537 OMP_CLAUSE_DECL (nc) = decl;
12538 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12539 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12540 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12541 OMP_CLAUSE_CHAIN (clause) = nc;
12543 else
12544 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12546 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12548 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12549 OMP_CLAUSE_DECL (nc) = decl;
12550 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12551 OMP_CLAUSE_CHAIN (nc) = chain;
12552 OMP_CLAUSE_CHAIN (clause) = nc;
12553 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12554 gimplify_omp_ctxp = ctx->outer_context;
12555 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12556 (ctx->region_type & ORT_ACC) != 0);
12557 gimplify_omp_ctxp = ctx;
12559 *list_p = clause;
12560 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12561 gimplify_omp_ctxp = ctx->outer_context;
12562 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12563 in simd. Those are only added for local vars inside the simd body,
12564 and they don't need to be e.g. default constructible. */
12565 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12566 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12567 (ctx->region_type & ORT_ACC) != 0);
12568 if (gimplify_omp_ctxp)
12569 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12570 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12571 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12572 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12573 true);
12574 gimplify_omp_ctxp = ctx;
12575 return 0;
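/* E.g. for (hypothetical user code)

     int s = 0;
     #pragma omp parallel
       s++;

   S is seen but has no explicit clause, so this callback synthesizes
   shared(s); because the store marks it GOVD_WRITTEN, it is not
   flagged OMP_CLAUSE_SHARED_READONLY.  */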
12578 static void
12579 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12580 enum tree_code code)
12582 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12583 tree *orig_list_p = list_p;
12584 tree c, decl;
12585 bool has_inscan_reductions = false;
12587 if (body)
12589 struct gimplify_omp_ctx *octx;
12590 for (octx = ctx; octx; octx = octx->outer_context)
12591 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12592 break;
12593 if (octx)
12595 struct walk_stmt_info wi;
12596 memset (&wi, 0, sizeof (wi));
12597 walk_gimple_seq (body, omp_find_stores_stmt,
12598 omp_find_stores_op, &wi);
12602 if (ctx->add_safelen1)
12604 /* If there are VLAs in the body of a simd loop, prevent
12605 vectorization. */
12606 gcc_assert (ctx->region_type == ORT_SIMD);
12607 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12608 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12609 OMP_CLAUSE_CHAIN (c) = *list_p;
12610 *list_p = c;
12611 list_p = &OMP_CLAUSE_CHAIN (c);
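/* E.g. (hypothetical user code)

     #pragma omp simd
     for (i = 0; i < n; i++)
       {
         char buf[i + 1];
         buf[0] = 0;
       }

   contains a VLA, so an artificial safelen(1) clause is added above
   to keep the vectorizer from widening the loop.  */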
12614 if (ctx->region_type == ORT_WORKSHARE
12615 && ctx->outer_context
12616 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12618 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12619 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12620 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12622 decl = OMP_CLAUSE_DECL (c);
12623 splay_tree_node n
12624 = splay_tree_lookup (ctx->outer_context->variables,
12625 (splay_tree_key) decl);
12626 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12627 (splay_tree_key) decl));
12628 omp_add_variable (ctx, decl, n->value);
12629 tree c2 = copy_node (c);
12630 OMP_CLAUSE_CHAIN (c2) = *list_p;
12631 *list_p = c2;
12632 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12633 continue;
12634 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12635 OMP_CLAUSE_FIRSTPRIVATE);
12636 OMP_CLAUSE_DECL (c2) = decl;
12637 OMP_CLAUSE_CHAIN (c2) = *list_p;
12638 *list_p = c2;
12642 tree attach_list = NULL_TREE;
12643 tree *attach_tail = &attach_list;
12645 while ((c = *list_p) != NULL)
12647 splay_tree_node n;
12648 bool remove = false;
12649 bool move_attach = false;
12651 switch (OMP_CLAUSE_CODE (c))
12653 case OMP_CLAUSE_FIRSTPRIVATE:
12654 if ((ctx->region_type & ORT_TARGET)
12655 && (ctx->region_type & ORT_ACC) == 0
12656 && TYPE_ATOMIC (strip_array_types
12657 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12659 error_at (OMP_CLAUSE_LOCATION (c),
12660 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12661 "%<target%> construct", OMP_CLAUSE_DECL (c));
12662 remove = true;
12663 break;
12665 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12667 decl = OMP_CLAUSE_DECL (c);
12668 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12669 if ((n->value & GOVD_MAP) != 0)
12671 remove = true;
12672 break;
12674 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12675 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12677 /* FALLTHRU */
12678 case OMP_CLAUSE_PRIVATE:
12679 case OMP_CLAUSE_SHARED:
12680 case OMP_CLAUSE_LINEAR:
12681 decl = OMP_CLAUSE_DECL (c);
12682 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12683 remove = !(n->value & GOVD_SEEN);
12684 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12685 && code == OMP_PARALLEL
12686 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12687 remove = true;
12688 if (! remove)
12690 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12691 if ((n->value & GOVD_DEBUG_PRIVATE)
12692 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12694 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12695 || ((n->value & GOVD_DATA_SHARE_CLASS)
12696 == GOVD_SHARED));
12697 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12698 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12700 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12701 && ctx->has_depend
12702 && DECL_P (decl))
12703 n->value |= GOVD_WRITTEN;
12704 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12705 && (n->value & GOVD_WRITTEN) == 0
12706 && DECL_P (decl)
12707 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12708 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12709 else if (DECL_P (decl)
12710 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12711 && (n->value & GOVD_WRITTEN) != 0)
12712 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12713 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12714 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12715 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12717 else
12718 n->value &= ~GOVD_EXPLICIT;
12719 break;
12721 case OMP_CLAUSE_LASTPRIVATE:
12722 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12723 accurately reflect the presence of a FIRSTPRIVATE clause. */
12724 decl = OMP_CLAUSE_DECL (c);
12725 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12726 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12727 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12728 if (code == OMP_DISTRIBUTE
12729 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12731 remove = true;
12732 error_at (OMP_CLAUSE_LOCATION (c),
12733 "same variable used in %<firstprivate%> and "
12734 "%<lastprivate%> clauses on %<distribute%> "
12735 "construct");
12737 if (!remove
12738 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12739 && DECL_P (decl)
12740 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12741 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12742 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12743 remove = true;
12744 break;
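/* E.g. (hypothetical user code)

     #pragma omp distribute firstprivate(x) lastprivate(x)

   is rejected above; on distribute the two clauses may not name the
   same variable.  */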
12746 case OMP_CLAUSE_ALIGNED:
12747 decl = OMP_CLAUSE_DECL (c);
12748 if (!is_global_var (decl))
12750 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12751 remove = n == NULL || !(n->value & GOVD_SEEN);
12752 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12754 struct gimplify_omp_ctx *octx;
12755 if (n != NULL
12756 && (n->value & (GOVD_DATA_SHARE_CLASS
12757 & ~GOVD_FIRSTPRIVATE)))
12758 remove = true;
12759 else
12760 for (octx = ctx->outer_context; octx;
12761 octx = octx->outer_context)
12763 n = splay_tree_lookup (octx->variables,
12764 (splay_tree_key) decl);
12765 if (n == NULL)
12766 continue;
12767 if (n->value & GOVD_LOCAL)
12768 break;
12769 /* We have to avoid assigning a shared variable
12770 to itself when trying to add
12771 __builtin_assume_aligned. */
12772 if (n->value & GOVD_SHARED)
12774 remove = true;
12775 break;
12780 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12782 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12783 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12784 remove = true;
12786 break;
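/* The aligned clause, e.g. (hypothetical user code)

     #pragma omp simd aligned(p:32)

   is normally lowered via P = __builtin_assume_aligned (P, 32), which
   is why a P that is shared in an outer context is dropped above:
   that assignment would store to the shared variable itself.  */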
12788 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12789 decl = OMP_CLAUSE_DECL (c);
12790 while (INDIRECT_REF_P (decl)
12791 || TREE_CODE (decl) == ARRAY_REF)
12792 decl = TREE_OPERAND (decl, 0);
12793 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12794 remove = n == NULL || !(n->value & GOVD_SEEN);
12795 break;
12797 case OMP_CLAUSE_IS_DEVICE_PTR:
12798 case OMP_CLAUSE_NONTEMPORAL:
12799 decl = OMP_CLAUSE_DECL (c);
12800 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12801 remove = n == NULL || !(n->value & GOVD_SEEN);
12802 break;
12804 case OMP_CLAUSE_MAP:
12805 switch (OMP_CLAUSE_MAP_KIND (c))
12807 case GOMP_MAP_PRESENT_ALLOC:
12808 case GOMP_MAP_PRESENT_TO:
12809 case GOMP_MAP_PRESENT_FROM:
12810 case GOMP_MAP_PRESENT_TOFROM:
12811 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
12812 break;
12813 default:
12814 break;
12816 if (code == OMP_TARGET_EXIT_DATA
12817 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12819 remove = true;
12820 break;
12822 /* If we have a target region, we can push all the attaches to the
12823 end of the list (we may have standalone "attach" operations
12824 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12825 the attachment point AND the pointed-to block have been mapped).
12826 If we have something else, e.g. "enter data", we need to keep
12827 "attach" nodes together with the previous node they attach to so
12828 that separate "exit data" operations work properly (see
12829 libgomp/target.c). */
12830 if ((ctx->region_type & ORT_TARGET) != 0
12831 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12832 || (OMP_CLAUSE_MAP_KIND (c)
12833 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12834 move_attach = true;
12835 decl = OMP_CLAUSE_DECL (c);
12836 /* Data clauses associated with reductions must be
12837 compatible with present_or_copy. Warn and adjust the clause
12838 if that is not the case. */
12839 if (ctx->region_type == ORT_ACC_PARALLEL
12840 || ctx->region_type == ORT_ACC_SERIAL)
12842 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12843 n = NULL;
12845 if (DECL_P (t))
12846 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12848 if (n && (n->value & GOVD_REDUCTION))
12850 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12852 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12853 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12854 && kind != GOMP_MAP_FORCE_PRESENT
12855 && kind != GOMP_MAP_POINTER)
12857 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12858 "incompatible data clause with reduction "
12859 "on %qE; promoting to %<present_or_copy%>",
12860 DECL_NAME (t));
12861 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
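/* E.g. (hypothetical user code)

     #pragma acc parallel copyin(s) reduction(+: s)

   triggers the warning above: the to-only data clause is promoted to
   present_or_copy (GOMP_MAP_TOFROM) so the reduction result is copied
   back out.  */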
12865 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12866 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12868 remove = true;
12869 break;
12871 if (!DECL_P (decl))
12873 if ((ctx->region_type & ORT_TARGET) != 0
12874 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12876 if (INDIRECT_REF_P (decl)
12877 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12878 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12879 == REFERENCE_TYPE))
12880 decl = TREE_OPERAND (decl, 0);
12881 if (TREE_CODE (decl) == COMPONENT_REF)
12883 while (TREE_CODE (decl) == COMPONENT_REF)
12884 decl = TREE_OPERAND (decl, 0);
12885 if (DECL_P (decl))
12887 n = splay_tree_lookup (ctx->variables,
12888 (splay_tree_key) decl);
12889 if (!(n->value & GOVD_SEEN))
12890 remove = true;
12894 break;
12896 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12897 if ((ctx->region_type & ORT_TARGET) != 0
12898 && !(n->value & GOVD_SEEN)
12899 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12900 && (!is_global_var (decl)
12901 || !lookup_attribute ("omp declare target link",
12902 DECL_ATTRIBUTES (decl))))
12904 remove = true;
12905 /* For struct element mappings, if the struct is never referenced
12906 in the target block and none of the mappings has an always
12907 modifier, remove all the struct element mappings, which
12908 immediately follow the GOMP_MAP_STRUCT map clause. */
12909 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12911 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12912 while (cnt--)
12913 OMP_CLAUSE_CHAIN (c)
12914 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
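/* E.g. (illustrative): for "map(tofrom: s.a, s.b)" the GOMP_MAP_STRUCT
node carries OMP_CLAUSE_SIZE 2, so the loop above unlinks exactly the
two element mappings chained after it.  */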
12917 else if (DECL_SIZE (decl)
12918 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
12919 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
12920 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
12921 && (OMP_CLAUSE_MAP_KIND (c)
12922 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12924 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12925 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12926 INTEGER_CST. */
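/* Illustrative sketch (not part of the original source): for a
variable-length array "int vla[n];" whose DECL_VALUE_EXPR is "*vla.1",
the clause is rewritten below to map "*vla.1" with size
"n * sizeof (int)", and a pointer clause for "vla.1" itself may be
chained after it.  */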
12927 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
12929 tree decl2 = DECL_VALUE_EXPR (decl);
12930 gcc_assert (INDIRECT_REF_P (decl2));
12931 decl2 = TREE_OPERAND (decl2, 0);
12932 gcc_assert (DECL_P (decl2));
12933 tree mem = build_simple_mem_ref (decl2);
12934 OMP_CLAUSE_DECL (c) = mem;
12935 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12936 if (ctx->outer_context)
12938 omp_notice_variable (ctx->outer_context, decl2, true);
12939 omp_notice_variable (ctx->outer_context,
12940 OMP_CLAUSE_SIZE (c), true);
12942 if (((ctx->region_type & ORT_TARGET) != 0
12943 || !ctx->target_firstprivatize_array_bases)
12944 && ((n->value & GOVD_SEEN) == 0
12945 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
12947 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12948 OMP_CLAUSE_MAP);
12949 OMP_CLAUSE_DECL (nc) = decl;
12950 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12951 if (ctx->target_firstprivatize_array_bases)
12952 OMP_CLAUSE_SET_MAP_KIND (nc,
12953 GOMP_MAP_FIRSTPRIVATE_POINTER);
12954 else
12955 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12956 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
12957 OMP_CLAUSE_CHAIN (c) = nc;
12958 c = nc;
12961 else
12963 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12964 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12965 gcc_assert ((n->value & GOVD_SEEN) == 0
12966 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12967 == 0));
12969 break;
12971 case OMP_CLAUSE_TO:
12972 case OMP_CLAUSE_FROM:
12973 case OMP_CLAUSE__CACHE_:
12974 decl = OMP_CLAUSE_DECL (c);
12975 if (!DECL_P (decl))
12976 break;
12977 if (DECL_SIZE (decl)
12978 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12980 tree decl2 = DECL_VALUE_EXPR (decl);
12981 gcc_assert (INDIRECT_REF_P (decl2));
12982 decl2 = TREE_OPERAND (decl2, 0);
12983 gcc_assert (DECL_P (decl2));
12984 tree mem = build_simple_mem_ref (decl2);
12985 OMP_CLAUSE_DECL (c) = mem;
12986 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12987 if (ctx->outer_context)
12989 omp_notice_variable (ctx->outer_context, decl2, true);
12990 omp_notice_variable (ctx->outer_context,
12991 OMP_CLAUSE_SIZE (c), true);
12994 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12995 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12996 break;
12998 case OMP_CLAUSE_REDUCTION:
12999 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
13001 decl = OMP_CLAUSE_DECL (c);
13002 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13003 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
13005 remove = true;
13006 error_at (OMP_CLAUSE_LOCATION (c),
13007 "%qD specified in %<inscan%> %<reduction%> clause "
13008 "but not in %<scan%> directive clause", decl);
13009 break;
13011 has_inscan_reductions = true;
13013 /* FALLTHRU */
13014 case OMP_CLAUSE_IN_REDUCTION:
13015 case OMP_CLAUSE_TASK_REDUCTION:
13016 decl = OMP_CLAUSE_DECL (c);
13017 /* OpenACC reductions need a present_or_copy data clause.
13018 Add one if necessary.  Emit an error when the reduction is private.  */
13019 if (ctx->region_type == ORT_ACC_PARALLEL
13020 || ctx->region_type == ORT_ACC_SERIAL)
13022 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13023 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13025 remove = true;
13026 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
13027 "reduction on %qE", DECL_NAME (decl));
13029 else if ((n->value & GOVD_MAP) == 0)
13031 tree next = OMP_CLAUSE_CHAIN (c);
13032 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
13033 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
13034 OMP_CLAUSE_DECL (nc) = decl;
13035 OMP_CLAUSE_CHAIN (c) = nc;
13036 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13037 (ctx->region_type
13038 & ORT_ACC) != 0);
13039 while (1)
13041 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
13042 if (OMP_CLAUSE_CHAIN (nc) == NULL)
13043 break;
13044 nc = OMP_CLAUSE_CHAIN (nc);
13046 OMP_CLAUSE_CHAIN (nc) = next;
13047 n->value |= GOVD_MAP;
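/* Illustrative example (not part of the original source):
"#pragma acc parallel reduction(+:sum)" with no data clause for sum
receives an implicit map(tofrom: sum) here, marked as in_reduction.  */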
13050 if (DECL_P (decl)
13051 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13052 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13053 break;
13055 case OMP_CLAUSE_ALLOCATE:
13056 decl = OMP_CLAUSE_DECL (c);
13057 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13058 if (n != NULL && !(n->value & GOVD_SEEN))
13060 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
13061 != 0
13062 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
13063 remove = true;
13065 if (!remove
13066 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13067 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
13068 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
13069 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
13070 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
13072 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13073 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
13074 if (n == NULL)
13076 enum omp_clause_default_kind default_kind
13077 = ctx->default_kind;
13078 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
13079 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13080 true);
13081 ctx->default_kind = default_kind;
13083 else
13084 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13085 true);
13087 break;
13089 case OMP_CLAUSE_COPYIN:
13090 case OMP_CLAUSE_COPYPRIVATE:
13091 case OMP_CLAUSE_IF:
13092 case OMP_CLAUSE_NUM_THREADS:
13093 case OMP_CLAUSE_NUM_TEAMS:
13094 case OMP_CLAUSE_THREAD_LIMIT:
13095 case OMP_CLAUSE_DIST_SCHEDULE:
13096 case OMP_CLAUSE_DEVICE:
13097 case OMP_CLAUSE_SCHEDULE:
13098 case OMP_CLAUSE_NOWAIT:
13099 case OMP_CLAUSE_ORDERED:
13100 case OMP_CLAUSE_DEFAULT:
13101 case OMP_CLAUSE_UNTIED:
13102 case OMP_CLAUSE_COLLAPSE:
13103 case OMP_CLAUSE_FINAL:
13104 case OMP_CLAUSE_MERGEABLE:
13105 case OMP_CLAUSE_PROC_BIND:
13106 case OMP_CLAUSE_SAFELEN:
13107 case OMP_CLAUSE_SIMDLEN:
13108 case OMP_CLAUSE_DEPEND:
13109 case OMP_CLAUSE_DOACROSS:
13110 case OMP_CLAUSE_PRIORITY:
13111 case OMP_CLAUSE_GRAINSIZE:
13112 case OMP_CLAUSE_NUM_TASKS:
13113 case OMP_CLAUSE_NOGROUP:
13114 case OMP_CLAUSE_THREADS:
13115 case OMP_CLAUSE_SIMD:
13116 case OMP_CLAUSE_FILTER:
13117 case OMP_CLAUSE_HINT:
13118 case OMP_CLAUSE_DEFAULTMAP:
13119 case OMP_CLAUSE_ORDER:
13120 case OMP_CLAUSE_BIND:
13121 case OMP_CLAUSE_DETACH:
13122 case OMP_CLAUSE_USE_DEVICE_PTR:
13123 case OMP_CLAUSE_USE_DEVICE_ADDR:
13124 case OMP_CLAUSE_ASYNC:
13125 case OMP_CLAUSE_WAIT:
13126 case OMP_CLAUSE_INDEPENDENT:
13127 case OMP_CLAUSE_NUM_GANGS:
13128 case OMP_CLAUSE_NUM_WORKERS:
13129 case OMP_CLAUSE_VECTOR_LENGTH:
13130 case OMP_CLAUSE_GANG:
13131 case OMP_CLAUSE_WORKER:
13132 case OMP_CLAUSE_VECTOR:
13133 case OMP_CLAUSE_AUTO:
13134 case OMP_CLAUSE_SEQ:
13135 case OMP_CLAUSE_TILE:
13136 case OMP_CLAUSE_IF_PRESENT:
13137 case OMP_CLAUSE_FINALIZE:
13138 case OMP_CLAUSE_INCLUSIVE:
13139 case OMP_CLAUSE_EXCLUSIVE:
13140 break;
13142 case OMP_CLAUSE_NOHOST:
13143 default:
13144 gcc_unreachable ();
13147 if (remove)
13148 *list_p = OMP_CLAUSE_CHAIN (c);
13149 else if (move_attach)
13151 /* Remove attach node from here, separate out into its own list. */
13152 *attach_tail = c;
13153 *list_p = OMP_CLAUSE_CHAIN (c);
13154 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13155 attach_tail = &OMP_CLAUSE_CHAIN (c);
13157 else
13158 list_p = &OMP_CLAUSE_CHAIN (c);
13161 /* Splice attach nodes at the end of the list. */
13162 if (attach_list)
13164 *list_p = attach_list;
13165 list_p = attach_tail;
13168 /* Add in any implicit data sharing. */
13169 struct gimplify_adjust_omp_clauses_data data;
13170 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13172 /* OpenMP. Implicit clauses are added at the start of the clause list,
13173 but after any non-map clauses. */
13174 tree *implicit_add_list_p = orig_list_p;
13175 while (*implicit_add_list_p
13176 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13177 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13178 data.list_p = implicit_add_list_p;
13180 else
13181 /* OpenACC. */
13182 data.list_p = list_p;
13183 data.pre_p = pre_p;
13184 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13186 if (has_inscan_reductions)
13187 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13188 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13189 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13191 error_at (OMP_CLAUSE_LOCATION (c),
13192 "%<inscan%> %<reduction%> clause used together with "
13193 "%<linear%> clause for a variable other than loop "
13194 "iterator");
13195 break;
13198 gimplify_omp_ctxp = ctx->outer_context;
13199 delete_omp_context (ctx);
13202 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
13203 -1 if unknown yet (simd is involved, won't be known until vectorization)
13204 and 1 if they do. If SCORES is non-NULL, it should point to an array
13205 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
13206 of the CONSTRUCTS (position -1 if it will never match) followed by
13207 number of constructs in the OpenMP context construct trait. If the
13208 score depends on whether it will be in a declare simd clone or not,
13209 the function returns 2 and there will be two sets of the scores, the first
13210 one for the case that it is not in a declare simd clone, the other
13211 that it is in a declare simd clone. */
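/* Illustrative example (not part of the original source): when the
gimplifier is inside "#pragma omp target" containing
"#pragma omp parallel" and CONSTRUCTS holds those two codes in the
order prepared by omp_constructor_traits_to_codes, both match and the
function returns 1; had OMP_SIMD been among the matched constructs,
the result would be -1 (unknown until vectorization).  */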
13213 int
13214 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13215 int *scores)
13217 int matched = 0, cnt = 0;
13218 bool simd_seen = false;
13219 bool target_seen = false;
13220 int declare_simd_cnt = -1;
13221 auto_vec<enum tree_code, 16> codes;
13222 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13224 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13225 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13226 == ORT_TARGET && ctx->code == OMP_TARGET)
13227 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13228 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13229 || (ctx->region_type == ORT_SIMD
13230 && ctx->code == OMP_SIMD
13231 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13233 ++cnt;
13234 if (scores)
13235 codes.safe_push (ctx->code);
13236 else if (matched < nconstructs && ctx->code == constructs[matched])
13238 if (ctx->code == OMP_SIMD)
13240 if (matched)
13241 return 0;
13242 simd_seen = true;
13244 ++matched;
13246 if (ctx->code == OMP_TARGET)
13248 if (scores == NULL)
13249 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13250 target_seen = true;
13251 break;
13254 else if (ctx->region_type == ORT_WORKSHARE
13255 && ctx->code == OMP_LOOP
13256 && ctx->outer_context
13257 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13258 && ctx->outer_context->outer_context
13259 && ctx->outer_context->outer_context->code == OMP_LOOP
13260 && ctx->outer_context->outer_context->distribute)
13261 ctx = ctx->outer_context->outer_context;
13262 ctx = ctx->outer_context;
13264 if (!target_seen
13265 && lookup_attribute ("omp declare simd",
13266 DECL_ATTRIBUTES (current_function_decl)))
13268 /* Declare simd is a maybe case; it is supposed to be added only to
13269 the clones created by omp-simd-clone.cc and not to the base function.  */
13270 declare_simd_cnt = cnt++;
13271 if (scores)
13272 codes.safe_push (OMP_SIMD);
13273 else if (cnt == 0
13274 && constructs[0] == OMP_SIMD)
13276 gcc_assert (matched == 0);
13277 simd_seen = true;
13278 if (++matched == nconstructs)
13279 return -1;
13282 if (tree attr = lookup_attribute ("omp declare variant variant",
13283 DECL_ATTRIBUTES (current_function_decl)))
13285 enum tree_code variant_constructs[5];
13286 int variant_nconstructs = 0;
13287 if (!target_seen)
13288 variant_nconstructs
13289 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13290 variant_constructs);
13291 for (int i = 0; i < variant_nconstructs; i++)
13293 ++cnt;
13294 if (scores)
13295 codes.safe_push (variant_constructs[i]);
13296 else if (matched < nconstructs
13297 && variant_constructs[i] == constructs[matched])
13299 if (variant_constructs[i] == OMP_SIMD)
13301 if (matched)
13302 return 0;
13303 simd_seen = true;
13305 ++matched;
13309 if (!target_seen
13310 && lookup_attribute ("omp declare target block",
13311 DECL_ATTRIBUTES (current_function_decl)))
13313 if (scores)
13314 codes.safe_push (OMP_TARGET);
13315 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13316 ++matched;
13318 if (scores)
13320 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13322 int j = codes.length () - 1;
13323 for (int i = nconstructs - 1; i >= 0; i--)
13325 while (j >= 0
13326 && (pass != 0 || declare_simd_cnt != j)
13327 && constructs[i] != codes[j])
13328 --j;
13329 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13330 *scores++ = j - 1;
13331 else
13332 *scores++ = j;
13334 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13335 ? codes.length () - 1 : codes.length ());
13337 return declare_simd_cnt == -1 ? 1 : 2;
13339 if (matched == nconstructs)
13340 return simd_seen ? -1 : 1;
13341 return 0;
13344 /* Gimplify OACC_CACHE. */
13346 static void
13347 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13349 tree expr = *expr_p;
13351 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13352 OACC_CACHE);
13353 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13354 OACC_CACHE);
13356 /* TODO: Do something sensible with this information. */
13358 *expr_p = NULL_TREE;
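/* Illustrative example (not part of the original source): for
"#pragma acc cache (a[i:len])" the clauses are scanned and adjusted
like other OpenACC clauses, and the directive itself is then dropped
(see the TODO above).  */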
13361 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
13362 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
13363 kind. The entry kind will replace the one in CLAUSE, while the exit
13364 kind will be used in a new omp_clause and returned to the caller. */
13366 static tree
13367 gimplify_oacc_declare_1 (tree clause)
13369 HOST_WIDE_INT kind, new_op;
13370 bool ret = false;
13371 tree c = NULL;
13373 kind = OMP_CLAUSE_MAP_KIND (clause);
13375 switch (kind)
13377 case GOMP_MAP_ALLOC:
13378 new_op = GOMP_MAP_RELEASE;
13379 ret = true;
13380 break;
13382 case GOMP_MAP_FROM:
13383 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13384 new_op = GOMP_MAP_FROM;
13385 ret = true;
13386 break;
13388 case GOMP_MAP_TOFROM:
13389 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13390 new_op = GOMP_MAP_FROM;
13391 ret = true;
13392 break;
13394 case GOMP_MAP_DEVICE_RESIDENT:
13395 case GOMP_MAP_FORCE_DEVICEPTR:
13396 case GOMP_MAP_FORCE_PRESENT:
13397 case GOMP_MAP_LINK:
13398 case GOMP_MAP_POINTER:
13399 case GOMP_MAP_TO:
13400 break;
13402 default:
13403 gcc_unreachable ();
13404 break;
13407 if (ret)
13409 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13410 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13411 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13414 return c;
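/* E.g. (illustrative): for "#pragma acc declare copy(x)" the incoming
kind is GOMP_MAP_TOFROM; the entry kind becomes GOMP_MAP_TO and the
returned exit clause uses GOMP_MAP_FROM.  */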
13417 /* Gimplify OACC_DECLARE. */
13419 static void
13420 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13422 tree expr = *expr_p;
13423 gomp_target *stmt;
13424 tree clauses, t, decl;
13426 clauses = OACC_DECLARE_CLAUSES (expr);
13428 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13429 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13431 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13433 decl = OMP_CLAUSE_DECL (t);
13435 if (TREE_CODE (decl) == MEM_REF)
13436 decl = TREE_OPERAND (decl, 0);
13438 if (VAR_P (decl) && !is_oacc_declared (decl))
13440 tree attr = get_identifier ("oacc declare target");
13441 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13442 DECL_ATTRIBUTES (decl));
13445 if (VAR_P (decl)
13446 && !is_global_var (decl)
13447 && DECL_CONTEXT (decl) == current_function_decl)
13449 tree c = gimplify_oacc_declare_1 (t);
13450 if (c)
13452 if (oacc_declare_returns == NULL)
13453 oacc_declare_returns = new hash_map<tree, tree>;
13455 oacc_declare_returns->put (decl, c);
13459 if (gimplify_omp_ctxp)
13460 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13463 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13464 clauses);
13466 gimplify_seq_add_stmt (pre_p, stmt);
13468 *expr_p = NULL_TREE;
13471 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13472 gimplification of the body, as well as scanning the body for used
13473 variables. We need to do this scan now, because variable-sized
13474 decls will be decomposed during gimplification. */
13476 static void
13477 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13479 tree expr = *expr_p;
13480 gimple *g;
13481 gimple_seq body = NULL;
13483 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13484 OMP_PARALLEL_COMBINED (expr)
13485 ? ORT_COMBINED_PARALLEL
13486 : ORT_PARALLEL, OMP_PARALLEL);
13488 push_gimplify_context ();
13490 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13491 if (gimple_code (g) == GIMPLE_BIND)
13492 pop_gimplify_context (g);
13493 else
13494 pop_gimplify_context (NULL);
13496 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13497 OMP_PARALLEL);
13499 g = gimple_build_omp_parallel (body,
13500 OMP_PARALLEL_CLAUSES (expr),
13501 NULL_TREE, NULL_TREE);
13502 if (OMP_PARALLEL_COMBINED (expr))
13503 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13504 gimplify_seq_add_stmt (pre_p, g);
13505 *expr_p = NULL_TREE;
13508 /* Gimplify the contents of an OMP_TASK statement. This involves
13509 gimplification of the body, as well as scanning the body for used
13510 variables. We need to do this scan now, because variable-sized
13511 decls will be decomposed during gimplification. */
13513 static void
13514 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13516 tree expr = *expr_p;
13517 gimple *g;
13518 gimple_seq body = NULL;
13519 bool nowait = false;
13520 bool has_depend = false;
13522 if (OMP_TASK_BODY (expr) == NULL_TREE)
13524 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13525 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13527 has_depend = true;
13528 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13530 error_at (OMP_CLAUSE_LOCATION (c),
13531 "%<mutexinoutset%> kind in %<depend%> clause on a "
13532 "%<taskwait%> construct");
13533 break;
13536 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13537 nowait = true;
13538 if (nowait && !has_depend)
13540 error_at (EXPR_LOCATION (expr),
13541 "%<taskwait%> construct with %<nowait%> clause but no "
13542 "%<depend%> clauses");
13543 *expr_p = NULL_TREE;
13544 return;
13548 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13549 omp_find_clause (OMP_TASK_CLAUSES (expr),
13550 OMP_CLAUSE_UNTIED)
13551 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13553 if (OMP_TASK_BODY (expr))
13555 push_gimplify_context ();
13557 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13558 if (gimple_code (g) == GIMPLE_BIND)
13559 pop_gimplify_context (g);
13560 else
13561 pop_gimplify_context (NULL);
13564 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13565 OMP_TASK);
13567 g = gimple_build_omp_task (body,
13568 OMP_TASK_CLAUSES (expr),
13569 NULL_TREE, NULL_TREE,
13570 NULL_TREE, NULL_TREE, NULL_TREE);
13571 if (OMP_TASK_BODY (expr) == NULL_TREE)
13572 gimple_omp_task_set_taskwait_p (g, true);
13573 gimplify_seq_add_stmt (pre_p, g);
13574 *expr_p = NULL_TREE;
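/* Illustrative example (not part of the original source):
"#pragma omp taskwait depend(in: x)" has a null body and is gimplified
above as a body-less GIMPLE_OMP_TASK with the taskwait_p flag set.  */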
13577 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13578 force it into a temporary initialized in PRE_P and add firstprivate clause
13579 to ORIG_FOR_STMT. */
13581 static void
13582 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13583 tree orig_for_stmt)
13585 if (*tp == NULL || is_gimple_constant (*tp))
13586 return;
13588 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13589 /* A reference-to-pointer conversion is considered useless, but it
13590 is significant for the firstprivate clause.  Force it
13591 here.  */
13592 if (type
13593 && TREE_CODE (type) == POINTER_TYPE
13594 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13596 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13597 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13598 gimplify_and_add (m, pre_p);
13599 *tp = v;
13602 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13603 OMP_CLAUSE_DECL (c) = *tp;
13604 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13605 OMP_FOR_CLAUSES (orig_for_stmt) = c;
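/* E.g. (illustrative): in
"#pragma omp taskloop
for (i = 0; i < f (); i++)"
the call f () is not a gimple constant, so it is evaluated into a
temporary before the taskloop and that temporary is added as
firstprivate on the taskloop.  */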
13608 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
13609 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
13611 static tree
13612 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13614 switch (TREE_CODE (*tp))
13616 case OMP_ORDERED:
13617 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13618 return *tp;
13619 break;
13620 case OMP_SIMD:
13621 case OMP_PARALLEL:
13622 case OMP_TARGET:
13623 *walk_subtrees = 0;
13624 break;
13625 default:
13626 break;
13628 return NULL_TREE;
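/* E.g. (illustrative): this callback detects a stand-alone
"#pragma omp ordered depend(sink: i - 1)" (an OMP_ORDERED with a null
body) nested in the loop, without walking into nested simd, parallel
or target regions.  */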
13631 /* Gimplify the gross structure of an OMP_FOR statement. */
13633 static enum gimplify_status
13634 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13636 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13637 enum gimplify_status ret = GS_ALL_DONE;
13638 enum gimplify_status tret;
13639 gomp_for *gfor;
13640 gimple_seq for_body, for_pre_body;
13641 int i;
13642 bitmap has_decl_expr = NULL;
13643 enum omp_region_type ort = ORT_WORKSHARE;
13644 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13646 orig_for_stmt = for_stmt = *expr_p;
13648 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13649 != NULL_TREE);
13650 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13652 tree *data[4] = { NULL, NULL, NULL, NULL };
13653 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13654 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13655 find_combined_omp_for, data, NULL);
13656 if (inner_for_stmt == NULL_TREE)
13658 gcc_assert (seen_error ());
13659 *expr_p = NULL_TREE;
13660 return GS_ERROR;
13662 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13664 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13665 &OMP_FOR_PRE_BODY (for_stmt));
13666 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13668 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13670 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13671 &OMP_FOR_PRE_BODY (for_stmt));
13672 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13675 if (data[0])
13677 /* We have some statements or variable declarations in between
13678 the composite construct directives. Move them around the
13679 inner_for_stmt. */
13680 data[0] = expr_p;
13681 for (i = 0; i < 3; i++)
13682 if (data[i])
13684 tree t = *data[i];
13685 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13686 data[i + 1] = data[i];
13687 *data[i] = OMP_BODY (t);
13688 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13689 NULL_TREE, make_node (BLOCK));
13690 OMP_BODY (t) = body;
13691 append_to_statement_list_force (inner_for_stmt,
13692 &BIND_EXPR_BODY (body));
13693 *data[3] = t;
13694 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13695 gcc_assert (*data[3] == inner_for_stmt);
13697 return GS_OK;
13700 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13701 if (!loop_p
13702 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13703 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13704 i)) == TREE_LIST
13705 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13706 i)))
13708 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13709 /* Class iterators aren't allowed on OMP_SIMD, so the only
13710 case we need to solve is distribute parallel for. They are
13711 allowed on the loop construct, but that is already handled
13712 in gimplify_omp_loop. */
13713 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13714 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13715 && data[1]);
13716 tree orig_decl = TREE_PURPOSE (orig);
13717 tree last = TREE_VALUE (orig);
13718 tree *pc;
13719 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13720 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13721 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13722 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13723 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13724 break;
13725 if (*pc == NULL_TREE)
13727 tree *spc;
13728 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13729 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13730 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13731 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13732 break;
13733 if (*spc)
13735 tree c = *spc;
13736 *spc = OMP_CLAUSE_CHAIN (c);
13737 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13738 *pc = c;
13741 if (*pc == NULL_TREE)
13743 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13745 /* The private clause will appear only on inner_for_stmt.
13746 Change it into firstprivate, and add a private clause
13747 on for_stmt.  */
13748 tree c = copy_node (*pc);
13749 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13750 OMP_FOR_CLAUSES (for_stmt) = c;
13751 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13752 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13754 else
13756 /* The lastprivate clause will appear on both inner_for_stmt
13757 and for_stmt.  Add a firstprivate clause to
13758 inner_for_stmt.  */
13759 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13760 OMP_CLAUSE_FIRSTPRIVATE);
13761 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13762 OMP_CLAUSE_CHAIN (c) = *pc;
13763 *pc = c;
13764 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13766 tree c = build_omp_clause (UNKNOWN_LOCATION,
13767 OMP_CLAUSE_FIRSTPRIVATE);
13768 OMP_CLAUSE_DECL (c) = last;
13769 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13770 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13771 c = build_omp_clause (UNKNOWN_LOCATION,
13772 *pc ? OMP_CLAUSE_SHARED
13773 : OMP_CLAUSE_FIRSTPRIVATE);
13774 OMP_CLAUSE_DECL (c) = orig_decl;
13775 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13776 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13778 /* Similarly, take care of C++ range-for temporaries; those should
13779 be firstprivate on OMP_PARALLEL if any.  */
13780 if (data[1])
13781 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13782 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13783 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13784 i)) == TREE_LIST
13785 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13786 i)))
13788 tree orig
13789 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13790 tree v = TREE_CHAIN (orig);
13791 tree c = build_omp_clause (UNKNOWN_LOCATION,
13792 OMP_CLAUSE_FIRSTPRIVATE);
13793 /* First add firstprivate clause for the __for_end artificial
13794 decl. */
13795 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13796 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13797 == REFERENCE_TYPE)
13798 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13799 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13800 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13801 if (TREE_VEC_ELT (v, 0))
13803 /* And now the same for __for_range artificial decl if it
13804 exists. */
13805 c = build_omp_clause (UNKNOWN_LOCATION,
13806 OMP_CLAUSE_FIRSTPRIVATE);
13807 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13808 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13809 == REFERENCE_TYPE)
13810 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13811 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13812 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13817 switch (TREE_CODE (for_stmt))
13819 case OMP_FOR:
13820 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13822 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13823 OMP_CLAUSE_SCHEDULE))
13824 error_at (EXPR_LOCATION (for_stmt),
13825 "%qs clause may not appear on non-rectangular %qs",
13826 "schedule", lang_GNU_Fortran () ? "do" : "for");
13827 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13828 error_at (EXPR_LOCATION (for_stmt),
13829 "%qs clause may not appear on non-rectangular %qs",
13830 "ordered", lang_GNU_Fortran () ? "do" : "for");
13832 break;
13833 case OMP_DISTRIBUTE:
13834 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13835 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13836 OMP_CLAUSE_DIST_SCHEDULE))
13837 error_at (EXPR_LOCATION (for_stmt),
13838 "%qs clause may not appear on non-rectangular %qs",
13839 "dist_schedule", "distribute");
13840 break;
13841 case OACC_LOOP:
13842 ort = ORT_ACC;
13843 break;
13844 case OMP_TASKLOOP:
13845 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13847 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13848 OMP_CLAUSE_GRAINSIZE))
13849 error_at (EXPR_LOCATION (for_stmt),
13850 "%qs clause may not appear on non-rectangular %qs",
13851 "grainsize", "taskloop");
13852 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13853 OMP_CLAUSE_NUM_TASKS))
13854 error_at (EXPR_LOCATION (for_stmt),
13855 "%qs clause may not appear on non-rectangular %qs",
13856 "num_tasks", "taskloop");
13858 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13859 ort = ORT_UNTIED_TASKLOOP;
13860 else
13861 ort = ORT_TASKLOOP;
13862 break;
13863 case OMP_SIMD:
13864 ort = ORT_SIMD;
13865 break;
13866 default:
13867 gcc_unreachable ();
13870 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
13871 clause for the IV. */
13872 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13874 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13875 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13876 decl = TREE_OPERAND (t, 0);
13877 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13878 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13879 && OMP_CLAUSE_DECL (c) == decl)
13881 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13882 break;
13886 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13887 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13888 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13889 ? OMP_LOOP : TREE_CODE (for_stmt));
13891 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13892 gimplify_omp_ctxp->distribute = true;
13894 /* Handle OMP_FOR_INIT. */
13895 for_pre_body = NULL;
13896 if ((ort == ORT_SIMD
13897 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13898 && OMP_FOR_PRE_BODY (for_stmt))
13900 has_decl_expr = BITMAP_ALLOC (NULL);
13901 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13902 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
13904 t = OMP_FOR_PRE_BODY (for_stmt);
13905 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13907 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13909 tree_stmt_iterator si;
13910 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13911 tsi_next (&si))
13913 t = tsi_stmt (si);
13914 if (TREE_CODE (t) == DECL_EXPR
13915 && VAR_P (DECL_EXPR_DECL (t)))
13916 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13920 if (OMP_FOR_PRE_BODY (for_stmt))
13922 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
13923 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13924 else
13926 struct gimplify_omp_ctx ctx;
13927 memset (&ctx, 0, sizeof (ctx));
13928 ctx.region_type = ORT_NONE;
13929 gimplify_omp_ctxp = &ctx;
13930 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13931 gimplify_omp_ctxp = NULL;
13934 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
13936 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13937 for_stmt = inner_for_stmt;
13939 /* For taskloop, we need to gimplify the start, end and step before the
13940 taskloop, outside of the taskloop omp context.  */
13941 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13943 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13945 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13946 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
13947 ? pre_p : &for_pre_body);
13948 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
13949 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13951 tree v = TREE_OPERAND (t, 1);
13952 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13953 for_pre_p, orig_for_stmt);
13954 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13955 for_pre_p, orig_for_stmt);
13957 else
13958 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13959 orig_for_stmt);
13961 /* Handle OMP_FOR_COND. */
13962 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13963 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13965 tree v = TREE_OPERAND (t, 1);
13966 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13967 for_pre_p, orig_for_stmt);
13968 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13969 for_pre_p, orig_for_stmt);
13971 else
13972 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13973 orig_for_stmt);
13975 /* Handle OMP_FOR_INCR. */
13976 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13977 if (TREE_CODE (t) == MODIFY_EXPR)
13979 decl = TREE_OPERAND (t, 0);
13980 t = TREE_OPERAND (t, 1);
13981 tree *tp = &TREE_OPERAND (t, 1);
13982 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
13983 tp = &TREE_OPERAND (t, 0);
13985 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
13986 orig_for_stmt);
13990 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
13991 OMP_TASKLOOP);
13994 if (orig_for_stmt != for_stmt)
13995 gimplify_omp_ctxp->combined_loop = true;
13997 for_body = NULL;
13998 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13999 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
14000 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14001 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
14003 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
14004 bool is_doacross = false;
14005 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
14006 find_standalone_omp_ordered, NULL))
14008 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
14009 is_doacross = true;
14010 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
14011 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
14012 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14013 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
14015 error_at (OMP_CLAUSE_LOCATION (*pc),
14016 "%<linear%> clause may not be specified together "
14017 "with %<ordered%> clause if stand-alone %<ordered%> "
14018 "construct is nested in it");
14019 *pc = OMP_CLAUSE_CHAIN (*pc);
14021 else
14022 pc = &OMP_CLAUSE_CHAIN (*pc);
14024 int collapse = 1, tile = 0;
14025 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
14026 if (c)
14027 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
14028 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
14029 if (c)
14030 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
14031 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
14032 hash_set<tree> *allocate_uids = NULL;
14033 if (c)
14035 allocate_uids = new hash_set<tree>;
14036 for (; c; c = OMP_CLAUSE_CHAIN (c))
14037 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
14038 allocate_uids->add (OMP_CLAUSE_DECL (c));
14040 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14042 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14043 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14044 decl = TREE_OPERAND (t, 0);
14045 gcc_assert (DECL_P (decl));
14046 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
14047 || POINTER_TYPE_P (TREE_TYPE (decl)));
14048 if (is_doacross)
14050 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
14052 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14053 if (TREE_CODE (orig_decl) == TREE_LIST)
14055 orig_decl = TREE_PURPOSE (orig_decl);
14056 if (!orig_decl)
14057 orig_decl = decl;
14059 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
14061 else
14062 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14063 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14066 if (for_stmt == orig_for_stmt)
14068 tree orig_decl = decl;
14069 if (OMP_FOR_ORIG_DECLS (for_stmt))
14071 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14072 if (TREE_CODE (orig_decl) == TREE_LIST)
14074 orig_decl = TREE_PURPOSE (orig_decl);
14075 if (!orig_decl)
14076 orig_decl = decl;
14079 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
14080 error_at (EXPR_LOCATION (for_stmt),
14081 "threadprivate iteration variable %qD", orig_decl);
14084 /* Make sure the iteration variable is private. */
14085 tree c = NULL_TREE;
14086 tree c2 = NULL_TREE;
14087 if (orig_for_stmt != for_stmt)
14089 /* Preserve this information until we gimplify the inner simd. */
14090 if (has_decl_expr
14091 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14092 TREE_PRIVATE (t) = 1;
14094 else if (ort == ORT_SIMD)
14096 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14097 (splay_tree_key) decl);
14098 omp_is_private (gimplify_omp_ctxp, decl,
14099 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14100 != 1));
14101 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14103 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14104 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14105 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14106 OMP_CLAUSE_LASTPRIVATE);
14107 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14108 OMP_CLAUSE_LASTPRIVATE))
14109 if (OMP_CLAUSE_DECL (c3) == decl)
14111 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14112 "conditional %<lastprivate%> on loop "
14113 "iterator %qD ignored", decl);
14114 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14115 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14118 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14120 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14121 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14122 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14123 if ((has_decl_expr
14124 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14125 || TREE_PRIVATE (t))
14127 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14128 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14130 struct gimplify_omp_ctx *outer
14131 = gimplify_omp_ctxp->outer_context;
14132 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14134 if (outer->region_type == ORT_WORKSHARE
14135 && outer->combined_loop)
14137 n = splay_tree_lookup (outer->variables,
14138 (splay_tree_key)decl);
14139 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14141 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14142 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14144 else
14146 struct gimplify_omp_ctx *octx = outer->outer_context;
14147 if (octx
14148 && octx->region_type == ORT_COMBINED_PARALLEL
14149 && octx->outer_context
14150 && (octx->outer_context->region_type
14151 == ORT_WORKSHARE)
14152 && octx->outer_context->combined_loop)
14154 octx = octx->outer_context;
14155 n = splay_tree_lookup (octx->variables,
14156 (splay_tree_key)decl);
14157 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14159 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14160 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14167 OMP_CLAUSE_DECL (c) = decl;
14168 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14169 OMP_FOR_CLAUSES (for_stmt) = c;
14170 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14171 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14172 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14173 true);
14175 else
14177 bool lastprivate
14178 = (!has_decl_expr
14179 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14180 if (TREE_PRIVATE (t))
14181 lastprivate = false;
14182 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14184 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14185 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14186 lastprivate = false;
14189 struct gimplify_omp_ctx *outer
14190 = gimplify_omp_ctxp->outer_context;
14191 if (outer && lastprivate)
14192 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14193 true);
14195 c = build_omp_clause (input_location,
14196 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14197 : OMP_CLAUSE_PRIVATE);
14198 OMP_CLAUSE_DECL (c) = decl;
14199 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14200 OMP_FOR_CLAUSES (for_stmt) = c;
14201 omp_add_variable (gimplify_omp_ctxp, decl,
14202 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14203 | GOVD_EXPLICIT | GOVD_SEEN);
14204 c = NULL_TREE;
14207 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14209 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14210 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14211 (splay_tree_key) decl);
14212 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14213 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14214 OMP_CLAUSE_LASTPRIVATE);
14215 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14216 OMP_CLAUSE_LASTPRIVATE))
14217 if (OMP_CLAUSE_DECL (c3) == decl)
14219 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14220 "conditional %<lastprivate%> on loop "
14221 "iterator %qD ignored", decl);
14222 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14223 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14226 else
14227 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14229 /* If DECL is not a gimple register, create a temporary variable to act
14230 as an iteration counter. This is valid, since DECL cannot be
14231 modified in the body of the loop. Similarly for any iteration vars
14232 in simd with collapse > 1 where the iterator vars must be
14233 lastprivate. And similarly for vars mentioned in allocate clauses. */
14234 if (orig_for_stmt != for_stmt)
14235 var = decl;
14236 else if (!is_gimple_reg (decl)
14237 || (ort == ORT_SIMD
14238 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14239 || (allocate_uids && allocate_uids->contains (decl)))
14241 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14242 /* Make sure omp_add_variable is not called on it prematurely.
14243 We call it ourselves a few lines later. */
14244 gimplify_omp_ctxp = NULL;
14245 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14246 gimplify_omp_ctxp = ctx;
14247 TREE_OPERAND (t, 0) = var;
14249 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14251 if (ort == ORT_SIMD
14252 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14254 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14255 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14256 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14257 OMP_CLAUSE_DECL (c2) = var;
14258 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14259 OMP_FOR_CLAUSES (for_stmt) = c2;
14260 omp_add_variable (gimplify_omp_ctxp, var,
14261 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14262 if (c == NULL_TREE)
14264 c = c2;
14265 c2 = NULL_TREE;
14268 else
14269 omp_add_variable (gimplify_omp_ctxp, var,
14270 GOVD_PRIVATE | GOVD_SEEN);
14272 else
14273 var = decl;
14275 gimplify_omp_ctxp->in_for_exprs = true;
14276 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14278 tree lb = TREE_OPERAND (t, 1);
14279 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14280 is_gimple_val, fb_rvalue, false);
14281 ret = MIN (ret, tret);
14282 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14283 is_gimple_val, fb_rvalue, false);
14285 else
14286 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14287 is_gimple_val, fb_rvalue, false);
14288 gimplify_omp_ctxp->in_for_exprs = false;
14289 ret = MIN (ret, tret);
14290 if (ret == GS_ERROR)
14291 return ret;
14293 /* Handle OMP_FOR_COND. */
14294 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14295 gcc_assert (COMPARISON_CLASS_P (t));
14296 gcc_assert (TREE_OPERAND (t, 0) == decl);
14298 gimplify_omp_ctxp->in_for_exprs = true;
14299 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14301 tree ub = TREE_OPERAND (t, 1);
14302 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14303 is_gimple_val, fb_rvalue, false);
14304 ret = MIN (ret, tret);
14305 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14306 is_gimple_val, fb_rvalue, false);
14308 else
14309 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14310 is_gimple_val, fb_rvalue, false);
14311 gimplify_omp_ctxp->in_for_exprs = false;
14312 ret = MIN (ret, tret);
14314 /* Handle OMP_FOR_INCR. */
14315 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14316 switch (TREE_CODE (t))
14318 case PREINCREMENT_EXPR:
14319 case POSTINCREMENT_EXPR:
14321 tree decl = TREE_OPERAND (t, 0);
14322 /* c_omp_for_incr_canonicalize_ptr() should have been
14323 called to massage things appropriately. */
14324 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14326 if (orig_for_stmt != for_stmt)
14327 break;
14328 t = build_int_cst (TREE_TYPE (decl), 1);
14329 if (c)
14330 OMP_CLAUSE_LINEAR_STEP (c) = t;
14331 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14332 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14333 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14334 break;
14337 case PREDECREMENT_EXPR:
14338 case POSTDECREMENT_EXPR:
14339 /* c_omp_for_incr_canonicalize_ptr() should have been
14340 called to massage things appropriately. */
14341 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14342 if (orig_for_stmt != for_stmt)
14343 break;
14344 t = build_int_cst (TREE_TYPE (decl), -1);
14345 if (c)
14346 OMP_CLAUSE_LINEAR_STEP (c) = t;
14347 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14348 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14349 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14350 break;
14352 case MODIFY_EXPR:
14353 gcc_assert (TREE_OPERAND (t, 0) == decl);
14354 TREE_OPERAND (t, 0) = var;
14356 t = TREE_OPERAND (t, 1);
14357 switch (TREE_CODE (t))
14359 case PLUS_EXPR:
14360 if (TREE_OPERAND (t, 1) == decl)
14362 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14363 TREE_OPERAND (t, 0) = var;
14364 break;
14367 /* Fallthru. */
14368 case MINUS_EXPR:
14369 case POINTER_PLUS_EXPR:
14370 gcc_assert (TREE_OPERAND (t, 0) == decl);
14371 TREE_OPERAND (t, 0) = var;
14372 break;
14373 default:
14374 gcc_unreachable ();
14377 gimplify_omp_ctxp->in_for_exprs = true;
14378 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14379 is_gimple_val, fb_rvalue, false);
14380 ret = MIN (ret, tret);
14381 if (c)
14383 tree step = TREE_OPERAND (t, 1);
14384 tree stept = TREE_TYPE (decl);
14385 if (POINTER_TYPE_P (stept))
14386 stept = sizetype;
14387 step = fold_convert (stept, step);
14388 if (TREE_CODE (t) == MINUS_EXPR)
14389 step = fold_build1 (NEGATE_EXPR, stept, step);
14390 OMP_CLAUSE_LINEAR_STEP (c) = step;
14391 if (step != TREE_OPERAND (t, 1))
14393 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14394 &for_pre_body, NULL,
14395 is_gimple_val, fb_rvalue, false);
14396 ret = MIN (ret, tret);
14399 gimplify_omp_ctxp->in_for_exprs = false;
14400 break;
14402 default:
14403 gcc_unreachable ();
14406 if (c2)
14408 gcc_assert (c);
14409 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14412 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14414 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14415 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14416 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14417 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14418 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14419 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14420 && OMP_CLAUSE_DECL (c) == decl)
14422 if (is_doacross && (collapse == 1 || i >= collapse))
14423 t = var;
14424 else
14426 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14427 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14428 gcc_assert (TREE_OPERAND (t, 0) == var);
14429 t = TREE_OPERAND (t, 1);
14430 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14431 || TREE_CODE (t) == MINUS_EXPR
14432 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14433 gcc_assert (TREE_OPERAND (t, 0) == var);
14434 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14435 is_doacross ? var : decl,
14436 TREE_OPERAND (t, 1));
14438 gimple_seq *seq;
14439 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14440 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14441 else
14442 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14443 push_gimplify_context ();
14444 gimplify_assign (decl, t, seq);
14445 gimple *bind = NULL;
14446 if (gimplify_ctxp->temps)
14448 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14449 *seq = NULL;
14450 gimplify_seq_add_stmt (seq, bind);
14452 pop_gimplify_context (bind);
14455 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14456 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14458 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14459 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14460 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14461 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14462 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14463 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14464 gcc_assert (COMPARISON_CLASS_P (t));
14465 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14466 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14467 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14471 BITMAP_FREE (has_decl_expr);
14472 delete allocate_uids;
14474 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14475 || (loop_p && orig_for_stmt == for_stmt))
14477 push_gimplify_context ();
14478 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14480 OMP_FOR_BODY (orig_for_stmt)
14481 = build3 (BIND_EXPR, void_type_node, NULL,
14482 OMP_FOR_BODY (orig_for_stmt), NULL);
14483 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14487 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14488 &for_body);
14490 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14491 || (loop_p && orig_for_stmt == for_stmt))
14493 if (gimple_code (g) == GIMPLE_BIND)
14494 pop_gimplify_context (g);
14495 else
14496 pop_gimplify_context (NULL);
14499 if (orig_for_stmt != for_stmt)
14500 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14502 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14503 decl = TREE_OPERAND (t, 0);
14504 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14505 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14506 gimplify_omp_ctxp = ctx->outer_context;
14507 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14508 gimplify_omp_ctxp = ctx;
14509 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14510 TREE_OPERAND (t, 0) = var;
14511 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14512 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14513 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14514 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14515 for (int j = i + 1;
14516 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14518 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14519 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14520 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14521 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14523 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14524 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14526 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14527 gcc_assert (COMPARISON_CLASS_P (t));
14528 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14529 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14531 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14532 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14537 gimplify_adjust_omp_clauses (pre_p, for_body,
14538 &OMP_FOR_CLAUSES (orig_for_stmt),
14539 TREE_CODE (orig_for_stmt));
14541 int kind;
14542 switch (TREE_CODE (orig_for_stmt))
14544 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14545 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14546 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14547 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14548 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14549 default:
14550 gcc_unreachable ();
14552 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14554 gimplify_seq_add_seq (pre_p, for_pre_body);
14555 for_pre_body = NULL;
14557 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14558 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14559 for_pre_body);
14560 if (orig_for_stmt != for_stmt)
14561 gimple_omp_for_set_combined_p (gfor, true);
14562 if (gimplify_omp_ctxp
14563 && (gimplify_omp_ctxp->combined_loop
14564 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14565 && gimplify_omp_ctxp->outer_context
14566 && gimplify_omp_ctxp->outer_context->combined_loop)))
14568 gimple_omp_for_set_combined_into_p (gfor, true);
14569 if (gimplify_omp_ctxp->combined_loop)
14570 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14571 else
14572 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14575 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14577 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14578 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14579 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14580 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14581 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14582 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14583 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14584 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14587 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14588 constructs with GIMPLE_OMP_TASK sandwiched in between them.
14589 The outer taskloop stands for computing the number of iterations,
14590 counts for collapsed loops and holding taskloop specific clauses.
14591 The task construct stands for the effect of data sharing on the
14592 explicit task it creates and the inner taskloop stands for expansion
14593 of the static loop inside of the explicit task construct. */
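/* Illustrative sketch (not part of the original source):
"#pragma omp taskloop firstprivate(x) grainsize(g)
for (i = 0; i < n; i++) ..."
is lowered roughly as
GIMPLE_OMP_FOR (taskloop; grainsize and the other outer clauses)
GIMPLE_OMP_TASK (firstprivate(x) and the other data-sharing clauses)
GIMPLE_OMP_FOR (taskloop; lastprivate etc.)
loop body
with collapse duplicated on both taskloops by the switch below.  */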
14594 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14596 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14597 tree task_clauses = NULL_TREE;
14598 tree c = *gfor_clauses_ptr;
14599 tree *gtask_clauses_ptr = &task_clauses;
14600 tree outer_for_clauses = NULL_TREE;
14601 tree *gforo_clauses_ptr = &outer_for_clauses;
14602 bitmap lastprivate_uids = NULL;
14603 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14605 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14606 if (c)
14608 lastprivate_uids = BITMAP_ALLOC (NULL);
14609 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14610 OMP_CLAUSE_LASTPRIVATE))
14611 bitmap_set_bit (lastprivate_uids,
14612 DECL_UID (OMP_CLAUSE_DECL (c)));
14614 c = *gfor_clauses_ptr;
14616 for (; c; c = OMP_CLAUSE_CHAIN (c))
14617 switch (OMP_CLAUSE_CODE (c))
14619 /* These clauses are allowed on task; move them there.  */
14620 case OMP_CLAUSE_SHARED:
14621 case OMP_CLAUSE_FIRSTPRIVATE:
14622 case OMP_CLAUSE_DEFAULT:
14623 case OMP_CLAUSE_IF:
14624 case OMP_CLAUSE_UNTIED:
14625 case OMP_CLAUSE_FINAL:
14626 case OMP_CLAUSE_MERGEABLE:
14627 case OMP_CLAUSE_PRIORITY:
14628 case OMP_CLAUSE_REDUCTION:
14629 case OMP_CLAUSE_IN_REDUCTION:
14630 *gtask_clauses_ptr = c;
14631 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14632 break;
14633 case OMP_CLAUSE_PRIVATE:
14634 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14636 /* We want private on outer for and firstprivate
14637 on task. */
14638 *gtask_clauses_ptr
14639 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14640 OMP_CLAUSE_FIRSTPRIVATE);
14641 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14642 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14643 openacc);
14644 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14645 *gforo_clauses_ptr = c;
14646 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14648 else
14650 *gtask_clauses_ptr = c;
14651 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14653 break;
14654 /* These clauses go into outer taskloop clauses. */
14655 case OMP_CLAUSE_GRAINSIZE:
14656 case OMP_CLAUSE_NUM_TASKS:
14657 case OMP_CLAUSE_NOGROUP:
14658 *gforo_clauses_ptr = c;
14659 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14660 break;
14661 /* The collapse clause is duplicated on both taskloops.  */
14662 case OMP_CLAUSE_COLLAPSE:
14663 *gfor_clauses_ptr = c;
14664 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14665 *gforo_clauses_ptr = copy_node (c);
14666 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14667 break;
14668 /* For lastprivate, keep the clause on the inner taskloop, and add
14669 a shared clause on the task. If the same decl is also firstprivate,
14670 also add a firstprivate clause on the inner taskloop. */
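/* For example (illustrative): given lastprivate(x) on the taskloop, the
   inner taskloop keeps lastprivate(x) and the task gets shared(x),
   marked firstprivate as well if x was also firstprivate.  */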
14671 case OMP_CLAUSE_LASTPRIVATE:
14672 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14674 /* For taskloop C++ lastprivate IVs, we want:
14675 1) private on outer taskloop
14676 2) firstprivate and shared on task
14677 3) lastprivate on inner taskloop */
14678 *gtask_clauses_ptr
14679 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14680 OMP_CLAUSE_FIRSTPRIVATE);
14681 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14682 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14683 openacc);
14684 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14685 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14686 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14687 OMP_CLAUSE_PRIVATE);
14688 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14689 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14690 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14691 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14693 *gfor_clauses_ptr = c;
14694 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14695 *gtask_clauses_ptr
14696 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14697 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14698 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14699 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14700 gtask_clauses_ptr
14701 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14702 break;
14703 /* The allocate clause we duplicate on the task and the inner taskloop
14704 if the decl is lastprivate, otherwise we just put it on the task. */
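/* For example (illustrative): allocate(al: x) together with
   lastprivate(x) is duplicated on the task and the inner taskloop,
   while allocate(al: y) with firstprivate(y) goes on the task only;
   if the allocator al is a non-constant decl, the task additionally
   gets firstprivate(al).  */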
14705 case OMP_CLAUSE_ALLOCATE:
14706 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14707 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14709 /* Additionally, put firstprivate clause on task
14710 for the allocator if it is not constant. */
14711 *gtask_clauses_ptr
14712 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14713 OMP_CLAUSE_FIRSTPRIVATE);
14714 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14715 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14716 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14718 if (lastprivate_uids
14719 && bitmap_bit_p (lastprivate_uids,
14720 DECL_UID (OMP_CLAUSE_DECL (c))))
14722 *gfor_clauses_ptr = c;
14723 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14724 *gtask_clauses_ptr = copy_node (c);
14725 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14727 else
14729 *gtask_clauses_ptr = c;
14730 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14732 break;
14733 default:
14734 gcc_unreachable ();
14736 *gfor_clauses_ptr = NULL_TREE;
14737 *gtask_clauses_ptr = NULL_TREE;
14738 *gforo_clauses_ptr = NULL_TREE;
14739 BITMAP_FREE (lastprivate_uids);
14740 gimple_set_location (gfor, input_location);
14741 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14742 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14743 NULL_TREE, NULL_TREE, NULL_TREE);
14744 gimple_set_location (g, input_location);
14745 gimple_omp_task_set_taskloop_p (g, true);
14746 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14747 gomp_for *gforo
14748 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14749 gimple_omp_for_collapse (gfor),
14750 gimple_omp_for_pre_body (gfor));
14751 gimple_omp_for_set_pre_body (gfor, NULL);
14752 gimple_omp_for_set_combined_p (gforo, true);
14753 gimple_omp_for_set_combined_into_p (gfor, true);
14754 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14756 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14757 tree v = create_tmp_var (type);
14758 gimple_omp_for_set_index (gforo, i, v);
14759 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14760 gimple_omp_for_set_initial (gforo, i, t);
14761 gimple_omp_for_set_cond (gforo, i,
14762 gimple_omp_for_cond (gfor, i));
14763 t = unshare_expr (gimple_omp_for_final (gfor, i));
14764 gimple_omp_for_set_final (gforo, i, t);
14765 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14766 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14767 TREE_OPERAND (t, 0) = v;
14768 gimple_omp_for_set_incr (gforo, i, t);
14769 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14770 OMP_CLAUSE_DECL (t) = v;
14771 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14772 gimple_omp_for_set_clauses (gforo, t);
14773 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14775 tree *p1 = NULL, *p2 = NULL;
14776 t = gimple_omp_for_initial (gforo, i);
14777 if (TREE_CODE (t) == TREE_VEC)
14778 p1 = &TREE_VEC_ELT (t, 0);
14779 t = gimple_omp_for_final (gforo, i);
14780 if (TREE_CODE (t) == TREE_VEC)
14782 if (p1)
14783 p2 = &TREE_VEC_ELT (t, 0);
14784 else
14785 p1 = &TREE_VEC_ELT (t, 0);
14787 if (p1)
14789 int j;
14790 for (j = 0; j < i; j++)
14791 if (*p1 == gimple_omp_for_index (gfor, j))
14793 *p1 = gimple_omp_for_index (gforo, j);
14794 if (p2)
14795 *p2 = *p1;
14796 break;
14798 gcc_assert (j < i);
14802 gimplify_seq_add_stmt (pre_p, gforo);
14804 else
14805 gimplify_seq_add_stmt (pre_p, gfor);
14807 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14809 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14810 unsigned lastprivate_conditional = 0;
14811 while (ctx
14812 && (ctx->region_type == ORT_TARGET_DATA
14813 || ctx->region_type == ORT_TASKGROUP))
14814 ctx = ctx->outer_context;
14815 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14816 for (tree c = gimple_omp_for_clauses (gfor);
14817 c; c = OMP_CLAUSE_CHAIN (c))
14818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14819 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14820 ++lastprivate_conditional;
14821 if (lastprivate_conditional)
14823 struct omp_for_data fd;
14824 omp_extract_for_data (gfor, &fd, NULL);
14825 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14826 lastprivate_conditional);
14827 tree var = create_tmp_var_raw (type);
14828 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14829 OMP_CLAUSE_DECL (c) = var;
14830 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14831 gimple_omp_for_set_clauses (gfor, c);
14832 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14835 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14837 unsigned lastprivate_conditional = 0;
14838 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14839 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14840 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14841 ++lastprivate_conditional;
14842 if (lastprivate_conditional)
14844 struct omp_for_data fd;
14845 omp_extract_for_data (gfor, &fd, NULL);
14846 tree type = unsigned_type_for (fd.iter_type);
14847 while (lastprivate_conditional--)
14849 tree c = build_omp_clause (UNKNOWN_LOCATION,
14850 OMP_CLAUSE__CONDTEMP_);
14851 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14852 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14853 gimple_omp_for_set_clauses (gfor, c);
14858 if (ret != GS_ALL_DONE)
14859 return GS_ERROR;
14860 *expr_p = NULL_TREE;
14861 return GS_ALL_DONE;
14864 /* Helper for gimplify_omp_loop, called through walk_tree. */
14866 static tree
14867 note_no_context_vars (tree *tp, int *, void *data)
14869 if (VAR_P (*tp)
14870 && DECL_CONTEXT (*tp) == NULL_TREE
14871 && !is_global_var (*tp))
14873 vec<tree> *d = (vec<tree> *) data;
14874 d->safe_push (*tp);
14875 DECL_CONTEXT (*tp) = current_function_decl;
14877 return NULL_TREE;
14880 /* Gimplify the gross structure of an OMP_LOOP statement. */
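/* Illustrative summary (simplified): depending on the effective
   binding, the loop construct is rewritten below into

     bind(thread)    ->  simd
     bind(parallel)  ->  for simd
     bind(teams)     ->  distribute parallel for simd

   with the loop's clauses distributed among the generated
   constructs.  */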
14882 static enum gimplify_status
14883 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14885 tree for_stmt = *expr_p;
14886 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14887 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14888 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14889 int i;
14891 /* If order is not present, the behavior is as if order(concurrent)
14892 appeared. */
14893 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14894 if (order == NULL_TREE)
14896 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14897 OMP_CLAUSE_CHAIN (order) = clauses;
14898 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14901 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14902 if (bind == NULL_TREE)
14904 if (!flag_openmp) /* flag_openmp_simd */
14906 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14907 kind = OMP_CLAUSE_BIND_TEAMS;
14908 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14909 kind = OMP_CLAUSE_BIND_PARALLEL;
14910 else
14912 for (; octx; octx = octx->outer_context)
14914 if ((octx->region_type & ORT_ACC) != 0
14915 || octx->region_type == ORT_NONE
14916 || octx->region_type == ORT_IMPLICIT_TARGET)
14917 continue;
14918 break;
14920 if (octx == NULL && !in_omp_construct)
14921 error_at (EXPR_LOCATION (for_stmt),
14922 "%<bind%> clause not specified on a %<loop%> "
14923 "construct not nested inside another OpenMP construct");
14925 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
14926 OMP_CLAUSE_CHAIN (bind) = clauses;
14927 OMP_CLAUSE_BIND_KIND (bind) = kind;
14928 OMP_FOR_CLAUSES (for_stmt) = bind;
14930 else
14931 switch (OMP_CLAUSE_BIND_KIND (bind))
14933 case OMP_CLAUSE_BIND_THREAD:
14934 break;
14935 case OMP_CLAUSE_BIND_PARALLEL:
14936 if (!flag_openmp) /* flag_openmp_simd */
14938 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14939 break;
14941 for (; octx; octx = octx->outer_context)
14942 if (octx->region_type == ORT_SIMD
14943 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
14945 error_at (EXPR_LOCATION (for_stmt),
14946 "%<bind(parallel)%> on a %<loop%> construct nested "
14947 "inside %<simd%> construct");
14948 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14949 break;
14951 kind = OMP_CLAUSE_BIND_PARALLEL;
14952 break;
14953 case OMP_CLAUSE_BIND_TEAMS:
14954 if (!flag_openmp) /* flag_openmp_simd */
14956 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14957 break;
14959 if ((octx
14960 && octx->region_type != ORT_IMPLICIT_TARGET
14961 && octx->region_type != ORT_NONE
14962 && (octx->region_type & ORT_TEAMS) == 0)
14963 || in_omp_construct)
14965 error_at (EXPR_LOCATION (for_stmt),
14966 "%<bind(teams)%> on a %<loop%> region not strictly "
14967 "nested inside of a %<teams%> region");
14968 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14969 break;
14971 kind = OMP_CLAUSE_BIND_TEAMS;
14972 break;
14973 default:
14974 gcc_unreachable ();
14977 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14978 switch (OMP_CLAUSE_CODE (*pc))
14980 case OMP_CLAUSE_REDUCTION:
14981 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
14983 error_at (OMP_CLAUSE_LOCATION (*pc),
14984 "%<inscan%> %<reduction%> clause on "
14985 "%qs construct", "loop");
14986 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
14988 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
14990 error_at (OMP_CLAUSE_LOCATION (*pc),
14991 "invalid %<task%> reduction modifier on construct "
14992 "other than %<parallel%>, %qs or %<sections%>",
14993 lang_GNU_Fortran () ? "do" : "for");
14994 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
14996 pc = &OMP_CLAUSE_CHAIN (*pc);
14997 break;
14998 case OMP_CLAUSE_LASTPRIVATE:
14999 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15001 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15002 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15003 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
15004 break;
15005 if (OMP_FOR_ORIG_DECLS (for_stmt)
15006 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15007 i)) == TREE_LIST
15008 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15009 i)))
15011 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15012 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
15013 break;
15016 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
15018 error_at (OMP_CLAUSE_LOCATION (*pc),
15019 "%<lastprivate%> clause on a %<loop%> construct refers "
15020 "to a variable %qD which is not the loop iterator",
15021 OMP_CLAUSE_DECL (*pc));
15022 *pc = OMP_CLAUSE_CHAIN (*pc);
15023 break;
15025 pc = &OMP_CLAUSE_CHAIN (*pc);
15026 break;
15027 default:
15028 pc = &OMP_CLAUSE_CHAIN (*pc);
15029 break;
15032 TREE_SET_CODE (for_stmt, OMP_SIMD);
15034 int last;
15035 switch (kind)
15037 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
15038 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
15039 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
15041 for (int pass = 1; pass <= last; pass++)
15043 if (pass == 2)
15045 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
15046 make_node (BLOCK));
15047 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
15048 *expr_p = make_node (OMP_PARALLEL);
15049 TREE_TYPE (*expr_p) = void_type_node;
15050 OMP_PARALLEL_BODY (*expr_p) = bind;
15051 OMP_PARALLEL_COMBINED (*expr_p) = 1;
15052 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
15053 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
15054 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15055 if (OMP_FOR_ORIG_DECLS (for_stmt)
15056 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
15057 == TREE_LIST))
15059 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15060 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
15062 *pc = build_omp_clause (UNKNOWN_LOCATION,
15063 OMP_CLAUSE_FIRSTPRIVATE);
15064 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
15065 pc = &OMP_CLAUSE_CHAIN (*pc);
15069 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
15070 tree *pc = &OMP_FOR_CLAUSES (t);
15071 TREE_TYPE (t) = void_type_node;
15072 OMP_FOR_BODY (t) = *expr_p;
15073 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
15074 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15075 switch (OMP_CLAUSE_CODE (c))
15077 case OMP_CLAUSE_BIND:
15078 case OMP_CLAUSE_ORDER:
15079 case OMP_CLAUSE_COLLAPSE:
15080 *pc = copy_node (c);
15081 pc = &OMP_CLAUSE_CHAIN (*pc);
15082 break;
15083 case OMP_CLAUSE_PRIVATE:
15084 case OMP_CLAUSE_FIRSTPRIVATE:
15085 /* Only needed on innermost. */
15086 break;
15087 case OMP_CLAUSE_LASTPRIVATE:
15088 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15090 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15091 OMP_CLAUSE_FIRSTPRIVATE);
15092 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15093 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15094 pc = &OMP_CLAUSE_CHAIN (*pc);
15096 *pc = copy_node (c);
15097 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15098 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15099 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15101 if (pass != last)
15102 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15103 else
15104 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15105 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15107 pc = &OMP_CLAUSE_CHAIN (*pc);
15108 break;
15109 case OMP_CLAUSE_REDUCTION:
15110 *pc = copy_node (c);
15111 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15112 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15113 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15115 auto_vec<tree> no_context_vars;
15116 int walk_subtrees = 0;
15117 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15118 &walk_subtrees, &no_context_vars);
15119 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15120 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15121 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15122 note_no_context_vars,
15123 &no_context_vars);
15124 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15125 note_no_context_vars,
15126 &no_context_vars);
15128 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15129 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15130 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15131 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15132 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15134 hash_map<tree, tree> decl_map;
15135 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15136 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15137 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15138 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15139 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15140 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15142 copy_body_data id;
15143 memset (&id, 0, sizeof (id));
15144 id.src_fn = current_function_decl;
15145 id.dst_fn = current_function_decl;
15146 id.src_cfun = cfun;
15147 id.decl_map = &decl_map;
15148 id.copy_decl = copy_decl_no_change;
15149 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15150 id.transform_new_cfg = true;
15151 id.transform_return_to_modify = false;
15152 id.eh_lp_nr = 0;
15153 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15154 &id, NULL);
15155 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15156 &id, NULL);
15158 for (tree d : no_context_vars)
15160 DECL_CONTEXT (d) = NULL_TREE;
15161 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15164 else
15166 OMP_CLAUSE_REDUCTION_INIT (*pc)
15167 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15168 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15169 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15171 pc = &OMP_CLAUSE_CHAIN (*pc);
15172 break;
15173 default:
15174 gcc_unreachable ();
15176 *pc = NULL_TREE;
15177 *expr_p = t;
15179 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
15183 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15184 of OMP_TARGET's body. */
15186 static tree
15187 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15189 *walk_subtrees = 0;
15190 switch (TREE_CODE (*tp))
15192 case OMP_TEAMS:
15193 return *tp;
15194 case BIND_EXPR:
15195 case STATEMENT_LIST:
15196 *walk_subtrees = 1;
15197 break;
15198 default:
15199 break;
15201 return NULL_TREE;
15204 /* Helper function of optimize_target_teams, determine if the expression
15205 can be computed safely before the target construct on the host. */
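/* For example (illustrative): with a and b firstprivate integral decls
   on the target, a + b * 4 is computable on the host beforehand,
   whereas foo () or *p is not.  */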
15207 static tree
15208 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15210 splay_tree_node n;
15212 if (TYPE_P (*tp))
15214 *walk_subtrees = 0;
15215 return NULL_TREE;
15217 switch (TREE_CODE (*tp))
15219 case VAR_DECL:
15220 case PARM_DECL:
15221 case RESULT_DECL:
15222 *walk_subtrees = 0;
15223 if (error_operand_p (*tp)
15224 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15225 || DECL_HAS_VALUE_EXPR_P (*tp)
15226 || DECL_THREAD_LOCAL_P (*tp)
15227 || TREE_SIDE_EFFECTS (*tp)
15228 || TREE_THIS_VOLATILE (*tp))
15229 return *tp;
15230 if (is_global_var (*tp)
15231 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15232 || lookup_attribute ("omp declare target link",
15233 DECL_ATTRIBUTES (*tp))))
15234 return *tp;
15235 if (VAR_P (*tp)
15236 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15237 && !is_global_var (*tp)
15238 && decl_function_context (*tp) == current_function_decl)
15239 return *tp;
15240 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15241 (splay_tree_key) *tp);
15242 if (n == NULL)
15244 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15245 return NULL_TREE;
15246 return *tp;
15248 else if (n->value & GOVD_LOCAL)
15249 return *tp;
15250 else if (n->value & GOVD_FIRSTPRIVATE)
15251 return NULL_TREE;
15252 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15253 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15254 return NULL_TREE;
15255 return *tp;
15256 case INTEGER_CST:
15257 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15258 return *tp;
15259 return NULL_TREE;
15260 case TARGET_EXPR:
15261 if (TARGET_EXPR_INITIAL (*tp)
15262 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15263 return *tp;
15264 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15265 walk_subtrees, NULL);
15266 /* Allow some reasonable subset of integral arithmetic. */
15267 case PLUS_EXPR:
15268 case MINUS_EXPR:
15269 case MULT_EXPR:
15270 case TRUNC_DIV_EXPR:
15271 case CEIL_DIV_EXPR:
15272 case FLOOR_DIV_EXPR:
15273 case ROUND_DIV_EXPR:
15274 case TRUNC_MOD_EXPR:
15275 case CEIL_MOD_EXPR:
15276 case FLOOR_MOD_EXPR:
15277 case ROUND_MOD_EXPR:
15278 case RDIV_EXPR:
15279 case EXACT_DIV_EXPR:
15280 case MIN_EXPR:
15281 case MAX_EXPR:
15282 case LSHIFT_EXPR:
15283 case RSHIFT_EXPR:
15284 case BIT_IOR_EXPR:
15285 case BIT_XOR_EXPR:
15286 case BIT_AND_EXPR:
15287 case NEGATE_EXPR:
15288 case ABS_EXPR:
15289 case BIT_NOT_EXPR:
15290 case NON_LVALUE_EXPR:
15291 CASE_CONVERT:
15292 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15293 return *tp;
15294 return NULL_TREE;
15295 /* And disallow anything else, except for comparisons. */
15296 default:
15297 if (COMPARISON_CLASS_P (*tp))
15298 return NULL_TREE;
15299 return *tp;
15303 /* Try to determine if the num_teams and/or thread_limit expressions
15304 can have their values determined already before entering the
15305 target construct.
15306 INTEGER_CSTs trivially can, and so can integral decls that are
15307 firstprivate (explicitly or implicitly) or explicitly mapped with
15308 map(always, to:) or map(always, tofrom:) on the target region, as
15309 well as expressions involving simple arithmetic on those; function
15310 calls are not OK, nor is dereferencing something, etc.
15311 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15312 EXPR based on what we find:
15313 0 stands for a clause not specified at all; use the implementation default;
15314 -1 stands for a value that can't be determined easily before entering
15315 the target construct;
15316 -2 means that no explicit teams construct was specified.
15317 If the teams construct is not present at all, use 1 for num_teams
15318 and 0 for thread_limit (only one team is involved, and the thread
15319 limit is implementation defined). */
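/* As an illustration: for

     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(n)

   with n (implicitly) firstprivate, num_teams(4) and thread_limit(n)
   are added to the target's clauses; had thread_limit been foo (),
   -1 would be recorded instead.  */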
15321 static void
15322 optimize_target_teams (tree target, gimple_seq *pre_p)
15324 tree body = OMP_BODY (target);
15325 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15326 tree num_teams_lower = NULL_TREE;
15327 tree num_teams_upper = integer_zero_node;
15328 tree thread_limit = integer_zero_node;
15329 location_t num_teams_loc = EXPR_LOCATION (target);
15330 location_t thread_limit_loc = EXPR_LOCATION (target);
15331 tree c, *p, expr;
15332 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15334 if (teams == NULL_TREE)
15335 num_teams_upper = build_int_cst (integer_type_node, -2);
15336 else
15337 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15339 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15341 p = &num_teams_upper;
15342 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15343 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15345 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15346 if (TREE_CODE (expr) == INTEGER_CST)
15347 num_teams_lower = expr;
15348 else if (walk_tree (&expr, computable_teams_clause,
15349 NULL, NULL))
15350 num_teams_lower = integer_minus_one_node;
15351 else
15353 num_teams_lower = expr;
15354 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15355 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15356 is_gimple_val, fb_rvalue, false)
15357 == GS_ERROR)
15359 gimplify_omp_ctxp = target_ctx;
15360 num_teams_lower = integer_minus_one_node;
15362 else
15364 gimplify_omp_ctxp = target_ctx;
15365 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15366 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15367 = num_teams_lower;
15372 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15374 p = &thread_limit;
15375 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15377 else
15378 continue;
15379 expr = OMP_CLAUSE_OPERAND (c, 0);
15380 if (TREE_CODE (expr) == INTEGER_CST)
15382 *p = expr;
15383 continue;
15385 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15387 *p = integer_minus_one_node;
15388 continue;
15390 *p = expr;
15391 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15392 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15393 == GS_ERROR)
15395 gimplify_omp_ctxp = target_ctx;
15396 *p = integer_minus_one_node;
15397 continue;
15399 gimplify_omp_ctxp = target_ctx;
15400 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15401 OMP_CLAUSE_OPERAND (c, 0) = *p;
15403 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15405 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15406 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15407 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15408 OMP_TARGET_CLAUSES (target) = c;
15410 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15411 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15412 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15413 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15414 OMP_TARGET_CLAUSES (target) = c;
15417 /* Gimplify the gross structure of several OMP constructs. */
15419 static void
15420 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15422 tree expr = *expr_p;
15423 gimple *stmt;
15424 gimple_seq body = NULL;
15425 enum omp_region_type ort;
15427 switch (TREE_CODE (expr))
15429 case OMP_SECTIONS:
15430 case OMP_SINGLE:
15431 ort = ORT_WORKSHARE;
15432 break;
15433 case OMP_SCOPE:
15434 ort = ORT_TASKGROUP;
15435 break;
15436 case OMP_TARGET:
15437 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15438 break;
15439 case OACC_KERNELS:
15440 ort = ORT_ACC_KERNELS;
15441 break;
15442 case OACC_PARALLEL:
15443 ort = ORT_ACC_PARALLEL;
15444 break;
15445 case OACC_SERIAL:
15446 ort = ORT_ACC_SERIAL;
15447 break;
15448 case OACC_DATA:
15449 ort = ORT_ACC_DATA;
15450 break;
15451 case OMP_TARGET_DATA:
15452 ort = ORT_TARGET_DATA;
15453 break;
15454 case OMP_TEAMS:
15455 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15456 if (gimplify_omp_ctxp == NULL
15457 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15458 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15459 break;
15460 case OACC_HOST_DATA:
15461 ort = ORT_ACC_HOST_DATA;
15462 break;
15463 default:
15464 gcc_unreachable ();
15467 bool save_in_omp_construct = in_omp_construct;
15468 if ((ort & ORT_ACC) == 0)
15469 in_omp_construct = false;
15470 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15471 TREE_CODE (expr));
15472 if (TREE_CODE (expr) == OMP_TARGET)
15473 optimize_target_teams (expr, pre_p);
15474 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15475 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15477 push_gimplify_context ();
15478 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15479 if (gimple_code (g) == GIMPLE_BIND)
15480 pop_gimplify_context (g);
15481 else
15482 pop_gimplify_context (NULL);
15483 if ((ort & ORT_TARGET_DATA) != 0)
15485 enum built_in_function end_ix;
15486 switch (TREE_CODE (expr))
15488 case OACC_DATA:
15489 case OACC_HOST_DATA:
15490 end_ix = BUILT_IN_GOACC_DATA_END;
15491 break;
15492 case OMP_TARGET_DATA:
15493 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15494 break;
15495 default:
15496 gcc_unreachable ();
15498 tree fn = builtin_decl_explicit (end_ix);
15499 g = gimple_build_call (fn, 0);
15500 gimple_seq cleanup = NULL;
15501 gimple_seq_add_stmt (&cleanup, g);
15502 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15503 body = NULL;
15504 gimple_seq_add_stmt (&body, g);
15507 else
15508 gimplify_and_add (OMP_BODY (expr), &body);
15509 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15510 TREE_CODE (expr));
15511 in_omp_construct = save_in_omp_construct;
15513 switch (TREE_CODE (expr))
15515 case OACC_DATA:
15516 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15517 OMP_CLAUSES (expr));
15518 break;
15519 case OACC_HOST_DATA:
15520 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15522 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15523 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15524 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15527 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15528 OMP_CLAUSES (expr));
15529 break;
15530 case OACC_KERNELS:
15531 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15532 OMP_CLAUSES (expr));
15533 break;
15534 case OACC_PARALLEL:
15535 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15536 OMP_CLAUSES (expr));
15537 break;
15538 case OACC_SERIAL:
15539 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15540 OMP_CLAUSES (expr));
15541 break;
15542 case OMP_SECTIONS:
15543 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15544 break;
15545 case OMP_SINGLE:
15546 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15547 break;
15548 case OMP_SCOPE:
15549 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15550 break;
15551 case OMP_TARGET:
15552 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15553 OMP_CLAUSES (expr));
15554 break;
15555 case OMP_TARGET_DATA:
15556 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15557 to be evaluated before the use_device_{ptr,addr} clauses if they
15558 refer to the same variables. */
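/* For example (illustrative): for
     #pragma omp target data use_device_ptr(p) map(tofrom: p[0:n])
   the clause list is reordered so that map(tofrom: p[0:n]) comes
   before use_device_ptr(p).  */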
15560 tree use_device_clauses;
15561 tree *pc, *uc = &use_device_clauses;
15562 for (pc = &OMP_CLAUSES (expr); *pc; )
15563 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15564 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15566 *uc = *pc;
15567 *pc = OMP_CLAUSE_CHAIN (*pc);
15568 uc = &OMP_CLAUSE_CHAIN (*uc);
15570 else
15571 pc = &OMP_CLAUSE_CHAIN (*pc);
15572 *uc = NULL_TREE;
15573 *pc = use_device_clauses;
15574 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15575 OMP_CLAUSES (expr));
15577 break;
15578 case OMP_TEAMS:
15579 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15580 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15581 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15582 break;
15583 default:
15584 gcc_unreachable ();
15587 gimplify_seq_add_stmt (pre_p, stmt);
15588 *expr_p = NULL_TREE;
15591 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15592 target update, target enter data and target exit data constructs. */
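/* For example (illustrative): "#pragma omp target update to(x)" becomes
   a bodyless GIMPLE_OMP_TARGET statement with kind
   GF_OMP_TARGET_KIND_UPDATE and the gimplified standalone clauses.  */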
15594 static void
15595 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15597 tree expr = *expr_p;
15598 int kind;
15599 gomp_target *stmt;
15600 enum omp_region_type ort = ORT_WORKSHARE;
15602 switch (TREE_CODE (expr))
15604 case OACC_ENTER_DATA:
15605 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15606 ort = ORT_ACC;
15607 break;
15608 case OACC_EXIT_DATA:
15609 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15610 ort = ORT_ACC;
15611 break;
15612 case OACC_UPDATE:
15613 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15614 ort = ORT_ACC;
15615 break;
15616 case OMP_TARGET_UPDATE:
15617 kind = GF_OMP_TARGET_KIND_UPDATE;
15618 break;
15619 case OMP_TARGET_ENTER_DATA:
15620 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15621 break;
15622 case OMP_TARGET_EXIT_DATA:
15623 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15624 break;
15625 default:
15626 gcc_unreachable ();
15628 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15629 ort, TREE_CODE (expr));
15630 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15631 TREE_CODE (expr));
15632 if (TREE_CODE (expr) == OACC_UPDATE
15633 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15634 OMP_CLAUSE_IF_PRESENT))
15636 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15637 clause. */
15638 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15639 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15640 switch (OMP_CLAUSE_MAP_KIND (c))
15642 case GOMP_MAP_FORCE_TO:
15643 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15644 break;
15645 case GOMP_MAP_FORCE_FROM:
15646 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15647 break;
15648 default:
15649 break;
15652 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15653 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15654 OMP_CLAUSE_FINALIZE))
15656 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15657 semantics. */
15658 bool have_clause = false;
15659 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15660 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15661 switch (OMP_CLAUSE_MAP_KIND (c))
15663 case GOMP_MAP_FROM:
15664 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15665 have_clause = true;
15666 break;
15667 case GOMP_MAP_RELEASE:
15668 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15669 have_clause = true;
15670 break;
15671 case GOMP_MAP_TO_PSET:
15672 /* Fortran arrays with descriptors must map that descriptor when
15673 doing standalone "attach" operations (in OpenACC). In that
15674 case GOMP_MAP_TO_PSET appears by itself with no preceding
15675 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15676 break;
15677 case GOMP_MAP_POINTER:
15678 /* TODO PR92929: we may see these here, but they'll always follow
15679 one of the clauses above, and will be handled by libgomp as
15680 one group, so no handling required here. */
15681 gcc_assert (have_clause);
15682 break;
15683 case GOMP_MAP_DETACH:
15684 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15685 have_clause = false;
15686 break;
15687 case GOMP_MAP_STRUCT:
15688 have_clause = false;
15689 break;
15690 default:
15691 gcc_unreachable ();
15694 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15696 gimplify_seq_add_stmt (pre_p, stmt);
15697 *expr_p = NULL_TREE;
15700 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15701 stabilized the lhs of the atomic operation as *ADDR. Return true if
15702 EXPR is this stabilized form. */
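/* For example (illustrative): for "#pragma omp atomic  x += 1" the
   front end presents the lhs as *&x; with ADDR being &x, both *&x and
   plain x are recognized here as the stabilized lhs.  */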
15704 static bool
15705 goa_lhs_expr_p (tree expr, tree addr)
15707 /* Also include casts to other type variants. The C front end is fond
15708 of adding these for e.g. volatile variables. This is like
15709 STRIP_TYPE_NOPS but includes the main variant lookup. */
15710 STRIP_USELESS_TYPE_CONVERSION (expr);
15712 if (INDIRECT_REF_P (expr))
15714 expr = TREE_OPERAND (expr, 0);
15715 while (expr != addr
15716 && (CONVERT_EXPR_P (expr)
15717 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15718 && TREE_CODE (expr) == TREE_CODE (addr)
15719 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15721 expr = TREE_OPERAND (expr, 0);
15722 addr = TREE_OPERAND (addr, 0);
15724 if (expr == addr)
15725 return true;
15726 return (TREE_CODE (addr) == ADDR_EXPR
15727 && TREE_CODE (expr) == ADDR_EXPR
15728 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15730 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15731 return true;
15732 return false;
15735 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15736 expression does not involve the lhs, evaluate it into a temporary.
15737 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15738 or -1 if an error was encountered. */
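/* As an illustration: stabilizing x + foo () * 2 against LHS_ADDR &x
   rewrites it to lhs_var + D.1, with D.1 = foo () * 2 evaluated into
   *PRE_P, since that subexpression does not involve the lhs.  */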
15740 static int
15741 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15742 tree lhs_var, tree &target_expr, bool rhs, int depth)
15744 tree expr = *expr_p;
15745 int saw_lhs = 0;
15747 if (goa_lhs_expr_p (expr, lhs_addr))
15749 if (pre_p)
15750 *expr_p = lhs_var;
15751 return 1;
15753 if (is_gimple_val (expr))
15754 return 0;
15756 /* The maximum depth at which the lhs appears in an expression we handle
15757 is for __builtin_clear_padding (...), __builtin_clear_padding (...),
15758 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
15759 if (++depth > 7)
15760 goto finish;
15762 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15764 case tcc_binary:
15765 case tcc_comparison:
15766 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15767 lhs_var, target_expr, true, depth);
15768 /* FALLTHRU */
15769 case tcc_unary:
15770 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15771 lhs_var, target_expr, true, depth);
15772 break;
15773 case tcc_expression:
15774 switch (TREE_CODE (expr))
15776 case TRUTH_ANDIF_EXPR:
15777 case TRUTH_ORIF_EXPR:
15778 case TRUTH_AND_EXPR:
15779 case TRUTH_OR_EXPR:
15780 case TRUTH_XOR_EXPR:
15781 case BIT_INSERT_EXPR:
15782 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15783 lhs_addr, lhs_var, target_expr, true,
15784 depth);
15785 /* FALLTHRU */
15786 case TRUTH_NOT_EXPR:
15787 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15788 lhs_addr, lhs_var, target_expr, true,
15789 depth);
15790 break;
15791 case MODIFY_EXPR:
15792 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15793 target_expr, true, depth))
15794 break;
15795 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15796 lhs_addr, lhs_var, target_expr, true,
15797 depth);
15798 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15799 lhs_addr, lhs_var, target_expr, false,
15800 depth);
15801 break;
15803 case ADDR_EXPR:
15804 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15805 target_expr, true, depth))
15806 break;
15807 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15808 lhs_addr, lhs_var, target_expr, false,
15809 depth);
15810 break;
15811 case COMPOUND_EXPR:
15812 /* Break out any preevaluations from cp_build_modify_expr. */
15813 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15814 expr = TREE_OPERAND (expr, 1))
15816 /* Special-case __builtin_clear_padding call before
15817 __builtin_memcmp. */
15818 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15820 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15821 if (fndecl
15822 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15823 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15824 && (!pre_p
15825 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15826 lhs_addr, lhs_var,
15827 target_expr, true, depth)))
15829 if (pre_p)
15830 *expr_p = expr;
15831 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15832 pre_p, lhs_addr, lhs_var,
15833 target_expr, true, depth);
15834 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15835 pre_p, lhs_addr, lhs_var,
15836 target_expr, rhs, depth);
15837 return saw_lhs;
15841 if (pre_p)
15842 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15844 if (!pre_p)
15845 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15846 target_expr, rhs, depth);
15847 *expr_p = expr;
15848 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15849 target_expr, rhs, depth);
15850 case COND_EXPR:
15851 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15852 lhs_var, target_expr, true, depth))
15853 break;
15854 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15855 lhs_addr, lhs_var, target_expr, true,
15856 depth);
15857 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15858 lhs_addr, lhs_var, target_expr, true,
15859 depth);
15860 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15861 lhs_addr, lhs_var, target_expr, true,
15862 depth);
15863 break;
15864 case TARGET_EXPR:
15865 if (TARGET_EXPR_INITIAL (expr))
15867 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15868 lhs_var, target_expr, true,
15869 depth))
15870 break;
15871 if (expr == target_expr)
15872 saw_lhs = 1;
15873 else
15875 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15876 pre_p, lhs_addr, lhs_var,
15877 target_expr, true, depth);
15878 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15879 target_expr = expr;
15882 break;
15883 default:
15884 break;
15886 break;
15887 case tcc_reference:
15888 if (TREE_CODE (expr) == BIT_FIELD_REF
15889 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15890 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15891 lhs_addr, lhs_var, target_expr, true,
15892 depth);
15893 break;
15894 case tcc_vl_exp:
15895 if (TREE_CODE (expr) == CALL_EXPR)
15897 if (tree fndecl = get_callee_fndecl (expr))
15898 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
15899 BUILT_IN_MEMCMP))
15901 int nargs = call_expr_nargs (expr);
15902 for (int i = 0; i < nargs; i++)
15903 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15904 pre_p, lhs_addr, lhs_var,
15905 target_expr, true, depth);
15908 break;
15909 default:
15910 break;
15913 finish:
15914 if (saw_lhs == 0 && pre_p)
15916 enum gimplify_status gs;
15917 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
15919 gimplify_stmt (&expr, pre_p);
15920 return saw_lhs;
15922 else if (rhs)
15923 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
15924 else
15925 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
15926 if (gs != GS_ALL_DONE)
15927 saw_lhs = -1;
15930 return saw_lhs;
15933 /* Gimplify an OMP_ATOMIC statement. */
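/* For example (illustrative): "#pragma omp atomic  x += 1" becomes
   roughly

     D.1 = GIMPLE_OMP_ATOMIC_LOAD <&x>
     D.2 = D.1 + 1
     GIMPLE_OMP_ATOMIC_STORE <D.2>

   with the construct's memory order attached to the load and
   store.  */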
15935 static enum gimplify_status
15936 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
15938 tree addr = TREE_OPERAND (*expr_p, 0);
15939 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
15940 ? NULL : TREE_OPERAND (*expr_p, 1);
15941 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
15942 tree tmp_load;
15943 gomp_atomic_load *loadstmt;
15944 gomp_atomic_store *storestmt;
15945 tree target_expr = NULL_TREE;
15947 tmp_load = create_tmp_reg (type);
15948 if (rhs
15949 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
15950 true, 0) < 0)
15951 return GS_ERROR;
15953 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
15954 != GS_ALL_DONE)
15955 return GS_ERROR;
15957 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
15958 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15959 gimplify_seq_add_stmt (pre_p, loadstmt);
15960 if (rhs)
15962 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15963 representatives. Use BIT_FIELD_REF on the lhs instead. */
15964 tree rhsarg = rhs;
15965 if (TREE_CODE (rhs) == COND_EXPR)
15966 rhsarg = TREE_OPERAND (rhs, 1);
15967 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
15968 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
15970 tree bitpos = TREE_OPERAND (rhsarg, 2);
15971 tree op1 = TREE_OPERAND (rhsarg, 1);
15972 tree bitsize;
15973 tree tmp_store = tmp_load;
15974 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
15975 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
15976 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
15977 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
15978 else
15979 bitsize = TYPE_SIZE (TREE_TYPE (op1));
15980 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
15981 tree t = build2_loc (EXPR_LOCATION (rhsarg),
15982 MODIFY_EXPR, void_type_node,
15983 build3_loc (EXPR_LOCATION (rhsarg),
15984 BIT_FIELD_REF, TREE_TYPE (op1),
15985 tmp_store, bitsize, bitpos), op1);
15986 if (TREE_CODE (rhs) == COND_EXPR)
15987 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
15988 TREE_OPERAND (rhs, 0), t, void_node);
15989 gimplify_and_add (t, pre_p);
15990 rhs = tmp_store;
15992 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
15993 if (TREE_CODE (rhs) == COND_EXPR)
15994 gimplify_ctxp->allow_rhs_cond_expr = true;
15995 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
15996 is_gimple_val, fb_rvalue);
15997 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
15998 if (gs != GS_ALL_DONE)
15999 return GS_ERROR;
16002 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
16003 rhs = tmp_load;
16004 storestmt
16005 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16006 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
16008 gimple_omp_atomic_set_weak (loadstmt);
16009 gimple_omp_atomic_set_weak (storestmt);
16011 gimplify_seq_add_stmt (pre_p, storestmt);
16012 switch (TREE_CODE (*expr_p))
16014 case OMP_ATOMIC_READ:
16015 case OMP_ATOMIC_CAPTURE_OLD:
16016 *expr_p = tmp_load;
16017 gimple_omp_atomic_set_need_value (loadstmt);
16018 break;
16019 case OMP_ATOMIC_CAPTURE_NEW:
16020 *expr_p = rhs;
16021 gimple_omp_atomic_set_need_value (storestmt);
16022 break;
16023 default:
16024 *expr_p = NULL;
16025 break;
16028 return GS_ALL_DONE;
16031 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
16032 body, and adding some EH bits. */
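/* For example (illustrative): "__transaction_relaxed { body }" becomes
   a GIMPLE_TRANSACTION statement containing the gimplified body, with
   the GTMA_IS_RELAXED subcode set.  */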
16034 static enum gimplify_status
16035 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
16037 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
16038 gimple *body_stmt;
16039 gtransaction *trans_stmt;
16040 gimple_seq body = NULL;
16041 int subcode = 0;
16043 /* Wrap the transaction body in a BIND_EXPR so that we have a context
16044 in which to put decls for OMP. */
16045 if (TREE_CODE (tbody) != BIND_EXPR)
16047 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
16048 TREE_SIDE_EFFECTS (bind) = 1;
16049 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
16050 TRANSACTION_EXPR_BODY (expr) = bind;
16053 push_gimplify_context ();
16054 temp = voidify_wrapper_expr (*expr_p, NULL);
16056 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
16057 pop_gimplify_context (body_stmt);
16059 trans_stmt = gimple_build_transaction (body);
16060 if (TRANSACTION_EXPR_OUTER (expr))
16061 subcode = GTMA_IS_OUTER;
16062 else if (TRANSACTION_EXPR_RELAXED (expr))
16063 subcode = GTMA_IS_RELAXED;
16064 gimple_transaction_set_subcode (trans_stmt, subcode);
16066 gimplify_seq_add_stmt (pre_p, trans_stmt);
16068 if (temp)
16070 *expr_p = temp;
16071 return GS_OK;
16074 *expr_p = NULL_TREE;
16075 return GS_ALL_DONE;
16078 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
16079 is the OMP_BODY of the original EXPR (which has already been
16080 gimplified so it's not present in the EXPR).
16082 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
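/* As an illustration: inside an ordered(2) loop nest over i and j,
   "#pragma omp ordered doacross(sink: i-1,j)" has its sink decls
   checked against and remapped to the recorded loop iteration
   variables; any mismatch is diagnosed and a GIMPLE_NOP is returned
   instead.  */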
16084 static gimple *
16085 gimplify_omp_ordered (tree expr, gimple_seq body)
16087 tree c, decls;
16088 int failures = 0;
16089 unsigned int i;
16090 tree source_c = NULL_TREE;
16091 tree sink_c = NULL_TREE;
16093 if (gimplify_omp_ctxp)
16095 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16096 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16097 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16099 error_at (OMP_CLAUSE_LOCATION (c),
16100 "%<ordered%> construct with %qs clause must be "
16101 "closely nested inside a loop with %<ordered%> clause",
16102 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16103 failures++;
16105 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16106 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16108 bool fail = false;
16109 sink_c = c;
16110 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16111 continue; /* omp_cur_iteration - 1 */
16112 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16113 decls && TREE_CODE (decls) == TREE_LIST;
16114 decls = TREE_CHAIN (decls), ++i)
16115 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16116 continue;
16117 else if (TREE_VALUE (decls)
16118 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16120 error_at (OMP_CLAUSE_LOCATION (c),
16121 "variable %qE is not an iteration "
16122 "of outermost loop %d, expected %qE",
16123 TREE_VALUE (decls), i + 1,
16124 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16125 fail = true;
16126 failures++;
16128 else
16129 TREE_VALUE (decls)
16130 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16131 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16133 error_at (OMP_CLAUSE_LOCATION (c),
16134 "number of variables in %qs clause with "
16135 "%<sink%> modifier does not match number of "
16136 "iteration variables",
16137 OMP_CLAUSE_DOACROSS_DEPEND (c)
16138 ? "depend" : "doacross");
16139 failures++;
16142 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16143 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16145 if (source_c)
16147 error_at (OMP_CLAUSE_LOCATION (c),
16148 "more than one %qs clause with %<source%> "
16149 "modifier on an %<ordered%> construct",
16150 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16151 ? "depend" : "doacross");
16152 failures++;
16154 else
16155 source_c = c;
16158 if (source_c && sink_c)
16160 error_at (OMP_CLAUSE_LOCATION (source_c),
16161 "%qs clause with %<source%> modifier specified "
16162 "together with %qs clauses with %<sink%> modifier "
16163 "on the same construct",
16164 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16165 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16166 failures++;
16169 if (failures)
16170 return gimple_build_nop ();
16171 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16174 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16175 expression produces a value to be used as an operand inside a GIMPLE
16176 statement, the value will be stored back in *EXPR_P. This value will
16177 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16178 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16179 emitted in PRE_P and POST_P.
16181 Additionally, this process may overwrite parts of the input
16182 expression during gimplification. Ideally, it should be
16183 possible to do non-destructive gimplification.
16185 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16186 the expression needs to evaluate to a value to be used as
16187 an operand in a GIMPLE statement, this value will be stored in
16188 *EXPR_P on exit. This happens when the caller specifies one
16189 of fb_lvalue or fb_rvalue fallback flags.
16191 PRE_P will contain the sequence of GIMPLE statements corresponding
16192 to the evaluation of EXPR and all the side-effects that must
16193 be executed before the main expression. On exit, the last
16194 statement of PRE_P is the core statement being gimplified. For
16195 instance, when gimplifying 'if (++a)' the last statement in
16196 PRE_P will be 'if (t.1)' where t.1 is the result of
16197 pre-incrementing 'a'.
16199 POST_P will contain the sequence of GIMPLE statements corresponding
16200 to the evaluation of all the side-effects that must be executed
16201 after the main expression. If this is NULL, the post
16202 side-effects are stored at the end of PRE_P.
16204 The reason why the output is split in two is to handle post
16205 side-effects explicitly. In some cases, an expression may have
16206 inner and outer post side-effects which need to be emitted in
16207 an order different from the one given by the recursive
16208 traversal. For instance, for the expression (*p--)++ the post
16209 side-effects of '--' must actually occur *after* the post
16210 side-effects of '++'. However, gimplification will first visit
16211 the inner expression, so if a separate POST sequence was not
16212 used, the resulting sequence would be:
16214 1 t.1 = *p
16215 2 p = p - 1
16216 3 t.2 = t.1 + 1
16217 4 *p = t.2
16219 However, the post-decrement operation in line #2 must not be
16220 evaluated until after the store to *p at line #4, so the
16221 correct sequence should be:
16223 1 t.1 = *p
16224 2 t.2 = t.1 + 1
16225 3 *p = t.2
16226 4 p = p - 1
16228 So, by specifying a separate post queue, it is possible
16229 to emit the post side-effects in the correct order.
16230 If POST_P is NULL, an internal queue will be used. Before
16231 returning to the caller, the sequence POST_P is appended to
16232 the main output sequence PRE_P.
16234 GIMPLE_TEST_F points to a function that takes a tree T and
16235 returns nonzero if T is in the GIMPLE form requested by the
16236 caller. The GIMPLE predicates are in gimple.cc.
16238 FALLBACK tells the function what sort of a temporary we want if
16239 gimplification cannot produce an expression that complies with
16240 GIMPLE_TEST_F.
16242 fb_none means that no temporary should be generated
16243 fb_rvalue means that an rvalue is OK to generate
16244 fb_lvalue means that an lvalue is OK to generate
16245 fb_either means that either is OK, but an lvalue is preferable.
16246 fb_mayfail means that gimplification may fail (in which case
16247 GS_ERROR will be returned)
16249 The return value is either GS_ERROR or GS_ALL_DONE, since this
16250 function iterates until EXPR is completely gimplified or an error
16251 occurs. */
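/* A typical call (illustrative), forcing an operand into a GIMPLE
   value:

     if (gimplify_expr (&op, pre_p, NULL, is_gimple_val, fb_rvalue)
         == GS_ERROR)
       return GS_ERROR;  */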
16253 enum gimplify_status
16254 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16255 bool (*gimple_test_f) (tree), fallback_t fallback)
16257 tree tmp;
16258 gimple_seq internal_pre = NULL;
16259 gimple_seq internal_post = NULL;
16260 tree save_expr;
16261 bool is_statement;
16262 location_t saved_location;
16263 enum gimplify_status ret;
16264 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16265 tree label;
16267 save_expr = *expr_p;
16268 if (save_expr == NULL_TREE)
16269 return GS_ALL_DONE;
16271 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16272 is_statement = gimple_test_f == is_gimple_stmt;
16273 if (is_statement)
16274 gcc_assert (pre_p);
16276 /* Consistency checks. */
16277 if (gimple_test_f == is_gimple_reg)
16278 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16279 else if (gimple_test_f == is_gimple_val
16280 || gimple_test_f == is_gimple_call_addr
16281 || gimple_test_f == is_gimple_condexpr_for_cond
16282 || gimple_test_f == is_gimple_mem_rhs
16283 || gimple_test_f == is_gimple_mem_rhs_or_call
16284 || gimple_test_f == is_gimple_reg_rhs
16285 || gimple_test_f == is_gimple_reg_rhs_or_call
16286 || gimple_test_f == is_gimple_asm_val
16287 || gimple_test_f == is_gimple_mem_ref_addr)
16288 gcc_assert (fallback & fb_rvalue);
16289 else if (gimple_test_f == is_gimple_min_lval
16290 || gimple_test_f == is_gimple_lvalue)
16291 gcc_assert (fallback & fb_lvalue);
16292 else if (gimple_test_f == is_gimple_addressable)
16293 gcc_assert (fallback & fb_either);
16294 else if (gimple_test_f == is_gimple_stmt)
16295 gcc_assert (fallback == fb_none);
16296 else
16298 /* We should have recognized the GIMPLE_TEST_F predicate to
16299 know what kind of fallback to use in case a temporary is
16300 needed to hold the value or address of *EXPR_P. */
16301 gcc_unreachable ();
16304 /* We used to check the predicate here and return immediately if it
16305 succeeds. This is wrong; the design is for gimplification to be
16306 idempotent, and for the predicates to only test for valid forms, not
16307 whether they are fully simplified. */
16308 if (pre_p == NULL)
16309 pre_p = &internal_pre;
16311 if (post_p == NULL)
16312 post_p = &internal_post;
16314 /* Remember the last statements added to PRE_P and POST_P. Every
16315 new statement added by the gimplification helpers needs to be
16316 annotated with location information. To centralize the
16317 responsibility, we remember the last statement that had been
16318 added to both queues before gimplifying *EXPR_P. If
16319 gimplification produces new statements in PRE_P and POST_P, those
16320 statements will be annotated with the same location information
16321 as *EXPR_P. */
16322 pre_last_gsi = gsi_last (*pre_p);
16323 post_last_gsi = gsi_last (*post_p);
16325 saved_location = input_location;
16326 if (save_expr != error_mark_node
16327 && EXPR_HAS_LOCATION (*expr_p))
16328 input_location = EXPR_LOCATION (*expr_p);
16330 /* Loop over the specific gimplifiers until the toplevel node
16331 remains the same. */
16334 /* Strip away as many useless type conversions as possible
16335 at the toplevel. */
16336 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16338 /* Remember the expr. */
16339 save_expr = *expr_p;
16341 /* Die, die, die, my darling. */
16342 if (error_operand_p (save_expr))
16344 ret = GS_ERROR;
16345 break;
16348 /* Do any language-specific gimplification. */
16349 ret = ((enum gimplify_status)
16350 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16351 if (ret == GS_OK)
16353 if (*expr_p == NULL_TREE)
16354 break;
16355 if (*expr_p != save_expr)
16356 continue;
16358 else if (ret != GS_UNHANDLED)
16359 break;
16361 /* Make sure that all the cases set 'ret' appropriately. */
16362 ret = GS_UNHANDLED;
16363 switch (TREE_CODE (*expr_p))
16365 /* First deal with the special cases. */
16367 case POSTINCREMENT_EXPR:
16368 case POSTDECREMENT_EXPR:
16369 case PREINCREMENT_EXPR:
16370 case PREDECREMENT_EXPR:
16371 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16372 fallback != fb_none,
16373 TREE_TYPE (*expr_p));
16374 break;
16376 case VIEW_CONVERT_EXPR:
16377 if ((fallback & fb_rvalue)
16378 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16379 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16381 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16382 post_p, is_gimple_val, fb_rvalue);
16383 recalculate_side_effects (*expr_p);
16384 break;
16386 /* Fallthru. */
16388 case ARRAY_REF:
16389 case ARRAY_RANGE_REF:
16390 case REALPART_EXPR:
16391 case IMAGPART_EXPR:
16392 case COMPONENT_REF:
16393 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16394 fallback ? fallback : fb_rvalue);
16395 break;
16397 case COND_EXPR:
16398 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16400 /* C99 code may assign to an array in a structure value of a
16401 conditional expression, and this has undefined behavior
16402 only on execution, so create a temporary if an lvalue is
16403 required. */
16404 if (fallback == fb_lvalue)
16406 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16407 mark_addressable (*expr_p);
16408 ret = GS_OK;
16410 break;
16412 case CALL_EXPR:
16413 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16415 /* C99 code may assign to an array in a structure returned
16416 from a function, and this has undefined behavior only on
16417 execution, so create a temporary if an lvalue is
16418 required. */
16419 if (fallback == fb_lvalue)
16421 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16422 mark_addressable (*expr_p);
16423 ret = GS_OK;
16425 break;
16427 case TREE_LIST:
16428 gcc_unreachable ();
16430 case COMPOUND_EXPR:
16431 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16432 break;
16434 case COMPOUND_LITERAL_EXPR:
16435 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16436 gimple_test_f, fallback);
16437 break;
16439 case MODIFY_EXPR:
16440 case INIT_EXPR:
16441 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16442 fallback != fb_none);
16443 break;
16445 case TRUTH_ANDIF_EXPR:
16446 case TRUTH_ORIF_EXPR:
16448 /* Preserve the original type of the expression and the
16449 source location of the outer expression. */
16450 tree org_type = TREE_TYPE (*expr_p);
16451 *expr_p = gimple_boolify (*expr_p);
16452 *expr_p = build3_loc (input_location, COND_EXPR,
16453 org_type, *expr_p,
16454 fold_convert_loc
16455 (input_location,
16456 org_type, boolean_true_node),
16457 fold_convert_loc
16458 (input_location,
16459 org_type, boolean_false_node));
16460 ret = GS_OK;
16461 break;
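      /* Illustrative sketch, not part of GCC: given C source such as

           int f (int a, int b) { return a && b; }

         the TRUTH_ANDIF_EXPR above is rewritten into the equivalent of
         "a && b ? 1 : 0" (a COND_EXPR of the original type), and the
         recursive gimplification of that COND_EXPR then emits the
         short-circuit evaluation as explicit conditional jumps.  */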
16464 case TRUTH_NOT_EXPR:
16466 tree type = TREE_TYPE (*expr_p);
16467 /* The parsers are careful to generate TRUTH_NOT_EXPR
16468 only with operands that are always zero or one.
16469 We do not fold here but handle the only interesting case
16470 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16471 *expr_p = gimple_boolify (*expr_p);
16472 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16473 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16474 TREE_TYPE (*expr_p),
16475 TREE_OPERAND (*expr_p, 0));
16476 else
16477 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16478 TREE_TYPE (*expr_p),
16479 TREE_OPERAND (*expr_p, 0),
16480 build_int_cst (TREE_TYPE (*expr_p), 1));
16481 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16482 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16483 ret = GS_OK;
16484 break;
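      /* Illustrative sketch, not part of GCC: logical negation is kept
         in bitwise form so that fold cannot reintroduce TRUTH_NOT_EXPR.
         For example

           _Bool g (_Bool b) { return !b; }

         is lowered to the equivalent of "return b ^ 1;" (or "~b" when
         the boolified type has precision 1), which keeps the value
         within {0, 1}.  */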
16487 case ADDR_EXPR:
16488 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16489 break;
16491 case ANNOTATE_EXPR:
16493 tree cond = TREE_OPERAND (*expr_p, 0);
16494 tree kind = TREE_OPERAND (*expr_p, 1);
16495 tree data = TREE_OPERAND (*expr_p, 2);
16496 tree type = TREE_TYPE (cond);
16497 if (!INTEGRAL_TYPE_P (type))
16499 *expr_p = cond;
16500 ret = GS_OK;
16501 break;
16503 tree tmp = create_tmp_var (type);
16504 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16505 gcall *call
16506 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16507 gimple_call_set_lhs (call, tmp);
16508 gimplify_seq_add_stmt (pre_p, call);
16509 *expr_p = tmp;
16510 ret = GS_ALL_DONE;
16511 break;
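      /* Illustrative sketch, not part of GCC: ANNOTATE_EXPRs typically
         wrap loop conditions produced by pragmas, e.g.

           #pragma GCC ivdep
           for (int i = 0; i < n; i++)
             a[i] = b[i] + c[i];

         Here the condition is funneled through an .ANNOTATE internal
         call whose result replaces the original expression, so the
         annotation survives into the GIMPLE IL for the loop optimizers
         to consume.  */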
16514 case VA_ARG_EXPR:
16515 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16516 break;
16518 CASE_CONVERT:
16519 if (IS_EMPTY_STMT (*expr_p))
16521 ret = GS_ALL_DONE;
16522 break;
16525 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16526 || fallback == fb_none)
16528 /* Just strip a conversion to void (or in void context) and
16529 try again. */
16530 *expr_p = TREE_OPERAND (*expr_p, 0);
16531 ret = GS_OK;
16532 break;
16535 ret = gimplify_conversion (expr_p);
16536 if (ret == GS_ERROR)
16537 break;
16538 if (*expr_p != save_expr)
16539 break;
16540 /* FALLTHRU */
16542 case FIX_TRUNC_EXPR:
16543 /* unary_expr: ... | '(' cast ')' val | ... */
16544 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16545 is_gimple_val, fb_rvalue);
16546 recalculate_side_effects (*expr_p);
16547 break;
16549 case INDIRECT_REF:
16551 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16552 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16553 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16555 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16556 if (*expr_p != save_expr)
16558 ret = GS_OK;
16559 break;
16562 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16563 is_gimple_reg, fb_rvalue);
16564 if (ret == GS_ERROR)
16565 break;
16567 recalculate_side_effects (*expr_p);
16568 *expr_p = fold_build2_loc (input_location, MEM_REF,
16569 TREE_TYPE (*expr_p),
16570 TREE_OPERAND (*expr_p, 0),
16571 build_int_cst (saved_ptr_type, 0));
16572 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16573 TREE_THIS_NOTRAP (*expr_p) = notrap;
16574 ret = GS_OK;
16575 break;
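      /* Illustrative sketch, not part of GCC: a plain C dereference

           int load (int *p) { return *p; }

         arrives here as INDIRECT_REF and leaves as the canonical GIMPLE
         memory reference MEM_REF (p, 0), i.e. "*p" with an explicit
         constant offset of the saved pointer type.  */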
 16578     /* We arrive here through the various re-gimplification paths.  */
16579 case MEM_REF:
16580 /* First try re-folding the whole thing. */
16581 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16582 TREE_OPERAND (*expr_p, 0),
16583 TREE_OPERAND (*expr_p, 1));
16584 if (tmp)
16586 REF_REVERSE_STORAGE_ORDER (tmp)
16587 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16588 *expr_p = tmp;
16589 recalculate_side_effects (*expr_p);
16590 ret = GS_OK;
16591 break;
16593 /* Avoid re-gimplifying the address operand if it is already
16594 in suitable form. Re-gimplifying would mark the address
16595 operand addressable. Always gimplify when not in SSA form
16596 as we still may have to gimplify decls with value-exprs. */
16597 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16598 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16600 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16601 is_gimple_mem_ref_addr, fb_rvalue);
16602 if (ret == GS_ERROR)
16603 break;
16605 recalculate_side_effects (*expr_p);
16606 ret = GS_ALL_DONE;
16607 break;
16609 /* Constants need not be gimplified. */
16610 case INTEGER_CST:
16611 case REAL_CST:
16612 case FIXED_CST:
16613 case STRING_CST:
16614 case COMPLEX_CST:
16615 case VECTOR_CST:
 16616       /* Drop the overflow flag on constants; we do not want
16617 that in the GIMPLE IL. */
16618 if (TREE_OVERFLOW_P (*expr_p))
16619 *expr_p = drop_tree_overflow (*expr_p);
16620 ret = GS_ALL_DONE;
16621 break;
16623 case CONST_DECL:
16624 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16625 CONST_DECL node. Otherwise the decl is replaceable by its
16626 value. */
16627 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16628 if (fallback & fb_lvalue)
16629 ret = GS_ALL_DONE;
16630 else
16632 *expr_p = DECL_INITIAL (*expr_p);
16633 ret = GS_OK;
16635 break;
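      /* Illustrative sketch, not part of GCC: enumerators are
         CONST_DECLs, so in

           enum color { RED = 3 };
           int f (void) { return RED; }

         the rvalue use of RED is simply replaced by its DECL_INITIAL,
         the INTEGER_CST 3; only an lvalue context keeps the decl.  */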
16637 case DECL_EXPR:
16638 ret = gimplify_decl_expr (expr_p, pre_p);
16639 break;
16641 case BIND_EXPR:
16642 ret = gimplify_bind_expr (expr_p, pre_p);
16643 break;
16645 case LOOP_EXPR:
16646 ret = gimplify_loop_expr (expr_p, pre_p);
16647 break;
16649 case SWITCH_EXPR:
16650 ret = gimplify_switch_expr (expr_p, pre_p);
16651 break;
16653 case EXIT_EXPR:
16654 ret = gimplify_exit_expr (expr_p);
16655 break;
16657 case GOTO_EXPR:
 16658       /* If the target is not a LABEL_DECL, then it is a computed jump
 16659          and the target needs to be gimplified.  */
16660 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16662 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16663 NULL, is_gimple_val, fb_rvalue);
16664 if (ret == GS_ERROR)
16665 break;
16667 gimplify_seq_add_stmt (pre_p,
16668 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16669 ret = GS_ALL_DONE;
16670 break;
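      /* Illustrative sketch, not part of GCC: the computed-jump path
         covers the GNU labels-as-values extension, e.g.

           void dispatch (int i)
           {
             static void *tbl[] = { &&L0, &&L1 };
             goto *tbl[i];
           L0: return;
           L1: return;
           }

         where the destination expression tbl[i] must itself be
         gimplified into a register value before the GIMPLE_GOTO is
         built.  */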
16672 case PREDICT_EXPR:
16673 gimplify_seq_add_stmt (pre_p,
16674 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16675 PREDICT_EXPR_OUTCOME (*expr_p)));
16676 ret = GS_ALL_DONE;
16677 break;
16679 case LABEL_EXPR:
16680 ret = gimplify_label_expr (expr_p, pre_p);
16681 label = LABEL_EXPR_LABEL (*expr_p);
16682 gcc_assert (decl_function_context (label) == current_function_decl);
 16684       /* If the label is used in a goto statement, or the address of the
 16685          label is taken, we need to unpoison all variables that were seen
 16686          so far.  Doing so prevents us from reporting false positives.  */
16687 if (asan_poisoned_variables
16688 && asan_used_labels != NULL
16689 && asan_used_labels->contains (label)
16690 && !gimplify_omp_ctxp)
16691 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16692 break;
16694 case CASE_LABEL_EXPR:
16695 ret = gimplify_case_label_expr (expr_p, pre_p);
16697 if (gimplify_ctxp->live_switch_vars)
16698 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16699 pre_p);
16700 break;
16702 case RETURN_EXPR:
16703 ret = gimplify_return_expr (*expr_p, pre_p);
16704 break;
16706 case CONSTRUCTOR:
16707 /* Don't reduce this in place; let gimplify_init_constructor work its
 16708          magic.  But if we're just elaborating this for side effects,
 16709          gimplify only the elements that have side effects.  */
16710 if (fallback == fb_none)
16712 unsigned HOST_WIDE_INT ix;
16713 tree val;
16714 tree temp = NULL_TREE;
16715 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16716 if (TREE_SIDE_EFFECTS (val))
16717 append_to_statement_list (val, &temp);
16719 *expr_p = temp;
16720 ret = temp ? GS_OK : GS_ALL_DONE;
16722 /* C99 code may assign to an array in a constructed
16723 structure or union, and this has undefined behavior only
16724 on execution, so create a temporary if an lvalue is
16725 required. */
16726 else if (fallback == fb_lvalue)
16728 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16729 mark_addressable (*expr_p);
16730 ret = GS_OK;
16732 else
16733 ret = GS_ALL_DONE;
16734 break;
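      /* Illustrative sketch, not part of GCC: with fallback == fb_none
         the aggregate value itself is unused, e.g. in the C99 code

           extern int f (void);
           struct S { int a, b; };
           void g (void) { (struct S){ f (), 0 }; }

         only the call f () has an observable effect, so the loop above
         keeps just the side-effecting elements and drops the rest.  */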
16736 /* The following are special cases that are not handled by the
16737 original GIMPLE grammar. */
16739 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16740 eliminated. */
16741 case SAVE_EXPR:
16742 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16743 break;
16745 case BIT_FIELD_REF:
16746 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16747 post_p, is_gimple_lvalue, fb_either);
16748 recalculate_side_effects (*expr_p);
16749 break;
16751 case TARGET_MEM_REF:
16753 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16755 if (TMR_BASE (*expr_p))
16756 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16757 post_p, is_gimple_mem_ref_addr, fb_either);
16758 if (TMR_INDEX (*expr_p))
16759 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16760 post_p, is_gimple_val, fb_rvalue);
16761 if (TMR_INDEX2 (*expr_p))
16762 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16763 post_p, is_gimple_val, fb_rvalue);
16764 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16765 ret = MIN (r0, r1);
16767 break;
16769 case NON_LVALUE_EXPR:
16770 /* This should have been stripped above. */
16771 gcc_unreachable ();
16773 case ASM_EXPR:
16774 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16775 break;
16777 case TRY_FINALLY_EXPR:
16778 case TRY_CATCH_EXPR:
16780 gimple_seq eval, cleanup;
16781 gtry *try_;
 16783         /* Calls to destructors are generated automatically in FINALLY/CATCH
 16784            blocks.  They should have UNKNOWN_LOCATION as their location.
 16785            However, gimplify_call_expr will reset such call stmts to
 16786            input_location if it finds their location is unknown.  To prevent
 16787            that resetting for destructors, we set input_location to unknown.
 16788            Note that this only affects the destructor calls in FINALLY/CATCH
 16789            blocks; input_location is automatically restored to its original
 16790            value by the end of gimplify_expr.  */
16791 input_location = UNKNOWN_LOCATION;
16792 eval = cleanup = NULL;
16793 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16794 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16795 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16797 gimple_seq n = NULL, e = NULL;
16798 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16799 0), &n);
16800 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16801 1), &e);
16802 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16804 geh_else *stmt = gimple_build_eh_else (n, e);
16805 gimple_seq_add_stmt (&cleanup, stmt);
16808 else
16809 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16810 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16811 if (gimple_seq_empty_p (cleanup))
16813 gimple_seq_add_seq (pre_p, eval);
16814 ret = GS_ALL_DONE;
16815 break;
16817 try_ = gimple_build_try (eval, cleanup,
16818 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16819 ? GIMPLE_TRY_FINALLY
16820 : GIMPLE_TRY_CATCH);
16821 if (EXPR_HAS_LOCATION (save_expr))
16822 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16823 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16824 gimple_set_location (try_, saved_location);
16825 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16826 gimple_try_set_catch_is_cleanup (try_,
16827 TRY_CATCH_IS_CLEANUP (*expr_p));
16828 gimplify_seq_add_stmt (pre_p, try_);
16829 ret = GS_ALL_DONE;
16830 break;
16833 case CLEANUP_POINT_EXPR:
16834 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16835 break;
16837 case TARGET_EXPR:
16838 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16839 break;
16841 case CATCH_EXPR:
16843 gimple *c;
16844 gimple_seq handler = NULL;
16845 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16846 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16847 gimplify_seq_add_stmt (pre_p, c);
16848 ret = GS_ALL_DONE;
16849 break;
16852 case EH_FILTER_EXPR:
16854 gimple *ehf;
16855 gimple_seq failure = NULL;
16857 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16858 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16859 copy_warning (ehf, *expr_p);
16860 gimplify_seq_add_stmt (pre_p, ehf);
16861 ret = GS_ALL_DONE;
16862 break;
16865 case OBJ_TYPE_REF:
16867 enum gimplify_status r0, r1;
16868 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16869 post_p, is_gimple_val, fb_rvalue);
16870 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16871 post_p, is_gimple_val, fb_rvalue);
16872 TREE_SIDE_EFFECTS (*expr_p) = 0;
16873 ret = MIN (r0, r1);
16875 break;
16877 case LABEL_DECL:
 16878       /* We get here when taking the address of a label.  We mark
 16879          the label as "forced", meaning it can never be removed and
 16880          is a potential target for any computed goto.  */
16881 FORCED_LABEL (*expr_p) = 1;
16882 ret = GS_ALL_DONE;
16883 break;
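      /* Illustrative sketch, not part of GCC: this is the "&&label" half
         of the labels-as-values extension, e.g.

           void *addr (void) { L: return &&L; }

         Once its address escapes, the label must be preserved
         (FORCED_LABEL) because any computed goto in the function might
         jump to it.  */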
16885 case STATEMENT_LIST:
16886 ret = gimplify_statement_list (expr_p, pre_p);
16887 break;
16889 case WITH_SIZE_EXPR:
16891 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16892 post_p == &internal_post ? NULL : post_p,
16893 gimple_test_f, fallback);
16894 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16895 is_gimple_val, fb_rvalue);
16896 ret = GS_ALL_DONE;
16898 break;
16900 case VAR_DECL:
16901 case PARM_DECL:
16902 ret = gimplify_var_or_parm_decl (expr_p);
16903 break;
16905 case RESULT_DECL:
16906 /* When within an OMP context, notice uses of variables. */
16907 if (gimplify_omp_ctxp)
16908 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16909 ret = GS_ALL_DONE;
16910 break;
16912 case DEBUG_EXPR_DECL:
16913 gcc_unreachable ();
16915 case DEBUG_BEGIN_STMT:
16916 gimplify_seq_add_stmt (pre_p,
16917 gimple_build_debug_begin_stmt
16918 (TREE_BLOCK (*expr_p),
16919 EXPR_LOCATION (*expr_p)));
16920 ret = GS_ALL_DONE;
16921 *expr_p = NULL;
16922 break;
16924 case SSA_NAME:
16925 /* Allow callbacks into the gimplifier during optimization. */
16926 ret = GS_ALL_DONE;
16927 break;
16929 case OMP_PARALLEL:
16930 gimplify_omp_parallel (expr_p, pre_p);
16931 ret = GS_ALL_DONE;
16932 break;
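      /* Illustrative sketch, not part of GCC: a parallel region such as

           #pragma omp parallel
           { work (); }

         is handled here by gimplifying the clauses and body and wrapping
         them in a GIMPLE_OMP_PARALLEL statement for the later OMP
         lowering/expansion passes to process.  */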
16934 case OMP_TASK:
16935 gimplify_omp_task (expr_p, pre_p);
16936 ret = GS_ALL_DONE;
16937 break;
16939 case OMP_SIMD:
 16941         /* Temporarily disable into_ssa, as scan_omp_simd,
 16942            which calls copy_gimple_seq_and_replace_locals, can't properly
 16943            deal with SSA_NAMEs defined outside of the body.  */
16944 bool saved_into_ssa = gimplify_ctxp->into_ssa;
16945 gimplify_ctxp->into_ssa = false;
16946 ret = gimplify_omp_for (expr_p, pre_p);
16947 gimplify_ctxp->into_ssa = saved_into_ssa;
16948 break;
16951 case OMP_FOR:
16952 case OMP_DISTRIBUTE:
16953 case OMP_TASKLOOP:
16954 case OACC_LOOP:
16955 ret = gimplify_omp_for (expr_p, pre_p);
16956 break;
16958 case OMP_LOOP:
16959 ret = gimplify_omp_loop (expr_p, pre_p);
16960 break;
16962 case OACC_CACHE:
16963 gimplify_oacc_cache (expr_p, pre_p);
16964 ret = GS_ALL_DONE;
16965 break;
16967 case OACC_DECLARE:
16968 gimplify_oacc_declare (expr_p, pre_p);
16969 ret = GS_ALL_DONE;
16970 break;
16972 case OACC_HOST_DATA:
16973 case OACC_DATA:
16974 case OACC_KERNELS:
16975 case OACC_PARALLEL:
16976 case OACC_SERIAL:
16977 case OMP_SCOPE:
16978 case OMP_SECTIONS:
16979 case OMP_SINGLE:
16980 case OMP_TARGET:
16981 case OMP_TARGET_DATA:
16982 case OMP_TEAMS:
16983 gimplify_omp_workshare (expr_p, pre_p);
16984 ret = GS_ALL_DONE;
16985 break;
16987 case OACC_ENTER_DATA:
16988 case OACC_EXIT_DATA:
16989 case OACC_UPDATE:
16990 case OMP_TARGET_UPDATE:
16991 case OMP_TARGET_ENTER_DATA:
16992 case OMP_TARGET_EXIT_DATA:
16993 gimplify_omp_target_update (expr_p, pre_p);
16994 ret = GS_ALL_DONE;
16995 break;
16997 case OMP_SECTION:
16998 case OMP_MASTER:
16999 case OMP_MASKED:
17000 case OMP_ORDERED:
17001 case OMP_CRITICAL:
17002 case OMP_SCAN:
17004 gimple_seq body = NULL;
17005 gimple *g;
17006 bool saved_in_omp_construct = in_omp_construct;
17008 in_omp_construct = true;
17009 gimplify_and_add (OMP_BODY (*expr_p), &body);
17010 in_omp_construct = saved_in_omp_construct;
17011 switch (TREE_CODE (*expr_p))
17013 case OMP_SECTION:
17014 g = gimple_build_omp_section (body);
17015 break;
17016 case OMP_MASTER:
17017 g = gimple_build_omp_master (body);
17018 break;
17019 case OMP_ORDERED:
17020 g = gimplify_omp_ordered (*expr_p, body);
17021 if (OMP_BODY (*expr_p) == NULL_TREE
17022 && gimple_code (g) == GIMPLE_OMP_ORDERED)
17023 gimple_omp_ordered_standalone (g);
17024 break;
17025 case OMP_MASKED:
17026 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
17027 pre_p, ORT_WORKSHARE, OMP_MASKED);
17028 gimplify_adjust_omp_clauses (pre_p, body,
17029 &OMP_MASKED_CLAUSES (*expr_p),
17030 OMP_MASKED);
17031 g = gimple_build_omp_masked (body,
17032 OMP_MASKED_CLAUSES (*expr_p));
17033 break;
17034 case OMP_CRITICAL:
17035 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
17036 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
17037 gimplify_adjust_omp_clauses (pre_p, body,
17038 &OMP_CRITICAL_CLAUSES (*expr_p),
17039 OMP_CRITICAL);
17040 g = gimple_build_omp_critical (body,
17041 OMP_CRITICAL_NAME (*expr_p),
17042 OMP_CRITICAL_CLAUSES (*expr_p));
17043 break;
17044 case OMP_SCAN:
17045 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
17046 pre_p, ORT_WORKSHARE, OMP_SCAN);
17047 gimplify_adjust_omp_clauses (pre_p, body,
17048 &OMP_SCAN_CLAUSES (*expr_p),
17049 OMP_SCAN);
17050 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
17051 break;
17052 default:
17053 gcc_unreachable ();
17055 gimplify_seq_add_stmt (pre_p, g);
17056 ret = GS_ALL_DONE;
17057 break;
17060 case OMP_TASKGROUP:
17062 gimple_seq body = NULL;
17064 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
17065 bool saved_in_omp_construct = in_omp_construct;
17066 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
17067 OMP_TASKGROUP);
17068 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
17070 in_omp_construct = true;
17071 gimplify_and_add (OMP_BODY (*expr_p), &body);
17072 in_omp_construct = saved_in_omp_construct;
17073 gimple_seq cleanup = NULL;
17074 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
17075 gimple *g = gimple_build_call (fn, 0);
17076 gimple_seq_add_stmt (&cleanup, g);
17077 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
17078 body = NULL;
17079 gimple_seq_add_stmt (&body, g);
17080 g = gimple_build_omp_taskgroup (body, *pclauses);
17081 gimplify_seq_add_stmt (pre_p, g);
17082 ret = GS_ALL_DONE;
17083 break;
17086 case OMP_ATOMIC:
17087 case OMP_ATOMIC_READ:
17088 case OMP_ATOMIC_CAPTURE_OLD:
17089 case OMP_ATOMIC_CAPTURE_NEW:
17090 ret = gimplify_omp_atomic (expr_p, pre_p);
17091 break;
17093 case TRANSACTION_EXPR:
17094 ret = gimplify_transaction (expr_p, pre_p);
17095 break;
17097 case TRUTH_AND_EXPR:
17098 case TRUTH_OR_EXPR:
17099 case TRUTH_XOR_EXPR:
17101 tree orig_type = TREE_TYPE (*expr_p);
17102 tree new_type, xop0, xop1;
17103 *expr_p = gimple_boolify (*expr_p);
17104 new_type = TREE_TYPE (*expr_p);
17105 if (!useless_type_conversion_p (orig_type, new_type))
17107 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17108 ret = GS_OK;
17109 break;
17112 /* Boolified binary truth expressions are semantically equivalent
17113 to bitwise binary expressions. Canonicalize them to the
17114 bitwise variant. */
17115 switch (TREE_CODE (*expr_p))
17117 case TRUTH_AND_EXPR:
17118 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17119 break;
17120 case TRUTH_OR_EXPR:
17121 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17122 break;
17123 case TRUTH_XOR_EXPR:
17124 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17125 break;
17126 default:
17127 break;
 17129         /* Now make sure that the operands have types compatible with
 17130            the expression's new_type.  */
17131 xop0 = TREE_OPERAND (*expr_p, 0);
17132 xop1 = TREE_OPERAND (*expr_p, 1);
17133 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17134 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17135 new_type,
17136 xop0);
17137 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17138 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17139 new_type,
17140 xop1);
17141 /* Continue classified as tcc_binary. */
17142 goto expr_2;
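      /* Illustrative sketch, not part of GCC: once both operands are
         known to be 0/1-valued, the unconditional truth operators
         coincide with the bitwise ones, e.g. for

           _Bool h (_Bool a, _Bool b) { return a & b; }

         TRUTH_AND_EXPR and BIT_AND_EXPR compute the same value, so the
         bitwise form is used as the canonical GIMPLE representation.  */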
17145 case VEC_COND_EXPR:
17146 goto expr_3;
17148 case VEC_PERM_EXPR:
17149 /* Classified as tcc_expression. */
17150 goto expr_3;
17152 case BIT_INSERT_EXPR:
17153 /* Argument 3 is a constant. */
17154 goto expr_2;
17156 case POINTER_PLUS_EXPR:
17158 enum gimplify_status r0, r1;
17159 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17160 post_p, is_gimple_val, fb_rvalue);
17161 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17162 post_p, is_gimple_val, fb_rvalue);
17163 recalculate_side_effects (*expr_p);
17164 ret = MIN (r0, r1);
17165 break;
17168 default:
17169 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17171 case tcc_comparison:
 17172         /* Handle comparison of objects of non-scalar-mode aggregates
17173 with a call to memcmp. It would be nice to only have to do
17174 this for variable-sized objects, but then we'd have to allow
17175 the same nest of reference nodes we allow for MODIFY_EXPR and
17176 that's too complex.
17178 Compare scalar mode aggregates as scalar mode values. Using
17179 memcmp for them would be very inefficient at best, and is
17180 plain wrong if bitfields are involved. */
17181 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17182 ret = GS_ERROR;
17183 else
17185 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17187 /* Vector comparisons need no boolification. */
17188 if (TREE_CODE (type) == VECTOR_TYPE)
17189 goto expr_2;
17190 else if (!AGGREGATE_TYPE_P (type))
17192 tree org_type = TREE_TYPE (*expr_p);
17193 *expr_p = gimple_boolify (*expr_p);
17194 if (!useless_type_conversion_p (org_type,
17195 TREE_TYPE (*expr_p)))
17197 *expr_p = fold_convert_loc (input_location,
17198 org_type, *expr_p);
17199 ret = GS_OK;
17201 else
17202 goto expr_2;
17204 else if (TYPE_MODE (type) != BLKmode)
17205 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17206 else
17207 ret = gimplify_variable_sized_compare (expr_p);
17209 break;
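      /* Illustrative sketch, not part of GCC: front ends such as Ada
         allow comparing whole aggregates.  A BLKmode (in-memory)
         comparison is lowered to the equivalent of

           result = __builtin_memcmp (&op0, &op1, sizeof op0) == 0;

         while an aggregate small enough to have a scalar mode is
         compared directly as the corresponding scalar value.  */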
17211 /* If *EXPR_P does not need to be special-cased, handle it
17212 according to its class. */
17213 case tcc_unary:
17214 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17215 post_p, is_gimple_val, fb_rvalue);
17216 break;
17218 case tcc_binary:
17219 expr_2:
17221 enum gimplify_status r0, r1;
17223 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17224 post_p, is_gimple_val, fb_rvalue);
17225 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17226 post_p, is_gimple_val, fb_rvalue);
17228 ret = MIN (r0, r1);
17229 break;
17232 expr_3:
17234 enum gimplify_status r0, r1, r2;
17236 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17237 post_p, is_gimple_val, fb_rvalue);
17238 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17239 post_p, is_gimple_val, fb_rvalue);
17240 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17241 post_p, is_gimple_val, fb_rvalue);
17243 ret = MIN (MIN (r0, r1), r2);
17244 break;
17247 case tcc_declaration:
17248 case tcc_constant:
17249 ret = GS_ALL_DONE;
17250 goto dont_recalculate;
17252 default:
17253 gcc_unreachable ();
17256 recalculate_side_effects (*expr_p);
17258 dont_recalculate:
17259 break;
17262 gcc_assert (*expr_p || ret != GS_OK);
17264 while (ret == GS_OK);
17266 /* If we encountered an error_mark somewhere nested inside, either
17267 stub out the statement or propagate the error back out. */
17268 if (ret == GS_ERROR)
17270 if (is_statement)
17271 *expr_p = NULL;
17272 goto out;
17275 /* This was only valid as a return value from the langhook, which
17276 we handled. Make sure it doesn't escape from any other context. */
17277 gcc_assert (ret != GS_UNHANDLED);
17279 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17281 /* We aren't looking for a value, and we don't have a valid
17282 statement. If it doesn't have side-effects, throw it away.
17283 We can also get here with code such as "*&&L;", where L is
17284 a LABEL_DECL that is marked as FORCED_LABEL. */
17285 if (TREE_CODE (*expr_p) == LABEL_DECL
17286 || !TREE_SIDE_EFFECTS (*expr_p))
17287 *expr_p = NULL;
17288 else if (!TREE_THIS_VOLATILE (*expr_p))
17290 /* This is probably a _REF that contains something nested that
17291 has side effects. Recurse through the operands to find it. */
17292 enum tree_code code = TREE_CODE (*expr_p);
17294 switch (code)
17296 case COMPONENT_REF:
17297 case REALPART_EXPR:
17298 case IMAGPART_EXPR:
17299 case VIEW_CONVERT_EXPR:
17300 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17301 gimple_test_f, fallback);
17302 break;
17304 case ARRAY_REF:
17305 case ARRAY_RANGE_REF:
17306 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17307 gimple_test_f, fallback);
17308 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17309 gimple_test_f, fallback);
17310 break;
17312 default:
17313 /* Anything else with side-effects must be converted to
17314 a valid statement before we get here. */
17315 gcc_unreachable ();
17318 *expr_p = NULL;
17320 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17321 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17322 && !is_empty_type (TREE_TYPE (*expr_p)))
17324 /* Historically, the compiler has treated a bare reference
17325 to a non-BLKmode volatile lvalue as forcing a load. */
17326 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17328 /* Normally, we do not want to create a temporary for a
17329 TREE_ADDRESSABLE type because such a type should not be
17330 copied by bitwise-assignment. However, we make an
17331 exception here, as all we are doing here is ensuring that
17332 we read the bytes that make up the type. We use
17333 create_tmp_var_raw because create_tmp_var will abort when
17334 given a TREE_ADDRESSABLE type. */
17335 tree tmp = create_tmp_var_raw (type, "vol");
17336 gimple_add_tmp_var (tmp);
17337 gimplify_assign (tmp, *expr_p, pre_p);
17338 *expr_p = NULL;
17340 else
17341 /* We can't do anything useful with a volatile reference to
17342 an incomplete type, so just throw it away. Likewise for
17343 a BLKmode type, since any implicit inner load should
17344 already have been turned into an explicit one by the
17345 gimplification process. */
17346 *expr_p = NULL;
17349 /* If we are gimplifying at the statement level, we're done. Tack
17350 everything together and return. */
17351 if (fallback == fb_none || is_statement)
17353 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17354 it out for GC to reclaim it. */
17355 *expr_p = NULL_TREE;
17357 if (!gimple_seq_empty_p (internal_pre)
17358 || !gimple_seq_empty_p (internal_post))
17360 gimplify_seq_add_seq (&internal_pre, internal_post);
17361 gimplify_seq_add_seq (pre_p, internal_pre);
17364 /* The result of gimplifying *EXPR_P is going to be the last few
17365 statements in *PRE_P and *POST_P. Add location information
17366 to all the statements that were added by the gimplification
17367 helpers. */
17368 if (!gimple_seq_empty_p (*pre_p))
17369 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17371 if (!gimple_seq_empty_p (*post_p))
17372 annotate_all_with_location_after (*post_p, post_last_gsi,
17373 input_location);
17375 goto out;
17378 #ifdef ENABLE_GIMPLE_CHECKING
17379 if (*expr_p)
17381 enum tree_code code = TREE_CODE (*expr_p);
17382 /* These expressions should already be in gimple IR form. */
17383 gcc_assert (code != MODIFY_EXPR
17384 && code != ASM_EXPR
17385 && code != BIND_EXPR
17386 && code != CATCH_EXPR
17387 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17388 && code != EH_FILTER_EXPR
17389 && code != GOTO_EXPR
17390 && code != LABEL_EXPR
17391 && code != LOOP_EXPR
17392 && code != SWITCH_EXPR
17393 && code != TRY_FINALLY_EXPR
17394 && code != EH_ELSE_EXPR
17395 && code != OACC_PARALLEL
17396 && code != OACC_KERNELS
17397 && code != OACC_SERIAL
17398 && code != OACC_DATA
17399 && code != OACC_HOST_DATA
17400 && code != OACC_DECLARE
17401 && code != OACC_UPDATE
17402 && code != OACC_ENTER_DATA
17403 && code != OACC_EXIT_DATA
17404 && code != OACC_CACHE
17405 && code != OMP_CRITICAL
17406 && code != OMP_FOR
17407 && code != OACC_LOOP
17408 && code != OMP_MASTER
17409 && code != OMP_MASKED
17410 && code != OMP_TASKGROUP
17411 && code != OMP_ORDERED
17412 && code != OMP_PARALLEL
17413 && code != OMP_SCAN
17414 && code != OMP_SECTIONS
17415 && code != OMP_SECTION
17416 && code != OMP_SINGLE
17417 && code != OMP_SCOPE);
17419 #endif
17421 /* Otherwise we're gimplifying a subexpression, so the resulting
17422 value is interesting. If it's a valid operand that matches
17423 GIMPLE_TEST_F, we're done. Unless we are handling some
17424 post-effects internally; if that's the case, we need to copy into
17425 a temporary before adding the post-effects to POST_P. */
17426 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17427 goto out;
17429 /* Otherwise, we need to create a new temporary for the gimplified
17430 expression. */
17432 /* We can't return an lvalue if we have an internal postqueue. The
17433 object the lvalue refers to would (probably) be modified by the
17434 postqueue; we need to copy the value out first, which means an
17435 rvalue. */
17436 if ((fallback & fb_lvalue)
17437 && gimple_seq_empty_p (internal_post)
17438 && is_gimple_addressable (*expr_p))
17440 /* An lvalue will do. Take the address of the expression, store it
 17441          in a temporary, and replace the expression with a MEM_REF of
17442 that temporary. */
17443 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17444 unsigned int ref_align = get_object_alignment (*expr_p);
17445 tree ref_type = TREE_TYPE (*expr_p);
17446 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17447 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17448 if (TYPE_ALIGN (ref_type) != ref_align)
17449 ref_type = build_aligned_type (ref_type, ref_align);
17450 *expr_p = build2 (MEM_REF, ref_type,
17451 tmp, build_zero_cst (ref_alias_type));
17453 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17455 /* An rvalue will do. Assign the gimplified expression into a
17456 new temporary TMP and replace the original expression with
17457 TMP. First, make sure that the expression has a type so that
17458 it can be assigned into a temporary. */
17459 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17460 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17462 else
17464 #ifdef ENABLE_GIMPLE_CHECKING
17465 if (!(fallback & fb_mayfail))
17467 fprintf (stderr, "gimplification failed:\n");
17468 print_generic_expr (stderr, *expr_p);
17469 debug_tree (*expr_p);
17470 internal_error ("gimplification failed");
17472 #endif
17473 gcc_assert (fallback & fb_mayfail);
17475 /* If this is an asm statement, and the user asked for the
17476 impossible, don't die. Fail and let gimplify_asm_expr
17477 issue an error. */
17478 ret = GS_ERROR;
17479 goto out;
17482 /* Make sure the temporary matches our predicate. */
17483 gcc_assert ((*gimple_test_f) (*expr_p));
17485 if (!gimple_seq_empty_p (internal_post))
17487 annotate_all_with_location (internal_post, input_location);
17488 gimplify_seq_add_seq (pre_p, internal_post);
17491 out:
17492 input_location = saved_location;
17493 return ret;
 17496 /* Like gimplify_expr but make sure the gimplified result is not itself
 17497    an SSA name (a decl is used instead where it would be).  Temporaries
 17498    required for evaluating *EXPR_P may still be SSA names.  */
17500 static enum gimplify_status
17501 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17502 bool (*gimple_test_f) (tree), fallback_t fallback,
17503 bool allow_ssa)
17505 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17506 gimple_test_f, fallback);
17507 if (! allow_ssa
17508 && TREE_CODE (*expr_p) == SSA_NAME)
17509 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17510 return ret;
17513 /* Look through TYPE for variable-sized objects and gimplify each such
17514 size that we find. Add to LIST_P any statements generated. */
17516 void
17517 gimplify_type_sizes (tree type, gimple_seq *list_p)
17519 if (type == NULL || type == error_mark_node)
17520 return;
17522 const bool ignored_p
17523 = TYPE_NAME (type)
17524 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17525 && DECL_IGNORED_P (TYPE_NAME (type));
17526 tree t;
17528 /* We first do the main variant, then copy into any other variants. */
17529 type = TYPE_MAIN_VARIANT (type);
17531 /* Avoid infinite recursion. */
17532 if (TYPE_SIZES_GIMPLIFIED (type))
17533 return;
17535 TYPE_SIZES_GIMPLIFIED (type) = 1;
17537 switch (TREE_CODE (type))
17539 case INTEGER_TYPE:
17540 case ENUMERAL_TYPE:
17541 case BOOLEAN_TYPE:
17542 case REAL_TYPE:
17543 case FIXED_POINT_TYPE:
17544 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17545 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17547 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17549 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17550 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17552 break;
17554 case ARRAY_TYPE:
17555 /* These types may not have declarations, so handle them here. */
17556 gimplify_type_sizes (TREE_TYPE (type), list_p);
17557 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
 17558       /* Ensure VLA bounds aren't removed: for -O0 they should be variables
 17559          with assigned stack slots; for -O1+ -g they should be tracked
 17560          by VTA.  */
17561 if (!ignored_p
17562 && TYPE_DOMAIN (type)
17563 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17565 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17566 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17567 DECL_IGNORED_P (t) = 0;
17568 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17569 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17570 DECL_IGNORED_P (t) = 0;
17572 break;
17574 case RECORD_TYPE:
17575 case UNION_TYPE:
17576 case QUAL_UNION_TYPE:
17577 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17578 if (TREE_CODE (field) == FIELD_DECL)
17580 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17581 /* Likewise, ensure variable offsets aren't removed. */
17582 if (!ignored_p
17583 && (t = DECL_FIELD_OFFSET (field))
17584 && VAR_P (t)
17585 && DECL_ARTIFICIAL (t))
17586 DECL_IGNORED_P (t) = 0;
17587 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17588 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17589 gimplify_type_sizes (TREE_TYPE (field), list_p);
17591 break;
17593 case POINTER_TYPE:
17594 case REFERENCE_TYPE:
17595 /* We used to recurse on the pointed-to type here, which turned out to
17596 be incorrect because its definition might refer to variables not
17597 yet initialized at this point if a forward declaration is involved.
17599 It was actually useful for anonymous pointed-to types to ensure
17600 that the sizes evaluation dominates every possible later use of the
17601 values. Restricting to such types here would be safe since there
17602 is no possible forward declaration around, but would introduce an
17603 undesirable middle-end semantic to anonymity. We then defer to
17604 front-ends the responsibility of ensuring that the sizes are
17605 evaluated both early and late enough, e.g. by attaching artificial
17606 type declarations to the tree. */
17607 break;
17609 default:
17610 break;
17613 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17614 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17616 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17618 TYPE_SIZE (t) = TYPE_SIZE (type);
17619 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17620 TYPE_SIZES_GIMPLIFIED (t) = 1;
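/* Illustrative sketch, not part of GCC: for a C VLA such as

     void f (int n) { int a[n]; use (a); }

   the array type's size and bound expressions are gimplified here into
   statements defining artificial variables, and for -O0/-g those
   variables stay visible (DECL_IGNORED_P cleared) so the bounds remain
   inspectable in the debugger.  */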
17624 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17625 a size or position, has had all of its SAVE_EXPRs evaluated.
17626 We add any required statements to *STMT_P. */
17628 void
17629 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17631 tree expr = *expr_p;
17633 /* We don't do anything if the value isn't there, is constant, or contains
 17634    a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
17635 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17636 will want to replace it with a new variable, but that will cause problems
17637 if this type is from outside the function. It's OK to have that here. */
17638 if (expr == NULL_TREE
17639 || is_gimple_constant (expr)
17640 || VAR_P (expr)
17641 || CONTAINS_PLACEHOLDER_P (expr))
17642 return;
17644 *expr_p = unshare_expr (expr);
17646 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17647 if the def vanishes. */
17648 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17650 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17651 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
17652 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17653 if (is_gimple_constant (*expr_p))
17654 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17657 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17658 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17659 is true, also gimplify the parameters. */
17661 gbind *
17662 gimplify_body (tree fndecl, bool do_parms)
17664 location_t saved_location = input_location;
17665 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17666 gimple *outer_stmt;
17667 gbind *outer_bind;
17669 timevar_push (TV_TREE_GIMPLIFY);
17671 init_tree_ssa (cfun);
17673 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17674 gimplification. */
17675 default_rtl_profile ();
17677 gcc_assert (gimplify_ctxp == NULL);
17678 push_gimplify_context (true);
17680 if (flag_openacc || flag_openmp)
17682 gcc_assert (gimplify_omp_ctxp == NULL);
17683 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17684 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17687 /* Unshare most shared trees in the body and in that of any nested functions.
17688 It would seem we don't have to do this for nested functions because
17689 they are supposed to be output and then the outer function gimplified
17690 first, but the g++ front end doesn't always do it that way. */
17691 unshare_body (fndecl);
17692 unvisit_body (fndecl);
17694 /* Make sure input_location isn't set to something weird. */
17695 input_location = DECL_SOURCE_LOCATION (fndecl);
17697 /* Resolve callee-copies. This has to be done before processing
17698 the body so that DECL_VALUE_EXPR gets processed correctly. */
17699 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17701 /* Gimplify the function's body. */
17702 seq = NULL;
17703 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17704 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17705 if (!outer_stmt)
17707 outer_stmt = gimple_build_nop ();
17708 gimplify_seq_add_stmt (&seq, outer_stmt);
17711 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17712 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17713 if (gimple_code (outer_stmt) == GIMPLE_BIND
17714 && (gimple_seq_first_nondebug_stmt (seq)
17715 == gimple_seq_last_nondebug_stmt (seq)))
17717 outer_bind = as_a <gbind *> (outer_stmt);
17718 if (gimple_seq_first_stmt (seq) != outer_stmt
17719 || gimple_seq_last_stmt (seq) != outer_stmt)
17721 /* If there are debug stmts before or after outer_stmt, move them
17722 inside of outer_bind body. */
17723 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17724 gimple_seq second_seq = NULL;
17725 if (gimple_seq_first_stmt (seq) != outer_stmt
17726 && gimple_seq_last_stmt (seq) != outer_stmt)
17728 second_seq = gsi_split_seq_after (gsi);
17729 gsi_remove (&gsi, false);
17731 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17732 gsi_remove (&gsi, false);
17733 else
17735 gsi_remove (&gsi, false);
17736 second_seq = seq;
17737 seq = NULL;
17739 gimple_seq_add_seq_without_update (&seq,
17740 gimple_bind_body (outer_bind));
17741 gimple_seq_add_seq_without_update (&seq, second_seq);
17742 gimple_bind_set_body (outer_bind, seq);
17745 else
17746 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17748 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17750 /* If we had callee-copies statements, insert them at the beginning
17751 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17752 if (!gimple_seq_empty_p (parm_stmts))
17754 tree parm;
17756 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17757 if (parm_cleanup)
17759 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17760 GIMPLE_TRY_FINALLY);
17761 parm_stmts = NULL;
17762 gimple_seq_add_stmt (&parm_stmts, g);
17764 gimple_bind_set_body (outer_bind, parm_stmts);
17766 for (parm = DECL_ARGUMENTS (current_function_decl);
17767 parm; parm = DECL_CHAIN (parm))
17768 if (DECL_HAS_VALUE_EXPR_P (parm))
17770 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17771 DECL_IGNORED_P (parm) = 0;
17775 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17776 && gimplify_omp_ctxp)
17778 delete_omp_context (gimplify_omp_ctxp);
17779 gimplify_omp_ctxp = NULL;
17782 pop_gimplify_context (outer_bind);
17783 gcc_assert (gimplify_ctxp == NULL);
17785 if (flag_checking && !seen_error ())
17786 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17788 timevar_pop (TV_TREE_GIMPLIFY);
17789 input_location = saved_location;
17791 return outer_bind;
17794 typedef char *char_p; /* For DEF_VEC_P. */
17796 /* Return whether we should exclude FNDECL from instrumentation. */
17798 static bool
17799 flag_instrument_functions_exclude_p (tree fndecl)
17801 vec<char_p> *v;
17803 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17804 if (v && v->length () > 0)
17806 const char *name;
17807 int i;
17808 char *s;
17810 name = lang_hooks.decl_printable_name (fndecl, 1);
17811 FOR_EACH_VEC_ELT (*v, i, s)
17812 if (strstr (name, s) != NULL)
17813 return true;
17816 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17817 if (v && v->length () > 0)
17819 const char *name;
17820 int i;
17821 char *s;
17823 name = DECL_SOURCE_FILE (fndecl);
17824 FOR_EACH_VEC_ELT (*v, i, s)
17825 if (strstr (name, s) != NULL)
17826 return true;
17829 return false;
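/* Illustrative sketch, not part of GCC internals: the two vectors
   searched above come from the command-line options

     -finstrument-functions-exclude-function-list=sym,sym,...
     -finstrument-functions-exclude-file-list=file,file,...

   and matching is a plain strstr substring test against the printable
   function name or the source file name, respectively.  */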
17832 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17833 If COND_VAR is not NULL, it is a boolean variable guarding the call to
 17834    the instrumentation function.  If STMT is not NULL, it is a statement
17835 to be executed just before the call to the instrumentation function. */
17837 static void
17838 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17839 tree cond_var, gimple *stmt)
17841 /* The instrumentation hooks aren't going to call the instrumented
17842 function and the address they receive is expected to be matchable
17843 against symbol addresses. Make sure we don't create a trampoline,
17844 in case the current function is nested. */
17845 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17846 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17848 tree label_true, label_false;
17849 if (cond_var)
17851 label_true = create_artificial_label (UNKNOWN_LOCATION);
17852 label_false = create_artificial_label (UNKNOWN_LOCATION);
17853 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17854 label_true, label_false);
17855 gimplify_seq_add_stmt (seq, cond);
17856 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17857 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17858 NOT_TAKEN));
17861 if (stmt)
17862 gimplify_seq_add_stmt (seq, stmt);
17864 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17865 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17866 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17867 gimple_call_set_lhs (call, tmp_var);
17868 gimplify_seq_add_stmt (seq, call);
17869 x = builtin_decl_implicit (fncode);
17870 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17871 gimplify_seq_add_stmt (seq, call);
17873 if (cond_var)
17874 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
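/* Illustrative sketch, not part of GCC: the calls built above target
   the user-supplied -finstrument-functions hooks, declared as

     void __cyg_profile_func_enter (void *this_fn, void *call_site);
     void __cyg_profile_func_exit  (void *this_fn, void *call_site);

   where this_fn is the address of the instrumented function and
   call_site is taken from __builtin_return_address (0).  */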
17877 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17878 node for the function we want to gimplify.
 17880    The function's body is converted to GIMPLE and installed with
 17881    gimple_set_body; nothing is returned.  */
17883 void
17884 gimplify_function_tree (tree fndecl)
17886 gimple_seq seq;
17887 gbind *bind;
17889 gcc_assert (!gimple_body (fndecl));
17891 if (DECL_STRUCT_FUNCTION (fndecl))
17892 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17893 else
17894 push_struct_function (fndecl);
17896 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17897 if necessary. */
17898 cfun->curr_properties |= PROP_gimple_lva;
17900 if (asan_sanitize_use_after_scope ())
17901 asan_poisoned_variables = new hash_set<tree> ();
17902 bind = gimplify_body (fndecl, true);
17903 if (asan_poisoned_variables)
17905 delete asan_poisoned_variables;
17906 asan_poisoned_variables = NULL;
17909 /* The tree body of the function is no longer needed, replace it
17910 with the new GIMPLE body. */
17911 seq = NULL;
17912 gimple_seq_add_stmt (&seq, bind);
17913 gimple_set_body (fndecl, seq);
17915 /* If we're instrumenting function entry/exit, then prepend the call to
17916 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17917 catch the exit hook. */
17918 /* ??? Add some way to ignore exceptions for this TFE. */
17919 if (flag_instrument_function_entry_exit
17920 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
17921 /* Do not instrument extern inline functions. */
17922 && !(DECL_DECLARED_INLINE_P (fndecl)
17923 && DECL_EXTERNAL (fndecl)
17924 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
17925 && !flag_instrument_functions_exclude_p (fndecl))
17927 gimple_seq body = NULL, cleanup = NULL;
17928 gassign *assign;
17929 tree cond_var;
17931 /* If -finstrument-functions-once is specified, generate:
17933 static volatile bool C.0 = false;
17934 bool tmp_called;
17936 tmp_called = C.0;
17937 if (!tmp_called)
17939 C.0 = true;
17940 [call profiling enter function]
17943 without specific protection for data races. */
17944 if (flag_instrument_function_entry_exit > 1)
17946 tree first_var
17947 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
17948 VAR_DECL,
17949 create_tmp_var_name ("C"),
17950 boolean_type_node);
17951 DECL_ARTIFICIAL (first_var) = 1;
17952 DECL_IGNORED_P (first_var) = 1;
17953 TREE_STATIC (first_var) = 1;
17954 TREE_THIS_VOLATILE (first_var) = 1;
17955 TREE_USED (first_var) = 1;
17956 DECL_INITIAL (first_var) = boolean_false_node;
17957 varpool_node::add (first_var);
17959 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
17960 assign = gimple_build_assign (cond_var, first_var);
17961 gimplify_seq_add_stmt (&body, assign);
17963 assign = gimple_build_assign (first_var, boolean_true_node);
17966 else
17968 cond_var = NULL_TREE;
17969 assign = NULL;
17972 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
17973 cond_var, assign);
17975 /* If -finstrument-functions-once is specified, generate:
17977 if (!tmp_called)
17978 [call profiling exit function]
17980 without specific protection for data races. */
17981 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
17982 cond_var, NULL);
17984 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
17985 gimplify_seq_add_stmt (&body, tf);
17986 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
17988 /* Replace the current function body with the body
17989 wrapped in the try/finally TF. */
17990 seq = NULL;
17991 gimple_seq_add_stmt (&seq, new_bind);
17992 gimple_set_body (fndecl, seq);
17993 bind = new_bind;
17996 if (sanitize_flags_p (SANITIZE_THREAD)
17997 && param_tsan_instrument_func_entry_exit)
17999 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
18000 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
18001 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
18002 /* Replace the current function body with the body
18003 wrapped in the try/finally TF. */
18004 seq = NULL;
18005 gimple_seq_add_stmt (&seq, new_bind);
18006 gimple_set_body (fndecl, seq);
18009 DECL_SAVED_TREE (fndecl) = NULL_TREE;
18010 cfun->curr_properties |= PROP_gimple_any;
18012 pop_cfun ();
18014 dump_function (TDI_gimple, fndecl);
18017 /* Return a dummy expression of type TYPE in order to keep going after an
18018 error. */
18020 static tree
18021 dummy_object (tree type)
18023 tree t = build_int_cst (build_pointer_type (type), 0);
18024 return build2 (MEM_REF, type, t, t);
18027 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
18028 builtin function, but a very special sort of operator. */
18030 enum gimplify_status
18031 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
18032 gimple_seq *post_p ATTRIBUTE_UNUSED)
18034 tree promoted_type, have_va_type;
18035 tree valist = TREE_OPERAND (*expr_p, 0);
18036 tree type = TREE_TYPE (*expr_p);
18037 tree t, tag, aptag;
18038 location_t loc = EXPR_LOCATION (*expr_p);
18040 /* Verify that valist is of the proper type. */
18041 have_va_type = TREE_TYPE (valist);
18042 if (have_va_type == error_mark_node)
18043 return GS_ERROR;
18044 have_va_type = targetm.canonical_va_list_type (have_va_type);
18045 if (have_va_type == NULL_TREE
18046 && POINTER_TYPE_P (TREE_TYPE (valist)))
18047 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
18048 have_va_type
18049 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
18050 gcc_assert (have_va_type != NULL_TREE);
18052 /* Generate a diagnostic for requesting data of a type that cannot
18053 be passed through `...' due to type promotion at the call site. */
18054 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
18055 != type)
18057 static bool gave_help;
18058 bool warned;
18059 /* Use the expansion point to handle cases such as passing bool (defined
18060 in a system header) through `...'. */
18061 location_t xloc
18062 = expansion_point_location_if_in_system_header (loc);
18064 /* Unfortunately, this is merely undefined, rather than a constraint
18065 violation, so we cannot make this an error. If this call is never
18066 executed, the program is still strictly conforming. */
18067 auto_diagnostic_group d;
18068 warned = warning_at (xloc, 0,
18069 "%qT is promoted to %qT when passed through %<...%>",
18070 type, promoted_type);
18071 if (!gave_help && warned)
18073 gave_help = true;
18074 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
18075 promoted_type, type);
18078 /* We can, however, treat "undefined" any way we please.
18079 Call abort to encourage the user to fix the program. */
18080 if (warned)
18081 inform (xloc, "if this code is reached, the program will abort");
18082 /* Before the abort, allow the evaluation of the va_list
18083 expression to exit or longjmp. */
18084 gimplify_and_add (valist, pre_p);
18085 t = build_call_expr_loc (loc,
18086 builtin_decl_implicit (BUILT_IN_TRAP), 0);
18087 gimplify_and_add (t, pre_p);
18089 /* This is dead code, but go ahead and finish so that the
18090 mode of the result comes out right. */
18091 *expr_p = dummy_object (type);
18092 return GS_ALL_DONE;
18095 tag = build_int_cst (build_pointer_type (type), 0);
18096 aptag = build_int_cst (TREE_TYPE (valist), 0);
18098 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
18099 valist, tag, aptag);
18101 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18102 needs to be expanded. */
18103 cfun->curr_properties &= ~PROP_gimple_lva;
18105 return GS_OK;
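/* Illustrative sketch, not part of GCC: the promotion diagnostic above
   fires for code like

     #include <stdarg.h>
     int first (int n, ...)
     {
       va_list ap;
       va_start (ap, n);
       char c = va_arg (ap, char);  // undefined: char promotes to int
       va_end (ap);
       return c;
     }

   Since the caller passed an int, reading a char here is undefined; the
   va_list expression is still evaluated, then a trap is emitted.  */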
18108 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18110 DST/SRC are the destination and source respectively. You can pass
18111 ungimplified trees in DST or SRC, in which case they will be
18112 converted to a gimple operand if necessary.
18114 This function returns the newly created GIMPLE_ASSIGN tuple. */
18116 gimple *
18117 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18119 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18120 gimplify_and_add (t, seq_p);
18121 ggc_free (t);
18122 return gimple_seq_last_stmt (*seq_p);
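/* Illustrative sketch with a hypothetical decl "dst": typical usage
   from inside the gimplifier is

     gimple_seq seq = NULL;
     gimplify_assign (dst, build_int_cst (integer_type_node, 1), &seq);

   which appends the tuple "dst = 1" to seq and returns it; both
   operands are gimplified first when they are not yet valid GIMPLE
   operands.  */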
18125 inline hashval_t
18126 gimplify_hasher::hash (const elt_t *p)
18128 tree t = p->val;
18129 return iterative_hash_expr (t, 0);
18132 inline bool
18133 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18135 tree t1 = p1->val;
18136 tree t2 = p2->val;
18137 enum tree_code code = TREE_CODE (t1);
18139 if (TREE_CODE (t2) != code
18140 || TREE_TYPE (t1) != TREE_TYPE (t2))
18141 return false;
18143 if (!operand_equal_p (t1, t2, 0))
18144 return false;
18146 /* Only allow them to compare equal if they also hash equal; otherwise
 18147    results are nondeterministic, and we fail bootstrap comparison.  */
18148 gcc_checking_assert (hash (p1) == hash (p2));
18150 return true;