/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}
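
/* For illustration only (hypothetical temporary names, not a GCC dump):
   when optimizing, two formal-temporary requests for the same unfactored
   expression share one temporary via temp_htab, roughly

     D.1 = a + b;	<-- first request creates D.1
     ... D.1 ... D.1	<-- second request finds D.1 in temp_htab

   which is only safe under the two conditions documented above.  */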
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
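
/* A minimal sketch of the effect (hypothetical trees, not a GCC dump):
   if the same GENERIC node T is referenced twice, as in

     MODIFY_EXPR <a, T>
     MODIFY_EXPR <b, T>	<-- second reference to the same node

   the walk marks T with TREE_VISITED at the first reference and, at the
   second, substitutes an unshared copy T', so each occurrence can later
   be gimplified in-place independently:

     MODIFY_EXPR <a, T>
     MODIFY_EXPR <b, T'>  */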
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of that EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
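
/* Illustrative case (hypothetical statements): for a STATEMENT_LIST of
   the form

     # DEBUG BEGIN STMT
     # DEBUG BEGIN STMT
     x = 1;	<-- the only non-debug statement

   rexpr_location returns the location of `x = 1;', recursing if that
   statement is itself such a STATEMENT_LIST.  */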
/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
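
/* Illustrative effect (hypothetical GENERIC, not a dump): a value-producing
   wrapper such as

     BIND_EXPR <vars, { ...; last_expr }>	-- TREE_TYPE is the value type

   is voidified by walking down to the innermost non-wrapper expression and
   pushing the assignment onto it:

     BIND_EXPR <vars, { ...; retval = last_expr }>	-- TREE_TYPE now void

   with `retval' either the supplied TEMP's LHS or a fresh temporary.  */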
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
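
/* The pair of statements built here shows up in GIMPLE dumps roughly as
   (temporary name illustrative):

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   gimplify_bind_expr below wraps the bind body between the two with a
   GIMPLE_TRY_FINALLY so the restore runs on every exit path.  */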
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of the variable DECL.  The call is
   inserted at the position identified by the iterator IT; the BEFORE flag
   selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that either poisons or
   unpoisons DECL, depending on the POISON flag.  The created statement
   is appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}
/* Generate IFN_ASAN_MARK internal calls for all VARIABLES, poisoning or
   unpoisoning depending on the POISON flag.  The created statements are
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable to
	 prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must specify
		 an allocator clause unless a requires directive with the
		 dynamic_allocators clause is present in the same compilation
		 unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      else if (align != NULL_TREE
		       || alloc == NULL_TREE
		       || !integer_onep (alloc))
		sorry_at (DECL_SOURCE_LOCATION (t),
			  "OpenMP %<allocate%> directive, used for %qD, not "
			  "yet supported", t);
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
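
/* For orientation (illustrative GIMPLE shape, names hypothetical): a block
   containing a VLA that needs cleanup gimplifies to roughly

     gimple_bind <vars,
       saved_stack.1 = __builtin_stack_save ();
       try
	 <body>
       finally
	 {
	   __builtin_stack_restore (saved_stack.1);
	   var = {CLOBBER(eol)};
	 }>

   with any required ASAN re-poisoning appended to the same cleanup
   sequence.  */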
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early return as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
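
/* Illustrative rewrite (hypothetical temporary name): for a function
   returning int by value,

     return a + b;	-- GENERIC: RETURN_EXPR <MODIFY_EXPR <RESULT_DECL,
							    a + b>>

   gimplifies into

     D.1 = a + b;
     return D.1;

   with D.1 cached in gimplify_ctxp->return_temp and reused by every
   return statement in the function.  */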
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
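
/* Sketch of the effect (hypothetical names): for `char a[n];' this emits
   roughly

     a.1 = __builtin_alloca_with_align (D.2, align);  -- CALL_ALLOCA_FOR_VAR_P

   and sets DECL_VALUE_EXPR (a) = *a.1, so every later use of `a' is
   rewritten as an indirection through the pointer temporary.  */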
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
      sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
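
/* E.g. with -ftrivial-auto-var-init=zero, an otherwise uninitialized
   `int x;' gets (illustrative dump; AUTO_INIT_ZERO has value 2):

     x = .DEFERRED_INIT (4, 2, &"x"[0]);

   which later passes expand into an actual zero (or pattern) store.  */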
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init of an aggregate with fewer initializers
   than members will initialize the rest of the aggregate as if it were
   static initialization.  In turn static initialization guarantees
   that padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init' to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}

/* Return true if DECL needs to be automatically initialized by the
   compiler.  */
static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}
1885 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1886 and initialization explicit. */
1888 static enum gimplify_status
1889 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1891 tree stmt = *stmt_p;
1892 tree decl = DECL_EXPR_DECL (stmt);
1894 *stmt_p = NULL_TREE;
1896 if (TREE_TYPE (decl) == error_mark_node)
1897 return GS_ERROR;
1899 if ((TREE_CODE (decl) == TYPE_DECL
1900 || VAR_P (decl))
1901 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1903 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1904 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1905 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1908 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1909 in case its size expressions contain problematic nodes like CALL_EXPR. */
1910 if (TREE_CODE (decl) == TYPE_DECL
1911 && DECL_ORIGINAL_TYPE (decl)
1912 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1914 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1915 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1916 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1919 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1921 tree init = DECL_INITIAL (decl);
1922 bool is_vla = false;
1923 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1924 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1925 If the decl has VALUE_EXPR that was created by FE (usually
1926 C++FE), it's a proxy varaible, and FE already initialized
1927 the VALUE_EXPR of it, we should not initialize it anymore. */
1928 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1930 poly_uint64 size;
1931 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1932 || (!TREE_STATIC (decl)
1933 && flag_stack_check == GENERIC_STACK_CHECK
1934 && maybe_gt (size,
1935 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1937 gimplify_vla_decl (decl, seq_p);
1938 is_vla = true;
1941 if (asan_poisoned_variables
1942 && !is_vla
1943 && TREE_ADDRESSABLE (decl)
1944 && !TREE_STATIC (decl)
1945 && !DECL_HAS_VALUE_EXPR_P (decl)
1946 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1947 && dbg_cnt (asan_use_after_scope)
1948 && !gimplify_omp_ctxp
1949 /* GNAT introduces temporaries to hold return values of calls in
1950 initializers of variables defined in other units, so the
1951 declaration of the variable is discarded completely. We do not
1952 want to issue poison calls for such dropped variables. */
1953 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1954 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1956 asan_poisoned_variables->add (decl);
1957 asan_poison_variable (decl, false, seq_p);
1958 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1959 gimplify_ctxp->live_switch_vars->add (decl);
1962 /* Some front ends do not explicitly declare all anonymous
1963 artificial variables. We compensate here by declaring the
1964 variables, though it would be better if the front ends would
1965 explicitly declare them. */
1966 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1967 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1968 gimple_add_tmp_var (decl);
1970 if (init && init != error_mark_node)
1972 if (!TREE_STATIC (decl))
1974 DECL_INITIAL (decl) = NULL_TREE;
1975 init = build2 (INIT_EXPR, void_type_node, decl, init);
1976 gimplify_and_add (init, seq_p);
1977 ggc_free (init);
1978 /* Clear TREE_READONLY if we really have an initialization. */
1979 if (!DECL_INITIAL (decl)
1980 && !omp_privatize_by_reference (decl))
1981 TREE_READONLY (decl) = 0;
1983 else
1984 /* We must still examine initializers for static variables
1985 as they may contain a label address. */
1986 walk_tree (&init, force_labels_r, NULL, NULL);
1988 /* When there is no explicit initializer, insert an artificial
1989 initializer for this automatic variable if the user requested
1990 one. */
1991 else if (is_var_need_auto_init (decl)
1992 && !decl_had_value_expr_p)
1994 gimple_add_init_for_auto_var (decl,
1995 flag_auto_var_init,
1996 seq_p);
1997 /* Expanding the call to the above .DEFERRED_INIT applies block
1998 initialization to the whole space covered by this variable. As a
1999 result, all the padding will be initialized to zeroes for zero
2000 initialization and to 0xFE byte-repeatable patterns for pattern
2001 initialization.
2002 In order to make the padding zeroes for pattern init too, we
2003 should add a call to __builtin_clear_padding to clear the
2004 padding to zero, compatible with CLANG.
2005 We cannot insert this call if the variable is a gimple register,
2006 since __builtin_clear_padding will take the address of the
2007 variable. As a result, if a long double/_Complex long double
2008 variable is later spilled onto the stack, its padding is 0xFE. */
2009 if (flag_auto_var_init == AUTO_INIT_PATTERN
2010 && !is_gimple_reg (decl)
2011 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
2012 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
2016 return GS_ALL_DONE;
2019 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2020 and replacing the LOOP_EXPR with goto, but if the loop contains an
2021 EXIT_EXPR, we need to append a label for it to jump to. */
2023 static enum gimplify_status
2024 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2026 tree saved_label = gimplify_ctxp->exit_label;
2027 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2029 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
2031 gimplify_ctxp->exit_label = NULL_TREE;
2033 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
2035 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
2037 if (gimplify_ctxp->exit_label)
2038 gimplify_seq_add_stmt (pre_p,
2039 gimple_build_label (gimplify_ctxp->exit_label));
2041 gimplify_ctxp->exit_label = saved_label;
2043 *expr_p = NULL;
2044 return GS_ALL_DONE;
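/* An illustrative sketch of the lowering (label names made up):
     LOOP_EXPR <body>
   becomes
     start_label:
       body
       goto start_label;
     exit_label:   // appended only if body contained an EXIT_EXPR  */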
2047 /* Gimplify a statement list onto a sequence. These may be created either
2048 by an enlightened front-end, or by shortcut_cond_expr. */
2050 static enum gimplify_status
2051 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2053 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2055 tree_stmt_iterator i = tsi_start (*expr_p);
2057 while (!tsi_end_p (i))
2059 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2060 tsi_delink (&i);
2063 if (temp)
2065 *expr_p = temp;
2066 return GS_OK;
2069 return GS_ALL_DONE;
2073 /* Emit a warning for the unreachable statement STMT if needed.
2074 Return the statement itself when the warning is emitted, otherwise
2075 return NULL. */
2076 static gimple *
2077 emit_warn_switch_unreachable (gimple *stmt)
2079 if (gimple_code (stmt) == GIMPLE_GOTO
2080 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2081 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2082 /* Don't warn for compiler-generated gotos. These occur
2083 in Duff's devices, for example. */
2084 return NULL;
2085 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2086 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2087 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2088 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2089 || (is_gimple_assign (stmt)
2090 && gimple_assign_single_p (stmt)
2091 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2092 && gimple_call_internal_p (
2093 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2094 IFN_DEFERRED_INIT))))
2095 /* Don't warn for compiler-generated initializations for
2096 -ftrivial-auto-var-init.
2097 There are 3 cases:
2098 case 1: a call to .DEFERRED_INIT;
2099 case 2: a call to __builtin_clear_padding whose 2nd argument is
2100 present and non-zero;
2101 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2102 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2103 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2104 i1 = _1. */
2105 return NULL;
2106 else
2107 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2108 "statement will never be executed");
2109 return stmt;
2112 /* Callback for walk_gimple_seq. */
2114 static tree
2115 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2116 bool *handled_ops_p,
2117 struct walk_stmt_info *wi)
2119 gimple *stmt = gsi_stmt (*gsi_p);
2120 bool unreachable_issued = wi->info != NULL;
2122 *handled_ops_p = true;
2123 switch (gimple_code (stmt))
2125 case GIMPLE_TRY:
2126 /* A compiler-generated cleanup or a user-written try block.
2127 If it's empty, don't dive into it--that would result in
2128 worse location info. */
2129 if (gimple_try_eval (stmt) == NULL)
2131 if (warn_switch_unreachable && !unreachable_issued)
2132 wi->info = emit_warn_switch_unreachable (stmt);
2134 /* Stop if the auto-var-init warning is not enabled. */
2135 if (!warn_trivial_auto_var_init)
2136 return integer_zero_node;
2138 /* Fall through. */
2139 case GIMPLE_BIND:
2140 case GIMPLE_CATCH:
2141 case GIMPLE_EH_FILTER:
2142 case GIMPLE_TRANSACTION:
2143 /* Walk the sub-statements. */
2144 *handled_ops_p = false;
2145 break;
2147 case GIMPLE_DEBUG:
2148 /* Ignore these. We may generate them before declarations that
2149 are never executed. If there's something to warn about,
2150 there will be non-debug stmts too, and we'll catch those. */
2151 break;
2153 case GIMPLE_LABEL:
2154 /* Stop at the first label. */
2155 return integer_zero_node;
2156 case GIMPLE_CALL:
2157 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2159 *handled_ops_p = false;
2160 break;
2162 if (warn_trivial_auto_var_init
2163 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2164 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2166 /* Get the variable name from the 3rd argument of the call. */
2167 tree var_name = gimple_call_arg (stmt, 2);
2168 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2169 const char *var_name_str = TREE_STRING_POINTER (var_name);
2171 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2172 "%qs cannot be initialized with"
2173 " %<-ftrivial-auto-var-init%>",
2174 var_name_str);
2175 break;
2178 /* Fall through. */
2179 default:
2180 /* Check the first "real" statement (not a decl/lexical scope/...), and
2181 issue a warning if needed. */
2182 if (warn_switch_unreachable && !unreachable_issued)
2183 wi->info = emit_warn_switch_unreachable (stmt);
2184 /* Stop if the auto-var-init warning is not enabled. */
2185 if (!warn_trivial_auto_var_init)
2186 return integer_zero_node;
2187 break;
2189 return NULL_TREE;
2193 /* Possibly warn about unreachable statements between a switch's controlling
2194 expression and the first case. Also warn when -ftrivial-auto-var-init
2195 cannot initialize the auto variable in such a situation.
2196 SEQ is the body of a switch expression. */
2198 static void
2199 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2201 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2202 /* This warning doesn't play well with Fortran when optimizations
2203 are on. */
2204 || lang_GNU_Fortran ()
2205 || seq == NULL)
2206 return;
2208 struct walk_stmt_info wi;
2210 memset (&wi, 0, sizeof (wi));
2211 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
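/* An illustrative source that triggers the warning:
     switch (x)
       {
         int i = f ();   // -Wswitch-unreachable: "statement will never
       case 0:           // be executed"
         use (i);
       }  */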
2215 /* A label entry that pairs label and a location. */
2216 struct label_entry
2218 tree label;
2219 location_t loc;
2222 /* Find LABEL in vector of label entries VEC. */
2224 static struct label_entry *
2225 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2227 unsigned int i;
2228 struct label_entry *l;
2230 FOR_EACH_VEC_ELT (*vec, i, l)
2231 if (l->label == label)
2232 return l;
2233 return NULL;
2236 /* Return true if LABEL, a LABEL_DECL, represents a case label
2237 in a vector of labels CASES. */
2239 static bool
2240 case_label_p (const vec<tree> *cases, tree label)
2242 unsigned int i;
2243 tree l;
2245 FOR_EACH_VEC_ELT (*cases, i, l)
2246 if (CASE_LABEL (l) == label)
2247 return true;
2248 return false;
2251 /* Find the last nondebug statement in a scope STMT. */
2253 static gimple *
2254 last_stmt_in_scope (gimple *stmt)
2256 if (!stmt)
2257 return NULL;
2259 switch (gimple_code (stmt))
2261 case GIMPLE_BIND:
2263 gbind *bind = as_a <gbind *> (stmt);
2264 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2265 return last_stmt_in_scope (stmt);
2268 case GIMPLE_TRY:
2270 gtry *try_stmt = as_a <gtry *> (stmt);
2271 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2272 gimple *last_eval = last_stmt_in_scope (stmt);
2273 if (gimple_stmt_may_fallthru (last_eval)
2274 && (last_eval == NULL
2275 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2276 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2278 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2279 return last_stmt_in_scope (stmt);
2281 else
2282 return last_eval;
2285 case GIMPLE_DEBUG:
2286 gcc_unreachable ();
2288 default:
2289 return stmt;
2293 /* Collect labels that may fall through into LABELS and return the statement
2294 preceding another case label, or a user-defined label. Store a location
2295 useful to give warnings at *PREVLOC (usually the location of the returned
2296 statement or of its surrounding scope). */
2298 static gimple *
2299 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2300 auto_vec <struct label_entry> *labels,
2301 location_t *prevloc)
2303 gimple *prev = NULL;
2305 *prevloc = UNKNOWN_LOCATION;
2308 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2310 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2311 which starts on a GIMPLE_SWITCH and ends with a break label.
2312 Handle that as a single statement that can fall through. */
2313 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2314 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2315 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2316 if (last
2317 && gimple_code (first) == GIMPLE_SWITCH
2318 && gimple_code (last) == GIMPLE_LABEL)
2320 tree label = gimple_label_label (as_a <glabel *> (last));
2321 if (SWITCH_BREAK_LABEL_P (label))
2323 prev = bind;
2324 gsi_next (gsi_p);
2325 continue;
2329 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2330 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2332 /* Nested scope. Only look at the last statement of
2333 the innermost scope. */
2334 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2335 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2336 if (last)
2338 prev = last;
2339 /* It might be a label without a location. Use the
2340 location of the scope then. */
2341 if (!gimple_has_location (prev))
2342 *prevloc = bind_loc;
2344 gsi_next (gsi_p);
2345 continue;
2348 /* Ifs are tricky. */
2349 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2351 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2352 tree false_lab = gimple_cond_false_label (cond_stmt);
2353 location_t if_loc = gimple_location (cond_stmt);
2355 /* If we have e.g.
2356 if (i > 1) goto <D.2259>; else goto D;
2357 we can't do much with the else-branch. */
2358 if (!DECL_ARTIFICIAL (false_lab))
2359 break;
2361 /* Go on until the false label, then one step back. */
2362 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2364 gimple *stmt = gsi_stmt (*gsi_p);
2365 if (gimple_code (stmt) == GIMPLE_LABEL
2366 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2367 break;
2370 /* Not found? Oops. */
2371 if (gsi_end_p (*gsi_p))
2372 break;
2374 /* A dead label can't fall through. */
2375 if (!UNUSED_LABEL_P (false_lab))
2377 struct label_entry l = { false_lab, if_loc };
2378 labels->safe_push (l);
2381 /* Go to the last statement of the then branch. */
2382 gsi_prev (gsi_p);
2384 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2385 <D.1759>:
2386 <stmt>;
2387 goto <D.1761>;
2388 <D.1760>:
2390 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2391 && !gimple_has_location (gsi_stmt (*gsi_p)))
2393 /* Look at the statement before, it might be
2394 attribute fallthrough, in which case don't warn. */
2395 gsi_prev (gsi_p);
2396 bool fallthru_before_dest
2397 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2398 gsi_next (gsi_p);
2399 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2400 if (!fallthru_before_dest)
2402 struct label_entry l = { goto_dest, if_loc };
2403 labels->safe_push (l);
2406 /* This case is about
2407 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2408 <D.2022>:
2409 n = n + 1; // #1
2410 <D.2023>: // #2
2411 <D.1988>: // #3
2412 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2413 through to #3. So set PREV to #1. */
2414 else if (UNUSED_LABEL_P (false_lab))
2415 prev = gsi_stmt (*gsi_p);
2417 /* And move back. */
2418 gsi_next (gsi_p);
2421 /* Remember the last statement. Skip labels that are of no interest
2422 to us. */
2423 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2425 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2426 if (find_label_entry (labels, label))
2427 prev = gsi_stmt (*gsi_p);
2429 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2431 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2433 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2434 prev = gsi_stmt (*gsi_p);
2435 gsi_next (gsi_p);
2437 while (!gsi_end_p (*gsi_p)
2438 /* Stop if we find a case or a user-defined label. */
2439 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2440 || !gimple_has_location (gsi_stmt (*gsi_p))));
2442 if (prev && gimple_has_location (prev))
2443 *prevloc = gimple_location (prev);
2444 return prev;
2447 /* Return true if the switch fallthrough warning should occur. LABEL is
2448 the label statement that we're falling through to. */
2450 static bool
2451 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2453 gimple_stmt_iterator gsi = *gsi_p;
2455 /* Don't warn if the label is marked with a "falls through" comment. */
2456 if (FALLTHROUGH_LABEL_P (label))
2457 return false;
2459 /* Don't warn for non-case labels followed by a statement:
2460 case 0:
2461 foo ();
2462 label:
2463 bar ();
2464 as these are likely intentional. */
2465 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2467 tree l;
2468 while (!gsi_end_p (gsi)
2469 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2470 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2471 && !case_label_p (&gimplify_ctxp->case_labels, l))
2472 gsi_next_nondebug (&gsi);
2473 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2474 return false;
2477 /* Don't warn for terminated branches, i.e. when the code following the
2478 subsequent case labels immediately breaks or returns. */
2479 gsi = *gsi_p;
2481 /* Skip all immediately following labels. */
2482 while (!gsi_end_p (gsi)
2483 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2484 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2485 gsi_next_nondebug (&gsi);
2487 /* { ... something; default:; } */
2488 if (gsi_end_p (gsi)
2489 /* { ... something; default: break; } or
2490 { ... something; default: goto L; } */
2491 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2492 /* { ... something; default: return; } */
2493 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2494 return false;
2496 return true;
2499 /* Callback for walk_gimple_seq. */
2501 static tree
2502 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2503 struct walk_stmt_info *)
2505 gimple *stmt = gsi_stmt (*gsi_p);
2507 *handled_ops_p = true;
2508 switch (gimple_code (stmt))
2510 case GIMPLE_TRY:
2511 case GIMPLE_BIND:
2512 case GIMPLE_CATCH:
2513 case GIMPLE_EH_FILTER:
2514 case GIMPLE_TRANSACTION:
2515 /* Walk the sub-statements. */
2516 *handled_ops_p = false;
2517 break;
2519 /* Find a sequence of the form:
2521 GIMPLE_LABEL
2522 [...]
2523 <may fallthru stmt>
2524 GIMPLE_LABEL
2526 and possibly warn. */
2527 case GIMPLE_LABEL:
2529 /* Found a label. Skip all immediately following labels. */
2530 while (!gsi_end_p (*gsi_p)
2531 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2532 gsi_next_nondebug (gsi_p);
2534 /* There might be no more statements. */
2535 if (gsi_end_p (*gsi_p))
2536 return integer_zero_node;
2538 /* Vector of labels that fall through. */
2539 auto_vec <struct label_entry> labels;
2540 location_t prevloc;
2541 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2543 /* There might be no more statements. */
2544 if (gsi_end_p (*gsi_p))
2545 return integer_zero_node;
2547 gimple *next = gsi_stmt (*gsi_p);
2548 tree label;
2549 /* If what follows is a label, then we may have a fallthrough. */
2550 if (gimple_code (next) == GIMPLE_LABEL
2551 && gimple_has_location (next)
2552 && (label = gimple_label_label (as_a <glabel *> (next)))
2553 && prev != NULL)
2555 struct label_entry *l;
2556 bool warned_p = false;
2557 auto_diagnostic_group d;
2558 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2559 /* Quiet. */;
2560 else if (gimple_code (prev) == GIMPLE_LABEL
2561 && (label = gimple_label_label (as_a <glabel *> (prev)))
2562 && (l = find_label_entry (&labels, label)))
2563 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2564 "this statement may fall through");
2565 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2566 /* Try to be clever and don't warn when the statement
2567 can't actually fall through. */
2568 && gimple_stmt_may_fallthru (prev)
2569 && prevloc != UNKNOWN_LOCATION)
2570 warned_p = warning_at (prevloc,
2571 OPT_Wimplicit_fallthrough_,
2572 "this statement may fall through");
2573 if (warned_p)
2574 inform (gimple_location (next), "here");
2576 /* Mark this label as processed so as to prevent multiple
2577 warnings in nested switches. */
2578 FALLTHROUGH_LABEL_P (label) = true;
2580 /* So that the next warn_implicit_fallthrough_r will start looking for
2581 a new sequence starting with this label. */
2582 gsi_prev (gsi_p);
2585 break;
2586 default:
2587 break;
2589 return NULL_TREE;
2592 /* Warn when a switch case falls through. */
2594 static void
2595 maybe_warn_implicit_fallthrough (gimple_seq seq)
2597 if (!warn_implicit_fallthrough)
2598 return;
2600 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2601 if (!(lang_GNU_C ()
2602 || lang_GNU_CXX ()
2603 || lang_GNU_OBJC ()))
2604 return;
2606 struct walk_stmt_info wi;
2607 memset (&wi, 0, sizeof (wi));
2608 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
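/* An illustrative source that gets diagnosed:
     switch (x)
       {
       case 0:
         foo ();   // -Wimplicit-fallthrough: "this statement may fall through"
       case 1:     // "here"
         bar ();
       }  */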
2611 /* Callback for walk_gimple_seq. */
2613 static tree
2614 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2615 struct walk_stmt_info *wi)
2617 gimple *stmt = gsi_stmt (*gsi_p);
2619 *handled_ops_p = true;
2620 switch (gimple_code (stmt))
2622 case GIMPLE_TRY:
2623 case GIMPLE_BIND:
2624 case GIMPLE_CATCH:
2625 case GIMPLE_EH_FILTER:
2626 case GIMPLE_TRANSACTION:
2627 /* Walk the sub-statements. */
2628 *handled_ops_p = false;
2629 break;
2630 case GIMPLE_CALL:
2631 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2633 gsi_remove (gsi_p, true);
2634 if (gsi_end_p (*gsi_p))
2636 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2637 return integer_zero_node;
2640 bool found = false;
2641 location_t loc = gimple_location (stmt);
2643 gimple_stmt_iterator gsi2 = *gsi_p;
2644 stmt = gsi_stmt (gsi2);
2645 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2647 /* Go on until the artificial label. */
2648 tree goto_dest = gimple_goto_dest (stmt);
2649 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2651 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2652 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2653 == goto_dest)
2654 break;
2657 /* Not found? Stop. */
2658 if (gsi_end_p (gsi2))
2659 break;
2661 /* Look one past it. */
2662 gsi_next (&gsi2);
2665 /* We're looking for a case label or default label here. */
2666 while (!gsi_end_p (gsi2))
2668 stmt = gsi_stmt (gsi2);
2669 if (gimple_code (stmt) == GIMPLE_LABEL)
2671 tree label = gimple_label_label (as_a <glabel *> (stmt));
2672 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2674 found = true;
2675 break;
2678 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2680 else if (!is_gimple_debug (stmt))
2681 /* Anything else is not expected. */
2682 break;
2683 gsi_next (&gsi2);
2685 if (!found)
2686 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2687 "a case label or default label");
2689 break;
2690 default:
2691 break;
2693 return NULL_TREE;
2696 /* Expand all FALLTHROUGH () calls in SEQ. */
2698 static void
2699 expand_FALLTHROUGH (gimple_seq *seq_p)
2701 struct walk_stmt_info wi;
2702 location_t loc;
2703 memset (&wi, 0, sizeof (wi));
2704 wi.info = (void *) &loc;
2705 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2706 if (wi.callback_result == integer_zero_node)
2707 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2708 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2709 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2710 "a case label or default label");
2714 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2715 branch to. */
2717 static enum gimplify_status
2718 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2720 tree switch_expr = *expr_p;
2721 gimple_seq switch_body_seq = NULL;
2722 enum gimplify_status ret;
2723 tree index_type = TREE_TYPE (switch_expr);
2724 if (index_type == NULL_TREE)
2725 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2727 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2728 fb_rvalue);
2729 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2730 return ret;
2732 if (SWITCH_BODY (switch_expr))
2734 vec<tree> labels;
2735 vec<tree> saved_labels;
2736 hash_set<tree> *saved_live_switch_vars = NULL;
2737 tree default_case = NULL_TREE;
2738 gswitch *switch_stmt;
2740 /* Save old labels, get new ones from body, then restore the old
2741 labels. Save all the things from the switch body to append after. */
2742 saved_labels = gimplify_ctxp->case_labels;
2743 gimplify_ctxp->case_labels.create (8);
2745 /* Do not create live_switch_vars if SWITCH_BODY is neither a BIND_EXPR nor a STATEMENT_LIST. */
2746 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2747 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2748 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2749 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2750 else
2751 gimplify_ctxp->live_switch_vars = NULL;
2753 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2754 gimplify_ctxp->in_switch_expr = true;
2756 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2758 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2759 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2760 maybe_warn_implicit_fallthrough (switch_body_seq);
2761 /* Only do this for the outermost GIMPLE_SWITCH. */
2762 if (!gimplify_ctxp->in_switch_expr)
2763 expand_FALLTHROUGH (&switch_body_seq);
2765 labels = gimplify_ctxp->case_labels;
2766 gimplify_ctxp->case_labels = saved_labels;
2768 if (gimplify_ctxp->live_switch_vars)
2770 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2771 delete gimplify_ctxp->live_switch_vars;
2773 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2775 preprocess_case_label_vec_for_gimple (labels, index_type,
2776 &default_case);
2778 bool add_bind = false;
2779 if (!default_case)
2781 glabel *new_default;
2783 default_case
2784 = build_case_label (NULL_TREE, NULL_TREE,
2785 create_artificial_label (UNKNOWN_LOCATION));
2786 if (old_in_switch_expr)
2788 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2789 add_bind = true;
2791 new_default = gimple_build_label (CASE_LABEL (default_case));
2792 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2794 else if (old_in_switch_expr)
2796 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2797 if (last && gimple_code (last) == GIMPLE_LABEL)
2799 tree label = gimple_label_label (as_a <glabel *> (last));
2800 if (SWITCH_BREAK_LABEL_P (label))
2801 add_bind = true;
2805 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2806 default_case, labels);
2807 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2808 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2809 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2810 so that we can easily find the start and end of the switch
2811 statement. */
2812 if (add_bind)
2814 gimple_seq bind_body = NULL;
2815 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2816 gimple_seq_add_seq (&bind_body, switch_body_seq);
2817 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2818 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2819 gimplify_seq_add_stmt (pre_p, bind);
2821 else
2823 gimplify_seq_add_stmt (pre_p, switch_stmt);
2824 gimplify_seq_add_seq (pre_p, switch_body_seq);
2826 labels.release ();
2828 else
2829 gcc_unreachable ();
2831 return GS_ALL_DONE;
2834 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2836 static enum gimplify_status
2837 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2839 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2840 == current_function_decl);
2842 tree label = LABEL_EXPR_LABEL (*expr_p);
2843 glabel *label_stmt = gimple_build_label (label);
2844 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2845 gimplify_seq_add_stmt (pre_p, label_stmt);
2847 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2848 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2849 NOT_TAKEN));
2850 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2851 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2852 TAKEN));
2854 return GS_ALL_DONE;
2857 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2859 static enum gimplify_status
2860 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2862 struct gimplify_ctx *ctxp;
2863 glabel *label_stmt;
2865 /* Invalid programs can play Duff's Device type games with, for example,
2866 #pragma omp parallel. At least in the C front end, we don't
2867 detect such invalid branches until after gimplification, in the
2868 diagnose_omp_blocks pass. */
2869 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2870 if (ctxp->case_labels.exists ())
2871 break;
2873 tree label = CASE_LABEL (*expr_p);
2874 label_stmt = gimple_build_label (label);
2875 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2876 ctxp->case_labels.safe_push (*expr_p);
2877 gimplify_seq_add_stmt (pre_p, label_stmt);
2879 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2880 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2881 NOT_TAKEN));
2882 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2883 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2884 TAKEN));
2886 return GS_ALL_DONE;
2889 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2890 if necessary. */
2892 tree
2893 build_and_jump (tree *label_p)
2895 if (label_p == NULL)
2896 /* If there's nowhere to jump, just fall through. */
2897 return NULL_TREE;
2899 if (*label_p == NULL_TREE)
2901 tree label = create_artificial_label (UNKNOWN_LOCATION);
2902 *label_p = label;
2905 return build1 (GOTO_EXPR, void_type_node, *label_p);
2908 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2909 This also involves building a label to jump to and communicating it to
2910 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2912 static enum gimplify_status
2913 gimplify_exit_expr (tree *expr_p)
2915 tree cond = TREE_OPERAND (*expr_p, 0);
2916 tree expr;
2918 expr = build_and_jump (&gimplify_ctxp->exit_label);
2919 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2920 *expr_p = expr;
2922 return GS_OK;
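/* Roughly (an illustrative sketch):
     EXIT_EXPR <cond>   ->   if (cond) goto <exit_label>;
   where <exit_label> is created lazily in gimplify_ctxp->exit_label and
   emitted after the loop body by gimplify_loop_expr.  */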
2925 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2926 different from its canonical type, wrap the whole thing inside a
2927 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2928 type.
2930 The canonical type of a COMPONENT_REF is the type of the field being
2931 referenced--unless the field is a bit-field which can be read directly
2932 in a smaller mode, in which case the canonical type is the
2933 sign-appropriate type corresponding to that mode. */
2935 static void
2936 canonicalize_component_ref (tree *expr_p)
2938 tree expr = *expr_p;
2939 tree type;
2941 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2943 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2944 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2945 else
2946 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2948 /* One could argue that all the stuff below is not necessary for
2949 the non-bitfield case, and that it should be a FE error if type
2950 adjustment would be needed. */
2951 if (TREE_TYPE (expr) != type)
2953 #ifdef ENABLE_TYPES_CHECKING
2954 tree old_type = TREE_TYPE (expr);
2955 #endif
2956 int type_quals;
2958 /* We need to preserve qualifiers and propagate them from
2959 operand 0. */
2960 type_quals = TYPE_QUALS (type)
2961 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2962 if (TYPE_QUALS (type) != type_quals)
2963 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2965 /* Set the type of the COMPONENT_REF to the underlying type. */
2966 TREE_TYPE (expr) = type;
2968 #ifdef ENABLE_TYPES_CHECKING
2969 /* It is now a FE error if the conversion from the canonical
2970 type to the original expression type is not useless. */
2971 gcc_assert (useless_type_conversion_p (old_type, type));
2972 #endif
2976 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2977 to foo, embed that change in the ADDR_EXPR by converting
2978 T array[U];
2979 (T *)&array
2981 &array[L]
2982 where L is the lower bound. For simplicity, only do this for a
2983 constant lower bound.
2984 The constraint is that the type of &array[L] is trivially convertible
2985 to T *. */
2987 static void
2988 canonicalize_addr_expr (tree *expr_p)
2990 tree expr = *expr_p;
2991 tree addr_expr = TREE_OPERAND (expr, 0);
2992 tree datype, ddatype, pddatype;
2994 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2995 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2996 || TREE_CODE (addr_expr) != ADDR_EXPR)
2997 return;
2999 /* The addr_expr type should be a pointer to an array. */
3000 datype = TREE_TYPE (TREE_TYPE (addr_expr));
3001 if (TREE_CODE (datype) != ARRAY_TYPE)
3002 return;
3004 /* The pointer to element type shall be trivially convertible to
3005 the expression pointer type. */
3006 ddatype = TREE_TYPE (datype);
3007 pddatype = build_pointer_type (ddatype);
3008 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3009 pddatype))
3010 return;
3012 /* The lower bound and element sizes must be constant. */
3013 if (!TYPE_SIZE_UNIT (ddatype)
3014 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3015 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3016 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3017 return;
3019 /* All checks succeeded. Build a new node to merge the cast. */
3020 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3021 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3022 NULL_TREE, NULL_TREE);
3023 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3025 /* We can have stripped a required restrict qualifier above. */
3026 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3027 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
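/* A concrete instance (illustrative):
     int a[10];
     (int *) &a   ->   &a[0]
   which exposes the ARRAY_REF to later folding.  */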
3030 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3031 underneath as appropriate. */
3033 static enum gimplify_status
3034 gimplify_conversion (tree *expr_p)
3036 location_t loc = EXPR_LOCATION (*expr_p);
3037 gcc_assert (CONVERT_EXPR_P (*expr_p));
3039 /* Then strip away all but the outermost conversion. */
3040 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3042 /* And remove the outermost conversion if it's useless. */
3043 if (tree_ssa_useless_type_conversion (*expr_p))
3044 *expr_p = TREE_OPERAND (*expr_p, 0);
3046 /* If we still have a conversion at the toplevel,
3047 then canonicalize some constructs. */
3048 if (CONVERT_EXPR_P (*expr_p))
3050 tree sub = TREE_OPERAND (*expr_p, 0);
3052 /* If a NOP conversion is changing the type of a COMPONENT_REF
3053 expression, then canonicalize its type now in order to expose more
3054 redundant conversions. */
3055 if (TREE_CODE (sub) == COMPONENT_REF)
3056 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3058 /* If a NOP conversion is changing a pointer to array of foo
3059 to a pointer to foo, embed that change in the ADDR_EXPR. */
3060 else if (TREE_CODE (sub) == ADDR_EXPR)
3061 canonicalize_addr_expr (expr_p);
3064 /* If we have a conversion to a non-register type force the
3065 use of a VIEW_CONVERT_EXPR instead. */
3066 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3067 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3068 TREE_OPERAND (*expr_p, 0));
3070 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3071 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3072 TREE_SET_CODE (*expr_p, NOP_EXPR);
3074 return GS_OK;
3077 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3078 DECL_VALUE_EXPR, and it's worth re-examining things. */
3080 static enum gimplify_status
3081 gimplify_var_or_parm_decl (tree *expr_p)
3083 tree decl = *expr_p;
3085 /* ??? If this is a local variable, and it has not been seen in any
3086 outer BIND_EXPR, then it's probably the result of a duplicate
3087 declaration, for which we've already issued an error. It would
3088 be really nice if the front end wouldn't leak these at all.
3089 Currently the only known culprit is C++ destructors, as seen
3090 in g++.old-deja/g++.jason/binding.C.
3091 Another possible culprit is size expressions for variably modified
3092 types which are lost in the FE or not gimplified correctly. */
3093 if (VAR_P (decl)
3094 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3095 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3096 && decl_function_context (decl) == current_function_decl)
3098 gcc_assert (seen_error ());
3099 return GS_ERROR;
3102 /* When within an OMP context, notice uses of variables. */
3103 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3104 return GS_ALL_DONE;
3106 /* If the decl is an alias for another expression, substitute it now. */
3107 if (DECL_HAS_VALUE_EXPR_P (decl))
3109 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3110 return GS_OK;
3113 return GS_ALL_DONE;
3116 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3118 static void
3119 recalculate_side_effects (tree t)
3121 enum tree_code code = TREE_CODE (t);
3122 int len = TREE_OPERAND_LENGTH (t);
3123 int i;
3125 switch (TREE_CODE_CLASS (code))
3127 case tcc_expression:
3128 switch (code)
3130 case INIT_EXPR:
3131 case MODIFY_EXPR:
3132 case VA_ARG_EXPR:
3133 case PREDECREMENT_EXPR:
3134 case PREINCREMENT_EXPR:
3135 case POSTDECREMENT_EXPR:
3136 case POSTINCREMENT_EXPR:
3137 /* All of these have side-effects, no matter what their
3138 operands are. */
3139 return;
3141 default:
3142 break;
3144 /* Fall through. */
3146 case tcc_comparison: /* a comparison expression */
3147 case tcc_unary: /* a unary arithmetic expression */
3148 case tcc_binary: /* a binary arithmetic expression */
3149 case tcc_reference: /* a reference */
3150 case tcc_vl_exp: /* a function call */
3151 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3152 for (i = 0; i < len; ++i)
3154 tree op = TREE_OPERAND (t, i);
3155 if (op && TREE_SIDE_EFFECTS (op))
3156 TREE_SIDE_EFFECTS (t) = 1;
3158 break;
3160 case tcc_constant:
3161 /* No side-effects. */
3162 return;
3164 default:
3165 gcc_unreachable ();
3169 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3170 node *EXPR_P.
3172 compound_lval
3173 : min_lval '[' val ']'
3174 | min_lval '.' ID
3175 | compound_lval '[' val ']'
3176 | compound_lval '.' ID
3178 This is not part of the original SIMPLE definition, which separates
3179 array and member references, but it seems reasonable to handle them
3180 together. Also, this way we don't run into problems with union
3181 aliasing; gcc requires that for accesses through a union to alias, the
3182 union reference must be explicit, which was not always the case when we
3183 were splitting up array and member refs.
3185 PRE_P points to the sequence where side effects that must happen before
3186 *EXPR_P should be stored.
3188 POST_P points to the sequence where side effects that must happen after
3189 *EXPR_P should be stored. */
3191 static enum gimplify_status
3192 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3193 fallback_t fallback)
3195 tree *p;
3196 enum gimplify_status ret = GS_ALL_DONE, tret;
3197 int i;
3198 location_t loc = EXPR_LOCATION (*expr_p);
3199 tree expr = *expr_p;
3201 /* Create a stack of the subexpressions so later we can walk them in
3202 order from inner to outer. */
3203 auto_vec<tree, 10> expr_stack;
3205 /* We can handle anything that get_inner_reference can deal with. */
3206 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3208 restart:
3209 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3210 if (TREE_CODE (*p) == INDIRECT_REF)
3211 *p = fold_indirect_ref_loc (loc, *p);
3213 if (handled_component_p (*p))
3215 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3216 additional COMPONENT_REFs. */
3217 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3218 && gimplify_var_or_parm_decl (p) == GS_OK)
3219 goto restart;
3220 else
3221 break;
3223 expr_stack.safe_push (*p);
3226 gcc_assert (expr_stack.length ());
3228 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3229 walked through and P points to the innermost expression.
3231 Java requires that we elaborate nodes in source order. That
3232 means we must gimplify the inner expression followed by each of
3233 the indices, in order. But we can't gimplify the inner
3234 expression until we deal with any variable bounds, sizes, or
3235 positions in order to deal with PLACEHOLDER_EXPRs.
3237 The base expression may contain a statement expression that
3238 has declarations used in size expressions, so it has to be
3239 gimplified before gimplifying the size expressions.
3241 So we do this in three steps. First we deal with variable
3242 bounds, sizes, and positions, then we gimplify the base and
3243 ensure it is memory if needed, then we deal with the annotations
3244 for any variables in the components and any indices, from left
3245 to right. */
3247 bool need_non_reg = false;
3248 for (i = expr_stack.length () - 1; i >= 0; i--)
3250 tree t = expr_stack[i];
3252 if (error_operand_p (TREE_OPERAND (t, 0)))
3253 return GS_ERROR;
3255 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3257 /* Deal with the low bound and element type size and put them into
3258 the ARRAY_REF. If these values are set, they have already been
3259 gimplified. */
3260 if (TREE_OPERAND (t, 2) == NULL_TREE)
3262 tree low = unshare_expr (array_ref_low_bound (t));
3263 if (!is_gimple_min_invariant (low))
3265 TREE_OPERAND (t, 2) = low;
3269 if (TREE_OPERAND (t, 3) == NULL_TREE)
3271 tree elmt_size = array_ref_element_size (t);
3272 if (!is_gimple_min_invariant (elmt_size))
3274 elmt_size = unshare_expr (elmt_size);
3275 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3276 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3278 /* Divide the element size by the alignment of the element
3279 type (above). */
3280 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3281 elmt_size, factor);
3283 TREE_OPERAND (t, 3) = elmt_size;
3286 need_non_reg = true;
3288 else if (TREE_CODE (t) == COMPONENT_REF)
3290 /* Set the field offset into T and gimplify it. */
3291 if (TREE_OPERAND (t, 2) == NULL_TREE)
3293 tree offset = component_ref_field_offset (t);
3294 if (!is_gimple_min_invariant (offset))
3296 offset = unshare_expr (offset);
3297 tree field = TREE_OPERAND (t, 1);
3298 tree factor
3299 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3301 /* Divide the offset by its alignment. */
3302 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3303 offset, factor);
3305 TREE_OPERAND (t, 2) = offset;
3308 need_non_reg = true;
3310 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3311 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3312 is a non-register type then require the base object to be a
3313 non-register as well. */
3314 need_non_reg = true;
3317 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3318 so as to match the min_lval predicate. Failure to do so may result
3319 in the creation of large aggregate temporaries. */
3320 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3321 fallback | fb_lvalue);
3322 ret = MIN (ret, tret);
3323 if (ret == GS_ERROR)
3324 return GS_ERROR;
3326 /* Step 2a: if we have component references we do not support on
3327 registers then make sure the base isn't a register. Of course
3328 we can only do so if an rvalue is OK. */
3329 if (need_non_reg && (fallback & fb_rvalue))
3330 prepare_gimple_addressable (p, pre_p);
3333 /* Step 3: gimplify size expressions and the indices and operands of
3334 ARRAY_REF. During this loop we also remove any useless conversions.
3335 If we operate on a register also make sure to properly gimplify
3336 to individual operations. */
3338 bool reg_operations = is_gimple_reg (*p);
3339 for (; expr_stack.length () > 0; )
3341 tree t = expr_stack.pop ();
3343 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3345 gcc_assert (!reg_operations);
3347 /* Gimplify the low bound and element type size. */
3348 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3349 is_gimple_reg, fb_rvalue);
3350 ret = MIN (ret, tret);
3352 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3353 is_gimple_reg, fb_rvalue);
3354 ret = MIN (ret, tret);
3356 /* Gimplify the dimension. */
3357 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3358 is_gimple_val, fb_rvalue);
3359 ret = MIN (ret, tret);
3361 else if (TREE_CODE (t) == COMPONENT_REF)
3363 gcc_assert (!reg_operations);
3365 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3366 is_gimple_reg, fb_rvalue);
3367 ret = MIN (ret, tret);
3369 else if (reg_operations)
3371 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3372 is_gimple_val, fb_rvalue);
3373 ret = MIN (ret, tret);
3376 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3378 /* The innermost expression P may have originally had
3379 TREE_SIDE_EFFECTS set which would have caused all the outer
3380 expressions in *EXPR_P leading to P to also have had
3381 TREE_SIDE_EFFECTS set. */
3382 recalculate_side_effects (t);
3385 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3386 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3388 canonicalize_component_ref (expr_p);
3391 expr_stack.release ();
3393 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3395 return ret;
3398 /* Gimplify the self modifying expression pointed to by EXPR_P
3399 (++, --, +=, -=).
3401 PRE_P points to the list where side effects that must happen before
3402 *EXPR_P should be stored.
3404 POST_P points to the list where side effects that must happen after
3405 *EXPR_P should be stored.
3407 WANT_VALUE is nonzero iff we want to use the value of this expression
3408 in another expression.
3410 ARITH_TYPE is the type the computation should be performed in. */
3412 enum gimplify_status
3413 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3414 bool want_value, tree arith_type)
3416 enum tree_code code;
3417 tree lhs, lvalue, rhs, t1;
3418 gimple_seq post = NULL, *orig_post_p = post_p;
3419 bool postfix;
3420 enum tree_code arith_code;
3421 enum gimplify_status ret;
3422 location_t loc = EXPR_LOCATION (*expr_p);
3424 code = TREE_CODE (*expr_p);
3426 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3427 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3429 /* Prefix or postfix? */
3430 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3431 /* Faster to treat as prefix if result is not used. */
3432 postfix = want_value;
3433 else
3434 postfix = false;
3436 /* For postfix, make sure the inner expression's post side effects
3437 are executed after side effects from this expression. */
3438 if (postfix)
3439 post_p = &post;
3441 /* Add or subtract? */
3442 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3443 arith_code = PLUS_EXPR;
3444 else
3445 arith_code = MINUS_EXPR;
3447 /* Gimplify the LHS into a GIMPLE lvalue. */
3448 lvalue = TREE_OPERAND (*expr_p, 0);
3449 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3450 if (ret == GS_ERROR)
3451 return ret;
3453 /* Extract the operands to the arithmetic operation. */
3454 lhs = lvalue;
3455 rhs = TREE_OPERAND (*expr_p, 1);
3457 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3458 that as the result value and in the postqueue operation. */
3459 if (postfix)
3461 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3462 if (ret == GS_ERROR)
3463 return ret;
3465 lhs = get_initialized_tmp_var (lhs, pre_p);
3468 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3469 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3471 rhs = convert_to_ptrofftype_loc (loc, rhs);
3472 if (arith_code == MINUS_EXPR)
3473 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3474 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3476 else
3477 t1 = fold_convert (TREE_TYPE (*expr_p),
3478 fold_build2 (arith_code, arith_type,
3479 fold_convert (arith_type, lhs),
3480 fold_convert (arith_type, rhs)));
3482 if (postfix)
3484 gimplify_assign (lvalue, t1, pre_p);
3485 gimplify_seq_add_seq (orig_post_p, post);
3486 *expr_p = lhs;
3487 return GS_ALL_DONE;
3489 else
3491 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3492 return GS_OK;
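/* An illustrative gimplification of a postfix increment whose value is
   used (the temporary name is made up):
     b = a++;
   becomes roughly
     a.0 = a;
     a = a.0 + 1;
     b = a.0;  */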
3496 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3498 static void
3499 maybe_with_size_expr (tree *expr_p)
3501 tree expr = *expr_p;
3502 tree type = TREE_TYPE (expr);
3503 tree size;
3505 /* If we've already wrapped this or the type is error_mark_node, we can't do
3506 anything. */
3507 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3508 || type == error_mark_node)
3509 return;
3511 /* If the size isn't known or is a constant, we have nothing to do. */
3512 size = TYPE_SIZE_UNIT (type);
3513 if (!size || poly_int_tree_p (size))
3514 return;
3516 /* Otherwise, make a WITH_SIZE_EXPR. */
3517 size = unshare_expr (size);
3518 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3519 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
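/* Illustrative: for an object OBJ whose type's size is only known at
   run time as N_1 (e.g. a variably modified type), OBJ is rewritten to
     WITH_SIZE_EXPR <OBJ, N_1>
   so consumers such as calls can still find the object's size.  */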
3522 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3523 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3524 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3525 gimplified to an SSA name. */
3527 enum gimplify_status
3528 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3529 bool allow_ssa)
3531 bool (*test) (tree);
3532 fallback_t fb;
3534 /* In general, we allow lvalues for function arguments to avoid
3535 extra overhead of copying large aggregates out of even larger
3536 aggregates into temporaries only to copy the temporaries to
3537 the argument list. Make optimizers happy by pulling out to
3538 temporaries those types that fit in registers. */
3539 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3540 test = is_gimple_val, fb = fb_rvalue;
3541 else
3543 test = is_gimple_lvalue, fb = fb_either;
3544 /* Also strip a TARGET_EXPR that would force an extra copy. */
3545 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3547 tree init = TARGET_EXPR_INITIAL (*arg_p);
3548 if (init
3549 && !VOID_TYPE_P (TREE_TYPE (init)))
3550 *arg_p = init;
3554 /* If this is a variable sized type, we must remember the size. */
3555 maybe_with_size_expr (arg_p);
3557 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3558 /* Make sure arguments have the same location as the function call
3559 itself. */
3560 protected_set_expr_location (*arg_p, call_location);
3562 /* There is a sequence point before a function call. Side effects in
3563 the argument list must occur before the actual call. So, when
3564 gimplifying arguments, force gimplify_expr to use an internal
3565 post queue which is then appended to the end of PRE_P. */
3566 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3569 /* Don't fold inside offloading or taskreg regions: it can break code by
3570 adding decl references that weren't in the source. We'll do it during
3571 the omplower pass instead. */
3573 static bool
3574 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3576 struct gimplify_omp_ctx *ctx;
3577 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3578 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3579 return false;
3580 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3581 return false;
3582 /* Delay folding of builtins until the IL is in a consistent state
3583 so the diagnostic machinery can do a better job. */
3584 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3585 return false;
3586 return fold_stmt (gsi);
3589 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3590 WANT_VALUE is true if the result of the call is desired. */
3592 static enum gimplify_status
3593 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3595 tree fndecl, parms, p, fnptrtype;
3596 enum gimplify_status ret;
3597 int i, nargs;
3598 gcall *call;
3599 bool builtin_va_start_p = false;
3600 location_t loc = EXPR_LOCATION (*expr_p);
3602 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3604 /* For reliable diagnostics during inlining, it is necessary that
3605 every call_expr be annotated with file and line. */
3606 if (! EXPR_HAS_LOCATION (*expr_p))
3607 SET_EXPR_LOCATION (*expr_p, input_location);
3609 /* Gimplify internal functions created in the FEs. */
3610 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3612 if (want_value)
3613 return GS_ALL_DONE;
3615 nargs = call_expr_nargs (*expr_p);
3616 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3617 auto_vec<tree> vargs (nargs);
3619 if (ifn == IFN_ASSUME)
3621 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3623 /* If the [[assume (cond)]]; condition is simple
3624 enough and can be evaluated unconditionally
3625 without side-effects, expand it as
3626 if (!cond) __builtin_unreachable (); */
3627 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3628 *expr_p = build3 (COND_EXPR, void_type_node,
3629 CALL_EXPR_ARG (*expr_p, 0), void_node,
3630 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3631 fndecl, 0));
3632 return GS_OK;
3634 /* If not optimizing, ignore the assumptions. */
3635 if (!optimize || seen_error ())
3637 *expr_p = NULL_TREE;
3638 return GS_ALL_DONE;
3640 /* Temporarily, until gimple lowering, transform
3641 .ASSUME (cond);
3642 into:
3643 [[assume (guard)]]
3645 guard = cond;
3647 such that gimple lowering can outline the condition into
3648 a separate function easily. */
3649 tree guard = create_tmp_var (boolean_type_node);
3650 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3651 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3652 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3653 push_gimplify_context ();
3654 gimple_seq body = NULL;
3655 gimple *g = gimplify_and_return_first (*expr_p, &body);
3656 pop_gimplify_context (g);
3657 g = gimple_build_assume (guard, body);
3658 gimple_set_location (g, loc);
3659 gimplify_seq_add_stmt (pre_p, g);
3660 *expr_p = NULL_TREE;
3661 return GS_ALL_DONE;
3664 for (i = 0; i < nargs; i++)
3666 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3667 EXPR_LOCATION (*expr_p));
3668 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3671 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3672 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3673 gimplify_seq_add_stmt (pre_p, call);
3674 return GS_ALL_DONE;
3677 /* This may be a call to a builtin function.
3679 Builtin function calls may be transformed into different
3680 (and more efficient) builtin function calls under certain
3681 circumstances. Unfortunately, gimplification can muck things
3682 up enough that the builtin expanders are not aware that certain
3683 transformations are still valid.
3685 So we attempt transformation/gimplification of the call before
3686 we gimplify the CALL_EXPR. At this time we do not manage to
3687 transform all calls in the same manner as the expanders do, but
3688 we do transform most of them. */
3689 fndecl = get_callee_fndecl (*expr_p);
3690 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3691 switch (DECL_FUNCTION_CODE (fndecl))
3693 CASE_BUILT_IN_ALLOCA:
3694 /* If the call has been built for a variable-sized object, then we
3695 want to restore the stack level when the enclosing BIND_EXPR is
3696 exited to reclaim the allocated space; otherwise, we precisely
3697 need to do the opposite and preserve the latest stack level. */
3698 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3699 gimplify_ctxp->save_stack = true;
3700 else
3701 gimplify_ctxp->keep_stack = true;
3702 break;
3704 case BUILT_IN_VA_START:
3706 builtin_va_start_p = true;
3707 if (call_expr_nargs (*expr_p) < 2)
3709 error ("too few arguments to function %<va_start%>");
3710 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3711 return GS_OK;
3714 if (fold_builtin_next_arg (*expr_p, true))
3716 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3717 return GS_OK;
3719 break;
3722 case BUILT_IN_EH_RETURN:
3723 cfun->calls_eh_return = true;
3724 break;
3726 case BUILT_IN_CLEAR_PADDING:
3727 if (call_expr_nargs (*expr_p) == 1)
3729 /* Remember the original type of the argument in an internal
3730 dummy second argument, as in GIMPLE pointer conversions are
3731 useless. The zero value of that argument also marks this
3732 call as not being for automatic initialization. */
3733 p = CALL_EXPR_ARG (*expr_p, 0);
3734 *expr_p
3735 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3736 build_zero_cst (TREE_TYPE (p)));
3737 return GS_OK;
3739 break;
3741 default:
3744 if (fndecl && fndecl_built_in_p (fndecl))
3746 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3747 if (new_tree && new_tree != *expr_p)
3749 /* There was a transformation of this call which computes the
3750 same value, but in a more efficient way. Return and try
3751 again. */
3752 *expr_p = new_tree;
3753 return GS_OK;
3757 /* Remember the original function pointer type. */
3758 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3760 if (flag_openmp
3761 && fndecl
3762 && cfun
3763 && (cfun->curr_properties & PROP_gimple_any) == 0)
3765 tree variant = omp_resolve_declare_variant (fndecl);
3766 if (variant != fndecl)
3767 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3770 /* There is a sequence point before the call, so any side effects in
3771 the calling expression must occur before the actual call. Force
3772 gimplify_expr to use an internal post queue. */
3773 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3774 is_gimple_call_addr, fb_rvalue);
3776 if (ret == GS_ERROR)
3777 return GS_ERROR;
3779 nargs = call_expr_nargs (*expr_p);
3781 /* Get argument types for verification. */
3782 fndecl = get_callee_fndecl (*expr_p);
3783 parms = NULL_TREE;
3784 if (fndecl)
3785 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3786 else
3787 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3789 if (fndecl && DECL_ARGUMENTS (fndecl))
3790 p = DECL_ARGUMENTS (fndecl);
3791 else if (parms)
3792 p = parms;
3793 else
3794 p = NULL_TREE;
3795 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3798 /* If the last argument is __builtin_va_arg_pack () and it is not
3799 passed as a named argument, decrease the number of CALL_EXPR
3800 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3801 if (!p
3802 && i < nargs
3803 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3805 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3806 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3808 if (last_arg_fndecl
3809 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3811 tree call = *expr_p;
3813 --nargs;
3814 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3815 CALL_EXPR_FN (call),
3816 nargs, CALL_EXPR_ARGP (call));
3818 /* Copy all CALL_EXPR flags, location and block, except
3819 CALL_EXPR_VA_ARG_PACK flag. */
3820 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3821 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3822 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3823 = CALL_EXPR_RETURN_SLOT_OPT (call);
3824 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3825 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3827 /* Set CALL_EXPR_VA_ARG_PACK. */
3828 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
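/* Illustration (hypothetical example, not part of GCC): in an
   always-inline wrapper such as

     static inline int log_msg (int lvl, const char *fmt, ...)
     { return do_log (lvl, fmt, __builtin_va_arg_pack ()); }

   the trailing __builtin_va_arg_pack () argument is dropped from the
   CALL_EXPR and CALL_EXPR_VA_ARG_PACK is set, so that inlining can
   forward the caller's actual variadic arguments to do_log.  */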
3832 /* If the call returns twice then after building the CFG the call
3833 argument computations will no longer dominate the call because
3834 we add an abnormal incoming edge to the call. So do not use SSA
3835 vars there. */
3836 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3838 /* Gimplify the function arguments. */
3839 if (nargs > 0)
3841 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3842 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3843 PUSH_ARGS_REVERSED ? i-- : i++)
3845 enum gimplify_status t;
3847 /* Avoid gimplifying the second argument to va_start, which needs to
3848 be the plain PARM_DECL. */
3849 if ((i != 1) || !builtin_va_start_p)
3851 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3852 EXPR_LOCATION (*expr_p), ! returns_twice);
3854 if (t == GS_ERROR)
3855 ret = GS_ERROR;
3860 /* Gimplify the static chain. */
3861 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3863 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3864 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3865 else
3867 enum gimplify_status t;
3868 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3869 EXPR_LOCATION (*expr_p), ! returns_twice);
3870 if (t == GS_ERROR)
3871 ret = GS_ERROR;
3875 /* Verify the function result. */
3876 if (want_value && fndecl
3877 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3879 error_at (loc, "using result of function returning %<void%>");
3880 ret = GS_ERROR;
3883 /* Try this again in case gimplification exposed something. */
3884 if (ret != GS_ERROR)
3886 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3888 if (new_tree && new_tree != *expr_p)
3890 /* There was a transformation of this call which computes the
3891 same value, but in a more efficient way. Return and try
3892 again. */
3893 *expr_p = new_tree;
3894 return GS_OK;
3897 else
3899 *expr_p = error_mark_node;
3900 return GS_ERROR;
3903 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3904 decl. This allows us to eliminate redundant or useless
3905 calls to "const" functions. */
3906 if (TREE_CODE (*expr_p) == CALL_EXPR)
3908 int flags = call_expr_flags (*expr_p);
3909 if (flags & (ECF_CONST | ECF_PURE)
3910 /* An infinite loop is considered a side effect. */
3911 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3912 TREE_SIDE_EFFECTS (*expr_p) = 0;
3915 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3916 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3917 form and delegate the creation of a GIMPLE_CALL to
3918 gimplify_modify_expr. This is always possible because when
3919 WANT_VALUE is true, the caller wants the result of this call into
3920 a temporary, which means that we will emit an INIT_EXPR in
3921 internal_get_tmp_var which will then be handled by
3922 gimplify_modify_expr. */
3923 if (!want_value)
3925 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3926 have to do is replicate it as a GIMPLE_CALL tuple. */
3927 gimple_stmt_iterator gsi;
3928 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3929 notice_special_calls (call);
3930 gimplify_seq_add_stmt (pre_p, call);
3931 gsi = gsi_last (*pre_p);
3932 maybe_fold_stmt (&gsi);
3933 *expr_p = NULL_TREE;
3935 else
3936 /* Remember the original function type. */
3937 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3938 CALL_EXPR_FN (*expr_p));
3940 return ret;
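/* Illustration (hypothetical example, not part of GCC): the statement

     f (x + 1);

   has no user of the return value, so it is emitted above directly as

     t = x + 1;
     f (t);

   whereas in 'y = f (x + 1);' the gimplified CALL_EXPR is left in
   *expr_p and gimplify_modify_expr later builds the GIMPLE_CALL with
   lhs y, as the comment above explains.  */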
3943 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3944 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3946 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3947 condition is true or false, respectively. If null, we should generate
3948 our own to skip over the evaluation of this specific expression.
3950 LOCUS is the source location of the COND_EXPR.
3952 This function is the tree equivalent of do_jump.
3954 shortcut_cond_r should only be called by shortcut_cond_expr. */
3956 static tree
3957 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3958 location_t locus)
3960 tree local_label = NULL_TREE;
3961 tree t, expr = NULL;
3963 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3964 retain the shortcut semantics. Just insert the gotos here;
3965 shortcut_cond_expr will append the real blocks later. */
3966 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3968 location_t new_locus;
3970 /* Turn if (a && b) into
3972 if (a); else goto no;
3973 if (b) goto yes; else goto no;
3974 (no:) */
3976 if (false_label_p == NULL)
3977 false_label_p = &local_label;
3979 /* Keep the original source location on the first 'if'. */
3980 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3981 append_to_statement_list (t, &expr);
3983 /* Set the source location of the && on the second 'if'. */
3984 new_locus = rexpr_location (pred, locus);
3985 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3986 new_locus);
3987 append_to_statement_list (t, &expr);
3989 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3991 location_t new_locus;
3993 /* Turn if (a || b) into
3995 if (a) goto yes;
3996 if (b) goto yes; else goto no;
3997 (yes:) */
3999 if (true_label_p == NULL)
4000 true_label_p = &local_label;
4002 /* Keep the original source location on the first 'if'. */
4003 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
4004 append_to_statement_list (t, &expr);
4006 /* Set the source location of the || on the second 'if'. */
4007 new_locus = rexpr_location (pred, locus);
4008 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4009 new_locus);
4010 append_to_statement_list (t, &expr);
4012 else if (TREE_CODE (pred) == COND_EXPR
4013 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
4014 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
4016 location_t new_locus;
4018 /* As long as we're messing with gotos, turn if (a ? b : c) into
4019 if (a)
4020 if (b) goto yes; else goto no;
4021 else
4022 if (c) goto yes; else goto no;
4024 Don't do this if one of the arms has void type, which can happen
4025 in C++ when the arm is throw. */
4027 /* Keep the original source location on the first 'if'. Set the source
4028 location of the ? on the second 'if'. */
4029 new_locus = rexpr_location (pred, locus);
4030 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
4031 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
4032 false_label_p, locus),
4033 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
4034 false_label_p, new_locus));
4036 else
4038 expr = build3 (COND_EXPR, void_type_node, pred,
4039 build_and_jump (true_label_p),
4040 build_and_jump (false_label_p));
4041 SET_EXPR_LOCATION (expr, locus);
4044 if (local_label)
4046 t = build1 (LABEL_EXPR, void_type_node, local_label);
4047 append_to_statement_list (t, &expr);
4050 return expr;
4053 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4054 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4055 statement, if it is the last one. Otherwise, return NULL. */
4057 static tree
4058 find_goto (tree expr)
4060 if (!expr)
4061 return NULL_TREE;
4063 if (TREE_CODE (expr) == GOTO_EXPR)
4064 return expr;
4066 if (TREE_CODE (expr) != STATEMENT_LIST)
4067 return NULL_TREE;
4069 tree_stmt_iterator i = tsi_start (expr);
4071 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4072 tsi_next (&i);
4074 if (!tsi_one_before_end_p (i))
4075 return NULL_TREE;
4077 return find_goto (tsi_stmt (i));
4080 /* Same as find_goto, except that it returns NULL if the destination
4081 is not a LABEL_DECL. */
4083 static inline tree
4084 find_goto_label (tree expr)
4086 tree dest = find_goto (expr);
4087 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4088 return dest;
4089 return NULL_TREE;
4092 /* Given a conditional expression EXPR with short-circuit boolean
4093 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4094 predicate apart into the equivalent sequence of conditionals. */
4096 static tree
4097 shortcut_cond_expr (tree expr)
4099 tree pred = TREE_OPERAND (expr, 0);
4100 tree then_ = TREE_OPERAND (expr, 1);
4101 tree else_ = TREE_OPERAND (expr, 2);
4102 tree true_label, false_label, end_label, t;
4103 tree *true_label_p;
4104 tree *false_label_p;
4105 bool emit_end, emit_false, jump_over_else;
4106 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4107 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4109 /* First do simple transformations. */
4110 if (!else_se)
4112 /* If there is no 'else', turn
4113 if (a && b) then c
4114 into
4115 if (a) if (b) then c. */
4116 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4118 /* Keep the original source location on the first 'if'. */
4119 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4120 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4121 /* Set the source location of the && on the second 'if'. */
4122 if (rexpr_has_location (pred))
4123 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4124 then_ = shortcut_cond_expr (expr);
4125 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4126 pred = TREE_OPERAND (pred, 0);
4127 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4128 SET_EXPR_LOCATION (expr, locus);
4132 if (!then_se)
4134 /* If there is no 'then', turn
4135 if (a || b); else d
4136 into
4137 if (a); else if (b); else d. */
4138 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4140 /* Keep the original source location on the first 'if'. */
4141 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4142 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4143 /* Set the source location of the || on the second 'if'. */
4144 if (rexpr_has_location (pred))
4145 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4146 else_ = shortcut_cond_expr (expr);
4147 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4148 pred = TREE_OPERAND (pred, 0);
4149 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4150 SET_EXPR_LOCATION (expr, locus);
4154 /* If we're done, great. */
4155 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4156 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4157 return expr;
4159 /* Otherwise we need to mess with gotos. Change
4160 if (a) c; else d;
4161 to
4162 if (a); else goto no;
4163 c; goto end;
4164 no: d; end:
4165 and recursively gimplify the condition. */
4167 true_label = false_label = end_label = NULL_TREE;
4169 /* If our arms just jump somewhere, hijack those labels so we don't
4170 generate jumps to jumps. */
4172 if (tree then_goto = find_goto_label (then_))
4174 true_label = GOTO_DESTINATION (then_goto);
4175 then_ = NULL;
4176 then_se = false;
4179 if (tree else_goto = find_goto_label (else_))
4181 false_label = GOTO_DESTINATION (else_goto);
4182 else_ = NULL;
4183 else_se = false;
4186 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4187 if (true_label)
4188 true_label_p = &true_label;
4189 else
4190 true_label_p = NULL;
4192 /* The 'else' branch also needs a label if it contains interesting code. */
4193 if (false_label || else_se)
4194 false_label_p = &false_label;
4195 else
4196 false_label_p = NULL;
4198 /* If there was nothing else in our arms, just forward the label(s). */
4199 if (!then_se && !else_se)
4200 return shortcut_cond_r (pred, true_label_p, false_label_p,
4201 EXPR_LOC_OR_LOC (expr, input_location));
4203 /* If our last subexpression already has a terminal label, reuse it. */
4204 if (else_se)
4205 t = expr_last (else_);
4206 else if (then_se)
4207 t = expr_last (then_);
4208 else
4209 t = NULL;
4210 if (t && TREE_CODE (t) == LABEL_EXPR)
4211 end_label = LABEL_EXPR_LABEL (t);
4213 /* If we don't care about jumping to the 'else' branch, jump to the end
4214 if the condition is false. */
4215 if (!false_label_p)
4216 false_label_p = &end_label;
4218 /* We only want to emit these labels if we aren't hijacking them. */
4219 emit_end = (end_label == NULL_TREE);
4220 emit_false = (false_label == NULL_TREE);
4222 /* We only emit the jump over the else clause if we have to--if the
4223 then clause may fall through. Otherwise we can wind up with a
4224 useless jump and a useless label at the end of gimplified code,
4225 which will cause us to think that this conditional as a whole
4226 falls through even if it doesn't. If we then inline a function
4227 which ends with such a condition, that can cause us to issue an
4228 inappropriate warning about control reaching the end of a
4229 non-void function. */
4230 jump_over_else = block_may_fallthru (then_);
4232 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4233 EXPR_LOC_OR_LOC (expr, input_location));
4235 expr = NULL;
4236 append_to_statement_list (pred, &expr);
4238 append_to_statement_list (then_, &expr);
4239 if (else_se)
4241 if (jump_over_else)
4243 tree last = expr_last (expr);
4244 t = build_and_jump (&end_label);
4245 if (rexpr_has_location (last))
4246 SET_EXPR_LOCATION (t, rexpr_location (last));
4247 append_to_statement_list (t, &expr);
4249 if (emit_false)
4251 t = build1 (LABEL_EXPR, void_type_node, false_label);
4252 append_to_statement_list (t, &expr);
4254 append_to_statement_list (else_, &expr);
4256 if (emit_end && end_label)
4258 t = build1 (LABEL_EXPR, void_type_node, end_label);
4259 append_to_statement_list (t, &expr);
4262 return expr;
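/* Illustration (hypothetical example, not part of GCC): with both
   arms carrying side effects,

     if (a && b) c (); else d ();

   comes out of shortcut_cond_expr as approximately

     if (a) ; else goto no;
     if (b) ; else goto no;
     c (); goto end;
     no: d ();
     end: ;

   and arms that are themselves plain gotos have their labels
   hijacked so no jump-to-jump is generated.  */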
4265 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4267 tree
4268 gimple_boolify (tree expr)
4270 tree type = TREE_TYPE (expr);
4271 location_t loc = EXPR_LOCATION (expr);
4273 if (TREE_CODE (expr) == NE_EXPR
4274 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4275 && integer_zerop (TREE_OPERAND (expr, 1)))
4277 tree call = TREE_OPERAND (expr, 0);
4278 tree fn = get_callee_fndecl (call);
4280 /* For __builtin_expect ((long) (x), y) recurse into x as well
4281 if x is truth_value_p. */
4282 if (fn
4283 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4284 && call_expr_nargs (call) == 2)
4286 tree arg = CALL_EXPR_ARG (call, 0);
4287 if (arg)
4289 if (TREE_CODE (arg) == NOP_EXPR
4290 && TREE_TYPE (arg) == TREE_TYPE (call))
4291 arg = TREE_OPERAND (arg, 0);
4292 if (truth_value_p (TREE_CODE (arg)))
4294 arg = gimple_boolify (arg);
4295 CALL_EXPR_ARG (call, 0)
4296 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4302 switch (TREE_CODE (expr))
4304 case TRUTH_AND_EXPR:
4305 case TRUTH_OR_EXPR:
4306 case TRUTH_XOR_EXPR:
4307 case TRUTH_ANDIF_EXPR:
4308 case TRUTH_ORIF_EXPR:
4309 /* Also boolify the arguments of truth exprs. */
4310 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4311 /* FALLTHRU */
4313 case TRUTH_NOT_EXPR:
4314 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4316 /* These expressions always produce boolean results. */
4317 if (TREE_CODE (type) != BOOLEAN_TYPE)
4318 TREE_TYPE (expr) = boolean_type_node;
4319 return expr;
4321 case ANNOTATE_EXPR:
4322 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4324 case annot_expr_ivdep_kind:
4325 case annot_expr_unroll_kind:
4326 case annot_expr_no_vector_kind:
4327 case annot_expr_vector_kind:
4328 case annot_expr_parallel_kind:
4329 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4330 if (TREE_CODE (type) != BOOLEAN_TYPE)
4331 TREE_TYPE (expr) = boolean_type_node;
4332 return expr;
4333 default:
4334 gcc_unreachable ();
4337 default:
4338 if (COMPARISON_CLASS_P (expr))
4340 /* These expressions always produce boolean results. */
4341 if (TREE_CODE (type) != BOOLEAN_TYPE)
4342 TREE_TYPE (expr) = boolean_type_node;
4343 return expr;
4345 /* Other expressions that get here must have boolean values, but
4346 might need to be converted to the appropriate mode. */
4347 if (TREE_CODE (type) == BOOLEAN_TYPE)
4348 return expr;
4349 return fold_convert_loc (loc, boolean_type_node, expr);
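/* Illustration (hypothetical example, not part of GCC): in C the
   expression !x has type int, so a TRUTH_NOT_EXPR reaching here is
   retyped to BOOLEAN_TYPE after boolifying x itself, and in

     if (__builtin_expect ((long) (x != 0), 1)) ...

   the truth-valued first argument of the call is boolified and then
   converted back to the call's type, as handled above.  */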
4353 /* Given a conditional expression *EXPR_P without side effects, gimplify
4354 its operands. New statements are inserted to PRE_P. */
4356 static enum gimplify_status
4357 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4359 tree expr = *expr_p, cond;
4360 enum gimplify_status ret, tret;
4361 enum tree_code code;
4363 cond = gimple_boolify (COND_EXPR_COND (expr));
4365 /* We need to handle && and || specially, as their gimplification
4366 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
4367 code = TREE_CODE (cond);
4368 if (code == TRUTH_ANDIF_EXPR)
4369 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4370 else if (code == TRUTH_ORIF_EXPR)
4371 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4372 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4373 COND_EXPR_COND (*expr_p) = cond;
4375 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4376 is_gimple_val, fb_rvalue);
4377 ret = MIN (ret, tret);
4378 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4379 is_gimple_val, fb_rvalue);
4381 return MIN (ret, tret);
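/* Illustration (hypothetical example, not part of GCC): when the
   context allows rhs COND_EXPRs and all operands are side-effect
   free and cannot trap,

     r = (a && b) ? x : y;

   has its TRUTH_ANDIF_EXPR demoted to TRUTH_AND_EXPR above, so the
   condition gimplifies to something like

     t = a & b;
     r = t ? x : y;

   evaluating both a and b unconditionally, which is safe here and
   avoids recursing into another pure COND_EXPR forever.  */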
4384 /* Return true if evaluating EXPR could trap.
4385 EXPR is GENERIC, while tree_could_trap_p can be called
4386 only on GIMPLE. */
4388 bool
4389 generic_expr_could_trap_p (tree expr)
4391 unsigned i, n;
4393 if (!expr || is_gimple_val (expr))
4394 return false;
4396 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4397 return true;
4399 n = TREE_OPERAND_LENGTH (expr);
4400 for (i = 0; i < n; i++)
4401 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4402 return true;
4404 return false;
4407 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4408 into
4410 if (p)                     if (p)
4411   t1 = a;                    a;
4412 else            or         else
4413   t1 = b;                    b;
4414 t1;
4416 The second form is used when *EXPR_P is of type void.
4418 PRE_P points to the list where side effects that must happen before
4419 *EXPR_P should be stored. */
4421 static enum gimplify_status
4422 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4424 tree expr = *expr_p;
4425 tree type = TREE_TYPE (expr);
4426 location_t loc = EXPR_LOCATION (expr);
4427 tree tmp, arm1, arm2;
4428 enum gimplify_status ret;
4429 tree label_true, label_false, label_cont;
4430 bool have_then_clause_p, have_else_clause_p;
4431 gcond *cond_stmt;
4432 enum tree_code pred_code;
4433 gimple_seq seq = NULL;
4435 /* If this COND_EXPR has a value, copy the values into a temporary within
4436 the arms. */
4437 if (!VOID_TYPE_P (type))
4439 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4440 tree result;
4442 /* If either an rvalue is ok or we do not require an lvalue, create the
4443 temporary. But we cannot do that if the type is addressable. */
4444 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4445 && !TREE_ADDRESSABLE (type))
4447 if (gimplify_ctxp->allow_rhs_cond_expr
4448 /* If either branch has side effects or could trap, it can't be
4449 evaluated unconditionally. */
4450 && !TREE_SIDE_EFFECTS (then_)
4451 && !generic_expr_could_trap_p (then_)
4452 && !TREE_SIDE_EFFECTS (else_)
4453 && !generic_expr_could_trap_p (else_))
4454 return gimplify_pure_cond_expr (expr_p, pre_p);
4456 tmp = create_tmp_var (type, "iftmp");
4457 result = tmp;
4460 /* Otherwise, only create and copy references to the values. */
4461 else
4463 type = build_pointer_type (type);
4465 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4466 then_ = build_fold_addr_expr_loc (loc, then_);
4468 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4469 else_ = build_fold_addr_expr_loc (loc, else_);
4471 expr
4472 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4474 tmp = create_tmp_var (type, "iftmp");
4475 result = build_simple_mem_ref_loc (loc, tmp);
4478 /* Build the new then clause, `tmp = then_;'. But don't build the
4479 assignment if the value is void; in C++ it can be if it's a throw. */
4480 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4481 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4483 /* Similarly, build the new else clause, `tmp = else_;'. */
4484 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4485 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4487 TREE_TYPE (expr) = void_type_node;
4488 recalculate_side_effects (expr);
4490 /* Move the COND_EXPR to the prequeue. */
4491 gimplify_stmt (&expr, pre_p);
4493 *expr_p = result;
4494 return GS_ALL_DONE;
4497 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4498 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4499 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4500 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4502 /* Make sure the condition has BOOLEAN_TYPE. */
4503 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4505 /* Break apart && and || conditions. */
4506 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4507 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4509 expr = shortcut_cond_expr (expr);
4511 if (expr != *expr_p)
4513 *expr_p = expr;
4515 /* We can't rely on gimplify_expr to re-gimplify the expanded
4516 form properly, as cleanups might cause the target labels to be
4517 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4518 set up a conditional context. */
4519 gimple_push_condition ();
4520 gimplify_stmt (expr_p, &seq);
4521 gimple_pop_condition (pre_p);
4522 gimple_seq_add_seq (pre_p, seq);
4524 return GS_ALL_DONE;
4528 /* Now do the normal gimplification. */
4530 /* Gimplify condition. */
4531 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4532 is_gimple_condexpr_for_cond, fb_rvalue);
4533 if (ret == GS_ERROR)
4534 return GS_ERROR;
4535 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4537 gimple_push_condition ();
4539 have_then_clause_p = have_else_clause_p = false;
4540 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4541 if (label_true
4542 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4543 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4544 have different locations, otherwise we end up with incorrect
4545 location information on the branches. */
4546 && (optimize
4547 || !EXPR_HAS_LOCATION (expr)
4548 || !rexpr_has_location (label_true)
4549 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4551 have_then_clause_p = true;
4552 label_true = GOTO_DESTINATION (label_true);
4554 else
4555 label_true = create_artificial_label (UNKNOWN_LOCATION);
4556 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4557 if (label_false
4558 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4559 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4560 have different locations, otherwise we end up with incorrect
4561 location information on the branches. */
4562 && (optimize
4563 || !EXPR_HAS_LOCATION (expr)
4564 || !rexpr_has_location (label_false)
4565 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4567 have_else_clause_p = true;
4568 label_false = GOTO_DESTINATION (label_false);
4570 else
4571 label_false = create_artificial_label (UNKNOWN_LOCATION);
4573 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4574 &arm2);
4575 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4576 label_false);
4577 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4578 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4579 gimplify_seq_add_stmt (&seq, cond_stmt);
4580 gimple_stmt_iterator gsi = gsi_last (seq);
4581 maybe_fold_stmt (&gsi);
4583 label_cont = NULL_TREE;
4584 if (!have_then_clause_p)
4586 /* For if (...) {} else { code; } put label_true after
4587 the else block. */
4588 if (TREE_OPERAND (expr, 1) == NULL_TREE
4589 && !have_else_clause_p
4590 && TREE_OPERAND (expr, 2) != NULL_TREE)
4592 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4593 handling that label_cont == label_true can only be reached
4594 through fallthrough from { code; }. */
4595 if (integer_zerop (COND_EXPR_COND (expr)))
4596 UNUSED_LABEL_P (label_true) = 1;
4597 label_cont = label_true;
4599 else
4601 bool then_side_effects
4602 = (TREE_OPERAND (expr, 1)
4603 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4604 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4605 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4606 /* For if (...) { code; } else {} or
4607 if (...) { code; } else goto label; or
4608 if (...) { code; return; } else { ... }
4609 label_cont isn't needed. */
4610 if (!have_else_clause_p
4611 && TREE_OPERAND (expr, 2) != NULL_TREE
4612 && gimple_seq_may_fallthru (seq))
4614 gimple *g;
4615 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4617 /* For if (0) { non-side-effect-code } else { code }
4618 tell -Wimplicit-fallthrough handling that label_cont can
4619 only be reached through fallthrough from { code }. */
4620 if (integer_zerop (COND_EXPR_COND (expr)))
4622 UNUSED_LABEL_P (label_true) = 1;
4623 if (!then_side_effects)
4624 UNUSED_LABEL_P (label_cont) = 1;
4627 g = gimple_build_goto (label_cont);
4629 /* GIMPLE_COND's are very low level; they have embedded
4630 gotos. This particular embedded goto should not be marked
4631 with the location of the original COND_EXPR, as it would
4632 correspond to the COND_EXPR's condition, not the ELSE or the
4633 THEN arms. To avoid marking it with the wrong location, flag
4634 it as "no location". */
4635 gimple_set_do_not_emit_location (g);
4637 gimplify_seq_add_stmt (&seq, g);
4641 if (!have_else_clause_p)
4643 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4644 tell -Wimplicit-fallthrough handling that label_false can only be
4645 reached through fallthrough from { code }. */
4646 if (integer_nonzerop (COND_EXPR_COND (expr))
4647 && (TREE_OPERAND (expr, 2) == NULL_TREE
4648 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4649 UNUSED_LABEL_P (label_false) = 1;
4650 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4651 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4653 if (label_cont)
4654 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4656 gimple_pop_condition (pre_p);
4657 gimple_seq_add_seq (pre_p, seq);
4659 if (ret == GS_ERROR)
4660 ; /* Do nothing. */
4661 else if (have_then_clause_p || have_else_clause_p)
4662 ret = GS_ALL_DONE;
4663 else
4665 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4666 expr = TREE_OPERAND (expr, 0);
4667 gimplify_stmt (&expr, pre_p);
4670 *expr_p = NULL;
4671 return ret;
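/* Illustration (hypothetical example, not part of GCC): the
   value-producing conditional

     x = p ? f () : g ();

   leaves gimplify_cond_expr as roughly

     if (p != 0) goto then_; else goto else_;
     then_: iftmp = f (); goto cont_;
     else_: iftmp = g ();
     cont_: x = iftmp;

   while for an addressable type only the addresses of the arms are
   copied into a temporary pointer that is dereferenced afterwards.  */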
4674 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4675 to be marked addressable.
4677 We cannot rely on such an expression being directly markable if a temporary
4678 has been created by the gimplification. In this case, we create another
4679 temporary and initialize it with a copy, which will become a store after we
4680 mark it addressable. This can happen if the front-end passed us something
4681 that it could not mark addressable yet, like a Fortran pass-by-reference
4682 parameter (int) floatvar. */
4684 static void
4685 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4687 while (handled_component_p (*expr_p))
4688 expr_p = &TREE_OPERAND (*expr_p, 0);
4690 /* Do not allow an SSA name as the temporary. */
4691 if (is_gimple_reg (*expr_p))
4692 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4695 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4696 a call to __builtin_memcpy. */
4698 static enum gimplify_status
4699 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4700 gimple_seq *seq_p)
4702 tree t, to, to_ptr, from, from_ptr;
4703 gcall *gs;
4704 location_t loc = EXPR_LOCATION (*expr_p);
4706 to = TREE_OPERAND (*expr_p, 0);
4707 from = TREE_OPERAND (*expr_p, 1);
4709 /* Mark the RHS addressable. Beware that it may not be possible to do so
4710 directly if a temporary has been created by the gimplification. */
4711 prepare_gimple_addressable (&from, seq_p);
4713 mark_addressable (from);
4714 from_ptr = build_fold_addr_expr_loc (loc, from);
4715 gimplify_arg (&from_ptr, seq_p, loc);
4717 mark_addressable (to);
4718 to_ptr = build_fold_addr_expr_loc (loc, to);
4719 gimplify_arg (&to_ptr, seq_p, loc);
4721 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4723 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4724 gimple_call_set_alloca_for_var (gs, true);
4726 if (want_value)
4728 /* tmp = memcpy() */
4729 t = create_tmp_var (TREE_TYPE (to_ptr));
4730 gimple_call_set_lhs (gs, t);
4731 gimplify_seq_add_stmt (seq_p, gs);
4733 *expr_p = build_simple_mem_ref (t);
4734 return GS_ALL_DONE;
4737 gimplify_seq_add_stmt (seq_p, gs);
4738 *expr_p = NULL;
4739 return GS_ALL_DONE;
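/* Illustration (hypothetical example, not part of GCC): an aggregate
   assignment whose size is only known at run time, say between two
   objects of variably modified type, is lowered here to roughly

     D.to = &to;
     D.from = &from;
     __builtin_memcpy (D.to, D.from, size);

   after both operands have been made addressable.  */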
4742 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4743 a call to __builtin_memset. In this case we know that the RHS is
4744 a CONSTRUCTOR with an empty element list. */
4746 static enum gimplify_status
4747 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4748 gimple_seq *seq_p)
4750 tree t, from, to, to_ptr;
4751 gcall *gs;
4752 location_t loc = EXPR_LOCATION (*expr_p);
4754 /* Assert our assumptions, to abort instead of producing wrong code
4755 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4756 not be immediately exposed. */
4757 from = TREE_OPERAND (*expr_p, 1);
4758 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4759 from = TREE_OPERAND (from, 0);
4761 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4762 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4764 /* Now proceed. */
4765 to = TREE_OPERAND (*expr_p, 0);
4767 to_ptr = build_fold_addr_expr_loc (loc, to);
4768 gimplify_arg (&to_ptr, seq_p, loc);
4769 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4771 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4773 if (want_value)
4775 /* tmp = memset() */
4776 t = create_tmp_var (TREE_TYPE (to_ptr));
4777 gimple_call_set_lhs (gs, t);
4778 gimplify_seq_add_stmt (seq_p, gs);
4780 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4781 return GS_ALL_DONE;
4784 gimplify_seq_add_stmt (seq_p, gs);
4785 *expr_p = NULL;
4786 return GS_ALL_DONE;
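/* Illustration (hypothetical example, not part of GCC): when this
   helper is selected for an empty-CONSTRUCTOR assignment, clearing
   an object of dynamic size becomes roughly

     D.to = &to;
     __builtin_memset (D.to, 0, size);

   with an optional temporary for the return value if WANT_VALUE.  */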
4789 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4790 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4791 assignment. Return non-null if we detect a potential overlap. */
4793 struct gimplify_init_ctor_preeval_data
4795 /* The base decl of the lhs object. May be NULL, in which case we
4796 have to assume the lhs is indirect. */
4797 tree lhs_base_decl;
4799 /* The alias set of the lhs object. */
4800 alias_set_type lhs_alias_set;
4803 static tree
4804 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4806 struct gimplify_init_ctor_preeval_data *data
4807 = (struct gimplify_init_ctor_preeval_data *) xdata;
4808 tree t = *tp;
4810 /* If we find the base object, obviously we have overlap. */
4811 if (data->lhs_base_decl == t)
4812 return t;
4814 /* If the constructor component is indirect, determine if we have a
4815 potential overlap with the lhs. The only bits of information we
4816 have to go on at this point are addressability and alias sets. */
4817 if ((INDIRECT_REF_P (t)
4818 || TREE_CODE (t) == MEM_REF)
4819 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4820 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4821 return t;
4823 /* If the constructor component is a call, determine if it can hide a
4824 potential overlap with the lhs through an INDIRECT_REF like above.
4825 ??? Ugh - this is completely broken. In fact this whole analysis
4826 doesn't look conservative. */
4827 if (TREE_CODE (t) == CALL_EXPR)
4829 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4831 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4832 if (POINTER_TYPE_P (TREE_VALUE (type))
4833 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4834 && alias_sets_conflict_p (data->lhs_alias_set,
4835 get_alias_set
4836 (TREE_TYPE (TREE_VALUE (type)))))
4837 return t;
4840 if (IS_TYPE_OR_DECL_P (t))
4841 *walk_subtrees = 0;
4842 return NULL;
4845 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4846 force values that overlap with the lhs (as described by *DATA)
4847 into temporaries. */
4849 static void
4850 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4851 struct gimplify_init_ctor_preeval_data *data)
4853 enum gimplify_status one;
4855 /* If the value is constant, then there's nothing to pre-evaluate. */
4856 if (TREE_CONSTANT (*expr_p))
4858 /* Ensure it does not have side effects; it might contain a reference to
4859 the object we're initializing. */
4860 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4861 return;
4864 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4865 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4866 return;
4868 /* Recurse for nested constructors. */
4869 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4871 unsigned HOST_WIDE_INT ix;
4872 constructor_elt *ce;
4873 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4875 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4876 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4878 return;
4881 /* If this is a variable sized type, we must remember the size. */
4882 maybe_with_size_expr (expr_p);
4884 /* Gimplify the constructor element to something appropriate for the rhs
4885 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4886 the gimplifier will consider this a store to memory. Doing this
4887 gimplification now means that we won't have to deal with complicated
4888 language-specific trees, nor trees like SAVE_EXPR that can induce
4889 exponential search behavior. */
4890 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4891 if (one == GS_ERROR)
4893 *expr_p = NULL;
4894 return;
4897 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4898 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4899 always be true for all scalars, since is_gimple_mem_rhs insists on a
4900 temporary variable for them. */
4901 if (DECL_P (*expr_p))
4902 return;
4904 /* If this is of variable size, we have no choice but to assume it doesn't
4905 overlap since we can't make a temporary for it. */
4906 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4907 return;
4909 /* Otherwise, we must search for overlap ... */
4910 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4911 return;
4913 /* ... and if found, force the value into a temporary. */
4914 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
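/* Illustration (hypothetical example, not part of GCC): in

     a = (struct S) { .x = a.y + 1, .y = 0 };

   the read of a.y overlaps the lhs being overwritten, so the walk
   above detects the conflict and a.y + 1 is forced into a temporary
   before any element of a is stored (or cleared).  */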
4917 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4918 a RANGE_EXPR in a CONSTRUCTOR for an array.
4920 var = lower;
4921 loop_entry:
4922 object[var] = value;
4923 if (var == upper)
4924 goto loop_exit;
4925 var = var + 1;
4926 goto loop_entry;
4927 loop_exit:
4929 We increment var _after_ the loop exit check because we might otherwise
4930 fail if upper == TYPE_MAX_VALUE (type for upper).
4932 Note that we never have to deal with SAVE_EXPRs here, because this has
4933 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4935 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4936 gimple_seq *, bool);
4938 static void
4939 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4940 tree value, tree array_elt_type,
4941 gimple_seq *pre_p, bool cleared)
4943 tree loop_entry_label, loop_exit_label, fall_thru_label;
4944 tree var, var_type, cref, tmp;
4946 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4947 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4948 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4950 /* Create and initialize the index variable. */
4951 var_type = TREE_TYPE (upper);
4952 var = create_tmp_var (var_type);
4953 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4955 /* Add the loop entry label. */
4956 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4958 /* Build the reference. */
4959 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4960 var, NULL_TREE, NULL_TREE);
4962 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4963 the store. Otherwise just assign value to the reference. */
4965 if (TREE_CODE (value) == CONSTRUCTOR)
4966 /* NB we might have to call ourselves recursively through
4967 gimplify_init_ctor_eval if the value is a constructor. */
4968 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4969 pre_p, cleared);
4970 else
4972 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4973 != GS_ERROR)
4974 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4977 /* We exit the loop when the index var is equal to the upper bound. */
4978 gimplify_seq_add_stmt (pre_p,
4979 gimple_build_cond (EQ_EXPR, var, upper,
4980 loop_exit_label, fall_thru_label));
4982 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4984 /* Otherwise, increment the index var... */
4985 tmp = build2 (PLUS_EXPR, var_type, var,
4986 fold_convert (var_type, integer_one_node));
4987 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4989 /* ...and jump back to the loop entry. */
4990 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4992 /* Add the loop exit label. */
4993 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
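/* Illustration (hypothetical example, not part of GCC): the GNU range
   designator in

     int a[100] = { [10 ... 19] = v };

   reaches this helper with lower == 10 and upper == 19 and becomes
   the loop from the header comment above, storing v into a[var] and
   testing var == upper before the increment so that an upper bound
   of TYPE_MAX_VALUE cannot overflow the index.  */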
4996 /* A subroutine of gimplify_init_constructor. Generate individual
4997 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4998 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4999 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5000 zeroed first. */
5002 static void
5003 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
5004 gimple_seq *pre_p, bool cleared)
5006 tree array_elt_type = NULL;
5007 unsigned HOST_WIDE_INT ix;
5008 tree purpose, value;
5010 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
5011 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
5013 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
5015 tree cref;
5017 /* NULL values are created above for gimplification errors. */
5018 if (value == NULL)
5019 continue;
5021 if (cleared && initializer_zerop (value))
5022 continue;
5024 /* ??? Here's to hoping the front end fills in all of the indices,
5025 so we don't have to figure out what's missing ourselves. */
5026 gcc_assert (purpose);
5028 /* Skip zero-sized fields, unless value has side-effects. This can
5029 happen with calls to functions returning an empty type, which
5030 we shouldn't discard. As a number of downstream passes don't
5031 expect sets of empty type fields, we rely on the gimplification of
5032 the MODIFY_EXPR we make below to drop the assignment statement. */
5033 if (!TREE_SIDE_EFFECTS (value)
5034 && TREE_CODE (purpose) == FIELD_DECL
5035 && is_empty_type (TREE_TYPE (purpose)))
5036 continue;
5038 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5039 whole range. */
5040 if (TREE_CODE (purpose) == RANGE_EXPR)
5042 tree lower = TREE_OPERAND (purpose, 0);
5043 tree upper = TREE_OPERAND (purpose, 1);
5045 /* If the lower bound is equal to upper, just treat it as if
5046 upper was the index. */
5047 if (simple_cst_equal (lower, upper))
5048 purpose = upper;
5049 else
5051 gimplify_init_ctor_eval_range (object, lower, upper, value,
5052 array_elt_type, pre_p, cleared);
5053 continue;
5057 if (array_elt_type)
5059 /* Do not use bitsizetype for ARRAY_REF indices. */
5060 if (TYPE_DOMAIN (TREE_TYPE (object)))
5061 purpose
5062 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5063 purpose);
5064 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5065 purpose, NULL_TREE, NULL_TREE);
5067 else
5069 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5070 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5071 unshare_expr (object), purpose, NULL_TREE);
5074 if (TREE_CODE (value) == CONSTRUCTOR
5075 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5076 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5077 pre_p, cleared);
5078 else
5080 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5081 gimplify_and_add (init, pre_p);
5082 ggc_free (init);
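/* Illustration (hypothetical example, not part of GCC): without a
   block clear,

     struct P p = { .x = i, .y = j };

   is decomposed by gimplify_init_ctor_eval into the element stores

     p.x = i;
     p.y = j;

   while with CLEARED set the zero-valued elements are skipped.  */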
5087 /* Return the appropriate RHS predicate for this LHS. */
5089 gimple_predicate
5090 rhs_predicate_for (tree lhs)
5092 if (is_gimple_reg (lhs))
5093 return is_gimple_reg_rhs_or_call;
5094 else
5095 return is_gimple_mem_rhs_or_call;
5098 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5099 before the LHS has been gimplified. */
5101 static gimple_predicate
5102 initial_rhs_predicate_for (tree lhs)
5104 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5105 return is_gimple_reg_rhs_or_call;
5106 else
5107 return is_gimple_mem_rhs_or_call;
5110 /* Gimplify a C99 compound literal expression. This just means adding
5111 the DECL_EXPR before the current statement and using its anonymous
5112 decl instead. */
5114 static enum gimplify_status
5115 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5116 bool (*gimple_test_f) (tree),
5117 fallback_t fallback)
5119 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5120 tree decl = DECL_EXPR_DECL (decl_s);
5121 tree init = DECL_INITIAL (decl);
5122 /* Mark the decl as addressable if the compound literal
5123 expression is addressable now, otherwise it is marked too late
5124 after we gimplify the initialization expression. */
5125 if (TREE_ADDRESSABLE (*expr_p))
5126 TREE_ADDRESSABLE (decl) = 1;
5127 /* Otherwise, if we don't need an lvalue and have a literal, directly
5128 substitute it. Check if it matches the gimple predicate, as
5129 otherwise we'd generate a new temporary, and we can as well just
5130 use the decl we already have. */
5131 else if (!TREE_ADDRESSABLE (decl)
5132 && !TREE_THIS_VOLATILE (decl)
5133 && init
5134 && (fallback & fb_lvalue) == 0
5135 && gimple_test_f (init))
5137 *expr_p = init;
5138 return GS_OK;
5141 /* If the decl is not addressable, then it is being used in some
5142 expression or on the right hand side of a statement, and it can
5143 be put into a readonly data section. */
5144 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5145 TREE_READONLY (decl) = 1;
5147 /* This decl isn't mentioned in the enclosing block, so add it to the
5148 list of temps. FIXME it seems a bit of a kludge to say that
5149 anonymous artificial vars aren't pushed, but everything else is. */
5150 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5151 gimple_add_tmp_var (decl);
5153 gimplify_and_add (decl_s, pre_p);
5154 *expr_p = decl;
5155 return GS_OK;
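/* Illustration (hypothetical example, not part of GCC): the C99
   compound literal in

     p = (int []) { 1, 2, 3 };

   gimplifies by emitting the anonymous decl's DECL_EXPR first,
   roughly

     int D.lit[3] = { 1, 2, 3 };
     p = &D.lit;

   and when no lvalue is needed and the initializer already satisfies
   the predicate, the initializer is substituted directly.  */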
5158 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5159 return a new CONSTRUCTOR if something changed. */
5161 static tree
5162 optimize_compound_literals_in_ctor (tree orig_ctor)
5164 tree ctor = orig_ctor;
5165 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5166 unsigned int idx, num = vec_safe_length (elts);
5168 for (idx = 0; idx < num; idx++)
5170 tree value = (*elts)[idx].value;
5171 tree newval = value;
5172 if (TREE_CODE (value) == CONSTRUCTOR)
5173 newval = optimize_compound_literals_in_ctor (value);
5174 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5176 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5177 tree decl = DECL_EXPR_DECL (decl_s);
5178 tree init = DECL_INITIAL (decl);
5180 if (!TREE_ADDRESSABLE (value)
5181 && !TREE_ADDRESSABLE (decl)
5182 && init
5183 && TREE_CODE (init) == CONSTRUCTOR)
5184 newval = optimize_compound_literals_in_ctor (init);
5186 if (newval == value)
5187 continue;
5189 if (ctor == orig_ctor)
5191 ctor = copy_node (orig_ctor);
5192 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5193 elts = CONSTRUCTOR_ELTS (ctor);
5195 (*elts)[idx].value = newval;
5197 return ctor;
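/* Illustration (hypothetical example, not part of GCC): in

     struct T t = { .a = (struct S) { 1, 2 } };

   the embedded compound literal is replaced by its CONSTRUCTOR
   { 1, 2 } in a copied outer CONSTRUCTOR, so the initializer is
   flattened before gimplify_init_constructor processes it.  */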
5200 /* A subroutine of gimplify_modify_expr. Break out elements of a
5201 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5203 Note that we still need to clear any elements that don't have explicit
5204 initializers, so if not all elements are initialized we keep the
5205 original MODIFY_EXPR, we just remove all of the constructor elements.
5207 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5208 GS_ERROR if we would have to create a temporary when gimplifying
5209 this constructor. Otherwise, return GS_OK.
5211 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5213 static enum gimplify_status
5214 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5215 bool want_value, bool notify_temp_creation)
5217 tree object, ctor, type;
5218 enum gimplify_status ret;
5219 vec<constructor_elt, va_gc> *elts;
5220 bool cleared = false;
5221 bool is_empty_ctor = false;
5222 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5224 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5226 if (!notify_temp_creation)
5228 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5229 is_gimple_lvalue, fb_lvalue);
5230 if (ret == GS_ERROR)
5231 return ret;
5234 object = TREE_OPERAND (*expr_p, 0);
5235 ctor = TREE_OPERAND (*expr_p, 1)
5236 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5237 type = TREE_TYPE (ctor);
5238 elts = CONSTRUCTOR_ELTS (ctor);
5239 ret = GS_ALL_DONE;
5241 switch (TREE_CODE (type))
5243 case RECORD_TYPE:
5244 case UNION_TYPE:
5245 case QUAL_UNION_TYPE:
5246 case ARRAY_TYPE:
5248 /* Use readonly data for initializers of this or smaller size
5249 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5250 ratio. */
5251 const HOST_WIDE_INT min_unique_size = 64;
5252 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5253 is smaller than this, use readonly data. */
5254 const int unique_nonzero_ratio = 8;
5255 /* True if a single access of the object must be ensured. This is the
5256 case if the target is volatile, the type is non-addressable and more
5257 than one field needs to be assigned. */
5258 const bool ensure_single_access
5259 = TREE_THIS_VOLATILE (object)
5260 && !TREE_ADDRESSABLE (type)
5261 && vec_safe_length (elts) > 1;
5262 struct gimplify_init_ctor_preeval_data preeval_data;
5263 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5264 HOST_WIDE_INT num_unique_nonzero_elements;
5265 bool complete_p, valid_const_initializer;
5267 /* Aggregate types must lower constructors to initialization of
5268 individual elements. The exception is that a CONSTRUCTOR node
5269 with no elements indicates zero-initialization of the whole. */
5270 if (vec_safe_is_empty (elts))
5272 if (notify_temp_creation)
5273 return GS_OK;
5275 /* The var will be initialized and so appear on lhs of
5276 assignment, it can't be TREE_READONLY anymore. */
5277 if (VAR_P (object))
5278 TREE_READONLY (object) = 0;
5280 is_empty_ctor = true;
5281 break;
5284 /* Fetch information about the constructor to direct later processing.
5285 We might want to make static versions of it in various cases, and
5286 can only do so if it known to be a valid constant initializer. */
5287 valid_const_initializer
5288 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5289 &num_unique_nonzero_elements,
5290 &num_ctor_elements, &complete_p);
5292 /* If a const aggregate variable is being initialized, then it
5293 should never be a loss to promote the variable to be static. */
5294 if (valid_const_initializer
5295 && num_nonzero_elements > 1
5296 && TREE_READONLY (object)
5297 && VAR_P (object)
5298 && !DECL_REGISTER (object)
5299 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5300 || DECL_MERGEABLE (object))
5301 /* For ctors that have many repeated nonzero elements
5302 represented through RANGE_EXPRs, prefer initializing
5303 those through runtime loops over copies of large amounts
5304 of data from readonly data section. */
5305 && (num_unique_nonzero_elements
5306 > num_nonzero_elements / unique_nonzero_ratio
5307 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5308 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5310 if (notify_temp_creation)
5311 return GS_ERROR;
5313 DECL_INITIAL (object) = ctor;
5314 TREE_STATIC (object) = 1;
5315 if (!DECL_NAME (object))
5316 DECL_NAME (object) = create_tmp_var_name ("C");
5317 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5319 /* ??? C++ doesn't automatically append a .<number> to the
5320 assembler name, and even when it does, it looks at FE private
5321 data structures to figure out what that number should be,
5322 which are not set for this variable. I suppose this is
5323 important for local statics for inline functions, which aren't
5324 "local" in the object file sense. So in order to get a unique
5325 TU-local symbol, we must invoke the lhd version now. */
5326 lhd_set_decl_assembler_name (object);
5328 *expr_p = NULL_TREE;
5329 break;
5332 /* The var will be initialized and so appear on lhs of
5333 assignment, it can't be TREE_READONLY anymore. */
5334 if (VAR_P (object) && !notify_temp_creation)
5335 TREE_READONLY (object) = 0;
5337 /* If there are "lots" of initialized elements, even discounting
5338 those that are not address constants (and thus *must* be
5339 computed at runtime), then partition the constructor into
5340 constant and non-constant parts. Block copy the constant
5341 parts in, then generate code for the non-constant parts. */
5342 /* TODO. There's code in cp/typeck.cc to do this. */
5344 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5345 /* store_constructor will ignore the clearing of variable-sized
5346 objects. Initializers for such objects must explicitly set
5347 every field that needs to be set. */
5348 cleared = false;
5349 else if (!complete_p)
5350 /* If the constructor isn't complete, clear the whole object
5351 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5353 ??? This ought not to be needed. For any element not present
5354 in the initializer, we should simply set them to zero. Except
5355 we'd need to *find* the elements that are not present, and that
5356 requires trickery to avoid quadratic compile-time behavior in
5357 large cases or excessive memory use in small cases. */
5358 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5359 else if (num_ctor_elements - num_nonzero_elements
5360 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5361 && num_nonzero_elements < num_ctor_elements / 4)
5362 /* If there are "lots" of zeros, it's more efficient to clear
5363 the memory and then set the nonzero elements. */
5364 cleared = true;
5365 else if (ensure_single_access && num_nonzero_elements == 0)
5366 /* If a single access to the target must be ensured and all elements
5367 are zero, then it's optimal to clear whatever their number. */
5368 cleared = true;
5369 else
5370 cleared = false;
5372 /* If there are "lots" of initialized elements, and all of them
5373 are valid address constants, then the entire initializer can
5374 be dropped to memory, and then memcpy'd out. Don't do this
5375 for sparse arrays, though, as it's more efficient to follow
5376 the standard CONSTRUCTOR behavior of memset followed by
5377 individual element initialization. Also don't do this for small
5378 all-zero initializers (which aren't big enough to merit
5379 clearing), and don't try to make bitwise copies of
5380 TREE_ADDRESSABLE types. */
5381 if (valid_const_initializer
5382 && complete_p
5383 && !(cleared || num_nonzero_elements == 0)
5384 && !TREE_ADDRESSABLE (type))
5386 HOST_WIDE_INT size = int_size_in_bytes (type);
5387 unsigned int align;
5389 /* ??? We can still get unbounded array types, at least
5390 from the C++ front end. This seems wrong, but attempt
5391 to work around it for now. */
5392 if (size < 0)
5394 size = int_size_in_bytes (TREE_TYPE (object));
5395 if (size >= 0)
5396 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5399 /* Find the maximum alignment we can assume for the object. */
5400 /* ??? Make use of DECL_OFFSET_ALIGN. */
5401 if (DECL_P (object))
5402 align = DECL_ALIGN (object);
5403 else
5404 align = TYPE_ALIGN (type);
5406 /* Do a block move either if the size is so small as to make
5407 each individual move a sub-unit move on average, or if it
5408 is so large as to make individual moves inefficient. */
5409 if (size > 0
5410 && num_nonzero_elements > 1
5411 /* For ctors that have many repeated nonzero elements
5412 represented through RANGE_EXPRs, prefer initializing
5413 those through runtime loops over copies of large amounts
5414 of data from readonly data section. */
5415 && (num_unique_nonzero_elements
5416 > num_nonzero_elements / unique_nonzero_ratio
5417 || size <= min_unique_size)
5418 && (size < num_nonzero_elements
5419 || !can_move_by_pieces (size, align)))
5421 if (notify_temp_creation)
5422 return GS_ERROR;
5424 walk_tree (&ctor, force_labels_r, NULL, NULL);
5425 ctor = tree_output_constant_def (ctor);
5426 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5427 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5428 TREE_OPERAND (*expr_p, 1) = ctor;
5430 /* This is no longer an assignment of a CONSTRUCTOR, but
5431 we still may have processing to do on the LHS. So
5432 pretend we didn't do anything here to let that happen. */
5433 return GS_UNHANDLED;
5437 /* If a single access to the target must be ensured and there are
5438 nonzero elements or the zero elements are not assigned en masse,
5439 initialize the target from a temporary. */
5440 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5442 if (notify_temp_creation)
5443 return GS_ERROR;
5445 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5446 TREE_OPERAND (*expr_p, 0) = temp;
5447 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5448 *expr_p,
5449 build2 (MODIFY_EXPR, void_type_node,
5450 object, temp));
5451 return GS_OK;
5454 if (notify_temp_creation)
5455 return GS_OK;
5457 /* If there are nonzero elements and if needed, pre-evaluate to capture
5458 elements overlapping with the lhs into temporaries. We must do this
5459 before clearing to fetch the values before they are zeroed-out. */
5460 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5462 preeval_data.lhs_base_decl = get_base_address (object);
5463 if (!DECL_P (preeval_data.lhs_base_decl))
5464 preeval_data.lhs_base_decl = NULL;
5465 preeval_data.lhs_alias_set = get_alias_set (object);
5467 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5468 pre_p, post_p, &preeval_data);
5471 bool ctor_has_side_effects_p
5472 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5474 if (cleared)
5476 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5477 Note that we still have to gimplify, in order to handle the
5478 case of variable sized types. Avoid shared tree structures. */
5479 CONSTRUCTOR_ELTS (ctor) = NULL;
5480 TREE_SIDE_EFFECTS (ctor) = 0;
5481 object = unshare_expr (object);
5482 gimplify_stmt (expr_p, pre_p);
5485 /* If we have not block cleared the object, or if there are nonzero
5486 elements in the constructor, or if the constructor has side effects,
5487 add assignments to the individual scalar fields of the object. */
5488 if (!cleared
5489 || num_nonzero_elements > 0
5490 || ctor_has_side_effects_p)
5491 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5493 *expr_p = NULL_TREE;
5495 break;
5497 case COMPLEX_TYPE:
5499 tree r, i;
5501 if (notify_temp_creation)
5502 return GS_OK;
5504 /* Extract the real and imaginary parts out of the ctor. */
5505 gcc_assert (elts->length () == 2);
5506 r = (*elts)[0].value;
5507 i = (*elts)[1].value;
5508 if (r == NULL || i == NULL)
5510 tree zero = build_zero_cst (TREE_TYPE (type));
5511 if (r == NULL)
5512 r = zero;
5513 if (i == NULL)
5514 i = zero;
5517 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5518 represent creation of a complex value. */
5519 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5521 ctor = build_complex (type, r, i);
5522 TREE_OPERAND (*expr_p, 1) = ctor;
5524 else
5526 ctor = build2 (COMPLEX_EXPR, type, r, i);
5527 TREE_OPERAND (*expr_p, 1) = ctor;
5528 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5529 pre_p,
5530 post_p,
5531 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5532 fb_rvalue);
5535 break;
5537 case VECTOR_TYPE:
5539 unsigned HOST_WIDE_INT ix;
5540 constructor_elt *ce;
5542 if (notify_temp_creation)
5543 return GS_OK;
5545 /* Vector types use CONSTRUCTOR all the way through gimple
5546 compilation as a general initializer. */
5547 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5549 enum gimplify_status tret;
5550 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5551 fb_rvalue);
5552 if (tret == GS_ERROR)
5553 ret = GS_ERROR;
5554 else if (TREE_STATIC (ctor)
5555 && !initializer_constant_valid_p (ce->value,
5556 TREE_TYPE (ce->value)))
5557 TREE_STATIC (ctor) = 0;
5559 recompute_constructor_flags (ctor);
5561 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5562 if (TREE_CONSTANT (ctor))
5564 bool constant_p = true;
5565 tree value;
5567 /* Even when ctor is constant, it might contain non-*_CST
5568 elements, such as addresses or trapping values like
5569 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5570 in VECTOR_CST nodes. */
5571 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5572 if (!CONSTANT_CLASS_P (value))
5574 constant_p = false;
5575 break;
5578 if (constant_p)
5580 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5581 break;
5585 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5586 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5588 break;
5590 default:
5591 /* So how did we get a CONSTRUCTOR for a scalar type? */
5592 gcc_unreachable ();
5595 if (ret == GS_ERROR)
5596 return GS_ERROR;
5597 /* If we have gimplified both sides of the initializer but have
5598 not emitted an assignment, do so now. */
5599 if (*expr_p
5600 /* If the type is an empty type, we don't need to emit the
5601 assignment. */
5602 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5604 tree lhs = TREE_OPERAND (*expr_p, 0);
5605 tree rhs = TREE_OPERAND (*expr_p, 1);
5606 if (want_value && object == lhs)
5607 lhs = unshare_expr (lhs);
5608 gassign *init = gimple_build_assign (lhs, rhs);
5609 gimplify_seq_add_stmt (pre_p, init);
5611 if (want_value)
5613 *expr_p = object;
5614 ret = GS_OK;
5616 else
5618 *expr_p = NULL;
5619 ret = GS_ALL_DONE;
5622 /* If the user requests to initialize automatic variables, we
5623 should initialize the padding inside the variable. Add a call to
5624 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5625 initialize the padding of object always to zero regardless of
5626 INIT_TYPE. Note, we will not insert this call if the aggregate
5627 variable has been completely cleared already or it's initialized
5628 with an empty constructor. We cannot insert this call if the
5629 variable is a gimple register since __builtin_clear_padding will take
5630 the address of the variable. As a result, if a long double/_Complex long
5631 double variable is spilled onto the stack later, its padding cannot
5632 be cleared with __builtin_clear_padding. We should clear its padding
5633 when it is spilled to memory. */
5634 if (is_init_expr
5635 && !is_gimple_reg (object)
5636 && clear_padding_type_may_have_padding_p (type)
5637 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5638 || !AGGREGATE_TYPE_P (type))
5639 && is_var_need_auto_init (object))
5640 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5642 return ret;
5645 /* Given a pointer value OP0, return a simplified version of an
5646 indirection through OP0, or NULL_TREE if no simplification is
5647 possible. This may only be applied to a rhs of an expression.
5648 Note that the resulting type may be different from the type pointed
5649 to in the sense that it is still compatible from the langhooks
5650 point of view. */
5652 static tree
5653 gimple_fold_indirect_ref_rhs (tree t)
5655 return gimple_fold_indirect_ref (t);
5658 /* Subroutine of gimplify_modify_expr to do simplifications of
5659 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5660 something changes. */
5662 static enum gimplify_status
5663 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5664 gimple_seq *pre_p, gimple_seq *post_p,
5665 bool want_value)
5667 enum gimplify_status ret = GS_UNHANDLED;
5668 bool changed;
5672 changed = false;
5673 switch (TREE_CODE (*from_p))
5675 case VAR_DECL:
5676 /* If we're assigning from a read-only variable initialized with
5677 a constructor and not volatile, do the direct assignment from
5678 the constructor, but only if the target is not volatile either
5679 since this latter assignment might end up being done on a per
5680 field basis. However, if the target is volatile and the type
5681 is aggregate and non-addressable, gimplify_init_constructor
5682 knows that it needs to ensure a single access to the target
5683 and it will return GS_OK only in this case. */
5684 if (TREE_READONLY (*from_p)
5685 && DECL_INITIAL (*from_p)
5686 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5687 && !TREE_THIS_VOLATILE (*from_p)
5688 && (!TREE_THIS_VOLATILE (*to_p)
5689 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5690 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5692 tree old_from = *from_p;
5693 enum gimplify_status subret;
5695 /* Move the constructor into the RHS. */
5696 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5698 /* Let's see if gimplify_init_constructor will need to put
5699 it in memory. */
5700 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5701 false, true);
5702 if (subret == GS_ERROR)
5704 /* If so, revert the change. */
5705 *from_p = old_from;
5707 else
5709 ret = GS_OK;
5710 changed = true;
5713 break;
5714 case INDIRECT_REF:
5715 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5716 /* If we have code like
5718 *(const A*)(A*)&x
5720 where the type of "x" is a (possibly cv-qualified variant
5721 of "A"), treat the entire expression as identical to "x".
5722 This kind of code arises in C++ when an object is bound
5723 to a const reference, and if "x" is a TARGET_EXPR we want
5724 to take advantage of the optimization below. But not if
5725 the type is TREE_ADDRESSABLE; then C++17 says that the
5726 TARGET_EXPR needs to be a temporary. */
5727 if (tree t
5728 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5730 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5731 if (TREE_THIS_VOLATILE (t) != volatile_p)
5733 if (DECL_P (t))
5734 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5735 build_fold_addr_expr (t));
5736 if (REFERENCE_CLASS_P (t))
5737 TREE_THIS_VOLATILE (t) = volatile_p;
5739 *from_p = t;
5740 ret = GS_OK;
5741 changed = true;
5743 break;
5745 case TARGET_EXPR:
5747 /* If we are initializing something from a TARGET_EXPR, strip the
5748 TARGET_EXPR and initialize it directly, if possible. This can't
5749 be done if the initializer is void, since that implies that the
5750 temporary is set in some non-trivial way.
5752 ??? What about code that pulls out the temp and uses it
5753 elsewhere? I think that such code never uses the TARGET_EXPR as
5754 an initializer. If I'm wrong, we'll die because the temp won't
5755 have any RTL. In that case, I guess we'll need to replace
5756 references somehow. */
5757 tree init = TARGET_EXPR_INITIAL (*from_p);
5759 if (init
5760 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5761 || !TARGET_EXPR_NO_ELIDE (*from_p))
5762 && !VOID_TYPE_P (TREE_TYPE (init)))
5764 *from_p = init;
5765 ret = GS_OK;
5766 changed = true;
5769 break;
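/* Illustrative sketch: initializing from a TARGET_EXPR strips the
   temporary when possible, e.g.
     x = TARGET_EXPR <D.1, f ()>
   becomes simply
     x = f ();
   assuming the initializer is not void and TARGET_EXPR_NO_ELIDE does
   not forbid the elision.  */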
5771 case COMPOUND_EXPR:
5772 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5773 caught. */
5774 gimplify_compound_expr (from_p, pre_p, true);
5775 ret = GS_OK;
5776 changed = true;
5777 break;
5779 case CONSTRUCTOR:
5780 /* If we already made some changes, let the front end have a
5781 crack at this before we break it down. */
5782 if (ret != GS_UNHANDLED)
5783 break;
5785 /* If we're initializing from a CONSTRUCTOR, break this into
5786 individual MODIFY_EXPRs. */
5787 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5788 false);
5789 return ret;
5791 case COND_EXPR:
5792 /* If we're assigning to a non-register type, push the assignment
5793 down into the branches. This is mandatory for ADDRESSABLE types,
5794 since we cannot generate temporaries for such, but it saves a
5795 copy in other cases as well. */
5796 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5798 /* This code should mirror the code in gimplify_cond_expr. */
5799 enum tree_code code = TREE_CODE (*expr_p);
5800 tree cond = *from_p;
5801 tree result = *to_p;
5803 ret = gimplify_expr (&result, pre_p, post_p,
5804 is_gimple_lvalue, fb_lvalue);
5805 if (ret != GS_ERROR)
5806 ret = GS_OK;
5808 /* If we are going to write RESULT more than once, clear
5809 TREE_READONLY flag, otherwise we might incorrectly promote
5810 the variable to static const and initialize it at compile
5811 time in one of the branches. */
5812 if (VAR_P (result)
5813 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5814 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5815 TREE_READONLY (result) = 0;
5816 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5817 TREE_OPERAND (cond, 1)
5818 = build2 (code, void_type_node, result,
5819 TREE_OPERAND (cond, 1));
5820 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5821 TREE_OPERAND (cond, 2)
5822 = build2 (code, void_type_node, unshare_expr (result),
5823 TREE_OPERAND (cond, 2));
5825 TREE_TYPE (cond) = void_type_node;
5826 recalculate_side_effects (cond);
5828 if (want_value)
5830 gimplify_and_add (cond, pre_p);
5831 *expr_p = unshare_expr (result);
5833 else
5834 *expr_p = cond;
5835 return ret;
5837 break;
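/* Illustrative sketch: for a non-register type, the assignment is
   pushed into the branches, e.g.
     x = b ? y : z;
   becomes roughly
     if (b) x = y; else x = z;
   avoiding a temporary that could not be created for an addressable
   type anyway.  */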
5839 case CALL_EXPR:
5840 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5841 return slot so that we don't generate a temporary. */
5842 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5843 && aggregate_value_p (*from_p, *from_p))
5845 bool use_target;
5847 if (!(rhs_predicate_for (*to_p))(*from_p))
5848 /* If we need a temporary, *to_p isn't accurate. */
5849 use_target = false;
5850 /* It's OK to use the return slot directly unless it's an NRV. */
5851 else if (TREE_CODE (*to_p) == RESULT_DECL
5852 && DECL_NAME (*to_p) == NULL_TREE
5853 && needs_to_live_in_memory (*to_p))
5854 use_target = true;
5855 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5856 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5857 /* Don't force regs into memory. */
5858 use_target = false;
5859 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5860 /* It's OK to use the target directly if it's being
5861 initialized. */
5862 use_target = true;
5863 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5864 != INTEGER_CST)
5865 /* Always use the target and thus RSO for variable-sized types.
5866 GIMPLE cannot deal with a variable-sized assignment
5867 embedded in a call statement. */
5868 use_target = true;
5869 else if (TREE_CODE (*to_p) != SSA_NAME
5870 && (!is_gimple_variable (*to_p)
5871 || needs_to_live_in_memory (*to_p)))
5872 /* Don't use the original target if it's already addressable;
5873 if its address escapes, and the called function uses the
5874 NRV optimization, a conforming program could see *to_p
5875 change before the called function returns; see c++/19317.
5876 When optimizing, the return_slot pass marks more functions
5877 as safe after we have escape info. */
5878 use_target = false;
5879 else
5880 use_target = true;
5882 if (use_target)
5884 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5885 mark_addressable (*to_p);
5888 break;
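/* Illustrative sketch (hypothetical names): with the return slot
   optimization, a call returning in memory stores directly into the
   destination, so instead of
     D.1 = f (); s = D.1;
   we emit
     s = f ();
   with S passed to F as its return slot.  */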
5890 case WITH_SIZE_EXPR:
5891 /* Likewise for calls that return an aggregate of non-constant size,
5892 since we would not be able to generate a temporary at all. */
5893 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5895 *from_p = TREE_OPERAND (*from_p, 0);
5896 /* We don't change ret in this case because the
5897 WITH_SIZE_EXPR might have been added in
5898 gimplify_modify_expr, so returning GS_OK would lead to an
5899 infinite loop. */
5900 changed = true;
5902 break;
5904 /* If we're initializing from a container, push the initialization
5905 inside it. */
5906 case CLEANUP_POINT_EXPR:
5907 case BIND_EXPR:
5908 case STATEMENT_LIST:
5910 tree wrap = *from_p;
5911 tree t;
5913 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5914 fb_lvalue);
5915 if (ret != GS_ERROR)
5916 ret = GS_OK;
5918 t = voidify_wrapper_expr (wrap, *expr_p);
5919 gcc_assert (t == *expr_p);
5921 if (want_value)
5923 gimplify_and_add (wrap, pre_p);
5924 *expr_p = unshare_expr (*to_p);
5926 else
5927 *expr_p = wrap;
5928 return GS_OK;
5931 case NOP_EXPR:
5932 /* Pull out compound literal expressions from a NOP_EXPR.
5933 Those are created in the C FE to drop qualifiers during
5934 lvalue conversion. */
5935 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5936 && tree_ssa_useless_type_conversion (*from_p))
5938 *from_p = TREE_OPERAND (*from_p, 0);
5939 ret = GS_OK;
5940 changed = true;
5942 break;
5944 case COMPOUND_LITERAL_EXPR:
5946 tree complit = TREE_OPERAND (*expr_p, 1);
5947 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5948 tree decl = DECL_EXPR_DECL (decl_s);
5949 tree init = DECL_INITIAL (decl);
5951 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5952 into struct T x = { 0, 1, 2 } if the address of the
5953 compound literal has never been taken. */
5954 if (!TREE_ADDRESSABLE (complit)
5955 && !TREE_ADDRESSABLE (decl)
5956 && init)
5958 *expr_p = copy_node (*expr_p);
5959 TREE_OPERAND (*expr_p, 1) = init;
5960 return GS_OK;
5964 default:
5965 break;
5968 while (changed);
5970 return ret;
5974 /* Return true if T looks like a valid GIMPLE statement. */
5976 static bool
5977 is_gimple_stmt (tree t)
5979 const enum tree_code code = TREE_CODE (t);
5981 switch (code)
5983 case NOP_EXPR:
5984 /* The only valid NOP_EXPR is the empty statement. */
5985 return IS_EMPTY_STMT (t);
5987 case BIND_EXPR:
5988 case COND_EXPR:
5989 /* These are only valid if they're void. */
5990 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5992 case SWITCH_EXPR:
5993 case GOTO_EXPR:
5994 case RETURN_EXPR:
5995 case LABEL_EXPR:
5996 case CASE_LABEL_EXPR:
5997 case TRY_CATCH_EXPR:
5998 case TRY_FINALLY_EXPR:
5999 case EH_FILTER_EXPR:
6000 case CATCH_EXPR:
6001 case ASM_EXPR:
6002 case STATEMENT_LIST:
6003 case OACC_PARALLEL:
6004 case OACC_KERNELS:
6005 case OACC_SERIAL:
6006 case OACC_DATA:
6007 case OACC_HOST_DATA:
6008 case OACC_DECLARE:
6009 case OACC_UPDATE:
6010 case OACC_ENTER_DATA:
6011 case OACC_EXIT_DATA:
6012 case OACC_CACHE:
6013 case OMP_PARALLEL:
6014 case OMP_FOR:
6015 case OMP_SIMD:
6016 case OMP_DISTRIBUTE:
6017 case OMP_LOOP:
6018 case OACC_LOOP:
6019 case OMP_SCAN:
6020 case OMP_SCOPE:
6021 case OMP_SECTIONS:
6022 case OMP_SECTION:
6023 case OMP_STRUCTURED_BLOCK:
6024 case OMP_SINGLE:
6025 case OMP_MASTER:
6026 case OMP_MASKED:
6027 case OMP_TASKGROUP:
6028 case OMP_ORDERED:
6029 case OMP_CRITICAL:
6030 case OMP_TASK:
6031 case OMP_TARGET:
6032 case OMP_TARGET_DATA:
6033 case OMP_TARGET_UPDATE:
6034 case OMP_TARGET_ENTER_DATA:
6035 case OMP_TARGET_EXIT_DATA:
6036 case OMP_TASKLOOP:
6037 case OMP_TEAMS:
6038 /* These are always void. */
6039 return true;
6041 case CALL_EXPR:
6042 case MODIFY_EXPR:
6043 case PREDICT_EXPR:
6044 /* These are valid regardless of their type. */
6045 return true;
6047 default:
6048 return false;
6053 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6054 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6056 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6057 other, unmodified part of the complex object just before the total store.
6058 As a consequence, if the object is still uninitialized, an undefined value
6059 will be loaded into a register, which may result in a spurious exception
6060 if the register is floating-point and the value happens to be a signaling
6061 NaN, for example. The fully-fledged complex operations lowering
6062 pass followed by a DCE pass is then necessary to fix things up. */
6064 static enum gimplify_status
6065 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6066 bool want_value)
6068 enum tree_code code, ocode;
6069 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6071 lhs = TREE_OPERAND (*expr_p, 0);
6072 rhs = TREE_OPERAND (*expr_p, 1);
6073 code = TREE_CODE (lhs);
6074 lhs = TREE_OPERAND (lhs, 0);
6076 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6077 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6078 suppress_warning (other);
6079 other = get_formal_tmp_var (other, pre_p);
6081 realpart = code == REALPART_EXPR ? rhs : other;
6082 imagpart = code == REALPART_EXPR ? other : rhs;
6084 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6085 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6086 else
6087 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6089 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6090 *expr_p = (want_value) ? rhs : NULL_TREE;
6092 return GS_ALL_DONE;
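/* Illustrative sketch (hypothetical temporary): the partial store
     __real__ c = x;
   is promoted to the total store
     D.1 = __imag__ c;
     c = COMPLEX_EXPR <x, D.1>;
   so that C can use real instead of virtual operands.  */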
6095 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6097 modify_expr
6098 : varname '=' rhs
6099 | '*' ID '=' rhs
6101 PRE_P points to the list where side effects that must happen before
6102 *EXPR_P should be stored.
6104 POST_P points to the list where side effects that must happen after
6105 *EXPR_P should be stored.
6107 WANT_VALUE is nonzero iff we want to use the value of this expression
6108 in another expression. */
6110 static enum gimplify_status
6111 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6112 bool want_value)
6114 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6115 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6116 enum gimplify_status ret = GS_UNHANDLED;
6117 gimple *assign;
6118 location_t loc = EXPR_LOCATION (*expr_p);
6119 gimple_stmt_iterator gsi;
6121 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6122 return GS_ERROR;
6124 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6125 || TREE_CODE (*expr_p) == INIT_EXPR);
6127 /* Trying to simplify a clobber using normal logic doesn't work,
6128 so handle it here. */
6129 if (TREE_CLOBBER_P (*from_p))
6131 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6132 if (ret == GS_ERROR)
6133 return ret;
6134 gcc_assert (!want_value);
6135 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6137 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6138 pre_p, post_p);
6139 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6141 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6142 *expr_p = NULL;
6143 return GS_ALL_DONE;
6146 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6147 memset. */
6148 if (TREE_TYPE (*from_p) != error_mark_node
6149 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6150 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6151 && TREE_CODE (*from_p) == CONSTRUCTOR
6152 && CONSTRUCTOR_NELTS (*from_p) == 0)
6154 maybe_with_size_expr (from_p);
6155 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6156 return gimplify_modify_expr_to_memset (expr_p,
6157 TREE_OPERAND (*from_p, 1),
6158 want_value, pre_p);
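/* Illustrative sketch: zero-initialization of a variable-sized
   object, e.g. a VLA initialized from an empty constructor, is
   lowered to roughly
     __builtin_memset (&object, 0, size);
   where SIZE is the WITH_SIZE_EXPR operand computed above (names
   illustrative).  */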
6161 /* Insert pointer conversions required by the middle-end that are not
6162 required by the frontend. This fixes middle-end type checking,
6163 for example for gcc.dg/redecl-6.c. */
6164 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6166 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6167 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6168 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6171 /* See if any simplifications can be done based on what the RHS is. */
6172 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6173 want_value);
6174 if (ret != GS_UNHANDLED)
6175 return ret;
6177 /* For empty types only gimplify the left hand side and right hand
6178 side as statements and throw away the assignment. Do this after
6179 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6180 types properly. */
6181 if (is_empty_type (TREE_TYPE (*from_p))
6182 && !want_value
6183 /* Don't do this for calls that return addressable types, expand_call
6184 relies on those having a lhs. */
6185 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6186 && TREE_CODE (*from_p) == CALL_EXPR))
6188 gimplify_stmt (from_p, pre_p);
6189 gimplify_stmt (to_p, pre_p);
6190 *expr_p = NULL_TREE;
6191 return GS_ALL_DONE;
6194 /* If the value being copied is of variable width, compute the length
6195 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6196 before gimplifying any of the operands so that we can resolve any
6197 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6198 the size of the expression to be copied, not of the destination, so
6199 that is what we must do here. */
6200 maybe_with_size_expr (from_p);
6202 /* As a special case, we have to temporarily allow for assignments
6203 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6204 a toplevel statement, when gimplifying the GENERIC expression
6205 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6206 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6208 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6209 prevent gimplify_expr from trying to create a new temporary for
6210 foo's LHS, we tell it that it should only gimplify until it
6211 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6212 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6213 and all we need to do here is set 'a' to be its LHS. */
6215 /* Gimplify the RHS first for C++17 and bug 71104. */
6216 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6217 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6218 if (ret == GS_ERROR)
6219 return ret;
6221 /* Then gimplify the LHS. */
6222 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6223 twice we have to make sure to gimplify into non-SSA as otherwise
6224 the abnormal edge added later will make those defs not dominate
6225 their uses.
6226 ??? Technically this applies only to the registers used in the
6227 resulting non-register *TO_P. */
6228 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6229 if (saved_into_ssa
6230 && TREE_CODE (*from_p) == CALL_EXPR
6231 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6232 gimplify_ctxp->into_ssa = false;
6233 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6234 gimplify_ctxp->into_ssa = saved_into_ssa;
6235 if (ret == GS_ERROR)
6236 return ret;
6238 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6239 guess for the predicate was wrong. */
6240 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6241 if (final_pred != initial_pred)
6243 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6244 if (ret == GS_ERROR)
6245 return ret;
6248 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6249 size as argument to the call. */
6250 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6252 tree call = TREE_OPERAND (*from_p, 0);
6253 tree vlasize = TREE_OPERAND (*from_p, 1);
6255 if (TREE_CODE (call) == CALL_EXPR
6256 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6258 int nargs = call_expr_nargs (call);
6259 tree type = TREE_TYPE (call);
6260 tree ap = CALL_EXPR_ARG (call, 0);
6261 tree tag = CALL_EXPR_ARG (call, 1);
6262 tree aptag = CALL_EXPR_ARG (call, 2);
6263 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6264 IFN_VA_ARG, type,
6265 nargs + 1, ap, tag,
6266 aptag, vlasize);
6267 TREE_OPERAND (*from_p, 0) = newcall;
6271 /* Now see if the above changed *from_p to something we handle specially. */
6272 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6273 want_value);
6274 if (ret != GS_UNHANDLED)
6275 return ret;
6277 /* If we've got a variable sized assignment between two lvalues (i.e. does
6278 not involve a call), then we can make things a bit more straightforward
6279 by converting the assignment to memcpy or memset. */
6280 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6282 tree from = TREE_OPERAND (*from_p, 0);
6283 tree size = TREE_OPERAND (*from_p, 1);
6285 if (TREE_CODE (from) == CONSTRUCTOR)
6286 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6288 if (is_gimple_addressable (from))
6290 *from_p = from;
6291 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6292 pre_p);
6296 /* Transform partial stores to non-addressable complex variables into
6297 total stores. This allows us to use real instead of virtual operands
6298 for these variables, which improves optimization. */
6299 if ((TREE_CODE (*to_p) == REALPART_EXPR
6300 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6301 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6302 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6304 /* Try to alleviate the effects of the gimplification creating artificial
6305 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6306 make sure not to create DECL_DEBUG_EXPR links across functions. */
6307 if (!gimplify_ctxp->into_ssa
6308 && VAR_P (*from_p)
6309 && DECL_IGNORED_P (*from_p)
6310 && DECL_P (*to_p)
6311 && !DECL_IGNORED_P (*to_p)
6312 && decl_function_context (*to_p) == current_function_decl
6313 && decl_function_context (*from_p) == current_function_decl)
6315 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6316 DECL_NAME (*from_p)
6317 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6318 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6319 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6322 if (want_value && TREE_THIS_VOLATILE (*to_p))
6323 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6325 if (TREE_CODE (*from_p) == CALL_EXPR)
6327 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6328 instead of a GIMPLE_ASSIGN. */
6329 gcall *call_stmt;
6330 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6332 /* Gimplify internal functions created in the FEs. */
6333 int nargs = call_expr_nargs (*from_p), i;
6334 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6335 auto_vec<tree> vargs (nargs);
6337 for (i = 0; i < nargs; i++)
6339 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6340 EXPR_LOCATION (*from_p));
6341 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6343 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6344 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6345 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6347 else
6349 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6350 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6351 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6352 tree fndecl = get_callee_fndecl (*from_p);
6353 if (fndecl
6354 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6355 && call_expr_nargs (*from_p) == 3)
6356 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6357 CALL_EXPR_ARG (*from_p, 0),
6358 CALL_EXPR_ARG (*from_p, 1),
6359 CALL_EXPR_ARG (*from_p, 2));
6360 else
6362 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6365 notice_special_calls (call_stmt);
6366 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6367 gimple_call_set_lhs (call_stmt, *to_p);
6368 else if (TREE_CODE (*to_p) == SSA_NAME)
6369 /* The above is somewhat premature; avoid ICEing later for an
6370 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6371 ??? This doesn't make it a default-def. */
6372 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6374 assign = call_stmt;
6376 else
6378 assign = gimple_build_assign (*to_p, *from_p);
6379 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6380 if (COMPARISON_CLASS_P (*from_p))
6381 copy_warning (assign, *from_p);
6384 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6386 /* We should have got an SSA name from the start. */
6387 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6388 || ! gimple_in_ssa_p (cfun));
6391 gimplify_seq_add_stmt (pre_p, assign);
6392 gsi = gsi_last (*pre_p);
6393 maybe_fold_stmt (&gsi);
6395 if (want_value)
6397 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6398 return GS_OK;
6400 else
6401 *expr_p = NULL;
6403 return GS_ALL_DONE;
6406 /* Gimplify a comparison between two variable-sized objects. Do this
6407 with a call to BUILT_IN_MEMCMP. */
6409 static enum gimplify_status
6410 gimplify_variable_sized_compare (tree *expr_p)
6412 location_t loc = EXPR_LOCATION (*expr_p);
6413 tree op0 = TREE_OPERAND (*expr_p, 0);
6414 tree op1 = TREE_OPERAND (*expr_p, 1);
6415 tree t, arg, dest, src, expr;
6417 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6418 arg = unshare_expr (arg);
6419 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6420 src = build_fold_addr_expr_loc (loc, op1);
6421 dest = build_fold_addr_expr_loc (loc, op0);
6422 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6423 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6425 expr
6426 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6427 SET_EXPR_LOCATION (expr, loc);
6428 *expr_p = expr;
6430 return GS_OK;
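/* Illustrative sketch: a comparison of two variable-sized objects,
     x == y
   becomes roughly
     __builtin_memcmp (&x, &y, size) == 0
   where SIZE is TYPE_SIZE_UNIT of X's type with any PLACEHOLDER_EXPRs
   substituted from X.  */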
6433 /* Gimplify a comparison between two aggregate objects of integral scalar
6434 mode as a comparison between the bitwise equivalent scalar values. */
6436 static enum gimplify_status
6437 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6439 location_t loc = EXPR_LOCATION (*expr_p);
6440 tree op0 = TREE_OPERAND (*expr_p, 0);
6441 tree op1 = TREE_OPERAND (*expr_p, 1);
6443 tree type = TREE_TYPE (op0);
6444 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6446 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6447 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6449 *expr_p
6450 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6452 return GS_OK;
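/* Illustrative sketch: two aggregates whose type has an integral
   scalar mode, say a four-byte struct with SImode, are compared as
   the bitwise-equivalent scalars:
     s1 == s2
   becomes roughly
     VIEW_CONVERT_EXPR <unsigned int> (s1)
       == VIEW_CONVERT_EXPR <unsigned int> (s2)
   assuming the frontend provides a scalar type for the mode.  */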
6455 /* Gimplify an expression sequence. This function gimplifies each
6456 expression and rewrites the original expression with the last
6457 expression of the sequence in GIMPLE form.
6459 PRE_P points to the list where the side effects for all the
6460 expressions in the sequence will be emitted.
6462 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6464 static enum gimplify_status
6465 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6467 tree t = *expr_p;
6471 tree *sub_p = &TREE_OPERAND (t, 0);
6473 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6474 gimplify_compound_expr (sub_p, pre_p, false);
6475 else
6476 gimplify_stmt (sub_p, pre_p);
6478 t = TREE_OPERAND (t, 1);
6480 while (TREE_CODE (t) == COMPOUND_EXPR);
6482 *expr_p = t;
6483 if (want_value)
6484 return GS_OK;
6485 else
6487 gimplify_stmt (expr_p, pre_p);
6488 return GS_ALL_DONE;
6492 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6493 gimplify. After gimplification, EXPR_P will point to a new temporary
6494 that holds the original value of the SAVE_EXPR node.
6496 PRE_P points to the list where side effects that must happen before
6497 *EXPR_P should be stored. */
6499 static enum gimplify_status
6500 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6502 enum gimplify_status ret = GS_ALL_DONE;
6503 tree val;
6505 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6506 val = TREE_OPERAND (*expr_p, 0);
6508 if (val && TREE_TYPE (val) == error_mark_node)
6509 return GS_ERROR;
6511 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6512 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6514 /* The operand may be a void-valued expression. It is
6515 being executed only for its side-effects. */
6516 if (TREE_TYPE (val) == void_type_node)
6518 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6519 is_gimple_stmt, fb_none);
6520 val = NULL;
6522 else
6523 /* The temporary may not be an SSA name as later abnormal and EH
6524 control flow may invalidate use/def domination. When in SSA
6525 form then assume there are no such issues and SAVE_EXPRs only
6526 appear via GENERIC foldings. */
6527 val = get_initialized_tmp_var (val, pre_p, post_p,
6528 gimple_in_ssa_p (cfun));
6530 TREE_OPERAND (*expr_p, 0) = val;
6531 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6534 *expr_p = val;
6536 return ret;
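/* Illustrative sketch (hypothetical temporary): the first time
   SAVE_EXPR <x + y> is gimplified, its operand is evaluated once,
     D.1 = x + y;
   and the SAVE_EXPR is resolved to D.1, so every later reference
   reuses the temporary instead of re-evaluating the operand.  */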
6539 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6541 unary_expr
6542 : ...
6543 | '&' varname
6546 PRE_P points to the list where side effects that must happen before
6547 *EXPR_P should be stored.
6549 POST_P points to the list where side effects that must happen after
6550 *EXPR_P should be stored. */
6552 static enum gimplify_status
6553 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6555 tree expr = *expr_p;
6556 tree op0 = TREE_OPERAND (expr, 0);
6557 enum gimplify_status ret;
6558 location_t loc = EXPR_LOCATION (*expr_p);
6560 switch (TREE_CODE (op0))
6562 case INDIRECT_REF:
6563 do_indirect_ref:
6564 /* Check if we are dealing with an expression of the form '&*ptr'.
6565 While the front end folds away '&*ptr' into 'ptr', these
6566 expressions may be generated internally by the compiler (e.g.,
6567 builtins like __builtin_va_end). */
6568 /* Caution: the silent array decomposition semantics we allow for
6569 ADDR_EXPR means we can't always discard the pair. */
6570 /* Gimplification of the ADDR_EXPR operand may drop
6571 cv-qualification conversions, so make sure we add them if
6572 needed. */
6574 tree op00 = TREE_OPERAND (op0, 0);
6575 tree t_expr = TREE_TYPE (expr);
6576 tree t_op00 = TREE_TYPE (op00);
6578 if (!useless_type_conversion_p (t_expr, t_op00))
6579 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6580 *expr_p = op00;
6581 ret = GS_OK;
6583 break;
6585 case VIEW_CONVERT_EXPR:
6586 /* Take the address of our operand and then convert it to the type of
6587 this ADDR_EXPR.
6589 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6590 all clear. The impact of this transformation is even less clear. */
6592 /* If the operand is a useless conversion, look through it. Doing so
6593 guarantees that the ADDR_EXPR and its operand will remain of the
6594 same type. */
6595 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6596 op0 = TREE_OPERAND (op0, 0);
6598 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6599 build_fold_addr_expr_loc (loc,
6600 TREE_OPERAND (op0, 0)));
6601 ret = GS_OK;
6602 break;
6604 case MEM_REF:
6605 if (integer_zerop (TREE_OPERAND (op0, 1)))
6606 goto do_indirect_ref;
6608 /* fall through */
6610 default:
6611 /* If we see a call to a declared builtin or see its address
6612 being taken (we can unify those cases here) then we can mark
6613 the builtin for implicit generation by GCC. */
6614 if (TREE_CODE (op0) == FUNCTION_DECL
6615 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6616 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6617 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6619 /* We use fb_either here because the C frontend sometimes takes
6620 the address of a call that returns a struct; see
6621 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6622 the implied temporary explicit. */
6624 /* Make the operand addressable. */
6625 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6626 is_gimple_addressable, fb_either);
6627 if (ret == GS_ERROR)
6628 break;
6630 /* Then mark it. Beware that it may not be possible to do so directly
6631 if a temporary has been created by the gimplification. */
6632 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6634 op0 = TREE_OPERAND (expr, 0);
6636 /* For various reasons, the gimplification of the expression
6637 may have made a new INDIRECT_REF. */
6638 if (INDIRECT_REF_P (op0)
6639 || (TREE_CODE (op0) == MEM_REF
6640 && integer_zerop (TREE_OPERAND (op0, 1))))
6641 goto do_indirect_ref;
6643 mark_addressable (TREE_OPERAND (expr, 0));
6645 /* The FEs may end up building ADDR_EXPRs early on a decl with
6646 an incomplete type. Re-build ADDR_EXPRs in canonical form
6647 here. */
6648 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6649 *expr_p = build_fold_addr_expr (op0);
6651 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6652 recompute_tree_invariant_for_addr_expr (*expr_p);
6654 /* If we re-built the ADDR_EXPR add a conversion to the original type
6655 if required. */
6656 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6657 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6659 break;
6662 return ret;
6665 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6666 value; output operands should be a gimple lvalue. */
6668 static enum gimplify_status
6669 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6671 tree expr;
6672 int noutputs;
6673 const char **oconstraints;
6674 int i;
6675 tree link;
6676 const char *constraint;
6677 bool allows_mem, allows_reg, is_inout;
6678 enum gimplify_status ret, tret;
6679 gasm *stmt;
6680 vec<tree, va_gc> *inputs;
6681 vec<tree, va_gc> *outputs;
6682 vec<tree, va_gc> *clobbers;
6683 vec<tree, va_gc> *labels;
6684 tree link_next;
6686 expr = *expr_p;
6687 noutputs = list_length (ASM_OUTPUTS (expr));
6688 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6690 inputs = NULL;
6691 outputs = NULL;
6692 clobbers = NULL;
6693 labels = NULL;
6695 ret = GS_ALL_DONE;
6696 link_next = NULL_TREE;
6697 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6699 bool ok;
6700 size_t constraint_len;
6702 link_next = TREE_CHAIN (link);
6704 oconstraints[i]
6705 = constraint
6706 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6707 constraint_len = strlen (constraint);
6708 if (constraint_len == 0)
6709 continue;
6711 ok = parse_output_constraint (&constraint, i, 0, 0,
6712 &allows_mem, &allows_reg, &is_inout);
6713 if (!ok)
6715 ret = GS_ERROR;
6716 is_inout = false;
6719 /* If we can't make copies, we can only accept memory.
6720 Similarly for VLAs. */
6721 tree outtype = TREE_TYPE (TREE_VALUE (link));
6722 if (outtype != error_mark_node
6723 && (TREE_ADDRESSABLE (outtype)
6724 || !COMPLETE_TYPE_P (outtype)
6725 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6727 if (allows_mem)
6728 allows_reg = 0;
6729 else
6731 error ("impossible constraint in %<asm%>");
6732 error ("non-memory output %d must stay in memory", i);
6733 return GS_ERROR;
6737 if (!allows_reg && allows_mem)
6738 mark_addressable (TREE_VALUE (link));
6740 tree orig = TREE_VALUE (link);
6741 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6742 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6743 fb_lvalue | fb_mayfail);
6744 if (tret == GS_ERROR)
6746 if (orig != error_mark_node)
6747 error ("invalid lvalue in %<asm%> output %d", i);
6748 ret = tret;
6751 /* If the constraint does not allow memory make sure we gimplify
6752 it to a register if it is not already but its base is. This
6753 happens for complex and vector components. */
6754 if (!allows_mem)
6756 tree op = TREE_VALUE (link);
6757 if (! is_gimple_val (op)
6758 && is_gimple_reg_type (TREE_TYPE (op))
6759 && is_gimple_reg (get_base_address (op)))
6761 tree tem = create_tmp_reg (TREE_TYPE (op));
6762 tree ass;
6763 if (is_inout)
6765 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6766 tem, unshare_expr (op));
6767 gimplify_and_add (ass, pre_p);
6769 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6770 gimplify_and_add (ass, post_p);
6772 TREE_VALUE (link) = tem;
6773 tret = GS_OK;
6777 vec_safe_push (outputs, link);
6778 TREE_CHAIN (link) = NULL_TREE;
6780 if (is_inout)
6782 /* An input/output operand. To give the optimizers more
6783 flexibility, split it into separate input and output
6784 operands. */
6785 tree input;
6786 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6787 char buf[11];
6789 /* Turn the in/out constraint into an output constraint. */
6790 char *p = xstrdup (constraint);
6791 p[0] = '=';
6792 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6794 /* And add a matching input constraint. */
6795 if (allows_reg)
6797 sprintf (buf, "%u", i);
6799 /* If there are multiple alternatives in the constraint,
6800 handle each of them individually. Alternatives that allow a
6801 register are replaced with the operand number; the others
6802 stay unchanged. */
6803 if (strchr (p, ',') != NULL)
6805 size_t len = 0, buflen = strlen (buf);
6806 char *beg, *end, *str, *dst;
6808 for (beg = p + 1;;)
6810 end = strchr (beg, ',');
6811 if (end == NULL)
6812 end = strchr (beg, '\0');
6813 if ((size_t) (end - beg) < buflen)
6814 len += buflen + 1;
6815 else
6816 len += end - beg + 1;
6817 if (*end)
6818 beg = end + 1;
6819 else
6820 break;
6823 str = (char *) alloca (len);
6824 for (beg = p + 1, dst = str;;)
6826 const char *tem;
6827 bool mem_p, reg_p, inout_p;
6829 end = strchr (beg, ',');
6830 if (end)
6831 *end = '\0';
6832 beg[-1] = '=';
6833 tem = beg - 1;
6834 parse_output_constraint (&tem, i, 0, 0,
6835 &mem_p, &reg_p, &inout_p);
6836 if (dst != str)
6837 *dst++ = ',';
6838 if (reg_p)
6840 memcpy (dst, buf, buflen);
6841 dst += buflen;
6843 else
6845 if (end)
6846 len = end - beg;
6847 else
6848 len = strlen (beg);
6849 memcpy (dst, beg, len);
6850 dst += len;
6852 if (end)
6853 beg = end + 1;
6854 else
6855 break;
6857 *dst = '\0';
6858 input = build_string (dst - str, str);
6860 else
6861 input = build_string (strlen (buf), buf);
6863 else
6864 input = build_string (constraint_len - 1, constraint + 1);
6866 free (p);
6868 input = build_tree_list (build_tree_list (NULL_TREE, input),
6869 unshare_expr (TREE_VALUE (link)));
6870 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
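/* Illustrative sketch: an in/out operand such as
     asm ("..." : "+r" (x));
   is split into an output plus a matching-number input, roughly
     asm ("..." : "=r" (x) : "0" (x));
   with multi-alternative constraints rewritten alternative by
   alternative as above.  */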
6874 link_next = NULL_TREE;
6875 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6877 link_next = TREE_CHAIN (link);
6878 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6879 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6880 oconstraints, &allows_mem, &allows_reg);
6882 /* If we can't make copies, we can only accept memory. */
6883 tree intype = TREE_TYPE (TREE_VALUE (link));
6884 if (intype != error_mark_node
6885 && (TREE_ADDRESSABLE (intype)
6886 || !COMPLETE_TYPE_P (intype)
6887 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6889 if (allows_mem)
6890 allows_reg = 0;
6891 else
6893 error ("impossible constraint in %<asm%>");
6894 error ("non-memory input %d must stay in memory", i);
6895 return GS_ERROR;
6899 /* If the operand is a memory input, it should be an lvalue. */
6900 if (!allows_reg && allows_mem)
6902 tree inputv = TREE_VALUE (link);
6903 STRIP_NOPS (inputv);
6904 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6905 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6906 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6907 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6908 || TREE_CODE (inputv) == MODIFY_EXPR)
6909 TREE_VALUE (link) = error_mark_node;
6910 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6911 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6912 if (tret != GS_ERROR)
6914 /* Unlike output operands, memory inputs are not guaranteed
6915 to be lvalues by the FE, and while the expressions are
6916 marked addressable there, if it is e.g. a statement
6917 expression, temporaries in it might not end up being
6918 addressable. They might be already used in the IL and thus
6919 it is too late to make them addressable now though. */
6920 tree x = TREE_VALUE (link);
6921 while (handled_component_p (x))
6922 x = TREE_OPERAND (x, 0);
6923 if (TREE_CODE (x) == MEM_REF
6924 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6925 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6926 if ((VAR_P (x)
6927 || TREE_CODE (x) == PARM_DECL
6928 || TREE_CODE (x) == RESULT_DECL)
6929 && !TREE_ADDRESSABLE (x)
6930 && is_gimple_reg (x))
6932 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6933 input_location), 0,
6934 "memory input %d is not directly addressable",
6936 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6939 mark_addressable (TREE_VALUE (link));
6940 if (tret == GS_ERROR)
6942 if (inputv != error_mark_node)
6943 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6944 "memory input %d is not directly addressable", i);
6945 ret = tret;
6948 else
6950 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6951 is_gimple_asm_val, fb_rvalue);
6952 if (tret == GS_ERROR)
6953 ret = tret;
6956 TREE_CHAIN (link) = NULL_TREE;
6957 vec_safe_push (inputs, link);
6960 link_next = NULL_TREE;
6961 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6963 link_next = TREE_CHAIN (link);
6964 TREE_CHAIN (link) = NULL_TREE;
6965 vec_safe_push (clobbers, link);
6968 link_next = NULL_TREE;
6969 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6971 link_next = TREE_CHAIN (link);
6972 TREE_CHAIN (link) = NULL_TREE;
6973 vec_safe_push (labels, link);
6976 /* Do not add ASMs with errors to the gimple IL stream. */
6977 if (ret != GS_ERROR)
6979 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6980 inputs, outputs, clobbers, labels);
6982 /* An asm is volatile if the user marked it volatile, if there
6983 are no outputs, or if it is an asm goto. */
6984 gimple_asm_set_volatile (stmt,
6985 ASM_VOLATILE_P (expr)
6986 || noutputs == 0
6987 || labels);
6988 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6989 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6991 gimplify_seq_add_stmt (pre_p, stmt);
6994 return ret;
6997 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6998 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6999 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7000 return to this function.
7002 FIXME should we complexify the prequeue handling instead? Or use flags
7003 for all the cleanups and let the optimizer tighten them up? The current
7004 code seems pretty fragile; it will break on a cleanup within any
7005 non-conditional nesting. But any such nesting would be broken, anyway;
7006 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7007 and continues out of it. We can do that at the RTL level, though, so
7008 having an optimizer to tighten up try/finally regions would be a Good
7009 Thing. */
7011 static enum gimplify_status
7012 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
7014 gimple_stmt_iterator iter;
7015 gimple_seq body_sequence = NULL;
7017 tree temp = voidify_wrapper_expr (*expr_p, NULL);
7019 /* We only care about the number of conditions between the innermost
7020 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7021 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7022 int old_conds = gimplify_ctxp->conditions;
7023 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
7024 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
7025 gimplify_ctxp->conditions = 0;
7026 gimplify_ctxp->conditional_cleanups = NULL;
7027 gimplify_ctxp->in_cleanup_point_expr = true;
7029 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
7031 gimplify_ctxp->conditions = old_conds;
7032 gimplify_ctxp->conditional_cleanups = old_cleanups;
7033 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
7035 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
7037 gimple *wce = gsi_stmt (iter);
7039 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
7041 if (gsi_one_before_end_p (iter))
7043 /* Note that gsi_insert_seq_before and gsi_remove do not
7044 scan operands, unlike some other sequence mutators. */
7045 if (!gimple_wce_cleanup_eh_only (wce))
7046 gsi_insert_seq_before_without_update (&iter,
7047 gimple_wce_cleanup (wce),
7048 GSI_SAME_STMT);
7049 gsi_remove (&iter, true);
7050 break;
7052 else
7054 gtry *gtry;
7055 gimple_seq seq;
7056 enum gimple_try_flags kind;
7058 if (gimple_wce_cleanup_eh_only (wce))
7059 kind = GIMPLE_TRY_CATCH;
7060 else
7061 kind = GIMPLE_TRY_FINALLY;
7062 seq = gsi_split_seq_after (iter);
7064 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7065 /* Do not use gsi_replace here, as it may scan operands.
7066 We want to do a simple structural modification only. */
7067 gsi_set_stmt (&iter, gtry);
7068 iter = gsi_start (gtry->eval);
7071 else
7072 gsi_next (&iter);
7075 gimplify_seq_add_seq (pre_p, body_sequence);
7076 if (temp)
7078 *expr_p = temp;
7079 return GS_OK;
7081 else
7083 *expr_p = NULL;
7084 return GS_ALL_DONE;
7088 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7089 is the cleanup action required. EH_ONLY is true if the cleanup should
7090 only be executed if an exception is thrown, not on normal exit.
7091 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7092 only valid for clobbers. */
7094 static void
7095 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7096 bool force_uncond = false)
7098 gimple *wce;
7099 gimple_seq cleanup_stmts = NULL;
7101 /* Errors can result in improperly nested cleanups. Which results in
7102 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7103 if (seen_error ())
7104 return;
7106 if (gimple_conditional_context ())
7108 /* If we're in a conditional context, this is more complex. We only
7109 want to run the cleanup if we actually ran the initialization that
7110 necessitates it, but we want to run it after the end of the
7111 conditional context. So we wrap the try/finally around the
7112 condition and use a flag to determine whether or not to actually
7113 run the destructor. Thus
7115 test ? f(A()) : 0
7117 becomes (approximately)
7119 flag = 0;
7120 try {
7121 if (test) { A::A(temp); flag = 1; val = f(temp); }
7122 else { val = 0; }
7123 } finally {
7124 if (flag) A::~A(temp);
7128 if (force_uncond)
7130 gimplify_stmt (&cleanup, &cleanup_stmts);
7131 wce = gimple_build_wce (cleanup_stmts);
7132 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7134 else
7136 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7137 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7138 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7140 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7141 gimplify_stmt (&cleanup, &cleanup_stmts);
7142 wce = gimple_build_wce (cleanup_stmts);
7143 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7145 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7146 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7147 gimplify_seq_add_stmt (pre_p, ftrue);
7149 /* Because of this manipulation, and the EH edges that jump
7150 threading cannot redirect, the temporary (VAR) will appear
7151 to be used uninitialized. Don't warn. */
7152 suppress_warning (var, OPT_Wuninitialized);
7155 else
7157 gimplify_stmt (&cleanup, &cleanup_stmts);
7158 wce = gimple_build_wce (cleanup_stmts);
7159 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7160 gimplify_seq_add_stmt (pre_p, wce);
7164 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7166 static enum gimplify_status
7167 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7169 tree targ = *expr_p;
7170 tree temp = TARGET_EXPR_SLOT (targ);
7171 tree init = TARGET_EXPR_INITIAL (targ);
7172 enum gimplify_status ret;
7174 bool unpoison_empty_seq = false;
7175 gimple_stmt_iterator unpoison_it;
7177 if (init)
7179 gimple_seq init_pre_p = NULL;
7181 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
7182 to the temps list. Handle also variable length TARGET_EXPRs. */
7183 if (!poly_int_tree_p (DECL_SIZE (temp)))
7185 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7186 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7187 /* FIXME: this is correct only when the size of the type does
7188 not depend on expressions evaluated in init. */
7189 gimplify_vla_decl (temp, &init_pre_p);
7191 else
7193 /* Save location where we need to place unpoisoning. It's possible
7194 that a variable will be converted to needs_to_live_in_memory. */
7195 unpoison_it = gsi_last (*pre_p);
7196 unpoison_empty_seq = gsi_end_p (unpoison_it);
7198 gimple_add_tmp_var (temp);
7201 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7202 expression is supposed to initialize the slot. */
7203 if (VOID_TYPE_P (TREE_TYPE (init)))
7204 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7205 fb_none);
7206 else
7208 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7209 init = init_expr;
7210 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7211 fb_none);
7212 init = NULL;
7213 ggc_free (init_expr);
7215 if (ret == GS_ERROR)
7217 /* PR c++/28266 Make sure this is expanded only once. */
7218 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7219 return GS_ERROR;
7222 if (init)
7223 gimplify_and_add (init, &init_pre_p);
7225 /* Add a clobber for the temporary going out of scope, like
7226 gimplify_bind_expr. But only if we did not promote the
7227 temporary to static storage. */
7228 if (gimplify_ctxp->in_cleanup_point_expr
7229 && !TREE_STATIC (temp)
7230 && needs_to_live_in_memory (temp))
7232 if (flag_stack_reuse == SR_ALL)
7234 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7235 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7236 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7238 if (asan_poisoned_variables
7239 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7240 && !TREE_STATIC (temp)
7241 && dbg_cnt (asan_use_after_scope)
7242 && !gimplify_omp_ctxp)
7244 tree asan_cleanup = build_asan_poison_call_expr (temp);
7245 if (asan_cleanup)
7247 if (unpoison_empty_seq)
7248 unpoison_it = gsi_start (*pre_p);
7250 asan_poison_variable (temp, false, &unpoison_it,
7251 unpoison_empty_seq);
7252 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7257 gimple_seq_add_seq (pre_p, init_pre_p);
7259 /* If needed, push the cleanup for the temp. */
7260 if (TARGET_EXPR_CLEANUP (targ))
7261 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7262 CLEANUP_EH_ONLY (targ), pre_p);
7264 /* Only expand this once. */
7265 TREE_OPERAND (targ, 3) = init;
7266 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7268 else
7269 /* We should have expanded this before. */
7270 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7272 *expr_p = temp;
7273 return GS_OK;
7276 /* Gimplification of expression trees. */
7278 /* Gimplify an expression which appears at statement context. The
7279 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7280 NULL, a new sequence is allocated.
7282 Return true if we actually added a statement to the queue. */
7284 bool
7285 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7287 gimple_seq_node last;
7289 last = gimple_seq_last (*seq_p);
7290 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7291 return last != gimple_seq_last (*seq_p);
7294 /* Add FIRSTPRIVATE entries for DECL in CTX and the OpenMP parallels
7295 surrounding it. If entries already exist, force them to be some flavor
7296 of private. If there is no enclosing parallel, do nothing. */
7298 void
7299 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7301 splay_tree_node n;
7303 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7304 return;
7308 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7309 if (n != NULL)
7311 if (n->value & GOVD_SHARED)
7312 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7313 else if (n->value & GOVD_MAP)
7314 n->value |= GOVD_MAP_TO_ONLY;
7315 else
7316 return;
7318 else if ((ctx->region_type & ORT_TARGET) != 0)
7320 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7321 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7322 else
7323 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7325 else if (ctx->region_type != ORT_WORKSHARE
7326 && ctx->region_type != ORT_TASKGROUP
7327 && ctx->region_type != ORT_SIMD
7328 && ctx->region_type != ORT_ACC
7329 && !(ctx->region_type & ORT_TARGET_DATA))
7330 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7332 ctx = ctx->outer_context;
7334 while (ctx);
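/* For illustration (hypothetical user code): with

     int vla[n];
     #pragma omp parallel shared (vla)
     ... vla[i] ...

   the gimplified size temporary of VLA's type must become firstprivate
   on the parallel so the region knows the bounds; that is the kind of
   entry added here.  */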
7337 /* Similarly for each of the type sizes of TYPE. */
7339 static void
7340 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7342 if (type == NULL || type == error_mark_node)
7343 return;
7344 type = TYPE_MAIN_VARIANT (type);
7346 if (ctx->privatized_types->add (type))
7347 return;
7349 switch (TREE_CODE (type))
7351 case INTEGER_TYPE:
7352 case ENUMERAL_TYPE:
7353 case BOOLEAN_TYPE:
7354 case REAL_TYPE:
7355 case FIXED_POINT_TYPE:
7356 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7357 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7358 break;
7360 case ARRAY_TYPE:
7361 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7362 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7363 break;
7365 case RECORD_TYPE:
7366 case UNION_TYPE:
7367 case QUAL_UNION_TYPE:
7369 tree field;
7370 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7371 if (TREE_CODE (field) == FIELD_DECL)
7373 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7374 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7377 break;
7379 case POINTER_TYPE:
7380 case REFERENCE_TYPE:
7381 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7382 break;
7384 default:
7385 break;
7388 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7389 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7390 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
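/* E.g. (illustrative) for a two-dimensional VLA type 'int[n][m]', the
   ARRAY_TYPE case above recurses through both the element type and the
   domain, so the size temporaries for N and M are firstprivatized
   alike.  */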
7393 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7395 static void
7396 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7398 splay_tree_node n;
7399 unsigned int nflags;
7400 tree t;
7402 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7403 return;
7405 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7406 there are constructors involved somewhere. The exception is a shared
7407 clause: nothing is privatized in that case. */
7408 if ((flags & GOVD_SHARED) == 0
7409 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7410 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7411 flags |= GOVD_SEEN;
7413 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7414 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7416 /* We shouldn't be re-adding the decl with the same data
7417 sharing class. */
7418 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7419 nflags = n->value | flags;
7420 /* The only combination of data sharing classes we should see is
7421 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7422 reduction variables to be used in data sharing clauses. */
7423 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7424 || ((nflags & GOVD_DATA_SHARE_CLASS)
7425 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7426 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7427 n->value = nflags;
7428 return;
7431 /* When adding a variable-sized variable, we have to handle all sorts
7432 of additional bits of data: the pointer replacement variable, and
7433 the parameters of the type. */
7434 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7436 /* Add the pointer replacement variable as PRIVATE if the variable
7437 replacement is private, else FIRSTPRIVATE since we'll need the
7438 address of the original variable either for SHARED, or for the
7439 copy into or out of the context. */
7440 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7442 if (flags & GOVD_MAP)
7443 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7444 else if (flags & GOVD_PRIVATE)
7445 nflags = GOVD_PRIVATE;
7446 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7447 && (flags & GOVD_FIRSTPRIVATE))
7448 || (ctx->region_type == ORT_TARGET_DATA
7449 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7450 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7451 else
7452 nflags = GOVD_FIRSTPRIVATE;
7453 nflags |= flags & GOVD_SEEN;
7454 t = DECL_VALUE_EXPR (decl);
7455 gcc_assert (INDIRECT_REF_P (t));
7456 t = TREE_OPERAND (t, 0);
7457 gcc_assert (DECL_P (t));
7458 omp_add_variable (ctx, t, nflags);
7461 /* Add all of the variable and type parameters (which should have
7462 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7463 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7464 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7465 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7467 /* The variable-sized variable itself is never SHARED, only some form
7468 of PRIVATE. The sharing would take place via the pointer variable
7469 which we remapped above. */
7470 if (flags & GOVD_SHARED)
7471 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7472 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7474 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7475 alloca statement we generate for the variable, so make sure it
7476 is available. This isn't automatically needed for the SHARED
7477 case, since we won't be allocating local storage then.
7478 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7479 in this case omp_notice_variable will be called later
7480 on when it is gimplified. */
7481 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7482 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7483 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7485 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7486 && omp_privatize_by_reference (decl))
7488 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7490 /* Similar to the direct variable sized case above, we'll need the
7491 size of references being privatized. */
7492 if ((flags & GOVD_SHARED) == 0)
7494 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7495 if (t && DECL_P (t))
7496 omp_notice_variable (ctx, t, true);
7500 if (n != NULL)
7501 n->value |= flags;
7502 else
7503 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7505 /* For reductions clauses in OpenACC loop directives, by default create a
7506 copy clause on the enclosing parallel construct for carrying back the
7507 results. */
7508 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7510 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7511 while (outer_ctx)
7513 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7514 if (n != NULL)
7516 /* Ignore local variables and explicitly declared clauses. */
7517 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7518 break;
7519 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7521 /* According to the OpenACC spec, such a reduction variable
7522 should already have a copy map on a kernels construct,
7523 verify that here. */
7524 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7525 && (n->value & GOVD_MAP));
7527 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7529 /* Remove firstprivate and make it a copy map. */
7530 n->value &= ~GOVD_FIRSTPRIVATE;
7531 n->value |= GOVD_MAP;
7534 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7536 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7537 GOVD_MAP | GOVD_SEEN);
7538 break;
7540 outer_ctx = outer_ctx->outer_context;
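/* Illustration of the OpenACC reduction handling above (hypothetical
   user code):

     #pragma acc parallel
     #pragma acc loop reduction(+:sum)
     for (...) sum += ...;

   SUM gets an implicit GOVD_MAP ('copy') entry on the enclosing
   parallel so the reduction result is carried back, unless an explicit
   clause or a local definition already covers it.  */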
7545 /* Notice a threadprivate variable DECL used in OMP context CTX.
7546 This just prints out diagnostics about threadprivate variable uses
7547 in target regions, order(concurrent) regions and untied tasks. If
7548 DECL2 is non-NULL, prevent this diagnostic on that variable. */
7550 static bool
7551 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7552 tree decl2)
7554 splay_tree_node n;
7555 struct gimplify_omp_ctx *octx;
7557 for (octx = ctx; octx; octx = octx->outer_context)
7558 if ((octx->region_type & ORT_TARGET) != 0
7559 || octx->order_concurrent)
7561 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7562 if (n == NULL)
7564 if (octx->order_concurrent)
7566 error ("threadprivate variable %qE used in a region with"
7567 " %<order(concurrent)%> clause", DECL_NAME (decl));
7568 inform (octx->location, "enclosing region");
7570 else
7572 error ("threadprivate variable %qE used in target region",
7573 DECL_NAME (decl));
7574 inform (octx->location, "enclosing target region");
7576 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7578 if (decl2)
7579 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7582 if (ctx->region_type != ORT_UNTIED_TASK)
7583 return false;
7584 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7585 if (n == NULL)
7587 error ("threadprivate variable %qE used in untied task",
7588 DECL_NAME (decl));
7589 inform (ctx->location, "enclosing task");
7590 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7592 if (decl2)
7593 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7594 return false;
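/* E.g. (hypothetical user code) the untied-task diagnostic above fires
   for:

     int tp;
     #pragma omp threadprivate (tp)
     ...
     #pragma omp task untied
     tp++;

   while uses in target regions and order(concurrent) regions are
   diagnosed by the loop at the top.  */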
7597 /* Return true if global var DECL is device resident. */
7599 static bool
7600 device_resident_p (tree decl)
7602 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7604 if (!attr)
7605 return false;
7607 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7609 tree c = TREE_VALUE (t);
7610 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7611 return true;
7614 return false;
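/* E.g. (illustrative) a global G covered by

     #pragma acc declare device_resident (g)

   carries an "oacc declare target" attribute whose clause chain
   contains a GOMP_MAP_DEVICE_RESIDENT mapping, which the loop above
   detects.  */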
7617 /* Return true if DECL has an ACC DECLARE attribute. */
7619 static bool
7620 is_oacc_declared (tree decl)
7622 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7623 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7624 return declared != NULL_TREE;
7627 /* Determine outer default flags for DECL mentioned in an OMP region
7628 but not declared in an enclosing clause.
7630 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7631 remapped firstprivate instead of shared. To some extent this is
7632 addressed in omp_firstprivatize_type_sizes, but not
7633 effectively. */
7635 static unsigned
7636 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7637 bool in_code, unsigned flags)
7639 enum omp_clause_default_kind default_kind = ctx->default_kind;
7640 enum omp_clause_default_kind kind;
7642 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7643 if (ctx->region_type & ORT_TASK)
7645 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7647 /* The event-handle specified by a detach clause should always be firstprivate,
7648 regardless of the current default. */
7649 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7650 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7652 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7653 default_kind = kind;
7654 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7655 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7656 /* For C/C++ default({,first}private), variables with static storage duration
7657 declared in a namespace or at global scope and referenced in the construct
7658 must be explicitly specified, i.e. this acts as default(none). */
7659 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7660 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7661 && VAR_P (decl)
7662 && is_global_var (decl)
7663 && (DECL_FILE_SCOPE_P (decl)
7664 || (DECL_CONTEXT (decl)
7665 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7666 && !lang_GNU_Fortran ())
7667 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7669 switch (default_kind)
7671 case OMP_CLAUSE_DEFAULT_NONE:
7673 const char *rtype;
7675 if (ctx->region_type & ORT_PARALLEL)
7676 rtype = "parallel";
7677 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7678 rtype = "taskloop";
7679 else if (ctx->region_type & ORT_TASK)
7680 rtype = "task";
7681 else if (ctx->region_type & ORT_TEAMS)
7682 rtype = "teams";
7683 else
7684 gcc_unreachable ();
7686 error ("%qE not specified in enclosing %qs",
7687 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7688 inform (ctx->location, "enclosing %qs", rtype);
7690 /* FALLTHRU */
7691 case OMP_CLAUSE_DEFAULT_SHARED:
7692 flags |= GOVD_SHARED;
7693 break;
7694 case OMP_CLAUSE_DEFAULT_PRIVATE:
7695 flags |= GOVD_PRIVATE;
7696 break;
7697 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7698 flags |= GOVD_FIRSTPRIVATE;
7699 break;
7700 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7701 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7702 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7703 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7705 omp_notice_variable (octx, decl, in_code);
7706 for (; octx; octx = octx->outer_context)
7708 splay_tree_node n2;
7710 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7711 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7712 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7713 continue;
7714 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7716 flags |= GOVD_FIRSTPRIVATE;
7717 goto found_outer;
7719 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7721 flags |= GOVD_SHARED;
7722 goto found_outer;
7727 if (TREE_CODE (decl) == PARM_DECL
7728 || (!is_global_var (decl)
7729 && DECL_CONTEXT (decl) == current_function_decl))
7730 flags |= GOVD_FIRSTPRIVATE;
7731 else
7732 flags |= GOVD_SHARED;
7733 found_outer:
7734 break;
7736 default:
7737 gcc_unreachable ();
7740 return flags;
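/* E.g. (hypothetical user code) the default(none) diagnostic above is
   what rejects

     int x = 0;
     #pragma omp parallel default(none)
     x++;

   with "'x' not specified in enclosing 'parallel'".  */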
7743 /* Return string name for types of OpenACC constructs from ORT_* values. */
7745 static const char *
7746 oacc_region_type_name (enum omp_region_type region_type)
7748 switch (region_type)
7750 case ORT_ACC_DATA:
7751 return "data";
7752 case ORT_ACC_PARALLEL:
7753 return "parallel";
7754 case ORT_ACC_KERNELS:
7755 return "kernels";
7756 case ORT_ACC_SERIAL:
7757 return "serial";
7758 default:
7759 gcc_unreachable ();
7763 /* Determine outer default flags for DECL mentioned in an OACC region
7764 but not declared in an enclosing clause. */
7766 static unsigned
7767 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7769 struct gimplify_omp_ctx *ctx_default = ctx;
7770 /* If no 'default' clause appears on this compute construct... */
7771 if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
7773 /* ..., see if one appears on a lexically containing 'data'
7774 construct. */
7775 while ((ctx_default = ctx_default->outer_context))
7777 if (ctx_default->region_type == ORT_ACC_DATA
7778 && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
7779 break;
7781 /* If not, reset. */
7782 if (!ctx_default)
7783 ctx_default = ctx;
7786 bool on_device = false;
7787 bool is_private = false;
7788 bool declared = is_oacc_declared (decl);
7789 tree type = TREE_TYPE (decl);
7791 if (omp_privatize_by_reference (decl))
7792 type = TREE_TYPE (type);
7794 /* For Fortran COMMON blocks, only the variables used in those blocks are
7795 transferred and remapped. The block itself will have a private clause to
7796 avoid transferring the data twice.
7797 The hook evaluates to false by default. For a variable in Fortran's COMMON
7798 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7799 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7800 the whole block. For C++ and Fortran, it can also be true under certain
7801 other conditions, if DECL_HAS_VALUE_EXPR. */
7802 if (RECORD_OR_UNION_TYPE_P (type))
7803 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7805 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7806 && is_global_var (decl)
7807 && device_resident_p (decl)
7808 && !is_private)
7810 on_device = true;
7811 flags |= GOVD_MAP_TO_ONLY;
7814 switch (ctx->region_type)
7816 case ORT_ACC_KERNELS:
7817 if (is_private)
7818 flags |= GOVD_FIRSTPRIVATE;
7819 else if (AGGREGATE_TYPE_P (type))
7821 /* Aggregates default to 'present_or_copy', or 'present'. */
7822 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7823 flags |= GOVD_MAP;
7824 else
7825 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7827 else
7828 /* Scalars default to 'copy'. */
7829 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7831 break;
7833 case ORT_ACC_PARALLEL:
7834 case ORT_ACC_SERIAL:
7835 if (is_private)
7836 flags |= GOVD_FIRSTPRIVATE;
7837 else if (on_device || declared)
7838 flags |= GOVD_MAP;
7839 else if (AGGREGATE_TYPE_P (type))
7841 /* Aggregates default to 'present_or_copy', or 'present'. */
7842 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7843 flags |= GOVD_MAP;
7844 else
7845 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7847 else
7848 /* Scalars default to 'firstprivate'. */
7849 flags |= GOVD_FIRSTPRIVATE;
7851 break;
7853 default:
7854 gcc_unreachable ();
7857 if (DECL_ARTIFICIAL (decl))
7858 ; /* We can get compiler-generated decls, and should not complain
7859 about them. */
7860 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7862 error ("%qE not specified in enclosing OpenACC %qs construct",
7863 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
7864 oacc_region_type_name (ctx->region_type));
7865 if (ctx_default != ctx)
7866 inform (ctx->location, "enclosing OpenACC %qs construct and",
7867 oacc_region_type_name (ctx->region_type));
7868 inform (ctx_default->location,
7869 "enclosing OpenACC %qs construct with %qs clause",
7870 oacc_region_type_name (ctx_default->region_type),
7871 "default(none)");
7873 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7874 ; /* Handled above. */
7875 else
7876 gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7878 return flags;
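/* Putting the above together (illustrative): in

     #pragma acc parallel
     ... s + a[i] ...

   a scalar S defaults to firstprivate while an aggregate A defaults to
   a 'present_or_copy' map (or 'present' under default(present)); on a
   'kernels' construct the scalar would instead get a forced 'copy'
   map.  */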
7881 /* Record the fact that DECL was used within the OMP context CTX.
7882 IN_CODE is true when real code uses DECL, and false when we should
7883 merely emit default(none) errors. Return true if DECL is going to
7884 be remapped and thus DECL shouldn't be gimplified into its
7885 DECL_VALUE_EXPR (if any). */
7887 static bool
7888 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7890 splay_tree_node n;
7891 unsigned flags = in_code ? GOVD_SEEN : 0;
7892 bool ret = false, shared;
7894 if (error_operand_p (decl))
7895 return false;
7897 if (ctx->region_type == ORT_NONE)
7898 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7900 if (is_global_var (decl))
7902 /* Threadprivate variables are predetermined. */
7903 if (DECL_THREAD_LOCAL_P (decl))
7904 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7906 if (DECL_HAS_VALUE_EXPR_P (decl))
7908 if (ctx->region_type & ORT_ACC)
7909 /* For OpenACC, defer expansion of the value to avoid transferring
7910 privatized common block data instead of the implicitly or explicitly
7911 transferred variables which are in common blocks. */
7913 else
7915 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7917 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7918 return omp_notice_threadprivate_variable (ctx, decl, value);
7922 if (gimplify_omp_ctxp->outer_context == NULL
7923 && VAR_P (decl)
7924 && oacc_get_fn_attrib (current_function_decl))
7926 location_t loc = DECL_SOURCE_LOCATION (decl);
7928 if (lookup_attribute ("omp declare target link",
7929 DECL_ATTRIBUTES (decl)))
7931 error_at (loc,
7932 "%qE with %<link%> clause used in %<routine%> function",
7933 DECL_NAME (decl));
7934 return false;
7936 else if (!lookup_attribute ("omp declare target",
7937 DECL_ATTRIBUTES (decl)))
7939 error_at (loc,
7940 "%qE requires a %<declare%> directive for use "
7941 "in a %<routine%> function", DECL_NAME (decl));
7942 return false;
7947 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7948 if ((ctx->region_type & ORT_TARGET) != 0)
7950 if (ctx->region_type & ORT_ACC)
7951 /* For OpenACC, as remarked above, defer expansion. */
7952 shared = false;
7953 else
7954 shared = true;
7956 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7957 if (n == NULL)
7959 unsigned nflags = flags;
7960 if ((ctx->region_type & ORT_ACC) == 0)
7962 bool is_declare_target = false;
7963 if (is_global_var (decl)
7964 && varpool_node::get_create (decl)->offloadable)
7966 struct gimplify_omp_ctx *octx;
7967 for (octx = ctx->outer_context;
7968 octx; octx = octx->outer_context)
7970 n = splay_tree_lookup (octx->variables,
7971 (splay_tree_key)decl);
7972 if (n
7973 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7974 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7975 break;
7977 is_declare_target = octx == NULL;
7979 if (!is_declare_target)
7981 int gdmk;
7982 enum omp_clause_defaultmap_kind kind;
7983 if (lang_hooks.decls.omp_allocatable_p (decl))
7984 gdmk = GDMK_ALLOCATABLE;
7985 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7986 gdmk = GDMK_SCALAR_TARGET;
7987 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7988 gdmk = GDMK_SCALAR;
7989 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7990 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7991 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7992 == POINTER_TYPE)))
7993 gdmk = GDMK_POINTER;
7994 else
7995 gdmk = GDMK_AGGREGATE;
7996 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7997 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7999 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
8000 nflags |= GOVD_FIRSTPRIVATE;
8001 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
8002 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
8003 else
8004 gcc_unreachable ();
8006 else if (ctx->defaultmap[gdmk] == 0)
8008 tree d = lang_hooks.decls.omp_report_decl (decl);
8009 error ("%qE not specified in enclosing %<target%>",
8010 DECL_NAME (d));
8011 inform (ctx->location, "enclosing %<target%>");
8013 else if (ctx->defaultmap[gdmk]
8014 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
8015 nflags |= ctx->defaultmap[gdmk];
8016 else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
8018 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8019 nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
8021 else
8023 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8024 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
8029 struct gimplify_omp_ctx *octx = ctx->outer_context;
8030 if ((ctx->region_type & ORT_ACC) && octx)
8032 /* Look in outer OpenACC contexts, to see if there's a
8033 data attribute for this variable. */
8034 omp_notice_variable (octx, decl, in_code);
8036 for (; octx; octx = octx->outer_context)
8038 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
8039 break;
8040 splay_tree_node n2
8041 = splay_tree_lookup (octx->variables,
8042 (splay_tree_key) decl);
8043 if (n2)
8045 if (octx->region_type == ORT_ACC_HOST_DATA)
8046 error ("variable %qE declared in enclosing "
8047 "%<host_data%> region", DECL_NAME (decl));
8048 nflags |= GOVD_MAP;
8049 if (octx->region_type == ORT_ACC_DATA
8050 && (n2->value & GOVD_MAP_0LEN_ARRAY))
8051 nflags |= GOVD_MAP_0LEN_ARRAY;
8052 goto found_outer;
8057 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
8058 | GOVD_MAP_ALLOC_ONLY)) == flags)
8060 tree type = TREE_TYPE (decl);
8062 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8063 && omp_privatize_by_reference (decl))
8064 type = TREE_TYPE (type);
8065 if (!omp_mappable_type (type))
8067 error ("%qD referenced in target region does not have "
8068 "a mappable type", decl);
8069 nflags |= GOVD_MAP | GOVD_EXPLICIT;
8071 else
8073 if ((ctx->region_type & ORT_ACC) != 0)
8074 nflags = oacc_default_clause (ctx, decl, flags);
8075 else
8076 nflags |= GOVD_MAP;
8079 found_outer:
8080 omp_add_variable (ctx, decl, nflags);
8082 else
8084 /* If nothing changed, there's nothing left to do. */
8085 if ((n->value & flags) == flags)
8086 return ret;
8087 flags |= n->value;
8088 n->value = flags;
8090 goto do_outer;
8093 if (n == NULL)
8095 if (ctx->region_type == ORT_WORKSHARE
8096 || ctx->region_type == ORT_TASKGROUP
8097 || ctx->region_type == ORT_SIMD
8098 || ctx->region_type == ORT_ACC
8099 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8100 goto do_outer;
8102 flags = omp_default_clause (ctx, decl, in_code, flags);
8104 if ((flags & GOVD_PRIVATE)
8105 && lang_hooks.decls.omp_private_outer_ref (decl))
8106 flags |= GOVD_PRIVATE_OUTER_REF;
8108 omp_add_variable (ctx, decl, flags);
8110 shared = (flags & GOVD_SHARED) != 0;
8111 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8112 goto do_outer;
8115 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd lower
8116 bound, bound or increment expressions; don't turn those into simd arrays. */
8117 if (ctx->region_type == ORT_SIMD
8118 && ctx->in_for_exprs
8119 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8120 == GOVD_PRIVATE))
8121 flags &= ~GOVD_SEEN;
8123 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8124 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8125 && DECL_SIZE (decl))
8127 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8129 splay_tree_node n2;
8130 tree t = DECL_VALUE_EXPR (decl);
8131 gcc_assert (INDIRECT_REF_P (t));
8132 t = TREE_OPERAND (t, 0);
8133 gcc_assert (DECL_P (t));
8134 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8135 n2->value |= GOVD_SEEN;
8137 else if (omp_privatize_by_reference (decl)
8138 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8139 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8140 != INTEGER_CST))
8142 splay_tree_node n2;
8143 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8144 gcc_assert (DECL_P (t));
8145 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8146 if (n2)
8147 omp_notice_variable (ctx, t, true);
8151 if (ctx->region_type & ORT_ACC)
8152 /* For OpenACC, as remarked above, defer expansion. */
8153 shared = false;
8154 else
8155 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8156 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8158 /* If nothing changed, there's nothing left to do. */
8159 if ((n->value & flags) == flags)
8160 return ret;
8161 flags |= n->value;
8162 n->value = flags;
8164 do_outer:
8165 /* If the variable is private in the current context, then we don't
8166 need to propagate anything to an outer context. */
8167 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8168 return ret;
8169 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8170 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8171 return ret;
8172 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8173 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8174 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8175 return ret;
8176 if (ctx->outer_context
8177 && omp_notice_variable (ctx->outer_context, decl, in_code))
8178 return true;
8179 return ret;
8182 /* Verify that DECL is private within CTX. If there's specific information
8183 to the contrary in the innermost scope, generate an error. */
8185 static bool
8186 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8188 splay_tree_node n;
8190 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8191 if (n != NULL)
8193 if (n->value & GOVD_SHARED)
8195 if (ctx == gimplify_omp_ctxp)
8197 if (simd)
8198 error ("iteration variable %qE is predetermined linear",
8199 DECL_NAME (decl));
8200 else
8201 error ("iteration variable %qE should be private",
8202 DECL_NAME (decl));
8203 n->value = GOVD_PRIVATE;
8204 return true;
8206 else
8207 return false;
8209 else if ((n->value & GOVD_EXPLICIT) != 0
8210 && (ctx == gimplify_omp_ctxp
8211 || (ctx->region_type == ORT_COMBINED_PARALLEL
8212 && gimplify_omp_ctxp->outer_context == ctx)))
8214 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8215 error ("iteration variable %qE should not be firstprivate",
8216 DECL_NAME (decl));
8217 else if ((n->value & GOVD_REDUCTION) != 0)
8218 error ("iteration variable %qE should not be reduction",
8219 DECL_NAME (decl));
8220 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8221 error ("iteration variable %qE should not be linear",
8222 DECL_NAME (decl));
8224 return (ctx == gimplify_omp_ctxp
8225 || (ctx->region_type == ORT_COMBINED_PARALLEL
8226 && gimplify_omp_ctxp->outer_context == ctx));
8229 if (ctx->region_type != ORT_WORKSHARE
8230 && ctx->region_type != ORT_TASKGROUP
8231 && ctx->region_type != ORT_SIMD
8232 && ctx->region_type != ORT_ACC)
8233 return false;
8234 else if (ctx->outer_context)
8235 return omp_is_private (ctx->outer_context, decl, simd);
8236 return false;
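/* E.g. (illustrative) the checks above reject

     #pragma omp for firstprivate (i)
     for (i = 0; i < n; i++) ...

   with "iteration variable 'i' should not be firstprivate".  */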
8239 /* Return true if DECL is private within a parallel region
8240 that binds to the current construct's context or in parallel
8241 region's REDUCTION clause. */
8243 static bool
8244 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8246 splay_tree_node n;
8250 ctx = ctx->outer_context;
8251 if (ctx == NULL)
8253 if (is_global_var (decl))
8254 return false;
8256 /* References might be private, but might be shared too.
8257 When checking for copyprivate, assume they might be
8258 private; otherwise assume they might be shared. */
8259 if (copyprivate)
8260 return true;
8262 if (omp_privatize_by_reference (decl))
8263 return false;
8265 /* Treat C++ privatized non-static data members outside
8266 of the privatization the same. */
8267 if (omp_member_access_dummy_var (decl))
8268 return false;
8270 return true;
8273 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8275 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8276 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8278 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8279 || n == NULL
8280 || (n->value & GOVD_MAP) == 0)
8281 continue;
8282 return false;
8285 if (n != NULL)
8287 if ((n->value & GOVD_LOCAL) != 0
8288 && omp_member_access_dummy_var (decl))
8289 return false;
8290 return (n->value & GOVD_SHARED) == 0;
8293 if (ctx->region_type == ORT_WORKSHARE
8294 || ctx->region_type == ORT_TASKGROUP
8295 || ctx->region_type == ORT_SIMD
8296 || ctx->region_type == ORT_ACC)
8297 continue;
8299 break;
8301 while (1);
8302 return false;
8305 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8307 static tree
8308 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8310 tree t = *tp;
8312 /* Return T if it is a DECL_EXPR declaring the decl passed in DATA. */
8313 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8314 return t;
8316 if (IS_TYPE_OR_DECL_P (t))
8317 *walk_subtrees = 0;
8318 return NULL_TREE;
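/* Illustrative use:

     if (walk_tree (&body, find_decl_expr, decl, NULL))
       ...

   answers whether BODY contains a DECL_EXPR declaring DECL.  */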
8322 /* Gimplify the affinity clause but effectively ignore it.
8323 Generate:
8324 var = begin;
8325 if ((step > 0) ? var <= end : var > end)
8326 locator_var_expr; */
8328 static void
8329 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8331 tree last_iter = NULL_TREE;
8332 tree last_bind = NULL_TREE;
8333 tree label = NULL_TREE;
8334 tree *last_body = NULL;
8335 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8336 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8338 tree t = OMP_CLAUSE_DECL (c);
8339 if (TREE_CODE (t) == TREE_LIST
8340 && TREE_PURPOSE (t)
8341 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8343 if (TREE_VALUE (t) == null_pointer_node)
8344 continue;
8345 if (TREE_PURPOSE (t) != last_iter)
8347 if (last_bind)
8349 append_to_statement_list (label, last_body);
8350 gimplify_and_add (last_bind, pre_p);
8351 last_bind = NULL_TREE;
8353 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8355 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8356 is_gimple_val, fb_rvalue) == GS_ERROR
8357 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8358 is_gimple_val, fb_rvalue) == GS_ERROR
8359 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8360 is_gimple_val, fb_rvalue) == GS_ERROR
8361 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8362 is_gimple_val, fb_rvalue)
8363 == GS_ERROR))
8364 return;
8366 last_iter = TREE_PURPOSE (t);
8367 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8368 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8369 NULL, block);
8370 last_body = &BIND_EXPR_BODY (last_bind);
8371 tree cond = NULL_TREE;
8372 location_t loc = OMP_CLAUSE_LOCATION (c);
8373 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8375 tree var = TREE_VEC_ELT (it, 0);
8376 tree begin = TREE_VEC_ELT (it, 1);
8377 tree end = TREE_VEC_ELT (it, 2);
8378 tree step = TREE_VEC_ELT (it, 3);
8379 loc = DECL_SOURCE_LOCATION (var);
8380 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8381 var, begin);
8382 append_to_statement_list_force (tem, last_body);
8384 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8385 step, build_zero_cst (TREE_TYPE (step)));
8386 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8387 var, end);
8388 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8389 var, end);
8390 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8391 cond1, cond2, cond3);
8392 if (cond)
8393 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8394 boolean_type_node, cond, cond1);
8395 else
8396 cond = cond1;
8398 tree cont_label = create_artificial_label (loc);
8399 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8400 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8401 void_node,
8402 build_and_jump (&cont_label));
8403 append_to_statement_list_force (tem, last_body);
8405 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8407 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8408 last_body);
8409 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8411 if (error_operand_p (TREE_VALUE (t)))
8412 return;
8413 append_to_statement_list_force (TREE_VALUE (t), last_body);
8414 TREE_VALUE (t) = null_pointer_node;
8416 else
8418 if (last_bind)
8420 append_to_statement_list (label, last_body);
8421 gimplify_and_add (last_bind, pre_p);
8422 last_bind = NULL_TREE;
8424 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8426 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8427 NULL, is_gimple_val, fb_rvalue);
8428 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8430 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8431 return;
8432 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8433 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8434 return;
8435 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8438 if (last_bind)
8440 append_to_statement_list (label, last_body);
8441 gimplify_and_add (last_bind, pre_p);
8443 return;
8446 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8447 lower all the depend clauses by populating the corresponding depend
8448 array. Returns 0 if there are no such depend clauses, 2 if all the
8449 depend clauses should be removed (on error), 1 otherwise. */
8451 static int
8452 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8454 tree c;
8455 gimple *g;
8456 size_t n[5] = { 0, 0, 0, 0, 0 };
8457 bool unused[5];
8458 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8459 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8460 size_t i, j;
8461 location_t first_loc = UNKNOWN_LOCATION;
8463 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8464 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8466 switch (OMP_CLAUSE_DEPEND_KIND (c))
8468 case OMP_CLAUSE_DEPEND_IN:
8469 i = 2;
8470 break;
8471 case OMP_CLAUSE_DEPEND_OUT:
8472 case OMP_CLAUSE_DEPEND_INOUT:
8473 i = 0;
8474 break;
8475 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8476 i = 1;
8477 break;
8478 case OMP_CLAUSE_DEPEND_DEPOBJ:
8479 i = 3;
8480 break;
8481 case OMP_CLAUSE_DEPEND_INOUTSET:
8482 i = 4;
8483 break;
8484 default:
8485 gcc_unreachable ();
8487 tree t = OMP_CLAUSE_DECL (c);
8488 if (first_loc == UNKNOWN_LOCATION)
8489 first_loc = OMP_CLAUSE_LOCATION (c);
8490 if (TREE_CODE (t) == TREE_LIST
8491 && TREE_PURPOSE (t)
8492 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8494 if (TREE_PURPOSE (t) != last_iter)
8496 tree tcnt = size_one_node;
8497 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8499 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8500 is_gimple_val, fb_rvalue) == GS_ERROR
8501 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8502 is_gimple_val, fb_rvalue) == GS_ERROR
8503 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8504 is_gimple_val, fb_rvalue) == GS_ERROR
8505 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8506 is_gimple_val, fb_rvalue)
8507 == GS_ERROR))
8508 return 2;
8509 tree var = TREE_VEC_ELT (it, 0);
8510 tree begin = TREE_VEC_ELT (it, 1);
8511 tree end = TREE_VEC_ELT (it, 2);
8512 tree step = TREE_VEC_ELT (it, 3);
8513 tree orig_step = TREE_VEC_ELT (it, 4);
8514 tree type = TREE_TYPE (var);
8515 tree stype = TREE_TYPE (step);
8516 location_t loc = DECL_SOURCE_LOCATION (var);
8517 tree endmbegin;
8518 /* Compute count for this iterator as
8519 orig_step > 0
8520 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8521 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8522 and compute product of those for the entire depend
8523 clause. */
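/* Worked example (illustrative numbers): begin=0, end=10, step=3,
   orig_step=3 gives (10 - 0 + 2) / 3 = 4 iterations; begin=10, end=0,
   step=-3 gives (0 - 10 + (-2)) / (-3) = 4 as well.  */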
8524 if (POINTER_TYPE_P (type))
8525 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8526 stype, end, begin);
8527 else
8528 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8529 end, begin);
8530 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8531 step,
8532 build_int_cst (stype, 1));
8533 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8534 build_int_cst (stype, 1));
8535 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8536 unshare_expr (endmbegin),
8537 stepm1);
8538 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8539 pos, step);
8540 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8541 endmbegin, stepp1);
8542 if (TYPE_UNSIGNED (stype))
8544 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8545 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8547 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8548 neg, step);
8549 step = NULL_TREE;
8550 tree cond = fold_build2_loc (loc, LT_EXPR,
8551 boolean_type_node,
8552 begin, end);
8553 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8554 build_int_cst (stype, 0));
8555 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8556 end, begin);
8557 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8558 build_int_cst (stype, 0));
8559 tree osteptype = TREE_TYPE (orig_step);
8560 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8561 orig_step,
8562 build_int_cst (osteptype, 0));
8563 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8564 cond, pos, neg);
8565 cnt = fold_convert_loc (loc, sizetype, cnt);
8566 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8567 fb_rvalue) == GS_ERROR)
8568 return 2;
8569 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8571 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8572 fb_rvalue) == GS_ERROR)
8573 return 2;
8574 last_iter = TREE_PURPOSE (t);
8575 last_count = tcnt;
8577 if (counts[i] == NULL_TREE)
8578 counts[i] = last_count;
8579 else
8580 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8581 PLUS_EXPR, counts[i], last_count);
8583 else
8584 n[i]++;
8586 for (i = 0; i < 5; i++)
8587 if (counts[i])
8588 break;
8589 if (i == 5)
8590 return 0;
8592 tree total = size_zero_node;
8593 for (i = 0; i < 5; i++)
8595 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8596 if (counts[i] == NULL_TREE)
8597 counts[i] = size_zero_node;
8598 if (n[i])
8599 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8600 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8601 fb_rvalue) == GS_ERROR)
8602 return 2;
8603 total = size_binop (PLUS_EXPR, total, counts[i]);
8606 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8607 == GS_ERROR)
8608 return 2;
8609 bool is_old = unused[1] && unused[3] && unused[4];
8610 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8611 size_int (is_old ? 1 : 4));
8612 if (!unused[4])
8613 totalpx = size_binop (PLUS_EXPR, totalpx,
8614 size_binop (MULT_EXPR, counts[4], size_int (2)));
8615 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8616 tree array = create_tmp_var_raw (type);
8617 TREE_ADDRESSABLE (array) = 1;
8618 if (!poly_int_tree_p (totalpx))
8620 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8621 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8622 if (gimplify_omp_ctxp)
8624 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8625 while (ctx
8626 && (ctx->region_type == ORT_WORKSHARE
8627 || ctx->region_type == ORT_TASKGROUP
8628 || ctx->region_type == ORT_SIMD
8629 || ctx->region_type == ORT_ACC))
8630 ctx = ctx->outer_context;
8631 if (ctx)
8632 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8634 gimplify_vla_decl (array, pre_p);
8636 else
8637 gimple_add_tmp_var (array);
8638 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8639 NULL_TREE);
8640 tree tem;
8641 if (!is_old)
8643 tem = build2 (MODIFY_EXPR, void_type_node, r,
8644 build_int_cst (ptr_type_node, 0));
8645 gimplify_and_add (tem, pre_p);
8646 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8647 NULL_TREE);
8649 tem = build2 (MODIFY_EXPR, void_type_node, r,
8650 fold_convert (ptr_type_node, total));
8651 gimplify_and_add (tem, pre_p);
8652 for (i = 1; i < (is_old ? 2 : 4); i++)
8654 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8655 NULL_TREE, NULL_TREE);
8656 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8657 gimplify_and_add (tem, pre_p);
8660 tree cnts[6];
8661 for (j = 5; j; j--)
8662 if (!unused[j - 1])
8663 break;
8664 for (i = 0; i < 5; i++)
8666 if (i && (i >= j || unused[i - 1]))
8668 cnts[i] = cnts[i - 1];
8669 continue;
8671 cnts[i] = create_tmp_var (sizetype);
8672 if (i == 0)
8673 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8674 else
8676 tree t;
8677 if (is_old)
8678 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8679 else
8680 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8681 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8682 == GS_ERROR)
8683 return 2;
8684 g = gimple_build_assign (cnts[i], t);
8686 gimple_seq_add_stmt (pre_p, g);
8688 if (unused[4])
8689 cnts[5] = NULL_TREE;
8690 else
8692 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8693 cnts[5] = create_tmp_var (sizetype);
8694 g = gimple_build_assign (cnts[5], t);
8695 gimple_seq_add_stmt (pre_p, g);
8698 last_iter = NULL_TREE;
8699 tree last_bind = NULL_TREE;
8700 tree *last_body = NULL;
8701 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8702 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8704 switch (OMP_CLAUSE_DEPEND_KIND (c))
8706 case OMP_CLAUSE_DEPEND_IN:
8707 i = 2;
8708 break;
8709 case OMP_CLAUSE_DEPEND_OUT:
8710 case OMP_CLAUSE_DEPEND_INOUT:
8711 i = 0;
8712 break;
8713 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8714 i = 1;
8715 break;
8716 case OMP_CLAUSE_DEPEND_DEPOBJ:
8717 i = 3;
8718 break;
8719 case OMP_CLAUSE_DEPEND_INOUTSET:
8720 i = 4;
8721 break;
8722 default:
8723 gcc_unreachable ();
8725 tree t = OMP_CLAUSE_DECL (c);
8726 if (TREE_CODE (t) == TREE_LIST
8727 && TREE_PURPOSE (t)
8728 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8730 if (TREE_PURPOSE (t) != last_iter)
8732 if (last_bind)
8733 gimplify_and_add (last_bind, pre_p);
8734 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8735 last_bind = build3 (BIND_EXPR, void_type_node,
8736 BLOCK_VARS (block), NULL, block);
8737 TREE_SIDE_EFFECTS (last_bind) = 1;
8738 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8739 tree *p = &BIND_EXPR_BODY (last_bind);
8740 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8742 tree var = TREE_VEC_ELT (it, 0);
8743 tree begin = TREE_VEC_ELT (it, 1);
8744 tree end = TREE_VEC_ELT (it, 2);
8745 tree step = TREE_VEC_ELT (it, 3);
8746 tree orig_step = TREE_VEC_ELT (it, 4);
8747 tree type = TREE_TYPE (var);
8748 location_t loc = DECL_SOURCE_LOCATION (var);
8749 /* Emit:
8750 var = begin;
8751 goto cond_label;
8752 beg_label:
8753 ...
8754 var = var + step;
8755 cond_label:
8756 if (orig_step > 0) {
8757 if (var < end) goto beg_label;
8758 } else {
8759 if (var > end) goto beg_label;
8760 }
8761 for each iterator, with inner iterators added to
8762 the ... above. */
8763 tree beg_label = create_artificial_label (loc);
8764 tree cond_label = NULL_TREE;
8765 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8766 var, begin);
8767 append_to_statement_list_force (tem, p);
8768 tem = build_and_jump (&cond_label);
8769 append_to_statement_list_force (tem, p);
8770 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8771 append_to_statement_list (tem, p);
8772 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8773 NULL_TREE, NULL_TREE);
8774 TREE_SIDE_EFFECTS (bind) = 1;
8775 SET_EXPR_LOCATION (bind, loc);
8776 append_to_statement_list_force (bind, p);
8777 if (POINTER_TYPE_P (type))
8778 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8779 var, fold_convert_loc (loc, sizetype,
8780 step));
8781 else
8782 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8783 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8784 var, tem);
8785 append_to_statement_list_force (tem, p);
8786 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8787 append_to_statement_list (tem, p);
8788 tree cond = fold_build2_loc (loc, LT_EXPR,
8789 boolean_type_node,
8790 var, end);
8791 tree pos
8792 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8793 cond, build_and_jump (&beg_label),
8794 void_node);
8795 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8796 var, end);
8797 tree neg
8798 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8799 cond, build_and_jump (&beg_label),
8800 void_node);
8801 tree osteptype = TREE_TYPE (orig_step);
8802 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8803 orig_step,
8804 build_int_cst (osteptype, 0));
8805 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8806 cond, pos, neg);
8807 append_to_statement_list_force (tem, p);
8808 p = &BIND_EXPR_BODY (bind);
8810 last_body = p;
8812 last_iter = TREE_PURPOSE (t);
8813 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8815 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8816 0), last_body);
8817 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8819 if (error_operand_p (TREE_VALUE (t)))
8820 return 2;
8821 if (TREE_VALUE (t) != null_pointer_node)
8822 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8823 if (i == 4)
8825 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8826 NULL_TREE, NULL_TREE);
8827 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8828 NULL_TREE, NULL_TREE);
8829 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8830 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8831 void_type_node, r, r2);
8832 append_to_statement_list_force (tem, last_body);
8833 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8834 void_type_node, cnts[i],
8835 size_binop (PLUS_EXPR, cnts[i],
8836 size_int (1)));
8837 append_to_statement_list_force (tem, last_body);
8838 i = 5;
8840 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8841 NULL_TREE, NULL_TREE);
8842 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8843 void_type_node, r, TREE_VALUE (t));
8844 append_to_statement_list_force (tem, last_body);
8845 if (i == 5)
8847 r = build4 (ARRAY_REF, ptr_type_node, array,
8848 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8849 NULL_TREE, NULL_TREE);
8850 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8851 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8852 void_type_node, r, tem);
8853 append_to_statement_list_force (tem, last_body);
8855 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8856 void_type_node, cnts[i],
8857 size_binop (PLUS_EXPR, cnts[i],
8858 size_int (1 + (i == 5))));
8859 append_to_statement_list_force (tem, last_body);
8860 TREE_VALUE (t) = null_pointer_node;
8862 else
8864 if (last_bind)
8866 gimplify_and_add (last_bind, pre_p);
8867 last_bind = NULL_TREE;
8869 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8871 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8872 NULL, is_gimple_val, fb_rvalue);
8873 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8875 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8876 return 2;
8877 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8878 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8879 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8880 is_gimple_val, fb_rvalue) == GS_ERROR)
8881 return 2;
8882 if (i == 4)
8884 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8885 NULL_TREE, NULL_TREE);
8886 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8887 NULL_TREE, NULL_TREE);
8888 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8889 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8890 gimplify_and_add (tem, pre_p);
8891 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8892 cnts[i],
8893 size_int (1)));
8894 gimple_seq_add_stmt (pre_p, g);
8895 i = 5;
8897 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8898 NULL_TREE, NULL_TREE);
8899 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8900 gimplify_and_add (tem, pre_p);
8901 if (i == 5)
8903 r = build4 (ARRAY_REF, ptr_type_node, array,
8904 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8905 NULL_TREE, NULL_TREE);
8906 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8907 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8909 gimplify_and_add (tem, pre_p);
8911 g = gimple_build_assign (cnts[i],
8912 size_binop (PLUS_EXPR, cnts[i],
8913 size_int (1 + (i == 5))));
8914 gimple_seq_add_stmt (pre_p, g);
8917 if (last_bind)
8918 gimplify_and_add (last_bind, pre_p);
8919 tree cond = boolean_false_node;
8920 if (is_old)
8922 if (!unused[0])
8923 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8924 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8925 size_int (2)));
8926 if (!unused[2])
8927 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8928 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8929 cnts[2],
8930 size_binop_loc (first_loc, PLUS_EXPR,
8931 totalpx,
8932 size_int (1))));
8934 else
8936 tree prev = size_int (5);
8937 for (i = 0; i < 5; i++)
8939 if (unused[i])
8940 continue;
8941 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8942 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8943 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8944 cnts[i], unshare_expr (prev)));
8947 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8948 build_call_expr_loc (first_loc,
8949 builtin_decl_explicit (BUILT_IN_TRAP),
8950 0), void_node);
8951 gimplify_and_add (tem, pre_p);
8952 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8953 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8954 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8955 OMP_CLAUSE_CHAIN (c) = *list_p;
8956 *list_p = c;
8957 return 1;
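/* Sketch of the depend array laid out above (derived from the code;
   for orientation only).  New format:

     [0] 0
     [1] total number of addresses
     [2] number of out/inout addresses
     [3] number of mutexinoutset addresses
     [4] number of in addresses
     [5...] the addresses, inoutset entries taking two slots each

   Old format (only in/out/inout kinds present): [0] holds the total,
   [1] the out/inout count, and the addresses start at [2].  */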
8960 /* For a set of mappings describing an array section pointed to by a struct
8961 (or derived type, etc.) component, create an "alloc" or "release" node to
8962 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8963 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8964 be created that is inserted into the list of mapping nodes attached to the
8965 directive being processed -- not part of the sorted list of nodes after
8966 GOMP_MAP_STRUCT.
8968 CODE is the code of the directive being processed. GRP_START and GRP_END
8969 are the first and last of two or three nodes representing this array section
8970 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8971 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8972 filled with the additional node described above, if needed.
8974 This function does not add the new nodes to any lists itself. It is the
8975 responsibility of the caller to do that. */
8977 static tree
8978 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8979 tree *extra_node)
8981 enum gomp_map_kind mkind
8982 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8983 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8985 gcc_assert (grp_start != grp_end);
8987 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8988 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8989 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8990 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8991 tree grp_mid = NULL_TREE;
8992 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8993 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8995 if (grp_mid
8996 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8997 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8998 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8999 else
9000 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
9002 if (grp_mid
9003 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9004 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
9005 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
9007 tree c3
9008 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9009 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
9010 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
9011 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
9012 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
9014 *extra_node = c3;
9016 else
9017 *extra_node = NULL_TREE;
9019 return c2;
9022 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
9023 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of
9024 the access, with any compile-time-constant component of the offset
9025 folded into *POFFSETP. Return the containing object. */
9029 static tree
9030 extract_base_bit_offset (tree base, poly_int64 *bitposp,
9031 poly_offset_int *poffsetp)
9033 tree offset;
9034 poly_int64 bitsize, bitpos;
9035 machine_mode mode;
9036 int unsignedp, reversep, volatilep = 0;
9037 poly_offset_int poffset;
9039 STRIP_NOPS (base);
9041 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
9042 &unsignedp, &reversep, &volatilep);
9044 STRIP_NOPS (base);
9046 if (offset && poly_int_tree_p (offset))
9048 poffset = wi::to_poly_offset (offset);
9049 offset = NULL_TREE;
9051 else
9052 poffset = 0;
9054 if (maybe_ne (bitpos, 0))
9055 poffset += bits_to_bytes_round_down (bitpos);
9057 *bitposp = bitpos;
9058 *poffsetp = poffset;
9060 return base;
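/* E.g. (illustrative) for BASE 's.f[3]' where field F is an array of
   4-byte ints placed at byte 16 within S, the function returns the
   decl S with *BITPOSP set to 224 (bits) and *POFFSETP to 28 (bytes).  */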
9063 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
9064 started processing the group yet. The TEMPORARY mark is used when we first
9065 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9066 when we have processed all the group's children (i.e. all the base pointers
9067 referred to by the group's mapping nodes, recursively). */
9069 enum omp_tsort_mark {
9070 UNVISITED,
9071 TEMPORARY,
9072 PERMANENT
9075 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
9076 but ignores side effects in the equality comparisons. */
9078 struct tree_operand_hash_no_se : tree_operand_hash
9080 static inline bool equal (const value_type &,
9081 const compare_type &);
9084 inline bool
9085 tree_operand_hash_no_se::equal (const value_type &t1,
9086 const compare_type &t2)
9088 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
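/* Illustrative use as a hash policy (assuming the mapping-group code
   below):

     hash_map<tree_operand_hash_no_se, omp_mapping_group *> grpmap;

   so equal-looking address expressions share a slot even when they
   contain side effects.  */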
9091 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9092 clause. */
9094 struct omp_mapping_group {
9095 tree *grp_start;
9096 tree grp_end;
9097 omp_tsort_mark mark;
9098 /* If we've removed the group but need to reindex, mark the group as
9099 deleted. */
9100 bool deleted;
9101 struct omp_mapping_group *sibling;
9102 struct omp_mapping_group *next;
9105 DEBUG_FUNCTION void
9106 debug_mapping_group (omp_mapping_group *grp)
9108 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9109 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9110 debug_generic_expr (*grp->grp_start);
9111 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9114 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9115 isn't one. */
9117 static tree
9118 omp_get_base_pointer (tree expr)
9120 while (TREE_CODE (expr) == ARRAY_REF
9121 || TREE_CODE (expr) == COMPONENT_REF)
9122 expr = TREE_OPERAND (expr, 0);
9124 if (INDIRECT_REF_P (expr)
9125 || (TREE_CODE (expr) == MEM_REF
9126 && integer_zerop (TREE_OPERAND (expr, 1))))
9128 expr = TREE_OPERAND (expr, 0);
9129 while (TREE_CODE (expr) == COMPOUND_EXPR)
9130 expr = TREE_OPERAND (expr, 1);
9131 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9132 expr = TREE_OPERAND (expr, 0);
9133 if (TREE_CODE (expr) == SAVE_EXPR)
9134 expr = TREE_OPERAND (expr, 0);
9135 STRIP_NOPS (expr);
9136 return expr;
9139 return NULL_TREE;
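/* For instance (illustrative): for EXPR == s->a[3].b the loops above strip
   the COMPONENT_REF and ARRAY_REF down to the INDIRECT_REF, strip the
   dereference, and return the pointer 's'; for EXPR == x.y there is no
   indirection, so NULL_TREE is returned. */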
9142 /* Remove COMPONENT_REFS and indirections from EXPR. */
9144 static tree
9145 omp_strip_components_and_deref (tree expr)
9147 while (TREE_CODE (expr) == COMPONENT_REF
9148 || INDIRECT_REF_P (expr)
9149 || (TREE_CODE (expr) == MEM_REF
9150 && integer_zerop (TREE_OPERAND (expr, 1)))
9151 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9152 || TREE_CODE (expr) == COMPOUND_EXPR)
9153 if (TREE_CODE (expr) == COMPOUND_EXPR)
9154 expr = TREE_OPERAND (expr, 1);
9155 else
9156 expr = TREE_OPERAND (expr, 0);
9158 STRIP_NOPS (expr);
9160 return expr;
9163 static tree
9164 omp_strip_indirections (tree expr)
9166 while (INDIRECT_REF_P (expr)
9167 || (TREE_CODE (expr) == MEM_REF
9168 && integer_zerop (TREE_OPERAND (expr, 1))))
9169 expr = TREE_OPERAND (expr, 0);
9171 return expr;
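/* Contrasting the two helpers above (illustrative): given 'a->b.c',
   omp_strip_components_and_deref returns the DECL 'a', whereas
   omp_strip_indirections only removes dereferences, so it maps '*p' to 'p'
   but leaves a COMPONENT_REF such as 's.f' unchanged. */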
9174 /* An attach or detach operation depends directly on the address being
9175 attached/detached. Return that address, or none if there are no
9176 attachments/detachments. */
9178 static tree
9179 omp_get_attachment (omp_mapping_group *grp)
9181 tree node = *grp->grp_start;
9183 switch (OMP_CLAUSE_MAP_KIND (node))
9185 case GOMP_MAP_TO:
9186 case GOMP_MAP_FROM:
9187 case GOMP_MAP_TOFROM:
9188 case GOMP_MAP_ALWAYS_FROM:
9189 case GOMP_MAP_ALWAYS_TO:
9190 case GOMP_MAP_ALWAYS_TOFROM:
9191 case GOMP_MAP_FORCE_FROM:
9192 case GOMP_MAP_FORCE_TO:
9193 case GOMP_MAP_FORCE_TOFROM:
9194 case GOMP_MAP_FORCE_PRESENT:
9195 case GOMP_MAP_PRESENT_ALLOC:
9196 case GOMP_MAP_PRESENT_FROM:
9197 case GOMP_MAP_PRESENT_TO:
9198 case GOMP_MAP_PRESENT_TOFROM:
9199 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9200 case GOMP_MAP_ALWAYS_PRESENT_TO:
9201 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9202 case GOMP_MAP_ALLOC:
9203 case GOMP_MAP_RELEASE:
9204 case GOMP_MAP_DELETE:
9205 case GOMP_MAP_FORCE_ALLOC:
9206 if (node == grp->grp_end)
9207 return NULL_TREE;
9209 node = OMP_CLAUSE_CHAIN (node);
9210 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9212 gcc_assert (node != grp->grp_end);
9213 node = OMP_CLAUSE_CHAIN (node);
9215 if (node)
9216 switch (OMP_CLAUSE_MAP_KIND (node))
9218 case GOMP_MAP_POINTER:
9219 case GOMP_MAP_ALWAYS_POINTER:
9220 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9221 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9222 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9223 return NULL_TREE;
9225 case GOMP_MAP_ATTACH_DETACH:
9226 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9227 return OMP_CLAUSE_DECL (node);
9229 default:
9230 internal_error ("unexpected mapping node");
9232 return error_mark_node;
9234 case GOMP_MAP_TO_PSET:
9235 gcc_assert (node != grp->grp_end);
9236 node = OMP_CLAUSE_CHAIN (node);
9237 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9238 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9239 return OMP_CLAUSE_DECL (node);
9240 else
9241 internal_error ("unexpected mapping node");
9242 return error_mark_node;
9244 case GOMP_MAP_ATTACH:
9245 case GOMP_MAP_DETACH:
9246 node = OMP_CLAUSE_CHAIN (node);
9247 if (!node || *grp->grp_start == grp->grp_end)
9248 return OMP_CLAUSE_DECL (*grp->grp_start);
9249 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9250 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9251 return OMP_CLAUSE_DECL (*grp->grp_start);
9252 else
9253 internal_error ("unexpected mapping node");
9254 return error_mark_node;
9256 case GOMP_MAP_STRUCT:
9257 case GOMP_MAP_FORCE_DEVICEPTR:
9258 case GOMP_MAP_DEVICE_RESIDENT:
9259 case GOMP_MAP_LINK:
9260 case GOMP_MAP_IF_PRESENT:
9261 case GOMP_MAP_FIRSTPRIVATE:
9262 case GOMP_MAP_FIRSTPRIVATE_INT:
9263 case GOMP_MAP_USE_DEVICE_PTR:
9264 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9265 return NULL_TREE;
9267 default:
9268 internal_error ("unexpected mapping node");
9271 return error_mark_node;
9274 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9275 mappings, return the chain pointer to the end of that group in the list. */
9277 static tree *
9278 omp_group_last (tree *start_p)
9280 tree c = *start_p, nc, *grp_last_p = start_p;
9282 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9284 nc = OMP_CLAUSE_CHAIN (c);
9286 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9287 return grp_last_p;
9289 switch (OMP_CLAUSE_MAP_KIND (c))
9291 default:
9292 while (nc
9293 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9294 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9295 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9296 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9297 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9298 || (OMP_CLAUSE_MAP_KIND (nc)
9299 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9300 || (OMP_CLAUSE_MAP_KIND (nc)
9301 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9302 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9303 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9305 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9306 c = nc;
9307 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9308 if (nc2
9309 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9310 && (OMP_CLAUSE_MAP_KIND (nc)
9311 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9312 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9314 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9315 c = nc2;
9316 nc2 = OMP_CLAUSE_CHAIN (nc2);
9318 nc = nc2;
9320 break;
9322 case GOMP_MAP_ATTACH:
9323 case GOMP_MAP_DETACH:
9324 /* This is a weird artifact of how directives are parsed: bare attach or
9325 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9326 FIRSTPRIVATE_REFERENCE node. FIXME. */
9327 if (nc
9328 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9329 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9330 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9331 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9332 break;
9334 case GOMP_MAP_TO_PSET:
9335 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9336 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9337 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9338 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9339 break;
9341 case GOMP_MAP_STRUCT:
9343 unsigned HOST_WIDE_INT num_mappings
9344 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9345 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9346 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9347 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9348 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9349 for (unsigned i = 0; i < num_mappings; i++)
9350 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9352 break;
9355 return grp_last_p;
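/* As an illustrative sketch (not a verbatim dump): for a clause chain

     GOMP_MAP_TO (s.p[0:n]) -> GOMP_MAP_ATTACH_DETACH (s.p) -> GOMP_MAP_TO (x)

   calling omp_group_last on the first node steps over the ATTACH_DETACH
   node (one of the kinds accepted in the default case above) and returns a
   pointer to the chain slot holding it, so the group covers the first two
   nodes and stops before the unrelated mapping of 'x'. */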
9358 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9359 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9360 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9361 if we found at least one such group, else return NULL. */
9363 static void
9364 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9365 tree gather_sentinel)
9367 for (tree *cp = list_p;
9368 *cp && *cp != gather_sentinel;
9369 cp = &OMP_CLAUSE_CHAIN (*cp))
9371 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9372 continue;
9374 tree *grp_last_p = omp_group_last (cp);
9375 omp_mapping_group grp;
9377 grp.grp_start = cp;
9378 grp.grp_end = *grp_last_p;
9379 grp.mark = UNVISITED;
9380 grp.sibling = NULL;
9381 grp.deleted = false;
9382 grp.next = NULL;
9383 groups->safe_push (grp);
9385 cp = grp_last_p;
9389 static vec<omp_mapping_group> *
9390 omp_gather_mapping_groups (tree *list_p)
9392 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9394 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9396 if (groups->length () > 0)
9397 return groups;
9398 else
9400 delete groups;
9401 return NULL;
9405 /* A pointer mapping group GRP may define a block of memory starting at some
9406 base address, and maybe also define a firstprivate pointer or firstprivate
9407 reference that points to that block. The return value is a node containing
9408 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9409 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9410 return the number of consecutive chained nodes in CHAINED. */
9412 static tree
9413 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9414 tree *firstprivate)
9416 tree node = *grp->grp_start;
9418 *firstprivate = NULL_TREE;
9419 *chained = 1;
9421 switch (OMP_CLAUSE_MAP_KIND (node))
9423 case GOMP_MAP_TO:
9424 case GOMP_MAP_FROM:
9425 case GOMP_MAP_TOFROM:
9426 case GOMP_MAP_ALWAYS_FROM:
9427 case GOMP_MAP_ALWAYS_TO:
9428 case GOMP_MAP_ALWAYS_TOFROM:
9429 case GOMP_MAP_FORCE_FROM:
9430 case GOMP_MAP_FORCE_TO:
9431 case GOMP_MAP_FORCE_TOFROM:
9432 case GOMP_MAP_FORCE_PRESENT:
9433 case GOMP_MAP_PRESENT_ALLOC:
9434 case GOMP_MAP_PRESENT_FROM:
9435 case GOMP_MAP_PRESENT_TO:
9436 case GOMP_MAP_PRESENT_TOFROM:
9437 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9438 case GOMP_MAP_ALWAYS_PRESENT_TO:
9439 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9440 case GOMP_MAP_ALLOC:
9441 case GOMP_MAP_RELEASE:
9442 case GOMP_MAP_DELETE:
9443 case GOMP_MAP_FORCE_ALLOC:
9444 case GOMP_MAP_IF_PRESENT:
9445 if (node == grp->grp_end)
9446 return node;
9448 node = OMP_CLAUSE_CHAIN (node);
9449 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9451 if (node == grp->grp_end)
9452 return *grp->grp_start;
9453 node = OMP_CLAUSE_CHAIN (node);
9455 if (node)
9456 switch (OMP_CLAUSE_MAP_KIND (node))
9458 case GOMP_MAP_POINTER:
9459 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9460 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9461 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9462 *firstprivate = OMP_CLAUSE_DECL (node);
9463 return *grp->grp_start;
9465 case GOMP_MAP_ALWAYS_POINTER:
9466 case GOMP_MAP_ATTACH_DETACH:
9467 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9468 return *grp->grp_start;
9470 default:
9471 internal_error ("unexpected mapping node");
9473 else
9474 internal_error ("unexpected mapping node");
9475 return error_mark_node;
9477 case GOMP_MAP_TO_PSET:
9478 gcc_assert (node != grp->grp_end);
9479 node = OMP_CLAUSE_CHAIN (node);
9480 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9481 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9482 return NULL_TREE;
9483 else
9484 internal_error ("unexpected mapping node");
9485 return error_mark_node;
9487 case GOMP_MAP_ATTACH:
9488 case GOMP_MAP_DETACH:
9489 node = OMP_CLAUSE_CHAIN (node);
9490 if (!node || *grp->grp_start == grp->grp_end)
9491 return NULL_TREE;
9492 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9493 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9495 /* We're mapping the base pointer itself in a bare attach or detach
9496 node. This is a side effect of how parsing works, and the mapping
9497 will be removed anyway (at least for enter/exit data directives).
9498 We should ignore the mapping here. FIXME. */
9499 return NULL_TREE;
9501 else
9502 internal_error ("unexpected mapping node");
9503 return error_mark_node;
9505 case GOMP_MAP_STRUCT:
9507 unsigned HOST_WIDE_INT num_mappings
9508 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9509 node = OMP_CLAUSE_CHAIN (node);
9510 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9511 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9513 *firstprivate = OMP_CLAUSE_DECL (node);
9514 node = OMP_CLAUSE_CHAIN (node);
9516 *chained = num_mappings;
9517 return node;
9520 case GOMP_MAP_FORCE_DEVICEPTR:
9521 case GOMP_MAP_DEVICE_RESIDENT:
9522 case GOMP_MAP_LINK:
9523 case GOMP_MAP_FIRSTPRIVATE:
9524 case GOMP_MAP_FIRSTPRIVATE_INT:
9525 case GOMP_MAP_USE_DEVICE_PTR:
9526 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9527 return NULL_TREE;
9529 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9530 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9531 case GOMP_MAP_POINTER:
9532 case GOMP_MAP_ALWAYS_POINTER:
9533 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9534 /* These shouldn't appear by themselves. */
9535 if (!seen_error ())
9536 internal_error ("unexpected pointer mapping node");
9537 return error_mark_node;
9539 default:
9540 gcc_unreachable ();
9543 return error_mark_node;
9546 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9547 nodes by tree_operand_hash_no_se. */
9549 static void
9550 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9551 omp_mapping_group *> *grpmap,
9552 vec<omp_mapping_group> *groups,
9553 tree reindex_sentinel)
9555 omp_mapping_group *grp;
9556 unsigned int i;
9557 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9559 FOR_EACH_VEC_ELT (*groups, i, grp)
9561 if (reindexing && *grp->grp_start == reindex_sentinel)
9562 above_hwm = true;
9564 if (reindexing && !above_hwm)
9565 continue;
9567 tree fpp;
9568 unsigned int chained;
9569 tree node = omp_group_base (grp, &chained, &fpp);
9571 if (node == error_mark_node || (!node && !fpp))
9572 continue;
9574 for (unsigned j = 0;
9575 node && j < chained;
9576 node = OMP_CLAUSE_CHAIN (node), j++)
9578 tree decl = OMP_CLAUSE_DECL (node);
9579 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9580 meaning node-hash lookups don't work. This is a workaround for
9581 that, but ideally we should just create the INDIRECT_REF at
9582 source instead. FIXME. */
9583 if (TREE_CODE (decl) == MEM_REF
9584 && integer_zerop (TREE_OPERAND (decl, 1)))
9585 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9587 omp_mapping_group **prev = grpmap->get (decl);
9589 if (prev && *prev == grp)
9590 /* Empty. */;
9591 else if (prev)
9593 /* Mapping the same thing twice is normally diagnosed as an error,
9594 but can happen under some circumstances, e.g. in pr99928-16.c,
9595 the directive:
9597 #pragma omp target simd reduction(+:a[:3]) \
9598 map(always, tofrom: a[:6])
9601 will result in two "a[0]" mappings (of different sizes). */
9603 grp->sibling = (*prev)->sibling;
9604 (*prev)->sibling = grp;
9606 else
9607 grpmap->put (decl, grp);
9610 if (!fpp)
9611 continue;
9613 omp_mapping_group **prev = grpmap->get (fpp);
9614 if (prev && *prev != grp)
9616 grp->sibling = (*prev)->sibling;
9617 (*prev)->sibling = grp;
9619 else
9620 grpmap->put (fpp, grp);
9624 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9625 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9627 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9628 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9630 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9632 return grpmap;
9635 /* Rebuild group map from partially-processed clause list (during
9636 omp_build_struct_sibling_lists). We have already processed nodes up until
9637 a high-water mark (HWM). This is a bit tricky because the list is being
9638 reordered as it is scanned, but we know:
9640 1. The list after HWM has not been touched yet, so we can reindex it safely.
9642 2. The list before and including HWM has been altered, but remains
9643 well-formed throughout the sibling-list building operation.
9645 So we can do the reindex operation in two parts: first on the processed,
9646 then on the unprocessed half of the list. */
9648 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9649 omp_reindex_mapping_groups (tree *list_p,
9650 vec<omp_mapping_group> *groups,
9651 vec<omp_mapping_group> *processed_groups,
9652 tree sentinel)
9654 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9655 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9657 processed_groups->truncate (0);
9659 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9660 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9661 if (sentinel)
9662 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9664 return grpmap;
9667 /* Find the immediately-containing struct for a component ref (etc.)
9668 expression EXPR. */
9670 static tree
9671 omp_containing_struct (tree expr)
9673 tree expr0 = expr;
9675 STRIP_NOPS (expr);
9677 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9678 component ref. */
9679 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9680 return expr0;
9682 while (TREE_CODE (expr) == ARRAY_REF)
9683 expr = TREE_OPERAND (expr, 0);
9685 if (TREE_CODE (expr) == COMPONENT_REF)
9686 expr = TREE_OPERAND (expr, 0);
9688 return expr;
9691 /* Return TRUE if DECL describes a component that is part of a whole structure
9692 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9693 that maps that structure, if present. */
9695 static bool
9696 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9697 omp_mapping_group *> *grpmap,
9698 tree decl,
9699 omp_mapping_group **mapped_by_group)
9701 tree wsdecl = NULL_TREE;
9703 *mapped_by_group = NULL;
9705 while (true)
9707 wsdecl = omp_containing_struct (decl);
9708 if (wsdecl == decl)
9709 break;
9710 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9711 if (!wholestruct
9712 && TREE_CODE (wsdecl) == MEM_REF
9713 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9715 tree deref = TREE_OPERAND (wsdecl, 0);
9716 deref = build_fold_indirect_ref (deref);
9717 wholestruct = grpmap->get (deref);
9719 if (wholestruct)
9721 *mapped_by_group = *wholestruct;
9722 return true;
9724 decl = wsdecl;
9727 return false;
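/* E.g. (illustrative): if GRPMAP holds a group whose base is 's', querying
   DECL == s.a.b first tries the containing object 's.a', fails, retries
   with 's', and succeeds; *MAPPED_BY_GROUP is then the group mapping the
   whole of 's'. */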
9730 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9731 FALSE on error. */
9733 static bool
9734 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9735 vec<omp_mapping_group> *groups,
9736 hash_map<tree_operand_hash_no_se,
9737 omp_mapping_group *> *grpmap,
9738 omp_mapping_group *grp)
9740 if (grp->mark == PERMANENT)
9741 return true;
9742 if (grp->mark == TEMPORARY)
9743 {
9744 fprintf (stderr, "when processing group:\n");
9745 debug_mapping_group (grp);
9746 internal_error ("base pointer cycle detected");
9747 return false;
9748 }
9749 grp->mark = TEMPORARY;
9751 tree attaches_to = omp_get_attachment (grp);
9753 if (attaches_to)
9755 omp_mapping_group **basep = grpmap->get (attaches_to);
9757 if (basep && *basep != grp)
9759 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9760 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9761 return false;
9765 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9767 while (decl)
9769 tree base = omp_get_base_pointer (decl);
9771 if (!base)
9772 break;
9774 omp_mapping_group **innerp = grpmap->get (base);
9775 omp_mapping_group *wholestruct;
9777 /* We should treat whole-structure mappings as if all (pointer, in this
9778 case) members are mapped as individual list items. Check if we have
9779 such a whole-structure mapping, if we don't have an explicit reference
9780 to the pointer member itself. */
9781 if (!innerp
9782 && TREE_CODE (base) == COMPONENT_REF
9783 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9784 innerp = &wholestruct;
9786 if (innerp && *innerp != grp)
9788 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9789 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9790 return false;
9791 break;
9794 decl = base;
9797 grp->mark = PERMANENT;
9799 /* Emit grp to output list. */
9801 **outlist = grp;
9802 *outlist = &grp->next;
9804 return true;
9807 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9808 before mappings that use those pointers. This is an implementation of the
9809 depth-first search algorithm, described e.g. at:
9811 https://en.wikipedia.org/wiki/Topological_sorting
9812 */
9814 static omp_mapping_group *
9815 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9816 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
9817 *grpmap)
9819 omp_mapping_group *grp, *outlist = NULL, **cursor;
9820 unsigned int i;
9822 cursor = &outlist;
9824 FOR_EACH_VEC_ELT (*groups, i, grp)
9826 if (grp->mark != PERMANENT)
9827 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9828 return NULL;
9831 return outlist;
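/* Illustrative effect (a sketch): for the clause order

     map(to: *p) map(to: p)

   the group for '*p' depends on its base pointer 'p', so the sort emits
   the group mapping 'p' first and the '*p' group after it, whatever their
   original order. A cycle among base pointers would indicate malformed
   input and is diagnosed in the helper above. */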
9834 /* Split INLIST into two parts, moving groups corresponding to
9835 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9836 The former list is then appended to the latter. Each sub-list retains the
9837 order of the original list.
9838 Note that ATTACH nodes are later moved to the end of the list in
9839 gimplify_adjust_omp_clauses, for target regions. */
9841 static omp_mapping_group *
9842 omp_segregate_mapping_groups (omp_mapping_group *inlist)
9844 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
9845 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
9847 for (omp_mapping_group *w = inlist; w;)
9849 tree c = *w->grp_start;
9850 omp_mapping_group *next = w->next;
9852 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9854 switch (OMP_CLAUSE_MAP_KIND (c))
9856 case GOMP_MAP_ALLOC:
9857 case GOMP_MAP_RELEASE:
9858 case GOMP_MAP_DELETE:
9859 *ard_tail = w;
9860 w->next = NULL;
9861 ard_tail = &w->next;
9862 break;
9864 default:
9865 *tf_tail = w;
9866 w->next = NULL;
9867 tf_tail = &w->next;
9870 w = next;
9873 /* Now splice the lists together... */
9874 *tf_tail = ard_groups;
9876 return tf_groups;
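/* For example (illustrative): an input list of groups

     to(a) release(b) alloc(c) from(d)

   becomes to(a) from(d) release(b) alloc(c): the ALLOC/RELEASE/DELETE
   groups keep their relative order but are moved after all the others. */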
9879 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9880 those groups based on the output list of omp_tsort_mapping_groups --
9881 singly-linked, threaded through each element's NEXT pointer starting at
9882 HEAD. Each list element appears exactly once in that linked list.
9884 Each element of GROUPS may correspond to one or several mapping nodes.
9885 Node groups are kept together, and in the reordered list, the positions of
9886 the original groups are reused for the positions of the reordered list.
9887 Hence if we have e.g.
9889 {to ptr ptr} firstprivate {tofrom ptr} ...
9890 ^ ^ ^
9891 first group non-"map" second group
9893 and say the second group contains a base pointer for the first so must be
9894 moved before it, the resulting list will contain:
9896 {tofrom ptr} firstprivate {to ptr ptr} ...
9897 ^ prev. second group ^ prev. first group
9898 */
9900 static tree *
9901 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
9902 omp_mapping_group *head,
9903 tree *list_p)
9905 omp_mapping_group *grp;
9906 unsigned int i;
9907 unsigned numgroups = groups->length ();
9908 auto_vec<tree> old_heads (numgroups);
9909 auto_vec<tree *> old_headps (numgroups);
9910 auto_vec<tree> new_heads (numgroups);
9911 auto_vec<tree> old_succs (numgroups);
9912 bool map_at_start = (list_p == (*groups)[0].grp_start);
9914 tree *new_grp_tail = NULL;
9916 /* Stash the start & end nodes of each mapping group before we start
9917 modifying the list. */
9918 FOR_EACH_VEC_ELT (*groups, i, grp)
9920 old_headps.quick_push (grp->grp_start);
9921 old_heads.quick_push (*grp->grp_start);
9922 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
9925 /* And similarly, the heads of the groups in the order we want to rearrange
9926 the list to. */
9927 for (omp_mapping_group *w = head; w; w = w->next)
9928 new_heads.quick_push (*w->grp_start);
9930 FOR_EACH_VEC_ELT (*groups, i, grp)
9932 gcc_assert (head);
9934 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
9936 /* a {b c d} {e f g} h i j (original)
9938 a {k l m} {e f g} h i j (inserted new group on last iter)
9940 a {k l m} {n o p} h i j (this time, chain last group to new one)
9941 ^new_grp_tail
9942 */
9943 *new_grp_tail = new_heads[i];
9945 else if (new_grp_tail)
9947 /* a {b c d} e {f g h} i j k (original)
9949 a {l m n} e {f g h} i j k (gap after last iter's group)
9951 a {l m n} e {o p q} h i j (chain last group to old successor)
9952 ^new_grp_tail
9953 */
9954 *new_grp_tail = old_succs[i - 1];
9955 *old_headps[i] = new_heads[i];
9957 else
9959 /* The first inserted group -- point to new group, and leave end
9960 open.
9961 a {b c d} e f
9963 a {g h i...
9964 */
9965 *grp->grp_start = new_heads[i];
9968 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
9970 head = head->next;
9973 if (new_grp_tail)
9974 *new_grp_tail = old_succs[numgroups - 1];
9976 gcc_assert (!head);
9978 return map_at_start ? (*groups)[0].grp_start : list_p;
9981 /* DECL is supposed to have lastprivate semantics in the outer contexts
9982 of combined/composite constructs, starting with OCTX.
9983 Add needed lastprivate, shared or map clause if no data sharing or
9984 mapping clause are present. IMPLICIT_P is true if it is an implicit
9985 clause (IV on simd), in which case the lastprivate will not be
9986 copied to some constructs. */
9988 static void
9989 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9990 tree decl, bool implicit_p)
9992 struct gimplify_omp_ctx *orig_octx = octx;
9993 for (; octx; octx = octx->outer_context)
9995 if ((octx->region_type == ORT_COMBINED_PARALLEL
9996 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9997 && splay_tree_lookup (octx->variables,
9998 (splay_tree_key) decl) == NULL)
10000 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
10001 continue;
10003 if ((octx->region_type & ORT_TASK) != 0
10004 && octx->combined_loop
10005 && splay_tree_lookup (octx->variables,
10006 (splay_tree_key) decl) == NULL)
10008 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10009 continue;
10011 if (implicit_p
10012 && octx->region_type == ORT_WORKSHARE
10013 && octx->combined_loop
10014 && splay_tree_lookup (octx->variables,
10015 (splay_tree_key) decl) == NULL
10016 && octx->outer_context
10017 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
10018 && splay_tree_lookup (octx->outer_context->variables,
10019 (splay_tree_key) decl) == NULL)
10021 octx = octx->outer_context;
10022 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10023 continue;
10025 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
10026 && octx->combined_loop
10027 && splay_tree_lookup (octx->variables,
10028 (splay_tree_key) decl) == NULL
10029 && !omp_check_private (octx, decl, false))
10031 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10032 continue;
10034 if (octx->region_type == ORT_COMBINED_TARGET)
10036 splay_tree_node n = splay_tree_lookup (octx->variables,
10037 (splay_tree_key) decl);
10038 if (n == NULL)
10040 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10041 octx = octx->outer_context;
10043 else if (!implicit_p
10044 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
10046 n->value &= ~(GOVD_FIRSTPRIVATE
10047 | GOVD_FIRSTPRIVATE_IMPLICIT
10048 | GOVD_EXPLICIT);
10049 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10050 octx = octx->outer_context;
10053 break;
10055 if (octx && (implicit_p || octx != orig_octx))
10056 omp_notice_variable (octx, decl, true);
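/* A sketch of the intended effect (illustrative): for

     #pragma omp target parallel for lastprivate(x)

   the walk above records 'x' as GOVD_SHARED | GOVD_SEEN on the combined
   parallel context and GOVD_MAP | GOVD_SEEN on the combined target
   context, so the value assigned in the last iteration is visible after
   the combined construct. */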
10059 /* If we have mappings INNER and OUTER, where INNER is a component access and
10060 OUTER is a mapping of the whole containing struct, check that the mappings
10061 are compatible. We'll be deleting the inner mapping, so we need to make
10062 sure the outer mapping does (at least) the same transfers to/from the device
10063 as the inner mapping. */
10065 bool
10066 omp_check_mapping_compatibility (location_t loc,
10067 omp_mapping_group *outer,
10068 omp_mapping_group *inner)
10070 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
10072 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
10073 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
10075 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
10076 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
10078 if (outer_kind == inner_kind)
10079 return true;
10081 switch (outer_kind)
10083 case GOMP_MAP_ALWAYS_TO:
10084 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10085 || inner_kind == GOMP_MAP_ALLOC
10086 || inner_kind == GOMP_MAP_TO)
10087 return true;
10088 break;
10090 case GOMP_MAP_ALWAYS_FROM:
10091 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10092 || inner_kind == GOMP_MAP_ALLOC
10093 || inner_kind == GOMP_MAP_FROM)
10094 return true;
10095 break;
10097 case GOMP_MAP_TO:
10098 case GOMP_MAP_FROM:
10099 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10100 || inner_kind == GOMP_MAP_ALLOC)
10101 return true;
10102 break;
10104 case GOMP_MAP_ALWAYS_TOFROM:
10105 case GOMP_MAP_TOFROM:
10106 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10107 || inner_kind == GOMP_MAP_ALLOC
10108 || inner_kind == GOMP_MAP_TO
10109 || inner_kind == GOMP_MAP_FROM
10110 || inner_kind == GOMP_MAP_TOFROM)
10111 return true;
10112 break;
10114 default:
10115 break;
10116 }
10118 error_at (loc, "data movement for component %qE is not compatible with "
10119 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10120 OMP_CLAUSE_DECL (first_outer));
10122 return false;
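/* Illustrative cases (following the switch above): an outer
   map(tofrom: s) subsumes an inner map(to: s.a), so deleting the inner
   mapping loses nothing; an outer map(to: s) with an inner map(from: s.a)
   would drop the copy-back, so the error above is issued. */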
10125 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10126 clause dependencies we handle for now are struct element mappings and
10127 whole-struct mappings on the same directive, and duplicate clause
10128 detection. */
10130 void
10131 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10132 hash_map<tree_operand_hash_no_se,
10133 omp_mapping_group *> *grpmap)
10135 int i;
10136 omp_mapping_group *grp;
10137 hash_set<tree_operand_hash> *seen_components = NULL;
10138 hash_set<tree_operand_hash> *shown_error = NULL;
10140 FOR_EACH_VEC_ELT (*groups, i, grp)
10142 tree grp_end = grp->grp_end;
10143 tree decl = OMP_CLAUSE_DECL (grp_end);
10145 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10147 if (DECL_P (grp_end))
10148 continue;
10150 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10151 while (TREE_CODE (c) == ARRAY_REF)
10152 c = TREE_OPERAND (c, 0);
10153 if (TREE_CODE (c) != COMPONENT_REF)
10154 continue;
10155 if (!seen_components)
10156 seen_components = new hash_set<tree_operand_hash> ();
10157 if (!shown_error)
10158 shown_error = new hash_set<tree_operand_hash> ();
10159 if (seen_components->contains (c)
10160 && !shown_error->contains (c))
10162 error_at (OMP_CLAUSE_LOCATION (grp_end),
10163 "%qE appears more than once in map clauses",
10164 OMP_CLAUSE_DECL (grp_end));
10165 shown_error->add (c);
10167 else
10168 seen_components->add (c);
10170 omp_mapping_group *struct_group;
10171 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10172 && *grp->grp_start == grp_end)
10174 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10175 struct_group, grp);
10176 /* Remove the whole of this mapping -- redundant. */
10177 grp->deleted = true;
10181 if (seen_components)
10182 delete seen_components;
10183 if (shown_error)
10184 delete shown_error;
10187 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10188 is linked to the previous node pointed to by INSERT_AT. */
10190 static tree *
10191 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10193 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10194 *insert_at = newnode;
10195 return &OMP_CLAUSE_CHAIN (newnode);
10198 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10199 pointed to by chain MOVE_AFTER instead. */
10201 static void
10202 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10204 gcc_assert (node == *old_pos);
10205 *old_pos = OMP_CLAUSE_CHAIN (node);
10206 OMP_CLAUSE_CHAIN (node) = *move_after;
10207 *move_after = node;
10210 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
10211 through LAST_NODE to a position after the MOVE_AFTER chain. Similar to
10212 the function below, but no new nodes are prepended before splicing.
10213 Return the position we should continue scanning the list at, or NULL to
10214 stay where we were. */
10216 static tree *
10217 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10218 tree *move_after)
10220 if (first_ptr == move_after)
10221 return NULL;
10223 tree tmp = *first_ptr;
10224 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10225 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10226 *move_after = tmp;
10228 return first_ptr;
10231 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10232 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10233 pointer MOVE_AFTER.
10235 The latter list was previously part of the OMP clause list, and the former
10236 (prepended) part is comprised of new nodes.
10238 We start with a list of nodes starting with a struct mapping node. We
10239 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10240 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10241 the group of mapping nodes we are currently processing (from the chain
10242 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10243 we should continue processing from, or NULL to stay where we were.
10245 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10246 different) is worked through below. Here we are processing LAST_NODE, and
10247 FIRST_PTR points at the preceding mapping clause:
10249 #. mapping node chain
10250 ---------------------------------------------------
10251 A. struct_node [->B]
10252 B. comp_1 [->C]
10253 C. comp_2 [->D (move_after)]
10254 D. map_to_3 [->E]
10255 E. attach_3 [->F (first_ptr)]
10256 F. map_to_4 [->G (continue_at)]
10257 G. attach_4 (last_node) [->H]
10258 H. ...
10260 *last_new_tail = *first_ptr;
10262 I. new_node (first_new) [->F (last_new_tail)]
10264 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10266 #. mapping node chain
10267 ----------------------------------------------------
10268 A. struct_node [->B]
10269 B. comp_1 [->C]
10270 C. comp_2 [->D (move_after)]
10271 D. map_to_3 [->E]
10272 E. attach_3 [->H (first_ptr)]
10273 F. map_to_4 [->G (continue_at)]
10274 G. attach_4 (last_node) [->H]
10275 H. ...
10277 I. new_node (first_new) [->F (last_new_tail)]
10279 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10281 #. mapping node chain
10282 ---------------------------------------------------
10283 A. struct_node [->B]
10284 B. comp_1 [->C]
10285 C. comp_2 [->D (move_after)]
10286 D. map_to_3 [->E]
10287 E. attach_3 [->H (continue_at)]
10288 F. map_to_4 [->G]
10289 G. attach_4 (last_node) [->D]
10290 H. ...
10292 I. new_node (first_new) [->F (last_new_tail)]
10294 *move_after = first_new;
10296 #. mapping node chain
10297 ---------------------------------------------------
10298 A. struct_node [->B]
10299 B. comp_1 [->C]
10300 C. comp_2 [->I (move_after)]
10301 D. map_to_3 [->E]
10302 E. attach_3 [->H (continue_at)]
10303 F. map_to_4 [->G]
10304 G. attach_4 (last_node) [->D]
10305 H. ...
10306 I. new_node (first_new) [->F (last_new_tail)]
10308 or, in order:
10310 #. mapping node chain
10311 ---------------------------------------------------
10312 A. struct_node [->B]
10313 B. comp_1 [->C]
10314 C. comp_2 [->I (move_after)]
10315 I. new_node (first_new) [->F (last_new_tail)]
10316 F. map_to_4 [->G]
10317 G. attach_4 (last_node) [->D]
10318 D. map_to_3 [->E]
10319 E. attach_3 [->H (continue_at)]
10320 H. ...
10321 */
10323 static tree *
10324 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10325 tree *first_ptr, tree last_node,
10326 tree *move_after)
10328 tree *continue_at = NULL;
10329 *last_new_tail = *first_ptr;
10330 if (first_ptr == move_after)
10331 *move_after = first_new;
10332 else
10334 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10335 continue_at = first_ptr;
10336 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10337 *move_after = first_new;
10339 return continue_at;
10342 /* Mapping struct members causes an additional set of nodes to be created,
10343 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10344 number of members being mapped, in order of ascending position (address or
10345 bitwise).
10347 We scan through the list of mapping clauses, calling this function for each
10348 struct member mapping we find, and build up the list of mappings after the
10349 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10350 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10351 moved into place in the sorted list.
10353 struct {
10354 int *a;
10355 int *b;
10356 int c;
10357 int *d;
10360 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10361 struct.d[0:n])
10363 GOMP_MAP_STRUCT (4)
10364 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10365 GOMP_MAP_ALLOC (struct.a)
10366 GOMP_MAP_ALLOC (struct.b)
10367 GOMP_MAP_TO (struct.c)
10368 GOMP_MAP_ALLOC (struct.d)
10371 In the case where we are mapping references to pointers, or in Fortran if
10372 we are mapping an array with a descriptor, additional nodes may be created
10373 after the struct node list also.
10375 The return code is either a pointer to the next node to process (if the
10376 list has been rearranged), else NULL to continue with the next node in the
10377 original list. */
10379 static tree *
10380 omp_accumulate_sibling_list (enum omp_region_type region_type,
10381 enum tree_code code,
10382 hash_map<tree_operand_hash, tree>
10383 *&struct_map_to_clause, tree *grp_start_p,
10384 tree grp_end, tree *inner)
10386 poly_offset_int coffset;
10387 poly_int64 cbitpos;
10388 tree ocd = OMP_CLAUSE_DECL (grp_end);
10389 bool openmp = !(region_type & ORT_ACC);
10390 tree *continue_at = NULL;
10392 while (TREE_CODE (ocd) == ARRAY_REF)
10393 ocd = TREE_OPERAND (ocd, 0);
10395 if (INDIRECT_REF_P (ocd))
10396 ocd = TREE_OPERAND (ocd, 0);
10398 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10400 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10401 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10402 == GOMP_MAP_ATTACH_DETACH)
10403 || (OMP_CLAUSE_MAP_KIND (grp_end)
10404 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10405 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10406 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10408 /* FIXME: If we're not mapping the base pointer in some other clause on this
10409 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10410 early-exit. */
10411 if (openmp && attach_detach)
10412 return NULL;
10414 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10416 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10417 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10419 OMP_CLAUSE_SET_MAP_KIND (l, k);
10421 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10423 OMP_CLAUSE_SIZE (l)
10424 = (!attach ? size_int (1)
10425 : (DECL_P (OMP_CLAUSE_DECL (l))
10426 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10427 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10428 if (struct_map_to_clause == NULL)
10429 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10430 struct_map_to_clause->put (base, l);
10432 if (ptr || attach_detach)
10434 tree extra_node;
10435 tree alloc_node
10436 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10437 &extra_node);
10438 OMP_CLAUSE_CHAIN (l) = alloc_node;
10440 tree *insert_node_pos = grp_start_p;
10442 if (extra_node)
10444 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10445 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10447 else
10448 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10450 *insert_node_pos = l;
10452 else
10454 gcc_assert (*grp_start_p == grp_end);
10455 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10458 tree noind = omp_strip_indirections (base);
10460 if (!openmp
10461 && (region_type & ORT_TARGET)
10462 && TREE_CODE (noind) == COMPONENT_REF)
10464 /* The base for this component access is a struct component access
10465 itself. Insert a node to be processed on the next iteration of
10466 our caller's loop, which will subsequently be turned into a new,
10467 inner GOMP_MAP_STRUCT mapping.
10469 We need to do this else the non-DECL_P base won't be
10470 rewritten correctly in the offloaded region. */
10471 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10472 OMP_CLAUSE_MAP);
10473 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10474 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10475 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10476 *inner = c2;
10477 return NULL;
10480 tree sdecl = omp_strip_components_and_deref (base);
10482 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10484 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10485 OMP_CLAUSE_MAP);
10486 bool base_ref
10487 = (INDIRECT_REF_P (base)
10488 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10489 == REFERENCE_TYPE)
10490 || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
10491 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10492 (TREE_OPERAND (base, 0), 0)))
10493 == REFERENCE_TYPE))));
10494 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10495 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10496 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10497 OMP_CLAUSE_DECL (c2) = sdecl;
10498 tree baddr = build_fold_addr_expr (base);
10499 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10500 ptrdiff_type_node, baddr);
10501 /* This isn't going to be good enough when we add support for more
10502 complicated lvalue expressions. FIXME. */
10503 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10504 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10505 sdecl = build_simple_mem_ref (sdecl);
10506 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10507 ptrdiff_type_node, sdecl);
10508 OMP_CLAUSE_SIZE (c2)
10509 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10510 ptrdiff_type_node, baddr, decladdr);
10511 /* Insert after struct node. */
10512 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10513 OMP_CLAUSE_CHAIN (l) = c2;
10516 return NULL;
10518 else if (struct_map_to_clause)
10520 tree *osc = struct_map_to_clause->get (base);
10521 tree *sc = NULL, *scp = NULL;
10522 sc = &OMP_CLAUSE_CHAIN (*osc);
10523 /* The struct mapping might be immediately followed by a
10524 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10525 indirect access or a reference, or both. (This added node is removed
10526 in omp-low.c after it has been processed there.) */
10527 if (*sc != grp_end
10528 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10529 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10530 sc = &OMP_CLAUSE_CHAIN (*sc);
10531 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10532 if ((ptr || attach_detach) && sc == grp_start_p)
10533 break;
10534 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10535 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10536 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10537 break;
10538 else
10540 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10541 poly_offset_int offset;
10542 poly_int64 bitpos;
10544 if (TREE_CODE (sc_decl) == ARRAY_REF)
10546 while (TREE_CODE (sc_decl) == ARRAY_REF)
10547 sc_decl = TREE_OPERAND (sc_decl, 0);
10548 if (TREE_CODE (sc_decl) != COMPONENT_REF
10549 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10550 break;
10552 else if (INDIRECT_REF_P (sc_decl)
10553 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10554 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10555 == REFERENCE_TYPE))
10556 sc_decl = TREE_OPERAND (sc_decl, 0);
10558 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10559 if (!base2 || !operand_equal_p (base2, base, 0))
10560 break;
10561 if (scp)
10562 continue;
10563 if (maybe_lt (coffset, offset)
10564 || (known_eq (coffset, offset)
10565 && maybe_lt (cbitpos, bitpos)))
10567 if (ptr || attach_detach)
10568 scp = sc;
10569 else
10570 break;
10574 if (!attach)
10575 OMP_CLAUSE_SIZE (*osc)
10576 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10577 if (ptr || attach_detach)
10579 tree cl = NULL_TREE, extra_node;
10580 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10581 grp_end, &extra_node);
10582 tree *tail_chain = NULL;
10584 /* Here, we have:
10586 grp_end : the last (or only) node in this group.
10587 grp_start_p : pointer to the first node in a pointer mapping group
10588 up to and including GRP_END.
10589 sc : pointer to the chain for the end of the struct component
10590 list.
10591 scp : pointer to the chain for the sorted position at which we
10592 should insert in the middle of the struct component list
10593 (else NULL to insert at end).
10594 alloc_node : the "alloc" node for the structure (pointer-type)
10595 component. We insert at SCP (if present), else SC
10596 (the end of the struct component list).
10597 extra_node : a newly-synthesized node for an additional indirect
10598 pointer mapping or a Fortran pointer set, if needed.
10599 cl : first node to prepend before grp_start_p.
10600 tail_chain : pointer to chain of last prepended node.
10602 The general idea is we move the nodes for this struct mapping
10603 together: the alloc node goes into the sorted list directly after
10604 the struct mapping, and any extra nodes (together with the nodes
10605 mapping arrays pointed to by struct components) get moved after
10606 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10607 the end of the struct component mapping list. It's important that
10608 the alloc_node comes first in that case because it's part of the
10609 sorted component mapping list (but subsequent nodes are not!). */
10611 if (scp)
10612 omp_siblist_insert_node_after (alloc_node, scp);
10614 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10615 already inserted it) and the extra_node (if it is present). The
10616 list can be empty if we added alloc_node above and there is no
10617 extra node. */
10618 if (scp && extra_node)
10620 cl = extra_node;
10621 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10623 else if (extra_node)
10625 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10626 cl = alloc_node;
10627 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10629 else if (!scp)
10631 cl = alloc_node;
10632 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10635 continue_at
10636 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10637 grp_start_p, grp_end,
10639 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10641 else if (*sc != grp_end)
10643 gcc_assert (*grp_start_p == grp_end);
10645 /* We are moving the current node back to a previous struct node:
10646 the node that used to point to the current node will now point to
10647 the next node. */
10648 continue_at = grp_start_p;
10649 /* In the non-pointer case, the mapping clause itself is moved into
10650 the correct position in the struct component list, which in this
10651 case is just SC. */
10652 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10655 return continue_at;
10658 /* Scan through GROUPS, and create sorted structure sibling lists without
10659 gimplifying. */
10661 static bool
10662 omp_build_struct_sibling_lists (enum tree_code code,
10663 enum omp_region_type region_type,
10664 vec<omp_mapping_group> *groups,
10665 hash_map<tree_operand_hash_no_se,
10666 omp_mapping_group *> **grpmap,
10667 tree *list_p)
10669 unsigned i;
10670 omp_mapping_group *grp;
10671 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10672 bool success = true;
10673 tree *new_next = NULL;
10674 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10675 auto_vec<omp_mapping_group> pre_hwm_groups;
10677 FOR_EACH_VEC_ELT (*groups, i, grp)
10679 tree c = grp->grp_end;
10680 tree decl = OMP_CLAUSE_DECL (c);
10681 tree grp_end = grp->grp_end;
10682 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10684 if (new_next)
10685 grp->grp_start = new_next;
10687 new_next = NULL;
10689 tree *grp_start_p = grp->grp_start;
10691 if (DECL_P (decl))
10692 continue;
10694 /* Skip groups we marked for deletion in
10695 oacc_resolve_clause_dependencies. */
10696 if (grp->deleted)
10697 continue;
10699 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10700 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10702 /* Don't process an array descriptor that isn't inside a derived type
10703 as a struct (the GOMP_MAP_POINTER following will have the form
10704 "var.data", but such mappings are handled specially). */
10705 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10706 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10707 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10708 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10709 continue;
10712 tree d = decl;
10713 if (TREE_CODE (d) == ARRAY_REF)
10715 while (TREE_CODE (d) == ARRAY_REF)
10716 d = TREE_OPERAND (d, 0);
10717 if (TREE_CODE (d) == COMPONENT_REF
10718 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10719 decl = d;
10721 if (d == decl
10722 && INDIRECT_REF_P (decl)
10723 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10724 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10725 == REFERENCE_TYPE)
10726 && (OMP_CLAUSE_MAP_KIND (c)
10727 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10728 decl = TREE_OPERAND (decl, 0);
10730 STRIP_NOPS (decl);
10732 if (TREE_CODE (decl) != COMPONENT_REF)
10733 continue;
10735 /* If we're mapping the whole struct in another node, skip adding this
10736 node to a sibling list. */
10737 omp_mapping_group *wholestruct;
10738 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10739 &wholestruct))
10741 if (!(region_type & ORT_ACC)
10742 && *grp_start_p == grp_end)
10743 /* Remove the whole of this mapping -- redundant. */
10744 grp->deleted = true;
10746 continue;
10749 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10750 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10751 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10752 && code != OACC_UPDATE
10753 && code != OMP_TARGET_UPDATE)
10755 if (error_operand_p (decl))
10757 success = false;
10758 goto error_out;
10761 tree stype = TREE_TYPE (decl);
10762 if (TREE_CODE (stype) == REFERENCE_TYPE)
10763 stype = TREE_TYPE (stype);
10764 if (TYPE_SIZE_UNIT (stype) == NULL
10765 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10767 error_at (OMP_CLAUSE_LOCATION (c),
10768 "mapping field %qE of variable length "
10769 "structure", OMP_CLAUSE_DECL (c));
10770 success = false;
10771 goto error_out;
10774 tree inner = NULL_TREE;
10776 new_next
10777 = omp_accumulate_sibling_list (region_type, code,
10778 struct_map_to_clause, grp_start_p,
10779 grp_end, &inner);
10781 if (inner)
10783 if (new_next && *new_next == NULL_TREE)
10784 *new_next = inner;
10785 else
10786 *tail = inner;
10788 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10789 omp_mapping_group newgrp;
10790 newgrp.grp_start = new_next ? new_next : tail;
10791 newgrp.grp_end = inner;
10792 newgrp.mark = UNVISITED;
10793 newgrp.sibling = NULL;
10794 newgrp.deleted = false;
10795 newgrp.next = NULL;
10796 groups->safe_push (newgrp);
10798 /* !!! Growing GROUPS might invalidate the pointers in the group
10799 map. Rebuild it here. This is a bit inefficient, but
10800 shouldn't happen very often. */
10801 delete (*grpmap);
10802 *grpmap
10803 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10804 sentinel);
10806 tail = &OMP_CLAUSE_CHAIN (inner);
10811 /* Delete groups marked for deletion above. At this point the order of the
10812 groups may no longer correspond to the order of the underlying list,
10813 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10814 deleted nodes... */
10816 FOR_EACH_VEC_ELT (*groups, i, grp)
10817 if (grp->deleted)
10818 for (tree d = *grp->grp_start;
10819 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10820 d = OMP_CLAUSE_CHAIN (d))
10821 OMP_CLAUSE_DECL (d) = NULL_TREE;
10823 /* ...then sweep through the list removing the now-empty nodes. */
10825 tail = list_p;
10826 while (*tail)
10828 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10829 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
10830 *tail = OMP_CLAUSE_CHAIN (*tail);
10831 else
10832 tail = &OMP_CLAUSE_CHAIN (*tail);
10835 error_out:
10836 if (struct_map_to_clause)
10837 delete struct_map_to_clause;
10839 return success;
10842 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10843 omp context and into the enclosing omp contexts as needed. */
10845 static void
10846 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10847 enum omp_region_type region_type,
10848 enum tree_code code)
10850 struct gimplify_omp_ctx *ctx, *outer_ctx;
10851 tree c;
10852 tree *orig_list_p = list_p;
10853 int handled_depend_iterators = -1;
10854 int nowait = -1;
10856 ctx = new_omp_context (region_type);
10857 ctx->code = code;
10858 outer_ctx = ctx->outer_context;
10859 if (code == OMP_TARGET)
10861 if (!lang_GNU_Fortran ())
10862 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10863 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10864 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10865 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10867 if (!lang_GNU_Fortran ())
10868 switch (code)
10870 case OMP_TARGET:
10871 case OMP_TARGET_DATA:
10872 case OMP_TARGET_ENTER_DATA:
10873 case OMP_TARGET_EXIT_DATA:
10874 case OACC_DECLARE:
10875 case OACC_HOST_DATA:
10876 case OACC_PARALLEL:
10877 case OACC_KERNELS:
10878 ctx->target_firstprivatize_array_bases = true;
10879 default:
10880 break;
10883 if (code == OMP_TARGET
10884 || code == OMP_TARGET_DATA
10885 || code == OMP_TARGET_ENTER_DATA
10886 || code == OMP_TARGET_EXIT_DATA)
10888 vec<omp_mapping_group> *groups;
10889 groups = omp_gather_mapping_groups (list_p);
10890 if (groups)
10892 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10893 grpmap = omp_index_mapping_groups (groups);
10895 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10896 list_p);
10898 omp_mapping_group *outlist = NULL;
10900 /* Topological sorting may fail if we have duplicate nodes, which
10901 we should have detected and shown an error for already. Skip
10902 sorting in that case. */
10903 if (seen_error ())
10904 goto failure;
10906 delete grpmap;
10907 delete groups;
10909 /* Rebuild now that we have struct sibling lists. */
10910 groups = omp_gather_mapping_groups (list_p);
10911 grpmap = omp_index_mapping_groups (groups);
10913 outlist = omp_tsort_mapping_groups (groups, grpmap);
10914 outlist = omp_segregate_mapping_groups (outlist);
10915 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10917 failure:
10918 delete grpmap;
10919 delete groups;
10922 /* OpenMP map clauses with 'present' need to go in front of those
10923 without. */
10924 tree present_map_head = NULL;
10925 tree *present_map_tail_p = &present_map_head;
10926 tree *first_map_clause_p = NULL;
10928 for (tree *c_p = list_p; *c_p; )
10930 tree c = *c_p;
10931 tree *next_c_p = &OMP_CLAUSE_CHAIN (c);
10933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
10935 if (!first_map_clause_p)
10936 first_map_clause_p = c_p;
10937 switch (OMP_CLAUSE_MAP_KIND (c))
10939 case GOMP_MAP_PRESENT_ALLOC:
10940 case GOMP_MAP_PRESENT_FROM:
10941 case GOMP_MAP_PRESENT_TO:
10942 case GOMP_MAP_PRESENT_TOFROM:
10943 next_c_p = c_p;
10944 *c_p = OMP_CLAUSE_CHAIN (c);
10946 OMP_CLAUSE_CHAIN (c) = NULL;
10947 *present_map_tail_p = c;
10948 present_map_tail_p = &OMP_CLAUSE_CHAIN (c);
10950 break;
10952 default:
10953 break;
10957 c_p = next_c_p;
10959 if (first_map_clause_p && present_map_head)
10961 tree next = *first_map_clause_p;
10962 *first_map_clause_p = present_map_head;
10963 *present_map_tail_p = next;
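/* E.g. (illustrative): for

     #pragma omp target map(to: a) map(present, to: b)

   the loop above unlinks the 'present' clause and the splice puts it back
   in front, yielding map(present, to: b) map(to: a), so the presence
   check happens before any new mapping is created. */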
10966 else if (region_type & ORT_ACC)
10968 vec<omp_mapping_group> *groups;
10969 groups = omp_gather_mapping_groups (list_p);
10970 if (groups)
10972 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10973 grpmap = omp_index_mapping_groups (groups);
10975 oacc_resolve_clause_dependencies (groups, grpmap);
10976 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10977 list_p);
10979 delete groups;
10980 delete grpmap;
10984 while ((c = *list_p) != NULL)
10986 bool remove = false;
10987 bool notice_outer = true;
10988 const char *check_non_private = NULL;
10989 unsigned int flags;
10990 tree decl;
10992 switch (OMP_CLAUSE_CODE (c))
10994 case OMP_CLAUSE_PRIVATE:
10995 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10996 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10998 flags |= GOVD_PRIVATE_OUTER_REF;
10999 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
11001 else
11002 notice_outer = false;
11003 goto do_add;
11004 case OMP_CLAUSE_SHARED:
11005 flags = GOVD_SHARED | GOVD_EXPLICIT;
11006 goto do_add;
11007 case OMP_CLAUSE_FIRSTPRIVATE:
11008 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11009 check_non_private = "firstprivate";
11010 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11012 gcc_assert (code == OMP_TARGET);
11013 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
11015 goto do_add;
11016 case OMP_CLAUSE_LASTPRIVATE:
11017 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11018 switch (code)
11020 case OMP_DISTRIBUTE:
11021 error_at (OMP_CLAUSE_LOCATION (c),
11022 "conditional %<lastprivate%> clause on "
11023 "%qs construct", "distribute");
11024 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11025 break;
11026 case OMP_TASKLOOP:
11027 error_at (OMP_CLAUSE_LOCATION (c),
11028 "conditional %<lastprivate%> clause on "
11029 "%qs construct", "taskloop");
11030 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11031 break;
11032 default:
11033 break;
11035 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
11036 if (code != OMP_LOOP)
11037 check_non_private = "lastprivate";
11038 decl = OMP_CLAUSE_DECL (c);
11039 if (error_operand_p (decl))
11040 goto do_add;
11041 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
11042 && !lang_hooks.decls.omp_scalar_p (decl, true))
11044 error_at (OMP_CLAUSE_LOCATION (c),
11045 "non-scalar variable %qD in conditional "
11046 "%<lastprivate%> clause", decl);
11047 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
11049 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11050 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
11051 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
11052 false);
11053 goto do_add;
11054 case OMP_CLAUSE_REDUCTION:
11055 if (OMP_CLAUSE_REDUCTION_TASK (c))
11057 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11059 if (nowait == -1)
11060 nowait = omp_find_clause (*list_p,
11061 OMP_CLAUSE_NOWAIT) != NULL_TREE;
11062 if (nowait
11063 && (outer_ctx == NULL
11064 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
11066 error_at (OMP_CLAUSE_LOCATION (c),
11067 "%<task%> reduction modifier on a construct "
11068 "with a %<nowait%> clause");
11069 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
11072 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
11074 error_at (OMP_CLAUSE_LOCATION (c),
11075 "invalid %<task%> reduction modifier on construct "
11076 "other than %<parallel%>, %qs, %<sections%> or "
11077 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
11078 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
11081 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11082 switch (code)
11084 case OMP_SECTIONS:
11085 error_at (OMP_CLAUSE_LOCATION (c),
11086 "%<inscan%> %<reduction%> clause on "
11087 "%qs construct", "sections");
11088 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11089 break;
11090 case OMP_PARALLEL:
11091 error_at (OMP_CLAUSE_LOCATION (c),
11092 "%<inscan%> %<reduction%> clause on "
11093 "%qs construct", "parallel");
11094 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11095 break;
11096 case OMP_TEAMS:
11097 error_at (OMP_CLAUSE_LOCATION (c),
11098 "%<inscan%> %<reduction%> clause on "
11099 "%qs construct", "teams");
11100 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11101 break;
11102 case OMP_TASKLOOP:
11103 error_at (OMP_CLAUSE_LOCATION (c),
11104 "%<inscan%> %<reduction%> clause on "
11105 "%qs construct", "taskloop");
11106 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11107 break;
11108 case OMP_SCOPE:
11109 error_at (OMP_CLAUSE_LOCATION (c),
11110 "%<inscan%> %<reduction%> clause on "
11111 "%qs construct", "scope");
11112 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11113 break;
11114 default:
11115 break;
11117 /* FALLTHRU */
11118 case OMP_CLAUSE_IN_REDUCTION:
11119 case OMP_CLAUSE_TASK_REDUCTION:
11120 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
11121 /* OpenACC permits reductions on private variables. */
11122 if (!(region_type & ORT_ACC)
11123 /* taskgroup is actually not a worksharing region. */
11124 && code != OMP_TASKGROUP)
11125 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
11126 decl = OMP_CLAUSE_DECL (c);
11127 if (TREE_CODE (decl) == MEM_REF)
11129 tree type = TREE_TYPE (decl);
11130 bool saved_into_ssa = gimplify_ctxp->into_ssa;
11131 gimplify_ctxp->into_ssa = false;
11132 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
11133 NULL, is_gimple_val, fb_rvalue, false)
11134 == GS_ERROR)
11136 gimplify_ctxp->into_ssa = saved_into_ssa;
11137 remove = true;
11138 break;
11140 gimplify_ctxp->into_ssa = saved_into_ssa;
11141 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11142 if (DECL_P (v))
11144 omp_firstprivatize_variable (ctx, v);
11145 omp_notice_variable (ctx, v, true);
11147 decl = TREE_OPERAND (decl, 0);
11148 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11150 gimplify_ctxp->into_ssa = false;
11151 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
11152 NULL, is_gimple_val, fb_rvalue, false)
11153 == GS_ERROR)
11155 gimplify_ctxp->into_ssa = saved_into_ssa;
11156 remove = true;
11157 break;
11159 gimplify_ctxp->into_ssa = saved_into_ssa;
11160 v = TREE_OPERAND (decl, 1);
11161 if (DECL_P (v))
11163 omp_firstprivatize_variable (ctx, v);
11164 omp_notice_variable (ctx, v, true);
11166 decl = TREE_OPERAND (decl, 0);
11168 if (TREE_CODE (decl) == ADDR_EXPR
11169 || TREE_CODE (decl) == INDIRECT_REF)
11170 decl = TREE_OPERAND (decl, 0);
11172 goto do_add_decl;
11173 case OMP_CLAUSE_LINEAR:
11174 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
11175 is_gimple_val, fb_rvalue) == GS_ERROR)
11177 remove = true;
11178 break;
11180 else
11182 if (code == OMP_SIMD
11183 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11185 struct gimplify_omp_ctx *octx = outer_ctx;
11186 if (octx
11187 && octx->region_type == ORT_WORKSHARE
11188 && octx->combined_loop
11189 && !octx->distribute)
11191 if (octx->outer_context
11192 && (octx->outer_context->region_type
11193 == ORT_COMBINED_PARALLEL))
11194 octx = octx->outer_context->outer_context;
11195 else
11196 octx = octx->outer_context;
11198 if (octx
11199 && octx->region_type == ORT_WORKSHARE
11200 && octx->combined_loop
11201 && octx->distribute)
11203 error_at (OMP_CLAUSE_LOCATION (c),
11204 "%<linear%> clause for variable other than "
11205 "loop iterator specified on construct "
11206 "combined with %<distribute%>");
11207 remove = true;
11208 break;
11211 /* For a combined #pragma omp parallel for simd, we need to put
11212 lastprivate (and perhaps firstprivate too) on the
11213 parallel. Similarly for #pragma omp for simd. */
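/* Illustrative case: for "#pragma omp parallel for simd linear(x)",
   the do-while below walks outward through the enclosing contexts and
   registers X as GOVD_FIRSTPRIVATE and/or GOVD_LASTPRIVATE (depending
   on the copy-in/copy-out flags), or as GOVD_SHARED on a combined
   parallel or combined teams context.  */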
11214 struct gimplify_omp_ctx *octx = outer_ctx;
11215 bool taskloop_seen = false;
11216 decl = NULL_TREE;
11219 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11220 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11221 break;
11222 decl = OMP_CLAUSE_DECL (c);
11223 if (error_operand_p (decl))
11225 decl = NULL_TREE;
11226 break;
11228 flags = GOVD_SEEN;
11229 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11230 flags |= GOVD_FIRSTPRIVATE;
11231 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11232 flags |= GOVD_LASTPRIVATE;
11233 if (octx
11234 && octx->region_type == ORT_WORKSHARE
11235 && octx->combined_loop)
11237 if (octx->outer_context
11238 && (octx->outer_context->region_type
11239 == ORT_COMBINED_PARALLEL))
11240 octx = octx->outer_context;
11241 else if (omp_check_private (octx, decl, false))
11242 break;
11244 else if (octx
11245 && (octx->region_type & ORT_TASK) != 0
11246 && octx->combined_loop)
11247 taskloop_seen = true;
11248 else if (octx
11249 && octx->region_type == ORT_COMBINED_PARALLEL
11250 && ((ctx->region_type == ORT_WORKSHARE
11251 && octx == outer_ctx)
11252 || taskloop_seen))
11253 flags = GOVD_SEEN | GOVD_SHARED;
11254 else if (octx
11255 && ((octx->region_type & ORT_COMBINED_TEAMS)
11256 == ORT_COMBINED_TEAMS))
11257 flags = GOVD_SEEN | GOVD_SHARED;
11258 else if (octx
11259 && octx->region_type == ORT_COMBINED_TARGET)
11261 if (flags & GOVD_LASTPRIVATE)
11262 flags = GOVD_SEEN | GOVD_MAP;
11264 else
11265 break;
11266 splay_tree_node on
11267 = splay_tree_lookup (octx->variables,
11268 (splay_tree_key) decl);
11269 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11271 octx = NULL;
11272 break;
11274 omp_add_variable (octx, decl, flags);
11275 if (octx->outer_context == NULL)
11276 break;
11277 octx = octx->outer_context;
11279 while (1);
11280 if (octx
11281 && decl
11282 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11283 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11284 omp_notice_variable (octx, decl, true);
11286 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11287 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11288 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11290 notice_outer = false;
11291 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11293 goto do_add;
11295 case OMP_CLAUSE_MAP:
11296 decl = OMP_CLAUSE_DECL (c);
11297 if (error_operand_p (decl))
11298 remove = true;
11299 switch (code)
11301 case OMP_TARGET:
11302 break;
11303 case OACC_DATA:
11304 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11305 break;
11306 /* FALLTHRU */
11307 case OMP_TARGET_DATA:
11308 case OMP_TARGET_ENTER_DATA:
11309 case OMP_TARGET_EXIT_DATA:
11310 case OACC_ENTER_DATA:
11311 case OACC_EXIT_DATA:
11312 case OACC_HOST_DATA:
11313 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11314 || (OMP_CLAUSE_MAP_KIND (c)
11315 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11316 /* For target {,enter ,exit }data only the array section is
11317 mapped, but not the pointer to it. */
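/* E.g. for "#pragma omp target enter data map(to: p[0:n])" only the
   section p[0:n] is mapped; the GOMP_MAP_FIRSTPRIVATE_POINTER node
   created for P itself is dropped here, as there is no region body in
   which to firstprivatize it (illustrative directive).  */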
11318 remove = true;
11319 break;
11320 default:
11321 break;
11323 if (remove)
11324 break;
11325 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11327 struct gimplify_omp_ctx *octx;
11328 for (octx = outer_ctx; octx; octx = octx->outer_context)
11330 if (octx->region_type != ORT_ACC_HOST_DATA)
11331 break;
11332 splay_tree_node n2
11333 = splay_tree_lookup (octx->variables,
11334 (splay_tree_key) decl);
11335 if (n2)
11336 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11337 "declared in enclosing %<host_data%> region",
11338 DECL_NAME (decl));
11341 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11342 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11343 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11344 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11345 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11347 remove = true;
11348 break;
11350 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11351 || (OMP_CLAUSE_MAP_KIND (c)
11352 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11353 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11354 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11356 OMP_CLAUSE_SIZE (c)
11357 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11358 false);
11359 if ((region_type & ORT_TARGET) != 0)
11360 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11361 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11364 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11366 tree base = omp_strip_components_and_deref (decl);
11367 if (DECL_P (base))
11369 decl = base;
11370 splay_tree_node n
11371 = splay_tree_lookup (ctx->variables,
11372 (splay_tree_key) decl);
11373 if (seen_error ()
11374 && n
11375 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11377 remove = true;
11378 break;
11380 flags = GOVD_MAP | GOVD_EXPLICIT;
11382 goto do_add_decl;
11386 if (TREE_CODE (decl) == TARGET_EXPR)
11388 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11389 is_gimple_lvalue, fb_lvalue)
11390 == GS_ERROR)
11391 remove = true;
11393 else if (!DECL_P (decl))
11395 tree d = decl, *pd;
11396 if (TREE_CODE (d) == ARRAY_REF)
11398 while (TREE_CODE (d) == ARRAY_REF)
11399 d = TREE_OPERAND (d, 0);
11400 if (TREE_CODE (d) == COMPONENT_REF
11401 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11402 decl = d;
11404 pd = &OMP_CLAUSE_DECL (c);
11405 if (d == decl
11406 && TREE_CODE (decl) == INDIRECT_REF
11407 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11408 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11409 == REFERENCE_TYPE)
11410 && (OMP_CLAUSE_MAP_KIND (c)
11411 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11413 pd = &TREE_OPERAND (decl, 0);
11414 decl = TREE_OPERAND (decl, 0);
11416 /* An "attach/detach" operation on an update directive should
11417 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11418 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11419 depends on the previous mapping. */
11420 if (code == OACC_UPDATE
11421 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11422 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11424 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11426 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11427 == ARRAY_TYPE)
11428 remove = true;
11429 else
11431 gomp_map_kind k = ((code == OACC_EXIT_DATA
11432 || code == OMP_TARGET_EXIT_DATA)
11433 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11434 OMP_CLAUSE_SET_MAP_KIND (c, k);
11438 tree cref = decl;
11440 while (TREE_CODE (cref) == ARRAY_REF)
11441 cref = TREE_OPERAND (cref, 0);
11443 if (TREE_CODE (cref) == INDIRECT_REF)
11444 cref = TREE_OPERAND (cref, 0);
11446 if (TREE_CODE (cref) == COMPONENT_REF)
11448 tree base = cref;
11449 while (base && !DECL_P (base))
11451 tree innerbase = omp_get_base_pointer (base);
11452 if (!innerbase)
11453 break;
11454 base = innerbase;
11456 if (base
11457 && DECL_P (base)
11458 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11459 && POINTER_TYPE_P (TREE_TYPE (base)))
11461 splay_tree_node n
11462 = splay_tree_lookup (ctx->variables,
11463 (splay_tree_key) base);
11464 n->value |= GOVD_SEEN;
11468 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11470 /* Don't gimplify *pd fully at this point, as the base
11471 will need to be adjusted during omp lowering. */
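/* Concretely, the walk below gimplifies only the auxiliary operands
   of the refs collected on the stack -- array bounds and element
   sizes (operands 2 and 3 of ARRAY_REF) and field offsets (operand 2
   of COMPONENT_REF) plus the array indices -- leaving the base
   expression itself alone.  */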
11472 auto_vec<tree, 10> expr_stack;
11473 tree *p = pd;
11474 while (handled_component_p (*p)
11475 || TREE_CODE (*p) == INDIRECT_REF
11476 || TREE_CODE (*p) == ADDR_EXPR
11477 || TREE_CODE (*p) == MEM_REF
11478 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11480 expr_stack.safe_push (*p);
11481 p = &TREE_OPERAND (*p, 0);
11483 for (int i = expr_stack.length () - 1; i >= 0; i--)
11485 tree t = expr_stack[i];
11486 if (TREE_CODE (t) == ARRAY_REF
11487 || TREE_CODE (t) == ARRAY_RANGE_REF)
11489 if (TREE_OPERAND (t, 2) == NULL_TREE)
11491 tree low = unshare_expr (array_ref_low_bound (t));
11492 if (!is_gimple_min_invariant (low))
11494 TREE_OPERAND (t, 2) = low;
11495 if (gimplify_expr (&TREE_OPERAND (t, 2),
11496 pre_p, NULL,
11497 is_gimple_reg,
11498 fb_rvalue) == GS_ERROR)
11499 remove = true;
11502 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11503 NULL, is_gimple_reg,
11504 fb_rvalue) == GS_ERROR)
11505 remove = true;
11506 if (TREE_OPERAND (t, 3) == NULL_TREE)
11508 tree elmt_size = array_ref_element_size (t);
11509 if (!is_gimple_min_invariant (elmt_size))
11511 elmt_size = unshare_expr (elmt_size);
11512 tree elmt_type
11513 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11514 0)));
11515 tree factor
11516 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11517 elmt_size
11518 = size_binop (EXACT_DIV_EXPR, elmt_size,
11519 factor);
11520 TREE_OPERAND (t, 3) = elmt_size;
11521 if (gimplify_expr (&TREE_OPERAND (t, 3),
11522 pre_p, NULL,
11523 is_gimple_reg,
11524 fb_rvalue) == GS_ERROR)
11525 remove = true;
11528 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11529 NULL, is_gimple_reg,
11530 fb_rvalue) == GS_ERROR)
11531 remove = true;
11533 else if (TREE_CODE (t) == COMPONENT_REF)
11535 if (TREE_OPERAND (t, 2) == NULL_TREE)
11537 tree offset = component_ref_field_offset (t);
11538 if (!is_gimple_min_invariant (offset))
11540 offset = unshare_expr (offset);
11541 tree field = TREE_OPERAND (t, 1);
11542 tree factor
11543 = size_int (DECL_OFFSET_ALIGN (field)
11544 / BITS_PER_UNIT);
11545 offset = size_binop (EXACT_DIV_EXPR, offset,
11546 factor);
11547 TREE_OPERAND (t, 2) = offset;
11548 if (gimplify_expr (&TREE_OPERAND (t, 2),
11549 pre_p, NULL,
11550 is_gimple_reg,
11551 fb_rvalue) == GS_ERROR)
11552 remove = true;
11555 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11556 NULL, is_gimple_reg,
11557 fb_rvalue) == GS_ERROR)
11558 remove = true;
11561 for (; expr_stack.length () > 0; )
11563 tree t = expr_stack.pop ();
11565 if (TREE_CODE (t) == ARRAY_REF
11566 || TREE_CODE (t) == ARRAY_RANGE_REF)
11568 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11569 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11570 NULL, is_gimple_val,
11571 fb_rvalue) == GS_ERROR)
11572 remove = true;
11576 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11577 fb_lvalue) == GS_ERROR)
11579 remove = true;
11580 break;
11582 break;
11584 flags = GOVD_MAP | GOVD_EXPLICIT;
11585 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11586 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11587 flags |= GOVD_MAP_ALWAYS_TO;
11589 if ((code == OMP_TARGET
11590 || code == OMP_TARGET_DATA
11591 || code == OMP_TARGET_ENTER_DATA
11592 || code == OMP_TARGET_EXIT_DATA)
11593 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11595 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11596 octx = octx->outer_context)
11598 splay_tree_node n
11599 = splay_tree_lookup (octx->variables,
11600 (splay_tree_key) OMP_CLAUSE_DECL (c));
11601 /* If this is contained in an outer OpenMP region as a
11602 firstprivate value, remove the attach/detach. */
11603 if (n && (n->value & GOVD_FIRSTPRIVATE))
11605 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11606 goto do_add;
11610 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11611 ? GOMP_MAP_DETACH
11612 : GOMP_MAP_ATTACH);
11613 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11616 goto do_add;
11618 case OMP_CLAUSE_AFFINITY:
11619 gimplify_omp_affinity (list_p, pre_p);
11620 remove = true;
11621 break;
11622 case OMP_CLAUSE_DOACROSS:
11623 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11625 tree deps = OMP_CLAUSE_DECL (c);
11626 while (deps && TREE_CODE (deps) == TREE_LIST)
11628 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11629 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11630 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11631 pre_p, NULL, is_gimple_val, fb_rvalue);
11632 deps = TREE_CHAIN (deps);
11635 else
11636 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11637 == OMP_CLAUSE_DOACROSS_SOURCE);
11638 break;
11639 case OMP_CLAUSE_DEPEND:
11640 if (handled_depend_iterators == -1)
11641 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11642 if (handled_depend_iterators)
11644 if (handled_depend_iterators == 2)
11645 remove = true;
11646 break;
11648 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11650 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11651 NULL, is_gimple_val, fb_rvalue);
11652 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11654 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11656 remove = true;
11657 break;
11659 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11661 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11662 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11663 is_gimple_val, fb_rvalue) == GS_ERROR)
11665 remove = true;
11666 break;
11669 if (code == OMP_TASK)
11670 ctx->has_depend = true;
11671 break;
11673 case OMP_CLAUSE_TO:
11674 case OMP_CLAUSE_FROM:
11675 case OMP_CLAUSE__CACHE_:
11676 decl = OMP_CLAUSE_DECL (c);
11677 if (error_operand_p (decl))
11679 remove = true;
11680 break;
11682 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11683 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11684 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11685 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11686 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11688 remove = true;
11689 break;
11691 if (!DECL_P (decl))
11693 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11694 NULL, is_gimple_lvalue, fb_lvalue)
11695 == GS_ERROR)
11697 remove = true;
11698 break;
11700 break;
11702 goto do_notice;
11704 case OMP_CLAUSE_USE_DEVICE_PTR:
11705 case OMP_CLAUSE_USE_DEVICE_ADDR:
11706 flags = GOVD_EXPLICIT;
11707 goto do_add;
11709 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11710 decl = OMP_CLAUSE_DECL (c);
11711 while (TREE_CODE (decl) == INDIRECT_REF
11712 || TREE_CODE (decl) == ARRAY_REF)
11713 decl = TREE_OPERAND (decl, 0);
11714 flags = GOVD_EXPLICIT;
11715 goto do_add_decl;
11717 case OMP_CLAUSE_IS_DEVICE_PTR:
11718 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11719 goto do_add;
11721 do_add:
11722 decl = OMP_CLAUSE_DECL (c);
11723 do_add_decl:
11724 if (error_operand_p (decl))
11726 remove = true;
11727 break;
11729 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11731 tree t = omp_member_access_dummy_var (decl);
11732 if (t)
11734 tree v = DECL_VALUE_EXPR (decl);
11735 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11736 if (outer_ctx)
11737 omp_notice_variable (outer_ctx, t, true);
11740 if (code == OACC_DATA
11741 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11742 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11743 flags |= GOVD_MAP_0LEN_ARRAY;
11744 omp_add_variable (ctx, decl, flags);
11745 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11746 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11747 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11748 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11750 struct gimplify_omp_ctx *pctx
11751 = code == OMP_TARGET ? outer_ctx : ctx;
11752 if (pctx)
11753 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11754 GOVD_LOCAL | GOVD_SEEN);
11755 if (pctx
11756 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11757 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11758 find_decl_expr,
11759 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11760 NULL) == NULL_TREE)
11761 omp_add_variable (pctx,
11762 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11763 GOVD_LOCAL | GOVD_SEEN);
11764 gimplify_omp_ctxp = pctx;
11765 push_gimplify_context ();
11767 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11768 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11770 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11771 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11772 pop_gimplify_context
11773 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11774 push_gimplify_context ();
11775 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11776 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11777 pop_gimplify_context
11778 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11779 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11780 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11782 gimplify_omp_ctxp = outer_ctx;
11784 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11785 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11787 gimplify_omp_ctxp = ctx;
11788 push_gimplify_context ();
11789 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11791 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11792 NULL, NULL);
11793 TREE_SIDE_EFFECTS (bind) = 1;
11794 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11795 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11797 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11798 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11799 pop_gimplify_context
11800 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11801 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11803 gimplify_omp_ctxp = outer_ctx;
11805 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11806 && OMP_CLAUSE_LINEAR_STMT (c))
11808 gimplify_omp_ctxp = ctx;
11809 push_gimplify_context ();
11810 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11812 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11813 NULL, NULL);
11814 TREE_SIDE_EFFECTS (bind) = 1;
11815 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11816 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11818 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11819 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11820 pop_gimplify_context
11821 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11822 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11824 gimplify_omp_ctxp = outer_ctx;
11826 if (notice_outer)
11827 goto do_notice;
11828 break;
11830 case OMP_CLAUSE_COPYIN:
11831 case OMP_CLAUSE_COPYPRIVATE:
11832 decl = OMP_CLAUSE_DECL (c);
11833 if (error_operand_p (decl))
11835 remove = true;
11836 break;
11838 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11839 && !remove
11840 && !omp_check_private (ctx, decl, true))
11842 remove = true;
11843 if (is_global_var (decl))
11845 if (DECL_THREAD_LOCAL_P (decl))
11846 remove = false;
11847 else if (DECL_HAS_VALUE_EXPR_P (decl))
11849 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11851 if (value
11852 && DECL_P (value)
11853 && DECL_THREAD_LOCAL_P (value))
11854 remove = false;
11857 if (remove)
11858 error_at (OMP_CLAUSE_LOCATION (c),
11859 "copyprivate variable %qE is not threadprivate"
11860 " or private in outer context", DECL_NAME (decl));
11862 do_notice:
11863 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11864 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11865 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11866 && outer_ctx
11867 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11868 || (region_type == ORT_WORKSHARE
11869 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11870 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11871 || code == OMP_LOOP)))
11872 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11873 || (code == OMP_LOOP
11874 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11875 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11876 == ORT_COMBINED_TEAMS))))
11878 splay_tree_node on
11879 = splay_tree_lookup (outer_ctx->variables,
11880 (splay_tree_key)decl);
11881 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11883 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11884 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11885 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11886 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11887 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11888 == POINTER_TYPE))))
11889 omp_firstprivatize_variable (outer_ctx, decl);
11890 else
11892 omp_add_variable (outer_ctx, decl,
11893 GOVD_SEEN | GOVD_SHARED);
11894 if (outer_ctx->outer_context)
11895 omp_notice_variable (outer_ctx->outer_context, decl,
11896 true);
11900 if (outer_ctx)
11901 omp_notice_variable (outer_ctx, decl, true);
11902 if (check_non_private
11903 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11904 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11905 || decl == OMP_CLAUSE_DECL (c)
11906 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11907 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11908 == ADDR_EXPR
11909 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11910 == POINTER_PLUS_EXPR
11911 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11912 (OMP_CLAUSE_DECL (c), 0), 0))
11913 == ADDR_EXPR)))))
11914 && omp_check_private (ctx, decl, false))
11916 error ("%s variable %qE is private in outer context",
11917 check_non_private, DECL_NAME (decl));
11918 remove = true;
11920 break;
11922 case OMP_CLAUSE_DETACH:
11923 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11924 goto do_add;
11926 case OMP_CLAUSE_IF:
11927 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11928 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11930 const char *p[2];
11931 for (int i = 0; i < 2; i++)
11932 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11934 case VOID_CST: p[i] = "cancel"; break;
11935 case OMP_PARALLEL: p[i] = "parallel"; break;
11936 case OMP_SIMD: p[i] = "simd"; break;
11937 case OMP_TASK: p[i] = "task"; break;
11938 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11939 case OMP_TARGET_DATA: p[i] = "target data"; break;
11940 case OMP_TARGET: p[i] = "target"; break;
11941 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11942 case OMP_TARGET_ENTER_DATA:
11943 p[i] = "target enter data"; break;
11944 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11945 default: gcc_unreachable ();
11947 error_at (OMP_CLAUSE_LOCATION (c),
11948 "expected %qs %<if%> clause modifier rather than %qs",
11949 p[0], p[1]);
11950 remove = true;
11952 /* Fall through. */
11954 case OMP_CLAUSE_FINAL:
11955 OMP_CLAUSE_OPERAND (c, 0)
11956 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11957 /* Fall through. */
11959 case OMP_CLAUSE_NUM_TEAMS:
11960 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11961 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11962 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11964 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11966 remove = true;
11967 break;
11969 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11970 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11971 pre_p, NULL, true);
11973 /* Fall through. */
11975 case OMP_CLAUSE_SCHEDULE:
11976 case OMP_CLAUSE_NUM_THREADS:
11977 case OMP_CLAUSE_THREAD_LIMIT:
11978 case OMP_CLAUSE_DIST_SCHEDULE:
11979 case OMP_CLAUSE_DEVICE:
11980 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11981 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11983 if (code != OMP_TARGET)
11985 error_at (OMP_CLAUSE_LOCATION (c),
11986 "%<device%> clause with %<ancestor%> is only "
11987 "allowed on %<target%> construct");
11988 remove = true;
11989 break;
11992 tree clauses = *orig_list_p;
11993 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11994 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11995 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11996 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11997 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11998 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
12001 error_at (OMP_CLAUSE_LOCATION (c),
12002 "with %<ancestor%>, only the %<device%>, "
12003 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
12004 "and %<map%> clauses may appear on the "
12005 "construct");
12006 remove = true;
12007 break;
12010 /* Fall through. */
12012 case OMP_CLAUSE_PRIORITY:
12013 case OMP_CLAUSE_GRAINSIZE:
12014 case OMP_CLAUSE_NUM_TASKS:
12015 case OMP_CLAUSE_FILTER:
12016 case OMP_CLAUSE_HINT:
12017 case OMP_CLAUSE_ASYNC:
12018 case OMP_CLAUSE_WAIT:
12019 case OMP_CLAUSE_NUM_GANGS:
12020 case OMP_CLAUSE_NUM_WORKERS:
12021 case OMP_CLAUSE_VECTOR_LENGTH:
12022 case OMP_CLAUSE_WORKER:
12023 case OMP_CLAUSE_VECTOR:
12024 if (OMP_CLAUSE_OPERAND (c, 0)
12025 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
12027 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
12029 remove = true;
12030 break;
12032 /* All these clauses care about the value, not a particular decl,
12033 so try to force it into an SSA_NAME or a fresh temporary. */
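/* E.g. a "num_threads (n * 4)" operand (when it is not already
   invariant) is evaluated once into a fresh temporary emitted on
   *pre_p, and the clause then refers to that temporary or SSA_NAME
   instead of re-evaluating the expression.  */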
12034 OMP_CLAUSE_OPERAND (c, 0)
12035 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
12036 pre_p, NULL, true);
12038 break;
12040 case OMP_CLAUSE_GANG:
12041 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
12042 is_gimple_val, fb_rvalue) == GS_ERROR)
12043 remove = true;
12044 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
12045 is_gimple_val, fb_rvalue) == GS_ERROR)
12046 remove = true;
12047 break;
12049 case OMP_CLAUSE_NOWAIT:
12050 nowait = 1;
12051 break;
12053 case OMP_CLAUSE_ORDERED:
12054 case OMP_CLAUSE_UNTIED:
12055 case OMP_CLAUSE_COLLAPSE:
12056 case OMP_CLAUSE_TILE:
12057 case OMP_CLAUSE_AUTO:
12058 case OMP_CLAUSE_SEQ:
12059 case OMP_CLAUSE_INDEPENDENT:
12060 case OMP_CLAUSE_MERGEABLE:
12061 case OMP_CLAUSE_PROC_BIND:
12062 case OMP_CLAUSE_SAFELEN:
12063 case OMP_CLAUSE_SIMDLEN:
12064 case OMP_CLAUSE_NOGROUP:
12065 case OMP_CLAUSE_THREADS:
12066 case OMP_CLAUSE_SIMD:
12067 case OMP_CLAUSE_BIND:
12068 case OMP_CLAUSE_IF_PRESENT:
12069 case OMP_CLAUSE_FINALIZE:
12070 break;
12072 case OMP_CLAUSE_ORDER:
12073 ctx->order_concurrent = true;
12074 break;
12076 case OMP_CLAUSE_DEFAULTMAP:
12077 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
12078 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
12080 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
12081 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
12082 gdmkmin = GDMK_SCALAR;
12083 gdmkmax = GDMK_POINTER;
12084 break;
12085 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
12086 gdmkmin = GDMK_SCALAR;
12087 gdmkmax = GDMK_SCALAR_TARGET;
12088 break;
12089 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
12090 gdmkmin = gdmkmax = GDMK_AGGREGATE;
12091 break;
12092 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
12093 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
12094 break;
12095 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
12096 gdmkmin = gdmkmax = GDMK_POINTER;
12097 break;
12098 default:
12099 gcc_unreachable ();
12101 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
12102 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
12104 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
12105 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
12106 break;
12107 case OMP_CLAUSE_DEFAULTMAP_TO:
12108 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
12109 break;
12110 case OMP_CLAUSE_DEFAULTMAP_FROM:
12111 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
12112 break;
12113 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
12114 ctx->defaultmap[gdmk] = GOVD_MAP;
12115 break;
12116 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
12117 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12118 break;
12119 case OMP_CLAUSE_DEFAULTMAP_NONE:
12120 ctx->defaultmap[gdmk] = 0;
12121 break;
12122 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
12123 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
12124 break;
12125 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
12126 switch (gdmk)
12128 case GDMK_SCALAR:
12129 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12130 break;
12131 case GDMK_SCALAR_TARGET:
12132 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
12133 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
12134 break;
12135 case GDMK_AGGREGATE:
12136 case GDMK_ALLOCATABLE:
12137 ctx->defaultmap[gdmk] = GOVD_MAP;
12138 break;
12139 case GDMK_POINTER:
12140 ctx->defaultmap[gdmk] = GOVD_MAP;
12141 if (!lang_GNU_Fortran ())
12142 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
12143 break;
12144 default:
12145 gcc_unreachable ();
12147 break;
12148 default:
12149 gcc_unreachable ();
12151 break;
12153 case OMP_CLAUSE_ALIGNED:
12154 decl = OMP_CLAUSE_DECL (c);
12155 if (error_operand_p (decl))
12157 remove = true;
12158 break;
12160 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12161 is_gimple_val, fb_rvalue) == GS_ERROR)
12163 remove = true;
12164 break;
12166 if (!is_global_var (decl)
12167 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12168 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12169 break;
12171 case OMP_CLAUSE_NONTEMPORAL:
12172 decl = OMP_CLAUSE_DECL (c);
12173 if (error_operand_p (decl))
12175 remove = true;
12176 break;
12178 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12179 break;
12181 case OMP_CLAUSE_ALLOCATE:
12182 decl = OMP_CLAUSE_DECL (c);
12183 if (error_operand_p (decl))
12185 remove = true;
12186 break;
12188 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12189 is_gimple_val, fb_rvalue) == GS_ERROR)
12191 remove = true;
12192 break;
12194 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12195 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12196 == INTEGER_CST))
12198 else if (code == OMP_TASKLOOP
12199 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12200 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12201 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12202 pre_p, NULL, false);
12203 break;
12205 case OMP_CLAUSE_DEFAULT:
12206 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12207 break;
12209 case OMP_CLAUSE_INCLUSIVE:
12210 case OMP_CLAUSE_EXCLUSIVE:
12211 decl = OMP_CLAUSE_DECL (c);
12213 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12214 (splay_tree_key) decl);
12215 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12217 error_at (OMP_CLAUSE_LOCATION (c),
12218 "%qD specified in %qs clause but not in %<inscan%> "
12219 "%<reduction%> clause on the containing construct",
12220 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12221 remove = true;
12223 else
12225 n->value |= GOVD_REDUCTION_INSCAN;
12226 if (outer_ctx->region_type == ORT_SIMD
12227 && outer_ctx->outer_context
12228 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12230 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12231 (splay_tree_key) decl);
12232 if (n && (n->value & GOVD_REDUCTION) != 0)
12233 n->value |= GOVD_REDUCTION_INSCAN;
12237 break;
12239 case OMP_CLAUSE_NOHOST:
12240 default:
12241 gcc_unreachable ();
12244 if (code == OACC_DATA
12245 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12246 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12247 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12248 remove = true;
12249 if (remove)
12250 *list_p = OMP_CLAUSE_CHAIN (c);
12251 else
12252 list_p = &OMP_CLAUSE_CHAIN (c);
12255 ctx->clauses = *orig_list_p;
12256 gimplify_omp_ctxp = ctx;
12259 /* Return true if DECL is a candidate for the shared-to-firstprivate
12260 optimization. We only consider non-addressable scalars that are
12261 not too large and are not references. */
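/* For instance, a local "int i" whose address is never taken
   qualifies, while a reference, an addressable variable, or anything
   larger than four pointers' worth of bytes (see the size check
   below) does not.  */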
12263 static bool
12264 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12266 if (TREE_ADDRESSABLE (decl))
12267 return false;
12268 tree type = TREE_TYPE (decl);
12269 if (!is_gimple_reg_type (type)
12270 || TREE_CODE (type) == REFERENCE_TYPE
12271 || TREE_ADDRESSABLE (type))
12272 return false;
12273 /* Don't optimize too-large decls, as each thread/task will have
12274 its own copy. */
12275 HOST_WIDE_INT len = int_size_in_bytes (type);
12276 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12277 return false;
12278 if (omp_privatize_by_reference (decl))
12279 return false;
12280 return true;
12283 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12284 For an omp_shared_to_firstprivate_optimizable_decl_p DECL, mark it
12285 as GOVD_WRITTEN in outer contexts. */
12287 static void
12288 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12290 for (; ctx; ctx = ctx->outer_context)
12292 splay_tree_node n = splay_tree_lookup (ctx->variables,
12293 (splay_tree_key) decl);
12294 if (n == NULL)
12295 continue;
12296 else if (n->value & GOVD_SHARED)
12298 n->value |= GOVD_WRITTEN;
12299 return;
12301 else if (n->value & GOVD_DATA_SHARE_CLASS)
12302 return;
12306 /* Helper callback for walk_gimple_seq to discover possible stores
12307 to omp_shared_to_firstprivate_optimizable_decl_p decls, and to set
12308 GOVD_WRITTEN on those that are GOVD_SHARED in some outer
12309 context. */
12311 static tree
12312 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12314 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12316 *walk_subtrees = 0;
12317 if (!wi->is_lhs)
12318 return NULL_TREE;
12320 tree op = *tp;
12323 if (handled_component_p (op))
12324 op = TREE_OPERAND (op, 0);
12325 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12326 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12327 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12328 else
12329 break;
12331 while (1);
12332 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12333 return NULL_TREE;
12335 omp_mark_stores (gimplify_omp_ctxp, op);
12336 return NULL_TREE;
12339 /* Helper callback for walk_gimple_seq to discover possible stores
12340 to omp_shared_to_firstprivate_optimizable_decl_p decls, and to set
12341 GOVD_WRITTEN on those that are GOVD_SHARED in some outer
12342 context. */
12344 static tree
12345 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12346 bool *handled_ops_p,
12347 struct walk_stmt_info *wi)
12349 gimple *stmt = gsi_stmt (*gsi_p);
12350 switch (gimple_code (stmt))
12352 /* Don't recurse on OpenMP constructs for which
12353 gimplify_adjust_omp_clauses already handled the bodies,
12354 but do still handle gimple_omp_for_pre_body. */
12355 case GIMPLE_OMP_FOR:
12356 *handled_ops_p = true;
12357 if (gimple_omp_for_pre_body (stmt))
12358 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12359 omp_find_stores_stmt, omp_find_stores_op, wi);
12360 break;
12361 case GIMPLE_OMP_PARALLEL:
12362 case GIMPLE_OMP_TASK:
12363 case GIMPLE_OMP_SECTIONS:
12364 case GIMPLE_OMP_SINGLE:
12365 case GIMPLE_OMP_SCOPE:
12366 case GIMPLE_OMP_TARGET:
12367 case GIMPLE_OMP_TEAMS:
12368 case GIMPLE_OMP_CRITICAL:
12369 *handled_ops_p = true;
12370 break;
12371 default:
12372 break;
12374 return NULL_TREE;
12377 struct gimplify_adjust_omp_clauses_data
12379 tree *list_p;
12380 gimple_seq *pre_p;
12383 /* For all variables that were not actually used within the context,
12384 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
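/* (This is a splay-tree traversal callback: N is one entry of the
   context's variable table, and DATA wraps the clause list and the
   pre-statement sequence via gimplify_adjust_omp_clauses_data
   above.)  */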
12386 static int
12387 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12389 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12390 gimple_seq *pre_p
12391 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12392 tree decl = (tree) n->key;
12393 unsigned flags = n->value;
12394 enum omp_clause_code code;
12395 tree clause;
12396 bool private_debug;
12398 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12399 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12400 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12401 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12402 return 0;
12403 if ((flags & GOVD_SEEN) == 0)
12404 return 0;
12405 if (flags & GOVD_DEBUG_PRIVATE)
12407 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12408 private_debug = true;
12410 else if (flags & GOVD_MAP)
12411 private_debug = false;
12412 else
12413 private_debug
12414 = lang_hooks.decls.omp_private_debug_clause (decl,
12415 !!(flags & GOVD_SHARED));
12416 if (private_debug)
12417 code = OMP_CLAUSE_PRIVATE;
12418 else if (flags & GOVD_MAP)
12420 code = OMP_CLAUSE_MAP;
12421 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12422 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12424 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12425 return 0;
12427 if (VAR_P (decl)
12428 && DECL_IN_CONSTANT_POOL (decl)
12429 && !lookup_attribute ("omp declare target",
12430 DECL_ATTRIBUTES (decl)))
12432 tree id = get_identifier ("omp declare target");
12433 DECL_ATTRIBUTES (decl)
12434 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12435 varpool_node *node = varpool_node::get (decl);
12436 if (node)
12438 node->offloadable = 1;
12439 if (ENABLE_OFFLOADING)
12440 g->have_offload = true;
12444 else if (flags & GOVD_SHARED)
12446 if (is_global_var (decl))
12448 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12449 while (ctx != NULL)
12451 splay_tree_node on
12452 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12453 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12454 | GOVD_PRIVATE | GOVD_REDUCTION
12455 | GOVD_LINEAR | GOVD_MAP)) != 0)
12456 break;
12457 ctx = ctx->outer_context;
12459 if (ctx == NULL)
12460 return 0;
12462 code = OMP_CLAUSE_SHARED;
12463 /* Don't optimize shared into firstprivate for read-only vars
12464 on tasks with a depend clause; we shouldn't try to copy them
12465 until the dependencies are satisfied. */
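/* (Forcing GOVD_WRITTEN here prevents OMP_CLAUSE_SHARED_READONLY
   from being set on the clause built further below, so the
   shared-to-firstprivate copy is not attempted.)  */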
12466 if (gimplify_omp_ctxp->has_depend)
12467 flags |= GOVD_WRITTEN;
12469 else if (flags & GOVD_PRIVATE)
12470 code = OMP_CLAUSE_PRIVATE;
12471 else if (flags & GOVD_FIRSTPRIVATE)
12473 code = OMP_CLAUSE_FIRSTPRIVATE;
12474 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12475 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12476 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12478 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12479 "%<target%> construct", decl);
12480 return 0;
12483 else if (flags & GOVD_LASTPRIVATE)
12484 code = OMP_CLAUSE_LASTPRIVATE;
12485 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12486 return 0;
12487 else if (flags & GOVD_CONDTEMP)
12489 code = OMP_CLAUSE__CONDTEMP_;
12490 gimple_add_tmp_var (decl);
12492 else
12493 gcc_unreachable ();
12495 if (((flags & GOVD_LASTPRIVATE)
12496 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12497 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12498 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12500 tree chain = *list_p;
12501 clause = build_omp_clause (input_location, code);
12502 OMP_CLAUSE_DECL (clause) = decl;
12503 OMP_CLAUSE_CHAIN (clause) = chain;
12504 if (private_debug)
12505 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12506 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12507 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12508 else if (code == OMP_CLAUSE_SHARED
12509 && (flags & GOVD_WRITTEN) == 0
12510 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12511 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12512 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12513 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12514 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12516 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12517 OMP_CLAUSE_DECL (nc) = decl;
12518 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12519 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12520 OMP_CLAUSE_DECL (clause)
12521 = build_simple_mem_ref_loc (input_location, decl);
12522 OMP_CLAUSE_DECL (clause)
12523 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12524 build_int_cst (build_pointer_type (char_type_node), 0));
12525 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12526 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12527 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12528 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12529 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12530 OMP_CLAUSE_CHAIN (nc) = chain;
12531 OMP_CLAUSE_CHAIN (clause) = nc;
12532 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12533 gimplify_omp_ctxp = ctx->outer_context;
12534 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12535 pre_p, NULL, is_gimple_val, fb_rvalue);
12536 gimplify_omp_ctxp = ctx;
12538 else if (code == OMP_CLAUSE_MAP)
12540 int kind;
12541 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12542 switch (flags & (GOVD_MAP_TO_ONLY
12543 | GOVD_MAP_FORCE
12544 | GOVD_MAP_FORCE_PRESENT
12545 | GOVD_MAP_ALLOC_ONLY
12546 | GOVD_MAP_FROM_ONLY))
12548 case 0:
12549 kind = GOMP_MAP_TOFROM;
12550 break;
12551 case GOVD_MAP_FORCE:
12552 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12553 break;
12554 case GOVD_MAP_TO_ONLY:
12555 kind = GOMP_MAP_TO;
12556 break;
12557 case GOVD_MAP_FROM_ONLY:
12558 kind = GOMP_MAP_FROM;
12559 break;
12560 case GOVD_MAP_ALLOC_ONLY:
12561 kind = GOMP_MAP_ALLOC;
12562 break;
12563 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12564 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12565 break;
12566 case GOVD_MAP_FORCE_PRESENT:
12567 kind = GOMP_MAP_FORCE_PRESENT;
12568 break;
12569 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
12570 kind = GOMP_MAP_FORCE_PRESENT;
12571 break;
12572 default:
12573 gcc_unreachable ();
12575 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12576 /* Setting of the implicit flag for the runtime is currently disabled for
12577 OpenACC. */
12578 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12579 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12580 if (DECL_SIZE (decl)
12581 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12583 tree decl2 = DECL_VALUE_EXPR (decl);
12584 gcc_assert (INDIRECT_REF_P (decl2));
12585 decl2 = TREE_OPERAND (decl2, 0);
12586 gcc_assert (DECL_P (decl2));
12587 tree mem = build_simple_mem_ref (decl2);
12588 OMP_CLAUSE_DECL (clause) = mem;
12589 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12590 if (gimplify_omp_ctxp->outer_context)
12592 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12593 omp_notice_variable (ctx, decl2, true);
12594 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12596 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12597 OMP_CLAUSE_MAP);
12598 OMP_CLAUSE_DECL (nc) = decl;
12599 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12600 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12601 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12602 else
12603 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12604 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12605 OMP_CLAUSE_CHAIN (clause) = nc;
12607 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12608 && omp_privatize_by_reference (decl))
12610 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12611 OMP_CLAUSE_SIZE (clause)
12612 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12613 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12614 gimplify_omp_ctxp = ctx->outer_context;
12615 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12616 pre_p, NULL, is_gimple_val, fb_rvalue);
12617 gimplify_omp_ctxp = ctx;
12618 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12619 OMP_CLAUSE_MAP);
12620 OMP_CLAUSE_DECL (nc) = decl;
12621 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12622 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12623 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12624 OMP_CLAUSE_CHAIN (clause) = nc;
12626 else
12627 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12629 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12631 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12632 OMP_CLAUSE_DECL (nc) = decl;
12633 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12634 OMP_CLAUSE_CHAIN (nc) = chain;
12635 OMP_CLAUSE_CHAIN (clause) = nc;
12636 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12637 gimplify_omp_ctxp = ctx->outer_context;
12638 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12639 (ctx->region_type & ORT_ACC) != 0);
12640 gimplify_omp_ctxp = ctx;
12642 *list_p = clause;
12643 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12644 gimplify_omp_ctxp = ctx->outer_context;
12645 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12646 in simd. Those are only added for the local vars inside the simd
12647 body, and they don't need to be e.g. default constructible. */
12648 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12649 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12650 (ctx->region_type & ORT_ACC) != 0);
12651 if (gimplify_omp_ctxp)
12652 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12653 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12654 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12655 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12656 true);
12657 gimplify_omp_ctxp = ctx;
12658 return 0;
12661 static void
12662 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12663 enum tree_code code)
12665 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12666 tree *orig_list_p = list_p;
12667 tree c, decl;
12668 bool has_inscan_reductions = false;
12670 if (body)
12672 struct gimplify_omp_ctx *octx;
12673 for (octx = ctx; octx; octx = octx->outer_context)
12674 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12675 break;
12676 if (octx)
12678 struct walk_stmt_info wi;
12679 memset (&wi, 0, sizeof (wi));
12680 walk_gimple_seq (body, omp_find_stores_stmt,
12681 omp_find_stores_op, &wi);
12685 if (ctx->add_safelen1)
12687 /* If there are VLAs in the body of the simd loop, prevent
12688 vectorization. */
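/* safelen(1) declares that no two iterations may execute
   concurrently, which makes the loop ineligible for
   vectorization.  */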
12689 gcc_assert (ctx->region_type == ORT_SIMD);
12690 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12691 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12692 OMP_CLAUSE_CHAIN (c) = *list_p;
12693 *list_p = c;
12694 list_p = &OMP_CLAUSE_CHAIN (c);
12697 if (ctx->region_type == ORT_WORKSHARE
12698 && ctx->outer_context
12699 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12701 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12702 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12703 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12705 decl = OMP_CLAUSE_DECL (c);
12706 splay_tree_node n
12707 = splay_tree_lookup (ctx->outer_context->variables,
12708 (splay_tree_key) decl);
12709 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12710 (splay_tree_key) decl));
12711 omp_add_variable (ctx, decl, n->value);
12712 tree c2 = copy_node (c);
12713 OMP_CLAUSE_CHAIN (c2) = *list_p;
12714 *list_p = c2;
12715 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12716 continue;
12717 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12718 OMP_CLAUSE_FIRSTPRIVATE);
12719 OMP_CLAUSE_DECL (c2) = decl;
12720 OMP_CLAUSE_CHAIN (c2) = *list_p;
12721 *list_p = c2;
12725 tree attach_list = NULL_TREE;
12726 tree *attach_tail = &attach_list;
12728 while ((c = *list_p) != NULL)
12730 splay_tree_node n;
12731 bool remove = false;
12732 bool move_attach = false;
12734 switch (OMP_CLAUSE_CODE (c))
12736 case OMP_CLAUSE_FIRSTPRIVATE:
12737 if ((ctx->region_type & ORT_TARGET)
12738 && (ctx->region_type & ORT_ACC) == 0
12739 && TYPE_ATOMIC (strip_array_types
12740 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12742 error_at (OMP_CLAUSE_LOCATION (c),
12743 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12744 "%<target%> construct", OMP_CLAUSE_DECL (c));
12745 remove = true;
12746 break;
12748 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12750 decl = OMP_CLAUSE_DECL (c);
12751 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12752 if ((n->value & GOVD_MAP) != 0)
12754 remove = true;
12755 break;
12757 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12758 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12760 /* FALLTHRU */
12761 case OMP_CLAUSE_PRIVATE:
12762 case OMP_CLAUSE_SHARED:
12763 case OMP_CLAUSE_LINEAR:
12764 decl = OMP_CLAUSE_DECL (c);
12765 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12766 remove = !(n->value & GOVD_SEEN);
12767 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12768 && code == OMP_PARALLEL
12769 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12770 remove = true;
12771 if (! remove)
12773 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12774 if ((n->value & GOVD_DEBUG_PRIVATE)
12775 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12777 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12778 || ((n->value & GOVD_DATA_SHARE_CLASS)
12779 == GOVD_SHARED));
12780 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12781 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12783 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12784 && ctx->has_depend
12785 && DECL_P (decl))
12786 n->value |= GOVD_WRITTEN;
12787 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12788 && (n->value & GOVD_WRITTEN) == 0
12789 && DECL_P (decl)
12790 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12791 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12792 else if (DECL_P (decl)
12793 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12794 && (n->value & GOVD_WRITTEN) != 0)
12795 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12796 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12797 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12798 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12800 else
12801 n->value &= ~GOVD_EXPLICIT;
12802 break;
12804 case OMP_CLAUSE_LASTPRIVATE:
12805 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12806 accurately reflect the presence of a FIRSTPRIVATE clause. */
12807 decl = OMP_CLAUSE_DECL (c);
12808 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12809 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12810 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12811 if (code == OMP_DISTRIBUTE
12812 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12814 remove = true;
12815 error_at (OMP_CLAUSE_LOCATION (c),
12816 "same variable used in %<firstprivate%> and "
12817 "%<lastprivate%> clauses on %<distribute%> "
12818 "construct");
12820 if (!remove
12821 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12822 && DECL_P (decl)
12823 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12824 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12825 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12826 remove = true;
12827 break;
12829 case OMP_CLAUSE_ALIGNED:
12830 decl = OMP_CLAUSE_DECL (c);
12831 if (!is_global_var (decl))
12833 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12834 remove = n == NULL || !(n->value & GOVD_SEEN);
12835 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12837 struct gimplify_omp_ctx *octx;
12838 if (n != NULL
12839 && (n->value & (GOVD_DATA_SHARE_CLASS
12840 & ~GOVD_FIRSTPRIVATE)))
12841 remove = true;
12842 else
12843 for (octx = ctx->outer_context; octx;
12844 octx = octx->outer_context)
12846 n = splay_tree_lookup (octx->variables,
12847 (splay_tree_key) decl);
12848 if (n == NULL)
12849 continue;
12850 if (n->value & GOVD_LOCAL)
12851 break;
12852 /* We have to avoid assigning a shared variable
12853 to itself when trying to add
12854 __builtin_assume_aligned. */
12855 if (n->value & GOVD_SHARED)
12857 remove = true;
12858 break;
12863 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12865 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12866 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12867 remove = true;
12869 break;
12871 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12872 decl = OMP_CLAUSE_DECL (c);
12873 while (INDIRECT_REF_P (decl)
12874 || TREE_CODE (decl) == ARRAY_REF)
12875 decl = TREE_OPERAND (decl, 0);
12876 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12877 remove = n == NULL || !(n->value & GOVD_SEEN);
12878 break;
12880 case OMP_CLAUSE_IS_DEVICE_PTR:
12881 case OMP_CLAUSE_NONTEMPORAL:
12882 decl = OMP_CLAUSE_DECL (c);
12883 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12884 remove = n == NULL || !(n->value & GOVD_SEEN);
12885 break;
12887 case OMP_CLAUSE_MAP:
12888 switch (OMP_CLAUSE_MAP_KIND (c))
12890 case GOMP_MAP_PRESENT_ALLOC:
12891 case GOMP_MAP_PRESENT_TO:
12892 case GOMP_MAP_PRESENT_FROM:
12893 case GOMP_MAP_PRESENT_TOFROM:
12894 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
12895 break;
12896 default:
12897 break;
12899 if (code == OMP_TARGET_EXIT_DATA
12900 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12902 remove = true;
12903 break;
12905 /* If we have a target region, we can push all the attaches to the
12906 end of the list (we may have standalone "attach" operations
12907 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12908 the attachment point AND the pointed-to block have been mapped).
12909 If we have something else, e.g. "enter data", we need to keep
12910 "attach" nodes together with the previous node they attach to so
12911 that separate "exit data" operations work properly (see
12912 libgomp/target.c). */
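/* As an illustrative sketch (invented names), for a target region with
     map(to: s) map(attach: s.p) map(to: n)
   the attach node is moved to the end of the clause list:
     map(to: s) map(to: n) map(attach: s.p)
   whereas for e.g. "enter data" it stays next to the node it attaches
   to. */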
12913 if ((ctx->region_type & ORT_TARGET) != 0
12914 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12915 || (OMP_CLAUSE_MAP_KIND (c)
12916 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12917 move_attach = true;
12918 decl = OMP_CLAUSE_DECL (c);
12919 /* Data clauses associated with reductions must be
12920 compatible with present_or_copy. Warn and adjust the clause
12921 if that is not the case. */
12922 if (ctx->region_type == ORT_ACC_PARALLEL
12923 || ctx->region_type == ORT_ACC_SERIAL)
12925 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12926 n = NULL;
12928 if (DECL_P (t))
12929 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12931 if (n && (n->value & GOVD_REDUCTION))
12933 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12935 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12936 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12937 && kind != GOMP_MAP_FORCE_PRESENT
12938 && kind != GOMP_MAP_POINTER)
12940 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12941 "incompatible data clause with reduction "
12942 "on %qE; promoting to %<present_or_copy%>",
12943 DECL_NAME (t));
12944 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
12948 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12949 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12951 remove = true;
12952 break;
12954 if (!DECL_P (decl))
12956 if ((ctx->region_type & ORT_TARGET) != 0
12957 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12959 if (INDIRECT_REF_P (decl)
12960 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12961 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12962 == REFERENCE_TYPE))
12963 decl = TREE_OPERAND (decl, 0);
12964 if (TREE_CODE (decl) == COMPONENT_REF)
12966 while (TREE_CODE (decl) == COMPONENT_REF)
12967 decl = TREE_OPERAND (decl, 0);
12968 if (DECL_P (decl))
12970 n = splay_tree_lookup (ctx->variables,
12971 (splay_tree_key) decl);
12972 if (!(n->value & GOVD_SEEN))
12973 remove = true;
12977 break;
12979 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12980 if ((ctx->region_type & ORT_TARGET) != 0
12981 && !(n->value & GOVD_SEEN)
12982 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12983 && (!is_global_var (decl)
12984 || !lookup_attribute ("omp declare target link",
12985 DECL_ATTRIBUTES (decl))))
12987 remove = true;
12988 /* For struct element mappings, if the struct is never referenced
12989 in the target block and none of the mappings has an always modifier,
12990 remove all the struct element mappings, which immediately
12991 follow the GOMP_MAP_STRUCT map clause. */
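/* E.g. for the pseudo-clauses (illustrative only)
     map(struct: s [len: 2]) map(tofrom: s.a) map(tofrom: s.b)
   if S is unused, the two element maps chained right after the
   GOMP_MAP_STRUCT clause are skipped over together with it. */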
12992 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12994 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12995 while (cnt--)
12996 OMP_CLAUSE_CHAIN (c)
12997 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
13000 else if (DECL_SIZE (decl)
13001 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
13002 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
13003 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
13004 && (OMP_CLAUSE_MAP_KIND (c)
13005 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
13007 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
13008 for these, TREE_CODE (DECL_SIZE (decl)) will always be
13009 INTEGER_CST. */
13010 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
13012 tree decl2 = DECL_VALUE_EXPR (decl);
13013 gcc_assert (INDIRECT_REF_P (decl2));
13014 decl2 = TREE_OPERAND (decl2, 0);
13015 gcc_assert (DECL_P (decl2));
13016 tree mem = build_simple_mem_ref (decl2);
13017 OMP_CLAUSE_DECL (c) = mem;
13018 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13019 if (ctx->outer_context)
13021 omp_notice_variable (ctx->outer_context, decl2, true);
13022 omp_notice_variable (ctx->outer_context,
13023 OMP_CLAUSE_SIZE (c), true);
13025 if (((ctx->region_type & ORT_TARGET) != 0
13026 || !ctx->target_firstprivatize_array_bases)
13027 && ((n->value & GOVD_SEEN) == 0
13028 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
13030 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13031 OMP_CLAUSE_MAP);
13032 OMP_CLAUSE_DECL (nc) = decl;
13033 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13034 if (ctx->target_firstprivatize_array_bases)
13035 OMP_CLAUSE_SET_MAP_KIND (nc,
13036 GOMP_MAP_FIRSTPRIVATE_POINTER);
13037 else
13038 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13039 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
13040 OMP_CLAUSE_CHAIN (c) = nc;
13041 c = nc;
13044 else
13046 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
13047 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
13048 gcc_assert ((n->value & GOVD_SEEN) == 0
13049 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13050 == 0));
13052 break;
13054 case OMP_CLAUSE_TO:
13055 case OMP_CLAUSE_FROM:
13056 case OMP_CLAUSE__CACHE_:
13057 decl = OMP_CLAUSE_DECL (c);
13058 if (!DECL_P (decl))
13059 break;
13060 if (DECL_SIZE (decl)
13061 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
13063 tree decl2 = DECL_VALUE_EXPR (decl);
13064 gcc_assert (INDIRECT_REF_P (decl2));
13065 decl2 = TREE_OPERAND (decl2, 0);
13066 gcc_assert (DECL_P (decl2));
13067 tree mem = build_simple_mem_ref (decl2);
13068 OMP_CLAUSE_DECL (c) = mem;
13069 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13070 if (ctx->outer_context)
13072 omp_notice_variable (ctx->outer_context, decl2, true);
13073 omp_notice_variable (ctx->outer_context,
13074 OMP_CLAUSE_SIZE (c), true);
13077 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
13078 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
13079 break;
13081 case OMP_CLAUSE_REDUCTION:
13082 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
13084 decl = OMP_CLAUSE_DECL (c);
13085 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13086 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
13088 remove = true;
13089 error_at (OMP_CLAUSE_LOCATION (c),
13090 "%qD specified in %<inscan%> %<reduction%> clause "
13091 "but not in %<scan%> directive clause", decl);
13092 break;
13094 has_inscan_reductions = true;
13096 /* FALLTHRU */
13097 case OMP_CLAUSE_IN_REDUCTION:
13098 case OMP_CLAUSE_TASK_REDUCTION:
13099 decl = OMP_CLAUSE_DECL (c);
13100 /* OpenACC reductions need a present_or_copy data clause.
13101 Add one if necessary. Emit an error when the reduction is private. */
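/* E.g. for "#pragma acc parallel reduction(+:sum)" with no data clause
   for SUM, an implicit map(tofrom:sum) is created below. */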
13102 if (ctx->region_type == ORT_ACC_PARALLEL
13103 || ctx->region_type == ORT_ACC_SERIAL)
13105 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13106 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13108 remove = true;
13109 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
13110 "reduction on %qE", DECL_NAME (decl));
13112 else if ((n->value & GOVD_MAP) == 0)
13114 tree next = OMP_CLAUSE_CHAIN (c);
13115 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
13116 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
13117 OMP_CLAUSE_DECL (nc) = decl;
13118 OMP_CLAUSE_CHAIN (c) = nc;
13119 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13120 (ctx->region_type
13121 & ORT_ACC) != 0);
13122 while (1)
13124 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
13125 if (OMP_CLAUSE_CHAIN (nc) == NULL)
13126 break;
13127 nc = OMP_CLAUSE_CHAIN (nc);
13129 OMP_CLAUSE_CHAIN (nc) = next;
13130 n->value |= GOVD_MAP;
13133 if (DECL_P (decl)
13134 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13135 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13136 break;
13138 case OMP_CLAUSE_ALLOCATE:
13139 decl = OMP_CLAUSE_DECL (c);
13140 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13141 if (n != NULL && !(n->value & GOVD_SEEN))
13143 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
13144 != 0
13145 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
13146 remove = true;
13148 if (!remove
13149 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13150 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
13151 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
13152 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
13153 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
13155 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13156 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
13157 if (n == NULL)
13159 enum omp_clause_default_kind default_kind
13160 = ctx->default_kind;
13161 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
13162 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13163 true);
13164 ctx->default_kind = default_kind;
13166 else
13167 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13168 true);
13170 break;
13172 case OMP_CLAUSE_COPYIN:
13173 case OMP_CLAUSE_COPYPRIVATE:
13174 case OMP_CLAUSE_IF:
13175 case OMP_CLAUSE_NUM_THREADS:
13176 case OMP_CLAUSE_NUM_TEAMS:
13177 case OMP_CLAUSE_THREAD_LIMIT:
13178 case OMP_CLAUSE_DIST_SCHEDULE:
13179 case OMP_CLAUSE_DEVICE:
13180 case OMP_CLAUSE_SCHEDULE:
13181 case OMP_CLAUSE_NOWAIT:
13182 case OMP_CLAUSE_ORDERED:
13183 case OMP_CLAUSE_DEFAULT:
13184 case OMP_CLAUSE_UNTIED:
13185 case OMP_CLAUSE_COLLAPSE:
13186 case OMP_CLAUSE_FINAL:
13187 case OMP_CLAUSE_MERGEABLE:
13188 case OMP_CLAUSE_PROC_BIND:
13189 case OMP_CLAUSE_SAFELEN:
13190 case OMP_CLAUSE_SIMDLEN:
13191 case OMP_CLAUSE_DEPEND:
13192 case OMP_CLAUSE_DOACROSS:
13193 case OMP_CLAUSE_PRIORITY:
13194 case OMP_CLAUSE_GRAINSIZE:
13195 case OMP_CLAUSE_NUM_TASKS:
13196 case OMP_CLAUSE_NOGROUP:
13197 case OMP_CLAUSE_THREADS:
13198 case OMP_CLAUSE_SIMD:
13199 case OMP_CLAUSE_FILTER:
13200 case OMP_CLAUSE_HINT:
13201 case OMP_CLAUSE_DEFAULTMAP:
13202 case OMP_CLAUSE_ORDER:
13203 case OMP_CLAUSE_BIND:
13204 case OMP_CLAUSE_DETACH:
13205 case OMP_CLAUSE_USE_DEVICE_PTR:
13206 case OMP_CLAUSE_USE_DEVICE_ADDR:
13207 case OMP_CLAUSE_ASYNC:
13208 case OMP_CLAUSE_WAIT:
13209 case OMP_CLAUSE_INDEPENDENT:
13210 case OMP_CLAUSE_NUM_GANGS:
13211 case OMP_CLAUSE_NUM_WORKERS:
13212 case OMP_CLAUSE_VECTOR_LENGTH:
13213 case OMP_CLAUSE_GANG:
13214 case OMP_CLAUSE_WORKER:
13215 case OMP_CLAUSE_VECTOR:
13216 case OMP_CLAUSE_AUTO:
13217 case OMP_CLAUSE_SEQ:
13218 case OMP_CLAUSE_TILE:
13219 case OMP_CLAUSE_IF_PRESENT:
13220 case OMP_CLAUSE_FINALIZE:
13221 case OMP_CLAUSE_INCLUSIVE:
13222 case OMP_CLAUSE_EXCLUSIVE:
13223 break;
13225 case OMP_CLAUSE_NOHOST:
13226 default:
13227 gcc_unreachable ();
13230 if (remove)
13231 *list_p = OMP_CLAUSE_CHAIN (c);
13232 else if (move_attach)
13234 /* Remove attach node from here, separate out into its own list. */
13235 *attach_tail = c;
13236 *list_p = OMP_CLAUSE_CHAIN (c);
13237 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13238 attach_tail = &OMP_CLAUSE_CHAIN (c);
13240 else
13241 list_p = &OMP_CLAUSE_CHAIN (c);
13244 /* Splice attach nodes at the end of the list. */
13245 if (attach_list)
13247 *list_p = attach_list;
13248 list_p = attach_tail;
13251 /* Add in any implicit data sharing. */
13252 struct gimplify_adjust_omp_clauses_data data;
13253 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13255 /* OpenMP. Implicit clauses are added near the start of the clause list,
13256 right before the first map clause, i.e. after any non-map clauses. */
13257 tree *implicit_add_list_p = orig_list_p;
13258 while (*implicit_add_list_p
13259 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13260 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13261 data.list_p = implicit_add_list_p;
13263 else
13264 /* OpenACC. */
13265 data.list_p = list_p;
13266 data.pre_p = pre_p;
13267 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13269 if (has_inscan_reductions)
13270 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13271 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13272 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13274 error_at (OMP_CLAUSE_LOCATION (c),
13275 "%<inscan%> %<reduction%> clause used together with "
13276 "%<linear%> clause for a variable other than loop "
13277 "iterator");
13278 break;
13281 gimplify_omp_ctxp = ctx->outer_context;
13282 delete_omp_context (ctx);
13285 /* Return 1 if the CONSTRUCTS selectors match the OpenMP context, 0 if
13286 they don't, and -1 if it is unknown yet (simd is involved and won't be
13287 known until vectorization). If SCORES is non-NULL, it should point to
13288 an array of at least 2*NCONSTRUCTS+2 ints, which will be filled with the
13289 positions of the CONSTRUCTS (position -1 if one will never match),
13290 followed by the number of constructs in the OpenMP context construct
13291 trait. If the score depends on whether the code will end up in a
13292 declare simd clone or not, the function returns 2 and there will be two
13293 sets of scores, the first one for the case that it is not in a declare
13294 simd clone, the other for the case that it is. */
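/* E.g. (illustrative only): for code nested as
     #pragma omp target
       #pragma omp parallel
   a CONSTRUCTS set consisting of OMP_TARGET and OMP_PARALLEL returns 1,
   a CONSTRUCTS containing OMP_TEAMS returns 0, and once OMP_SIMD is
   involved the answer can be -1, i.e. unknown until vectorization. */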
13296 int
13297 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13298 int *scores)
13300 int matched = 0, cnt = 0;
13301 bool simd_seen = false;
13302 bool target_seen = false;
13303 int declare_simd_cnt = -1;
13304 auto_vec<enum tree_code, 16> codes;
13305 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13307 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13308 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13309 == ORT_TARGET && ctx->code == OMP_TARGET)
13310 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13311 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13312 || (ctx->region_type == ORT_SIMD
13313 && ctx->code == OMP_SIMD
13314 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13316 ++cnt;
13317 if (scores)
13318 codes.safe_push (ctx->code);
13319 else if (matched < nconstructs && ctx->code == constructs[matched])
13321 if (ctx->code == OMP_SIMD)
13323 if (matched)
13324 return 0;
13325 simd_seen = true;
13327 ++matched;
13329 if (ctx->code == OMP_TARGET)
13331 if (scores == NULL)
13332 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13333 target_seen = true;
13334 break;
13337 else if (ctx->region_type == ORT_WORKSHARE
13338 && ctx->code == OMP_LOOP
13339 && ctx->outer_context
13340 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13341 && ctx->outer_context->outer_context
13342 && ctx->outer_context->outer_context->code == OMP_LOOP
13343 && ctx->outer_context->outer_context->distribute)
13344 ctx = ctx->outer_context->outer_context;
13345 ctx = ctx->outer_context;
13347 if (!target_seen
13348 && lookup_attribute ("omp declare simd",
13349 DECL_ATTRIBUTES (current_function_decl)))
13351 /* Declare simd is a maybe case: it is supposed to be added only to the
13352 clones created by omp-simd-clone.cc, not to the base function. */
13353 declare_simd_cnt = cnt++;
13354 if (scores)
13355 codes.safe_push (OMP_SIMD);
13356 else if (cnt == 0
13357 && constructs[0] == OMP_SIMD)
13359 gcc_assert (matched == 0);
13360 simd_seen = true;
13361 if (++matched == nconstructs)
13362 return -1;
13365 if (tree attr = lookup_attribute ("omp declare variant variant",
13366 DECL_ATTRIBUTES (current_function_decl)))
13368 enum tree_code variant_constructs[5];
13369 int variant_nconstructs = 0;
13370 if (!target_seen)
13371 variant_nconstructs
13372 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13373 variant_constructs);
13374 for (int i = 0; i < variant_nconstructs; i++)
13376 ++cnt;
13377 if (scores)
13378 codes.safe_push (variant_constructs[i]);
13379 else if (matched < nconstructs
13380 && variant_constructs[i] == constructs[matched])
13382 if (variant_constructs[i] == OMP_SIMD)
13384 if (matched)
13385 return 0;
13386 simd_seen = true;
13388 ++matched;
13392 if (!target_seen
13393 && lookup_attribute ("omp declare target block",
13394 DECL_ATTRIBUTES (current_function_decl)))
13396 if (scores)
13397 codes.safe_push (OMP_TARGET);
13398 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13399 ++matched;
13401 if (scores)
13403 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13405 int j = codes.length () - 1;
13406 for (int i = nconstructs - 1; i >= 0; i--)
13408 while (j >= 0
13409 && (pass != 0 || declare_simd_cnt != j)
13410 && constructs[i] != codes[j])
13411 --j;
13412 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13413 *scores++ = j - 1;
13414 else
13415 *scores++ = j;
13417 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13418 ? codes.length () - 1 : codes.length ());
13420 return declare_simd_cnt == -1 ? 1 : 2;
13422 if (matched == nconstructs)
13423 return simd_seen ? -1 : 1;
13424 return 0;
13427 /* Gimplify OACC_CACHE. */
13429 static void
13430 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13432 tree expr = *expr_p;
13434 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13435 OACC_CACHE);
13436 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13437 OACC_CACHE);
13439 /* TODO: Do something sensible with this information. */
13441 *expr_p = NULL_TREE;
13444 /* Helper function of gimplify_oacc_declare. If required, translate the
13445 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind. The entry
13446 kind will replace the one in CLAUSE, while the exit kind will be used
13447 in a new omp_clause and returned to the caller. */
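/* E.g. a GOMP_MAP_TOFROM clause is split into a GOMP_MAP_TO entry kind,
   stored back into CLAUSE, plus a new GOMP_MAP_FROM exit clause that is
   returned, as per the switch below. */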
13449 static tree
13450 gimplify_oacc_declare_1 (tree clause)
13452 HOST_WIDE_INT kind, new_op;
13453 bool ret = false;
13454 tree c = NULL;
13456 kind = OMP_CLAUSE_MAP_KIND (clause);
13458 switch (kind)
13460 case GOMP_MAP_ALLOC:
13461 new_op = GOMP_MAP_RELEASE;
13462 ret = true;
13463 break;
13465 case GOMP_MAP_FROM:
13466 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13467 new_op = GOMP_MAP_FROM;
13468 ret = true;
13469 break;
13471 case GOMP_MAP_TOFROM:
13472 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13473 new_op = GOMP_MAP_FROM;
13474 ret = true;
13475 break;
13477 case GOMP_MAP_DEVICE_RESIDENT:
13478 case GOMP_MAP_FORCE_DEVICEPTR:
13479 case GOMP_MAP_FORCE_PRESENT:
13480 case GOMP_MAP_LINK:
13481 case GOMP_MAP_POINTER:
13482 case GOMP_MAP_TO:
13483 break;
13485 default:
13486 gcc_unreachable ();
13487 break;
13490 if (ret)
13492 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13493 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13494 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13497 return c;
13500 /* Gimplify OACC_DECLARE. */
13502 static void
13503 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13505 tree expr = *expr_p;
13506 gomp_target *stmt;
13507 tree clauses, t, decl;
13509 clauses = OACC_DECLARE_CLAUSES (expr);
13511 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13512 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13514 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13516 decl = OMP_CLAUSE_DECL (t);
13518 if (TREE_CODE (decl) == MEM_REF)
13519 decl = TREE_OPERAND (decl, 0);
13521 if (VAR_P (decl) && !is_oacc_declared (decl))
13523 tree attr = get_identifier ("oacc declare target");
13524 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13525 DECL_ATTRIBUTES (decl));
13528 if (VAR_P (decl)
13529 && !is_global_var (decl)
13530 && DECL_CONTEXT (decl) == current_function_decl)
13532 tree c = gimplify_oacc_declare_1 (t);
13533 if (c)
13535 if (oacc_declare_returns == NULL)
13536 oacc_declare_returns = new hash_map<tree, tree>;
13538 oacc_declare_returns->put (decl, c);
13542 if (gimplify_omp_ctxp)
13543 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13546 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13547 clauses);
13549 gimplify_seq_add_stmt (pre_p, stmt);
13551 *expr_p = NULL_TREE;
13554 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13555 gimplification of the body, as well as scanning the body for used
13556 variables. We need to do this scan now, because variable-sized
13557 decls will be decomposed during gimplification. */
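/* (Illustrative example: a VLA such as "char buf[n];" declared in the
   parallel body is decomposed into a pointer with a DECL_VALUE_EXPR
   during gimplification, so the scan must run while the decl is still
   visible in its original form.) */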
13559 static void
13560 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13562 tree expr = *expr_p;
13563 gimple *g;
13564 gimple_seq body = NULL;
13566 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13567 OMP_PARALLEL_COMBINED (expr)
13568 ? ORT_COMBINED_PARALLEL
13569 : ORT_PARALLEL, OMP_PARALLEL);
13571 push_gimplify_context ();
13573 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13574 if (gimple_code (g) == GIMPLE_BIND)
13575 pop_gimplify_context (g);
13576 else
13577 pop_gimplify_context (NULL);
13579 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13580 OMP_PARALLEL);
13582 g = gimple_build_omp_parallel (body,
13583 OMP_PARALLEL_CLAUSES (expr),
13584 NULL_TREE, NULL_TREE);
13585 if (OMP_PARALLEL_COMBINED (expr))
13586 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13587 gimplify_seq_add_stmt (pre_p, g);
13588 *expr_p = NULL_TREE;
13591 /* Gimplify the contents of an OMP_TASK statement. This involves
13592 gimplification of the body, as well as scanning the body for used
13593 variables. We need to do this scan now, because variable-sized
13594 decls will be decomposed during gimplification. */
13596 static void
13597 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13599 tree expr = *expr_p;
13600 gimple *g;
13601 gimple_seq body = NULL;
13602 bool nowait = false;
13603 bool has_depend = false;
13605 if (OMP_TASK_BODY (expr) == NULL_TREE)
13607 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13608 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13610 has_depend = true;
13611 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13613 error_at (OMP_CLAUSE_LOCATION (c),
13614 "%<mutexinoutset%> kind in %<depend%> clause on a "
13615 "%<taskwait%> construct");
13616 break;
13619 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13620 nowait = true;
13621 if (nowait && !has_depend)
13623 error_at (EXPR_LOCATION (expr),
13624 "%<taskwait%> construct with %<nowait%> clause but no "
13625 "%<depend%> clauses");
13626 *expr_p = NULL_TREE;
13627 return;
13631 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13632 omp_find_clause (OMP_TASK_CLAUSES (expr),
13633 OMP_CLAUSE_UNTIED)
13634 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13636 if (OMP_TASK_BODY (expr))
13638 push_gimplify_context ();
13640 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13641 if (gimple_code (g) == GIMPLE_BIND)
13642 pop_gimplify_context (g);
13643 else
13644 pop_gimplify_context (NULL);
13647 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13648 OMP_TASK);
13650 g = gimple_build_omp_task (body,
13651 OMP_TASK_CLAUSES (expr),
13652 NULL_TREE, NULL_TREE,
13653 NULL_TREE, NULL_TREE, NULL_TREE);
13654 if (OMP_TASK_BODY (expr) == NULL_TREE)
13655 gimple_omp_task_set_taskwait_p (g, true);
13656 gimplify_seq_add_stmt (pre_p, g);
13657 *expr_p = NULL_TREE;
13660 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13661 force it into a temporary initialized in PRE_P and add a firstprivate
13662 clause for it to ORIG_FOR_STMT. */
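/* E.g. (sketch, with an invented temporary name) for
     #pragma omp taskloop
     for (i = 0; i < x + y; i++)
   the bound x + y is evaluated into a temporary D.1234 before the
   taskloop and "firstprivate (D.1234)" is added to its clauses. */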
13664 static void
13665 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13666 tree orig_for_stmt)
13668 if (*tp == NULL || is_gimple_constant (*tp))
13669 return;
13671 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13672 /* A reference to pointer conversion is considered useless,
13673 but it is significant for the firstprivate clause. Force it
13674 here. */
13675 if (type
13676 && TREE_CODE (type) == POINTER_TYPE
13677 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13679 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13680 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13681 gimplify_and_add (m, pre_p);
13682 *tp = v;
13685 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13686 OMP_CLAUSE_DECL (c) = *tp;
13687 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13688 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13691 /* Helper function of gimplify_omp_for: find an OMP_ORDERED with a
13692 null OMP_ORDERED_BODY inside OMP_FOR's body. */
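/* Such a standalone OMP_ORDERED is e.g. the doacross form
   "#pragma omp ordered depend(sink: i - 1)", which has no body, as
   opposed to "#pragma omp ordered" followed by a statement block. */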
13694 static tree
13695 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13697 switch (TREE_CODE (*tp))
13699 case OMP_ORDERED:
13700 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13701 return *tp;
13702 break;
13703 case OMP_SIMD:
13704 case OMP_PARALLEL:
13705 case OMP_TARGET:
13706 *walk_subtrees = 0;
13707 break;
13708 default:
13709 break;
13711 return NULL_TREE;
13714 /* Gimplify the gross structure of an OMP_FOR statement. */
13716 static enum gimplify_status
13717 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13719 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13720 enum gimplify_status ret = GS_ALL_DONE;
13721 enum gimplify_status tret;
13722 gomp_for *gfor;
13723 gimple_seq for_body, for_pre_body;
13724 int i;
13725 bitmap has_decl_expr = NULL;
13726 enum omp_region_type ort = ORT_WORKSHARE;
13727 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13729 orig_for_stmt = for_stmt = *expr_p;
13731 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13732 != NULL_TREE);
13733 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13735 tree *data[4] = { NULL, NULL, NULL, NULL };
13736 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13737 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13738 find_combined_omp_for, data, NULL);
13739 if (inner_for_stmt == NULL_TREE)
13741 gcc_assert (seen_error ());
13742 *expr_p = NULL_TREE;
13743 return GS_ERROR;
13745 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13747 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13748 &OMP_FOR_PRE_BODY (for_stmt));
13749 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13751 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13753 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13754 &OMP_FOR_PRE_BODY (for_stmt));
13755 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13758 if (data[0])
13760 /* We have some statements or variable declarations in between
13761 the composite construct directives. Move them around the
13762 inner_for_stmt. */
13763 data[0] = expr_p;
13764 for (i = 0; i < 3; i++)
13765 if (data[i])
13767 tree t = *data[i];
13768 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13769 data[i + 1] = data[i];
13770 *data[i] = OMP_BODY (t);
13771 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13772 NULL_TREE, make_node (BLOCK));
13773 OMP_BODY (t) = body;
13774 append_to_statement_list_force (inner_for_stmt,
13775 &BIND_EXPR_BODY (body));
13776 *data[3] = t;
13777 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13778 gcc_assert (*data[3] == inner_for_stmt);
13780 return GS_OK;
13783 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13784 if (!loop_p
13785 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13786 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13787 i)) == TREE_LIST
13788 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13789 i)))
13791 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13792 /* Class iterators aren't allowed on OMP_SIMD, so the only
13793 case we need to solve is distribute parallel for. They are
13794 allowed on the loop construct, but that is already handled
13795 in gimplify_omp_loop. */
13796 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13797 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13798 && data[1]);
13799 tree orig_decl = TREE_PURPOSE (orig);
13800 tree last = TREE_VALUE (orig);
13801 tree *pc;
13802 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13803 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13804 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13805 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13806 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13807 break;
13808 if (*pc == NULL_TREE)
13810 tree *spc;
13811 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13812 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13813 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13814 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13815 break;
13816 if (*spc)
13818 tree c = *spc;
13819 *spc = OMP_CLAUSE_CHAIN (c);
13820 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13821 *pc = c;
13824 if (*pc == NULL_TREE)
13826 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13828 /* A private clause will appear only on inner_for_stmt.
13829 Change it into firstprivate, and add a private clause
13830 on for_stmt. */
13831 tree c = copy_node (*pc);
13832 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13833 OMP_FOR_CLAUSES (for_stmt) = c;
13834 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13835 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13837 else
13839 /* A lastprivate clause will appear on both inner_for_stmt
13840 and for_stmt. Add a firstprivate clause to
13841 inner_for_stmt. */
13842 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13843 OMP_CLAUSE_FIRSTPRIVATE);
13844 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13845 OMP_CLAUSE_CHAIN (c) = *pc;
13846 *pc = c;
13847 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13849 tree c = build_omp_clause (UNKNOWN_LOCATION,
13850 OMP_CLAUSE_FIRSTPRIVATE);
13851 OMP_CLAUSE_DECL (c) = last;
13852 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13853 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13854 c = build_omp_clause (UNKNOWN_LOCATION,
13855 *pc ? OMP_CLAUSE_SHARED
13856 : OMP_CLAUSE_FIRSTPRIVATE);
13857 OMP_CLAUSE_DECL (c) = orig_decl;
13858 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13859 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13861 /* Similarly, take care of C++ range for temporaries; those should
13862 be firstprivate on OMP_PARALLEL if any. */
13863 if (data[1])
13864 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13865 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13866 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13867 i)) == TREE_LIST
13868 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13869 i)))
13871 tree orig
13872 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13873 tree v = TREE_CHAIN (orig);
13874 tree c = build_omp_clause (UNKNOWN_LOCATION,
13875 OMP_CLAUSE_FIRSTPRIVATE);
13876 /* First add a firstprivate clause for the __for_end artificial
13877 decl. */
13878 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13879 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13880 == REFERENCE_TYPE)
13881 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13882 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13883 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13884 if (TREE_VEC_ELT (v, 0))
13886 /* And now the same for the __for_range artificial decl if it
13887 exists. */
13888 c = build_omp_clause (UNKNOWN_LOCATION,
13889 OMP_CLAUSE_FIRSTPRIVATE);
13890 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13891 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13892 == REFERENCE_TYPE)
13893 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13894 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13895 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13900 switch (TREE_CODE (for_stmt))
13902 case OMP_FOR:
13903 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13905 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13906 OMP_CLAUSE_SCHEDULE))
13907 error_at (EXPR_LOCATION (for_stmt),
13908 "%qs clause may not appear on non-rectangular %qs",
13909 "schedule", lang_GNU_Fortran () ? "do" : "for");
13910 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13911 error_at (EXPR_LOCATION (for_stmt),
13912 "%qs clause may not appear on non-rectangular %qs",
13913 "ordered", lang_GNU_Fortran () ? "do" : "for");
13915 break;
13916 case OMP_DISTRIBUTE:
13917 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13918 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13919 OMP_CLAUSE_DIST_SCHEDULE))
13920 error_at (EXPR_LOCATION (for_stmt),
13921 "%qs clause may not appear on non-rectangular %qs",
13922 "dist_schedule", "distribute");
13923 break;
13924 case OACC_LOOP:
13925 ort = ORT_ACC;
13926 break;
13927 case OMP_TASKLOOP:
13928 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13930 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13931 OMP_CLAUSE_GRAINSIZE))
13932 error_at (EXPR_LOCATION (for_stmt),
13933 "%qs clause may not appear on non-rectangular %qs",
13934 "grainsize", "taskloop");
13935 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13936 OMP_CLAUSE_NUM_TASKS))
13937 error_at (EXPR_LOCATION (for_stmt),
13938 "%qs clause may not appear on non-rectangular %qs",
13939 "num_tasks", "taskloop");
13941 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13942 ort = ORT_UNTIED_TASKLOOP;
13943 else
13944 ort = ORT_TASKLOOP;
13945 break;
13946 case OMP_SIMD:
13947 ort = ORT_SIMD;
13948 break;
13949 default:
13950 gcc_unreachable ();
13953 /* Set the OMP_CLAUSE_LINEAR_NO_COPYIN flag on an explicit linear
13954 clause for the IV. */
13955 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13957 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13958 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13959 decl = TREE_OPERAND (t, 0);
13960 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13961 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13962 && OMP_CLAUSE_DECL (c) == decl)
13964 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13965 break;
13969 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13970 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13971 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13972 ? OMP_LOOP : TREE_CODE (for_stmt));
13974 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13975 gimplify_omp_ctxp->distribute = true;
13977 /* Handle OMP_FOR_INIT. */
13978 for_pre_body = NULL;
13979 if ((ort == ORT_SIMD
13980 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13981 && OMP_FOR_PRE_BODY (for_stmt))
13983 has_decl_expr = BITMAP_ALLOC (NULL);
13984 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13985 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
13987 t = OMP_FOR_PRE_BODY (for_stmt);
13988 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13990 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13992 tree_stmt_iterator si;
13993 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13994 tsi_next (&si))
13996 t = tsi_stmt (si);
13997 if (TREE_CODE (t) == DECL_EXPR
13998 && VAR_P (DECL_EXPR_DECL (t)))
13999 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
14003 if (OMP_FOR_PRE_BODY (for_stmt))
14005 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
14006 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
14007 else
14009 struct gimplify_omp_ctx ctx;
14010 memset (&ctx, 0, sizeof (ctx));
14011 ctx.region_type = ORT_NONE;
14012 gimplify_omp_ctxp = &ctx;
14013 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
14014 gimplify_omp_ctxp = NULL;
14017 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
14019 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
14020 for_stmt = inner_for_stmt;
14022 /* For taskloop, we need to gimplify the start, end and step before the
14023 taskloop, outside of the taskloop omp context. */
14024 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14026 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14028 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14029 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
14030 ? pre_p : &for_pre_body);
14031 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
14032 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14034 tree v = TREE_OPERAND (t, 1);
14035 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
14036 for_pre_p, orig_for_stmt);
14037 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
14038 for_pre_p, orig_for_stmt);
14040 else
14041 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
14042 orig_for_stmt);
14044 /* Handle OMP_FOR_COND. */
14045 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14046 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14048 tree v = TREE_OPERAND (t, 1);
14049 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
14050 for_pre_p, orig_for_stmt);
14051 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
14052 for_pre_p, orig_for_stmt);
14054 else
14055 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
14056 orig_for_stmt);
14058 /* Handle OMP_FOR_INCR. */
14059 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14060 if (TREE_CODE (t) == MODIFY_EXPR)
14062 decl = TREE_OPERAND (t, 0);
14063 t = TREE_OPERAND (t, 1);
14064 tree *tp = &TREE_OPERAND (t, 1);
14065 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
14066 tp = &TREE_OPERAND (t, 0);
14068 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
14069 orig_for_stmt);
14073 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
14074 OMP_TASKLOOP);
14077 if (orig_for_stmt != for_stmt)
14078 gimplify_omp_ctxp->combined_loop = true;
14080 for_body = NULL;
14081 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14082 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
14083 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14084 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
14086 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
14087 bool is_doacross = false;
14088 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
14089 find_standalone_omp_ordered, NULL))
14091 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
14092 is_doacross = true;
14093 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
14094 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
14095 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14096 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
14098 error_at (OMP_CLAUSE_LOCATION (*pc),
14099 "%<linear%> clause may not be specified together "
14100 "with %<ordered%> clause if stand-alone %<ordered%> "
14101 "construct is nested in it");
14102 *pc = OMP_CLAUSE_CHAIN (*pc);
14104 else
14105 pc = &OMP_CLAUSE_CHAIN (*pc);
14107 int collapse = 1, tile = 0;
14108 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
14109 if (c)
14110 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
14111 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
14112 if (c)
14113 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
14114 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
14115 hash_set<tree> *allocate_uids = NULL;
14116 if (c)
14118 allocate_uids = new hash_set<tree>;
14119 for (; c; c = OMP_CLAUSE_CHAIN (c))
14120 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
14121 allocate_uids->add (OMP_CLAUSE_DECL (c));
14123 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14125 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14126 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14127 decl = TREE_OPERAND (t, 0);
14128 gcc_assert (DECL_P (decl));
14129 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
14130 || POINTER_TYPE_P (TREE_TYPE (decl)));
14131 if (is_doacross)
14133 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
14135 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14136 if (TREE_CODE (orig_decl) == TREE_LIST)
14138 orig_decl = TREE_PURPOSE (orig_decl);
14139 if (!orig_decl)
14140 orig_decl = decl;
14142 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
14144 else
14145 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14146 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14149 if (for_stmt == orig_for_stmt)
14151 tree orig_decl = decl;
14152 if (OMP_FOR_ORIG_DECLS (for_stmt))
14154 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14155 if (TREE_CODE (orig_decl) == TREE_LIST)
14157 orig_decl = TREE_PURPOSE (orig_decl);
14158 if (!orig_decl)
14159 orig_decl = decl;
14162 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
14163 error_at (EXPR_LOCATION (for_stmt),
14164 "threadprivate iteration variable %qD", orig_decl);
14167 /* Make sure the iteration variable is private. */
14168 tree c = NULL_TREE;
14169 tree c2 = NULL_TREE;
14170 if (orig_for_stmt != for_stmt)
14172 /* Preserve this information until we gimplify the inner simd. */
14173 if (has_decl_expr
14174 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14175 TREE_PRIVATE (t) = 1;
14177 else if (ort == ORT_SIMD)
14179 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14180 (splay_tree_key) decl);
14181 omp_is_private (gimplify_omp_ctxp, decl,
14182 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14183 != 1));
14184 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14186 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14187 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14188 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14189 OMP_CLAUSE_LASTPRIVATE);
14190 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14191 OMP_CLAUSE_LASTPRIVATE))
14192 if (OMP_CLAUSE_DECL (c3) == decl)
14194 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14195 "conditional %<lastprivate%> on loop "
14196 "iterator %qD ignored", decl);
14197 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14198 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14201 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14203 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14204 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14205 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14206 if ((has_decl_expr
14207 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14208 || TREE_PRIVATE (t))
14210 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14211 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14213 struct gimplify_omp_ctx *outer
14214 = gimplify_omp_ctxp->outer_context;
14215 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14217 if (outer->region_type == ORT_WORKSHARE
14218 && outer->combined_loop)
14220 n = splay_tree_lookup (outer->variables,
14221 (splay_tree_key)decl);
14222 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14224 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14225 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14227 else
14229 struct gimplify_omp_ctx *octx = outer->outer_context;
14230 if (octx
14231 && octx->region_type == ORT_COMBINED_PARALLEL
14232 && octx->outer_context
14233 && (octx->outer_context->region_type
14234 == ORT_WORKSHARE)
14235 && octx->outer_context->combined_loop)
14237 octx = octx->outer_context;
14238 n = splay_tree_lookup (octx->variables,
14239 (splay_tree_key)decl);
14240 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14242 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14243 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14250 OMP_CLAUSE_DECL (c) = decl;
14251 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14252 OMP_FOR_CLAUSES (for_stmt) = c;
14253 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14254 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14255 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14256 true);
14258 else
14260 bool lastprivate
14261 = (!has_decl_expr
14262 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14263 if (TREE_PRIVATE (t))
14264 lastprivate = false;
14265 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14267 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14268 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14269 lastprivate = false;
14272 struct gimplify_omp_ctx *outer
14273 = gimplify_omp_ctxp->outer_context;
14274 if (outer && lastprivate)
14275 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14276 true);
14278 c = build_omp_clause (input_location,
14279 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14280 : OMP_CLAUSE_PRIVATE);
14281 OMP_CLAUSE_DECL (c) = decl;
14282 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14283 OMP_FOR_CLAUSES (for_stmt) = c;
14284 omp_add_variable (gimplify_omp_ctxp, decl,
14285 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14286 | GOVD_EXPLICIT | GOVD_SEEN);
14287 c = NULL_TREE;
14290 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14292 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14293 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14294 (splay_tree_key) decl);
14295 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14296 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14297 OMP_CLAUSE_LASTPRIVATE);
14298 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14299 OMP_CLAUSE_LASTPRIVATE))
14300 if (OMP_CLAUSE_DECL (c3) == decl)
14302 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14303 "conditional %<lastprivate%> on loop "
14304 "iterator %qD ignored", decl);
14305 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14306 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14309 else
14310 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14312 /* If DECL is not a gimple register, create a temporary variable to act
14313 as an iteration counter. This is valid, since DECL cannot be
14314 modified in the body of the loop. Similarly for any iteration vars
14315 in a simd with collapse > 1, where the iterator vars must be
14316 lastprivate. And similarly for vars mentioned in allocate clauses. */
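/* E.g. (sketch) if the IV's address is taken in the loop body, DECL is
   not a gimple register; a fresh temporary VAR then drives the loop and
   the assignment "decl = var" is emitted at the start of the body. */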
14317 if (orig_for_stmt != for_stmt)
14318 var = decl;
14319 else if (!is_gimple_reg (decl)
14320 || (ort == ORT_SIMD
14321 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14322 || (allocate_uids && allocate_uids->contains (decl)))
14324 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14325 /* Make sure omp_add_variable is not called on it prematurely.
14326 We call it ourselves a few lines later. */
14327 gimplify_omp_ctxp = NULL;
14328 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14329 gimplify_omp_ctxp = ctx;
14330 TREE_OPERAND (t, 0) = var;
14332 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14334 if (ort == ORT_SIMD
14335 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14337 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14338 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14339 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14340 OMP_CLAUSE_DECL (c2) = var;
14341 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14342 OMP_FOR_CLAUSES (for_stmt) = c2;
14343 omp_add_variable (gimplify_omp_ctxp, var,
14344 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14345 if (c == NULL_TREE)
14347 c = c2;
14348 c2 = NULL_TREE;
14351 else
14352 omp_add_variable (gimplify_omp_ctxp, var,
14353 GOVD_PRIVATE | GOVD_SEEN);
14355 else
14356 var = decl;
14358 gimplify_omp_ctxp->in_for_exprs = true;
14359 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14361 tree lb = TREE_OPERAND (t, 1);
14362 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14363 is_gimple_val, fb_rvalue, false);
14364 ret = MIN (ret, tret);
14365 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14366 is_gimple_val, fb_rvalue, false);
14368 else
14369 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14370 is_gimple_val, fb_rvalue, false);
14371 gimplify_omp_ctxp->in_for_exprs = false;
14372 ret = MIN (ret, tret);
14373 if (ret == GS_ERROR)
14374 return ret;
14376 /* Handle OMP_FOR_COND. */
14377 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14378 gcc_assert (COMPARISON_CLASS_P (t));
14379 gcc_assert (TREE_OPERAND (t, 0) == decl);
14381 gimplify_omp_ctxp->in_for_exprs = true;
14382 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14384 tree ub = TREE_OPERAND (t, 1);
14385 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14386 is_gimple_val, fb_rvalue, false);
14387 ret = MIN (ret, tret);
14388 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14389 is_gimple_val, fb_rvalue, false);
14391 else
14392 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14393 is_gimple_val, fb_rvalue, false);
14394 gimplify_omp_ctxp->in_for_exprs = false;
14395 ret = MIN (ret, tret);
14397 /* Handle OMP_FOR_INCR. */
14398 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14399 switch (TREE_CODE (t))
14401 case PREINCREMENT_EXPR:
14402 case POSTINCREMENT_EXPR:
14404 tree decl = TREE_OPERAND (t, 0);
14405 /* c_omp_for_incr_canonicalize_ptr() should have been
14406 called to massage things appropriately. */
14407 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14409 if (orig_for_stmt != for_stmt)
14410 break;
14411 t = build_int_cst (TREE_TYPE (decl), 1);
14412 if (c)
14413 OMP_CLAUSE_LINEAR_STEP (c) = t;
14414 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14415 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14416 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14417 break;
14420 case PREDECREMENT_EXPR:
14421 case POSTDECREMENT_EXPR:
14422 /* c_omp_for_incr_canonicalize_ptr() should have been
14423 called to massage things appropriately. */
14424 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14425 if (orig_for_stmt != for_stmt)
14426 break;
14427 t = build_int_cst (TREE_TYPE (decl), -1);
14428 if (c)
14429 OMP_CLAUSE_LINEAR_STEP (c) = t;
14430 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14431 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14432 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14433 break;
14435 case MODIFY_EXPR:
14436 gcc_assert (TREE_OPERAND (t, 0) == decl);
14437 TREE_OPERAND (t, 0) = var;
14439 t = TREE_OPERAND (t, 1);
14440 switch (TREE_CODE (t))
14442 case PLUS_EXPR:
14443 if (TREE_OPERAND (t, 1) == decl)
14445 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14446 TREE_OPERAND (t, 0) = var;
14447 break;
14450 /* Fallthru. */
14451 case MINUS_EXPR:
14452 case POINTER_PLUS_EXPR:
14453 gcc_assert (TREE_OPERAND (t, 0) == decl);
14454 TREE_OPERAND (t, 0) = var;
14455 break;
14456 default:
14457 gcc_unreachable ();
14460 gimplify_omp_ctxp->in_for_exprs = true;
14461 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14462 is_gimple_val, fb_rvalue, false);
14463 ret = MIN (ret, tret);
14464 if (c)
14466 tree step = TREE_OPERAND (t, 1);
14467 tree stept = TREE_TYPE (decl);
14468 if (POINTER_TYPE_P (stept))
14469 stept = sizetype;
14470 step = fold_convert (stept, step);
14471 if (TREE_CODE (t) == MINUS_EXPR)
14472 step = fold_build1 (NEGATE_EXPR, stept, step);
14473 OMP_CLAUSE_LINEAR_STEP (c) = step;
14474 if (step != TREE_OPERAND (t, 1))
14476 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14477 &for_pre_body, NULL,
14478 is_gimple_val, fb_rvalue, false);
14479 ret = MIN (ret, tret);
14482 gimplify_omp_ctxp->in_for_exprs = false;
14483 break;
14485 default:
14486 gcc_unreachable ();
14489 if (c2)
14491 gcc_assert (c);
14492 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14495 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14497 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14498 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14499 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14500 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14501 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14502 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14503 && OMP_CLAUSE_DECL (c) == decl)
14505 if (is_doacross && (collapse == 1 || i >= collapse))
14506 t = var;
14507 else
14509 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14510 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14511 gcc_assert (TREE_OPERAND (t, 0) == var);
14512 t = TREE_OPERAND (t, 1);
14513 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14514 || TREE_CODE (t) == MINUS_EXPR
14515 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14516 gcc_assert (TREE_OPERAND (t, 0) == var);
14517 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14518 is_doacross ? var : decl,
14519 TREE_OPERAND (t, 1));
14521 gimple_seq *seq;
14522 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14523 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14524 else
14525 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14526 push_gimplify_context ();
14527 gimplify_assign (decl, t, seq);
14528 gimple *bind = NULL;
14529 if (gimplify_ctxp->temps)
14531 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14532 *seq = NULL;
14533 gimplify_seq_add_stmt (seq, bind);
14535 pop_gimplify_context (bind);
14538 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14539 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14541 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14542 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14543 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14544 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14545 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14546 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14547 gcc_assert (COMPARISON_CLASS_P (t));
14548 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14549 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14550 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14554 BITMAP_FREE (has_decl_expr);
14555 delete allocate_uids;
14557 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14558 || (loop_p && orig_for_stmt == for_stmt))
14560 push_gimplify_context ();
14561 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14563 OMP_FOR_BODY (orig_for_stmt)
14564 = build3 (BIND_EXPR, void_type_node, NULL,
14565 OMP_FOR_BODY (orig_for_stmt), NULL);
14566 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14570 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14571 &for_body);
14573 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14574 || (loop_p && orig_for_stmt == for_stmt))
14576 if (gimple_code (g) == GIMPLE_BIND)
14577 pop_gimplify_context (g);
14578 else
14579 pop_gimplify_context (NULL);
14582 if (orig_for_stmt != for_stmt)
14583 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14585 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14586 decl = TREE_OPERAND (t, 0);
14587 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14588 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14589 gimplify_omp_ctxp = ctx->outer_context;
14590 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14591 gimplify_omp_ctxp = ctx;
14592 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14593 TREE_OPERAND (t, 0) = var;
14594 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14595 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14596 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14597 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14598 for (int j = i + 1;
14599 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14601 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14602 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14603 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14604 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14606 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14607 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14609 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14610 gcc_assert (COMPARISON_CLASS_P (t));
14611 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14612 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14614 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14615 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14620 gimplify_adjust_omp_clauses (pre_p, for_body,
14621 &OMP_FOR_CLAUSES (orig_for_stmt),
14622 TREE_CODE (orig_for_stmt));
14624 int kind;
14625 switch (TREE_CODE (orig_for_stmt))
14627 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14628 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14629 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14630 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14631 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14632 default:
14633 gcc_unreachable ();
14635 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14637 gimplify_seq_add_seq (pre_p, for_pre_body);
14638 for_pre_body = NULL;
14640 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14641 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14642 for_pre_body);
14643 if (orig_for_stmt != for_stmt)
14644 gimple_omp_for_set_combined_p (gfor, true);
14645 if (gimplify_omp_ctxp
14646 && (gimplify_omp_ctxp->combined_loop
14647 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14648 && gimplify_omp_ctxp->outer_context
14649 && gimplify_omp_ctxp->outer_context->combined_loop)))
14651 gimple_omp_for_set_combined_into_p (gfor, true);
14652 if (gimplify_omp_ctxp->combined_loop)
14653 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14654 else
14655 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14658 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14660 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14661 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14662 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14663 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14664 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14665 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14666 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14667 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14670 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14671 constructs with GIMPLE_OMP_TASK sandwiched in between them.
14672 The outer taskloop stands for computing the number of iterations
14673 (and the counts for collapsed loops) and for holding taskloop-specific clauses.
14674 The task construct stands for the effect of data sharing on the
14675 explicit task it creates and the inner taskloop stands for expansion
14676 of the static loop inside of the explicit task construct. */
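/* An illustrative sketch (not part of the original source): a construct like

     #pragma omp taskloop grainsize(4)
     for (i = 0; i < n; i++)
       body;

   therefore gimplifies to approximately

     GIMPLE_OMP_FOR (taskloop)      <- iteration counts, grainsize clause
       GIMPLE_OMP_TASK              <- data-sharing clauses
         GIMPLE_OMP_FOR (taskloop)  <- the loop itself
           body;  */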
14677 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14679 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14680 tree task_clauses = NULL_TREE;
14681 tree c = *gfor_clauses_ptr;
14682 tree *gtask_clauses_ptr = &task_clauses;
14683 tree outer_for_clauses = NULL_TREE;
14684 tree *gforo_clauses_ptr = &outer_for_clauses;
14685 bitmap lastprivate_uids = NULL;
14686 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14688 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14689 if (c)
14691 lastprivate_uids = BITMAP_ALLOC (NULL);
14692 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14693 OMP_CLAUSE_LASTPRIVATE))
14694 bitmap_set_bit (lastprivate_uids,
14695 DECL_UID (OMP_CLAUSE_DECL (c)));
14697 c = *gfor_clauses_ptr;
14699 for (; c; c = OMP_CLAUSE_CHAIN (c))
14700 switch (OMP_CLAUSE_CODE (c))
14702 /* These clauses are allowed on task, move them there. */
14703 case OMP_CLAUSE_SHARED:
14704 case OMP_CLAUSE_FIRSTPRIVATE:
14705 case OMP_CLAUSE_DEFAULT:
14706 case OMP_CLAUSE_IF:
14707 case OMP_CLAUSE_UNTIED:
14708 case OMP_CLAUSE_FINAL:
14709 case OMP_CLAUSE_MERGEABLE:
14710 case OMP_CLAUSE_PRIORITY:
14711 case OMP_CLAUSE_REDUCTION:
14712 case OMP_CLAUSE_IN_REDUCTION:
14713 *gtask_clauses_ptr = c;
14714 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14715 break;
14716 case OMP_CLAUSE_PRIVATE:
14717 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14719 /* We want private on outer for and firstprivate
14720 on task. */
14721 *gtask_clauses_ptr
14722 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14723 OMP_CLAUSE_FIRSTPRIVATE);
14724 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14725 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14726 openacc);
14727 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14728 *gforo_clauses_ptr = c;
14729 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14731 else
14733 *gtask_clauses_ptr = c;
14734 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14736 break;
14737 /* These clauses go into outer taskloop clauses. */
14738 case OMP_CLAUSE_GRAINSIZE:
14739 case OMP_CLAUSE_NUM_TASKS:
14740 case OMP_CLAUSE_NOGROUP:
14741 *gforo_clauses_ptr = c;
14742 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14743 break;
14744 /* The collapse clause we duplicate on both taskloops. */
14745 case OMP_CLAUSE_COLLAPSE:
14746 *gfor_clauses_ptr = c;
14747 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14748 *gforo_clauses_ptr = copy_node (c);
14749 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14750 break;
14751 /* For lastprivate, keep the clause on the inner taskloop, and add
14752 a shared clause on the task. If the same decl is also firstprivate,
14753 also add a firstprivate clause on the inner taskloop. */
14754 case OMP_CLAUSE_LASTPRIVATE:
14755 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14757 /* For taskloop C++ lastprivate IVs, we want:
14758 1) private on outer taskloop
14759 2) firstprivate and shared on task
14760 3) lastprivate on inner taskloop */
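/* An illustrative sketch (not part of the original source): for
     #pragma omp taskloop lastprivate(i)
   where i is the loop iterator, the code below distributes the clauses
   roughly as
     outer taskloop: private(i)
     task:           firstprivate(i) shared(i)
     inner taskloop: lastprivate(i)  */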
14761 *gtask_clauses_ptr
14762 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14763 OMP_CLAUSE_FIRSTPRIVATE);
14764 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14765 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14766 openacc);
14767 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14768 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14769 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14770 OMP_CLAUSE_PRIVATE);
14771 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14772 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14773 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14774 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14776 *gfor_clauses_ptr = c;
14777 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14778 *gtask_clauses_ptr
14779 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14780 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14781 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14782 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14783 gtask_clauses_ptr
14784 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14785 break;
14786 /* The allocate clause we duplicate on task and inner taskloop
14787 if the decl is lastprivate; otherwise we just put it on task. */
14788 case OMP_CLAUSE_ALLOCATE:
14789 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14790 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14792 /* Additionally, put firstprivate clause on task
14793 for the allocator if it is not constant. */
14794 *gtask_clauses_ptr
14795 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14796 OMP_CLAUSE_FIRSTPRIVATE);
14797 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14798 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14799 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14801 if (lastprivate_uids
14802 && bitmap_bit_p (lastprivate_uids,
14803 DECL_UID (OMP_CLAUSE_DECL (c))))
14805 *gfor_clauses_ptr = c;
14806 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14807 *gtask_clauses_ptr = copy_node (c);
14808 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14810 else
14812 *gtask_clauses_ptr = c;
14813 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14815 break;
14816 default:
14817 gcc_unreachable ();
14819 *gfor_clauses_ptr = NULL_TREE;
14820 *gtask_clauses_ptr = NULL_TREE;
14821 *gforo_clauses_ptr = NULL_TREE;
14822 BITMAP_FREE (lastprivate_uids);
14823 gimple_set_location (gfor, input_location);
14824 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14825 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14826 NULL_TREE, NULL_TREE, NULL_TREE);
14827 gimple_set_location (g, input_location);
14828 gimple_omp_task_set_taskloop_p (g, true);
14829 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14830 gomp_for *gforo
14831 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14832 gimple_omp_for_collapse (gfor),
14833 gimple_omp_for_pre_body (gfor));
14834 gimple_omp_for_set_pre_body (gfor, NULL);
14835 gimple_omp_for_set_combined_p (gforo, true);
14836 gimple_omp_for_set_combined_into_p (gfor, true);
14837 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14839 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14840 tree v = create_tmp_var (type);
14841 gimple_omp_for_set_index (gforo, i, v);
14842 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14843 gimple_omp_for_set_initial (gforo, i, t);
14844 gimple_omp_for_set_cond (gforo, i,
14845 gimple_omp_for_cond (gfor, i));
14846 t = unshare_expr (gimple_omp_for_final (gfor, i));
14847 gimple_omp_for_set_final (gforo, i, t);
14848 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14849 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14850 TREE_OPERAND (t, 0) = v;
14851 gimple_omp_for_set_incr (gforo, i, t);
14852 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14853 OMP_CLAUSE_DECL (t) = v;
14854 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14855 gimple_omp_for_set_clauses (gforo, t);
14856 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14858 tree *p1 = NULL, *p2 = NULL;
14859 t = gimple_omp_for_initial (gforo, i);
14860 if (TREE_CODE (t) == TREE_VEC)
14861 p1 = &TREE_VEC_ELT (t, 0);
14862 t = gimple_omp_for_final (gforo, i);
14863 if (TREE_CODE (t) == TREE_VEC)
14865 if (p1)
14866 p2 = &TREE_VEC_ELT (t, 0);
14867 else
14868 p1 = &TREE_VEC_ELT (t, 0);
14870 if (p1)
14872 int j;
14873 for (j = 0; j < i; j++)
14874 if (*p1 == gimple_omp_for_index (gfor, j))
14876 *p1 = gimple_omp_for_index (gforo, j);
14877 if (p2)
14878 *p2 = *p1;
14879 break;
14881 gcc_assert (j < i);
14885 gimplify_seq_add_stmt (pre_p, gforo);
14887 else
14888 gimplify_seq_add_stmt (pre_p, gfor);
14890 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14892 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14893 unsigned lastprivate_conditional = 0;
14894 while (ctx
14895 && (ctx->region_type == ORT_TARGET_DATA
14896 || ctx->region_type == ORT_TASKGROUP))
14897 ctx = ctx->outer_context;
14898 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14899 for (tree c = gimple_omp_for_clauses (gfor);
14900 c; c = OMP_CLAUSE_CHAIN (c))
14901 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14902 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14903 ++lastprivate_conditional;
14904 if (lastprivate_conditional)
14906 struct omp_for_data fd;
14907 omp_extract_for_data (gfor, &fd, NULL);
14908 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14909 lastprivate_conditional);
14910 tree var = create_tmp_var_raw (type);
14911 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14912 OMP_CLAUSE_DECL (c) = var;
14913 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14914 gimple_omp_for_set_clauses (gfor, c);
14915 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14918 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14920 unsigned lastprivate_conditional = 0;
14921 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14923 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14924 ++lastprivate_conditional;
14925 if (lastprivate_conditional)
14927 struct omp_for_data fd;
14928 omp_extract_for_data (gfor, &fd, NULL);
14929 tree type = unsigned_type_for (fd.iter_type);
14930 while (lastprivate_conditional--)
14932 tree c = build_omp_clause (UNKNOWN_LOCATION,
14933 OMP_CLAUSE__CONDTEMP_);
14934 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14935 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14936 gimple_omp_for_set_clauses (gfor, c);
14941 if (ret != GS_ALL_DONE)
14942 return GS_ERROR;
14943 *expr_p = NULL_TREE;
14944 return GS_ALL_DONE;
14947 /* Helper for gimplify_omp_loop, called through walk_tree. */
14949 static tree
14950 note_no_context_vars (tree *tp, int *, void *data)
14952 if (VAR_P (*tp)
14953 && DECL_CONTEXT (*tp) == NULL_TREE
14954 && !is_global_var (*tp))
14956 vec<tree> *d = (vec<tree> *) data;
14957 d->safe_push (*tp);
14958 DECL_CONTEXT (*tp) = current_function_decl;
14960 return NULL_TREE;
14963 /* Gimplify the gross structure of an OMP_LOOP statement. */
14965 static enum gimplify_status
14966 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14968 tree for_stmt = *expr_p;
14969 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14970 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14971 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14972 int i;
14974 /* If order is not present, the behavior is as if order(concurrent)
14975 appeared. */
14976 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14977 if (order == NULL_TREE)
14979 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14980 OMP_CLAUSE_CHAIN (order) = clauses;
14981 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14984 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14985 if (bind == NULL_TREE)
14987 if (!flag_openmp) /* flag_openmp_simd */
14989 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14990 kind = OMP_CLAUSE_BIND_TEAMS;
14991 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14992 kind = OMP_CLAUSE_BIND_PARALLEL;
14993 else
14995 for (; octx; octx = octx->outer_context)
14997 if ((octx->region_type & ORT_ACC) != 0
14998 || octx->region_type == ORT_NONE
14999 || octx->region_type == ORT_IMPLICIT_TARGET)
15000 continue;
15001 break;
15003 if (octx == NULL && !in_omp_construct)
15004 error_at (EXPR_LOCATION (for_stmt),
15005 "%<bind%> clause not specified on a %<loop%> "
15006 "construct not nested inside another OpenMP construct");
15008 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
15009 OMP_CLAUSE_CHAIN (bind) = clauses;
15010 OMP_CLAUSE_BIND_KIND (bind) = kind;
15011 OMP_FOR_CLAUSES (for_stmt) = bind;
15013 else
15014 switch (OMP_CLAUSE_BIND_KIND (bind))
15016 case OMP_CLAUSE_BIND_THREAD:
15017 break;
15018 case OMP_CLAUSE_BIND_PARALLEL:
15019 if (!flag_openmp) /* flag_openmp_simd */
15021 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15022 break;
15024 for (; octx; octx = octx->outer_context)
15025 if (octx->region_type == ORT_SIMD
15026 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
15028 error_at (EXPR_LOCATION (for_stmt),
15029 "%<bind(parallel)%> on a %<loop%> construct nested "
15030 "inside %<simd%> construct");
15031 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15032 break;
15034 kind = OMP_CLAUSE_BIND_PARALLEL;
15035 break;
15036 case OMP_CLAUSE_BIND_TEAMS:
15037 if (!flag_openmp) /* flag_openmp_simd */
15039 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15040 break;
15042 if ((octx
15043 && octx->region_type != ORT_IMPLICIT_TARGET
15044 && octx->region_type != ORT_NONE
15045 && (octx->region_type & ORT_TEAMS) == 0)
15046 || in_omp_construct)
15048 error_at (EXPR_LOCATION (for_stmt),
15049 "%<bind(teams)%> on a %<loop%> region not strictly "
15050 "nested inside of a %<teams%> region");
15051 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
15052 break;
15054 kind = OMP_CLAUSE_BIND_TEAMS;
15055 break;
15056 default:
15057 gcc_unreachable ();
15060 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
15061 switch (OMP_CLAUSE_CODE (*pc))
15063 case OMP_CLAUSE_REDUCTION:
15064 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
15066 error_at (OMP_CLAUSE_LOCATION (*pc),
15067 "%<inscan%> %<reduction%> clause on "
15068 "%qs construct", "loop");
15069 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
15071 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
15073 error_at (OMP_CLAUSE_LOCATION (*pc),
15074 "invalid %<task%> reduction modifier on construct "
15075 "other than %<parallel%>, %qs or %<sections%>",
15076 lang_GNU_Fortran () ? "do" : "for");
15077 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
15079 pc = &OMP_CLAUSE_CHAIN (*pc);
15080 break;
15081 case OMP_CLAUSE_LASTPRIVATE:
15082 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15084 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15085 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15086 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
15087 break;
15088 if (OMP_FOR_ORIG_DECLS (for_stmt)
15089 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15090 i)) == TREE_LIST
15091 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15092 i)))
15094 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15095 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
15096 break;
15099 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
15101 error_at (OMP_CLAUSE_LOCATION (*pc),
15102 "%<lastprivate%> clause on a %<loop%> construct refers "
15103 "to a variable %qD which is not the loop iterator",
15104 OMP_CLAUSE_DECL (*pc));
15105 *pc = OMP_CLAUSE_CHAIN (*pc);
15106 break;
15108 pc = &OMP_CLAUSE_CHAIN (*pc);
15109 break;
15110 default:
15111 pc = &OMP_CLAUSE_CHAIN (*pc);
15112 break;
15115 TREE_SET_CODE (for_stmt, OMP_SIMD);
15117 int last;
15118 switch (kind)
15120 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
15121 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
15122 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
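/* An illustrative sketch (not part of the original source): having turned
   the loop construct into OMP_SIMD above, the passes below wrap it so that
     loop bind(thread)   ->  simd
     loop bind(parallel) ->  for simd   (work-shared in the binding parallel)
     loop bind(teams)    ->  distribute parallel for simd
   where pass 1 adds the OMP_FOR and pass 2 adds the OMP_DISTRIBUTE wrapped
   in a combined OMP_PARALLEL.  */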
15124 for (int pass = 1; pass <= last; pass++)
15126 if (pass == 2)
15128 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
15129 make_node (BLOCK));
15130 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
15131 *expr_p = make_node (OMP_PARALLEL);
15132 TREE_TYPE (*expr_p) = void_type_node;
15133 OMP_PARALLEL_BODY (*expr_p) = bind;
15134 OMP_PARALLEL_COMBINED (*expr_p) = 1;
15135 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
15136 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
15137 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15138 if (OMP_FOR_ORIG_DECLS (for_stmt)
15139 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
15140 == TREE_LIST))
15142 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15143 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
15145 *pc = build_omp_clause (UNKNOWN_LOCATION,
15146 OMP_CLAUSE_FIRSTPRIVATE);
15147 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
15148 pc = &OMP_CLAUSE_CHAIN (*pc);
15152 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
15153 tree *pc = &OMP_FOR_CLAUSES (t);
15154 TREE_TYPE (t) = void_type_node;
15155 OMP_FOR_BODY (t) = *expr_p;
15156 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
15157 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15158 switch (OMP_CLAUSE_CODE (c))
15160 case OMP_CLAUSE_BIND:
15161 case OMP_CLAUSE_ORDER:
15162 case OMP_CLAUSE_COLLAPSE:
15163 *pc = copy_node (c);
15164 pc = &OMP_CLAUSE_CHAIN (*pc);
15165 break;
15166 case OMP_CLAUSE_PRIVATE:
15167 case OMP_CLAUSE_FIRSTPRIVATE:
15168 /* Only needed on innermost. */
15169 break;
15170 case OMP_CLAUSE_LASTPRIVATE:
15171 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15173 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15174 OMP_CLAUSE_FIRSTPRIVATE);
15175 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15176 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15177 pc = &OMP_CLAUSE_CHAIN (*pc);
15179 *pc = copy_node (c);
15180 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15181 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15182 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15184 if (pass != last)
15185 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15186 else
15187 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15188 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15190 pc = &OMP_CLAUSE_CHAIN (*pc);
15191 break;
15192 case OMP_CLAUSE_REDUCTION:
15193 *pc = copy_node (c);
15194 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15195 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15196 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15198 auto_vec<tree> no_context_vars;
15199 int walk_subtrees = 0;
15200 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15201 &walk_subtrees, &no_context_vars);
15202 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15203 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15204 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15205 note_no_context_vars,
15206 &no_context_vars);
15207 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15208 note_no_context_vars,
15209 &no_context_vars);
15211 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15212 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15213 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15214 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15215 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15217 hash_map<tree, tree> decl_map;
15218 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15219 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15220 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15221 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15222 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15223 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15225 copy_body_data id;
15226 memset (&id, 0, sizeof (id));
15227 id.src_fn = current_function_decl;
15228 id.dst_fn = current_function_decl;
15229 id.src_cfun = cfun;
15230 id.decl_map = &decl_map;
15231 id.copy_decl = copy_decl_no_change;
15232 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15233 id.transform_new_cfg = true;
15234 id.transform_return_to_modify = false;
15235 id.eh_lp_nr = 0;
15236 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15237 &id, NULL);
15238 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15239 &id, NULL);
15241 for (tree d : no_context_vars)
15243 DECL_CONTEXT (d) = NULL_TREE;
15244 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15247 else
15249 OMP_CLAUSE_REDUCTION_INIT (*pc)
15250 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15251 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15252 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15254 pc = &OMP_CLAUSE_CHAIN (*pc);
15255 break;
15256 default:
15257 gcc_unreachable ();
15259 *pc = NULL_TREE;
15260 *expr_p = t;
15262 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
15266 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15267 of OMP_TARGET's body. */
15269 static tree
15270 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15272 *walk_subtrees = 0;
15273 switch (TREE_CODE (*tp))
15275 case OMP_TEAMS:
15276 return *tp;
15277 case BIND_EXPR:
15278 case STATEMENT_LIST:
15279 *walk_subtrees = 1;
15280 break;
15281 default:
15282 break;
15284 return NULL_TREE;
15287 /* Helper function of optimize_target_teams, determine if the expression
15288 can be computed safely before the target construct on the host. */
15290 static tree
15291 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15293 splay_tree_node n;
15295 if (TYPE_P (*tp))
15297 *walk_subtrees = 0;
15298 return NULL_TREE;
15300 switch (TREE_CODE (*tp))
15302 case VAR_DECL:
15303 case PARM_DECL:
15304 case RESULT_DECL:
15305 *walk_subtrees = 0;
15306 if (error_operand_p (*tp)
15307 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15308 || DECL_HAS_VALUE_EXPR_P (*tp)
15309 || DECL_THREAD_LOCAL_P (*tp)
15310 || TREE_SIDE_EFFECTS (*tp)
15311 || TREE_THIS_VOLATILE (*tp))
15312 return *tp;
15313 if (is_global_var (*tp)
15314 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15315 || lookup_attribute ("omp declare target link",
15316 DECL_ATTRIBUTES (*tp))))
15317 return *tp;
15318 if (VAR_P (*tp)
15319 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15320 && !is_global_var (*tp)
15321 && decl_function_context (*tp) == current_function_decl)
15322 return *tp;
15323 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15324 (splay_tree_key) *tp);
15325 if (n == NULL)
15327 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15328 return NULL_TREE;
15329 return *tp;
15331 else if (n->value & GOVD_LOCAL)
15332 return *tp;
15333 else if (n->value & GOVD_FIRSTPRIVATE)
15334 return NULL_TREE;
15335 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15336 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15337 return NULL_TREE;
15338 return *tp;
15339 case INTEGER_CST:
15340 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15341 return *tp;
15342 return NULL_TREE;
15343 case TARGET_EXPR:
15344 if (TARGET_EXPR_INITIAL (*tp)
15345 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15346 return *tp;
15347 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15348 walk_subtrees, NULL);
15349 /* Allow some reasonable subset of integral arithmetic. */
15350 case PLUS_EXPR:
15351 case MINUS_EXPR:
15352 case MULT_EXPR:
15353 case TRUNC_DIV_EXPR:
15354 case CEIL_DIV_EXPR:
15355 case FLOOR_DIV_EXPR:
15356 case ROUND_DIV_EXPR:
15357 case TRUNC_MOD_EXPR:
15358 case CEIL_MOD_EXPR:
15359 case FLOOR_MOD_EXPR:
15360 case ROUND_MOD_EXPR:
15361 case RDIV_EXPR:
15362 case EXACT_DIV_EXPR:
15363 case MIN_EXPR:
15364 case MAX_EXPR:
15365 case LSHIFT_EXPR:
15366 case RSHIFT_EXPR:
15367 case BIT_IOR_EXPR:
15368 case BIT_XOR_EXPR:
15369 case BIT_AND_EXPR:
15370 case NEGATE_EXPR:
15371 case ABS_EXPR:
15372 case BIT_NOT_EXPR:
15373 case NON_LVALUE_EXPR:
15374 CASE_CONVERT:
15375 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15376 return *tp;
15377 return NULL_TREE;
15378 /* And disallow anything else, except for comparisons. */
15379 default:
15380 if (COMPARISON_CLASS_P (*tp))
15381 return NULL_TREE;
15382 return *tp;
15386 /* Try to determine if the num_teams and/or thread_limit expressions
15387 can have their values determined already before entering the
15388 target construct.
15389 INTEGER_CSTs trivially are,
15390 integral decls that are firstprivate (explicitly or implicitly)
15391 or explicitly map(always, to:) or map(always, tofrom:) on the target
15392 region too, and expressions involving simple arithmetic on those
15393 as well; function calls are not OK, nor is dereferencing something, etc.
15394 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15395 EXPR based on what we find:
15396 0 stands for a clause not specified at all; use the implementation default
15397 -1 stands for a value that can't be determined easily before entering
15398 the target construct
15399 -2 means that no explicit teams construct was specified.
15400 If the teams construct is not present at all, use 1 for num_teams
15401 and 0 for thread_limit (only one team is involved, and the thread
15402 limit is implementation defined). */
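/* An illustrative sketch (not part of the original source), assuming
   default data-sharing of scalars:

     int n = 4;
     #pragma omp target
     #pragma omp teams num_teams(n + 1) thread_limit(foo ())
     ...

   n + 1 only involves a firstprivatized scalar and simple arithmetic, so
   a NUM_TEAMS clause with that expression is added to the target, while
   the call foo () cannot be evaluated on the host beforehand, so
   THREAD_LIMIT gets -1.  */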
15404 static void
15405 optimize_target_teams (tree target, gimple_seq *pre_p)
15407 tree body = OMP_BODY (target);
15408 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15409 tree num_teams_lower = NULL_TREE;
15410 tree num_teams_upper = integer_zero_node;
15411 tree thread_limit = integer_zero_node;
15412 location_t num_teams_loc = EXPR_LOCATION (target);
15413 location_t thread_limit_loc = EXPR_LOCATION (target);
15414 tree c, *p, expr;
15415 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15417 if (teams == NULL_TREE)
15418 num_teams_upper = build_int_cst (integer_type_node, -2);
15419 else
15420 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15422 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15424 p = &num_teams_upper;
15425 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15426 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15428 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15429 if (TREE_CODE (expr) == INTEGER_CST)
15430 num_teams_lower = expr;
15431 else if (walk_tree (&expr, computable_teams_clause,
15432 NULL, NULL))
15433 num_teams_lower = integer_minus_one_node;
15434 else
15436 num_teams_lower = expr;
15437 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15438 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15439 is_gimple_val, fb_rvalue, false)
15440 == GS_ERROR)
15442 gimplify_omp_ctxp = target_ctx;
15443 num_teams_lower = integer_minus_one_node;
15445 else
15447 gimplify_omp_ctxp = target_ctx;
15448 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15449 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15450 = num_teams_lower;
15455 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15457 p = &thread_limit;
15458 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15460 else
15461 continue;
15462 expr = OMP_CLAUSE_OPERAND (c, 0);
15463 if (TREE_CODE (expr) == INTEGER_CST)
15465 *p = expr;
15466 continue;
15468 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15470 *p = integer_minus_one_node;
15471 continue;
15473 *p = expr;
15474 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15475 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15476 == GS_ERROR)
15478 gimplify_omp_ctxp = target_ctx;
15479 *p = integer_minus_one_node;
15480 continue;
15482 gimplify_omp_ctxp = target_ctx;
15483 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15484 OMP_CLAUSE_OPERAND (c, 0) = *p;
15486 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15488 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15489 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15490 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15491 OMP_TARGET_CLAUSES (target) = c;
15493 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15494 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15495 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15496 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15497 OMP_TARGET_CLAUSES (target) = c;
15500 /* Gimplify the gross structure of several OMP constructs. */
15502 static void
15503 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15505 tree expr = *expr_p;
15506 gimple *stmt;
15507 gimple_seq body = NULL;
15508 enum omp_region_type ort;
15510 switch (TREE_CODE (expr))
15512 case OMP_SECTIONS:
15513 case OMP_SINGLE:
15514 ort = ORT_WORKSHARE;
15515 break;
15516 case OMP_SCOPE:
15517 ort = ORT_TASKGROUP;
15518 break;
15519 case OMP_TARGET:
15520 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15521 break;
15522 case OACC_KERNELS:
15523 ort = ORT_ACC_KERNELS;
15524 break;
15525 case OACC_PARALLEL:
15526 ort = ORT_ACC_PARALLEL;
15527 break;
15528 case OACC_SERIAL:
15529 ort = ORT_ACC_SERIAL;
15530 break;
15531 case OACC_DATA:
15532 ort = ORT_ACC_DATA;
15533 break;
15534 case OMP_TARGET_DATA:
15535 ort = ORT_TARGET_DATA;
15536 break;
15537 case OMP_TEAMS:
15538 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15539 if (gimplify_omp_ctxp == NULL
15540 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15541 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15542 break;
15543 case OACC_HOST_DATA:
15544 ort = ORT_ACC_HOST_DATA;
15545 break;
15546 default:
15547 gcc_unreachable ();
15550 bool save_in_omp_construct = in_omp_construct;
15551 if ((ort & ORT_ACC) == 0)
15552 in_omp_construct = false;
15553 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15554 TREE_CODE (expr));
15555 if (TREE_CODE (expr) == OMP_TARGET)
15556 optimize_target_teams (expr, pre_p);
15557 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15558 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15560 push_gimplify_context ();
15561 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15562 if (gimple_code (g) == GIMPLE_BIND)
15563 pop_gimplify_context (g);
15564 else
15565 pop_gimplify_context (NULL);
15566 if ((ort & ORT_TARGET_DATA) != 0)
15568 enum built_in_function end_ix;
15569 switch (TREE_CODE (expr))
15571 case OACC_DATA:
15572 case OACC_HOST_DATA:
15573 end_ix = BUILT_IN_GOACC_DATA_END;
15574 break;
15575 case OMP_TARGET_DATA:
15576 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15577 break;
15578 default:
15579 gcc_unreachable ();
15581 tree fn = builtin_decl_explicit (end_ix);
15582 g = gimple_build_call (fn, 0);
15583 gimple_seq cleanup = NULL;
15584 gimple_seq_add_stmt (&cleanup, g);
15585 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15586 body = NULL;
15587 gimple_seq_add_stmt (&body, g);
15590 else
15591 gimplify_and_add (OMP_BODY (expr), &body);
15592 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15593 TREE_CODE (expr));
15594 in_omp_construct = save_in_omp_construct;
15596 switch (TREE_CODE (expr))
15598 case OACC_DATA:
15599 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15600 OMP_CLAUSES (expr));
15601 break;
15602 case OACC_HOST_DATA:
15603 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15605 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15606 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15607 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15610 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15611 OMP_CLAUSES (expr));
15612 break;
15613 case OACC_KERNELS:
15614 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15615 OMP_CLAUSES (expr));
15616 break;
15617 case OACC_PARALLEL:
15618 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15619 OMP_CLAUSES (expr));
15620 break;
15621 case OACC_SERIAL:
15622 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15623 OMP_CLAUSES (expr));
15624 break;
15625 case OMP_SECTIONS:
15626 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15627 break;
15628 case OMP_SINGLE:
15629 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15630 break;
15631 case OMP_SCOPE:
15632 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15633 break;
15634 case OMP_TARGET:
15635 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15636 OMP_CLAUSES (expr));
15637 break;
15638 case OMP_TARGET_DATA:
15639 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15640 to be evaluated before the use_device_{ptr,addr} clauses if they
15641 refer to the same variables. */
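/* An illustrative sketch (not part of the original source): for

     #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)

   the loop below moves use_device_ptr(p) after the map clause, so the
   mapping of p[0:n] is processed before p is translated to its device
   pointer.  */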
15643 tree use_device_clauses;
15644 tree *pc, *uc = &use_device_clauses;
15645 for (pc = &OMP_CLAUSES (expr); *pc; )
15646 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15647 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15649 *uc = *pc;
15650 *pc = OMP_CLAUSE_CHAIN (*pc);
15651 uc = &OMP_CLAUSE_CHAIN (*uc);
15653 else
15654 pc = &OMP_CLAUSE_CHAIN (*pc);
15655 *uc = NULL_TREE;
15656 *pc = use_device_clauses;
15657 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15658 OMP_CLAUSES (expr));
15660 break;
15661 case OMP_TEAMS:
15662 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15663 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15664 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15665 break;
15666 default:
15667 gcc_unreachable ();
15670 gimplify_seq_add_stmt (pre_p, stmt);
15671 *expr_p = NULL_TREE;
15674 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15675 target update constructs. */
15677 static void
15678 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15680 tree expr = *expr_p;
15681 int kind;
15682 gomp_target *stmt;
15683 enum omp_region_type ort = ORT_WORKSHARE;
15685 switch (TREE_CODE (expr))
15687 case OACC_ENTER_DATA:
15688 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15689 ort = ORT_ACC;
15690 break;
15691 case OACC_EXIT_DATA:
15692 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15693 ort = ORT_ACC;
15694 break;
15695 case OACC_UPDATE:
15696 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15697 ort = ORT_ACC;
15698 break;
15699 case OMP_TARGET_UPDATE:
15700 kind = GF_OMP_TARGET_KIND_UPDATE;
15701 break;
15702 case OMP_TARGET_ENTER_DATA:
15703 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15704 break;
15705 case OMP_TARGET_EXIT_DATA:
15706 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15707 break;
15708 default:
15709 gcc_unreachable ();
15711 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15712 ort, TREE_CODE (expr));
15713 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15714 TREE_CODE (expr));
15715 if (TREE_CODE (expr) == OACC_UPDATE
15716 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15717 OMP_CLAUSE_IF_PRESENT))
15719 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15720 clause. */
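/* An illustrative sketch (not part of the original source): for
     #pragma acc update self(x) if_present
   the GOMP_MAP_FORCE_FROM mapping of x is demoted to GOMP_MAP_FROM
   below, which the runtime treats as "copy back only if present".  */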
15721 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15722 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15723 switch (OMP_CLAUSE_MAP_KIND (c))
15725 case GOMP_MAP_FORCE_TO:
15726 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15727 break;
15728 case GOMP_MAP_FORCE_FROM:
15729 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15730 break;
15731 default:
15732 break;
15735 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15736 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15737 OMP_CLAUSE_FINALIZE))
15739 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15740 semantics. */
15741 bool have_clause = false;
15742 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15743 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15744 switch (OMP_CLAUSE_MAP_KIND (c))
15746 case GOMP_MAP_FROM:
15747 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15748 have_clause = true;
15749 break;
15750 case GOMP_MAP_RELEASE:
15751 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15752 have_clause = true;
15753 break;
15754 case GOMP_MAP_TO_PSET:
15755 /* Fortran arrays with descriptors must map that descriptor when
15756 doing standalone "attach" operations (in OpenACC). In that
15757 case GOMP_MAP_TO_PSET appears by itself with no preceding
15758 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15759 break;
15760 case GOMP_MAP_POINTER:
15761 /* TODO PR92929: we may see these here, but they'll always follow
15762 one of the clauses above, and will be handled by libgomp as
15763 one group, so no handling required here. */
15764 gcc_assert (have_clause);
15765 break;
15766 case GOMP_MAP_DETACH:
15767 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15768 have_clause = false;
15769 break;
15770 case GOMP_MAP_STRUCT:
15771 have_clause = false;
15772 break;
15773 default:
15774 gcc_unreachable ();
15777 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15779 gimplify_seq_add_stmt (pre_p, stmt);
15780 *expr_p = NULL_TREE;
15783 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15784 stabilized the lhs of the atomic operation as *ADDR. Return true if
15785 EXPR is this stabilized form. */
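/* An illustrative sketch (not part of the original source): with
   ADDR == &x, both x itself and *&x (modulo useless type conversions,
   e.g. casts to a volatile variant) are recognized as the stabilized
   lhs of the atomic operation.  */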
15787 static bool
15788 goa_lhs_expr_p (tree expr, tree addr)
15790 /* Also include casts to other type variants. The C front end is fond
15791 of adding these for e.g. volatile variables. This is like
15792 STRIP_TYPE_NOPS but includes the main variant lookup. */
15793 STRIP_USELESS_TYPE_CONVERSION (expr);
15795 if (INDIRECT_REF_P (expr))
15797 expr = TREE_OPERAND (expr, 0);
15798 while (expr != addr
15799 && (CONVERT_EXPR_P (expr)
15800 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15801 && TREE_CODE (expr) == TREE_CODE (addr)
15802 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15804 expr = TREE_OPERAND (expr, 0);
15805 addr = TREE_OPERAND (addr, 0);
15807 if (expr == addr)
15808 return true;
15809 return (TREE_CODE (addr) == ADDR_EXPR
15810 && TREE_CODE (expr) == ADDR_EXPR
15811 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15813 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15814 return true;
15815 return false;
15818 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15819 expression does not involve the lhs, evaluate it into a temporary.
15820 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15821 or -1 if an error was encountered. */
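/* An illustrative sketch (not part of the original source): for
     #pragma omp atomic
     x = x + foo ();
   with LHS_ADDR == &x and LHS_VAR being the hypothetical temporary x.t,
   the rhs becomes x.t + t.1, where t.1 = foo () has been evaluated into
   *PRE_P because it does not involve the lhs.  */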
15823 static int
15824 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15825 tree lhs_var, tree &target_expr, bool rhs, int depth)
15827 tree expr = *expr_p;
15828 int saw_lhs = 0;
15830 if (goa_lhs_expr_p (expr, lhs_addr))
15832 if (pre_p)
15833 *expr_p = lhs_var;
15834 return 1;
15836 if (is_gimple_val (expr))
15837 return 0;
15839 /* The maximum depth at which the lhs appears in an expression occurs for
15840 __builtin_clear_padding (...), __builtin_clear_padding (...),
15841 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
15842 if (++depth > 7)
15843 goto finish;
15845 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15847 case tcc_binary:
15848 case tcc_comparison:
15849 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15850 lhs_var, target_expr, true, depth);
15851 /* FALLTHRU */
15852 case tcc_unary:
15853 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15854 lhs_var, target_expr, true, depth);
15855 break;
15856 case tcc_expression:
15857 switch (TREE_CODE (expr))
15859 case TRUTH_ANDIF_EXPR:
15860 case TRUTH_ORIF_EXPR:
15861 case TRUTH_AND_EXPR:
15862 case TRUTH_OR_EXPR:
15863 case TRUTH_XOR_EXPR:
15864 case BIT_INSERT_EXPR:
15865 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15866 lhs_addr, lhs_var, target_expr, true,
15867 depth);
15868 /* FALLTHRU */
15869 case TRUTH_NOT_EXPR:
15870 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15871 lhs_addr, lhs_var, target_expr, true,
15872 depth);
15873 break;
15874 case MODIFY_EXPR:
15875 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15876 target_expr, true, depth))
15877 break;
15878 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15879 lhs_addr, lhs_var, target_expr, true,
15880 depth);
15881 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15882 lhs_addr, lhs_var, target_expr, false,
15883 depth);
15884 break;
15885 /* FALLTHRU */
15886 case ADDR_EXPR:
15887 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15888 target_expr, true, depth))
15889 break;
15890 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15891 lhs_addr, lhs_var, target_expr, false,
15892 depth);
15893 break;
15894 case COMPOUND_EXPR:
15895 /* Break out any preevaluations from cp_build_modify_expr. */
15896 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15897 expr = TREE_OPERAND (expr, 1))
15899 /* Special-case __builtin_clear_padding call before
15900 __builtin_memcmp. */
15901 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15903 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15904 if (fndecl
15905 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15906 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15907 && (!pre_p
15908 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15909 lhs_addr, lhs_var,
15910 target_expr, true, depth)))
15912 if (pre_p)
15913 *expr_p = expr;
15914 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15915 pre_p, lhs_addr, lhs_var,
15916 target_expr, true, depth);
15917 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15918 pre_p, lhs_addr, lhs_var,
15919 target_expr, rhs, depth);
15920 return saw_lhs;
15924 if (pre_p)
15925 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15927 if (!pre_p)
15928 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15929 target_expr, rhs, depth);
15930 *expr_p = expr;
15931 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15932 target_expr, rhs, depth);
15933 case COND_EXPR:
15934 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15935 lhs_var, target_expr, true, depth))
15936 break;
15937 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15938 lhs_addr, lhs_var, target_expr, true,
15939 depth);
15940 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15941 lhs_addr, lhs_var, target_expr, true,
15942 depth);
15943 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15944 lhs_addr, lhs_var, target_expr, true,
15945 depth);
15946 break;
15947 case TARGET_EXPR:
15948 if (TARGET_EXPR_INITIAL (expr))
15950 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15951 lhs_var, target_expr, true,
15952 depth))
15953 break;
15954 if (expr == target_expr)
15955 saw_lhs = 1;
15956 else
15958 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15959 pre_p, lhs_addr, lhs_var,
15960 target_expr, true, depth);
15961 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15962 target_expr = expr;
15965 break;
15966 default:
15967 break;
15969 break;
15970 case tcc_reference:
15971 if (TREE_CODE (expr) == BIT_FIELD_REF
15972 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15973 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15974 lhs_addr, lhs_var, target_expr, true,
15975 depth);
15976 break;
15977 case tcc_vl_exp:
15978 if (TREE_CODE (expr) == CALL_EXPR)
15980 if (tree fndecl = get_callee_fndecl (expr))
15981 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
15982 BUILT_IN_MEMCMP))
15984 int nargs = call_expr_nargs (expr);
15985 for (int i = 0; i < nargs; i++)
15986 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15987 pre_p, lhs_addr, lhs_var,
15988 target_expr, true, depth);
15991 break;
15992 default:
15993 break;
15996 finish:
15997 if (saw_lhs == 0 && pre_p)
15999 enum gimplify_status gs;
16000 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
16002 gimplify_stmt (&expr, pre_p);
16003 return saw_lhs;
16005 else if (rhs)
16006 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
16007 else
16008 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
16009 if (gs != GS_ALL_DONE)
16010 saw_lhs = -1;
16013 return saw_lhs;
16016 /* Gimplify an OMP_ATOMIC statement. */
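/* An illustrative sketch (not part of the original source):
     #pragma omp atomic
     x = x + 1;
   gimplifies to roughly
     tmp = GIMPLE_OMP_ATOMIC_LOAD (&x);
     tmp2 = tmp + 1;
     GIMPLE_OMP_ATOMIC_STORE (tmp2);
   where tmp and tmp2 stand for the created temporaries.  */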
16018 static enum gimplify_status
16019 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
16021 tree addr = TREE_OPERAND (*expr_p, 0);
16022 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
16023 ? NULL : TREE_OPERAND (*expr_p, 1);
16024 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
16025 tree tmp_load;
16026 gomp_atomic_load *loadstmt;
16027 gomp_atomic_store *storestmt;
16028 tree target_expr = NULL_TREE;
16030 tmp_load = create_tmp_reg (type);
16031 if (rhs
16032 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
16033 true, 0) < 0)
16034 return GS_ERROR;
16036 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
16037 != GS_ALL_DONE)
16038 return GS_ERROR;
16040 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
16041 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16042 gimplify_seq_add_stmt (pre_p, loadstmt);
16043 if (rhs)
16045 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
16046 representatives. Use BIT_FIELD_REF on the lhs instead. */
16047 tree rhsarg = rhs;
16048 if (TREE_CODE (rhs) == COND_EXPR)
16049 rhsarg = TREE_OPERAND (rhs, 1);
16050 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
16051 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
16053 tree bitpos = TREE_OPERAND (rhsarg, 2);
16054 tree op1 = TREE_OPERAND (rhsarg, 1);
16055 tree bitsize;
16056 tree tmp_store = tmp_load;
16057 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
16058 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
16059 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
16060 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
16061 else
16062 bitsize = TYPE_SIZE (TREE_TYPE (op1));
16063 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
16064 tree t = build2_loc (EXPR_LOCATION (rhsarg),
16065 MODIFY_EXPR, void_type_node,
16066 build3_loc (EXPR_LOCATION (rhsarg),
16067 BIT_FIELD_REF, TREE_TYPE (op1),
16068 tmp_store, bitsize, bitpos), op1);
16069 if (TREE_CODE (rhs) == COND_EXPR)
16070 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
16071 TREE_OPERAND (rhs, 0), t, void_node);
16072 gimplify_and_add (t, pre_p);
16073 rhs = tmp_store;
16075 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
16076 if (TREE_CODE (rhs) == COND_EXPR)
16077 gimplify_ctxp->allow_rhs_cond_expr = true;
16078 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
16079 is_gimple_val, fb_rvalue);
16080 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
16081 if (gs != GS_ALL_DONE)
16082 return GS_ERROR;
16085 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
16086 rhs = tmp_load;
16087 storestmt
16088 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16089 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
16091 gimple_omp_atomic_set_weak (loadstmt);
16092 gimple_omp_atomic_set_weak (storestmt);
16094 gimplify_seq_add_stmt (pre_p, storestmt);
16095 switch (TREE_CODE (*expr_p))
16097 case OMP_ATOMIC_READ:
16098 case OMP_ATOMIC_CAPTURE_OLD:
16099 *expr_p = tmp_load;
16100 gimple_omp_atomic_set_need_value (loadstmt);
16101 break;
16102 case OMP_ATOMIC_CAPTURE_NEW:
16103 *expr_p = rhs;
16104 gimple_omp_atomic_set_need_value (storestmt);
16105 break;
16106 default:
16107 *expr_p = NULL;
16108 break;
16111 return GS_ALL_DONE;
16114 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
16115 body, and adding some EH bits. */
16117 static enum gimplify_status
16118 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
16120 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
16121 gimple *body_stmt;
16122 gtransaction *trans_stmt;
16123 gimple_seq body = NULL;
16124 int subcode = 0;
16126 /* Wrap the transaction body in a BIND_EXPR so we have a context
16127 in which to put decls for OMP. */
16128 if (TREE_CODE (tbody) != BIND_EXPR)
16130 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
16131 TREE_SIDE_EFFECTS (bind) = 1;
16132 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
16133 TRANSACTION_EXPR_BODY (expr) = bind;
16136 push_gimplify_context ();
16137 temp = voidify_wrapper_expr (*expr_p, NULL);
16139 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
16140 pop_gimplify_context (body_stmt);
16142 trans_stmt = gimple_build_transaction (body);
16143 if (TRANSACTION_EXPR_OUTER (expr))
16144 subcode = GTMA_IS_OUTER;
16145 else if (TRANSACTION_EXPR_RELAXED (expr))
16146 subcode = GTMA_IS_RELAXED;
16147 gimple_transaction_set_subcode (trans_stmt, subcode);
16149 gimplify_seq_add_stmt (pre_p, trans_stmt);
16151 if (temp)
16153 *expr_p = temp;
16154 return GS_OK;
16157 *expr_p = NULL_TREE;
16158 return GS_ALL_DONE;
16161 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
16162 is the OMP_BODY of the original EXPR (which has already been
16163 gimplified so it's not present in the EXPR).
16165 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
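/* An illustrative sketch (not part of the original source):

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         {
           #pragma omp ordered doacross(sink: i - 1, j)
           ...
           #pragma omp ordered doacross(source:)
         }

   the sink arguments must name the iteration variables of the two
   outermost loops, in order, which is what the checks below enforce.  */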
16167 static gimple *
16168 gimplify_omp_ordered (tree expr, gimple_seq body)
16170 tree c, decls;
16171 int failures = 0;
16172 unsigned int i;
16173 tree source_c = NULL_TREE;
16174 tree sink_c = NULL_TREE;
16176 if (gimplify_omp_ctxp)
16178 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16179 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16180 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16182 error_at (OMP_CLAUSE_LOCATION (c),
16183 "%<ordered%> construct with %qs clause must be "
16184 "closely nested inside a loop with %<ordered%> clause",
16185 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16186 failures++;
16188 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16189 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16191 bool fail = false;
16192 sink_c = c;
16193 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16194 continue; /* omp_cur_iteration - 1 */
16195 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16196 decls && TREE_CODE (decls) == TREE_LIST;
16197 decls = TREE_CHAIN (decls), ++i)
16198 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16199 continue;
16200 else if (TREE_VALUE (decls)
16201 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16203 error_at (OMP_CLAUSE_LOCATION (c),
16204 "variable %qE is not an iteration "
16205 "of outermost loop %d, expected %qE",
16206 TREE_VALUE (decls), i + 1,
16207 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16208 fail = true;
16209 failures++;
16211 else
16212 TREE_VALUE (decls)
16213 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16214 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16216 error_at (OMP_CLAUSE_LOCATION (c),
16217 "number of variables in %qs clause with "
16218 "%<sink%> modifier does not match number of "
16219 "iteration variables",
16220 OMP_CLAUSE_DOACROSS_DEPEND (c)
16221 ? "depend" : "doacross");
16222 failures++;
16225 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16226 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16228 if (source_c)
16230 error_at (OMP_CLAUSE_LOCATION (c),
16231 "more than one %qs clause with %<source%> "
16232 "modifier on an %<ordered%> construct",
16233 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16234 ? "depend" : "doacross");
16235 failures++;
16237 else
16238 source_c = c;
16241 if (source_c && sink_c)
16243 error_at (OMP_CLAUSE_LOCATION (source_c),
16244 "%qs clause with %<source%> modifier specified "
16245 "together with %qs clauses with %<sink%> modifier "
16246 "on the same construct",
16247 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16248 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16249 failures++;
16252 if (failures)
16253 return gimple_build_nop ();
16254 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16257 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16258 expression produces a value to be used as an operand inside a GIMPLE
16259 statement, the value will be stored back in *EXPR_P. This value will
16260 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16261 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16262 emitted in PRE_P and POST_P.
16264 Additionally, this process may overwrite parts of the input
16265 expression during gimplification. Ideally, it should be
16266 possible to do non-destructive gimplification.
16268 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16269 the expression needs to evaluate to a value to be used as
16270 an operand in a GIMPLE statement, this value will be stored in
16271 *EXPR_P on exit. This happens when the caller specifies one
16272 of fb_lvalue or fb_rvalue fallback flags.
16274 PRE_P will contain the sequence of GIMPLE statements corresponding
16275 to the evaluation of EXPR and all the side-effects that must
16276 be executed before the main expression. On exit, the last
16277 statement of PRE_P is the core statement being gimplified. For
16278 instance, when gimplifying 'if (++a)' the last statement in
16279 PRE_P will be 'if (t.1)' where t.1 is the result of
16280 pre-incrementing 'a'.
16282 POST_P will contain the sequence of GIMPLE statements corresponding
16283 to the evaluation of all the side-effects that must be executed
16284 after the main expression. If this is NULL, the post
16285 side-effects are stored at the end of PRE_P.
16287 The reason why the output is split in two is to handle post
16288 side-effects explicitly. In some cases, an expression may have
16289 inner and outer post side-effects which need to be emitted in
16290 an order different from the one given by the recursive
16291 traversal. For instance, for the expression (*p--)++ the post
16292 side-effects of '--' must actually occur *after* the post
16293 side-effects of '++'. However, gimplification will first visit
16294 the inner expression, so if a separate POST sequence was not
16295 used, the resulting sequence would be:
16297 1 t.1 = *p
16298 2 p = p - 1
16299 3 t.2 = t.1 + 1
16300 4 *p = t.2
16302 However, the post-decrement operation in line #2 must not be
16303 evaluated until after the store to *p at line #4, so the
16304 correct sequence should be:
16306 1 t.1 = *p
16307 2 t.2 = t.1 + 1
16308 3 *p = t.2
16309 4 p = p - 1
16311 So, by specifying a separate post queue, it is possible
16312 to emit the post side-effects in the correct order.
16313 If POST_P is NULL, an internal queue will be used. Before
16314 returning to the caller, the sequence POST_P is appended to
16315 the main output sequence PRE_P.
16317 GIMPLE_TEST_F points to a function that takes a tree T and
16318 returns nonzero if T is in the GIMPLE form requested by the
16319 caller. The GIMPLE predicates are in gimple.cc.
16321 FALLBACK tells the function what sort of temporary we want if
16322 gimplification cannot produce an expression that complies with
16323 GIMPLE_TEST_F.
16325 fb_none means that no temporary should be generated
16326 fb_rvalue means that an rvalue is OK to generate
16327 fb_lvalue means that an lvalue is OK to generate
16328 fb_either means that either is OK, but an lvalue is preferable.
16329 fb_mayfail means that gimplification may fail (in which case
16330 GS_ERROR will be returned)
16332 The return value is either GS_ERROR or GS_ALL_DONE, since this
16333 function iterates until EXPR is completely gimplified or an error
16334 occurs. */
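/* For instance (an illustrative sketch, not part of the original source),
   a caller that needs OP as a GIMPLE operand value would do

     if (gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue)
         == GS_ERROR)
       return GS_ERROR;

   after which OP is a gimple value (a constant, a decl or an SSA_NAME)
   and its side effects have been emitted into *PRE_P / *POST_P.  */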
16336 enum gimplify_status
16337 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16338 bool (*gimple_test_f) (tree), fallback_t fallback)
16340 tree tmp;
16341 gimple_seq internal_pre = NULL;
16342 gimple_seq internal_post = NULL;
16343 tree save_expr;
16344 bool is_statement;
16345 location_t saved_location;
16346 enum gimplify_status ret;
16347 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16348 tree label;
16350 save_expr = *expr_p;
16351 if (save_expr == NULL_TREE)
16352 return GS_ALL_DONE;
16354 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16355 is_statement = gimple_test_f == is_gimple_stmt;
16356 if (is_statement)
16357 gcc_assert (pre_p);
16359 /* Consistency checks. */
16360 if (gimple_test_f == is_gimple_reg)
16361 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16362 else if (gimple_test_f == is_gimple_val
16363 || gimple_test_f == is_gimple_call_addr
16364 || gimple_test_f == is_gimple_condexpr_for_cond
16365 || gimple_test_f == is_gimple_mem_rhs
16366 || gimple_test_f == is_gimple_mem_rhs_or_call
16367 || gimple_test_f == is_gimple_reg_rhs
16368 || gimple_test_f == is_gimple_reg_rhs_or_call
16369 || gimple_test_f == is_gimple_asm_val
16370 || gimple_test_f == is_gimple_mem_ref_addr)
16371 gcc_assert (fallback & fb_rvalue);
16372 else if (gimple_test_f == is_gimple_min_lval
16373 || gimple_test_f == is_gimple_lvalue)
16374 gcc_assert (fallback & fb_lvalue);
16375 else if (gimple_test_f == is_gimple_addressable)
16376 gcc_assert (fallback & fb_either);
16377 else if (gimple_test_f == is_gimple_stmt)
16378 gcc_assert (fallback == fb_none);
16379 else
16381 /* We should have recognized the GIMPLE_TEST_F predicate to
16382 know what kind of fallback to use in case a temporary is
16383 needed to hold the value or address of *EXPR_P. */
16384 gcc_unreachable ();
16387 /* We used to check the predicate here and return immediately if it
16388 succeeds. This is wrong; the design is for gimplification to be
16389 idempotent, and for the predicates to only test for valid forms, not
16390 whether they are fully simplified. */
16391 if (pre_p == NULL)
16392 pre_p = &internal_pre;
16394 if (post_p == NULL)
16395 post_p = &internal_post;
16397 /* Remember the last statements added to PRE_P and POST_P. Every
16398 new statement added by the gimplification helpers needs to be
16399 annotated with location information. To centralize the
16400 responsibility, we remember the last statement that had been
16401 added to both queues before gimplifying *EXPR_P. If
16402 gimplification produces new statements in PRE_P and POST_P, those
16403 statements will be annotated with the same location information
16404 as *EXPR_P. */
16405 pre_last_gsi = gsi_last (*pre_p);
16406 post_last_gsi = gsi_last (*post_p);
16408 saved_location = input_location;
16409 if (save_expr != error_mark_node
16410 && EXPR_HAS_LOCATION (*expr_p))
16411 input_location = EXPR_LOCATION (*expr_p);
16413 /* Loop over the specific gimplifiers until the toplevel node
16414 remains the same. */
16417 /* Strip away as many useless type conversions as possible
16418 at the toplevel. */
16419 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16421 /* Remember the expr. */
16422 save_expr = *expr_p;
16424 /* Die, die, die, my darling. */
16425 if (error_operand_p (save_expr))
16427 ret = GS_ERROR;
16428 break;
16431 /* Do any language-specific gimplification. */
16432 ret = ((enum gimplify_status)
16433 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16434 if (ret == GS_OK)
16436 if (*expr_p == NULL_TREE)
16437 break;
16438 if (*expr_p != save_expr)
16439 continue;
16441 else if (ret != GS_UNHANDLED)
16442 break;
16444 /* Make sure that all the cases set 'ret' appropriately. */
16445 ret = GS_UNHANDLED;
16446 switch (TREE_CODE (*expr_p))
16448 /* First deal with the special cases. */
16450 case POSTINCREMENT_EXPR:
16451 case POSTDECREMENT_EXPR:
16452 case PREINCREMENT_EXPR:
16453 case PREDECREMENT_EXPR:
16454 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16455 fallback != fb_none,
16456 TREE_TYPE (*expr_p));
16457 break;
16459 case VIEW_CONVERT_EXPR:
16460 if ((fallback & fb_rvalue)
16461 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16462 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16464 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16465 post_p, is_gimple_val, fb_rvalue);
16466 recalculate_side_effects (*expr_p);
16467 break;
16469 /* Fallthru. */
16471 case ARRAY_REF:
16472 case ARRAY_RANGE_REF:
16473 case REALPART_EXPR:
16474 case IMAGPART_EXPR:
16475 case COMPONENT_REF:
16476 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16477 fallback ? fallback : fb_rvalue);
16478 break;
16480 case COND_EXPR:
16481 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16483 /* C99 code may assign to an array in a structure value of a
16484 conditional expression, and this has undefined behavior
16485 only on execution, so create a temporary if an lvalue is
16486 required. */
16487 if (fallback == fb_lvalue)
16489 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16490 mark_addressable (*expr_p);
16491 ret = GS_OK;
16493 break;
16495 case CALL_EXPR:
16496 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16498 /* C99 code may assign to an array in a structure returned
16499 from a function, and this has undefined behavior only on
16500 execution, so create a temporary if an lvalue is
16501 required. */
16502 if (fallback == fb_lvalue)
16504 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16505 mark_addressable (*expr_p);
16506 ret = GS_OK;
16508 break;
16510 case TREE_LIST:
16511 gcc_unreachable ();
16513 case COMPOUND_EXPR:
16514 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16515 break;
16517 case COMPOUND_LITERAL_EXPR:
16518 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16519 gimple_test_f, fallback);
16520 break;
16522 case MODIFY_EXPR:
16523 case INIT_EXPR:
16524 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16525 fallback != fb_none);
16526 break;
16528 case TRUTH_ANDIF_EXPR:
16529 case TRUTH_ORIF_EXPR:
16531 /* Preserve the original type of the expression and the
16532 source location of the outer expression. */
16533 tree org_type = TREE_TYPE (*expr_p);
16534 *expr_p = gimple_boolify (*expr_p);
16535 *expr_p = build3_loc (input_location, COND_EXPR,
16536 org_type, *expr_p,
16537 fold_convert_loc
16538 (input_location,
16539 org_type, boolean_true_node),
16540 fold_convert_loc
16541 (input_location,
16542 org_type, boolean_false_node));
16543 ret = GS_OK;
16544 break;
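	  /* For example (a sketch of the rewrite above, not compiler
	     output), an int-typed "x && y" becomes the COND_EXPR

		((bool) x && (bool) y) ? (int) 1 : (int) 0

	     and GS_OK sends the new COND_EXPR back through the main
	     loop, where the COND_EXPR case lowers it to control flow.  */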
16547 case TRUTH_NOT_EXPR:
16549 tree type = TREE_TYPE (*expr_p);
16550 /* The parsers are careful to generate TRUTH_NOT_EXPR
16551 only with operands that are always zero or one.
16552 We do not fold here but handle the only interesting case
16553 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16554 *expr_p = gimple_boolify (*expr_p);
16555 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16556 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16557 TREE_TYPE (*expr_p),
16558 TREE_OPERAND (*expr_p, 0));
16559 else
16560 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16561 TREE_TYPE (*expr_p),
16562 TREE_OPERAND (*expr_p, 0),
16563 build_int_cst (TREE_TYPE (*expr_p), 1));
16564 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16565 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16566 ret = GS_OK;
16567 break;
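	  /* Sketch of the two cases above (illustrative): the boolified
	     operand is known to be 0 or 1, so

		!b  ==>  ~b		for a 1-bit boolean type
		!b  ==>  b ^ 1		for a wider boolean type

	     and the result is converted back to the original type if
	     that conversion is not useless.  */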
16570 case ADDR_EXPR:
16571 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16572 break;
16574 case ANNOTATE_EXPR:
16576 tree cond = TREE_OPERAND (*expr_p, 0);
16577 tree kind = TREE_OPERAND (*expr_p, 1);
16578 tree data = TREE_OPERAND (*expr_p, 2);
16579 tree type = TREE_TYPE (cond);
16580 if (!INTEGRAL_TYPE_P (type))
16582 *expr_p = cond;
16583 ret = GS_OK;
16584 break;
16586 tree tmp = create_tmp_var (type);
16587 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16588 gcall *call
16589 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16590 gimple_call_set_lhs (call, tmp);
16591 gimplify_seq_add_stmt (pre_p, call);
16592 *expr_p = tmp;
16593 ret = GS_ALL_DONE;
16594 break;
16597 case VA_ARG_EXPR:
16598 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16599 break;
16601 CASE_CONVERT:
16602 if (IS_EMPTY_STMT (*expr_p))
16604 ret = GS_ALL_DONE;
16605 break;
16608 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16609 || fallback == fb_none)
16611 /* Just strip a conversion to void (or in void context) and
16612 try again. */
16613 *expr_p = TREE_OPERAND (*expr_p, 0);
16614 ret = GS_OK;
16615 break;
16618 ret = gimplify_conversion (expr_p);
16619 if (ret == GS_ERROR)
16620 break;
16621 if (*expr_p != save_expr)
16622 break;
16623 /* FALLTHRU */
16625 case FIX_TRUNC_EXPR:
16626 /* unary_expr: ... | '(' cast ')' val | ... */
16627 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16628 is_gimple_val, fb_rvalue);
16629 recalculate_side_effects (*expr_p);
16630 break;
16632 case INDIRECT_REF:
16634 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16635 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16636 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16638 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16639 if (*expr_p != save_expr)
16641 ret = GS_OK;
16642 break;
16645 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16646 is_gimple_reg, fb_rvalue);
16647 if (ret == GS_ERROR)
16648 break;
16650 recalculate_side_effects (*expr_p);
16651 *expr_p = fold_build2_loc (input_location, MEM_REF,
16652 TREE_TYPE (*expr_p),
16653 TREE_OPERAND (*expr_p, 0),
16654 build_int_cst (saved_ptr_type, 0));
16655 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16656 TREE_THIS_NOTRAP (*expr_p) = notrap;
16657 ret = GS_OK;
16658 break;
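	  /* Illustrative sketch: an INDIRECT_REF that could not be
	     folded away is lowered to a MEM_REF with a zero offset of
	     the saved pointer type, roughly

		*p  ==>  MEM[(T *) p + 0]

	     with the volatile and no-trap flags copied over.  */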
16661 /* We arrive here through the various re-gimplification paths. */
16662 case MEM_REF:
16663 /* First try re-folding the whole thing. */
16664 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16665 TREE_OPERAND (*expr_p, 0),
16666 TREE_OPERAND (*expr_p, 1));
16667 if (tmp)
16669 REF_REVERSE_STORAGE_ORDER (tmp)
16670 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16671 *expr_p = tmp;
16672 recalculate_side_effects (*expr_p);
16673 ret = GS_OK;
16674 break;
16676 /* Avoid re-gimplifying the address operand if it is already
16677 in suitable form. Re-gimplifying would mark the address
16678 operand addressable. Always gimplify when not in SSA form
16679 as we still may have to gimplify decls with value-exprs. */
16680 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16681 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16683 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16684 is_gimple_mem_ref_addr, fb_rvalue);
16685 if (ret == GS_ERROR)
16686 break;
16688 recalculate_side_effects (*expr_p);
16689 ret = GS_ALL_DONE;
16690 break;
16692 /* Constants need not be gimplified. */
16693 case INTEGER_CST:
16694 case REAL_CST:
16695 case FIXED_CST:
16696 case STRING_CST:
16697 case COMPLEX_CST:
16698 case VECTOR_CST:
16699 /* Drop the overflow flag on constants; we do not want
16700 that in the GIMPLE IL. */
16701 if (TREE_OVERFLOW_P (*expr_p))
16702 *expr_p = drop_tree_overflow (*expr_p);
16703 ret = GS_ALL_DONE;
16704 break;
16706 case CONST_DECL:
16707 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16708 CONST_DECL node. Otherwise the decl is replaceable by its
16709 value. */
16710 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16711 if (fallback & fb_lvalue)
16712 ret = GS_ALL_DONE;
16713 else
16715 *expr_p = DECL_INITIAL (*expr_p);
16716 ret = GS_OK;
16718 break;
16720 case DECL_EXPR:
16721 ret = gimplify_decl_expr (expr_p, pre_p);
16722 break;
16724 case BIND_EXPR:
16725 ret = gimplify_bind_expr (expr_p, pre_p);
16726 break;
16728 case LOOP_EXPR:
16729 ret = gimplify_loop_expr (expr_p, pre_p);
16730 break;
16732 case SWITCH_EXPR:
16733 ret = gimplify_switch_expr (expr_p, pre_p);
16734 break;
16736 case EXIT_EXPR:
16737 ret = gimplify_exit_expr (expr_p);
16738 break;
16740 case GOTO_EXPR:
16741 /* If the target is not a LABEL_DECL, then it is a computed jump
16742 and the target needs to be gimplified. */
16743 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16745 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16746 NULL, is_gimple_val, fb_rvalue);
16747 if (ret == GS_ERROR)
16748 break;
16750 gimplify_seq_add_stmt (pre_p,
16751 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16752 ret = GS_ALL_DONE;
16753 break;
16755 case PREDICT_EXPR:
16756 gimplify_seq_add_stmt (pre_p,
16757 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16758 PREDICT_EXPR_OUTCOME (*expr_p)));
16759 ret = GS_ALL_DONE;
16760 break;
16762 case LABEL_EXPR:
16763 ret = gimplify_label_expr (expr_p, pre_p);
16764 label = LABEL_EXPR_LABEL (*expr_p);
16765 gcc_assert (decl_function_context (label) == current_function_decl);
16767 /* If the label is used in a goto statement, or the address of the
16768 label is taken, we need to unpoison all variables that were seen
16769 so far. Doing so prevents us from reporting false positives. */
16770 if (asan_poisoned_variables
16771 && asan_used_labels != NULL
16772 && asan_used_labels->contains (label)
16773 && !gimplify_omp_ctxp)
16774 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16775 break;
16777 case CASE_LABEL_EXPR:
16778 ret = gimplify_case_label_expr (expr_p, pre_p);
16780 if (gimplify_ctxp->live_switch_vars)
16781 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16782 pre_p);
16783 break;
16785 case RETURN_EXPR:
16786 ret = gimplify_return_expr (*expr_p, pre_p);
16787 break;
16789 case CONSTRUCTOR:
16790 /* Don't reduce this in place; let gimplify_init_constructor work its
16791 magic. But if we're only elaborating this for side effects, just
16792 gimplify any element that has side-effects. */
16793 if (fallback == fb_none)
16795 unsigned HOST_WIDE_INT ix;
16796 tree val;
16797 tree temp = NULL_TREE;
16798 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16799 if (TREE_SIDE_EFFECTS (val))
16800 append_to_statement_list (val, &temp);
16802 *expr_p = temp;
16803 ret = temp ? GS_OK : GS_ALL_DONE;
16805 /* C99 code may assign to an array in a constructed
16806 structure or union, and this has undefined behavior only
16807 on execution, so create a temporary if an lvalue is
16808 required. */
16809 else if (fallback == fb_lvalue)
16811 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16812 mark_addressable (*expr_p);
16813 ret = GS_OK;
16815 else
16816 ret = GS_ALL_DONE;
16817 break;
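	  /* Example of the fb_none path above (a sketch): a constructor
	     such as "{ f (), 2 }" elaborated only for side effects keeps
	     just

		f ();

	     since the constant element contributes nothing.  */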
16819 /* The following are special cases that are not handled by the
16820 original GIMPLE grammar. */
16822 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16823 eliminated. */
16824 case SAVE_EXPR:
16825 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16826 break;
16828 case BIT_FIELD_REF:
16829 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16830 post_p, is_gimple_lvalue, fb_either);
16831 recalculate_side_effects (*expr_p);
16832 break;
16834 case TARGET_MEM_REF:
16836 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16838 if (TMR_BASE (*expr_p))
16839 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16840 post_p, is_gimple_mem_ref_addr, fb_either);
16841 if (TMR_INDEX (*expr_p))
16842 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16843 post_p, is_gimple_val, fb_rvalue);
16844 if (TMR_INDEX2 (*expr_p))
16845 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16846 post_p, is_gimple_val, fb_rvalue);
16847 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16848 ret = MIN (r0, r1);
16850 break;
16852 case NON_LVALUE_EXPR:
16853 /* This should have been stripped above. */
16854 gcc_unreachable ();
16856 case ASM_EXPR:
16857 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16858 break;
16860 case TRY_FINALLY_EXPR:
16861 case TRY_CATCH_EXPR:
16863 gimple_seq eval, cleanup;
16864 gtry *try_;
16866 /* Calls to destructors are generated automatically in FINALLY/CATCH
16867 blocks and should have UNKNOWN_LOCATION as their location. However,
16868 gimplify_call_expr will reset these call stmts to input_location
16869 if it finds stmt's location is unknown. To prevent resetting for
16870 destructors, we set the input_location to unknown.
16871 Note that this only affects the destructor calls in FINALLY/CATCH
16872 block, and will automatically reset to its original value by the
16873 end of gimplify_expr. */
16874 input_location = UNKNOWN_LOCATION;
16875 eval = cleanup = NULL;
16876 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16877 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16878 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16880 gimple_seq n = NULL, e = NULL;
16881 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16882 0), &n);
16883 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16884 1), &e);
16885 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16887 geh_else *stmt = gimple_build_eh_else (n, e);
16888 gimple_seq_add_stmt (&cleanup, stmt);
16891 else
16892 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16893 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16894 if (gimple_seq_empty_p (cleanup))
16896 gimple_seq_add_seq (pre_p, eval);
16897 ret = GS_ALL_DONE;
16898 break;
16900 try_ = gimple_build_try (eval, cleanup,
16901 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16902 ? GIMPLE_TRY_FINALLY
16903 : GIMPLE_TRY_CATCH);
16904 if (EXPR_HAS_LOCATION (save_expr))
16905 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16906 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16907 gimple_set_location (try_, saved_location);
16908 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16909 gimple_try_set_catch_is_cleanup (try_,
16910 TRY_CATCH_IS_CLEANUP (*expr_p));
16911 gimplify_seq_add_stmt (pre_p, try_);
16912 ret = GS_ALL_DONE;
16913 break;
16916 case CLEANUP_POINT_EXPR:
16917 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16918 break;
16920 case TARGET_EXPR:
16921 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16922 break;
16924 case CATCH_EXPR:
16926 gimple *c;
16927 gimple_seq handler = NULL;
16928 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16929 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16930 gimplify_seq_add_stmt (pre_p, c);
16931 ret = GS_ALL_DONE;
16932 break;
16935 case EH_FILTER_EXPR:
16937 gimple *ehf;
16938 gimple_seq failure = NULL;
16940 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16941 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16942 copy_warning (ehf, *expr_p);
16943 gimplify_seq_add_stmt (pre_p, ehf);
16944 ret = GS_ALL_DONE;
16945 break;
16948 case OBJ_TYPE_REF:
16950 enum gimplify_status r0, r1;
16951 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16952 post_p, is_gimple_val, fb_rvalue);
16953 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16954 post_p, is_gimple_val, fb_rvalue);
16955 TREE_SIDE_EFFECTS (*expr_p) = 0;
16956 ret = MIN (r0, r1);
16958 break;
16960 case LABEL_DECL:
16961 /* We get here when taking the address of a label. We mark
16962 the label as "forced", meaning it can never be removed and
16963 it is a potential target for any computed goto. */
16964 FORCED_LABEL (*expr_p) = 1;
16965 ret = GS_ALL_DONE;
16966 break;
16968 case STATEMENT_LIST:
16969 ret = gimplify_statement_list (expr_p, pre_p);
16970 break;
16972 case WITH_SIZE_EXPR:
16974 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16975 post_p == &internal_post ? NULL : post_p,
16976 gimple_test_f, fallback);
16977 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16978 is_gimple_val, fb_rvalue);
16979 ret = GS_ALL_DONE;
16981 break;
16983 case VAR_DECL:
16984 case PARM_DECL:
16985 ret = gimplify_var_or_parm_decl (expr_p);
16986 break;
16988 case RESULT_DECL:
16989 /* When within an OMP context, notice uses of variables. */
16990 if (gimplify_omp_ctxp)
16991 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16992 ret = GS_ALL_DONE;
16993 break;
16995 case DEBUG_EXPR_DECL:
16996 gcc_unreachable ();
16998 case DEBUG_BEGIN_STMT:
16999 gimplify_seq_add_stmt (pre_p,
17000 gimple_build_debug_begin_stmt
17001 (TREE_BLOCK (*expr_p),
17002 EXPR_LOCATION (*expr_p)));
17003 ret = GS_ALL_DONE;
17004 *expr_p = NULL;
17005 break;
17007 case SSA_NAME:
17008 /* Allow callbacks into the gimplifier during optimization. */
17009 ret = GS_ALL_DONE;
17010 break;
17012 case OMP_PARALLEL:
17013 gimplify_omp_parallel (expr_p, pre_p);
17014 ret = GS_ALL_DONE;
17015 break;
17017 case OMP_TASK:
17018 gimplify_omp_task (expr_p, pre_p);
17019 ret = GS_ALL_DONE;
17020 break;
17022 case OMP_SIMD:
17024 /* Temporarily disable into_ssa, as scan_omp_simd
17025 which calls copy_gimple_seq_and_replace_locals can't deal
17026 with SSA_NAMEs defined outside of the body properly. */
17027 bool saved_into_ssa = gimplify_ctxp->into_ssa;
17028 gimplify_ctxp->into_ssa = false;
17029 ret = gimplify_omp_for (expr_p, pre_p);
17030 gimplify_ctxp->into_ssa = saved_into_ssa;
17031 break;
17034 case OMP_FOR:
17035 case OMP_DISTRIBUTE:
17036 case OMP_TASKLOOP:
17037 case OACC_LOOP:
17038 ret = gimplify_omp_for (expr_p, pre_p);
17039 break;
17041 case OMP_LOOP:
17042 ret = gimplify_omp_loop (expr_p, pre_p);
17043 break;
17045 case OACC_CACHE:
17046 gimplify_oacc_cache (expr_p, pre_p);
17047 ret = GS_ALL_DONE;
17048 break;
17050 case OACC_DECLARE:
17051 gimplify_oacc_declare (expr_p, pre_p);
17052 ret = GS_ALL_DONE;
17053 break;
17055 case OACC_HOST_DATA:
17056 case OACC_DATA:
17057 case OACC_KERNELS:
17058 case OACC_PARALLEL:
17059 case OACC_SERIAL:
17060 case OMP_SCOPE:
17061 case OMP_SECTIONS:
17062 case OMP_SINGLE:
17063 case OMP_TARGET:
17064 case OMP_TARGET_DATA:
17065 case OMP_TEAMS:
17066 gimplify_omp_workshare (expr_p, pre_p);
17067 ret = GS_ALL_DONE;
17068 break;
17070 case OACC_ENTER_DATA:
17071 case OACC_EXIT_DATA:
17072 case OACC_UPDATE:
17073 case OMP_TARGET_UPDATE:
17074 case OMP_TARGET_ENTER_DATA:
17075 case OMP_TARGET_EXIT_DATA:
17076 gimplify_omp_target_update (expr_p, pre_p);
17077 ret = GS_ALL_DONE;
17078 break;
17080 case OMP_SECTION:
17081 case OMP_STRUCTURED_BLOCK:
17082 case OMP_MASTER:
17083 case OMP_MASKED:
17084 case OMP_ORDERED:
17085 case OMP_CRITICAL:
17086 case OMP_SCAN:
17088 gimple_seq body = NULL;
17089 gimple *g;
17090 bool saved_in_omp_construct = in_omp_construct;
17092 in_omp_construct = true;
17093 gimplify_and_add (OMP_BODY (*expr_p), &body);
17094 in_omp_construct = saved_in_omp_construct;
17095 switch (TREE_CODE (*expr_p))
17097 case OMP_SECTION:
17098 g = gimple_build_omp_section (body);
17099 break;
17100 case OMP_STRUCTURED_BLOCK:
17101 g = gimple_build_omp_structured_block (body);
17102 break;
17103 case OMP_MASTER:
17104 g = gimple_build_omp_master (body);
17105 break;
17106 case OMP_ORDERED:
17107 g = gimplify_omp_ordered (*expr_p, body);
17108 if (OMP_BODY (*expr_p) == NULL_TREE
17109 && gimple_code (g) == GIMPLE_OMP_ORDERED)
17110 gimple_omp_ordered_standalone (g);
17111 break;
17112 case OMP_MASKED:
17113 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
17114 pre_p, ORT_WORKSHARE, OMP_MASKED);
17115 gimplify_adjust_omp_clauses (pre_p, body,
17116 &OMP_MASKED_CLAUSES (*expr_p),
17117 OMP_MASKED);
17118 g = gimple_build_omp_masked (body,
17119 OMP_MASKED_CLAUSES (*expr_p));
17120 break;
17121 case OMP_CRITICAL:
17122 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
17123 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
17124 gimplify_adjust_omp_clauses (pre_p, body,
17125 &OMP_CRITICAL_CLAUSES (*expr_p),
17126 OMP_CRITICAL);
17127 g = gimple_build_omp_critical (body,
17128 OMP_CRITICAL_NAME (*expr_p),
17129 OMP_CRITICAL_CLAUSES (*expr_p));
17130 break;
17131 case OMP_SCAN:
17132 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
17133 pre_p, ORT_WORKSHARE, OMP_SCAN);
17134 gimplify_adjust_omp_clauses (pre_p, body,
17135 &OMP_SCAN_CLAUSES (*expr_p),
17136 OMP_SCAN);
17137 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
17138 break;
17139 default:
17140 gcc_unreachable ();
17142 gimplify_seq_add_stmt (pre_p, g);
17143 ret = GS_ALL_DONE;
17144 break;
17147 case OMP_TASKGROUP:
17149 gimple_seq body = NULL;
17151 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
17152 bool saved_in_omp_construct = in_omp_construct;
17153 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
17154 OMP_TASKGROUP);
17155 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
17157 in_omp_construct = true;
17158 gimplify_and_add (OMP_BODY (*expr_p), &body);
17159 in_omp_construct = saved_in_omp_construct;
17160 gimple_seq cleanup = NULL;
17161 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
17162 gimple *g = gimple_build_call (fn, 0);
17163 gimple_seq_add_stmt (&cleanup, g);
17164 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
17165 body = NULL;
17166 gimple_seq_add_stmt (&body, g);
17167 g = gimple_build_omp_taskgroup (body, *pclauses);
17168 gimplify_seq_add_stmt (pre_p, g);
17169 ret = GS_ALL_DONE;
17170 break;
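	  /* Shape of the result (an illustrative sketch):

		taskgroup
		try
		  {
		    BODY
		  }
		finally
		  {
		    __builtin_GOMP_taskgroup_end ();
		  }

	     so the runtime's taskgroup-end hook runs however the body
	     is exited.  */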
17173 case OMP_ATOMIC:
17174 case OMP_ATOMIC_READ:
17175 case OMP_ATOMIC_CAPTURE_OLD:
17176 case OMP_ATOMIC_CAPTURE_NEW:
17177 ret = gimplify_omp_atomic (expr_p, pre_p);
17178 break;
17180 case TRANSACTION_EXPR:
17181 ret = gimplify_transaction (expr_p, pre_p);
17182 break;
17184 case TRUTH_AND_EXPR:
17185 case TRUTH_OR_EXPR:
17186 case TRUTH_XOR_EXPR:
17188 tree orig_type = TREE_TYPE (*expr_p);
17189 tree new_type, xop0, xop1;
17190 *expr_p = gimple_boolify (*expr_p);
17191 new_type = TREE_TYPE (*expr_p);
17192 if (!useless_type_conversion_p (orig_type, new_type))
17194 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17195 ret = GS_OK;
17196 break;
17199 /* Boolified binary truth expressions are semantically equivalent
17200 to bitwise binary expressions. Canonicalize them to the
17201 bitwise variant. */
17202 switch (TREE_CODE (*expr_p))
17204 case TRUTH_AND_EXPR:
17205 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17206 break;
17207 case TRUTH_OR_EXPR:
17208 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17209 break;
17210 case TRUTH_XOR_EXPR:
17211 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17212 break;
17213 default:
17214 break;
17216 /* Now make sure that operands have compatible type to
17217 expression's new_type. */
17218 xop0 = TREE_OPERAND (*expr_p, 0);
17219 xop1 = TREE_OPERAND (*expr_p, 1);
17220 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17221 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17222 new_type,
17223 xop0);
17224 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17225 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17226 new_type,
17227 xop1);
17228 /* Continue classified as tcc_binary. */
17229 goto expr_2;
17232 case VEC_COND_EXPR:
17233 goto expr_3;
17235 case VEC_PERM_EXPR:
17236 /* Classified as tcc_expression. */
17237 goto expr_3;
17239 case BIT_INSERT_EXPR:
17240 /* Argument 3 is a constant. */
17241 goto expr_2;
17243 case POINTER_PLUS_EXPR:
17245 enum gimplify_status r0, r1;
17246 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17247 post_p, is_gimple_val, fb_rvalue);
17248 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17249 post_p, is_gimple_val, fb_rvalue);
17250 recalculate_side_effects (*expr_p);
17251 ret = MIN (r0, r1);
17252 break;
17255 default:
17256 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17258 case tcc_comparison:
17259 /* Handle comparison of objects of non-scalar mode aggregates
17260 with a call to memcmp. It would be nice to only have to do
17261 this for variable-sized objects, but then we'd have to allow
17262 the same nest of reference nodes we allow for MODIFY_EXPR and
17263 that's too complex.
17265 Compare scalar mode aggregates as scalar mode values. Using
17266 memcmp for them would be very inefficient at best, and is
17267 plain wrong if bitfields are involved. */
17268 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17269 ret = GS_ERROR;
17270 else
17272 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17274 /* Vector comparisons need no boolification. */
17275 if (TREE_CODE (type) == VECTOR_TYPE)
17276 goto expr_2;
17277 else if (!AGGREGATE_TYPE_P (type))
17279 tree org_type = TREE_TYPE (*expr_p);
17280 *expr_p = gimple_boolify (*expr_p);
17281 if (!useless_type_conversion_p (org_type,
17282 TREE_TYPE (*expr_p)))
17284 *expr_p = fold_convert_loc (input_location,
17285 org_type, *expr_p);
17286 ret = GS_OK;
17288 else
17289 goto expr_2;
17291 else if (TYPE_MODE (type) != BLKmode)
17292 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17293 else
17294 ret = gimplify_variable_sized_compare (expr_p);
17296 break;
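	    /* Concretely (an illustrative sketch): for BLKmode
	       aggregates s1 and s2, "s1 == s2" is lowered along the
	       lines of

		memcmp (&s1, &s2, sizeof (s1)) == 0

	       while a scalar-mode aggregate is compared as a value of
	       its integer mode instead.  */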
17298 /* If *EXPR_P does not need to be special-cased, handle it
17299 according to its class. */
17300 case tcc_unary:
17301 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17302 post_p, is_gimple_val, fb_rvalue);
17303 break;
17305 case tcc_binary:
17306 expr_2:
17308 enum gimplify_status r0, r1;
17310 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17311 post_p, is_gimple_val, fb_rvalue);
17312 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17313 post_p, is_gimple_val, fb_rvalue);
17315 ret = MIN (r0, r1);
17316 break;
17319 expr_3:
17321 enum gimplify_status r0, r1, r2;
17323 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17324 post_p, is_gimple_val, fb_rvalue);
17325 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17326 post_p, is_gimple_val, fb_rvalue);
17327 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17328 post_p, is_gimple_val, fb_rvalue);
17330 ret = MIN (MIN (r0, r1), r2);
17331 break;
17334 case tcc_declaration:
17335 case tcc_constant:
17336 ret = GS_ALL_DONE;
17337 goto dont_recalculate;
17339 default:
17340 gcc_unreachable ();
17343 recalculate_side_effects (*expr_p);
17345 dont_recalculate:
17346 break;
17349 gcc_assert (*expr_p || ret != GS_OK);
17351 while (ret == GS_OK);
17353 /* If we encountered an error_mark somewhere nested inside, either
17354 stub out the statement or propagate the error back out. */
17355 if (ret == GS_ERROR)
17357 if (is_statement)
17358 *expr_p = NULL;
17359 goto out;
17362 /* This was only valid as a return value from the langhook, which
17363 we handled. Make sure it doesn't escape from any other context. */
17364 gcc_assert (ret != GS_UNHANDLED);
17366 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17368 /* We aren't looking for a value, and we don't have a valid
17369 statement. If it doesn't have side-effects, throw it away.
17370 We can also get here with code such as "*&&L;", where L is
17371 a LABEL_DECL that is marked as FORCED_LABEL. */
17372 if (TREE_CODE (*expr_p) == LABEL_DECL
17373 || !TREE_SIDE_EFFECTS (*expr_p))
17374 *expr_p = NULL;
17375 else if (!TREE_THIS_VOLATILE (*expr_p))
17377 /* This is probably a _REF that contains something nested that
17378 has side effects. Recurse through the operands to find it. */
17379 enum tree_code code = TREE_CODE (*expr_p);
17381 switch (code)
17383 case COMPONENT_REF:
17384 case REALPART_EXPR:
17385 case IMAGPART_EXPR:
17386 case VIEW_CONVERT_EXPR:
17387 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17388 gimple_test_f, fallback);
17389 break;
17391 case ARRAY_REF:
17392 case ARRAY_RANGE_REF:
17393 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17394 gimple_test_f, fallback);
17395 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17396 gimple_test_f, fallback);
17397 break;
17399 default:
17400 /* Anything else with side-effects must be converted to
17401 a valid statement before we get here. */
17402 gcc_unreachable ();
17405 *expr_p = NULL;
17407 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17408 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17409 && !is_empty_type (TREE_TYPE (*expr_p)))
17411 /* Historically, the compiler has treated a bare reference
17412 to a non-BLKmode volatile lvalue as forcing a load. */
17413 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17415 /* Normally, we do not want to create a temporary for a
17416 TREE_ADDRESSABLE type because such a type should not be
17417 copied by bitwise-assignment. However, we make an
17418 exception here, as all we are doing here is ensuring that
17419 we read the bytes that make up the type. We use
17420 create_tmp_var_raw because create_tmp_var will abort when
17421 given a TREE_ADDRESSABLE type. */
17422 tree tmp = create_tmp_var_raw (type, "vol");
17423 gimple_add_tmp_var (tmp);
17424 gimplify_assign (tmp, *expr_p, pre_p);
17425 *expr_p = NULL;
17427 else
17428 /* We can't do anything useful with a volatile reference to
17429 an incomplete type, so just throw it away. Likewise for
17430 a BLKmode type, since any implicit inner load should
17431 already have been turned into an explicit one by the
17432 gimplification process. */
17433 *expr_p = NULL;
17436 /* If we are gimplifying at the statement level, we're done. Tack
17437 everything together and return. */
17438 if (fallback == fb_none || is_statement)
17440 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17441 it out for GC to reclaim it. */
17442 *expr_p = NULL_TREE;
17444 if (!gimple_seq_empty_p (internal_pre)
17445 || !gimple_seq_empty_p (internal_post))
17447 gimplify_seq_add_seq (&internal_pre, internal_post);
17448 gimplify_seq_add_seq (pre_p, internal_pre);
17451 /* The result of gimplifying *EXPR_P is going to be the last few
17452 statements in *PRE_P and *POST_P. Add location information
17453 to all the statements that were added by the gimplification
17454 helpers. */
17455 if (!gimple_seq_empty_p (*pre_p))
17456 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17458 if (!gimple_seq_empty_p (*post_p))
17459 annotate_all_with_location_after (*post_p, post_last_gsi,
17460 input_location);
17462 goto out;
17465 #ifdef ENABLE_GIMPLE_CHECKING
17466 if (*expr_p)
17468 enum tree_code code = TREE_CODE (*expr_p);
17469 /* These expressions should already be in gimple IR form. */
17470 gcc_assert (code != MODIFY_EXPR
17471 && code != ASM_EXPR
17472 && code != BIND_EXPR
17473 && code != CATCH_EXPR
17474 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17475 && code != EH_FILTER_EXPR
17476 && code != GOTO_EXPR
17477 && code != LABEL_EXPR
17478 && code != LOOP_EXPR
17479 && code != SWITCH_EXPR
17480 && code != TRY_FINALLY_EXPR
17481 && code != EH_ELSE_EXPR
17482 && code != OACC_PARALLEL
17483 && code != OACC_KERNELS
17484 && code != OACC_SERIAL
17485 && code != OACC_DATA
17486 && code != OACC_HOST_DATA
17487 && code != OACC_DECLARE
17488 && code != OACC_UPDATE
17489 && code != OACC_ENTER_DATA
17490 && code != OACC_EXIT_DATA
17491 && code != OACC_CACHE
17492 && code != OMP_CRITICAL
17493 && code != OMP_FOR
17494 && code != OACC_LOOP
17495 && code != OMP_MASTER
17496 && code != OMP_MASKED
17497 && code != OMP_TASKGROUP
17498 && code != OMP_ORDERED
17499 && code != OMP_PARALLEL
17500 && code != OMP_SCAN
17501 && code != OMP_SECTIONS
17502 && code != OMP_SECTION
17503 && code != OMP_STRUCTURED_BLOCK
17504 && code != OMP_SINGLE
17505 && code != OMP_SCOPE);
17507 #endif
17509 /* Otherwise we're gimplifying a subexpression, so the resulting
17510 value is interesting. If it's a valid operand that matches
17511 GIMPLE_TEST_F, we're done. Unless we are handling some
17512 post-effects internally; if that's the case, we need to copy into
17513 a temporary before adding the post-effects to POST_P. */
17514 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17515 goto out;
17517 /* Otherwise, we need to create a new temporary for the gimplified
17518 expression. */
17520 /* We can't return an lvalue if we have an internal postqueue. The
17521 object the lvalue refers to would (probably) be modified by the
17522 postqueue; we need to copy the value out first, which means an
17523 rvalue. */
17524 if ((fallback & fb_lvalue)
17525 && gimple_seq_empty_p (internal_post)
17526 && is_gimple_addressable (*expr_p))
17528 /* An lvalue will do. Take the address of the expression, store it
17529 in a temporary, and replace the expression with an INDIRECT_REF of
17530 that temporary. */
17531 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17532 unsigned int ref_align = get_object_alignment (*expr_p);
17533 tree ref_type = TREE_TYPE (*expr_p);
17534 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17535 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17536 if (TYPE_ALIGN (ref_type) != ref_align)
17537 ref_type = build_aligned_type (ref_type, ref_align);
17538 *expr_p = build2 (MEM_REF, ref_type,
17539 tmp, build_zero_cst (ref_alias_type));
17541 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17543 /* An rvalue will do. Assign the gimplified expression into a
17544 new temporary TMP and replace the original expression with
17545 TMP. First, make sure that the expression has a type so that
17546 it can be assigned into a temporary. */
17547 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17548 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17550 else
17552 #ifdef ENABLE_GIMPLE_CHECKING
17553 if (!(fallback & fb_mayfail))
17555 fprintf (stderr, "gimplification failed:\n");
17556 print_generic_expr (stderr, *expr_p);
17557 debug_tree (*expr_p);
17558 internal_error ("gimplification failed");
17560 #endif
17561 gcc_assert (fallback & fb_mayfail);
17563 /* If this is an asm statement, and the user asked for the
17564 impossible, don't die. Fail and let gimplify_asm_expr
17565 issue an error. */
17566 ret = GS_ERROR;
17567 goto out;
17570 /* Make sure the temporary matches our predicate. */
17571 gcc_assert ((*gimple_test_f) (*expr_p));
17573 if (!gimple_seq_empty_p (internal_post))
17575 annotate_all_with_location (internal_post, input_location);
17576 gimplify_seq_add_seq (pre_p, internal_post);
17579 out:
17580 input_location = saved_location;
17581 return ret;
17584 /* Like gimplify_expr but make sure the gimplified result is not itself
17585 an SSA name (a decl is used instead if it would be one). Temporaries
17586 required by evaluating *EXPR_P may still be SSA names. */
17588 static enum gimplify_status
17589 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17590 bool (*gimple_test_f) (tree), fallback_t fallback,
17591 bool allow_ssa)
17593 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17594 gimple_test_f, fallback);
17595 if (! allow_ssa
17596 && TREE_CODE (*expr_p) == SSA_NAME)
17597 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17598 return ret;
17601 /* Look through TYPE for variable-sized objects and gimplify each such
17602 size that we find. Add to LIST_P any statements generated. */
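/* For instance (a sketch, assuming a 4-byte int), for the VLA type of
   "int a[n]" this emits roughly

	D.1 = (sizetype) n;
	D.2 = D.1 * 4;

   onto LIST_P, where D.2 ends up as the gimplified TYPE_SIZE_UNIT.  */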
17604 void
17605 gimplify_type_sizes (tree type, gimple_seq *list_p)
17607 if (type == NULL || type == error_mark_node)
17608 return;
17610 const bool ignored_p
17611 = TYPE_NAME (type)
17612 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17613 && DECL_IGNORED_P (TYPE_NAME (type));
17614 tree t;
17616 /* We first do the main variant, then copy into any other variants. */
17617 type = TYPE_MAIN_VARIANT (type);
17619 /* Avoid infinite recursion. */
17620 if (TYPE_SIZES_GIMPLIFIED (type))
17621 return;
17623 TYPE_SIZES_GIMPLIFIED (type) = 1;
17625 switch (TREE_CODE (type))
17627 case INTEGER_TYPE:
17628 case ENUMERAL_TYPE:
17629 case BOOLEAN_TYPE:
17630 case REAL_TYPE:
17631 case FIXED_POINT_TYPE:
17632 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17633 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17635 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17637 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17638 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17640 break;
17642 case ARRAY_TYPE:
17643 /* These types may not have declarations, so handle them here. */
17644 gimplify_type_sizes (TREE_TYPE (type), list_p);
17645 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17646 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
17647 with assigned stack slots, for -O1+ -g they should be tracked
17648 by VTA. */
17649 if (!ignored_p
17650 && TYPE_DOMAIN (type)
17651 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17653 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17654 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17655 DECL_IGNORED_P (t) = 0;
17656 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17657 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17658 DECL_IGNORED_P (t) = 0;
17660 break;
17662 case RECORD_TYPE:
17663 case UNION_TYPE:
17664 case QUAL_UNION_TYPE:
17665 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17666 if (TREE_CODE (field) == FIELD_DECL)
17668 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17669 /* Likewise, ensure variable offsets aren't removed. */
17670 if (!ignored_p
17671 && (t = DECL_FIELD_OFFSET (field))
17672 && VAR_P (t)
17673 && DECL_ARTIFICIAL (t))
17674 DECL_IGNORED_P (t) = 0;
17675 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17676 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17677 gimplify_type_sizes (TREE_TYPE (field), list_p);
17679 break;
17681 case POINTER_TYPE:
17682 case REFERENCE_TYPE:
17683 /* We used to recurse on the pointed-to type here, which turned out to
17684 be incorrect because its definition might refer to variables not
17685 yet initialized at this point if a forward declaration is involved.
17687 It was actually useful for anonymous pointed-to types to ensure
17688 that the sizes evaluation dominates every possible later use of the
17689 values. Restricting to such types here would be safe since there
17690 is no possible forward declaration around, but would introduce an
17691 undesirable middle-end semantic to anonymity. We then defer to
17692 front-ends the responsibility of ensuring that the sizes are
17693 evaluated both early and late enough, e.g. by attaching artificial
17694 type declarations to the tree. */
17695 break;
17697 default:
17698 break;
17701 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17702 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17704 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17706 TYPE_SIZE (t) = TYPE_SIZE (type);
17707 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17708 TYPE_SIZES_GIMPLIFIED (t) = 1;
17712 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17713 a size or position, has had all of its SAVE_EXPRs evaluated.
17714 We add any required statements to *STMT_P. */
17716 void
17717 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17719 tree expr = *expr_p;
17721 /* We don't do anything if the value isn't there, is constant, or contains
17722 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17723 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17724 will want to replace it with a new variable, but that will cause problems
17725 if this type is from outside the function. It's OK to have that here. */
17726 if (expr == NULL_TREE
17727 || is_gimple_constant (expr)
17728 || VAR_P (expr)
17729 || CONTAINS_PLACEHOLDER_P (expr))
17730 return;
17732 *expr_p = unshare_expr (expr);
17734 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17735 if the def vanishes. */
17736 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17738 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17739 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
17740 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17741 if (is_gimple_constant (*expr_p))
17742 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17745 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17746 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17747 is true, also gimplify the parameters. */
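/* E.g. (a sketch, not verbatim dump output), a body "int x = 1;
   return x;" comes back as a single outer bind

	{
	  int x;
	  x = 1;
	  D.1 = x;
	  return D.1;
	}

   which is the shape callers of this function rely on.  */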
17749 gbind *
17750 gimplify_body (tree fndecl, bool do_parms)
17752 location_t saved_location = input_location;
17753 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17754 gimple *outer_stmt;
17755 gbind *outer_bind;
17757 timevar_push (TV_TREE_GIMPLIFY);
17759 init_tree_ssa (cfun);
17761 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17762 gimplification. */
17763 default_rtl_profile ();
17765 gcc_assert (gimplify_ctxp == NULL);
17766 push_gimplify_context (true);
17768 if (flag_openacc || flag_openmp)
17770 gcc_assert (gimplify_omp_ctxp == NULL);
17771 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17772 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17775 /* Unshare most shared trees in the body and in that of any nested functions.
17776 It would seem we don't have to do this for nested functions because
17777 they are supposed to be output and then the outer function gimplified
17778 first, but the g++ front end doesn't always do it that way. */
17779 unshare_body (fndecl);
17780 unvisit_body (fndecl);
17782 /* Make sure input_location isn't set to something weird. */
17783 input_location = DECL_SOURCE_LOCATION (fndecl);
17785 /* Resolve callee-copies. This has to be done before processing
17786 the body so that DECL_VALUE_EXPR gets processed correctly. */
17787 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17789 /* Gimplify the function's body. */
17790 seq = NULL;
17791 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17792 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17793 if (!outer_stmt)
17795 outer_stmt = gimple_build_nop ();
17796 gimplify_seq_add_stmt (&seq, outer_stmt);
17799 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17800 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17801 if (gimple_code (outer_stmt) == GIMPLE_BIND
17802 && (gimple_seq_first_nondebug_stmt (seq)
17803 == gimple_seq_last_nondebug_stmt (seq)))
17805 outer_bind = as_a <gbind *> (outer_stmt);
17806 if (gimple_seq_first_stmt (seq) != outer_stmt
17807 || gimple_seq_last_stmt (seq) != outer_stmt)
17809 /* If there are debug stmts before or after outer_stmt, move them
17810 inside of outer_bind body. */
17811 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17812 gimple_seq second_seq = NULL;
17813 if (gimple_seq_first_stmt (seq) != outer_stmt
17814 && gimple_seq_last_stmt (seq) != outer_stmt)
17816 second_seq = gsi_split_seq_after (gsi);
17817 gsi_remove (&gsi, false);
17819 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17820 gsi_remove (&gsi, false);
17821 else
17823 gsi_remove (&gsi, false);
17824 second_seq = seq;
17825 seq = NULL;
17827 gimple_seq_add_seq_without_update (&seq,
17828 gimple_bind_body (outer_bind));
17829 gimple_seq_add_seq_without_update (&seq, second_seq);
17830 gimple_bind_set_body (outer_bind, seq);
17833 else
17834 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17836 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17838 /* If we had callee-copies statements, insert them at the beginning
17839 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17840 if (!gimple_seq_empty_p (parm_stmts))
17842 tree parm;
17844 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17845 if (parm_cleanup)
17847 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17848 GIMPLE_TRY_FINALLY);
17849 parm_stmts = NULL;
17850 gimple_seq_add_stmt (&parm_stmts, g);
17852 gimple_bind_set_body (outer_bind, parm_stmts);
17854 for (parm = DECL_ARGUMENTS (current_function_decl);
17855 parm; parm = DECL_CHAIN (parm))
17856 if (DECL_HAS_VALUE_EXPR_P (parm))
17858 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17859 DECL_IGNORED_P (parm) = 0;
17863 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17864 && gimplify_omp_ctxp)
17866 delete_omp_context (gimplify_omp_ctxp);
17867 gimplify_omp_ctxp = NULL;
17870 pop_gimplify_context (outer_bind);
17871 gcc_assert (gimplify_ctxp == NULL);
17873 if (flag_checking && !seen_error ())
17874 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17876 timevar_pop (TV_TREE_GIMPLIFY);
17877 input_location = saved_location;
17879 return outer_bind;
17882 typedef char *char_p; /* For DEF_VEC_P. */
17884 /* Return whether we should exclude FNDECL from instrumentation. */
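/* For example (illustrative): with
   -finstrument-functions-exclude-function-list=foo,bar any function
   whose printable name contains "foo" or "bar" as a substring is
   excluded, and -finstrument-functions-exclude-file-list matches
   against DECL_SOURCE_FILE the same way.  */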
17886 static bool
17887 flag_instrument_functions_exclude_p (tree fndecl)
17889 vec<char_p> *v;
17891 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17892 if (v && v->length () > 0)
17894 const char *name;
17895 int i;
17896 char *s;
17898 name = lang_hooks.decl_printable_name (fndecl, 1);
17899 FOR_EACH_VEC_ELT (*v, i, s)
17900 if (strstr (name, s) != NULL)
17901 return true;
17904 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17905 if (v && v->length () > 0)
17907 const char *name;
17908 int i;
17909 char *s;
17911 name = DECL_SOURCE_FILE (fndecl);
17912 FOR_EACH_VEC_ELT (*v, i, s)
17913 if (strstr (name, s) != NULL)
17914 return true;
17917 return false;
17920 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17921 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17922 the instrumentation function. If STMT is not NULL, it is a statement
17923 to be executed just before the call to the instrumentation function. */
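/* Illustrative shape of the emitted sequence when COND_VAR is given
   (a sketch, not verbatim GIMPLE):

	if (tmp_called == 0) goto L1; else goto L2;
	L1:
	[STMT]
	return_addr = __builtin_return_address (0);
	__cyg_profile_func_enter (this_fn_addr, return_addr);
	L2:

   where the hook name corresponds to FNCODE.  */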
17925 static void
17926 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17927 tree cond_var, gimple *stmt)
17929 /* The instrumentation hooks aren't going to call the instrumented
17930 function and the address they receive is expected to be matchable
17931 against symbol addresses. Make sure we don't create a trampoline,
17932 in case the current function is nested. */
17933 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17934 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17936 tree label_true, label_false;
17937 if (cond_var)
17939 label_true = create_artificial_label (UNKNOWN_LOCATION);
17940 label_false = create_artificial_label (UNKNOWN_LOCATION);
17941 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17942 label_true, label_false);
17943 gimplify_seq_add_stmt (seq, cond);
17944 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17945 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17946 NOT_TAKEN));
17949 if (stmt)
17950 gimplify_seq_add_stmt (seq, stmt);
17952 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17953 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17954 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17955 gimple_call_set_lhs (call, tmp_var);
17956 gimplify_seq_add_stmt (seq, call);
17957 x = builtin_decl_implicit (fncode);
17958 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17959 gimplify_seq_add_stmt (seq, call);
17961 if (cond_var)
17962 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
17965 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17966 node for the function we want to gimplify.
17968 The resulting sequence of GIMPLE statements is stored as the body
17969 of FNDECL via gimple_set_body; nothing is returned. */
17971 void
17972 gimplify_function_tree (tree fndecl)
17974 gimple_seq seq;
17975 gbind *bind;
17977 gcc_assert (!gimple_body (fndecl));
17979 if (DECL_STRUCT_FUNCTION (fndecl))
17980 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17981 else
17982 push_struct_function (fndecl);
17984 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17985 if necessary. */
17986 cfun->curr_properties |= PROP_gimple_lva;
17988 if (asan_sanitize_use_after_scope ())
17989 asan_poisoned_variables = new hash_set<tree> ();
17990 bind = gimplify_body (fndecl, true);
17991 if (asan_poisoned_variables)
17993 delete asan_poisoned_variables;
17994 asan_poisoned_variables = NULL;
17997 /* The tree body of the function is no longer needed, replace it
17998 with the new GIMPLE body. */
17999 seq = NULL;
18000 gimple_seq_add_stmt (&seq, bind);
18001 gimple_set_body (fndecl, seq);
18003 /* If we're instrumenting function entry/exit, then prepend the call to
18004 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
18005 catch the exit hook. */
18006 /* ??? Add some way to ignore exceptions for this TFE. */
18007 if (flag_instrument_function_entry_exit
18008 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
18009 /* Do not instrument extern inline functions. */
18010 && !(DECL_DECLARED_INLINE_P (fndecl)
18011 && DECL_EXTERNAL (fndecl)
18012 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
18013 && !flag_instrument_functions_exclude_p (fndecl))
18015 gimple_seq body = NULL, cleanup = NULL;
18016 gassign *assign;
18017 tree cond_var;
18019 /* If -finstrument-functions-once is specified, generate:
18021 static volatile bool C.0 = false;
18022 bool tmp_called;
18024 tmp_called = C.0;
18025 if (!tmp_called)
18027 C.0 = true;
18028 [call profiling enter function]
18031 without specific protection for data races. */
18032 if (flag_instrument_function_entry_exit > 1)
18034 tree first_var
18035 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
18036 VAR_DECL,
18037 create_tmp_var_name ("C"),
18038 boolean_type_node);
18039 DECL_ARTIFICIAL (first_var) = 1;
18040 DECL_IGNORED_P (first_var) = 1;
18041 TREE_STATIC (first_var) = 1;
18042 TREE_THIS_VOLATILE (first_var) = 1;
18043 TREE_USED (first_var) = 1;
18044 DECL_INITIAL (first_var) = boolean_false_node;
18045 varpool_node::add (first_var);
18047 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
18048 assign = gimple_build_assign (cond_var, first_var);
18049 gimplify_seq_add_stmt (&body, assign);
18051 assign = gimple_build_assign (first_var, boolean_true_node);
18054 else
18056 cond_var = NULL_TREE;
18057 assign = NULL;
18060 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
18061 cond_var, assign);
18063 /* If -finstrument-functions-once is specified, generate:
18065 if (!tmp_called)
18066 [call profiling exit function]
18068 without specific protection for data races. */
18069 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
18070 cond_var, NULL);
18072 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
18073 gimplify_seq_add_stmt (&body, tf);
18074 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
18076 /* Replace the current function body with the body
18077 wrapped in the try/finally TF. */
18078 seq = NULL;
18079 gimple_seq_add_stmt (&seq, new_bind);
18080 gimple_set_body (fndecl, seq);
18081 bind = new_bind;
18084 if (sanitize_flags_p (SANITIZE_THREAD)
18085 && param_tsan_instrument_func_entry_exit)
18087 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
18088 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
18089 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
18090 /* Replace the current function body with the body
18091 wrapped in the try/finally TF. */
18092 seq = NULL;
18093 gimple_seq_add_stmt (&seq, new_bind);
18094 gimple_set_body (fndecl, seq);
18097 DECL_SAVED_TREE (fndecl) = NULL_TREE;
18098 cfun->curr_properties |= PROP_gimple_any;
18100 pop_cfun ();
18102 dump_function (TDI_gimple, fndecl);
18105 /* Return a dummy expression of type TYPE in order to keep going after an
18106 error. */
18108 static tree
18109 dummy_object (tree type)
18111 tree t = build_int_cst (build_pointer_type (type), 0);
18112 return build2 (MEM_REF, type, t, t);
18115 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
18116 builtin function, but a very special sort of operator. */
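/* Sketch of the common path: a well-typed "va_arg (ap, int)" is
   rewritten into the internal function call

	VA_ARG (&ap, 0B, 0B)

   (shown schematically), which is expanded by a later pass once
   PROP_gimple_lva has been cleared below.  */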
18118 enum gimplify_status
18119 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
18120 gimple_seq *post_p ATTRIBUTE_UNUSED)
18122 tree promoted_type, have_va_type;
18123 tree valist = TREE_OPERAND (*expr_p, 0);
18124 tree type = TREE_TYPE (*expr_p);
18125 tree t, tag, aptag;
18126 location_t loc = EXPR_LOCATION (*expr_p);
18128 /* Verify that valist is of the proper type. */
18129 have_va_type = TREE_TYPE (valist);
18130 if (have_va_type == error_mark_node)
18131 return GS_ERROR;
18132 have_va_type = targetm.canonical_va_list_type (have_va_type);
18133 if (have_va_type == NULL_TREE
18134 && POINTER_TYPE_P (TREE_TYPE (valist)))
18135 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
18136 have_va_type
18137 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
18138 gcc_assert (have_va_type != NULL_TREE);
18140 /* Generate a diagnostic for requesting data of a type that cannot
18141 be passed through `...' due to type promotion at the call site. */
18142 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
18143 != type)
18145 static bool gave_help;
18146 bool warned;
18147 /* Use the expansion point to handle cases such as passing bool (defined
18148 in a system header) through `...'. */
18149 location_t xloc
18150 = expansion_point_location_if_in_system_header (loc);
18152 /* Unfortunately, this is merely undefined, rather than a constraint
18153 violation, so we cannot make this an error. If this call is never
18154 executed, the program is still strictly conforming. */
18155 auto_diagnostic_group d;
18156 warned = warning_at (xloc, 0,
18157 "%qT is promoted to %qT when passed through %<...%>",
18158 type, promoted_type);
18159 if (!gave_help && warned)
18161 gave_help = true;
18162 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
18163 promoted_type, type);
18166 /* We can, however, treat "undefined" any way we please.
18167 Call abort to encourage the user to fix the program. */
18168 if (warned)
18169 inform (xloc, "if this code is reached, the program will abort");
18170 /* Before the abort, allow the evaluation of the va_list
18171 expression to exit or longjmp. */
18172 gimplify_and_add (valist, pre_p);
18173 t = build_call_expr_loc (loc,
18174 builtin_decl_implicit (BUILT_IN_TRAP), 0);
18175 gimplify_and_add (t, pre_p);
18177 /* This is dead code, but go ahead and finish so that the
18178 mode of the result comes out right. */
18179 *expr_p = dummy_object (type);
18180 return GS_ALL_DONE;
18183 tag = build_int_cst (build_pointer_type (type), 0);
18184 aptag = build_int_cst (TREE_TYPE (valist), 0);
18186 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
18187 valist, tag, aptag);
18189 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18190 needs to be expanded. */
18191 cfun->curr_properties &= ~PROP_gimple_lva;
18193 return GS_OK;
18196 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18198 DST/SRC are the destination and source respectively. You can pass
18199 ungimplified trees in DST or SRC, in which case they will be
18200 converted to a gimple operand if necessary.
18202 This function returns the newly created GIMPLE_ASSIGN tuple. */
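/* Usage sketch (illustrative): callers may pass ungimplified operands,
   e.g.

	gimplify_assign (tmp, build2 (PLUS_EXPR, type, a, b), &seq);

   appends "tmp = a + b" to SEQ, gimplifying the RHS as needed, and
   returns the resulting GIMPLE_ASSIGN.  */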
18204 gimple *
18205 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18207 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18208 gimplify_and_add (t, seq_p);
18209 ggc_free (t);
18210 return gimple_seq_last_stmt (*seq_p);
18213 inline hashval_t
18214 gimplify_hasher::hash (const elt_t *p)
18216 tree t = p->val;
18217 return iterative_hash_expr (t, 0);
18220 inline bool
18221 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18223 tree t1 = p1->val;
18224 tree t2 = p2->val;
18225 enum tree_code code = TREE_CODE (t1);
18227 if (TREE_CODE (t2) != code
18228 || TREE_TYPE (t1) != TREE_TYPE (t2))
18229 return false;
18231 if (!operand_equal_p (t1, t2, 0))
18232 return false;
18234 /* Only allow them to compare equal if they also hash equal; otherwise
18235 results are nondeterministic, and we fail bootstrap comparison. */
18236 gcc_checking_assert (hash (p1) == hash (p2));
18238 return true;